index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
983,800 | fba955620bd391deb9a899daf6331c049d320fbf | from django.shortcuts import render
from django.http import HttpResponse
from .models import Produto, Categoria, Opcoes, Adicional
# Create your views here.
def home(request):
if not request.session.get('carrinho'):
request.session['carrinho'] = []
request.session.save()
produtos = Produto.objects.all()
categorias = Categoria.objects.all()
#print(produtos[0].categoria)
return render(request, 'home.html', {'produtos': produtos,
'carrinho': len(request.session['carrinho']),
'categorias': categorias,
})
def categorias(request, id):
if not request.session.get('carrinho'):
request.session['carrinho'] = []
request.session.save()
produtos = Produto.objects.filter(categoria_id = id)
categorias = Categoria.objects.all()
return render(request, 'home.html', {'produtos': produtos,
'carrinho': len(request.session['carrinho']),
'categorias': categorias})
def produto(request, id):
if not request.session.get('carrinho'):
request.session['carrinho'] = []
request.session.save()
erro = request.GET.get('erro')
produto = Produto.objects.filter(id=id)[0]
categorias = Categoria.objects.all()
return render(request, 'produto.html', {'produto': produto,
'carrinho': len(request.session['carrinho']),
'categorias': categorias,
'erro': erro}) |
983,801 | 6231ddc70d5b5b48abf972c37c875020f2bdd0a8 | #
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
from http import HTTPStatus
from unittest.mock import MagicMock
import pytest
import requests
from source_onesignal.streams import OnesignalStream
@pytest.fixture
def patch_base_class(mocker):
# Mock abstract methods to enable instantiating abstract class
mocker.patch.object(OnesignalStream, "path", "v0/example_endpoint")
mocker.patch.object(OnesignalStream, "primary_key", "test_primary_key")
mocker.patch.object(OnesignalStream, "__abstractmethods__", set())
@pytest.fixture
def stream(patch_base_class):
args = {"authenticator": None, "config": {"user_auth_key": "", "start_date": "2021-01-01T00:00:00Z", "outcome_names": ""}}
return OnesignalStream(**args)
def test_next_page_token(stream):
inputs = {"response": MagicMock()}
expected_token = None
assert stream.next_page_token(**inputs) == expected_token
def test_parse_response(stream, requests_mock):
requests_mock.get("https://dummy", json=[{"id": 123, "basic_auth_key": "xx"}])
resp = requests.get("https://dummy")
inputs = {"response": resp, "stream_state": MagicMock()}
expected_parsed_object = {"id": 123, "basic_auth_key": "xx"}
assert next(stream.parse_response(**inputs)) == expected_parsed_object
def test_request_headers(stream):
inputs = {"stream_slice": None, "stream_state": None, "next_page_token": None}
expected_headers = {}
assert stream.request_headers(**inputs) == expected_headers
def test_http_method(stream):
expected_method = "GET"
assert stream.http_method == expected_method
@pytest.mark.parametrize(
("http_status", "should_retry"),
[
(HTTPStatus.OK, False),
(HTTPStatus.BAD_REQUEST, False),
(HTTPStatus.TOO_MANY_REQUESTS, True),
(HTTPStatus.INTERNAL_SERVER_ERROR, True),
],
)
def test_should_retry(stream, http_status, should_retry):
response_mock = MagicMock()
response_mock.status_code = http_status
assert stream.should_retry(response_mock) == should_retry
def test_backoff_time(stream):
response_mock = MagicMock()
expected_backoff_time = 60
assert stream.backoff_time(response_mock) == expected_backoff_time
|
983,802 | b4f81aaa20ffbca5b8b56a5338b3fe3121bf5940 | from Classes.Card import Card
from random import shuffle
class Deck:
def __init__(self):
self.cards = []
for suit in Card.suits:
for card in Card.card_values:
self.cards.append(Card(card, suit))
def __repr__(self):
return f'deck of {len(self.cards)} cards'
def count(self):
return len(self.cards)
def _deal(self, num):
count = self.count()
actual = min([count,num])
if count == 0:
raise ValueError("All cards have been dealt!")
cards = self.cards[-actual:]
self.cards = self.cards[:-actual]
return cards
def deal_card(self):
return self._deal(1)[0]
def deal_hand(self, hand_size):
return self._deal(hand_size)
def shuffle(self):
if self.count() < 52:
raise ValueError("you must have a full deck to shuffle")
shuffle(self.cards)
|
983,803 | 0dd01bc2e646a4baae1248bb19a1b30da657a831 | from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('register', views.register, name='register'),
path('recruiterLanding', views.recruiterLanding, name='recruiterLanding'),
path('recruiterRegistration', views.recruiterRegistration, name='recruiterRegistration'),
path('studentProfile1', views.studentProfile1, name='studentProfile1'),
path('availableInternships', views.availableInternships, name='availableInternships'),
]
|
983,804 | 9bed4a3a8e5ab6318b5b506f01ddd7bf259c262e | # Generated by Django 2.0.5 on 2018-06-01 19:23
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import tinymce.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Album',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nombre', models.CharField(max_length=200, verbose_name='Titulo')),
('descripcion', tinymce.models.HTMLField()),
('portada', models.FileField(upload_to='fotos/')),
('pub_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Fecha de Publicacion')),
],
),
migrations.CreateModel(
name='Foto',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('foto', models.FileField(upload_to='fotos/')),
('album', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fotos.Album')),
],
),
]
|
983,805 | 1f49efe68c23cb4abbad849f5eae2b64e2a4e89c | from __future__ import division
import os, cv2, sys, re
import numpy as np
import keras.backend as K
from keras.optimizers import SGD
from keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard, ReduceLROnPlateau
import tensorflow as tf
from zk_config import *
from zk_utilities import *
from zk_models import *
def generator_test(b_s, imgs_test_path):
images = [imgs_test_path + f for f in os.listdir(imgs_test_path) if (f.endswith('.jpg') or f.endswith('.jpeg') or f.endswith('.png'))]
images.sort()
counter = 0
while True:
X_img = preprocess_images(images[counter:counter + b_s], shape_r, shape_c)
X_cb = preprocess_priors(b_s, shape_r_out, shape_c_out, nb_gaussian)
yield [X_img, X_cb]
counter = (counter + b_s) % len(images)
if __name__ == '__main__':
config = tf.ConfigProto()
config.gpu_options.visible_device_list = "0"
K.set_session(tf.Session(config=config))
width = 2048
height = 1024
dataset = 'Images'
method_name = 'Results_' + task_type
model_path = wkdir + '/Models/model4img-'+ task_type +'.h5'
output_folder = wkdir + '/DataSet/Images/' + method_name + '/'
if not os.path.exists(output_folder):
os.makedirs(output_folder)
print("Build Static SalCNN Model: " + task_type)
model = salcnn_Static_Net(img_cols=shape_c, img_rows=shape_r, img_channels=3)
model.load_weights(model_path)
# for many image saliency prediction
imgs_test_path = wkdir + '/DataSet/Images/Stimuli/'
file_names = [f for f in os.listdir(imgs_test_path) if (f.endswith('.jpg') or f.endswith('.jpeg') or f.endswith('.png'))]
file_names.sort()
nb_imgs_test = len(file_names)
print("Predict saliency maps for " + imgs_test_path)
predictions = model.predict_generator(generator_test(b_s=bs_st_c2d, imgs_test_path=imgs_test_path), math.ceil(nb_imgs_test / bs_st_c2d))
if with_CB:
cmap = cv2.imread(wkdir + '/' + task_type + '_CB.png', -1)
get_file_info = re.compile("(\w+\d{1,2})_(\d+)x(\d+)")
for pred, imgname in zip(predictions[0], file_names):
name, _, _ = get_file_info.findall(imgname.split(os.sep)[-1])[0]
predimg = pred[:, :, 0]
res = postprocess_predictions(predimg, height, width)
cv2.imwrite(output_folder + name + '_woCB.png', res.astype(int))
if with_CB:
res = addCB(res,cmap)
cv2.imwrite(output_folder + name + '.png', res.astype(int))
with open(output_folder + name + '.bin', "wb") as f:
f.write(res)
# for single image saliency prediction
# image_path = wkdir + '/DataSet/Images/Stimuli/P28_4000x2000.jpg'
# X_img = preprocess_images([image_path], shape_r, shape_c)
# X_cb = preprocess_priors(1, shape_r_out, shape_c_out, nb_gaussian)
# X_input = [X_img, X_cb]
# prediction = model.predict(X_input,1)[0]
#
# get_file_info = re.compile("(\w+\d{1,2})_(\d+)x(\d+)")
# name, width, height = get_file_info.findall(image_path.split(os.sep)[-1])[0]
# res = postprocess_predictions(prediction[0], int(height), int(width))
# cv2.imwrite(output_folder + name + '.png', res.astype(int))
|
983,806 | 8e97ee334ea13e0bfbd463c7100743f4a75cee5d | v=float(input('Qual o valor da mercadoria que o cliente está comprando?'))
print('Ele está comprando no valor de {} reias? \ [1] - SIM \[2] - NÃO:'.format(v))
op=int(input('Confirme por favor!!'))
if op==1:
print('Escolha a forma de pagamento:\n A vista / Cheque [1]\n A vista no cartão [2]\n No credito 2x [3]\n No credito 3x ou mais [4]')
p=int(input('Informe a forma de pagamento:'))
if p == 1:
print('O valor terá um desconto de 10% e o cliente pagará {} reias'.format(v*0.9))
elif p== 2:
print('O valor téra um desconto de 5% e o cliente paragá {} reias'.format(v*0.95))
elif p==3:
print('O valor não terá desconto,será divido de 2 x e cada parcela será {} reias sem juros totalizando o montante de {} reias'.format(v/2,v))
elif p==4:
print('O valor terá um acrecimo de 30%')
par=int(input('Informe quantas parcelas o cliente vai querer:'))
if par==3 or par>3:
print('O valor total da compra ficará em {} reias e cada parcela sairá a {} reias'.format(v*1.30,v*1.30/par))
else:
print('FIM')
else:
print('FIM DA COMPRA')
else:
print('Informe o valor correto')
|
983,807 | ec188fc5cab2861afc7e0cecb2b199ea270db6c8 | """
Module description:
"""
__version__ = '0.3.1'
__author__ = 'Vito Walter Anelli, Claudio Pomo'
__email__ = 'vitowalter.anelli@poliba.it, claudio.pomo@poliba.it'
import tensorflow as tf
import numpy as np
import random
class Sampler():
def __init__(self, indexed_ratings, m, transactions, random_seed=42):
np.random.seed(random_seed)
random.seed(random_seed)
self._transactions = transactions
self._indexed_ratings = indexed_ratings
self._users = list(self._indexed_ratings.keys())
self._nusers = len(self._users)
self._items = list({k for a in self._indexed_ratings.values() for k in a.keys()})
self._nitems = len(self._items)
self._ui_dict = {u: list(set(indexed_ratings[u])) for u in indexed_ratings}
self._lui_dict = {u: len(v) for u, v in self._ui_dict.items()}
self._m = m
self._pos = self._pos_generator(self._ui_dict)
@staticmethod
def _pos_generator(ui_dict):
# ui_dict = self._ui_dict
pos = {(u, i, 1) for u, items in ui_dict.items() for i in items}
while True:
for u, i, _ in pos:
yield u, i
# @staticmethod
def _generator(self, num_samples: int):
r_int = np.random.randint
n_items = self._nitems
ui_dict = self._ui_dict
for _ in range(num_samples):
u, i = next(self._pos)
ui = ui_dict[u]
for _ in range(self._m):
j = r_int(n_items)
while j in ui:
j = r_int(n_items)
yield u, i, j
def create_dataset(self, batch_size=512, random_seed=42):
data = tf.data.Dataset.from_generator(generator=self._generator,
output_shapes=((), (), ()),
output_types=(tf.int64, tf.int64, tf.int64),
args=(self._transactions * self._m,))
data = data.batch(batch_size=batch_size)
data = data.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
# data._indexed_ratings = indexed_ratings
# data._users = list(data._indexed_ratings.keys())
# data._nusers = len(data._users)
# data._items = list({k for a in data._indexed_ratings.values() for k in a.keys()})
# data._nitems = len(data._items)
# data._ui_dict = {u: list(set(indexed_ratings[u])) for u in indexed_ratings}
# data._lui_dict = {u: len(v) for u, v in data._ui_dict.items()}
# data._m = m
# data._pos_generator = cls._pos_generator
# data._pos = self._pos_generator(data._ui_dict)
return data |
983,808 | b658e4f75ef6b42aeba131d870d123616a32870f | """Tagman admin classes and also helpers/mixins for users of Tagman"""
from django.contrib import admin
from tagman.models import TagGroup
from tagman.models import Tag
from django import forms
class TaggedContentItemForm(forms.ModelForm):
"""
Form for use on model admins that have a 'tags' field in which you want
a nice filtered list without system tags polluting it. Typical for all
TaggedContentItem models.
"""
def __init__(self, *args, **kwargs):
"""
Find all fields in a page ending in 'tags', assume that they are a
tags M2M and reset the widget's choices to a filtered list that
excludes system tags.
This is very crude and rather inelegant but it solved a particular
problem. It is suggested this is used with care, or used as an
example of how to manage filtering if you'd like to do some such
in another way.
"""
super(TaggedContentItemForm, self).__init__(*args, **kwargs)
wtf = Tag.objects.filter(group__system=False)
wlist = [w for t, w in self.fields.items() if t.endswith("tags")]
choices = []
for choice in wtf:
choices.append((choice.id, str(choice)))
[setattr(w, 'choices', choices) for w in wlist]
class TaggedContentAdminMixin(object):
"""
When this is the first in the list of base classes for the admin class
of a model that has tags it will ensure your 'tags' are filtered.
"""
form = TaggedContentItemForm
class TagGroupAdmin(admin.ModelAdmin):
list_display = ["name", "slug", "system"]
search_fields = ["name"]
list_filter = ["system"]
prepopulated_fields = {"slug": ("name",)}
class TagAdmin(admin.ModelAdmin):
list_display = ["name", "slug", "group", "system"]
search_fields = ["name"]
list_filter = ["group"]
prepopulated_fields = {"slug": ("name",)}
def system(self, _object):
return _object.system
system.short_description = u'System'
system.boolean = True
def queryset(self, request):
# use our manager, rather than the default one
qs = self.model.objects.get_query_set()
# we need this from the superclass method
# otherwise we might try to *None, which is bad ;)
ordering = self.ordering or ()
if ordering:
qs = qs.order_by(*ordering)
return qs
try:
admin.site.register(Tag, TagAdmin)
admin.site.register(TagGroup, TagGroupAdmin)
except admin.sites.AlreadyRegistered:
pass
|
983,809 | 4586513d6d74715e309cf0815ceed5111df972be | import PyPDF2
import os
os.chdir('/home/daniel/Downloads')
pdfFile = open('meetingminutes1.pdf', 'rb')
reader = PyPDF2.PdfFileReader(pdfFile)
print(reader.numPages)
page = reader.getPage(0)
print(page.extractText())
# Print entire PDF
for pageNum in range(reader.numPages):
print(reader.getPage(pageNum).extractText())
pdfFile.close()
|
983,810 | 275ccc33a076f595ec09027b03103ee4c9110618 | # /urs/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import subprocess
import stat
import shutil
import urllib2
import requests
import zipfile
edk2platformurl="https://github.com/tianocore/edk2-platforms.git -b devel-IntelAtomProcessorE3900"
edk2url=r"-b vUDK2018 https://github.com/tianocore/edk2.git"
binaryurl="https://firmware.intel.com/sites/default/files/intelatome3900-0.71-binary.objects.zip"
fspurl="https://github.com/IntelFsp/FSP.git"
root_path=os.getcwd()
edk2path=os.path.join(root_path,"edk2")
edk2platformpath=os.path.join(root_path,"edk2-platforms")
fspdownloadpath=os.path.join(root_path,"FSP")
fsp_path=os.path.join(edk2platformpath,"Silicon\\BroxtonSoC\\BroxtonFspPkg")
class BasicFunctionLib:
"This class mainly provides all individal function"
def __init__(self):
pass
# Use request mode to download file
def rfiles(self,url,name,attr="zip"):
f=requests.get(url)
filename=str(name+"."+attr)
with open(filename,"wb") as code:
code.write(f.content)
code.close()
# Use urllib2 method to download file
def u2files(self,url,name,attr="zip"):
req = urllib2.Request(url)
f = urllib2.urlopen(req)
filename = str(name + "." + attr)
with open(filename, "wb") as code:
code.write(f.read())
code.close()
def filezip(self):
file_zip = zipfile.ZipFile("binary", 'w', zipfile.ZIP_DEFLATED)
file_zip.write(filename, file_url)
file_zip.close()
def fileextract(self,name):
file_zip = zipfile.ZipFile(name, 'r')
for file in file_zip.namelist():
file_zip.extract(file, "binary")
file_zip.close()
os.remove(name)
def binarycp(self,filename,targetpath):
binarypath = os.path.join(root_path, "binary")
sourcepath = os.path.join(binarypath, filename)
if True:
subprocess.check_call("xcopy /E /Y %s %s" % (sourcepath, targetpath), shell=True)
shutil.rmtree(binarypath)
def systemdetect(self):
if sys.platform=="win32":
return "windows"
elif sys.platform=="linux":
return "linux"
else:
print "Please switch the build environmnt to linux or windows system"
raise Exception("Not support this system")
def delete_file(self):
for edkpath in (edk2path,edk2platformpath):
if os.path.exists(edkpath):
for path,dirs,names in os.walk(edkpath):
for eachname in names:
absolutepath=os.path.join(path,eachname)
if self.systemkey=="windows":
os.chmod(absolutepath,stat.S_IWRITE)
os.remove(absolutepath)
else:
os.chmod(absolutepath, stat.S_IRWXU)
os.remove(absolutepath)
shutil.rmtree(edkpath)
def maingitclone(self):
self.systemkey = self.systemdetect()
self.delete_file()
for downloadmeter in (edk2url,edk2platformurl,fspurl):
self.checkcode = 1
while self.checkcode:
self.checkcode = subprocess.check_call("git clone --depth=1 %s" %downloadmeter)
shutil.copytree(os.path.join(fspdownloadpath,"ApolloLakeFspBinPkg"),os.path.join(fsp_path,"ApolloLakeFspBinPkg"))
if os.path.exists(fspdownloadpath):
for path, dirs, names in os.walk(fspdownloadpath):
for eachname in names:
absolutepath = os.path.join(path, eachname)
if self.systemkey == "windows":
os.chmod(absolutepath, stat.S_IWRITE)
os.remove(absolutepath)
else:
os.chmod(absolutepath, stat.S_IRWXU)
os.remove(absolutepath)
shutil.rmtree(fspdownloadpath)
def main():
buildbasic=BasicFunctionLib()
buildbasic.u2files(binaryurl,"test")
buildbasic.fileextract("test.zip")
buildbasic.binarycp("IntelAtomE3900-0.71-Binary.Objects",edk2platformpath)
if __name__=="__main__":
main() |
983,811 | b008dbe2ce42d7ab79f43474fd07d88ce762ccca | # -*- coding: UTF-8 -*-
from flask import render_template, redirect, url_for
from app import app
from forms import GuestBookForm
from models import db, GuestBook
@app.route('/guestbook/delete/<gbid>')
def guestbook_delete(gbid):
"""
根据gbid主键值删除数据
"""
getdata = GuestBook.query.filter_by(id=gbid).first()
if not getdata is None:
db.session.delete(getdata)
db.session.commit()
return redirect(url_for('guestbook'))
@app.route('/guestbook', methods=['GET', 'POST'])
def guestbook():
form = GuestBookForm()
if form.validate_on_submit():
nickname = form.nickname.data
text = form.text.data
email = form.email.data
guesttext = GuestBook(nickname, text, email)
db.session.add(guesttext)
db.session.commit()
return redirect(url_for('guestbook'))
# 查询数据
guestbook = GuestBook.query.all()
# guestbook = GuestBook.query.filter_by(id=2).first()
# guestbook = GuestBook.query.filter_by(nickname='aaa1')
# 修改数据
# guestbook = GuestBook.query.filter_by(id=2).first()
# guestbook.nickname='aaa2'
# guestbook.email='aaa2@qq.com'
# db.session.add(guestbook)
# db.session.commit()
# 批量修改数据
# guestbook = GuestBook.query.all()
# for data in guestbook:
# if data.id == 2:
# data.nickname = data.nickname+'3'
# db.session.add(data)
# db.session.commit()
# print guestbook
return render_template('guestbook.html', title='留言簿', form=form, guestbook=guestbook)
|
983,812 | a0d2da426015c6f0eaf8434d1066a5a183456058 | ''' Escreva um programa que leia um número inteiro qualquer e peça para o usuário escolher
qual será a base de conversão:
- 1 para binário
- 2 para octal
- 3 para hexadecimal '''
n = int(input('Digite um número inteiro: '))
print('''escolha uma das bases para conversão:
[1] converter para BANÁRIO
[2] converter para OCTAL
[3] converter para HEXADECIMAL''')
opcao = int(input('Sua opção: '))
if opcao == 1:
print(f'{n} convertido para BINÁRIO é {bin(n)[2:]}') # Usando fatiamento para retirar os dois primeiros digitos ([2:])
elif opcao == 2:
print(f'{n} convertido para OCTAL é {oct(n)[2:]}')
elif opcao == 3:
print(f'{n} convertido para HEXADECIMAL é {hex(n)[2:]}')
else:
print('Opção inválida, tente novamente.')
|
983,813 | b5fca80f45049de3f05fdac488ca85bda1ae3d6a | from models import *
from django.shortcuts import *
from django.db.models import *
from datetime import *
years = Player.objects.all().values('year').distinct().order_by('-year')
states = Player.objects.all().values('state').distinct().order_by('state')
# All of the Signing Day pages
def Signing(request):
recentlist = Player.objects.all().filter(year='2011').order_by('-lastchange')
topschools = Player.objects.all().values('highschool', 'city').annotate(schoolcount=Count('highschool')).order_by('-schoolcount')[:10]
yearlist = Player.objects.all().values('year').distinct()
topschoolcount = topschools[0]
headline = RecruitHeadline.objects.all()[:1]
dictionaries = { 'headline':headline, 'recentlist': recentlist, 'topschools': topschools, 'topschoolcount': topschoolcount, 'yearlist': yearlist, }
return render_to_response('huskers/signing-day-2011.html', dictionaries)
def Signing2012(request):
recentlist = Player.objects.all().filter(status='Scholarship').filter(year='2012').order_by('last_name')
headline = RecruitHeadline.objects.all()[1:2]
dictionaries = { 'headline':headline, 'recentlist': recentlist, }
return render_to_response('huskers/signing-day-2012.html', dictionaries)
def Signing2013(request):
recentlist = Player.objects.all().filter(status='Scholarship').filter(year='2013').order_by('last_name')
headline = RecruitHeadline.objects.filter(tags__icontains='signing-day-2013')
video = RecruitHeadline.objects.filter(tags__icontains='signing-day-video-2013')
rankings = RecruitHeadline.objects.filter(tags__icontains='signing-day-rankings-2013')
dictionaries = { 'headline':headline, 'recentlist': recentlist, 'video': video, 'rankings': rankings, }
return render_to_response('huskers/signing-day-2013.html', dictionaries)
def Signing2014(request):
recentlist = Player.objects.all().filter(status='Scholarship').filter(year='2014').order_by('last_name')
headline = RecruitHeadline.objects.filter(tags__icontains='signing-day-2014').order_by('-priority')
video = RecruitHeadline.objects.filter(tags__icontains='signing-day-video-2014')
rankings = RecruitHeadline.objects.filter(tags__icontains='signing-day-rankings-2014')
photos = RecruitHeadline.objects.filter(tags__icontains='signing-day-photos-2014')
schedule = RecruitHeadline.objects.filter(tags__icontains='signing-day-schedule-2014')
dictionaries = { 'headline':headline, 'recentlist': recentlist, 'video': video, 'rankings': rankings, 'photos': photos, 'schedule': schedule, }
return render_to_response('huskers/signing-day-2014.html', dictionaries)
def Signing2015(request):
recentlist = Player.objects.all().filter(status='Scholarship').filter(year='2015').order_by('last_name')
headline = RecruitHeadline.objects.filter(tags__icontains='signing-day-2015').order_by('-priority')
top_story = RecruitHeadline.objects.filter(tags__icontains='signing-day-2015-top-story').order_by('-priority')
video = RecruitHeadline.objects.filter(tags__icontains='signing-day-video-2015').order_by('-priority')
top_video = RecruitHeadline.objects.filter(tags__icontains='signing-day-video-2015-main').order_by('-priority')[:1]
rankings = RecruitHeadline.objects.filter(tags__icontains='signing-day-rankings-2015')
photos = RecruitHeadline.objects.filter(tags__icontains='signing-day-photos-2015')
schedule = RecruitHeadline.objects.filter(tags__icontains='signing-day-schedule-2015')
dictionaries = { 'headline':headline, 'top_story': top_story, 'recentlist': recentlist, 'video': video, 'top_video': top_video, 'rankings': rankings, 'photos': photos, 'schedule': schedule, }
return render_to_response('huskers/signing-day-2015.html', dictionaries)
def Signing2016(request):
recentlist = Player.objects.all().filter(status='Scholarship').filter(year='2016').exclude(transfer_status='University').order_by('last_name')
headline = RecruitHeadline.objects.filter(tags__icontains='signing-day-2016').exclude(tags__icontains='signing-day-2016-top-story').order_by('-priority')
top_story = RecruitHeadline.objects.filter(tags__icontains='signing-day-2016-top-story').order_by('-priority')
video = RecruitHeadline.objects.filter(tags__icontains='signing-day-video-2016').order_by('-priority')
top_video = RecruitHeadline.objects.filter(tags__icontains='signing-day-video-2016-main').order_by('-priority')[:1]
rankings = RecruitHeadline.objects.filter(tags__icontains='signing-day-rankings-2016')
photos = RecruitHeadline.objects.filter(tags__icontains='signing-day-photos-2016')
schedule = RecruitHeadline.objects.filter(tags__icontains='signing-day-schedule-2016')
story_count = headline.count() + 1
dictionaries = { 'headline':headline, 'top_story': top_story, 'recentlist': recentlist, 'video': video, 'top_video': top_video, 'rankings': rankings, 'photos': photos, 'schedule': schedule, 'story_count': story_count, }
return render_to_response('huskers/signing-day-2016.html', dictionaries)
def Splash(request):
recentlist = Player.objects.all().filter(status='Scholarship').filter(year='2014').order_by('-lastchange')
dictionaries = { 'recentlist': recentlist, }
return render_to_response('huskers/signing-day-splash.html', dictionaries)
def YearXML(request, year):
statelist = Player.objects.filter(year=year).values('state').filter(Q(status='Scholarship') | Q(status__isnull=True)).exclude(transfer_status='University').annotate(statecount=Count('state'))
return render_to_response('huskers/main.xml', { "statelist": statelist, })
def StateXML(request):
statelist = Player.objects.all().values('state').filter(Q(status='Scholarship') | Q(status__isnull=True)).exclude(transfer_status='University').annotate(statecount=Count('state'))
return render_to_response('huskers/main.xml', { "statelist": statelist, }, mimetype="application/xhtml+xml")
def AllYears(request):
allyears = Player.objects.filter(Q(status='Scholarship') | Q(status__isnull=True)).order_by('-year').values('year').annotate(scholarshipcount=Count('year'))
walkons = Player.objects.filter(status='Walk-on').order_by('-year').values('year').annotate(walkoncount=Count('year'))
transfers = Player.objects.filter(transfer_status='University').order_by('-year').values('year').annotate(transfercount=Count('year'))
return render_to_response('huskers/years-all.html', { "allyears": allyears, "walkons": walkons, "transfers": transfers, })
def Year(request, year):
title = year
scholarships = Player.objects.filter(year=year).filter(Q(status__isnull=True) | Q(status="Scholarship")).exclude(transfer_status='University').order_by("last_name", "first_name")
ratings = scholarships.aggregate(two_star=Sum(Case(When(stars_247c="2 stars", then=1),output_field=IntegerField())), three_star=Sum(Case(When(stars_247c="3 stars", then=1),output_field=IntegerField())), four_star=Sum(Case(When(stars_247c="4 stars", then=1),output_field=IntegerField())), five_star=Sum(Case(When(stars_247c="5 stars", then=1),output_field=IntegerField())))
walkons = Player.objects.filter(year=year).filter(status="Walk-on").order_by("last_name", "first_name")
transfers = Player.objects.filter(year=year).filter(transfer_status="University").order_by("last_name", "first_name")
targets = Player.objects.filter(year=year).filter(status="Target").order_by("last_name", "first_name")
return render_to_response('huskers/yearpage2.html', { "title": title, "scholarships": scholarships, "walkons": walkons, "targets": targets, "years": years, "states": states, "ratings": ratings, "transfers": transfers, })
def YearWidget(request, year):
year = year
scholarships = Player.objects.filter(year=year).filter(Q(status__isnull=True) | Q(status="Scholarship")).exclude(transfer_status='University').order_by("last_name", "first_name")
return render_to_response('huskers/year-widget.html', { "year": year, "scholarships": scholarships, })
#def State(request, state):
# title = state
# statelist = Player.objects.filter(state=state).order_by('-year', 'last_name')
# playercount = Player.objects.filter(state=state).filter(Q(status='Scholarship') | Q(status__isnull=True)).aggregate(statecount=Count('id'))
# return render_to_response('huskers/statepage.html', { "title": title, "statelist": statelist, "playercount": playercount, "years": years, "states": states, })
def StateMap(request):
statelist = Player.objects.all().values('state').filter(Q(status='Scholarship') | Q(status__isnull=True)).exclude(transfer_status='University').annotate(statecount=Count('state'))
return render_to_response('huskers/state-map.js', { "statelist": statelist, })
def State2(request, state):
title = state
players = Player.objects.filter(state=state).exclude(status="Target").order_by('last_name', 'first_name')
scholarships = players.filter(status="Scholarship")
walkons = players.filter(status="Walk-on")
transfers = players.filter(transfer_status="University")
positions = players.values("position").distinct().annotate(positioncount=Count('position')).order_by('-positioncount')
state_rank = Player.objects.exclude(status="Target").order_by('state').values('state').annotate(statecount=Count('state')).order_by('-statecount')
return render_to_response('huskers/statepage2.html', { "title": title, "players": players, "scholarships": scholarships, "walkons": walkons, 'positions': positions, "years": years, "states": states, "transfers": transfers, "state_rank": state_rank, })
def AllStates(request):
scholarships = Player.objects.filter(status="Scholarship").order_by('state').values('state').annotate(statecount=Count('state'))
transfers = Player.objects.filter(transfer_status="University").order_by('state').values('state').annotate(transfercount=Count('state'))
walkons = Player.objects.filter(status="Walk-on").order_by('state').values('state').annotate(walkoncount=Count('state'))
return render_to_response('huskers/states-all.html', { "scholarships": scholarships, "walkons": walkons, "transfers": transfers, })
#def Search(request):
# query = request.GET.get('q', '')
# if query:
# qset = (
# Q(player_name__icontains=query)
# )
# results = Player.objects.filter(qset)
# else:
# results = []
# return render_to_response("huskers/search.html", {
# "results": results,
# "query": query,
# })
def Search2(request):
query = request.GET.get('q', '')
if query:
qset = (
Q(player_name__icontains=query)
)
results = Player.objects.filter(qset)
else:
results = []
return render_to_response("huskers/search-new.html", {
"results": results,
"query": query,
"years": years,
"states": states,
})
def PlayerPage(request, playername):
player = Player.objects.get(nameslug=playername)
yearlist = Player.objects.all().values('year').distinct().order_by('-year')
same_year = Player.objects.filter(year=player.year).exclude(status="Target").order_by('-year')[:10]
same_position = Player.objects.filter(position=player.position).exclude(status="Target").order_by('-year')[:10]
same_state = Player.objects.filter(state=player.state).exclude(status="Target").order_by('-year')[:10]
return render_to_response('huskers/player2.html', { "player": player, "yearlist": yearlist, "same_position": same_position, "same_year": same_year, "same_state": same_state, "years": years, "states": states, })
def Recruiting(request):
recentlist = Player.objects.all().filter(year='2017').exclude(transfer_status="University").order_by('last_name', 'first_name')
headlines = RecruitHeadline.objects.filter(tags__icontains='recruiting').order_by('priority').order_by('-priority')[:15]
topschools = Player.objects.all().values('highschool', 'city').annotate(schoolcount=Count('highschool')).order_by('-schoolcount')[:10]
topschoolcount = topschools[0]
yearlist = Player.objects.all().values('year').distinct().order_by('-year')
dictionaries = { 'recentlist': recentlist, 'headlines': headlines, 'topschools': topschools, 'topschoolcount': topschoolcount, 'yearlist': yearlist, }
return render_to_response('huskers/recruiting.html', dictionaries)
def RecruitingMapWidget(request):
recentlist = Player.objects.all().filter(year='2017').exclude(transfer_status="University").order_by('last_name', 'first_name')
dictionaries = { 'recentlist': recentlist, }
return render_to_response('huskers/recruit-map-widget.html', dictionaries)
def RecruitingWidget(request):
headlines = RecruitHeadline.objects.filter(tags__icontains='recruiting').order_by('priority').order_by('-priority')[:1]
dictionaries = { 'headlines': headlines, }
return render_to_response('huskers/recruiting-widget.html', dictionaries)
def TargetsSam(request, year):
targets = Player.objects.all().filter(year=year).order_by('last_name')
year = year
dictionaries = { 'targets': targets, 'year': year, }
return render_to_response('huskers/targets-sam.html', dictionaries)
def Targets(request, year):
targets = Player.objects.all().filter(year=year).order_by('last_name', 'first_name')
year = year
dictionaries = { 'targets': targets, 'year': year, "years": years, "states": states, }
return render_to_response('huskers/targets.html', dictionaries)
def DraftPicks(request):
all_picks = Player.objects.all().filter(draft_year__isnull=False).order_by('-draft_year__year', 'draft_overall_pick')
dictionaries = { 'all_picks': all_picks, }
return render_to_response('huskers/draft-all.html', dictionaries)
def DraftSingleTeam(request, slug):
team = DraftTeam.objects.get(team_name_slug=slug)
players = Player.objects.filter(draft_team__isnull=False).filter(draft_team__team_name_slug=slug).order_by('-draft_year__year', 'draft_overall_pick')
dictionaries = { 'team': team, 'players': players, }
return render_to_response('huskers/draft-team.html', dictionaries)
def DraftSingleYear(request, year):
year = Draft.objects.get(year=year)
players = Player.objects.filter(draft_team__isnull=False).filter(draft_year__year=year).order_by('-draft_year__year', 'draft_overall_pick')
dictionaries = { 'year': year, 'players': players, }
return render_to_response('huskers/draft-year.html', dictionaries)
def BadgesAll(request):
badges = Badge.objects.all().order_by('name')
dictionaries = { 'badges': badges, }
return render_to_response('huskers/badges-all.html', dictionaries)
def BadgesSingle(request, nameslug):
this_badge = Badge.objects.get(nameslug=nameslug)
players = Player.objects.filter(badges__nameslug=nameslug)
dictionaries = { 'this_badge': this_badge, 'players': players, }
return render_to_response('huskers/badges-single.html', dictionaries)
def Recruiters(request):
list = Recruiter.objects.all().order_by('last_name')
for coach in list:
coach.player_count = Player.objects.filter(status="Scholarship").filter(Q(recruiter_1__nameslug=coach.nameslug) | Q(recruiter_2__nameslug=coach.nameslug)).count()
dictionaries = { 'list': list, }
return render_to_response('huskers/recruiter-all.html', dictionaries)
def RecruiterSingle(request, nameslug):
this_recruiter = Recruiter.objects.get(nameslug=nameslug)
players = Player.objects.filter(status="Scholarship").filter(Q(recruiter_1__nameslug=nameslug) | Q(recruiter_2__nameslug=nameslug)).order_by('-year')
dictionaries = { 'this_recruiter': this_recruiter, 'players': players, }
return render_to_response('huskers/recruiter-single.html', dictionaries)
def BigBoard(request, year):
year = year
players = Player.objects.filter(year=year).filter(top_target=True).exclude(hard_commit_elsewhere=True).exclude(committed_school='Nebraska').order_by('last_name', 'first_name')
got_away = Player.objects.filter(year=year).filter(top_target=True).filter(hard_commit_elsewhere=True).order_by('last_name', 'first_name')
commits = Player.objects.filter(year=year).filter(committed_school='Nebraska').order_by('last_name', 'first_name')
ratings = players.aggregate(two_star=Sum(Case(When(stars_247c="2 stars", then=1),output_field=IntegerField())), three_star=Sum(Case(When(stars_247c="3 stars", then=1),output_field=IntegerField())), four_star=Sum(Case(When(stars_247c="4 stars", then=1),output_field=IntegerField())), five_star=Sum(Case(When(stars_247c="5 stars", then=1),output_field=IntegerField())))
dictionaries = { 'year': year, 'players': players, 'got_away': got_away, 'ratings': ratings, 'commits': commits, }
return render_to_response('huskers/yearpage-bigboard.html', dictionaries)
def Visits(request, year, month, day):
date = datetime(int(year),int(month),int(day))
players = Player.objects.filter(official_visit_date__year=year).filter(official_visit_date__month=month).filter(official_visit_date__day=day).order_by('last_name', 'first_name')
ratings = players.aggregate(two_star=Sum(Case(When(stars_247c="2 stars", then=1),output_field=IntegerField())), three_star=Sum(Case(When(stars_247c="3 stars", then=1),output_field=IntegerField())), four_star=Sum(Case(When(stars_247c="4 stars", then=1),output_field=IntegerField())), five_star=Sum(Case(When(stars_247c="5 stars", then=1),output_field=IntegerField())))
dictionaries = { 'date': date, 'players': players, 'ratings': ratings, }
return render_to_response('huskers/official-visit-date.html', dictionaries) |
983,814 | ff074c84843633be149d8fcb910694bf605d521e | import multiprocessing
# 享元模式的主要目的是实现对象的共享,即共享池,当系统中对象多的时候可以减少内存的开销,通常与工厂模式一起使用。
# multiprocessing.Pool
|
983,815 | 09d23d8f5882b753093599d3bb2929b425db1db7 | num=[]
def appendToList(x):
x.append(1)
return x
print(appendToList(num)) |
983,816 | 7d1bc260d02ad99b9e9373764e4483dd87db87fd | '''
~~~3+1~~~
This file contains the necessary code to run the regressions and
visualizations.
'''
import sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
import datetime
from linearmodels.panel import PanelOLS
CHECKS = ["%", "vf", "next", "vapor", " fly ", "vapour", "percent"]
RACES = ["BS", "BS14", "BS15", "BS16", "BS17", "BS18", "BS19", \
"NY", "NY14", "NY15", "NY16", "NY17", "NY18", "NY19", \
"CH", "CH14", "CH15", "CH16", "CH17", "CH18", "CH19"]
SEXES = ["M", "F"]
MASTER_MATCHES = "../scraping/race_result/master_matches.csv"
IMAGE_PATH = "strava/static/images/{}"
'''
Dataframe with Vaporfly identified in new column:
True for Vaporfly, False for non-Vaporfly shoes,
and None for no shoes inputted in Strava
'''
MARATHON_DF = pd.read_csv(MASTER_MATCHES, sep=",")
MARATHON_DF = MARATHON_DF.dropna()
MARATHON_DF["Vaporfly"] = MARATHON_DF["Shoes"].apply(
lambda s: any([check in s.lower() for check in CHECKS]))
def sec_to_hour(sec):
'''
Convert seconds to hours:minutes:seconds
Input:
s (int): seconds
Output:
string in the format H:M:S
'''
return str(datetime.timedelta(seconds = int(sec)))
def hour_to_sec(hms_str):
'''
Convert hours:minutes:seconds to seconds
Inputs:
hms_str (string): string formatted like H:M:S
Returns:
time in seconds
'''
h,m,s = hms_str.split(":")
return int(h) * 3600 + int(m) * 60 + int(s)
def regressions(marathon_df=MARATHON_DF, race=None, sex=None, age=None, time=None):
'''
Function that takes in various demographic data points input
by the user to calculate a coefficient and estimate how much
faster Vaporflies would make them run. We use a log
transformation on time prior to our regression to account for
diminishing returns on time the faster you get.
Inputs:
race (string): name of race (and year if you want to be
specific)
sex (string): "M" for male and "F" for female
age (int): age of the user
time (str): time to complete a marathon, inputted as HH:MM:SS
Returns:
(float) Regression coefficient
and saves two .png files
'''
param_str = ""
if race is not None:
assert race in RACES
marathon_df = marathon_df[marathon_df["RaceID"] == race]
param_str += ", " + race
if age is not None:
marathon_df = marathon_df[(marathon_df["Age_Lower"] <= age) & \
(marathon_df["Age_Upper"] >= age)]
param_str += ", " + "Age={}".format(age)
if sex is not None:
assert sex in SEXES
marathon_df = marathon_df[marathon_df["Gender"] == sex]
param_str += ", " + "Sex={}".format(sex)
#Running the regression
marathon_df["Vaporfly"].astype("category") # change to categorical variables
marathon_df["logTime"] = np.log(marathon_df["Time"]) # log transformation
X = marathon_df["Vaporfly"].values.reshape(-1, 1)
y = marathon_df["logTime"].values.reshape(-1, 1)
reg = LinearRegression()
reg.fit(X, y)
beta0 = np.exp(reg.intercept_[0])
beta1 = np.exp(reg.coef_[0][0])
print("The linear model is: Y = {:.5} + {:.5}X"\
.format(beta0 , beta1))
if time is not None:
time = hour_to_sec(time)
newtime = time * beta1
percent = (1 - newtime / time) * 100
time = sec_to_hour(time)
newtime = sec_to_hour(newtime)
print("If you bought the Vaporflies, you would improve your time from",\
"{} to {}, decreasing your finish time by {} percent".format(time, newtime, percent))
#Scatter plot and regression line
predictions = reg.predict(X)
plt.figure(figsize=(16, 8))
plt.scatter(marathon_df["Vaporfly"], marathon_df["logTime"], c="black")
plt.plot(marathon_df["Vaporfly"], predictions, c="blue", linewidth=2)
plt.xlabel("Presence of Vaporfly")
plt.ylabel("Marathon Times")
plt.title('Marathon Finish Time vs. Presence of Vaporfly{}'.format(param_str))
plt.savefig(IMAGE_PATH.format("linearfit.png"))
#Histograms
marathon_df_y_vf = marathon_df[marathon_df["Vaporfly"] == True]
y_vf = marathon_df_y_vf["Time"].values.reshape(-1,1)
marathon_df_y_no_vf = marathon_df[marathon_df["Vaporfly"] == False]
y_no_vf = marathon_df_y_no_vf["Time"].values.reshape(-1,1)
num_bins = 100
fig, ax = plt.subplots(figsize=(16, 8))
n, bins, patches = ax.hist(y_vf, num_bins, density=1, label="VF",\
histtype="barstacked", rwidth=0.5)
n, bins, patches = ax.hist(y_no_vf, num_bins, density=1, label="No VF",\
histtype="barstacked", rwidth=0.5)
ax.legend(loc='upper right')
ax.set_xlabel('Frequency of Finish Times (in seconds)')
ax.set_ylabel('Probability Density')
ax.set_title('Histogram of Marathon Finish Times{}'.format(param_str))
plt.savefig(IMAGE_PATH.format("hist.png"))
#Return coefficient on Vaporfly indicator
return beta1
def find_runner(name):
'''
Function that takes in various demographic data points given a
runner's name and returns how much faster they would have ran
if they wore Vaporflys. Note: This works even if the runner is
already wearing Vaporflys (e.g. it ignores prior shoe type).
Inputs:
name (string): name of runner
Returns:
(float) Regression coefficient
and saves two .png files
'''
runner_df = MARATHON_DF[MARATHON_DF["Name"] == name]
RaceID = runner_df.iloc[0,0]
time = sec_to_hour(runner_df.iloc[0,2])
sex = runner_df.iloc[0,3]
avg_age = (runner_df.iloc[0,4] + runner_df.iloc[0,5])/2
return regressions(race=RaceID, sex=sex, age=avg_age, time=time)
#Dataframe containing all runners who have run multiple races
#This will be used to run a panel data regression
MULTIPLE_DF = MARATHON_DF[MARATHON_DF.groupby("Name")["Name"].transform("size") > 1]
MULTIPLE_DF = MULTIPLE_DF.assign(Year=pd.to_numeric("20" + MULTIPLE_DF["RaceID"].str[2:]))
MULTIPLE_DF = MULTIPLE_DF[["Name", "Year", "Time", "Vaporfly"]]
MULTIPLE_DF = MULTIPLE_DF.sort_values(by=["Name", "Year"])
MULTIPLE_DF["logTime"] = np.log(MULTIPLE_DF["Time"])
rows_to_delete = []
prev_row = [None, None, None, None]
for ind, row in MULTIPLE_DF.iterrows():
if prev_row[0] == row[0] and prev_row[1] == row[1]:
rows_to_delete.append(ind)
prev_row = row
MULTIPLE_DF = MULTIPLE_DF.drop(rows_to_delete)
MULTIPLE_DF = MULTIPLE_DF.set_index(["Name", "Year"])
mod = PanelOLS(MULTIPLE_DF.logTime, MULTIPLE_DF.Vaporfly, entity_effects=True)
res = mod.fit(cov_type='clustered', cluster_entity=True)
def get_panel_regression():
'''
Runs a fixed effects regression on the MULTIPLE_DF dataframe,
with Name as the individual index i and Year as the time index t
Time_it ~ Vaporfly_it + FE_i + U_it
This returns the same result each time: we only make it a function
so this data can be accessed by strava/views.py
'''
return res
if __name__=="__main__":
age = sys.argv[1]
sex = sys.argv[2]
time = sys.argv[3]
regressions(age=int(age), sex=sex, time=time)
|
983,817 | 528ed05411946cdfb8b2fd6746f9edeaf130dd56 | import turtle
import re
import random
from collections import Counter
class Diagrams:
COLORS = ["#CED23A", "#002F55", "#44944A",
"#6E5160", "#CD7F32", "#990066",
"#6A5ACD", "#1CAC78", "#FF0033",
"#7A7666", "#806B2A", "#FF8C69"]
input_text = ""
legend_start_point_y = 200
def __init__(self, text, my_turtle, type):
self.input_text = text
if type == 0:
self.__sector__(my_turtle)
else:
self.__rays__(my_turtle)
def __sector__(self, my_turtle):
words = re.sub('\W', ' ', self.input_text).split()
count_word = Counter(words)
total_words = len(words)
for c in count_word:
color = random.choice(self.COLORS)
my_turtle.fillcolor(color)
my_turtle.pencolor("white")
my_turtle.pensize(3)
angle = 360.0*count_word[c] / total_words
my_turtle.pendown()
my_turtle.begin_fill()
my_turtle.forward(140)
my_turtle.left(90)
my_turtle.circle(140, angle)
my_turtle.left(90)
my_turtle.forward(140)
my_turtle.left(180)
my_turtle.end_fill()
self.__legend__(c, count_word[c], color, my_turtle)
my_turtle.hideturtle()
def __legend__(self, word, quantity, color, my_turtle):
my_turtle.penup()
self.legend_start_point_y -= 20
my_turtle.goto(200, self.legend_start_point_y)
my_turtle.pencolor(color)
my_turtle.pensize(1)
my_turtle.pendown()
my_turtle.dot(15)
my_turtle.penup()
my_turtle.goto(220, self.legend_start_point_y - 5)
my_turtle.pendown()
my_turtle.write(word + ' - (' + repr(quantity) + ') time (-s)')
my_turtle.penup()
my_turtle.setpos(0, 0)
def __rays__(self, my_turtle):
words = re.sub('\W', ' ', self.input_text).split()
count_word = Counter(words)
quantity_type_words = len(count_word)
angle = 360 / quantity_type_words
for c in count_word:
j = 1
my_turtle.pendown()
my_turtle.pencolor(random.choice(self.COLORS))
while j <= count_word[c]:
my_turtle.forward(50)
my_turtle.circle(2)
j += 1
my_turtle.penup()
position = my_turtle.pos()
if position[1] > 5.00:
my_turtle.goto(my_turtle.xcor(), my_turtle.ycor() + 10)
elif -5.00 < position[1] < 5.00:
if position[0] < -5.00:
my_turtle.goto(my_turtle.xcor() - 50, my_turtle.ycor())
elif position[0] > 5.00:
my_turtle.goto(my_turtle.xcor() + 20, my_turtle.ycor())
else:
my_turtle.goto(my_turtle.xcor(), my_turtle.ycor() - 20)
my_turtle.pendown()
my_turtle.write(c)
my_turtle.penup()
my_turtle.goto(0, 0)
my_turtle.left(angle)
my_turtle.hideturtle()
def main():
print "Input type (0 - sector, 1 - rays)"
a = input()
a = int(a)
turt = turtle.Turtle()
turt.speed(10)
Diagrams("My name is Nastya. My cat is cool. It is funny and nice.", turt, a)
turtle.done()
if __name__ == "__main__":
main()
|
983,818 | cc37c8447491cb7290657b45f50032f008f55cce | import os,sys
import numpy as np
#--------------------------------------------------------------#
pfam_id_list = "../../pfam_full_list.txt"
s = np.loadtxt(pfam_id_list,dtype='str')
missing_files = open("missing_er_pickle_files.txt","w")
print("Checking current directory for Protein data from %s\n",pfam_id_list)
for pfam_id in s:
if not os.path.exists('er_DI_%s.pickle'%pfam_id):
missing_files.write("%s\n"% pfam_id)
missing_files.close()
|
983,819 | 3e46845b45c5a04b4512acd09d9beb4e170c5015 | from PIL import Image
import glob
x = []
i1 = Image.open(glob.glob('*.jpg')[0])
for i in glob.glob('*.jpg')[1:]:
print(i)
x.append(Image.open(i))
i1.save('x.pdf',save_all=True, append_images=x) |
983,820 | f889fad7f90f8732aba5e73724514b7f137ef615 | # Written by Michael Kirylo 2011
# Create Vray material ID
# -------imports ------------------------------------
import sys
import os
import shutil
import maya.cmds as cmds
import glob
import time
import operator
import random
import maya.mel as mel
from pprint import pprint
# -----------------------------------------------------
# create material Attribute and multimattes
# -----------------------------------------------------
def mkCreateMatID():
mkSelItems = mkSelection()
mkAddIDAttr(mkSelItems,"1",1)
def mkCreateMM():
mkSelItems = mkAllshaders()
mkGetShdrnumbers(mkSelItems)
def mkGroupID():
mkSelItems = mkSelection()
mkAddIDAttr(mkSelItems,"1",0)
# -----------------------------------------------------
# remove ID's
# -----------------------------------------------------
def mkRemoveMatID():
mkSelItems = mkSelection()
mkAddIDAttr(mkSelItems,"0",1)
def mkRemoveAllMatID():
mkSelItems = mkAllshaders()
mkAddIDAttr(mkSelItems,"0",1)
# -----------------------------------------------------
# find Shaders
# -----------------------------------------------------
def mkSelection():
mkSel = cmds.ls(sl=True)
return mkSel
def mkAllshaders():
mkAllShdr = cmds.ls(mat = True)
return mkAllShdr
# -----------------------------------------------------
# add vray attribute to shaders
# -----------------------------------------------------
def mkAddIDAttr(item,num,type):
for entry in (item):
mkattr = 'vray addAttributesFromGroup %s vray_material_id %s;'%(entry, num)
mkcolorAttr = 'vrayAddAttr %s vrayColorId;vrayAddAttr %s vrayMaterialId;'%(entry, entry)
mkUIT = 'setUITemplate -pst attributeEditorTemplate;'
mel.eval(mkattr)
mel.eval(mkcolorAttr)
mel.eval(mkUIT)
if num == "1" :
mkAddMatID(item,type)
# -----------------------------------------------------
# add ID number to shaders
# -----------------------------------------------------
def mkAddMatID(item,type):
mkStartNum = []
NameResult = cmds.promptDialog( title='StartNumber', message='Enter Start number:', button=['OK', 'Cancel'], defaultButton='OK',cancelButton='Cancel', dismissString='Cancel')
if NameResult == 'OK':
mkStartNum = cmds.promptDialog(query=True, text=True)
mkStartNum = int(mkStartNum)
for entry in (item):
mkStartNumStr = str(mkStartNum)
mkAddIDCmd = 'setAttr "%s.vrayMaterialId" %s;'%(entry,mkStartNumStr)
mel.eval (mkAddIDCmd)
mkStartNum = mkStartNum + type
# -----------------------------------------------------
# find material id numbers in scene
# -----------------------------------------------------
def mkGetShdrnumbers(shdrList):
IDlist = []
for i in range (len(shdrList)):
try:
mkIDNum = 'getAttr("'+shdrList[i]+'.vrayMaterialId")'
mk_id = mel.eval(mkIDNum)
mk_id = str(mk_id)
IDlist.insert(i,mk_id)
except:
print "no id assigned"
if IDlist:
IDlist.sort()
last = IDlist[-1]
for i in range (len(IDlist)-2,-1,-1):
if last == IDlist[i]:
del IDlist[i]
else:
last = IDlist[i]
createMM(IDlist)
# -----------------------------------------------------
# create vray render elements
# -----------------------------------------------------
def createMM(list):
for i in range (0,len(list),3):
mel.eval ("vrayAddRenderElement MultiMatteElement;")
mel.eval ("rename vrayRE_Multi_Matte "+'"'+"MatID_01"+'"'+ ";")
cmds.select("MatID*")
mkMatID = cmds.ls(sl=True)
for i in range (len (mkMatID)):
mel.eval ("setAttr "+'"'+ mkMatID[i]+'.vray_usematid_multimatte" 1;')
mel.eval ("setAttr -type "+'"'+"string"+'"'+mkMatID[i]+".vray_name_multimatte "+'"'+"MatID"+"%d" %i+'"'+";")
mkObjIter = iter(list)
for i in range(0,len(list),3):
for entry in (mkMatID):
mel.eval ("setAttr "+'"'+entry+".vray_redid_multimatte" +'"'+mkObjIter.next()+";")
mel.eval ("setAttr "+'"'+entry+".vray_greenid_multimatte" +'"'+mkObjIter.next()+";")
mel.eval ("setAttr "+'"'+entry+".vray_blueid_multimatte" +'"'+mkObjIter.next()+";") |
983,821 | 1ec81aa75d0c271a7423a54fadb4fc9ec4de65b9 | import json
from random import randint, choice
from twisted.web import resource
class StatisticsEndpoint(resource.Resource):
"""
This endpoint is responsible for handing requests regarding statistics in Tribler.
"""
def __init__(self):
resource.Resource.__init__(self)
child_handler_dict = {"tribler": StatisticsTriblerEndpoint, "ipv8": StatisticsIPv8Endpoint}
for path, child_cls in child_handler_dict.iteritems():
self.putChild(path, child_cls())
class StatisticsTriblerEndpoint(resource.Resource):
"""
This class handles requests regarding Tribler statistics.
"""
def render_GET(self, request):
return json.dumps({'tribler_statistics': {
"db_size": randint(1000, 1000000),
"num_channels": randint(1, 100),
"num_torrents": randint(1000, 10000)
}})
class StatisticsIPv8Endpoint(resource.Resource):
"""
This class handles requests regarding IPv8 statistics.
"""
def render_GET(self, request):
return json.dumps({'ipv8_statistics': {
"total_up": 13423,
"total_down": 3252
}})
|
983,822 | dbe92ab719c79090708e54e757a1cc4303525332 | import random
#################
# Base conversion
#################
# ALPHABET is printable characters to use for encoding; excludes l and I
# ALPHABET = 'abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNOPQRSTUVWXYZ'
ALPHABET = 'abcdefghijklmnopqrstuvwxyz'
# RANDOM_SEED is used mostly for testing
RANDOM_SEED = None
def set_random_seed(random_seed):
global RANDOM_SEED
# Force seed if one is given (used for tests)
RANDOM_SEED = random_seed
def base_random_number(num_len, alphabet=ALPHABET):
global RANDOM_SEED
if RANDOM_SEED:
random.seed(RANDOM_SEED)
RANDOM_SEED = None
return random.randint(0, len(alphabet)**num_len - 1)
def base_alphabet_encode(num, min_num_pos=None, alphabet=ALPHABET):
"""Encode a number in Base X
Field notes:
num: The number to encode
min_num_pos: Minimum number of generated positions (zero-fill missing)
alphabet: The alphabet to use for encoding
"""
if num == 0:
if min_num_pos:
return alphabet[0] * min_num_pos
else:
return alphabet[0]
if num < 0:
num *= -1
arr = ['-']
else:
arr = []
base = len(alphabet)
while num:
rem = num % base
num //= base
arr.append(alphabet[rem])
arr.reverse()
base_num = ''.join(arr)
if min_num_pos and len(base_num) < min_num_pos:
return ''.join([alphabet[0] * (min_num_pos - len(base_num)), base_num])
else:
return base_num
def base_alphabet_decode(string, alphabet=ALPHABET):
"""Decode a Base X encoded string into the number
Arguments:
- `string`: The encoded string
- `alphabet`: The alphabet to use for encoding
"""
base = len(alphabet)
is_pos = True
if string[-1] == '-':
is_pos = False
string = string[0:-1]
str_len = len(string)
num = 0
idx = 0
for char in string:
power = (str_len - (idx + 1))
num += alphabet.index(char) * (base ** power)
idx += 1
return num if is_pos else -num
|
983,823 | bdd8cc0fc246361d8ff56b8c1346736474b8d5fd | '''10진법 124 나라 10진법 124 나라
1 1 6 14
2 2 7 21
3 4 8 22
4 11 9 24'''
''' 문제 : 자연수 n이 매개변수로 주어질 때,n을 124 나라에서 사용하는 숫자로
바꾼 값을 return 하도록 solution 함수를 완성해 주세요. '''
#코드
def solution(n):
answer = ''
while n > 0:
n -= 1 # index 때문에
answer += '124'[n%3] + answer
n //= 3
return answer
''' 느낀 점
'문자열'[인덱스]
ex) '문자열'[0] 결과 값은 '문'
//는 정수를 보여줌
/ 는 소숫점까지 보여줍니다.
|
983,824 | bb932a701ec53b61c78f93796b3441e94dd3cfae | from drawman import *
from time import sleep
A = [(0,0), (100,0), (100, 100), (0, 100), (0,0)]
pen_down()
for x,y in A:
to_point(x,y)
pen_up()
sleep(20)
|
983,825 | 43b9d427fac12fb03d5f60f22a22db4178339ba0 | from sampler import RegularSampler, MultiJitteredSampler
from geometry import Plane, AxisAlignedBox
from tracer import ViewPlane, Tracer
from material import Matte
from light import AmbientOccluder, PointLight
from camera import PinholeCamera
import numpy
from buildfunctionbase import BuildFunctionBase
class BuildFunction(BuildFunctionBase):
BUILD_FUNCTION_NAME = 'e'
@classmethod
def build_function(cls, world, viewmode):
world.viewmode = viewmode
if viewmode == "realtime":
resolution = (64, 64)
pixel_size = 5
sampler = RegularSampler()
else:
resolution = (200, 200)
pixel_size = 1.6
sampler = MultiJitteredSampler(sample_dim=3)
world.viewplane = ViewPlane(resolution=resolution, pixel_size=pixel_size, sampler=sampler)
world.camera = PinholeCamera(eye=(35., 10., 45.), up=(0.,1.,0.), lookat=(0.,1.,0.), viewing_distance=5000.)
world.background_color = (0.0,0.0,0.0)
world.tracer = Tracer(world)
world.objects = []
matte1 = Matte(ka=.75, kd=1, cd=numpy.array([1., 1., 0]))
matte2 = Matte(ka=.75, kd=1, cd=numpy.array([1., 1., 1.]))
occluder = AmbientOccluder(numpy.array((1.,1.,1.)), .2, sampler)
world.ambient_color = occluder
box = AxisAlignedBox(-1, 1, 0.5, 2.5, -1, 1, material=matte2)
world.objects.append(box)
plane = Plane(origin=(0,0,0), normal=(0,1,0), material=matte1)
world.objects.append(plane)
world.lights = [
PointLight(numpy.array((1.,1.,1.)), 1., numpy.array((2., 4., -2.)), radius=5, attenuation=2)
]
|
983,826 | 92320f5b51847ac35808f1deb41f5b7772992fcd | # Generated by Django 2.2 on 2019-04-23 08:35
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0005_books'),
]
operations = [
migrations.AddField(
model_name='books',
name='categoryname',
field=models.CharField(default='', max_length=200),
),
]
|
983,827 | 03b75d968f0121067e5698bf74b69264e516aa3f | #Defines classes for front-end and accelerator components
#This stuff should be imported by 10-fp-devices.py
import time
import datetime
from ophyd import (EpicsMotor, Device,
Component as Cpt, EpicsSignal,
EpicsSignalRO, DeviceStatus)
#ring current
beam_ring_current = EpicsSignalRO('SR:OPS-BI{DCCT:1}I:Real-I', name='ring_current')
#FE slits, real motors only
class FE_WhiteBeam_Slits(Device):
top = Cpt(EpicsMotor, '1-Ax:T}Mtr', labels=('FE Slits',))
bot = Cpt(EpicsMotor, '2-Ax:B}Mtr', labels=('FE Slits',))
inb = Cpt(EpicsMotor, '2-Ax:I}Mtr', labels=('FE Slits',))
outb = Cpt(EpicsMotor, '1-Ax:O}Mtr', labels=('FE Slits',))
fe_wb_slits = FE_WhiteBeam_Slits('FE:C17B-OP{Slt:', name='fe_wb_slits')
#FE mirror including thermocouple signals
class XFP_FE_Mirror(Device):
hor_up = Cpt(EpicsMotor, '-Ax:XU}Mtr', labels=('FE Mirror',))
hor_down = Cpt(EpicsMotor, '-Ax:XD}Mtr', labels=('FE Mirror',))
lift_up = Cpt(EpicsMotor, '-Ax:YUI}Mtr', labels=('FE Mirror',))
lift_ctr = Cpt(EpicsMotor, '-Ax:YO}Mtr', labels=('FE Mirror',))
lift_down = Cpt(EpicsMotor, '-Ax:YDI}Mtr', labels=('FE Mirror',))
focus = Cpt(EpicsMotor, '-Ax:Bend}Mtr', labels=('FE Mirror',))
X = Cpt(EpicsMotor, '-Ax:X}Mtr', labels=('FE Mirror',))
Y = Cpt(EpicsMotor, '-Ax:Y}Mtr', labels=('FE Mirror',))
pitch = Cpt(EpicsMotor, '-Ax:P}Mtr', labels=('FE Mirror',))
yaw = Cpt(EpicsMotor, '-Ax:Yaw}Mtr', labels=('FE Mirror',))
roll = Cpt(EpicsMotor, '-Ax:R}Mtr', labels=('FE Mirror',))
temp1 = Cpt(EpicsSignalRO, '}T:1-I', labels=('FE Mirror',))
temp2 = Cpt(EpicsSignalRO, '}T:2-I', labels=('FE Mirror',))
xfp_fe_mirror = XFP_FE_Mirror('XF:17BM-OP{Mir:1', name='xfp_fe_mirror')
|
983,828 | f730e6743818c7a0841f0be2ee483bb1b8ee9225 | import os
import sys
import time
import random
import argparse
import imageio
import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('agg')
from PIL import Image
import torch
import torch.nn as nn
import torchvision.utils as vutils
from data import data_utils
from misc import utils
from misc import visualize
parser = argparse.ArgumentParser()
parser.add_argument('--ckpt', type=str, default='', help='your model.pth file')
parser.add_argument('--video', type=str, default='', help='your .mp4 video file')
parser.add_argument('--output_root', type=str, default='gen_outputs')
parser.add_argument('--seed', type=int, default=1, help='seed to use')
args = parser.parse_args()
def read_video(vid_name):
"""return a torch tensor with shape=(t, b, c, h, w)"""
reader = imageio.get_reader(vid_name)
vid_tensor = []
for i, im in enumerate(reader):
im = (im/255.).astype(np.float32)
vid_tensor.append(torch.from_numpy(im))
ret = torch.stack(vid_tensor).permute(0, 3, 1, 2)
ret = torch.unsqueeze(ret, 1)
return ret
def make_dirs(d):
if not os.path.exists(d):
os.makedirs(d)
if __name__ == '__main__':
states = torch.load(args.ckpt)
states_opt = states['opt']
# ------ set up the models ------
if states_opt.dataset != 'h36m':
if states_opt.backbone == 'dcgan':
if states_opt.image_width == 64:
import models.dcgan_64 as backbone_net
elif states_opt.image_width == 128:
import models.dcgan_128 as backbone_net
elif states_opt.backbone == 'vgg':
if states_opt.image_width == 64:
import models.vgg_64 as backbone_net
elif states_opt.image_width == 128:
import models.vgg_128 as backbone_net
elif states_opt.dataset == 'h36m':
import models.h36m_mlp as backbone
else:
raise ValueError('Unknown backbone: %s' % states_opt.backbone)
states_opt.backbone_net = backbone_net
from models.p2p_model import P2PModel
# set seed
random.seed(args.seed)
torch.manual_seed(args.seed)
torch.cuda.manual_seed_all(args.seed)
# model
batch_size = 1
model = P2PModel(batch_size, states_opt.channels, states_opt.g_dim, states_opt.z_dim,
states_opt.rnn_size, states_opt.prior_rnn_layers, states_opt.posterior_rnn_layers,
states_opt.predictor_rnn_layers, opt=states_opt)
model.cuda()
model.load(states=states)
model.eval()
nsamples = 5
ndisplays = 5
assert ndisplays <= nsamples
gen_lenths = [10, 20, 30]
# input
if args.video != '':
seq = read_video(args.video)
elif args.start_img != '':
assert args.end_img != ''
start = Image.open(args.start_img)
end = Image.open(args.end_img)
seq = torch.stack([start, end])
seq = torch.unsqueeze(seq, 1) # unsqueeze batch dim
seq = seq.cuda()
seq_len = len(seq)
# output path
output_root = args.output_root
if output_root == '':
output_root = 'gen_outputs'
make_dirs(output_root)
for length_to_gen in gen_lenths:
output_cp_ix = length_to_gen - 1
samples = []
# maybe make a block
for s in range(nsamples):
out = model.p2p_generate(seq, length_to_gen, output_cp_ix, model_mode='full')
out = torch.stack(out)
samples.append(out)
samples = torch.stack(samples)
idx = np.random.choice(len(samples), ndisplays, replace=False)
samples_to_save = samples[idx]
# pad gt if necessary
padded_seq = seq.clone()
x_cp = seq[seq_len-1]
if length_to_gen > seq_len:
pad_frames = x_cp.repeat(length_to_gen-seq_len, 1, 1, 1, 1)
padded_seq = torch.cat([padded_seq, pad_frames], dim=0)
# add cp border
seq_with_border = visualize.add_gt_cp_border(padded_seq, seq_len, length_to_gen)
samples_to_save = visualize.add_samples_cp_border(samples_to_save, seq_len, length_to_gen)
# save as img
seq_grid = vutils.make_grid(seq_with_border[:, 0], nrow=len(seq_with_border), padding=0)
name = '%s/len_%d-gt.png' % (output_root, length_to_gen)
vutils.save_image(seq_grid, name)
block = []
for ix, s in enumerate(samples_to_save):
name = '%s/len_%d-gen_%03d.png' % (output_root, length_to_gen, ix)
s_row = vutils.make_grid(s[:, 0], nrow=len(s), padding=0)
vutils.save_image(s_row, name)
block.append(s_row)
block = torch.cat(block, 1)
name = '%s/len_%d-gen_full.png' % (output_root, length_to_gen)
vutils.save_image(block, name)
# save as gif or mp4
for ix, s in enumerate(samples_to_save):
frames = []
for t in range(len(s)):
frame_np = (s[t, 0].permute(1, 2, 0).data.cpu().numpy() * 255).astype(np.uint8)
frames.append(frame_np)
name = '%s/len_%d-gen_%03d.gif' % (output_root, length_to_gen, ix)
imageio.mimsave(name, frames)
gifs = []
for t in range(length_to_gen):
col = vutils.make_grid(samples_to_save[:, t, 0], nrow=ndisplays, padding=0)
col_np = (col.permute(1, 2, 0).data.cpu().numpy() * 255).astype(np.uint8)
gifs.append(col_np)
name = '%s/len_%d-gen_full.gif' % (output_root, length_to_gen)
imageio.mimsave(name, gifs) |
983,829 | 2e764ee96cc4a2e5b00668091c883f1c2f42966e | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 29 16:56:28 2017
@author: sunhp
临时用
"""
infe9 = "905.txt"
infe42 = "4290.txt"
def find_value(s,file):
IN = open(file,"r",encoding = "UTF-8")
lt = []
line = IN.readline()
while line:
a = str(line)
a = a.strip('\n')
if a.find(s) != -1:
lt.append(a)
line = IN.readline()
return lt
inf9 = open(infe9,"r",encoding = "UTF-8")
d = dict()
line = inf9.readline()
while line:
id1 = str(line)
id1 = id1.strip('\n')
a = find_value(id1,infe42)
d[id1] = a
line = inf9.readline()
inf9.close()
out = open("Count.txt","w",encoding='utf-8')
for i in sorted(d.keys()):
a = i
for j in range(len(d[i])):
out.writelines(a+'\t'+d[i][j]+'\n')
out.close()
|
983,830 | 8a7ee05c9d7746db2344d1069322dd08750afd64 | # Dane są następujące struktury: struct Node { Node* next; int val; }; struct TwoLists { Node* even; Node* odd; };
# Napisać funkcję: TwoLists split(Node* list); Funkcja rozdziela listę na dwie: jedną zawierającą liczby parzyste
# i drugą zawierającą liczby nieparzyste. Listy nie zawierają wartowników.
class Node:
def __init__(self, val):
self.val = val
self.next = None
def add(head, node):
if head is None:
head = node
else:
tmp = head
while tmp.next is not None:
tmp = tmp.next
tmp.next = node
return head
def print_list(head):
curr = head
while curr:
print(curr.val, end =" ")
curr = curr.next
print("\n")
def two_list_split(head):
# we create odd list
head_odd = None
last_odd = None
prev = head
curr = head.next
node = None
while curr:
# when we cut first element
if head.val % 2 == 1:
node = head
head = head.next
node.next = None
prev = head
curr = head.next
else:
if curr.val % 2 == 1:
node = curr
prev.next = curr.next
curr = curr.next
node.next = None
else:
prev = curr
curr = curr.next
# we have cut node
if node is not None:
if head_odd is None:
head_odd = node
last_odd = node
else:
last_odd.next = node
last_odd = last_odd.next
# because changing pointers depends of what happens!!!
# prev = curr
# curr = curr.next
node = None
# head is head_even
return head, head_odd
head = None
n1 = Node(2)
n2 = Node(3)
n3 = Node(16)
n4 = Node(7)
n5 = Node(10)
n6 = Node(12)
head = add(head, n1)
head = add(head, n2)
head = add(head, n3)
head = add(head, n4)
head = add(head, n5)
head = add(head, n6)
head_even, head_odd = two_list_split(head)
print_list(head_even)
print_list(head_odd)
|
983,831 | 80d01239a9e4dd1603378af0e00846cabcaa2c5f | """
Author Steven Pennington
Population v 1.0 last modified on 6/7/20
Calculate the population over a given amount of time and growth rate for x number of starting organisms.
INPUT
Starting population
Growth rate in %
Amount of time in days
PROCESSES
Calculate the growth over time
OUTPUT
Table of growth over time
Day Approximate
Population
"""
population = int(input("How many organisms in the inital population? "))
growthRate = int(input("What is the average growth rate of the organisms in a day (in %)? "))
days = int(input("How many days will the organisms be tracked? "))
i = 1
data = []
x = 0
while i <= days:
data.append(str(i) + "\t")
data.append(str(population) + "\n")
i += 1
population += population*growthRate/100
print("Day Approximate\tPopulation\n")
while x < days * 2:
print(data[x] + "\t" + data[x + 1])
x += 2
|
983,832 | 7c73ae9c2ad0dacdfbe239f788779f495bc2b3f1 | import string
lowercase = string.ascii_lowercase
letter_frequency = {
'e': 0.1268,
't': 0.0978,
'a': 0.0788,
'o': 0.0776,
'i': 0.0707,
'n': 0.0706,
's': 0.0634,
'r': 0.0594,
'h': 0.0573,
'l': 0.0394,
'd': 0.0389,
'u': 0.0280,
'c': 0.0268,
'f': 0.0256,
'm': 0.0244,
'w': 0.0214,
'y': 0.0202,
'g': 0.0187,
'p': 0.0186,
'b': 0.0156,
'v': 0.0102,
'k': 0.0060,
'x': 0.0016,
'j': 0.0010,
'q': 0.0009,
'z': 0.0006
}
def get_letter_frequency():
result = ''
for key in letter_frequency:
result += key
return result
def get_key_size_space(num):
i = 0
while True:
minv = 2**i
maxv = 2**(i+1)
if minv < num < maxv:
if num - minv > maxv - num:
i += 1
return i
i += 1
def substitution(text, key_table):
text = text.lower()
result = ''
for l in text:
i = lowercase.find(l)
if i < 0:
result += l
else:
result += key_table[i]
return result
def caesar_cypher_encrypt(text, shift):
key_table = lowercase[shift:] + lowercase[:shift]
return substitution(text, key_table)
def caesar_cypher_decrypt(text, shift):
return caesar_cypher_encrypt(text, -shift)
def crack_caesar_cypher(text):
for i in range(26):
key_table = lowercase[-i:] + lowercase[:-i]
print(substitution(text, key_table)[:75], '| shift is ', i, )
def insert_letter(text, i, l):
return text[:i] + l + text[i:]
def get_blank_record(text):
text = text.lower()
blank_record = []
for i in range(len(text)):
l = text[i]
item = []
if lowercase.find(l) < 0:
item.append(i)
item.append(l)
blank_record.append(item)
return blank_record
def restore_blank_record(text, blank_record):
for i in blank_record:
text = insert_letter(text, i[0], i[1])
return text
def get_trim_text(text):
text = text.lower()
trim_text = ''
for l in text:
if lowercase.find(l) >= 0:
trim_text += l
return trim_text
def get_vigener_key_table(text, key):
trim_text = get_trim_text(text)
total_length = len(trim_text)
key_length = len(key)
quotient = total_length // key_length
reminder = total_length % key_length
key_table = quotient * key + key[:reminder]
return trim_text, key_table
def get_var(data, mean=0.067):
if not data:
return 0
var_sum = 0
for d in data:
var_sum += (d - mean) ** 2
return var_sum / len(data)
def get_coincidence_index(text):
trim_text = get_trim_text(text)
length = len(trim_text)
letter_stats = []
for l in lowercase:
lt = {}
count = trim_text.count(l)
lt[l] = count
letter_stats.append(lt)
index = 0
for d in letter_stats:
v = list(d.values())[0]
index += (v/length) ** 2
return index
def get_key_length(text):
trim_text = get_trim_text(text)
# assume text length less than 26
group = []
for n in range(1, 26):
group_str = ['' for i in range(n)]
for i in range(len(trim_text)):
l = trim_text[i]
for j in range(n):
if i % n == j:
group_str[j] += l
group.append(group_str)
var_list = []
length = 1
for text in group:
data = []
for t in text:
index = get_coincidence_index(t)
data.append(index)
var_list.append([length, get_var(data)])
length += 1
var_list = sorted(var_list, key=lambda x: x[1])
return [v[0] for v in var_list[:12]]
def crack_vigener_cypher(text, key_length):
blank_record = get_blank_record(text)
trim_text = get_trim_text(text)
group = ['' for i in range(key_length)]
for i in range(len(trim_text)):
l = trim_text[i]
for j in range(key_length):
if i % key_length == j:
group[j] += l
key = ''
letter_stats_group = []
for j in range(key_length):
letter_stats = []
for l in lowercase:
lt = {}
count = group[j].count(l)
lt[l] = count
letter_stats.append(lt)
letter_stats = sorted(letter_stats, key=lambda x: list(x.values())[0], reverse=True)
letter_stats_group.append(letter_stats)
# print('group', j, ':', letter_stats[:8])
# gvctxs
score_list = []
for i in range(3):
current_letter = list(letter_stats[i].keys())[0]
index = lowercase.find(current_letter)
key_letter = lowercase[index - lowercase.find('e')]
item = []
item.append(key_letter)
score = 0
for k in range(3):
vl = list(letter_stats[k].keys())[0]
for fl in ['t', 'a']:
#if i == 1 and (k == 1 or k == 2) and j == 1:
if (lowercase.find(key_letter) + lowercase.find(fl)) % 26 == lowercase.find(vl):
score += 1
item.append(score)
score_list.append(item)
score_list = sorted(score_list, key=lambda x: x[1], reverse=True)
key += score_list[0][0]
plain_text = vigener_cypher_decrypt(trim_text, key)
return key, restore_blank_record(plain_text, blank_record)
def vigener_cypher_encrypt(text, key, is_encrypt=True):
blank_record = get_blank_record(text)
trim_text, key_table = get_vigener_key_table(text, key)
result = ''
for i in range(len(trim_text)):
l = trim_text[i]
index_lowercase = lowercase.find(l)
index_key_table = lowercase.find(key_table[i])
if not is_encrypt:
index_key_table = -index_key_table
result += lowercase[(index_lowercase + index_key_table) % 26]
return restore_blank_record(result, blank_record)
def vigener_cypher_decrypt(text, key):
return vigener_cypher_encrypt(text, key, False)
def get_index(text):
result = ''
for l in text:
i = lowercase.find(l)
if not i < 0:
result += str(i)
print(result)
if __name__ == '__main__':
shift = 3
plain_text = 'We intend to begin on the first of February unrestricted submarine warfare. We shall endeavor in spite of this to keep the United States of America neutral. In the event of this not succeeding, we make Mexico a proposal of alliance on the following basis: make war together, make peace together, generous financial support and an understanding on our part that Mexico is to reconquer the lost territory in Texas, New Mexico, and Arizona. The settlement in detail is left to you. You will inform the President of the above most secretly as soon as the outbreak of war with the United States of America is certain and add the suggestion that he should, on his own initiative, invite Japan to immediate adherence and at the same time mediate between Japan and ourselves. Please call the Presidents attention to the fact that the ruthless employment of our submarines now offers the prospect of compelling England in a few months to make peace.'
#cypher_text = caesar_cypher_encrypt(plain_text, shift)
#caesar_cypher_decrypt(cypher_text, shift)
#crack_caesar_cypher(cypher_text)
cypher_text = vigener_cypher_encrypt(plain_text, 'crypto')
#print(vigener_cypher_decrypt(cypher_text, 'crypto'))
data = get_key_length(cypher_text)
for d in data:
key, plain_text = crack_vigener_cypher(cypher_text, d)
print(plain_text[:75], '| key length is', d, '| key is', key)
|
983,833 | fc20fdfe3d3febe332a980985514d5b66c390bda | # Generated by Django 3.1.4 on 2020-12-17 22:54
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0005_user_likes'),
]
operations = [
migrations.CreateModel(
name='YourMom',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('weight', models.FloatField(null=True)),
('shops', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=1000), blank=True, null=True, size=None)),
],
),
]
|
983,834 | 08bdf603d099382adabf16259c4562803a204013 | from python_imagesearch.imagesearch import *
# Search for the github logo on the whole screen
# note that the search only works on your primary screen.
# This is intended to be used as examples to be copy pasted, do not run the whole file at once
pos = imagesearch("./github.png")
if pos[0] != -1:
print("position : ", pos[0], pos[1])
pyautogui.moveTo(pos[0], pos[1])
else:
print("image not found")
# search for the github logo until found :
pos = imagesearch_loop("./github.png", 0.5)
print("image found ", pos[0], pos[1])
# search for the logo on the 0,0,800,600 region
# (a rectangle starting from the top left going 800 pixels to the right and down 600 pixels)
pos = imagesearcharea("./github.png", 0, 0, 800, 600)
if pos[0] != -1:
print("position : ", pos[0], pos[1])
pyautogui.moveTo(pos[0], pos[1])
else:
print("image not found")
# the im parameter is useful if you plan on looking for several different images without the need for recapturing the screen
# the screen capture being one of the most time consuming function it's a good way to optimize
# non -optimized way :
time1 = time.clock()
for i in range(10):
imagesearcharea("./github.png", 0, 0, 800, 600)
imagesearcharea("./panda.png", 0, 0, 800, 600)
print(str(time.clock() - time1) + " seconds (non optimized)")
# optimized way :
time1 = time.clock()
im = region_grabber((0, 0, 800, 600))
for i in range(10):
imagesearcharea("./github.png", 0, 0, 800, 600, 0.8, im)
imagesearcharea("./panda.png", 0, 0, 800, 600, 0.8, im)
print(str(time.clock() - time1) + " seconds (optimized)")
# sample output :
# 1.6233619831305721 seconds (non optimized)
# 0.4075934110084374 seconds (optimized)
# click image is to be used after having found the image
pos = imagesearch("github.png")
if pos[0] != -1:
click_image("github.png", pos, "right", 0.2, offset=5)
# when you have various images to find on the screen, you can
# use this function, it iterates through the files in the path
# you provide and outputs an dictionary where the key is the path
# to the file and the value is the position array.
#
# I create this to be used with the same image with different sizes,
# so no matter the size it appears on the screen, i can find it.
#
# In this example it iterates through the main folder of the project
# and find the panda.png and github.png
print(str(imagesearch_from_folder('./', 0.8)))
|
983,835 | c0a60c75f31122e98ae1e2abf91ddb4ed1cf28f1 | #example to run: py change_label_naming.py --labels_path FINAL_final_labels --fixed_labels_path corrected_FINAL_final_labels
'''
Changes the 4th column for coin type from notation 0,1,2,3,4,5 to 1,5,10,25,100,200 respectively
'''
import os
import numpy as np
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--labels_path', type=str, required = True)
parser.add_argument('--fixed_labels_path', type=str, required = True)
args = parser.parse_args()
labels = sorted(os.listdir(args.labels_path))
#dictionary for mapping
mapping = {}
mapping['0'] = "1"
mapping['1'] = "5"
mapping['2'] = "10"
mapping['3'] = "25"
mapping['4'] = "100"
mapping['5'] = "200"
for i in labels:
name = i
f = open(f"{args.labels_path}/{name}","r")
f2 = open(f"{args.fixed_labels_path}/{name}","w")
lines = f.readlines()
for j in lines:
line = j.split("\t")
#the fourth column is the same as the second last column
key = line[-2]
coin_value = mapping[key]
line[-2] = coin_value
#join back with tab seperation
string = "\t".join(line)
f2.write(string)
f.close()
f2.close()
|
983,836 | f17319c170f418ca4852c98115c546969e6ab334 | # Go Base Game Class
# Author: Matthew Bird
# date: 10/5/2018
from copy import deepcopy
from math import floor
from random import choice
class Game:
def __init__(self, board_size=19, rules=None):
"""
The Game class is for playing go. There is only one method for now, "play".
:param board_size: int
:param rules: {'suicide': False, 'komi': 6.5, 'superko': True, 'editmode': False}
"""
self.board_size = board_size
self.board = board_generate_empty(board_size)
self.board_history = [deepcopy(self.board)]
self.rules = rules if rules else {'suicide': False, 'komi': 6.5, 'superko': True, 'editmode': False}
self.captures = {'w': [], 'b': []}
self.turn = "b"
self.latest_status = None
def play(self, xy, color):
"""
Place a stone at xy of the given color.
All associated actions that normally take place when playing a stone are taken care of.
:param xy: (int,int)
:param color: 'b' or 'w'
:return: N/A
"""
validity = xy_is_valid(xy, color, self.board, self.rules, self.board_history)
self.latest_status = validity
self.latest_status['xy'] = xy
self.latest_status['captured_stones'] = []
if validity["status"] == "valid":
self.board = xy_play_on_board(xy, self.board, color)
new_captures = list(xy_to_captures(xy, color, self.board_history[-1]))
self.captures[color] += new_captures
self.board_history.append(deepcopy(self.board))
self.latest_status['captured_stones'] = new_captures
return True
return False
def xy_is_valid(xy, color, board, rules, board_history):
"""
Determines if a play at xy is valid for a given color, board, rules, and board_history.
:param xy: tuple (x, y)
:param color: 'w' or 'b'
:param board: 2d list
:param rules: dict
:param board_history: 3d list
:return: dict
"""
response = {"status": "valid", "result": []}
# =========CAN I PLACE IT==========
# if xy is off the board
if xy_off_board(xy, board):
response["status"] = "invalid"
response["result"].append("off_board")
#
# if location is occupied
if xy_occupied(xy, board):
response["status"] = "invalid"
response["result"].append("occupied_location")
# =========IF I PLACE IT==========
if response["status"] == "valid":
fictional_board = xy_play_on_board(xy, deepcopy(board), color)
# if it violates suicide
if not rules['suicide']:
if xy_suicide(xy, fictional_board, color):
response["status"] = "invalid"
response["result"].append("suicide")
# if it violates superko
if rules['superko']:
if not rule_superko(fictional_board, board_history):
response["status"] = "invalid"
response["result"].append("superko")
return response
def xy_off_board(xy, board):
"""
Return True if xy is off the board.
:param xy: (int, int)
:param board: 2d list
:return: bool
"""
return False if 0 <= xy[0] < len(board) and 0 <= xy[1] < len(board) else True
def xy_occupied(xy, board):
"""
Returns True if xy is already occupied on the given board.
:param xy: (int, int)
:param board: 2d list
:return: bool
"""
return True if board[xy[0]][xy[1]] else False
def xy_play_on_board(xy, board, color):
"""
Returns board after stone is played at xy.
:param xy: (int, int)
:param board: 2d list
:param color: 'b' or 'w'
:return: 2d list
"""
board[xy[0]][xy[1]] = color
potential_adjacent_captures = xy_adjacents(xy, board)
opp_color = switch_color(color)
p_a_p = filter(lambda xy_: board[xy_[0]][xy_[1]] == opp_color, potential_adjacent_captures)
for xy_opp in p_a_p:
group = xy_to_group(xy_opp, board)
if group_is_surrounded(group, board):
board = group_remove(group, board)
return board
def xy_to_group(xy, board):
"""
Returns the group of which the stone at xy is a member.
:param xy: (int, int)
:param board: 2d list
:return: group {(int,int), (int,int), ...}
"""
group = {xy}
inspected = set([])
to_inspect = group - inspected
while to_inspect:
for stone in to_inspect:
inspected.add(stone)
group |= xy_adjacents(stone, board, filter_by="friend")
to_inspect = group - inspected
return group
def xy_adjacents(xy, board=None, filter_by=None, color=None):
"""
Returns locations neighboring xy.
if color is given, it is preferred, otherwise it is inferred from the board.
if filter_by == "friend" then friendly adjacents are returned.
if filter_by == "foe" then opponents adjacents are returned.
if filter_by == "None" then open liberties are returned.
:param xy: (int, int)
:param board: 2d list
:param filter_by: None, "None", "friend", "foe"
:param color: "b" or "w"
:return: {(int,int), (int,int), ...}
"""
color = board[xy[0]][xy[1]] if not color else color
adjacents = {(xy[0] + 1, xy[1]), (xy[0] - 1, xy[1]), (xy[0], xy[1] + 1), (xy[0], xy[1] - 1)}
legal_adjs = set(filter(lambda xy_: 0 <= xy_[0] <= len(board) - 1 and 0 <= xy_[1] <= len(board) - 1, adjacents))
if filter_by == "friend":
legal_adjs &= {xy_ for xy_ in legal_adjs if board[xy_[0]][xy_[1]] == color}
elif filter_by == "foe":
legal_adjs &= {xy_ for xy_ in legal_adjs if board[xy_[0]][xy_[1]] == switch_color(color)}
elif filter_by == "None":
legal_adjs &= {xy_ for xy_ in legal_adjs if not board[xy_[0]][xy_[1]]}
return legal_adjs
def xy_suicide(xy, board, color):
"""
Return True if xy is a suicide move.
:param xy: (int, int)
:param board: 2d list
:param color: 'b' or 'w'
:return: bool
"""
group = xy_to_group(xy, board)
if group_adjacents(group, board, color) == group_adjacents(group, board, filter_by="foe"):
for xy_adj in xy_adjacents(xy, board, filter_by="foe", color=color):
group_adj = xy_to_group(xy_adj,board)
if group_is_surrounded(group_adj,board):
return False
return True
else:
return False
def xy_to_captures(xy, color, board):
"""
Returns the number of captures the move at xy produces.
:param xy: (int, int)
:param color: 'b' or 'w'
:param board: 2d list
:return: int
"""
captures = set([])
for adj in xy_adjacents(xy, board, "foe", color):
potential_captured_group = xy_to_group(adj, board)
captured_groups_adjacents = group_adjacents(potential_captured_group, board, filter_by="None")
if len(captured_groups_adjacents) <= 1:
captures |= potential_captured_group
return captures
def group_adjacents(group, board, filter_by=None):
"""
Returns what the adjacent locations are for a group.
if filter_by == "None" then returns open liberties.
if filter_by == "friend" then returns friendly neighbors.
if filter_by == "foe" then returns opponents neighbors.
:param group: {(int,int), (int,int), ...}
:param board: 2d list
:param filter_by: None, "None", "friend", "foe"
:return: {(int,int), (int,int), ...}
"""
liberties = set([])
for location in group:
if filter_by == "None":
liberties |= xy_adjacents(location, board, filter_by="None")
elif filter_by == "friend":
liberties |= xy_adjacents(location, board, filter_by="friend")
elif filter_by == "foe":
liberties |= xy_adjacents(location, board, filter_by="foe")
else:
liberties |= xy_adjacents(location, board)
liberties -= group
return liberties
def group_is_surrounded(group, board):
"""
Returns True if a group is surrounded.
:param group: {(int,int), (int,int), ...}
:param board: 2d list
:return: bool
"""
if group_adjacents(group, board, filter_by="None"):
return False
else:
return True
def group_remove(group, board):
"""
Removes the group from the board and returns the new board.
:param group: {(int,int), (int,int), ...}
:param board: 2d list
:return: 2d list
"""
for xy in group:
board[xy[0]][xy[1]] = None
return deepcopy(board)
def rule_superko(board, board_history):
"""
Returns True is board position is not in the history.
False if it is.
:param board: 2d list
:param board_history: 3d list
:return: bool
"""
if board in board_history:
return False
return True
def board_generate_empty(size: 'board size'):
"""
Generates an empty board.
:param size: int
:return: 2d list
"""
empty_board = [[None] * size for _ in range(size)]
return empty_board
def switch_color(color):
"""
Returns 'w' if 'b'.
Returns 'b' if 'w'.
:param color: 'w' or 'b'
:return: 'w' or 'b'
"""
return "b" if color == "w" else "w"
def flatten(list_of_lists):
"""
Turns a 2d list into a 1d list by means of unraveling it.
:param list_of_lists: 2d list
:return: list
"""
flattened_list = [y for x in list_of_lists for y in x]
return flattened_list
def get_int_width(integer):
"""
Quite literally tells you the length of an integer (cast as a string).
:param integer: int
:return: int
"""
return len(str(integer))
def render_board(board, empty=' '):
"""
Generates a 2d ascii image of the board.
:param board: 2d list
:param empty: char which will represent how empties are shown
:return: ascii image of board
"""
board_image = ''
size_list = list(range(len(board[0])))
first_digit = [floor(x1 / 10.) for x1 in size_list]
second_digit = [x2 % 10 for x2 in size_list]
largest_int_width = get_int_width(size_list[-1])
print_row = ' ' * (largest_int_width + 2)
for ix in range(len(size_list)):
if first_digit[ix]:
print_row += str(first_digit[ix]) + " "
else:
print_row += " "
board_image += print_row + "\n" # print(print_row)
print_row = ' ' * (largest_int_width + 2)
for ix in range(len(size_list)):
print_row += str(second_digit[ix]) + " "
board_image += print_row + "\n" # print(print_row)
board_image += "\n" # print()
for i, row in enumerate(board):
int_width = get_int_width(i)
blank_spaces = largest_int_width - int_width
print_row = ''
for b_s in range(blank_spaces):
print_row += " "
print_row += str(i) + " " * 2
for element in row:
if not element:
print_row += empty + ' '
else:
print_row += element + ' '
board_image += print_row + "\n" # print(print_row)
return board_image
if __name__ == "__main__":
# Initialize
player = 'b'
b_size = 19
g = Game(b_size)
# Test Moves
for _ in range(100):
open_spaces = []
for i in range(len(g.board)):
for j in range(len(g.board)):
if not g.board[i][j]:
open_spaces.append((i, j))
chosen = choice(open_spaces)
g.play(chosen, player)
player = switch_color(player)
print(render_board(g.board, "."))
print("captures: ", g.captures)
print("rules: ", g.rules)
|
983,837 | b4e8bcce56485892a88ea0e52c49f899822bdf08 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .fifo_memory import FIFOMemory
from .memory import Memory
from .rollouts import rollout, rollout_to_src, RolloutPool, rollout_dataset
from .trajectory import Trajectory, Transition
|
983,838 | 0fa7dd56b87e02ed09839b94490b5f7bd91809a3 | import tensorflow as tf
val_ragged = tf.ragged.constant([[1, 2, 3], [1, 2], [1, 2, 3, 4]])
val_tensor = val_ragged.to_tensor()
inputs = tf.keras.layers.Input(shape=(None, None,), ragged=False)
outputs = tf.keras.layers.Embedding(5, 4)(inputs)
model = tf.keras.Model(inputs=inputs, outputs=outputs)
# this model with normal tensor works
print(model(val_tensor))
inputs_ragged = tf.keras.layers.Input(shape=(None, None,), ragged=True)
outputs_ragged = tf.keras.layers.Embedding(5, 4)(inputs_ragged)
model_ragged = tf.keras.Model(inputs=inputs_ragged, outputs=outputs_ragged)
# this one with RaggedTensor doesn't
print(model_ragged(val_ragged))
#solution: Upgrade to >= tf-nightly 20191111
|
983,839 | 8ab01aa55c8d29776cd502a3d80973ae278dcd83 | '''
Write a function that accepts two (matrices) 2 dimensional lists a and b of unknown lengths and returns their product.
Hint: Two matrices a and b can be multiplied together only if the number of columns of the first matrix(a) is the same as the number of rows of the second matrix(b).
Do NOT use numpy module for this exercise. The input for this function will be two 2 Dimensional lists. For example if the input lists are:
a = [[2, 3, 4],
[3, 4, 5]]
b = [[4, -3, 12],
[1, 1, 5],
[1, 3, 2]]
'''
def _product_of_two_vectors_sample_(a, b):
if len(a[0]) != len(b):
return None
# Create the result matrix and fill it with zeros
output_list=[]
temp_row=len(b[0])*[0]
for r in range(len(a)):
output_list.append(temp_row[:])
for row_index in range(len(a)):
for col_index in range(len(b[0])):
sum=0
for k in range(len(a[0])):
sum=sum+a[row_index][k]*b[k][col_index]
output_list[row_index][col_index]=sum
return output_list
a = [[2, 3, 4],
[3, 4, 5]]
b = [[4, -3, 12],
[1, 1, 5],
[1, 3, 2]]
print(_product_of_two_vectors_sample_(a,b))
#print(a[0][0]*b[0][0] + a[0][1]*b[1][0]+ a[0][2]*b[2][0], a[0][0]*b[0][1] + a[0][1]*b[1][1]+ a[0][2]*b[2][1]) |
983,840 | ed4a27da0d7a27d6a7f12614cff75d5709eab8bc | from .proxytopic import ProxyTopic |
983,841 | 64c4f69ebfd543c17bd73d26d2130575fcff5028 | n1,k=map(str,input().split())
c=0
for i in range(0,len(n1)):
if n1[i]==k:
c+=1
print(c)
|
983,842 | ddc92e91582bc9817670a1f99a552f16196db0c4 | import sys, pygame, time
pygame.init()
size = width, height = 800, 600
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
points = [10]*width
screen = pygame.display.set_mode(size)
loopNum = 0
interval = 0.05
offset = 10
def addDot(x):
points.pop(0)
points.append(x)
def dot(x,y):
pygame.draw.line(screen, GREEN, (x,y), (x,y), 1)
def line(x1,y1,x2,y2):
pygame.draw.line(screen, GREEN, (x1,y1), (x2,y2), 1)
def drawDots():
for i in range(width-1):
line(i,height - points[i],i+1,height - points[i+1])
def translate(val):
newVal = ( ((val+1)/1024) * (height-offset) ) + offset
print(newVal)
return newVal
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT: sys.exit()
loopNum = loopNum + 1
addDot(loopNum)
screen.fill(BLACK)
drawDots()
pygame.display.flip()
time.sleep(interval)
|
983,843 | 67fb6ab1bb9da70d5d560f0198d7726c205f3c5e |
def sqrt(number):
"""
Calculate the floored square root of a number
Args:
number(int): Number to find the floored squared root
Returns:
int: Floored Square Root
"""
# check for negative inputs
if number < 0:
return None
# square root of 1 and 0 is 1 and 0
elif number in [1, 0]:
return number
# initialise upper and lower bound
high = number
low = 0
while low < high:
# mid is the average of high and low
mid = (high + low) // 2
# if mid ** 2 is the number, return the mid value
# OR, if mid ** 2 is smaller than the number and (mid + 1) ** 2 is larger than the number,
# return the mid number as it's the floor value
if mid**2 <= number < (mid+1)**2:
return mid
# mid is too high, change high var to mid
elif mid**2 > number:
high = mid
# mid is too low, change low var to mid
else:
low = mid
# =================================================================================
# Tests
# =================================================================================
print ("Pass" if (3 == sqrt(9)) else "Fail")
print ("Pass" if (0 == sqrt(0)) else "Fail")
print ("Pass" if (4 == sqrt(16)) else "Fail")
print ("Pass" if (1 == sqrt(1)) else "Fail")
print ("Pass" if (5 == sqrt(27)) else "Fail")
print ("Pass" if (27 == sqrt(783)) else "Fail")
print ("Pass" if (28 == sqrt(784)) else "Fail")
print ("Pass" if (28 == sqrt(785)) else "Fail")
print ("Pass" if (99999 == sqrt(9999800001)) else "Fail")
print ("Pass" if (99998 == sqrt(9999800000)) else "Fail")
|
983,844 | e6ac7b72ec571c54189784b451b48222619a64ae | import time
def methodA(filename):
data = []
for line in open(filename):
data.append(line.strip().split(','))
return data
def methodB(filename):
with open(filename) as f:
data = f.readlines()
data = [line.strip().split(',') for line in data]
return data
def measureTimeAB():
start = time.time()
print(methodA('data/data.csv'))
end = time.time()
print(end - start)
start = time.time()
print(methodB('data/data.csv'))
end = time.time()
print(end - start)
if __name__ == '__main__':
measureTimeAB()
# [['8.84', '17.22', '13.22', '3.84'], ['3.99', '11.73', '19.66', '1.27'], ['16.14', '18.72', '7.43', '11.09']]
# 0.0019943714141845703
# [['8.84', '17.22', '13.22', '3.84'], ['3.99', '11.73', '19.66', '1.27'], ['16.14', '18.72', '7.43', '11.09']]
# 0.0009984970092773438 |
983,845 | 616a3fb4a3862bba2df638e3ff0d1e5aa62cbacb | class Solution(object):
def maxSubArray(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
max_sum = nums[0]
cur_sum = 0
for i,num in enumerate(nums):
cur_sum += num
if cur_sum > max_sum:
max_sum = cur_sum
if cur_sum < 0:
cur_sum = 0
return max_sum |
983,846 | d00dcd87ea7dd05bfd04b6f98506ebf4af0cb62b | from __future__ import division
# ## Python function/script to estimate character frequencies
# Given a .cp, profile file and encoding, estimate the character frequences in a class
# Approach:
# Read in group profile and then set a threshold.
# Consider all positions that are above the threshold to be part of the group
#
# Get corresponding position in the alignment encoding and count the frequency
#
# output summary
import os
import argparse
import csv
import pandas as pd
from collections import Counter
import glob
parser = argparse.ArgumentParser()
#-db DATABSE -u USERNAME -p PASSWORD -size 20
# parser.add_argument("-gpfile", "--groupfile", help="group profile", type = str)
parser.add_argument("-i", "--input", help="Coded alignment", type = str)
parser.add_argument("-t", "--threshold", help="Threshold", type = float)
parser.add_argument("-o", "--output", type = str)
alphabet='UVWXYZabcdefghijklmnopqrstuvwxyz'
letters=list(alphabet)
args = parser.parse_args()
isFirst=True
# print( args.groupfile )
with open(args.input) as f:
lines = f.readlines()
lines = lines[0].split('#')
gpFileList = glob.glob('*burnin.p*')
gpFileList
for file in gpFileList:
print(file)
rowIndex = 0
if os.path.isfile('gpFileTemp'):
os.remove('gpFileTemp')
#init an empty dict with None values
letters.append('000gpNum')
myDict=dict.fromkeys(letters)
myDict['000gpNum']=file
#This block creates a file containing all the characters from the alignment
# that are allocated to this group with greater than threshold probability
with open(file, 'r') as f:
with open('gpFileTemp','w') as g:
myFile = csv.reader(f)
for line in myFile:
clean_line = line[1:]
# print clean_line
for i in range(0,len(clean_line)-1):
value = float(clean_line[i])
if value > args.threshold:
char = lines[rowIndex][i]
g.write(char)
rowIndex = rowIndex + 1
# import pdb; pdb.set_trace()
with open('gpFileTemp','r') as g:
s = g.read()
# print(s+'\n')
slen = len(s)
for letter in alphabet:
if slen == 0:
charProp = 0
else:
charProp = s.count(letter)/slen
myDict[letter]=charProp
myDf=pd.DataFrame(myDict,index=[0])
if isFirst:
myDf.to_csv(args.output,index=False,header=True)
isFirst=False
else:
storageDf=pd.read_csv(args.output,header=0)
storageDf=storageDf.append(myDf,ignore_index=True)
storageDf.to_csv(args.output,index=False,header=True)
# consProp = s.count('a') + s.count('v')
# consProp = consProp/len(s)
# print "The frequency of a is {}".format(s.count('a')/len(s))
# print "The frequency of v is {}".format(s.count('v')/len(s))
# print "Conservation proportion is {}".format(consProp)
# print "The number of sequence positions with > {} probability of being in this class is {}".format(args.threshold, len(s))
os.remove('gpFileTemp')
|
983,847 | a4e5d145c0b430ae9c47f1868098d5314f814739 | from p5 import *
from vector_2d import Vector
class Neutron:
def __init__(self, p):
self.r = 10
self.v = [np.random.randint(-10,10), np.random.randint(-10,10)]
self.p = p
def show(self):
fill(255, 100, 100, 100)
circle((self.p[0], self.p[1]), self.r)
def update(self):
self.p = np.add(self.p, self.v)
def check_collision(self, others):
for i in others:
if hasattr(i, 'r'):
d = dist(self.p, i.p)
if d*10 <= self.r + i.r:
return True
elif hasattr(i, 'h'):
if i.p[0] < self.p[0] <= i.p[0] + i.w and i.p[1] < self.p[1] <= i.p[1] + i.h:
return True
def check_wall_collision(self):
if self.p[0] - self.r < 0:
self.p[0] = self.r
self.v[0] = -self.v[0]
if self.p[0] + self.r > p5.sketch.size[0]:
self.p[0] = p5.sketch.size[0] - self.r
self.v[0] = -self.v[0]
if self.p[1] - self.r < 0:
self.p[1] = self.r
self.v[1] = -self.v[1]
if self.p[1] + self.r > p5.sketch.size[1]:
self.p[1] = p5.sketch.size[1] - self.r
self.v[1] = -self.v[1]
class Fuel:
def __init__(self, i):
self.i = i
self.r = 30
self.p = (p5.sketch.size[0]/2 + 300*np.cos(i*20*PI/180), p5.sketch.size[1]/2 + 300*np.sin(i*PI/180*20))
def show(self):
fill(100, 255, 100, 300)
circle((self.p[0], self.p[1]), self.r)
class Control_rod:
def __init__(self, h, w, p):
self.h = h
self.w = w
self.p = p
def show(self):
fill(100, 255, 255, 300)
rect(self.p[0], self.p[1], self.w, self.h)
|
983,848 | 1a265acb93e02a783dc93d1ae52841c08233457e | import numpy as np
import random as rn
from time import clock,time
N = np.array([rn.randint(-10,10) for i in range (10)])
N1 = np.array([rn.randint(-100,100) for i in range (100)])
N2 = np.array([rn.randint(-1000,1000) for i in range (1000)])
print('Исходный массив: ')
for i in range (len(N)):
print(N[i],end = ' ')
print()
print()
print()
# Улучшенный пузырек
clock()
i = -1
flag = True
while flag:
flag = False
for j in range(len(N) - i - 2):
if N[j] > N[j+1]:
N[j], N[j+1] = N[j+1], N[j]
flag = True
i += 1
t = clock()
print('Отсортированный массив Улучшенный пузырек: ')
for i in range (len(N)):
print(N[i],end = ' ')
print()
print('Время выполнения: ',t)
print()
|
983,849 | ecac96fbb326ba91a637e49ca9eda30784a324fb | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-05-29 05:15
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('chit_main_app', '0002_auto_20180523_0654'),
]
operations = [
migrations.CreateModel(
name='Loan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('loan_amount', models.IntegerField()),
('interest', models.IntegerField()),
('approved_date', models.DateField()),
('cid', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chit_main_app.Customer')),
],
),
]
|
983,850 | 8ee7a240dd0043202f0a70a3bccaabddb975b35e | from graph_db.configuration import Configuration, makeAttribute, makeAllowedRelation
class TestConstructor(Configuration):
def __init__(self):
Configuration.__init__(self)
############ NODES #############
base_attributes_top = [
makeAttribute("readable_name", "Readable Name", "For rapid people understanding", self.TYPE_STRING, ""),
makeAttribute("name", "Unique API Name", "For automated scripts, should be permanent", self.TYPE_STRING, ""),
]
base_attributes_bottom = [
makeAttribute("description", "Description", "For people some additional information", self.TYPE_TEXT, ""),
]
# addEntityClass(cid, name, readable_name, description, attributes_list)
# basic type classes
self.addEntityClass(1, "object_type", "Object type", "", base_attributes_top + base_attributes_bottom)
self.addEntityClass(2, "pin_type", "Pin type", "", base_attributes_top + base_attributes_bottom)
self.addEntityClass(3, "socket_type", "Socket type", "", base_attributes_top + [
makeAttribute("direction", "Links direction", "in or out or none", self.TYPE_STRING, ""),
] + base_attributes_bottom)
self.addEntityClass(4, "link_type", "Link type", "", base_attributes_top + base_attributes_bottom)
# templates
self.addEntityClass(5, "template", "Template", "", base_attributes_top + base_attributes_bottom)
self.addEntityClass(6, "template_socket", "Template Socket", "", base_attributes_top + [
] + base_attributes_bottom)
# instances
self.addEntityClass(7, "object", "Object", "", base_attributes_top + base_attributes_bottom)
self.addEntityClass(8, "pin", "Pin", "", base_attributes_top + base_attributes_bottom)
self.addEntityClass(9, "socket", "Socket", "", base_attributes_top + [
] + base_attributes_bottom)
self.addEntityClass(10, "link", "Link", "", base_attributes_top + [
] + base_attributes_bottom)
# view attributes
self.addEntityClass(11, "view", "View", "", base_attributes_top + [
] + base_attributes_bottom)
self.addEntityClass(12, "coords", "Coordinates", "", base_attributes_top + [
makeAttribute("x", "x", "X coordinate on view", self.TYPE_DOUBLE, ""),
makeAttribute("y", "y", "Y coordinate on view", self.TYPE_DOUBLE, ""),
] + base_attributes_bottom)
self.addEntityClass(13, "display_attrs", "Display Attributes", "", base_attributes_top + [
makeAttribute("shape", "Shape", "Circle, Square or something else", self.TYPE_STRING, ""),
makeAttribute("color", "Color", "#ffffff", self.TYPE_STRING, ""),
makeAttribute("size", "Size", "Should be > 0", self.TYPE_INTEGER, ""),
makeAttribute("image", "Image", "Should be an url", self.TYPE_STRING, ""),
makeAttribute("scale", "Scale", "Should be > 0", self.TYPE_DOUBLE, ""),
] + base_attributes_bottom)
# tag class
self.addEntityClass(149, "tag", "tag", "", base_attributes_top + [
])
############ EDGES #############
self.addRelationClass(102, "instanceof", "Instance of", "Instance of type", [
], [
makeAllowedRelation(
{"cname":"template", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"object_type", "multiplicity" : self.MUL_ONE}
),
makeAllowedRelation(
{"cname":"object", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"template", "multiplicity" : self.MUL_ONE}
),
makeAllowedRelation(
{"cname":"pin", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"pin_type", "multiplicity" : self.MUL_ONE}
),
makeAllowedRelation(
{"cname":"template_socket", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"socket_type", "multiplicity" : self.MUL_ONE}
),
makeAllowedRelation(
{"cname":"socket", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"template_socket", "multiplicity" : self.MUL_ONE}
),
makeAllowedRelation(
{"cname":"link", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"link_type", "multiplicity" : self.MUL_ONE}
),
])
self.addRelationClass(103, "composition", "Composition", "Composition links", [
], [
makeAllowedRelation(
{"cname":"object_type", "multiplicity" : self.MUL_ONE},
{"cname":"pin", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"socket_type", "multiplicity" : self.MUL_ONE},
{"cname":"pin_type", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"template_socket", "multiplicity" : self.MUL_ONE},
{"cname":"pin", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"template", "multiplicity" : self.MUL_ONE},
{"cname":"template_socket", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"object", "multiplicity" : self.MUL_ONE},
{"cname":"socket", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
])
self.addRelationClass(104, "attributes", "Attributes", "Additional attribute instance", [
], [
makeAllowedRelation(
{"cname":"object", "multiplicity" : self.MUL_ONE},
{"cname":"coords", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"template_socket", "multiplicity" : self.MUL_ONE},
{"cname":"coords", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"socket_type", "multiplicity" : self.MUL_ONE},
{"cname":"display_attrs", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"template", "multiplicity" : self.MUL_ONE},
{"cname":"display_attrs", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
])
self.addRelationClass(105, "logical", "Logical", "Logical connections", base_attributes_top+[
], [
makeAllowedRelation(
{"cname":"coords", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"view", "multiplicity" : self.MUL_ONE}
),
makeAllowedRelation(
{"cname":"display_attrs", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"view", "multiplicity" : self.MUL_ONE}
),
])
self.addRelationClass(106, "connectable", "Connectable", "Connection is allowed", base_attributes_top+[
], [
makeAllowedRelation(
{"cname":"pin_type", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"pin_type", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
])
self.addRelationClass(107, "from_link", "From Link", "Link connection: from_socket", base_attributes_top+[
], [
makeAllowedRelation(
{"cname":"link_type", "multiplicity" : self.MUL_ONE},
{"cname":"socket_type", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"link", "multiplicity" : self.MUL_ONE},
{"cname":"socket", "multiplicity" : self.MUL_ZERO_OR_ONE}
),
])
self.addRelationClass(108, "to_link", "To Link", "Link connection: to_socket", base_attributes_top+[
], [
makeAllowedRelation(
{"cname":"link_type", "multiplicity" : self.MUL_ONE},
{"cname":"socket_type", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
makeAllowedRelation(
{"cname":"link", "multiplicity" : self.MUL_ONE},
{"cname":"socket", "multiplicity" : self.MUL_ZERO_OR_ONE}
),
])
self.addRelationClass(1149, "tag_link", "tag_link", "Tag link", [
], [
makeAllowedRelation(
{"cname":"object", "multiplicity" : self.MUL_ZERO_OR_MORE},
{"cname":"tag", "multiplicity" : self.MUL_ZERO_OR_MORE}
),
])
|
983,851 | 27d45ed089dde4f1c00b3cdde741a06c66a29284 | #!/usr/bin/python3
##################################################
## main.py | Hasan Abdullah ##
## Main file invoking functions from parse.py ##
##################################################
import re
from parse import *
def main():
ticker = str(input('Enter ticker or CIK: ')).strip()
if re.search('[^0-9]', ticker):
print('TickerError: ticker or CIK must contain digits only')
sys.exit(1)
results_page = search_ticker(ticker)
reports = parse_text(results_page, 1)
filename = str(input('Enter the filename (without file extension) to put the data in: ')).strip()
create_file(filename, reports)
if __name__ == '__main__':
main()
|
983,852 | b891990ab6c43ee98ee55f2ef4d8851844561f50 | import aliencompletion
class StoryParts():
start = ['In some few years from now into the future, scientists have discovered life on Mars and want to inspect it so they can know more about it.','They want to send a group of astronauts there to do that. They choose you as one of the people to go there.','You will have to stay away from your family for years and stay there.','Will you accept this and go ?']
A1 = ['You accepted to go to Mars. ', 'You say goodbye to your family and go in the rocket to Mars with 3 other astronauts. The other 3 were Sam, Jane and Richard. They rocket also had a machine to catch the alien and it could move too. ', 'The rocket takes off and takes you to Mars. Sam says that we should go out and search for the life forms but Jane says that we should stay in the ship for some time because it will be easier to take them if they are near the ship.', 'What do you want to do ?']
A2 = ['You think that the aliens can be dangerous and decide to stay at home with your family without knowing about the life on Mars.']
B1 = ['Nobody was joining Sam in going out so even Sam decided to not go out.', 'After waiting for some time too no life form came near the spaceship. Sam said again that we should go out and lure them towards the spaceship.']
B2 = ['You decide to go out with Sam to search for the life form. The others decide to stay in the spaceship. You both wear your spacesuits and go out.', 'While searching you see a Mars rover which was broken by one of the aliens. ', 'After some time you see something moving at some distance. Sam asks you if you want to lure it to the spaceship so that it is easier to catch it to do research on it or if you want to follow it and find out about how their civilization looks like first.']
C1 = ['You decide to lure the creature to the spaceship but you have no idea about how to lure it there.', "You shout but the creature doesn't turn back and you fail to lure it. You try a couple of more things but even they don't work.", "You can't talk to the creature because they won't understand you and you don't know how they would react to something unknown.", 'You give up and decide to call the spaceship to send the machine to catch the alien.']
C2 = ['You decide to follow the life form because you are interested in their civilization.', 'You have a family on Earth so you are interested in how these aliens have developed their civilization. ', 'You miss your family on Earth and think about what they would have been doing now. ', "You and Sam follow the organism for some time from some distance as you don't want to be seen by it.", 'The alien stops midway and stays there. You and Sam decide to call the spaceship to send the machine for catching the alien.']
C = ['The machine arrives and the alien is caught and is being brought to the spaceship.', 'You and Sam saw the alien from up close after it had been caught. It had huge eyes and tentacles. It was about 5 feet tall. It looked more like an octopus monster from an horror movie rather than the aliens from sci-fi movies.','The space station above Mars had a chamber to put this alien in there so it had to be taken there.', ' You and Sam saw the alien holding something using one of its tentacles. It looked like the alien had taken a bite from it.', ' You saw that thing on your way to the spaceship so you and Sam decided to pick them up so that the alien can survive.', 'The alien is loaded into the spaceship and the spaceship launches to the space station. The alien is put inside of the container meant for it and you and the others do their normal work.', 'After some time, while you are working with Richard you both hear some sounds coming from around the area where the alien is kept. Richard says that it might just be Sam doing his work.']
D1 = ["You decided to check out the sound and go to the chamber where the alien is kept.", "It doesn't look like something happened there. The alien looks calm. ","You see that a lose screw is floating around in the room. It came out from the one of the parts of the alien container. ", "You fix it and then check the whole room and return to Richard and tell him about it. ","Richard says that something might have hit the container while floating around in zero gravity. You agree with him."]
D = ["You agree with Richard and continue doing your work.","After some time, you and the other 3 astronauts start doing the research and observations on the alien.", "While everyone is doing the observations you notice a scratch on the glass shield between you and the alien. ", "You think it won't be a problem because you are supposed to send the alien on Mars again after the observations are done. But you still tell everyone else about the scratch and everyone says the same thing that you were thinking.","Most of the observations and research related to the alien are done and the other few are supposed to be completed after a certain period of time.", "The 4 of you have some fun together for some time. ", "You need to send the results back to Earth and you need a helping hand with you. Who are you going to take with yourself ?"]
E = ["You chose {choice} to help you with the sending of data. ","While you two are working, after some time the other two decide to continue the tests. They come to inform both of you that they are going to continue the tests now.","They both inform you two and go towards the room of the alien and you continue your work. ","After a couple of moments you hear a loud sound coming from there. The sound startles you and {choice}.","It sounded like some glass broke and something hard hit on a wall or a floor. Both of you get worried about the other two and decide to go and check on them. ","As you start to go towards there, you see the both of them coming towards you. ","They tell you that the alien has broke the glass and is trying to escape. ","Another sound similar to the last one comes from the room. The lights in the space station turn red.","Jane suggests everyone that they should go in the control room and close the door of the aliens room. Everyone agrees with her and heads to the control room.","While everyone is heading to the control room you want to see the alien. What do you do?"]
F1 = ["You decide to go to the aliens room.","You move towards there while floating in zero gravity through the red passageway. The sound of the banging on the glass increases in volume as you reach closer.","You reach the room's door.","You stop there before looking.","You here a loud scream by the alien. The alien is banging on the glass to get out. You are scared.","You peek from the door at the alien. The container has been broken. Three of the tentacles are out of the container and the alien is trying to get out through the hole but it can't.","There is also some kind of liquid coming out of its tentacles. It might possibly be the blood of the alien.","It hits again and the hole gets bigger.","The alien looks at you as you stand still and terrified. The alien lets out a screech while trying to get out through the hole.","Suddenly, you are pulled out of the room.","You turn and see Sam next to you. He was the one who pulled you out.","He then calls the others and tells them to shut the door. You and Sam proceed to go to the control room."]
F = ["Everyone is in the control room and the door for the alien's room has been closed.","Jane says that we need to send the alien back on Mars and to do that we need put it in the spaceship.","Richard proceeds to say that we can't do that by just closing all the doors because there is a chance it might just roam in the station and not go in the ship and we will need someone to pilot the ship.","You remember the alien food you collected while bringing the alien here. It hasn't been used till now.","You tell everyone else about it. They agree that we can use it to lure the alien to the ship.You and Richard say that you want to be the one who do that.","The others tell you that they worried about you. But you say that the alien might be doing this to go back to Mars and it might not like that we kidnapped it from its home."]
G = ["You place the alien food in the passageway in line going towards the ship. You put the rest of it in the back of the ship.","Richard is sitting in the ship waiting for you. You come and sit at the controls.","You tell Jane and Sam to open the door of the alien's room. They open the door and the alien comes out.","The alien looks at the floating food and collects it. It is not eating it but it is following the path.","When it enters the spaceship, the door to go out of it is closed and it is then released to be sent to Mars. The alien is hitting the doors because it is trapped once again.","You and Richard take the ship to Mars. The landing was difficult due to the shaking caused by the alien."]
end = ["When the landing is done, the door is opened for the alien to go out. After the alien has climbed out of the ship and moved away from the ship, the doors are closed and the ship flies back towards the station.","\nWhen you and Richard reach the station everyone is relieved that the everyone is safe.The results of the first observations are present on the space station.","The research could not have been completed because the alien broke free.","But, at least all 4 of you are alive."]
# Node super class exists if we want to add a function in all of the nodes
class Node():
curNodeName = ""
def GotoNextPart(self, index):
if len(self.nextParts) == 0:
return "main"
return self.nextParts[index]
class StoryStart(Node):
def __init__(self):
self.nextParts = [A1(), A2()]
self.choices = ["Yes", "No"]
self.story = StoryParts.start
class A1(Node):
def __init__(self):
self.nextParts = [B1(), B2()]
self.choices = ["Stay In", "Go Out"]
self.story = StoryParts.A1
class A2(Node):
def __init__(self):
self.nextParts = []
self.choices = ["The End"]
self.story = StoryParts.A2
class B1(Node):
def __init__(self):
self.nextParts = [B2()]
self.choices = ["Go Out"]
self.story = StoryParts.B1
class B2(Node):
def __init__(self):
self.nextParts = [C1(), C2()]
self.choices = ["Lure It", "Follow It"]
self.story = StoryParts.B2
class C1(Node):
def __init__(self):
self.nextParts = [C()]
self.choices = ["Next"]
self.story = StoryParts.C1
class C2(Node):
def __init__(self):
self.nextParts = [C()]
self.choices = ["Next"]
self.story = StoryParts.C2
class C(Node):
def __init__(self):
self.nextParts = [D1(), D()]
self.choices = ["Check out the Sound", "Keep doing your work"]
self.story = StoryParts.C
class D1(Node):
def __init__(self):
self.nextParts = [D()]
self.choices = ["Next"]
self.story = StoryParts.D1
class D(Node):
def __init__(self):
self.nextParts = [E()]
self.choices = ["Jane", "Sam", "Richard"]
self.story = StoryParts.D
self.extraStore = "D"
self.extraValues = self.choices
def GotoNextPart(self, index):
return E()
class E(Node):
def __init__(self):
self.nextParts = [F1(), F()]
self.choices = ["Go to the Alien", "Go with everyone else"]
self.story = StoryParts.E
self.extraStore = "E"
self.extraValues = [" ", 'You think that it might be dangerous to go there now and decide to stay with the others.']
self.checkForExtra = "D"
def AccessExtra(self):
name = aliencompletion.GetExtra(self.checkForExtra)["NodeData"].replace("\n","")
for index in range(len(self.story)):
self.story[index] = self.story[index].replace("{choice}", name)
class F1(Node):
def __init__(self):
self.nextParts = [F()]
self.choices = ["Next"]
self.story = StoryParts.F1
class F(Node):
def __init__(self):
self.nextParts = [G()]
self.choices = ["Next"]
self.story = StoryParts.F
self.checkForExtra = "E"
def AccessExtra(self):
val = aliencompletion.GetExtra(self.checkForExtra)["NodeData"].replace("\n","")
if val != " ":
self.story.insert(0, val)
class G(Node):
def __init__(self):
self.nextParts = [AlienEnd()]
self.choices = ["Next"]
self.story = StoryParts.G
class AlienEnd(Node):
def __init__(self):
self.nextParts = []
self.choices = ["Next"]
self.story = StoryParts.end
|
983,853 | ac48d7cb5390bc03c55c748fd1507b8b7589f755 | from selenium import webdriver
from selenium.webdriver.common.by import By
class ListOfElements:
def test(self, base_url=None, els_by_class_name=None, els_by_tag_name=None):
if base_url == None:
exit(code="No Url Specified")
driver = webdriver.Chrome()
try:
driver.get(base_url)
except:
exit(code="Url invalid or incorrectly entered!")
if els_by_class_name is not None:
try:
cn = len(driver.find_elements(By.CLASS_NAME, els_by_class_name))
if cn is not None:
print(f"{cn} class-name='{els_by_class_name}' instances found!")
except :
print("An error occurred, check search value for ID")
if els_by_tag_name is not None:
try:
tn = len(driver.find_elements(By.TAG_NAME, els_by_tag_name))
if tn is not None:
print(f"{tn} tag-name='{els_by_tag_name}' instances found!")
except:
print("An error occurred, check search value for Xpath")
chrm = ListOfElements()
chrm.test("https://www.usd.edu/", "fb_reset", "div") |
983,854 | 1b84e542e14a5d595e522e1525934cc1d962ace1 | import os
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x, l=None, r=None):
self.val = x
self.left = l
self.right = r
class Solution(object):
def isSameTree(self, p, q):
"""
:type p: TreeNode
:type q: TreeNode
:rtype: bool
"""
if p is None and q is None: return True
if p is None and q is not None: return False
if p is not None and q is None: return False
return p.val == q.val and self.isSameTree(p.left, q.left) and self.isSameTree(p.right, q.right)
if __name__ == "__main__":
print("Running", os.path.basename(__file__), end=' ')
t1 = TreeNode(1, None, TreeNode(2, TreeNode(3), None))
t2 = TreeNode(1, None, TreeNode(2, TreeNode(3), None))
assert Solution().isSameTree(t1, t2) == True
assert Solution().isSameTree(t1, None) == False
print(" ---> Success")
|
983,855 | 76b02037e93c3beafaeca31df5bfdb0d7f63219d | def extraLongFact(n):
prod =1
for i in range(2,n+1):
prod *= i;
return prod
print(extraLongFact(25)) |
983,856 | f16053c5ff0ac7b4585b44a1a805b96f6d24a35c | # Generated by Django 2.0.1 on 2018-02-11 17:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('blog1', '0002_auto_20180211_1723'),
]
operations = [
migrations.AlterField(
model_name='post',
name='published',
field=models.DateTimeField(blank=True, null=True),
),
]
|
983,857 | 1acc5bc9b261ddc8e45b69eed703c6d334797b1b | # Import pandas
import pandas as pd
import json
class Correspondence:
def __init__(self, datetime, line):
self.user = 1
self.date = datetime
self.utterance = line
class Conversation:
def __init__(self, identifier):
self.id = identifier
self.correspondence = []
def add_correspondence(self, datetime, line):
self.correspondence.append(Correspondence(datetime, line))
# Assign spreadsheet filename to `file`
file = '/Users/agenc/Downloads/Whole conversations.xlsx'
# Load spreadsheet
xl = pd.ExcelFile(file)
# Print the sheet names
print(xl.sheet_names)
# Load a sheet into a DataFrame by name: df1
df1 = xl.parse(xl.sheet_names[0])
conversations = []
conversation = Conversation(None)
conversations.append(conversation)
for r in range(3):
#line = ""
#for c in range(df1.shape[1]):
# line += str(df1.iloc[r][c]) + "\t"
#print(line)
if not conversation.id:
conversation.id = df1.iloc[r][1]
if conversation.id != df1.iloc[r][1]:
conversation = Conversation(df1.iloc[r][1])
conversations.append(conversation)
conversation.add_correspondence(df1.iloc[r][3], df1.iloc[r][5])
with open("data_file.json", "w") as write_file:
json.dump(conversations, write_file)
|
983,858 | e0b22f71606794d63f5d0fd928d468a0c80472a5 | import time
import traceback
import threading
import logging
import collections
import re
import inspect
from functools import partial
from.import filtering,exception
from.import(flavor,chat_flavors,inline_flavors,is_event,message_identifier,origin_identifier)
try:
import Queue as queue
except ImportError:
import queue
class Microphone(object):
def __init__(self):
self._queues=set()
self._lock=threading.Lock()
def _locked(func):
def k(self,*args,**kwargs):
with self._lock:
return func(self,*args,**kwargs)
return k
@_locked
def add(self,q):
self._queues.add(q)
@_locked
def remove(self,q):
self._queues.remove(q)
@_locked
def send(self,msg):
for q in self._queues:
try:
q.put_nowait(msg)
except queue.Full:
traceback.print_exc()
class Listener(object):
def __init__(self,mic,q):
self._mic=mic
self._queue=q
self._patterns=[]
def __del__(self):
self._mic.remove(self._queue)
def capture(self,pattern):
self._patterns.append(pattern)
def wait(self):
if not self._patterns:
raise RuntimeError('Listener has nothing to capture')
while 1:
msg=self._queue.get(block=True)
if any(map(lambda p:filtering.match_all(msg,p),self._patterns)):
return msg
class Sender(object):
def __init__(self,bot,chat_id):
for method in['sendMessage','forwardMessage','sendPhoto','sendAudio','sendDocument','sendSticker','sendVideo','sendVoice','sendVideoNote','sendMediaGroup','sendLocation','sendVenue','sendContact','sendGame','sendChatAction',]:
setattr(self,method,partial(getattr(bot,method),chat_id))
class Administrator(object):
def __init__(self,bot,chat_id):
for method in['kickChatMember','unbanChatMember','restrictChatMember','promoteChatMember','exportChatInviteLink','setChatPhoto','deleteChatPhoto','setChatTitle','setChatDescription','pinChatMessage','unpinChatMessage','leaveChat','getChat','getChatAdministrators','getChatMembersCount','getChatMember','setChatStickerSet','deleteChatStickerSet']:
setattr(self,method,partial(getattr(bot,method),chat_id))
class Editor(object):
def __init__(self,bot,msg_identifier):
if isinstance(msg_identifier,dict):
msg_identifier=message_identifier(msg_identifier)
for method in['editMessageText','editMessageCaption','editMessageReplyMarkup','deleteMessage','editMessageLiveLocation','stopMessageLiveLocation']:
setattr(self,method,partial(getattr(bot,method),msg_identifier))
class Answerer(object):
def __init__(self,bot):
self._bot=bot
self._workers={}
self._lock=threading.Lock()
def answer(outerself,inline_query,compute_fn,*compute_args,**compute_kwargs):
from_id=inline_query['from']['id']
class Worker(threading.Thread):
def __init__(innerself):
super(Worker,innerself).__init__()
innerself._cancelled=False
def cancel(innerself):
innerself._cancelled=True
def run(innerself):
try:
query_id=inline_query['id']
if innerself._cancelled:
return
ans=compute_fn(*compute_args,**compute_kwargs)
if innerself._cancelled:
return
if isinstance(ans,list):
outerself._bot.answerInlineQuery(query_id,ans)
elif isinstance(ans,tuple):
outerself._bot.answerInlineQuery(query_id,*ans)
elif isinstance(ans,dict):
outerself._bot.answerInlineQuery(query_id,**ans)
else:
raise ValueError('Invalid answer format')
finally:
with outerself._lock:
if not innerself._cancelled:
del outerself._workers[from_id]
with outerself._lock:
if from_id in outerself._workers:
outerself._workers[from_id].cancel()
outerself._workers[from_id]=Worker()
outerself._workers[from_id].start()
class AnswererMixin(object):
Answerer=Answerer
def __init__(self,*args,**kwargs):
self._answerer=self.Answerer(self.bot)
super(AnswererMixin,self).__init__(*args,**kwargs)
@property
def answerer(self):
return self._answerer
class CallbackQueryCoordinator(object):
def __init__(self,id,origin_set,enable_chat,enable_inline):
self._id=id
self._origin_set=origin_set
def dissolve(enable):
if not enable:
return False,None
elif enable is True:
return True,None
elif callable(enable):
return True,enable
else:
raise ValueError()
self._enable_chat,self._chat_notify=dissolve(enable_chat)
self._enable_inline,self._inline_notify=dissolve(enable_inline)
def configure(self,listener):
listener.capture([lambda msg:flavor(msg)=='callback_query',{'message':self._chat_origin_included}])
listener.capture([lambda msg:flavor(msg)=='callback_query',{'inline_message_id':self._inline_origin_included}])
def _chat_origin_included(self,msg):
try:
return(msg['chat']['id'],msg['message_id'])in self._origin_set
except KeyError:
return False
def _inline_origin_included(self,inline_message_id):
return(inline_message_id,)in self._origin_set
def _rectify(self,msg_identifier):
if isinstance(msg_identifier,tuple):
if len(msg_identifier)==2:
return msg_identifier,self._chat_notify
elif len(msg_identifier)==1:
return msg_identifier,self._inline_notify
else:
raise ValueError()
else:
return(msg_identifier,),self._inline_notify
def capture_origin(self,msg_identifier,notify=True):
msg_identifier,notifier=self._rectify(msg_identifier)
self._origin_set.add(msg_identifier)
notify and notifier and notifier(msg_identifier,self._id,True)
def uncapture_origin(self,msg_identifier,notify=True):
msg_identifier,notifier=self._rectify(msg_identifier)
self._origin_set.discard(msg_identifier)
notify and notifier and notifier(msg_identifier,self._id,False)
def _contains_callback_data(self,message_kw):
def contains(obj,key):
if isinstance(obj,dict):
return key in obj
else:
return hasattr(obj,key)
if contains(message_kw,'reply_markup'):
reply_markup=filtering.pick(message_kw,'reply_markup')
if contains(reply_markup,'inline_keyboard'):
inline_keyboard=filtering.pick(reply_markup,'inline_keyboard')
for array in inline_keyboard:
if any(filter(lambda button:contains(button,'callback_data'),array)):
return True
return False
def augment_send(self,send_func):
def augmented(*aa,**kw):
sent=send_func(*aa,**kw)
if self._enable_chat and self._contains_callback_data(kw):
self.capture_origin(message_identifier(sent))
return sent
return augmented
def augment_edit(self,edit_func):
def augmented(msg_identifier,*aa,**kw):
edited=edit_func(msg_identifier,*aa,**kw)
if(edited is True and self._enable_inline)or(isinstance(edited,dict)and self._enable_chat):
if self._contains_callback_data(kw):
self.capture_origin(msg_identifier)
else:
self.uncapture_origin(msg_identifier)
return edited
return augmented
def augment_delete(self,delete_func):
def augmented(msg_identifier,*aa,**kw):
deleted=delete_func(msg_identifier,*aa,**kw)
if deleted is True:
self.uncapture_origin(msg_identifier)
return deleted
return augmented
def augment_on_message(self,handler):
def augmented(msg):
if(self._enable_inline and flavor(msg)=='chosen_inline_result' and 'inline_message_id' in msg):
inline_message_id=msg['inline_message_id']
self.capture_origin(inline_message_id)
return handler(msg)
return augmented
def augment_bot(self,bot):
class BotProxy(object):
pass
proxy=BotProxy()
send_methods=['sendMessage','forwardMessage','sendPhoto','sendAudio','sendDocument','sendSticker','sendVideo','sendVoice','sendVideoNote','sendLocation','sendVenue','sendContact','sendGame','sendInvoice','sendChatAction',]
for method in send_methods:
setattr(proxy,method,self.augment_send(getattr(bot,method)))
edit_methods=['editMessageText','editMessageCaption','editMessageReplyMarkup',]
for method in edit_methods:
setattr(proxy,method,self.augment_edit(getattr(bot,method)))
delete_methods=['deleteMessage']
for method in delete_methods:
setattr(proxy,method,self.augment_delete(getattr(bot,method)))
def public_untouched(nv):
name,value=nv
return(not name.startswith('_')and name not in send_methods+edit_methods+delete_methods)
for name,value in filter(public_untouched,inspect.getmembers(bot)):
setattr(proxy,name,value)
return proxy
class SafeDict(dict):
def __init__(self,*args,**kwargs):
super(SafeDict,self).__init__(*args,**kwargs)
self._lock=threading.Lock()
def _locked(func):
def k(self,*args,**kwargs):
with self._lock:
return func(self,*args,**kwargs)
return k
@_locked
def __getitem__(self,key):
return super(SafeDict,self).__getitem__(key)
@_locked
def __setitem__(self,key,value):
return super(SafeDict,self).__setitem__(key,value)
@_locked
def __delitem__(self,key):
return super(SafeDict,self).__delitem__(key)
_cqc_origins=SafeDict()
class InterceptCallbackQueryMixin(object):
CallbackQueryCoordinator=CallbackQueryCoordinator
def __init__(self,intercept_callback_query,*args,**kwargs):
global _cqc_origins
if self.id in _cqc_origins:
origin_set=_cqc_origins[self.id]
else:
origin_set=set()
_cqc_origins[self.id]=origin_set
if isinstance(intercept_callback_query,tuple):
cqc_enable=intercept_callback_query
else:
cqc_enable=(intercept_callback_query,)*2
self._callback_query_coordinator=self.CallbackQueryCoordinator(self.id,origin_set,*cqc_enable)
cqc=self._callback_query_coordinator
cqc.configure(self.listener)
self.__bot=self._bot
self._bot=cqc.augment_bot(self._bot)
self.on_message=cqc.augment_on_message(self.on_message)
super(InterceptCallbackQueryMixin,self).__init__(*args,**kwargs)
def __del__(self):
global _cqc_origins
if self.id in _cqc_origins and not _cqc_origins[self.id]:
del _cqc_origins[self.id]
@property
def callback_query_coordinator(self):
return self._callback_query_coordinator
class IdleEventCoordinator(object):
def __init__(self,scheduler,timeout):
self._scheduler=scheduler
self._timeout_seconds=timeout
self._timeout_event=None
def refresh(self):
try:
if self._timeout_event:
self._scheduler.cancel(self._timeout_event)
except exception.EventNotFound:
pass
finally:
self._timeout_event=self._scheduler.event_later(self._timeout_seconds,('_idle',{'seconds':self._timeout_seconds}))
def augment_on_message(self,handler):
def augmented(msg):
is_event(msg)or self.refresh()
if flavor(msg)=='_idle' and msg is not self._timeout_event.data:
return
return handler(msg)
return augmented
def augment_on_close(self,handler):
def augmented(ex):
try:
if self._timeout_event:
self._scheduler.cancel(self._timeout_event)
self._timeout_event=None
except exception.EventNotFound:
self._timeout_event=None
return handler(ex)
return augmented
class IdleTerminateMixin(object):
IdleEventCoordinator=IdleEventCoordinator
def __init__(self,timeout,*args,**kwargs):
self._idle_event_coordinator=self.IdleEventCoordinator(self.scheduler,timeout)
idlec=self._idle_event_coordinator
idlec.refresh()
self.on_message=idlec.augment_on_message(self.on_message)
self.on_close=idlec.augment_on_close(self.on_close)
super(IdleTerminateMixin,self).__init__(*args,**kwargs)
@property
def idle_event_coordinator(self):
return self._idle_event_coordinator
def on__idle(self,event):
raise exception.IdleTerminate(event['_idle']['seconds'])
class StandardEventScheduler(object):
def __init__(self,scheduler,event_space,source_id):
self._base=scheduler
self._event_space=event_space
self._source_id=source_id
@property
def event_space(self):
return self._event_space
def configure(self,listener):
listener.capture([{re.compile('^_.+'):{'source':{'space':self._event_space,'id':self._source_id}}}])
def make_event_data(self,flavor,data):
if not flavor.startswith('_'):
raise ValueError('Event flavor must start with _underscore')
d={'source':{'space':self._event_space,'id':self._source_id}}
d.update(data)
return{flavor:d}
def event_at(self,when,data_tuple):
return self._base.event_at(when,self.make_event_data(*data_tuple))
def event_later(self,delay,data_tuple):
return self._base.event_later(delay,self.make_event_data(*data_tuple))
def event_now(self,data_tuple):
return self._base.event_now(self.make_event_data(*data_tuple))
def cancel(self,event):
return self._base.cancel(event)
class StandardEventMixin(object):
StandardEventScheduler=StandardEventScheduler
def __init__(self,event_space,*args,**kwargs):
self._scheduler=self.StandardEventScheduler(self.bot.scheduler,event_space,self.id)
self._scheduler.configure(self.listener)
super(StandardEventMixin,self).__init__(*args,**kwargs)
@property
def scheduler(self):
return self._scheduler
class ListenerContext(object):
def __init__(self,bot,context_id,*args,**kwargs):
self._bot=bot
self._id=context_id
self._listener=bot.create_listener()
super(ListenerContext,self).__init__(*args,**kwargs)
@property
def bot(self):
return self._bot
@property
def id(self):
return self._id
@property
def listener(self):
return self._listener
class ChatContext(ListenerContext):
def __init__(self,bot,context_id,*args,**kwargs):
super(ChatContext,self).__init__(bot,context_id,*args,**kwargs)
self._chat_id=context_id
self._sender=Sender(self.bot,self._chat_id)
self._administrator=Administrator(self.bot,self._chat_id)
@property
def chat_id(self):
return self._chat_id
@property
def sender(self):
return self._sender
@property
def administrator(self):
return self._administrator
class UserContext(ListenerContext):
def __init__(self,bot,context_id,*args,**kwargs):
super(UserContext,self).__init__(bot,context_id,*args,**kwargs)
self._user_id=context_id
self._sender=Sender(self.bot,self._user_id)
@property
def user_id(self):
return self._user_id
@property
def sender(self):
return self._sender
class CallbackQueryOriginContext(ListenerContext):
def __init__(self,bot,context_id,*args,**kwargs):
super(CallbackQueryOriginContext,self).__init__(bot,context_id,*args,**kwargs)
self._origin=context_id
self._editor=Editor(self.bot,self._origin)
@property
def origin(self):
return self._origin
@property
def editor(self):
return self._editor
class InvoiceContext(ListenerContext):
def __init__(self,bot,context_id,*args,**kwargs):
super(InvoiceContext,self).__init__(bot,context_id,*args,**kwargs)
self._payload=context_id
@property
def payload(self):
return self._payload
def openable(cls):
def open(self,initial_msg,seed):
pass
def on_message(self,msg):
raise NotImplementedError()
def on_close(self,ex):
logging.error('on_close() called due to %s: %s',type(ex).__name__,ex)
def close(self,ex=None):
raise ex if ex else exception.StopListening()
@property
def listener(self):
raise NotImplementedError()
def ensure_method(name,fn):
if getattr(cls,name,None)is None:
setattr(cls,name,fn)
ensure_method('open',open)
ensure_method('on_message',on_message)
ensure_method('on_close',on_close)
ensure_method('close',close)
ensure_method('listener',listener)
return cls
class Router(object):
def __init__(self,key_function,routing_table):
super(Router,self).__init__()
self.key_function=key_function
self.routing_table=routing_table
def map(self,msg):
k=self.key_function(msg)
key=k[0]if isinstance(k,(tuple,list))else k
return self.routing_table[key]
def route(self,msg,*aa,**kw):
k=self.key_function(msg)
if isinstance(k,(tuple,list)):
key,args,kwargs={1:tuple(k)+((),{}),2:tuple(k)+({},),3:tuple(k),}[len(k)]
else:
key,args,kwargs=k,(),{}
try:
fn=self.routing_table[key]
except KeyError as e:
if None in self.routing_table:
fn=self.routing_table[None]
else:
raise RuntimeError('No handler for key: %s, and default handler not defined'%str(e.args))
return fn(msg,*args,**kwargs)
class DefaultRouterMixin(object):
def __init__(self,*args,**kwargs):
self._router=Router(flavor,{'chat':lambda msg:self.on_chat_message(msg),'callback_query':lambda msg:self.on_callback_query(msg),'inline_query':lambda msg:self.on_inline_query(msg),'chosen_inline_result':lambda msg:self.on_chosen_inline_result(msg),'shipping_query':lambda msg:self.on_shipping_query(msg),'pre_checkout_query':lambda msg:self.on_pre_checkout_query(msg),'_idle':lambda event:self.on__idle(event)})
super(DefaultRouterMixin,self).__init__(*args,**kwargs)
@property
def router(self):
return self._router
def on_message(self,msg):
self._router.route(msg)
@openable
class Monitor(ListenerContext,DefaultRouterMixin):
def __init__(self,seed_tuple,capture,**kwargs):
bot,initial_msg,seed=seed_tuple
super(Monitor,self).__init__(bot,seed,**kwargs)
for pattern in capture:
self.listener.capture(pattern)
@openable
class ChatHandler(ChatContext,DefaultRouterMixin,StandardEventMixin,IdleTerminateMixin):
def __init__(self,seed_tuple,include_callback_query=False,**kwargs):
bot,initial_msg,seed=seed_tuple
super(ChatHandler,self).__init__(bot,seed,**kwargs)
self.listener.capture([{'chat':{'id':self.chat_id}}])
if include_callback_query:
self.listener.capture([{'message':{'chat':{'id':self.chat_id}}}])
@openable
class UserHandler(UserContext,DefaultRouterMixin,StandardEventMixin,IdleTerminateMixin):
def __init__(self,seed_tuple,include_callback_query=False,flavors=chat_flavors+inline_flavors,**kwargs):
bot,initial_msg,seed=seed_tuple
super(UserHandler,self).__init__(bot,seed,**kwargs)
if flavors=='all':
self.listener.capture([{'from':{'id':self.user_id}}])
else:
self.listener.capture([lambda msg:flavor(msg)in flavors,{'from':{'id':self.user_id}}])
if include_callback_query:
self.listener.capture([{'message':{'chat':{'id':self.user_id}}}])
class InlineUserHandler(UserHandler):
def __init__(self,seed_tuple,**kwargs):
super(InlineUserHandler,self).__init__(seed_tuple,flavors=inline_flavors,**kwargs)
@openable
class CallbackQueryOriginHandler(CallbackQueryOriginContext,DefaultRouterMixin,StandardEventMixin,IdleTerminateMixin):
def __init__(self,seed_tuple,**kwargs):
bot,initial_msg,seed=seed_tuple
super(CallbackQueryOriginHandler,self).__init__(bot,seed,**kwargs)
self.listener.capture([lambda msg:flavor(msg)=='callback_query' and origin_identifier(msg)==self.origin])
@openable
class InvoiceHandler(InvoiceContext,DefaultRouterMixin,StandardEventMixin,IdleTerminateMixin):
def __init__(self,seed_tuple,**kwargs):
bot,initial_msg,seed=seed_tuple
super(InvoiceHandler,self).__init__(bot,seed,**kwargs)
self.listener.capture([{'invoice_payload':self.payload}])
self.listener.capture([{'successful_payment':{'invoice_payload':self.payload}}])
# Created by pyminifier (https://github.com/liftoff/pyminifier)
|
983,859 | 3f9bf4e784ed1617b630069bbb4e35fd392435af | """
657. Judge Route Circle
Initially, there is a Robot at position (0, 0). Given a sequence of its moves, judge if this robot makes a circle, which means it moves back to the original place.
The move sequence is represented by a string. And each move is represent by a character. The valid robot moves are R (Right), L (Left), U (Up) and D (down). The output should be true or false representing whether the robot makes a circle.
Example 1:
Input: "UD"
Output: true
Example 2:
Input: "LL"
Output: false
"""
# Result AC 80 ms 18.96%
# Or simply use counter in Python return (moves.count('U') == moves.count('D')) and (moves.count('R') == moves.count('L'))
class Solution:
def judgeCircle(self, moves):
"""
:type moves: str
:rtype: bool
"""
mov = {'L': 0, 'R':0, 'U': 0, 'D': 0}
for move in moves:
if move == 'L':
if mov['R']:
mov['R'] -= 1
else:
mov['L'] += 1
elif move == 'R':
if mov['L']:
mov['L'] -= 1
else:
mov['R'] += 1
elif move == 'U':
if mov['D']:
mov['D'] -= 1
else:
mov['U'] += 1
elif move == 'D':
if mov['U']:
mov['U'] -= 1
else:
mov['D'] += 1
s = 0
for m in mov:
s += mov[m]
return True if s == 0 else False |
983,860 | 5c55f6439d3bbe873e396d6ebfdccc29d7c609b7 | from bitonique import bitonique
from bitonique_iterative import bitonique_iter
import numpy as np
import matplotlib.pyplot as plt
height = []
bar = []
colors = []
k = 0
# Make a fake dataset:
for i in range (3,15):
height.append(bitonique(i))
height.append(bitonique_iter(i))
bar.append(i)
bar.append(i)
colors.append('blue')
colors.append('cyan')
bars = tuple(bar)
y_pos = np.arange(len(bars))
# Create bars
plt.bar(y_pos, height, color = colors)
# Create names on the x-axis
plt.xticks(y_pos, bars)
# Show graphic
plt.show()
|
983,861 | cb2d7e5d687dd30722a5ff33a91f3e06c341606b | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
File name: identify_as.py
Author: CrazyHsu @ crazyhsu9627@gmail.com
Created on: 2021-05-12 22:07:52
Last modified: 2021-05-12 22:07:52
'''
def identify_as(dataObj=None, refParams=None, dirSpec=None):
from find_as import find_as
find_as(dataObj=dataObj, refParams=refParams, dirSpec=dirSpec)
from find_pa import find_pa
find_pa(dataObj=dataObj, refParams=refParams, dirSpec=dirSpec)
from charaterize_as import charaterize_as
charaterize_as(dataObj=dataObj, refParams=refParams, dirSpec=dirSpec) |
983,862 | fbf70ecddfcf25411b7ef710453cc42582d2751d | #!/usr/bin/env python
import requests
from io import StringIO
import os
import contextlib
import tempfile
import rootpy.io
import rootpy.ROOT as ROOT
host = "http://127.0.0.1:5000/rest/api/v1/files"
class ErrorInGettingFile(Exception):
def __init__(self, value):
self.value = value
def __repr__(self):
return repr("ErrorInGettingFile: {}".format(self.value))
def getFile(filename, fileObject, stream = False):
print("sending request to {host}/{filename}".format(host = host, filename = filename))
r = requests.get("{host}/{filename}".format(host = host, filename = filename), stream=True)
print("response: {}".format(r))
if r.ok:
print("Get response is okay! Writing received file")
# Write in chunks to allow for streaming. See: https://stackoverflow.com/a/13137873
# To stream a response, we need a generator. See: https://gist.github.com/gear11/8006132#file-main-py-L36
for chunk in r:
fileObject.write(chunk.encode())
# Return to start of file so the read is seamless
fileObject.seek(0)
return (r.ok, r.status_code, fileObject)
else:
if "error" in r.headers:
print("ERROR: {}".format(r.headers["error"]))
raise ErrorInGettingFile(r.headers["error"])
return (r.ok, r.status_code, fileObject)
def putFile(filename, file = None, localFilename = None):
""" Use StringIO to write from memory. """
if not file and not filename:
print("Please pass a valid file or filename")
if filename and not file:
file = open(filename, "rb")
print("filename: {}, file: {}".format(filename, file))
r = requests.put("{host}/{filename}".format(host = host, filename = filename), files = {"file": file})
return (r.ok, r.status_code, r.text)
@contextlib.contextmanager
def FileInMemory(filename, writeFile = False):
fileInMemory = StringIO()
try:
yield getFile(filename = filename, fileObject = fileInMemory)
print("Successfully completed FileInMemory")
except IOError as e:
# Just need an exception so that else is valid.
print("IOError: {}".format(e))
else:
# Only do this if there are no exceptions above
print("Potentially writing file")
if writeFile:
fileInMemory.seek(0)
(success, status, returnValue) = putFile(filename = filename, file = fileInMemory)
print("Successfully wrote file")
finally:
fileInMemory.close()
print("Finally exiting from FileInMemory")
# See: https://stackoverflow.com/a/28401296
@contextlib.contextmanager
def FileWithLocalFilename(filename, writeFile = False):
with tempfile.NamedTemporaryFile() as f:
try:
with FileInMemory(filename) as (success, status, fileInMemory):
if success:
print("Writing to temporary file")
print("success: {}, status: {}".format(success, status))
f.write(fileInMemory.read().encode())
f.flush()
#f.write("Hello".encode())
# Return to start of file so the read is seamless
f.seek(0)
# May be required to fully flush, although flush() seems sufficient for now
# See: https://docs.python.org/2/library/os.html#os.fsync
#os.fsync(f.fileno())
#print("f.read(): {}".format(f.read()))
#f.seek(0)
yield f.name
#print("Post yield")
#f.seek(0, os.SEEK_END)
#print("f length in with def: {}".format(f.tell()))
else:
#yield (False, status, fileInMemory)
yield False
print("Successfully completed FileWithLocalFilename")
except IOError as e:
# Just need an exception so that else is valid.
print("IOError: {}".format(e))
else:
# Only do this if there are no exceptions above
print("Potentially writing file")
if writeFile:
(success, status, returnValue) = putFile(filename = filename, file = f)
print("Wrote file. success: {}, status: {}, returnValue: {}".format(success, status, returnValue))
finally:
print("Finally exiting from FileWithLocalFilename")
if __name__ == "__main__":
# Get the file
#(success, status, strIO) = getFile(filename = "246980/EMC/combined")
writeFile = True
textFile = True
textFileTempFile = True
rootFile = True
if textFile:
try:
#with FileInMemory(filename = "246980/EMC/helloworld.txt", writeFile = True) as (success, status, fileInMemory):
with FileInMemory(filename = "246980/EMC/EMChists.2015_12_13_5_8_22.root", writeFile = writeFile) as (success, status, fileInMemory):
if success:
# Just to find the length
fileInMemory.seek(0)
print("fileInMemory.read(): {}".format(fileInMemory.read()))
fileInMemory.seek(0, os.SEEK_END)
print("success: {}, status: {}, file length: {}".format(success, status, fileInMemory.tell()))
fileInMemory.write("Appended information in memory.\n".encode())
fileInMemory.seek(0)
print("fileInMemory.read(): {}".format(fileInMemory.read()))
else:
print("Failed to retrieve file. status: {}".format(status))
except ErrorInGettingFile as e:
print(e)
if textFileTempFile:
try:
with FileWithLocalFilename(filename = "246980/EMC/EMChists.2015_12_13_5_8_22.root", writeFile = writeFile) as filename:
# Stricktly speaking, this only works on unix! But this should be fine for our purposes,
# as Overwatch is not designed to work on Windows anyway.
# "w" does not seem to work properly, even if we page to the end of the file!
with open(filename, "a+b") as f:
print("looking inside if statement")
print("Temporary filename: {}".format(filename))
f.seek(0, os.SEEK_END)
print("f length with localfile: {}".format(f.tell()))
f.write("Appended information in temp file.\n".encode())
f.seek(0)
print("f.read(): {}".format(f.read()))
except ErrorInGettingFile as e:
print(e)
if rootFile:
try:
with FileWithLocalFilename(filename = "246980/EMC/EMChists.2015_12_13_5_7_22.root", writeFile = writeFile) as filename:
print("Temporary filename: {}".format(filename))
testHist = ROOT.TH1F("testHist", "testHist", 10, 0, 10)
testHist.Fill(3)
# Stricktly speaking, this only works on unix! But this should be fine for our purposes,
# as Overwatch is not designed to work on Windows anyway.
# "RECREATE" will not work, as the file is being recreated in a way that isn't
# compatiable with the temp file!
with rootpy.io.root_open(filename, "UPDATE") as f:
print("f.ls() pre write:")
# Needs to be in a separate line. Otherwise, it will print before saying "pre/post write"
f.ls()
# Write hist
testHist.Write()
# Needs to be in a separate line. Otherwise, it will print before saying "pre/post write"
print("f.ls() post write:")
# Needs to be in a separate line. Otherwise, it will print before saying "pre/post write"
f.ls()
# Check that it was written properly
with rootpy.io.root_open(filename, "READ") as f:
print("f.ls() post post write:")
# Needs to be in a separate line. Otherwise, it will print before saying "pre/post write"
f.ls()
except ErrorInGettingFile as e:
print(e)
# Put the file
#(success, status, returnText) = putFile("246980/EMC/helloworld.txt", file = open("test.txt", "rb"))
#print("success: {}, status: {}, returnText: {}".format(success, status, returnText))
### Additional testing
with tempfile.NamedTemporaryFile() as f:
f.write("Hello".encode())
f.seek(0)
print("temp named file: {}".format(f.read()))
f.seek(0)
with open(f.name, "rb") as f2:
print("read f2: {}".format(f2.read()))
|
983,863 | c793187a709a73087f352010572c504e370023c4 | import os
import unittest
from pyptlib.config import EnvError, Config
from pyptlib.server_config import get_transport_options_impl
from pyptlib.server import ServerTransportPlugin
from pyptlib.test.test_core import PluginCoreTestMixin
from pyptlib.core import SUPPORTED_TRANSPORT_VERSIONS
# a good valid environment to base modifications from
# so it's clearer to see exactly why an environment fails
BASE_ENVIRON = {
"TOR_PT_STATE_LOCATION" : "/pt_stat",
"TOR_PT_MANAGED_TRANSPORT_VER" : "1",
"TOR_PT_EXTENDED_SERVER_PORT" : "",
"TOR_PT_ORPORT" : "127.0.0.1:43210",
"TOR_PT_SERVER_BINDADDR" : "dummy-127.0.0.1:5556,boom-127.0.0.1:6666",
"TOR_PT_SERVER_TRANSPORTS" : "dummy,boom"
}
class testServer(PluginCoreTestMixin, unittest.TestCase):
pluginType = ServerTransportPlugin
def test_fromEnv_legit(self):
"""Legit environment."""
os.environ = BASE_ENVIRON
self.plugin._loadConfigFromEnv()
self.assertOutputLinesEmpty()
def test_fromEnv_bad(self):
"""Missing TOR_PT_MANAGED_TRANSPORT_VER."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON.pop("TOR_PT_MANAGED_TRANSPORT_VER")
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad2(self):
"""Missing TOR_PT_ORPORT."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON.pop("TOR_PT_ORPORT")
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad3(self):
"""Missing TOR_PT_EXTENDED_SERVER_PORT."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON.pop("TOR_PT_EXTENDED_SERVER_PORT")
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad4(self):
"""TOR_PT_EXTENDED_SERVER_PORT not an addport."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_EXTENDED_SERVER_PORT"] = "cakez"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad5(self):
"""TOR_PT_ORPORT not an addport."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_ORPORT"] = "lulz"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad6(self):
"""TOR_PT_SERVER_BINDADDR not an addport."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_SERVER_BINDADDR"] = "dummy-lyrical_content,boom-127.0.0.1:6666"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad7(self):
"""Assymetric TOR_PT_SERVER_TRANSPORTS and TOR_PT_SERVER_BINDADDR."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_SERVER_BINDADDR"] = "dummy-127.0.0.1:5556,laughs-127.0.0.1:6666"
TEST_ENVIRON["TOR_PT_SERVER_TRANSPORTS"] = "dummy,boom"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad8(self):
"""Assymetric TOR_PT_SERVER_TRANSPORTS and TOR_PT_SERVER_BINDADDR."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_SERVER_BINDADDR"] = "dummy-127.0.0.1:5556,laughs-127.0.0.1:6666"
TEST_ENVIRON["TOR_PT_SERVER_TRANSPORTS"] = "dummy"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_bad9(self):
"""Assymetric TOR_PT_SERVER_TRANSPORTS and TOR_PT_SERVER_BINDADDR."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_SERVER_BINDADDR"] = "dummy-127.0.0.1:5556"
TEST_ENVIRON["TOR_PT_SERVER_TRANSPORTS"] = "dummy,laughs"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
self.assertOutputLinesStartWith("ENV-ERROR ")
def test_fromEnv_disabled_extorport(self):
"""Disabled TOR_PT_EXTENDED_SERVER_PORT."""
os.environ = BASE_ENVIRON
config = self.plugin._loadConfigFromEnv()
self.assertIsNone(config.getExtendedORPort())
def test_fromEnv_ext_or_but_no_auth_cookie(self):
"""TOR_PT_EXTENDED_SERVER_PORT without TOR_PT_AUTH_COOKIE_FILE."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_EXTENDED_SERVER_PORT"] = "127.0.0.1:5555"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin._loadConfigFromEnv)
def test_fromEnv_auth_cookie_but_no_ext_or(self):
"""TOR_PT_AUTH_COOKIE_FILE without TOR_PT_EXTENDED_SERVER_PORT."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON.pop("TOR_PT_EXTENDED_SERVER_PORT")
TEST_ENVIRON["TOR_PT_AUTH_COOKIE_FILE"] = "/lulzie"
os.environ = TEST_ENVIRON
self.assertRaises(EnvError, self.plugin.init, ["what"])
def test_init_correct_ext_orport(self):
"""Correct Extended ORPort configuration."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_EXTENDED_SERVER_PORT"] = "127.0.0.1:5555"
TEST_ENVIRON["TOR_PT_AUTH_COOKIE_FILE"] = "/lulzie"
os.environ = TEST_ENVIRON
self.plugin.init([])
self.assertEqual(self.plugin.config.getAuthCookieFile(), '/lulzie')
self.assertEqual(self.plugin.config.getExtendedORPort(), ('127.0.0.1', 5555))
self.assertOutputLinesStartWith("VERSION ")
def test_init_correct_transport_bindaddr(self):
"""Correct Extended ORPort configuration."""
os.environ = BASE_ENVIRON
self.plugin.init(["dummy", "boom"])
bindaddr = self.plugin.getBindAddresses()
self.assertEqual(bindaddr["dummy"], ('127.0.0.1', 5556))
self.assertEqual(bindaddr["boom"], ('127.0.0.1', 6666))
self.assertOutputLinesStartWith("VERSION ")
class testServerOutput(PluginCoreTestMixin, unittest.TestCase):
"""
Test the output of pyptlib. That is, test the SMETHOD lines, etc.
"""
pluginType = ServerTransportPlugin
def test_smethod_line(self):
"""Test output SMETHOD lines."""
os.environ = BASE_ENVIRON
self.plugin.init(["dummy", "boom"])
for transport, transport_bindaddr in list(self.plugin.getBindAddresses().items()):
self.plugin.reportMethodSuccess(transport, transport_bindaddr, None)
self.plugin.reportMethodsEnd()
self.assertIn("SMETHOD dummy 127.0.0.1:5556\n", self.getOutputLines())
self.assertIn("SMETHOD boom 127.0.0.1:6666\n", self.getOutputLines())
self.assertIn("SMETHODS DONE\n", self.getOutputLines())
def test_smethod_line_args(self):
"""Test an SMETHOD line with extra arguments."""
TEST_ENVIRON = dict(BASE_ENVIRON)
TEST_ENVIRON["TOR_PT_SERVER_TRANSPORT_OPTIONS"] = "boom:roots=culture;random:no=care;boom:first=fire"
os.environ = TEST_ENVIRON
self.plugin.init(["dummy", "boom"])
for transport, transport_bindaddr in list(self.plugin.getBindAddresses().items()):
self.plugin.reportMethodSuccess(transport, transport_bindaddr, None)
self.plugin.reportMethodsEnd()
self.assertIn("SMETHOD boom 127.0.0.1:6666 ARGS:roots=culture,first=fire\n", self.getOutputLines())
def test_smethod_line_explicit_args(self):
"""Test an SMETHOD line with extra arguments."""
os.environ = BASE_ENVIRON
self.plugin.init(["dummy", "boom"])
for transport, transport_bindaddr in list(self.plugin.getBindAddresses().items()):
self.plugin.reportMethodSuccess(transport, transport_bindaddr, "roots=culture,first=fire")
self.plugin.reportMethodsEnd()
self.assertIn("SMETHOD boom 127.0.0.1:6666 ARGS:roots=culture,first=fire\n", self.getOutputLines())
class testUtils(unittest.TestCase):
def test_get_transport_options_wrong(self):
"""Invalid options string"""
to_parse = "trebuchet_secret=nou"
self.assertRaises(ValueError, get_transport_options_impl, to_parse)
def test_get_transport_options_wrong_2(self):
"""No k=v value"""
to_parse = "trebuchet:secret~nou"
self.assertRaises(ValueError, get_transport_options_impl, to_parse)
def test_get_transport_options_correct(self):
to_parse = "trebuchet:secret=nou;trebuchet:cache=/tmp/cache;ballista:secret=yes;ballista:fun=no;archer:bow=yes"
expected = {"trebuchet" : {"secret" : "nou", "cache" : "/tmp/cache"} , "ballista" : {"secret" : "yes", "fun" : "no"}, "archer" : {"bow" : "yes" } }
result = get_transport_options_impl(to_parse)
self.assertEqual(result, expected)
if __name__ == '__main__':
unittest.main()
|
983,864 | 116f80b3b05f92be54af7f3dfe9383d6dd64e123 | import json
import requests
from django.conf import settings
from .models import Bill, BillItem, BillSession
api_url = settings.BILL_DOT_COM_API_URL
DEVICE_ID = '916E333F-1BBC-4471-946D-8059DB9488B6'
def make_request(url, data, session_id):
url = "{}{}".format(api_url, url)
headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json'}
request_data = dict(
devKey=settings.DEV_KEY,
sessionId=session_id,
data=json.dumps(data)
)
response = requests.post(url, data=request_data, headers=headers)
response_json = response.json()
return response_json
def get_bill_list(session_id):
api_url = settings.BILL_DOT_COM_API_URL
list_url = "{}{}".format(api_url, "List/Bill.json")
headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json'}
request_data = dict(
devKey=settings.DEV_KEY,
sessionId=session_id,
data=json.dumps({"start": 0, "max": 999})
)
response = requests.post(list_url, data=request_data, headers=headers)
response_json = response.json()
if response_json.get('response_data'):
for data in response_json.get('response_data'):
bill_id = data.get('id')
is_active = data.get('isActive')
vendor_id = data.get('vendorId')
invoice_number = data.get('invoiceNumber')
approval_status = data.get('approvalStatus')
amount = data.get('amount')
bill, _ = Bill.objects.get_or_create(bill_id=bill_id, vendor_id=vendor_id, invoice_number=invoice_number)
if bill:
bill.is_active = is_active
bill.approval_status = approval_status
bill.amount = amount
bill.json_data = data
bill.save()
for bill_item in data.get('billLineItems'):
id = bill_item.get('id')
amount = bill_item.get('amount')
bill_item_object, _ = BillItem.objects.get_or_create(bill_item_id=id, bill=bill)
if bill_item:
bill_item_object.json_data = bill_item
bill_item_object.amount = amount
bill_item_object.save()
def get_bills():
api_url = settings.BILL_DOT_COM_API_URL
login_url = "{}{}".format(api_url, "Login.json")
json_data = dict(orgId=settings.ORG_ID, devKey=settings.DEV_KEY, userName=settings.USER_NAME,
password=settings.PASSWORD) # {'OrgId': settings.ORG_ID, 'devKey': settings.DEV_KEY, 'userName': settings.USER_NAME,'password': settings.PASSWORD}
headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json'}
response = requests.post(login_url, data=json_data, headers=headers)
response_json = response.json()
session_id = ''
if response_json.get('response_data') and response_json.get('response_data').get('sessionId'):
session_id = response_json.get('response_data').get('sessionId')
get_bill_list(session_id)
pass
def get_session_id():
api_url = settings.BILL_DOT_COM_API_URL
login_url = "{}{}".format(api_url, "Login.json")
json_data = dict(orgId=settings.ORG_ID, devKey=settings.DEV_KEY, userName=settings.USER_NAME,
password=settings.PASSWORD) # {'OrgId': settings.ORG_ID, 'devKey': settings.DEV_KEY, 'userName': settings.USER_NAME,'password': settings.PASSWORD}
headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json'}
response = requests.post(login_url, data=json_data, headers=headers)
response_json = response.json()
if response_json.get('response_data') and response_json.get('response_data').get('sessionId'):
return response_json.get('response_data').get('sessionId')
def get_bill_approvers(bill_id):
data = {"objectId": bill_id, "entity": "Bill"}
session_id = get_session_id()
response = make_request('ListApprovers.json', data, session_id)
users = []
if response.get('response_data'):
for approver in response.get('response_data'):
user_id = approver.get('usersId')
user_response = make_request('Crud/Read/User.json', {"id": user_id}, session_id)
if user_response.get('response_data'):
users.append('{} {}'.format(user_response.get('response_data').get('firstName'),
user_response.get('response_data').get('lastName')))
return users
def approve_bills(bill_id):
session_id = get_session_id()
data = {"objectId": bill_id, "entity": "Bill", "comment": "Looks good to me."}
response = make_request('Approve.json', data, session_id)
if response.get('response_message') == 'Success':
return "Successfully Approved bill"
elif response.get('response_data').get('error_message'):
return '{} {}'.format(response.get('response_data').get('error_message'),
"You are not Authorized to approve this bill. Please check bill Approvers.")
return 'Something Went wrong'
def send_token():
session_id = get_session_id()
BillSession.objects.all().delete()
data = {"useBackup": False}
response = make_request('MFAChallenge.json', data, session_id)
if response.get('response_message') == 'Success':
challenge_id = response.get('response_data', {}).get('challengeId')
BillSession.objects.create(session_id=session_id, challenge_id=challenge_id, device_id=DEVICE_ID)
return "Please verify access token."
elif response.get('response_data').get('error_message'):
return '{}'.format(response.get('response_data').get('error_message'))
return 'Something Went wrong'
def verify_auth_token(token):
bill_session = BillSession.objects.all().first()
session_id = bill_session.session_id
data = {"challengeId": bill_session.challenge_id, "token": token,
"deviceId": bill_session.device_id, "machineName": "Test Phone", "rememberMe": True
}
response = make_request('MFAAuthenticate.json', data, session_id)
if response.get('response_message') == 'Success':
mfa_id = response.get('response_data', {}).get('mfaId')
bill_session.mfa_id = mfa_id
bill_session.save()
return "Access Token Has been verified"
elif response.get('response_data').get('error_message'):
return '{}'.format(response.get('response_data').get('error_message'))
return 'Something Went wrong'
def pay_bill(vendor_id, bill_id, amount):
bill_session = BillSession.objects.all().first()
data = {"vendorId": vendor_id, "billPays": [{"billId": bill_id, "amount": float(amount)}]}
url = "{}{}".format(api_url, 'PayBills.json')
headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Accept': 'application/json'}
request_data = dict(
devKey=settings.DEV_KEY,
sessionId=bill_session.session_id,
data=json.dumps(data),
mfaId=bill_session.mfa_id,
deviceId=bill_session.device_id,
)
response = requests.post(url, data=request_data, headers=headers)
response = response.json()
return response
# PayBills
|
983,865 | 072fcfc0bb7bf5ab8d139332ac0cea95651d873d | # Shim for notebook server or jupyter_server
#
# Provides:
# - ServerTestBase
# - assert_http_error
#
try:
from notebook.tests.launchnotebook import (
assert_http_error,
NotebookTestBase as ServerTestBase
)
except ImportError:
from jupyter_server.tests.launchnotebook import assert_http_error # noqa
from jupyter_server.tests.launchserver import ServerTestBase # noqa
|
983,866 | dcd88e8f74264a97dcaf51f3c2575202a473c409 | import logging
import os
from errno import ENOTDIR
import shutil
logger = logging.getLogger(__name__)
# Create the directory @param(path) and return the path after creation [Error safe]
def make_dir(path):
# Avoid the raise of IOError exception by checking if the directory exists first
try:
os.mkdir(path)
except OSError as e:
if e.errno != 17:
logger.warning(u'Exception in make_dir(%s): %s' % (e.filename, repr(e)))
return path
# Create the directorise @param(path) and return the directory_path after creation [Error safe]
def make_dirs(path):
# Avoid the raise of IOError exception by checking if the directory exists first
path += os.sep
try:
os.makedirs(path)
except OSError as e:
if e.errno != 17:
logger.warning(u'Exception in make_dir(%s): %s' % (e.filename, repr(e)))
return path
def delete_dir(path, include_root=True):
"""deletes the path entirely"""
for root, dirs, files in os.walk(path, topdown=False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
os.rmdir(os.path.join(root, name))
if include_root:
os.rmdir(path)
def listdir_abs(parent):
return [os.path.join(parent, child) for child in os.listdir(parent)]
def get_size(file_name):
return os.path.getsize(os.path.abspath(file_name))
def get_dir_size(dir_name):
# TODO : Write unite test for that method
return sum([get_size(os.path.join(dir_name, x)) for x in os.listdir(dir_name)]) if os.path.exists(dir_name) else 0
def safe_delete(path):
if os.path.exists(path):
if os.path.islink(path):
os.unlink(path)
elif os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
def dict_apply(path, dictionnary, symlink_method=None):
'''
This method expect a dict with any depth where leaf are a list of tuple (name, path) where a symlink is going to be created
following the path in the tree to match the patch in the file system.
{'a': {'b': {'c': [('a', '/path_to/a')]}}} is going to create path/a/b/c/a (where a is a symlink to /path_to/a)
:param dictionnary:
:return:
'''
if not dictionnary:
return
path_content = set(os.listdir(path))
dictionarry_keys = set(dictionnary.keys())
to_remove = path_content - dictionarry_keys
for remove in to_remove:
full_remove = os.path.join(path, remove)
safe_delete(full_remove)
for root, leaf in dictionnary.items():
full_leaf = os.path.join(path, root)
if not leaf:
safe_delete(full_leaf)
continue
current_path = make_dir(os.path.join(path, root))
current_path_content = set(os.listdir(current_path))
if isinstance(leaf, list):
for name, abs_path_to_name in leaf:
new_one = os.path.join(current_path, name)
if name not in current_path_content:
try:
if not symlink_method:
os.symlink(abs_path_to_name, new_one)
else:
symlink_method(abs_path_to_name, new_one)
except OSError as e:
logger.error(u'Tried to symlink: "%s" to "%s/%s"' % (abs_path_to_name,
current_path,
name))
logger.error(u'Error: %s' % e)
else:
current_path_content.remove(name)
if get_dir_size(abs_path_to_name) != get_dir_size(new_one):
safe_delete(new_one)
try:
if not symlink_method:
os.symlink(abs_path_to_name, new_one)
else:
symlink_method(abs_path_to_name, new_one)
except OSError as e:
logger.error(u'Tried to symlink: "%s" to "%s/%s"' % (abs_path_to_name,
current_path,
name))
logger.error(u'Error: %s' % e)
if current_path_content:
for content in current_path_content:
full_content = os.path.join(current_path, content)
safe_delete(full_content)
else:
dict_apply(current_path, leaf, symlink_method=symlink_method)
|
983,867 | 8f7758a0de198a04cdcddea396c12be71ca2f118 |
# 设计一个登录程序,不同的用户名和密码存在字典里
# 输入用户名,如果用户名不存在或者为空,则一直提示请输入用户名
# 如果用户名正确,提示请输入密码,如果密码跟用户名不对应,则提示密码错误
# 如果密码输入错误超过3次,中断程序运行
# 当密码错误时,提示还有几次机会
# 当用户名和密码都输入成功的时候,提示登录成功
user={"admin":"111111","user":"000000"}
username=input("请输入用户名:")
while username not in user:
print("用户名错误")
username = input("请输入用户名:")
password=input("请输入密码:")
times=3
while password != user[username]:
times-=1
print("您还有{0}次机会".format(times))
if times==0:
password = input("登录失败!")
break
password = input("密码错误,请重新输入:")
if password == user[username]:
print("登录成功") |
983,868 | 1166589d97f913bc07cb4f7db98764ddcc33107d | # -*- coding: utf-8 -*-
import cv2
cascade_src = 'Bus_front.xml'
video_src = 'bus1.mp4'
cap = cv2.VideoCapture(video_src)
bus_cascade = cv2.CascadeClassifier(cascade_src)
while True:
ret, img = cap.read()
if (type(img) == type(None)):
break
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
bus = bus_cascade.detectMultiScale(gray, 1.16, 1)
ret, thresh = cv2.threshold(gray, 127, 255, 0)
cnts, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
QttyOfContours = 0
for (x,y,w,h) in bus:
QttyOfContours = QttyOfContours+1
cv2.rectangle(img,(x,y),(x+w,y+h),(0,0,255),2)
CoordXCentroid = int((x+x+w)/2)
CoordYCentroid = int((y+y+h)/2)
ObjectCentroid = (CoordXCentroid,CoordYCentroid)
cv2.circle(img, ObjectCentroid, 1, (0, 0, 0), 5)
print ("Total countours found: " , str(QttyOfContours))
cv2.imshow('video', img)
if cv2.waitKey(33) == 27:
break
cv2.destroyAllWindows()
|
983,869 | 829e0a5c00d76b71bdc741ffbd846960bf74b15c | from django.apps import AppConfig
class RestprofileConfig(AppConfig):
name = 'restprofile'
|
983,870 | 9b8d715bf7860783c1c59485cfa8fcdd3489d0e7 | r"""
Example: Multiphase diffusion with heterogeneous reaction
2D network, consists of air and water. Air occupies the middle of the
network and is surrounded by two film-like regions of water at the top
and the bottom. The top and the bottom faces of the network is assumed
to be coated w/ a catalyst, and are therefore reactive.
The diffusing species diffuses through air, which is followed by mass
partitioning at the two air-water interfaces, then continues diffusing
through the water, and finally reacting at the two reacting plates at the
top and the bottom of the network.
"""
import openpnm as op
import numpy as np
import matplotlib.pyplot as plt
np.random.seed(10)
# Define network, geometry and constituent phases
net = op.network.Cubic(shape=[100, 100, 1])
geom = op.geometry.StickAndBall(network=net)
air = op.phases.Air(network=net, name="air")
water = op.phases.Water(network=net, name="water")
water["pore.diffusivity"] = air["pore.diffusivity"] * 0.05
# Define the regions to be occupied by the two phases (air and water)
x, y, z = net["pore.coords"].T
ps_water = net.Ps[(y >= 75) + (y <= 25)]
ps_air = np.setdiff1d(net.Ps, ps_water)
ts_water = net.find_neighbor_throats(pores=ps_water, mode="xnor")
ts_air = net.find_neighbor_throats(pores=ps_air, mode="xnor")
ts_interface = net.find_neighbor_throats(pores=ps_water, mode="xor")
# Define multiphase and set phase occupancy
mphase = op.phases.MultiPhase(network=net, phases=[air, water], name="mphase")
mphase._set_automatic_throat_occupancy()
mphase.set_occupancy(air, pores=ps_air, throats=ts_air)
mphase.set_occupancy(water, pores=ps_water, throats=ts_water)
# Define physics
phys = op.physics.Standard(network=net, phase=mphase, geometry=geom)
# Assign a partition coefficient (concentration ratio)
K_water_air = 0.5 # c @ water / c @ air
const = op.models.misc.constant
mphase.set_binary_partition_coef(propname="throat.partition_coef",
phases=[water, air], model=const, value=K_water_air)
# Replace the "default" ordinary_diffusion w/ multiphase_diffusion conductance model
mdiff = op.models.physics.diffusive_conductance.multiphase_diffusion
phys.add_model(propname="throat.diffusive_conductance", model=mdiff)
# Fickian diffusion
fd = op.algorithms.FickianDiffusion(network=net, phase=mphase)
# Set source term
phys["pore.A1"] = -1e-8 * geom["pore.area"]
phys["pore.A2"] = 0.0
linear = op.models.physics.generic_source_term.linear
phys.add_model(propname="pore.rxn", model=linear, X="pore.concentration",
A1="pore.A1", A2="pore.A2", regen_mode="deferred")
rxn_pores = net.pores(["left", "right"])
net.set_label("rxn", pores=rxn_pores)
fd.set_source(propname="pore.rxn", pores=rxn_pores)
# Set BCs and run simulation
net.set_label("air", pores=ps_air)
front_air = net.pores(["front", "air"], mode="and")
back_air = net.pores(["back", "air"], mode="and")
fd.set_value_BC(pores=front_air, values=1.0)
fd.set_value_BC(pores=back_air, values=0.1)
fd.run()
# Post-processing
mphase.update(fd.results())
c = mphase["pore.concentration"]
c2d = np.rot90(c.reshape(net._shape).squeeze())
plt.imshow(c2d)
plt.colorbar()
op.io.XDMF.save(network=net, phases=mphase, filename="network")
|
983,871 | b5999c833d67e0fc888c7e44fceb82771ff29749 | from som.interpreter.objectstorage.storage_location import (
NUMBER_OF_POINTER_FIELDS,
NUMBER_OF_PRIMITIVE_FIELDS,
create_location_for_long,
create_location_for_double,
create_location_for_object,
create_location_for_unwritten,
)
from som.vmobjects.double import Double
from som.vmobjects.integer import Integer
from rlib.jit import elidable_promote
class ObjectLayout(object):
_immutable_fields_ = [
"for_class",
"_prim_locations_used",
"_ptr_locations_used",
"_total_locations",
"_storage_locations[*]",
"_storage_type[*]",
"is_latest?",
]
def __init__(self, number_of_fields, for_class=None, known_types=None):
assert number_of_fields >= 0
from som.vmobjects.object_with_layout import Object
self.is_latest = True
self.for_class = for_class
self._storage_types = known_types or [None] * number_of_fields
self._total_locations = number_of_fields
self._storage_locations = [None] * number_of_fields
next_free_prim_idx = 0
next_free_ptr_idx = 0
for i in range(0, number_of_fields):
storage_type = self._storage_types[i]
if storage_type is Integer:
location = create_location_for_long(i, next_free_prim_idx)
next_free_prim_idx += 1
elif storage_type is Double:
location = create_location_for_double(i, next_free_prim_idx)
next_free_prim_idx += 1
elif storage_type is Object:
location = create_location_for_object(i, next_free_ptr_idx)
next_free_ptr_idx += 1
else:
assert storage_type is None
location = create_location_for_unwritten(i)
self._storage_locations[i] = location
self._prim_locations_used = next_free_prim_idx
self._ptr_locations_used = next_free_ptr_idx
def get_number_of_fields(self):
return self._total_locations
def with_generalized_field(self, field_idx):
from som.vmobjects.object_with_layout import Object
if self._storage_types[field_idx] is Object:
return self
self.is_latest = False
assert self._storage_types[field_idx] is not None
with_generalized_field = self._storage_types[:]
with_generalized_field[field_idx] = Object
return ObjectLayout(
self._total_locations, self.for_class, with_generalized_field
)
def with_initialized_field(self, field_idx, spec_class):
from som.vmobjects.object_with_layout import Object
# First we generalize to Integer, Double, or Object
# don't need more precision
if spec_class is Integer or spec_class is Double:
spec_type = spec_class
else:
spec_type = Object
if self._storage_types[field_idx] is spec_type:
return self
self.is_latest = False
assert self._storage_types[field_idx] is None
with_initialized_field = self._storage_types[:]
with_initialized_field[field_idx] = spec_type
return ObjectLayout(
self._total_locations, self.for_class, with_initialized_field
)
def get_storage_location(self, field_idx):
return self._storage_locations[field_idx]
def create_access_node(self, field_idx, next_entry):
return self._storage_locations[field_idx].create_access_node(self, next_entry)
def get_number_of_used_extended_ptr_locations(self):
required_ext_fields = self._ptr_locations_used - NUMBER_OF_POINTER_FIELDS
if required_ext_fields < 0:
return 0
return required_ext_fields
def get_number_of_used_extended_prim_locations(self):
required_ext_field = self._prim_locations_used - NUMBER_OF_PRIMITIVE_FIELDS
if required_ext_field < 0:
return 0
return required_ext_field
@elidable_promote("all")
def lookup_invokable(self, signature):
return self.for_class.lookup_invokable(signature)
|
983,872 | a4d78db35f7c818459c4d68f933ab190dac81213 |
import numpy as np
from flask import Flask, request, jsonify, render_template
import pickle
app = Flask(__name__)
model = pickle.load(open('model.pkl', 'rb'))
gender = {'male':1,'female':0}
education={'a level or equivalent':0,'he qualification':1,'lower than a level':2,
'no formal quals':3,'post graduate qualification':4}
disability={'yes':1,'no':0}
age={'0-35':0,'35-55':1,'55-75':2}
region={'east anglian region':0,'wales':10,'scotland':6,'south region':8,'london region':3,
'west midlands region':11,'south west region':9,'south east region':7,
'east midlands region':1,'north western region':5,'yorkshire region':12,
'ireland':2,'north region':4}
@app.route('/')
def home():
return render_template('index.html')
@app.route('/predict',methods=['POST'])
def predict():
'''
For rendering results on HTML GUI
'''
int_features = [x for x in request.form.values()]
print(int_features)
int_features[1]=gender[int_features[1]]
int_features[2]=region[int_features[2]]
int_features[3]=education[int_features[3]]
int_features[4]=age[int_features[4]]
int_features[7]=disability[int_features[7]]
print(int_features)
final_features = [np.array(int_features)]
prediction = model.predict(final_features)
output = (prediction[0])
if output<98:
output=output
else:
output=98
return render_template('index.html', prediction_text= '{}'.format(output))
@app.route('/predict_api',methods=['POST'])
def predict_api():
'''
For direct API calls trought request
'''
data = request.get_json(force=True)
prediction = model.predict([np.array(list(data.values()))])
output = prediction[0]
return jsonify(output)
if __name__ == "__main__":
app.run(debug=True)
|
983,873 | fd7c2db7bb87c86a343ddf92f716997692b2e9e9 |
from django.http import Http404, HttpResponse, HttpResponseBadRequest
from django.views.generic import View
import logging
logger = logging.getLogger('Rovor')
from dirmanage.models import Filesystem
import json
import traceback
class PathProcessView(View):
'''View class for Views that want to use the true path from
a path provided by the 'path' POST request
'''
http_method_names = ['post','options'] #only allow post requests
error_mimetype='text/plain'
invalidPathResult = 'Invalid path'
noPathResult = "no path supplied"
innerView = None #lambda request,path: return HttpResponse("") #default behaviour is to do nothing
def post(self,request):
'''method to wrap the code for getting the true path
and calling the actual code, which takes too paramaters,
the request object and the path and returns an HttpResponse'''
if 'path' in request.POST:
try:
path = Filesystem.getTruePath(request.POST['path'])
except (ValueError, Filesystem.DoesNotExist, IOError):
logger.debug(traceback.format_exc())
logger.info("Attempted access to invalid path: "+request.POST['path'])
return HttpResponseBadRequest(self.invalidPathResult, mimetype=self.error_mimetype)
return self.innerView(request,path)
else:
return HttpResponseBadRequest(self.noPathResult, mimetype=self.error_mimetype)
@classmethod
def decorate(cls, innerView):
'''decorate a function innerView and change it into a view as
returned by as_view
the function should take two paramaters:
@param request the request object
@param path the true path on the server machine'''
return cls.as_view(innerView=innerView)
@classmethod
def pathOnly(cls, innerFunc):
'''decorate a function which takes only one paramater, the true path
and return a view which will call that function with the true path
'''
return cls.as_view(innerView= lambda req,path: innerFunc(path))
def process_path(request, block):
'''
@deprecated[use the PathProcessView and PathProcessView.decorate instead]
helper function to abstract common code when accessing
a path over POST method. This will check that the method is POST
and that path is a supplied field and a valid path, if it is not
it will return a proper HttpResponse. If path is a valid path,
then this will call block, passing in the true filesystem path to
block, and will return the result wrapped in a HttpResponse
after dumping the object as a json string
if block returns None the result
will simply be {"ok":true} '''
try:
res = PathProcessView.pathOnly(block)(request)
if isinstance(res,HttpResponse):
return res
except Exception as err:
#if there was some kind of uncaught exception return it to the client
resp = {"ok":False, "error":str(err), "errtype":type(err).__name__,"traceback":traceback.format_exc()}
return HttpResponse(json.dumps(resp),mimetype='application/json')
#default to simply responding an ok response if return was false
if res is None:
res = '{"ok":true}'
else:
res = json.dumps(res)
return HttpResponse(res,mimetype='application/json')
|
983,874 | e1ae9ebdd11985c5f928406d11dba09366bd4697 | #!/usr/bin/python
#coding:utf8
import threading
import time
import os
import paramiko
import sys
#def func(ip,user,cmd):
# ssh = paramiko.SSHClient()
# ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# key = paramiko.RSAKey.from_private_key_file('/root/.ssh/id_rsa')
# ssh.connect(hostname=ip,username=user,pkey=key)
# stdin, stdout, stderr = ssh.exec_command(cmd)
# print '%s %s : \n %s'%(ip, cmd, stdout.read().strip())
# ssh.close()
class MyCmd(threading.Thread):
def __init__(self, ip, user, cmd):
threading.Thread.__init__(self)
self.ip = ip
self.user = user
self.cmd = cmd
def run(self):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
key = paramiko.RSAKey.from_private_key_file('/root/.ssh/id_rsa')
try:
ssh.connect(hostname=self.ip,username=self.user,pkey=key, timeout=1)
except:
print '%s \n TIMEOUT or NOT PRIVILEGES'%(self.ip)
sys.exit()
stdin, stdout, stderr = ssh.exec_command(self.cmd)
if stdout.read():
print '%s %s : \n %s'%(self.ip, self.cmd, stdout.read().strip())
ssh.close()
else:
print '%s %s : \n %s'%(self.ip, self.cmd, stderr.read().strip())
ssh.close()
if __name__ == '__main__':
cmd = ' '.join(sys.argv[1:])
print '执行的命令 %s '% cmd
ips = ['47.90.44.152', '118.26.161.27']
user = 'root'
th = []
for ip in ips:
#print ip
mycmd = MyCmd(ip, user, cmd)
th.append(mycmd)
mycmd.start()
for i in th:
i.join()
print 'Done!'
|
983,875 | 3ada0d95b3d89ef601eb5d1755c3a35f386acf6e | #!/usr/bin/env python3
from matplotlib import pyplot
import tensorflow.keras as K
import tensorflow as tf
def preprocess_data(X, Y):
""" doc """
X_p = K.applications.resnet50.preprocess_input(X)
X_y = K.utils.to_categorical(Y, 10)
return(X_p, X_y)
def resize(X):
""" resize """
return K.backend.resize_images(X, 7, 7,
data_format="channels_last",
interpolation='bilinear')
if __name__ == "__main__":
(Xtrain, Ytrain), (Xtest, Ytest) = K.datasets.cifar10.load_data()
Xtrain, Ytrain = preprocess_data(Xtrain, Ytrain)
Xtest, Ytest = preprocess_data(Xtest, Ytest)
ResNet50_model = K.applications.ResNet50(weights='imagenet',
include_top=False,
input_shape=(224, 224, 3))
ResNet50_model.trainable = False
Input = K.Input(shape=(32, 32, 3))
resizeImage = K.layers.Lambda(resize)(Input)
x = ResNet50_model(resizeImage, training=False)
x = K.layers.Flatten()(x)
x = K.layers.Dense(1000, activation='relu')(x)
x = K.layers.Dropout(0.2)(x)
x = K.layers.Dense(10, activation='softmax')(x)
model = K.Model(Input, x)
opt = K.optimizers.SGD(lr=0.001, momentum=0.9)
model.compile(optimizer=opt, loss='categorical_crossentropy',
metrics=['accuracy'])
checkpoint = K.callbacks.ModelCheckpoint(save_best_only=True, mode="max",
monitor="val_acc",
filepath="cifar10.h5")
model.fit(Xtrain, Ytrain, epochs=5, batch_size=224,
validation_data=(Xtest, Ytest),
verbose=1, callbacks=[checkpoint])
model.save("cifar10.h5")
|
983,876 | 58a51d430731ab921ddf3ee90ab44ec5c7cd4a34 | dx = [0,1,0,-1]
dy = [1,0,-1,0]
N, M, K = map(int,input().split())
fir_graph = []
for _ in range(N):
fir_graph.append(list(map(int,input().split())))
turns = []
for _ in range(K):
tmp = list(map(int,input().split()))
tmp[0] -= 1
tmp[1] -= 1
turns.append(tmp)
permute = []
def track(depth,per):
global min_num
if depth == K:
tmp = []
for i in range(len(per)):
tmp.append(turns[per[i]])
permute.append(tmp)
return
for i in range(K):
if i in per:
continue
per.append(i)
track(depth+1,per)
per.pop()
def turn(r,c,s):
for i in range(1,s+1):
sx = r-i
sy = c-i
tmp = graph[sx][sy]
sx += dx[0]
sy += dy[0]
tmp, graph[sx][sy] = graph[sx][sy],tmp
direction = 0
while True:
if sx == r-i and sy == c-i:
break
nx = sx + dx[direction]
ny = sy + dy[direction]
if nx > r+i or nx < r-i or ny > c+i or ny < c-i:
direction = (direction+1)%4
continue
tmp, graph[nx][ny] = graph[nx][ny], tmp
sx = nx
sy = ny
def calArray(graph):
nums = []
for i in range(N):
nums.append(sum(graph[i]))
return min(nums)
track(0,[])
min_num = 987654321
# print(permute)
for p in range(len(permute)):
graph = [[0 for _ in range(M)] for _ in range(N)]
for i in range(len(fir_graph)):
for j in range(len(fir_graph[i])):
graph[i][j] = fir_graph[i][j]
for loc in range(len(permute[p])):
# print(permute[p][loc])
r = permute[p][loc][0]
c = permute[p][loc][1]
s = permute[p][loc][2]
turn(r,c,s)
# print(calArray(graph))
min_num = min(min_num,calArray(graph))
print(min_num) |
983,877 | 2c6cfbe4c755aafbbd88527a54f43b2e54b22682 | from pynput.keyboard import key,Listener
import logging
log_dir = r"C:/Users/Andreas/logger/"
logging.basicConfig(filename = (log_dir + "keyLog.txt"), level=logging.DEBUG, format='%(asctime)s: %(message)s')
def on_press(key):
logging.info(str(key))
def on_press(key):
logging.info(str(key))
def on_release(key):
if key == Key.esc:
return False
with Listener(on_press=on_press,on_release=on_release ) as listener:
listener.join()
|
983,878 | db9a011f0d121d68b11c0b91cd3ab941a897174d | from GridVector import GridVector
from Piece import Piece
from Move import Move
class Checkerboard:
BOARD_SIZE = 8
NW = GridVector(-1, -1)
NE = GridVector(-1, 1)
SW = GridVector(1, -1)
SE = GridVector(1, 1)
def __init__(self, description=None):
"""
description: list of pairs (square_id, piece_descriptor)
where piece_descriptor is one of
r, R, w or W for red, red king, white, white king
"""
self.grid = [
[None for j in range(self.BOARD_SIZE)]
for y in range(self.BOARD_SIZE)]
if description:
self.create_from_description(description)
else:
self.create_pieces()
def create_from_description(self, desc):
for square_id, piece_descriptor in desc:
loc = GridVector.to_coords(square_id)
piece = Piece.from_descriptor(piece_descriptor)
self.put_down(loc, piece)
def create_pieces(self):
for i in range(self.BOARD_SIZE):
for j in range(self.BOARD_SIZE):
loc = GridVector(i, j)
# red pieces always go on the first three rows
if i < 3 and self.is_valid(loc):
self.put_down(loc, Piece(Piece.COLOR_RED))
# white pieces always go on the bottom.
elif i >= self.BOARD_SIZE - 3 and self.is_valid(loc):
self.put_down(loc, Piece(Piece.COLOR_WHITE))
def is_valid(self, loc):
return self.in_grid(loc) and loc.diagonal % 2 == 1
def is_empty(self, loc):
return self.get(loc) is None
def in_grid(self, loc):
return (
0 <= loc.row < self.BOARD_SIZE
and 0 <= loc.col < self.BOARD_SIZE)
def put_down(self, loc, piece):
self.grid[loc.row][loc.col] = piece
def get(self, loc):
return self.grid[loc.row][loc.col]
def pick_up(self, loc):
piece = self.get(loc)
self.put_down(loc, None)
return piece
def find_pieces(self, color):
locations = []
for i, row in enumerate(self.grid):
for j, piece in enumerate(row):
if piece and piece.color == color:
locations.append(GridVector(i, j))
return locations
def basic_move(self, from_loc, to_loc):
piece = self.pick_up(from_loc)
self.put_down(to_loc, piece)
def capture_move(self, from_loc, to_loc):
enemy_loc = GridVector.midpoint(from_loc, to_loc)
self.basic_move(from_loc, to_loc)
self.pick_up(enemy_loc)
def find_basic_moves(self, from_loc):
piece = self.get(from_loc)
north_facing = piece.color == Piece.COLOR_WHITE
moves = []
#TODO: Refactor me!
if north_facing or piece.is_king:
for offset in [self.NW, self.NE]:
to_loc = from_loc + offset
if self.is_valid(to_loc) and self.is_empty(to_loc):
moves.append(Move(from_loc, to_loc, Move.MOVE_BASIC))
if not north_facing or piece.is_king:
for offset in [self.SW, self.SE]:
to_loc = from_loc + offset
if self.is_valid(to_loc) and self.is_empty(to_loc):
moves.append(Move(from_loc, to_loc, Move.MOVE_BASIC))
return moves
def find_capture_moves(self, from_loc):
piece = self.get(from_loc)
north_facing = piece.color == Piece.COLOR_WHITE
moves = []
#TODO: Refactor me!
if north_facing or piece.is_king:
for offset in [self.NW, self.NE]:
to_loc = from_loc + 2 * offset
if not self.is_valid(to_loc) and self.is_empty(to_loc):
continue
enemy_loc = GridVector.midpoint(from_loc, to_loc)
enemy = self.get(enemy_loc)
if enemy is not None and enemy.color != piece.color:
moves.append(Move(from_loc, to_loc, Move.MOVE_CAPTURE))
if not north_facing or piece.is_king:
for offset in [self.SW, self.SE]:
to_loc = from_loc + 2 * offset
if not self.is_valid(to_loc) and self.is_empty(to_loc):
continue
enemy_loc = GridVector.midpoint(from_loc, to_loc)
enemy = self.get(enemy_loc)
if enemy is not None and enemy.color != piece.color:
moves.append(Move(from_loc, to_loc, Move.MOVE_CAPTURE))
return moves
def find_all_basic_moves(self, color):
locs = self.find_pieces(color)
results = []
for loc in locs:
results.extend(self.find_basic_moves(loc))
return results
def find_all_capture_moves(self, color):
locs = self.find_pieces(color)
results = []
for loc in locs:
results.extend(self.find_capture_moves(loc))
return results
def find_all_moves(self, color):
return (
self.find_all_basic_moves(color)
+ self.find_all_capture_moves(color))
def __str__(self):
output = [" 0 1 2 3 4 5 6 7"]
for i, row in enumerate(self.grid):
row_str = ""
for piece in row:
if piece is None:
row_str += ". "
else:
row_str += "{} ".format(piece)
output.append("{} {}".format(i, row_str))
return "\n".join(output)
if __name__ == "__main__":
board = Checkerboard()
print(board)
print("--------")
print("Red Moves:")
print(board.find_all_basic_moves(Piece.COLOR_RED))
print("White Moves:")
print(board.find_all_basic_moves(Piece.COLOR_WHITE))
board2 = Checkerboard([
(14, 'r'),
(18, 'w'),
(22, 'w'),
(27, 'w'),
(7, 'r')])
print(board2)
print(board2.find_all_moves(Piece.COLOR_RED))
print([x.to_notation() for x in board2.find_all_moves(Piece.COLOR_RED)])
|
983,879 | f5e3f2ef45fa8caafa0617b4c27315189485e1a1 | from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from generics.config import TestData
from pages.base_page import BasePage
class ConnectLoginPage(BasePage):
USERNAME_FIELD = (By.ID, 'login')
PASSWORD_FIELD = (By.ID, 'password')
def __init__(self, driver):
super().__init__(driver)
self.driver.get(TestData.URL)
def log_in(self, username, password):
self.type_on_element(self.USERNAME_FIELD, username)
self.type_on_element(self.PASSWORD_FIELD, password + Keys.RETURN)
|
983,880 | fd28881ab156a7a2a3e8237f39d8bbac1695af9b | import unittest
from itertools import combinations as cb
class FindSumCombinations(object):
"""
This class is used for find all pairs of elements from a
list of integers that add up to a specific sum/target.
"""
def __init__(self, nums, target):
"""
@summary: Initializes the list object from which the pais
need to be determined
@param nums: List of elements
@type nums: List
@param target: The target sum
@type target: int
"""
self.nums = nums
self.sum = target
def __str__(self):
return "class to find all pairs that add up to the target sum"
def list_of_pairs(self):
"""
@summary: find all pairs of elements in the input_list
that add up to a specific sum/target
@param:input_list: Input list that needs to be traversed
@type: list
@param: target
@type: int
@return: list of pairs split from actual list
@type: List
"""
subsets = []
input_list = self.nums
target = self.sum
if not input_list:
return []
if len(input_list) == 1 and target not in input_list:
return []
if len(input_list) == 2 and sum(input_list) == target:
return [input_list]
if len(input_list) == 1 and target in input_list:
return [input_list]
my_gen = (list(comb) for comb in cb(input_list, 2) if sum(comb) == target)
for comb in my_gen:
subsets.append(comb)
if target in subsets:
subsets.append([target])
return subsets
class TestCombinationSum(unittest.TestCase):
def test_combination_sum_empty_list(self):
fsc_object = FindSumCombinations([], 11)
my_subsets = fsc_object.list_of_pairs()
self.assertEquals(my_subsets, [])
def test_combination_sum_single_element_list(self):
fsc_object = FindSumCombinations([12], 11)
my_subsets = fsc_object.list_of_pairs()
self.assertEquals(my_subsets, [])
def test_combination_sum_single_element_list_1(self):
fsc_object = FindSumCombinations([11], 11)
my_subsets = fsc_object.list_of_pairs()
self.assertEquals(my_subsets, [[11]])
def test_combination_sum_two_element_list(self):
fsc_object = FindSumCombinations([5, 6], 11)
my_subsets = fsc_object.list_of_pairs()
self.assertEquals(my_subsets, [[5, 6]])
def test_combination_sum_general_list(self):
fsc_object = FindSumCombinations([1, 2, 3, 7, 9, 11], 11)
my_subsets = fsc_object.list_of_pairs()
self.assertEquals(my_subsets, [[2, 9], [4, 7], [11]])
if __name__ == '__main__':
unittest.main()
|
983,881 | 3ff861b10b91561da6b81846247da49127a99539 | from utilities.log import TFRecord_log
from skimage import io
from shutil import copy
import tensorflow as tf
import numpy as np
import os
# 二進位資料
def bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
# 整數資料
def int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
# 浮點數資料
def float32_feature(value):
return tf.train.Feature(float_list=tf.train.FloatList(value=value))
def get_File(file_dir,filename):
# The images in each subfolder
images = []
# The subfolders
subfolder = []
folders = []
# Using "os.walk" function to grab all the files in each folder
for dirPath, dirNames, fileNames in os.walk(file_dir):
for name in dirNames:
subfolder.append(os.path.join(dirPath, name))
folders.append(name)
for folder in subfolder:
for dirPath, dirNames, fileNames in os.walk(folder):
for image_name in fileNames:
images.append(image_name)
# copy file to make training data.
if filename == 'train':
copy(os.path.join(dirPath, image_name),'./train/')
if filename == 'test':
copy(os.path.join(dirPath, image_name),'./test/')
# To record the labels of the image dataset. ex: [0,0,1,1,2,2,2]
labels = []
count = 0
for a_folder in subfolder:
n_img = len(os.listdir(a_folder))
TFRecord_log.info('label - folder : %s %d',a_folder,count)
labels = np.append(labels, n_img * [count])
count+=1
# merge label and feature.
subfolders = np.array([images, labels])
subfolders = subfolders[:, np.random.permutation(subfolders.shape[1])].T
image_list = list(subfolders[:, 0])
label_list = list(subfolders[:, 1])
label_list = [int(float(i)) for i in label_list]
return image_list, label_list, folders
def TFRecord_Writer(images, labels, images_dir,image_folder, TFrecord_dir, TFrecord_name):
n_samples = len(labels)
TFWriter = tf.python_io.TFRecordWriter(TFrecord_dir+TFrecord_name)
TFRecord_log.info('Start make TFRecord file.')
for i in np.arange(0, n_samples):
try:
image = io.imread(images_dir+image_folder+'/'+images[i])
if image is None:
TFRecord_log.warning('Error image:' + images[i])
else:
image_raw = image.tostring()
label = int(labels[i])
height, width, depth = image.shape
# check the image shape.
if height != 640 or width !=640:
continue
# take tf.train.Feature and merge to tf.train.Features.
ftrs = tf.train.Features(feature={'Label': int64_feature(label),'image_raw': bytes_feature(image_raw),
'height':int64_feature(height),'width': int64_feature(width)})
# take tf.train.Features and change to tf.train.Example.
example = tf.train.Example(features=ftrs)
# take tf.train.Example and write in tfRecord file.
TFWriter.write(example.SerializeToString())
except:
# image is not in this folder.
continue
TFWriter.close()
TFRecord_log.info('Make TFRecord file done.')
def TFRecord_Reader(TFRecord_Files,IMAGE_HEIGHT,IMAGE_WIDTH,IMAGE_DEPTH,Batch_Size):
TFRecord_log.info('Start read TFRecord file.')
# create queue.
try:
filename_queue = tf.train.string_input_producer([TFRecord_Files],shuffle=True,num_epochs=None)
except:
TFRecord_log.error('Input data in queue faild !!')
# reader.
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
# get features.
img_features = tf.parse_single_example(serialized_example,features={
'Label' : tf.FixedLenFeature([], tf.int64),
'image_raw': tf.FixedLenFeature([], tf.string),
'height': tf.FixedLenFeature([], tf.int64),
'width': tf.FixedLenFeature([], tf.int64), })
# recover image.
TFRecord_log.info('Reshape image.')
try:
image_content = tf.decode_raw(img_features['image_raw'], tf.uint8)
# image_float32 = tf.image.convert_image_dtype(image_content,tf.float32)
image = tf.reshape(image_content, [IMAGE_HEIGHT, IMAGE_WIDTH, IMAGE_DEPTH])
img = tf.cast(image, tf.float32) * (1. / 255) - 0.5
label = tf.cast(img_features['Label'], tf.float32)
except:
TFRecord_log.error('Reshape image failed !!')
# regulate images size.
resized_image = tf.image.resize_image_with_crop_or_pad(image=img,target_height=224,target_width=224)
images, labels = tf.train.shuffle_batch(
[resized_image, label],
batch_size= Batch_Size,
capacity=80+3*Batch_Size,
min_after_dequeue=80)
return images, labels
|
983,882 | 547f383d48c348e3cb614f723ef364f4faf89455 | def optimal_play(alice_stamina, bob_stamina, first_is_alice=True):
if bob_stamina == 0:
return (alice_stamina, 0)
if alice_stamina == 0:
return (0, bob_stamina)
return (alice_stamina - 1, bob_stamina)
def solve():
alice_stamina, bob_stamina = map(int, input().split())
return ' '.join(map(str, optimal_play(alice_stamina, bob_stamina)))
if __name__=='__main__':
for _ in range(int(input())):
print(solve()) |
983,883 | 954c0277e0ece6431ede162f385e3afed432549c | #8. Escribí un programa que separe y
# devuelva los caracteres númericos de un string.
import re
string = "1 2 3 4 hola 5 6 7 %^"
patron = "\d"
lista = re.findall(patron, string)
for i in lista:
print(int(i)) |
983,884 | a93a4b0ed18f17529b69b44d82903a8455101152 | import streamlit as st
import numpy as np
import pandas as pd
import requests
import base64
#------------------------------------------------#
#title
st.title("🌐 HTML Table Scraper 🕸️")
st.markdown(" A simple HTML table scraper made in Python 🐍 & the amazing [Streamlit!](https://www.streamlit.io/) ")
st.markdown('### **1️⃣ Enter a URL to scrape **')
#------------------------------------------------------------------#
#main
url = st.text_input("", value='https://stackexchange.com/leagues/1/alltime/stackoverflow', max_chars=None, key=None, type='default')
if url:
arr= ['https://', 'http://']
if any(c in url for c in arr):
header = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36","X-Requested-With": "XMLHttpRequest"}
@st.cache(persist = True, show_spinner=False)
def load_data():
r = requests.get(url, headers=header)
return pd.read_html(r.text)
df = load_data()
length = len(df)
if length ==1:
st.write('This webpage contains 1 table')
else:
st.write('This webpage contains 1 table', str(length) + 'tables')
if st.button('Show Scraped Tables'):
st.table(df)
else:
st.empty()
def createlist(r1,r2):
return[item for item in range(r1,r2+1)]
r1,r2 = 1, length
funct = createlist(r1,r2)
st.markdown('### **2️⃣ Select a table to export **')
value_selected = st.selectbox('', funct)
df1 = df[value_selected-1]
if df1.empty:
st.warning('ℹ️ - This DataFrame is empty!')
else:
df1 = df1.replace(np.nan,'empty cell', regex = True )
st.dataframe(df1)
##Download the file
csv = df1.to_csv(index = False)
b64 = base64.b64encode(csv.encode()).decode()
st.markdown('### ** ⬇️ Download the selected table to CSV **')
href = f'<a href= "data:file/csv;base64,{b64}" download="filtered_table.csv"> **Click Here**</a>'
st.markdown(href, unsafe_allow_html=True) |
983,885 | c9130d0154845e4ea45a0256fc3fda5fe85728a2 | #!/usr/bin/env python3
#https://leetcode.com/problems/length-of-last-word/description/
class Solution:
def lengthOfLastWord(self, s):
"""
:type s: str
:rtype: int
"""
words = [x for x in s.split(' ') if x.strip()]
if not len(words):
return 0
return len(words[-1])
|
983,886 | 7d187db961761a8e47641d7844a3aad8e654038a |
# @Title: 滑动窗口最大值 (Sliding Window Maximum)
# @Author: 2464512446@qq.com
# @Date: 2020-12-28 12:01:53
# @Runtime: 368 ms
# @Memory: 26.8 MB
class Solution:
def maxSlidingWindow(self, nums: List[int], k: int) -> List[int]:
queue = collections.deque()
res = []
for index, i in enumerate(nums):
if index >= k and index - queue[0] >= k:
queue.popleft()
while queue and nums[queue[-1]] < i:
queue.pop()
queue.append(index)
if index >= k- 1:
res.append(nums[queue[0]])
return res
|
983,887 | d087d8646d996aa014ba92077213c8b30ff7c942 | from django.db import models
from django.conf import settings
# Create your models here.
class Todo(models.Model):
author = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.PROTECT)
description = models.TextField(max_length=1000)
completed = models.BooleanField()
date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.description[:30]
# class TodoQuerySet(models.QuerySet):
#
# def annotate_everything(self):
# qs = self.select_related('author')
# return qs
|
983,888 | d0c1755872bab6d8d2726016d21579b2643871d5 | from python_basic import *
#WP5
l = [0, 3, 5, -2, 9, 8, 10, -5]
print(filter_integers_greater_than(l, 4))
print(filter_integers_greater_than(l, 6)) |
983,889 | 168032bc38d70f27eca9046291783aae8baf2e26 | from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager, Server
from main import app
from applications.models import db
manager = Manager(app)
Migrate(app=app, db=db)
manager.add_command('db', MigrateCommand) # 创建数据库映射命令
# import models不能省
from applications.models import admin_user,admin_role,admin_power,admin_log,admin_photo,admin_dict,admin_role_power,admin_user_role
manager.add_command('start', Server(port=8080, use_debugger=True)) # 创建启动命令
if __name__ == '__main__':
manager.run() |
983,890 | e4a17d3d5039059cd83854db004fda43ebe0fa4a | import os
import numpy as np
import pandas as pd
import tensorflow as tf
import cv2
import math
import shutil
import PIL
import matplotlib.pyplot as plt
from numpy import asarray
from PIL import Image
import tensorflow as tf
import tensorflow.python.keras as keras
import tensorflow.python.keras.layers as layers
from tensorflow.python.keras.models import Sequential
from tensorflow.python.keras.preprocessing.image import ImageDataGenerator
from tensorflow.python.keras.callbacks import TensorBoard
#from tensorflow.python.keras.utils import to_categorical
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard
from tensorflow.keras import Model
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.applications import VGG16
from tensorflow.python.framework.ops import Tensor
from tensorflow.python.keras.layers import Convolution2D
from tensorflow.python.keras.layers import MaxPooling2D
from tensorflow.python.keras.layers import Flatten
from tensorflow.python.keras.layers import Dense
from tensorflow.python.keras.layers import Dropout
from tensorflow.python.keras.layers import BatchNormalization
from tensorflow.python.keras.layers import Activation
# Instantiate global constants
data_dir = os.path.abspath('./drive/MyDrive/final_assignment_xrays/XRays')
test_path = data_dir + '/test'
train_path = data_dir + '/train'
normal_path = train_path + '/NORMAL'
pneumonia_path = train_path + '/PNEUMONIA'
# Class for creating Data Generators
class GenerateData:
def Random_Contrast(img: tf.Tensor) -> tf.Tensor:
# Contrast Augmentation for addition to the ImageDataGenerator
img_ = tf.image.random_contrast(img, 1, 1.5)
return img_
def Random_Contrast_denoising(img: tf.Tensor) -> tf.Tensor:
# Contrast Augmentation for addition to the ImageDataGenerator
img = tf.image.random_contrast(img, 1, 1.5)
img = tf.reshape(img,[162,128])
img = tf.dtypes.cast(img, tf.uint8)
img = np.array(img).astype('uint8')
img_ = cv2.fastNlMeansDenoising(img,h=10,templateWindowSize=7,searchWindowSize=21)
#img_denoised = GenerateData.denoising(img_)
return img_.reshape(162, 128, 1).astype('float64')
def Random_Contrast_denoising_rgb(img: tf.Tensor) -> tf.Tensor:
img = tf.image.random_contrast(img, 1, 1.5)
img = tf.dtypes.cast(img, tf.uint8)
img = cv2.cvtColor(np.float32(img), cv2.COLOR_RGB2GRAY)
img = cv2.cvtColor(np.float32(img), cv2.COLOR_GRAY2RGB)
img = tf.reshape(img,[162,128,-1])
img = np.array(img).astype('uint8')
img_ = cv2.fastNlMeansDenoising(img,h=10,templateWindowSize=7,searchWindowSize=21)
#img_denoised = GenerateData.denoising(img_)
return img_.reshape(162, 128, 3).astype('float64')
def denoising(img: tf.Tensor) -> tf.Tensor:
img = img.reshape(162, 128).astype('uint8')
img_ = cv2.fastNlMeansDenoising(img,h=10,templateWindowSize=7,searchWindowSize=21)
return img_.reshape(162, 128, 1).astype('float64')
def denoising_rgb(img: tf.Tensor) -> tf.Tensor:
img = cv2.cvtColor(img.astype('uint8'), cv2.COLOR_RGB2GRAY)
#img = cv2.Canny(enhance_contrast(img, disk(6)), 50, 210)
img = cv2.cvtColor(img, cv2.COLOR_GRAY2RGB)
img = img.astype('float64')
img = img.reshape(162, 128,-1).astype('uint8')
img_ = cv2.fastNlMeansDenoising(img,h=10,templateWindowSize=7,searchWindowSize=21)
return img_.reshape(162, 128, 3).astype('float64')
#Simple thresholding
def simple_thresholding(img: tf.Tensor) -> tf.Tensor:
img = img.reshape(162, 128).astype('uint8')
#img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
ret,img = cv2.threshold(img,127,255,cv2.THRESH_BINARY)
return img.reshape(162, 128, -1).astype('float64')
def edge_detect(img: tf.Tensor) -> tf.Tensor:
# Canny edge detection for addition to the ImageDataGenerator
img_ = cv2.Canny(enhance_contrast(img.reshape(162, 128).astype('uint8'), disk(6)), 50, 210)
return img_.reshape(162, 128, 1).astype('float64')
def rgb_edge_detect(img: tf.Tensor) -> tf.Tensor:
# Canny edge detection for addition to the ImageDataGenerator for RGB images
grey_img = cv2.cvtColor(img.astype('uint8'), cv2.COLOR_RGB2GRAY)
img_ = cv2.Canny(enhance_contrast(grey_img, disk(6)), 50, 210)
color_img = cv2.cvtColor(img_, cv2.COLOR_GRAY2RGB)
return color_img.astype('float64')
def initialise():
# Instantiate Generator Object, holding back 10% of samples for Validation, and normalising the pixel values.
# Note that the holdout is only relevant if 'subset' is defined when instantiating a generator, otherwise
# the whole set is returned
global generator, generator_denoising,generator_denoising_rgb, generator_edge, generator_edge_rgb, augmented_generator, augmented_generator_denoising,augmented_generator_denoising_rgb,augmented_generator_edge, augmented_generator_edge_rgb,generator_ST
generator = ImageDataGenerator(rescale=1./255,
validation_split=0.1)
generator_ST = ImageDataGenerator(rescale=1./255,validation_split=0.1,
preprocessing_function=GenerateData.simple_thresholding)
generator_denoising = ImageDataGenerator(rescale=1./255,
validation_split=0.1,
preprocessing_function=GenerateData.denoising)
generator_denoising_rgb = ImageDataGenerator(rescale=1./255,
validation_split=0.1,
preprocessing_function=GenerateData.denoising_rgb)
generator_edge = ImageDataGenerator(rescale=1./255,
validation_split=0.1,
preprocessing_function=GenerateData.edge_detect)
generator_edge_rgb = ImageDataGenerator(rescale=1./255,
validation_split=0.1,
preprocessing_function=GenerateData.rgb_edge_detect)
augmented_generator = ImageDataGenerator(rescale=1./255,
rotation_range=10,
width_shift_range=0,
height_shift_range=0,
vertical_flip=False,
horizontal_flip=False,
validation_split=0.1,
preprocessing_function = GenerateData.Random_Contrast)
augmented_generator_denoising = ImageDataGenerator(rescale=1./255,
rotation_range=10,
width_shift_range=0,
height_shift_range=0,
vertical_flip=False,
horizontal_flip=False,
validation_split=0.1,
preprocessing_function = GenerateData.Random_Contrast_denoising)
augmented_generator_denoising_rgb = ImageDataGenerator(rescale=1./255,
rotation_range=10,
width_shift_range=0,
height_shift_range=0,
vertical_flip=False,
horizontal_flip=False,
validation_split=0.1,
preprocessing_function = GenerateData.Random_Contrast_denoising_rgb)
augmented_generator_edge = ImageDataGenerator(rescale=1./255,
rotation_range=10,
validation_split=0.1,
preprocessing_function = GenerateData.edge_detect)
augmented_generator_edge_rgb = ImageDataGenerator(rescale=1./255,
rotation_range=10,
validation_split=0.1,
preprocessing_function = GenerateData.rgb_edge_detect)
def data_flow(train_path, color,denoise=False,ST=False):
# Instantiate training data generators. Convert image to grayscale, and resize image to 162*128 pixels for
# LeNet5 architecture, also used for examining class imbalance.
if denoise and color=='grayscale':
data = generator_denoising.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True)
elif denoise and color=='rgb':
data = generator_denoising_rgb.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True)
elif denoise == False and ST:
print('\n entered the if condition')
data = generator_ST.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True)
else:
data = generator.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True)
return data
def data_flow_augmented(train_path, color,denoise=False):
# Instantiate training data generators using the augmented generator. Convert image to grayscale, and
# resize image to 162*128 pixels for classic LeNet5 architecture, also used for examining class imbalance.
if denoise and color=='grayscale':
data = augmented_generator_denoising.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True)
elif denoise and color=='rgb':
data = augmented_generator_denoising_rgb.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True)
else:
data = augmented_generator.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True)
return data
def training_data_flow(train_path, color, denoise=False,ST=False):
# Create Training Set Generator from training subset, for quick runs not executing a k-fold strategy
print(denoise)
print(ST)
print(color)
if denoise and color=='grayscale':
data_train = generator_denoising.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='training')
elif denoise and color=='rgb':
data_train = generator_denoising_rgb.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='training')
elif denoise == False and ST:
print('\n entered the if condition')
data_train = generator_ST.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='training')
else:
data_train = generator.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='training')
return data_train
def training_data_flow_augmented(train_path, color,denoise=False):
# Create Training Set Generator from training subset using the augmented generator,
# for quick runs not executing a k-fold strategy
if denoise and color=='grayscale':
data_train = augmented_generator_denoising.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='training')
elif denoise and color=='rgb':
data_train = augmented_generator_denoising_rgb.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='training')
else:
data_train = augmented_generator.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='training')
return data_train
def validation_data_flow(train_path, color,denoise=False,ST=False):
# Create Validation Set Generator from validation subset for quick runs not executing a k-fold strategy
if denoise and color=='grayscale':
data_val = generator_denoising.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='validation')
elif denoise and color=='rgb':
data_val = generator_denoising_rgb.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='validation')
elif denoise == False and ST:
print('\n entered the if condition')
data_val = generator_ST.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='validation')
else:
data_val = generator.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='validation')
return data_val
def validation_data_flow_augmented(train_path, color,denoise=False):
# Create Validation Set Generator from validation subset using the augmented generator,
# for quick runs not executing a k-fold strategy
if denoise and color=='grayscale':
data_val = augmented_generator_denoising.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='validation')
elif denoise and color=='rgb':
data_val = augmented_generator_denoising_rgb.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='validation')
else:
data_val = augmented_generator.flow_from_directory(train_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
subset='validation')
return data_val
def test_data_flow(test_path, color,denoise=False,ST=False):
# Create test data generator. Note shuffle=false here, this is important for extracting the 'true' class
# labels later on
if denoise and color=='grayscale':
data_test = generator_denoising.flow_from_directory(test_path,
target_size=(162,128),
color_mode=color,
batch_size=16,
class_mode="categorical",
shuffle=False,
seed = 42)
elif denoise and color=='rgb':
data_test = generator_denoising_rgb.flow_from_directory(test_path,
target_size=(162,128),
color_mode=color,
batch_size=16,
class_mode="categorical",
shuffle=False,
seed = 42)
elif denoise == False and ST:
print('\n entered the if condition')
data_test = generator_ST.flow_from_directory(test_path,
target_size=(162,128),
color_mode=color,
batch_size=32,
class_mode="categorical",
shuffle=True,
seed=42)
else:
data_test = generator.flow_from_directory(test_path,
target_size=(162,128),
color_mode=color,
batch_size=16,
class_mode="categorical",
shuffle=False,
seed = 42)
return data_test
def test_data_flow_augmented(test_path, color,denoise=False):
# Create test data generator. Note shuffle=false here, this is important for extracting the 'true' class
# labels later on
if denoise and color=='grayscale':
data_test = augmented_generator_denoising.flow_from_directory(test_path,
target_size=(162,128),
color_mode=color,
batch_size=16,
class_mode="categorical",
shuffle=False,
seed = 42)
elif denoise and color=='rgb':
data_test = augmented_generator_denoising_rgb.flow_from_directory(test_path,
target_size=(162,128),
color_mode=color,
batch_size=16,
class_mode="categorical",
shuffle=False,
seed = 42)
else:
data_test = augmented_generator.flow_from_directory(test_path,
target_size=(162,128),
color_mode=color,
batch_size=16,
class_mode="categorical",
shuffle=False,
seed = 42)
return data_test
def ShowImages():
# Method to print some x-rays to the screen for evaluation by my extraordinarily medically learned self ......
fig, ax = plt.subplots(4, 4, figsize=(15, 7))
ax = ax.ravel()
plt.tight_layout()
for i in range(0,8):
dir_ = 'train'
set_dir = data_dir + '/' + dir_
ax[i].imshow(plt.imread(set_dir+'/NORMAL/'+os.listdir(set_dir+'/NORMAL')[i]), cmap='gray')
ax[i].set_title('Set: {}, Condition: Normal'.format(dir_))
dir_ = 'test'
set_dir = data_dir + '/' + dir_
ax[i+8].imshow(plt.imread(set_dir+'/PNEUMONIA/'+os.listdir(set_dir+'/PNEUMONIA')[i]), cmap='gray')
ax[i+8].set_title('Set: {}, Condition: Pneumonia'.format(dir_))
def CalculateDataStats():
data = GenerateData.data_flow(train_path, "grayscale")
# Examine class imbalance across training and validation data, using the 'data' generator
df = pd.DataFrame({'data':data.classes})
# Class Counts and ratio
normal = int(df[df.data==data.class_indices['NORMAL']].count())
pneumonia = int(df[df.data==data.class_indices['PNEUMONIA']].count())
ratio = round(pneumonia / normal, 2)
# Class Weights
normal_weight = ratio
pneumonia_weight = 1.0
class_weights = {
data.class_indices['NORMAL']:normal_weight,
data.class_indices['PNEUMONIA']:pneumonia_weight
}
text = "Normal:{:.0f}\nPneumonia:{:.0f}\nImbalance Ratio: {:.2f}\n".format(normal, pneumonia, ratio)
print(text)
text = "Weighting classes by:\nNormal:{:.2f}\nPneumonia:{:.2f}\n".format(normal_weight, pneumonia_weight)
print(text)
return class_weights
def MakeDirectories(k):
# Main loop - 'for each fold'
for i in range (0, k):
print('Creating Directory for fold ' + str(i+1))
# Create Fold Directories
if not os.path.exists(data_dir + '/fold' + str(i+1)):
os.mkdir(data_dir + '/fold' + str(i+1))
# Create Train and Validate Directories in each Fold
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/train'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/train')
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/validate'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/validate')
# Create Class Directories in Train and Validate Directories
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/train' + '/NORMAL'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/train' + '/NORMAL')
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/train' + '/PNEUMONIA'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/train' + '/PNEUMONIA')
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/validate' + '/NORMAL'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/validate' + '/NORMAL')
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/validate' + '/PNEUMONIA'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/validate' + '/PNEUMONIA')
# Create Checkpoint Directories
if not os.path.exists(data_dir + '/checkpoints'):
os.mkdir(data_dir + '/checkpoints')
if not os.path.exists(data_dir + '/checkpoints' + '/LogReg'):
os.mkdir(data_dir + '/checkpoints'+ '/LogReg')
if not os.path.exists(data_dir + '/checkpoints' + '/VGG16'):
os.mkdir(data_dir + '/checkpoints'+ '/VGG16')
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/checkpoints'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/checkpoints')
if not os.path.exists(data_dir + '/fold' + str(i+1) + '/checkpoints' + '/augmented'):
os.mkdir(data_dir + '/fold' + str(i+1) + '/checkpoints' + '/augmented')
def Create_KFold_TrainingData(k):
# Copy All Training Data Into Train Folds
# Main loop - 'for each fold'
for k in range (0, k):
# Set Directory to the current fold for NORMAL class
dir_ = (data_dir + '/fold' + str(k+1) + '/train' + '/NORMAL')
print('Copying all Training Data into Fold ' + str(k+1) + ' Directory')
# Iterate over every 'NORMAL' x-ray and copy
for filename in os.listdir(normal_path):
shutil.copy(normal_path + '/' + filename, dir_)
# Set Directory to the current fold for PNEUMONIA class
dir_ = (data_dir + '/fold' + str(k+1) + '/train' + '/PNEUMONIA')
# Iterate over every 'PNEUMONIA' x-ray and copy
for filename in os.listdir(pneumonia_path):
shutil.copy(pneumonia_path + '/' + filename, dir_)
def Create_KFold_ValidationData(k):
# Move Validation Fold Data out of Train Fold Directories and into Validation Fold Directories
data = GenerateData.data_flow(train_path, "grayscale")
# Class Counts and ratio
df = pd.DataFrame({'data':data.classes})
normal = int(df[df.data==data.class_indices['NORMAL']].count())
pneumonia = int(df[df.data==data.class_indices['PNEUMONIA']].count())
count_normal = math.ceil(normal/k)
count_pneumonia = math.ceil(pneumonia/k)
# 'NORMAL' x-rays
# Instantiate counts
i = 0
j = 1
# Set source and target directories to fold 1
dir_Normal = (data_dir + '/fold' + str(j) + '/train' + '/NORMAL')
dir_ = (data_dir + '/fold' + str(j) + '/validate' + '/NORMAL')
print('Moving NORMAL Validation Data Out Of Fold ' + str(j) + ' Train Directory')
# Iterate over every 'NORMAL' x-ray in source directory
for filename in os.listdir(dir_Normal):
# Move Files from source to. target directories
shutil.move(dir_Normal + '/' + filename, dir_)
# When we have moved 1/k'th of the images, Set source and target directories to the next fold
if i > j*count_normal:
j = j + 1
dir_ = (data_dir + '/fold' + str(j) + '/validate' + '/NORMAL')
dir_Normal = (data_dir + '/fold' + str(j) + '/train' + '/NORMAL')
print('Moving NORMAL Validation Data Out Of Fold ' + str(j) + ' Train Directory')
i = i + 1
# 'PNEUMONIA' x-rays
# Instantiate counts
i = 0
j = 1
# Set source and target directories to fold 1
dir_Pneumonia = (data_dir + '/fold' + str(j) + '/train' + '/PNEUMONIA')
dir_ = (data_dir + '/fold' + str(j) + '/validate' + '/PNEUMONIA')
print('Moving PNEUMONIA Validation Data Out Of Fold ' + str(j) + ' Train Directory')
# Iterate over every 'PNEUMONIA' x-ray in source directory
for filename in os.listdir(dir_Pneumonia):
# Move Files from source to. target directories
shutil.move(dir_Pneumonia + '/' + filename, dir_)
# When we have moved 1/k'th of the images, Set source and target directories to the next fold
if i > j*count_pneumonia:
j = j + 1
dir_ = (data_dir + '/fold' + str(j) + '/validate' + '/PNEUMONIA')
dir_Pneumonia = (data_dir + '/fold' + str(j) + '/train' + '/PNEUMONIA')
print('Moving PNEUMONIA Validation Data Out Of Fold ' + str(j) + ' Train Directory')
i = i + 1
def LogReg():
classifier = Sequential()
classifier.add(Flatten(input_shape=(162,128,1)))
classifier.add(Dense(2))
classifier.add(Activation('softmax'))
classifier.compile(loss='binary_crossentropy', optimizer = Adam(lr=0.0001), metrics=['accuracy'])
classifier.summary()
return classifier
def VGG_16(freeze):
# Instantiate a pre-trained model, trained on the imagenet database, and unfreeze the final convolutional layer
base_model = VGG16(weights='imagenet', input_shape=(162,128,3), include_top=False)
x = base_model.output
x = Flatten()(x)
x = Dense(64, activation='relu')(x)
x = Dropout(0.33)(x)
x = BatchNormalization()(x)
output = Dense(2, activation='softmax')(x)
classifier = Model(inputs=base_model.input, outputs=output)
# Unfreeze the final convolutional layer
if freeze == True:
for layer in base_model.layers:
if layer.name != 'block5_conv3':
layer.trainable = False
else:
layer.trainable = True
print("Unfreezing layer: block5_conv3")
# If we do not want to unfreeze the final layer
else:
for layer in base_model.layers:
layer.trainable = False
classifier.compile(loss='binary_crossentropy', optimizer = Adam(learning_rate=0.0001), metrics=['accuracy'])
classifier.summary()
return classifier
def LeNet5():
# Define the Lenet5 model
classifier = Sequential()
classifier.add(layers.Conv2D(6,(5,5), input_shape=(162,128,1),strides=1, padding='valid', activation='relu'))
classifier.add(layers.AveragePooling2D(pool_size=(2,2),strides=2))
classifier.add(layers.Conv2D(16,(5,5),strides=1, padding='valid', activation='relu'))
classifier.add(layers.AveragePooling2D(pool_size=(2,2),strides=2))
classifier.add(layers.Conv2D(120,(5,5),strides=1, padding='valid', activation='relu'))
classifier.add(layers.Flatten())
classifier.add(layers.Dense(84, input_shape=(120,)))
classifier.add(layers.Dense(2, activation='softmax'))
classifier.summary()
classifier.compile(loss=keras.losses.categorical_crossentropy, optimizer = Adam(lr=0.0001), metrics=['accuracy'])
return classifier |
983,891 | 474ee69677100fe3fceba7e448482aa1e34fb293 | import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
###### CREATING A SQLite DATABASE ###########
basedir = os.path.abspath(os.path.dirname(__file__))
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///'+os.path.join(basedir, 'data.sqlite') #setting the location and name of the database
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False # turning off the feature to track modifications, we dont need this right now :)
db = SQLAlchemy(app)
Migrate(app, db)
##############################################
###### CREATE A MODEL ##################
class Puppy(db.Model):
__tablename__ = 'puppies'
id = db.Column(db.Integer, primary_key = True)
name = db.Column(db.Text)
age = db.Column(db.Integer)
breed = db.Column(db.Text)
def __init__(self, name, age, breed):
self.name = name
self.age = age
self.breed = breed
def __repr__(self):
return f"The puppy name is: {self.name} and it is: {self.age} years old"
##################################################
|
983,892 | a7a9209657d2049201a8ba9e8ef108bd86758057 | import sys
class Solution(object):
"""
This is my implementation of 3sum closet using two pointer, loop over i, use j, k two pointers;
j is designed to move right and k is designed to more left.
Time complexity: O(nlogn + n^2)
Space complexity: O(1)
"""
def threeSumClosest(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: int
"""
if not nums or len(nums) <= 2:
return None
nums.sort() # sort s
diff = sys.maxsize
for i in range(len(nums)-2):
if i >= 1 and nums[i] == nums[i-1]:
continue
j = i + 1
k = len(nums) - 1
while (j < k):
s = nums[i] + nums[j] + nums[k]
if abs(s - target) < diff:
diff = abs(s - target)
res = s
if s > target:
k -= 1 # if sum is larger, k move to the left
while k > 0 and k < len(nums)-1 and nums[k] == nums[k+1]: # k is designed to move left
k -= 1
elif s < target:
j += 1 # if sum is smaller, j move to the right
while j < len(nums)-1 and j > 0 and nums[j] == nums[j-1]: # j is designed to move right
j += 1
else:
return target # sum is equal to the target, return target
return res
"""
This is my second implementation of 3sum closet using brutal force (TLE)
"""
def threeSumClosest2(self, nums, target):
if not nums or len(nums) <= 2:
return None
diff = sys.maxsize
for i in range(len(nums)-2):
for j in range(i+1, len(nums)-1):
for k in range(j+1, len(nums)):
s = nums[i] + nums[j] + nums[k]
if abs(s-target) < diff:
diff = abs(s-target)
res = s
return res
print(Solution().threeSumClosest2([1,1,-1,-1,3],-1))
|
983,893 | 05659bed839f82441833256f716f08ba5c49fa79 | # Uncomment the next two lines to enable the admin:
from django.conf.urls import patterns, include, url
from gestio import views
urlpatterns = patterns('',
url(r'^$', views.menu, name='menu'),
url(r'^dataHora/$', views.dataHora, name='dataHora'),
url(r'^llistarComandes/$', views.llistarComandes, name='llistarComandes'),
url(r'^llistarPendents/$', views.llistarComandesPendents, name='llistarComandesPendents'),
url(r'^llistarTancades/$', views.llistarComandesTancades, name='llistarComandesTancades'),
url(r'^llistarPagades/$', views.llistarComandesPagades, name='llistarComandesPagades'),
url(r'^veureDetalls/(?P<idComanda>\d+)$', views.veureDetalls, name='veureDetalls'),
url(r'^tancarComanda/(?P<idComanda>\d+)/$', views.tancarComanda, name='tancarComanda'),
url(r'^guardarPagament/(?P<idComanda>\d+)/(?P<pagament>\w+)/$', views.guardarPagament, name='guardarPagament'),
url(r'^donaCanvi/(?P<qtatDonada>(\d+\.\d+))/(?P<total>(\d+\.\d+))/$', views.donaCanvi, name='donaCanvi'),
url(r'^donaCanvi/(?P<qtatDonada>\d+)/(?P<total>\d+)/$', views.donaCanvi, name='donaCanvi'),
url(r'^donaCanvi/(?P<qtatDonada>\d+)/(?P<total>(\d+\.\d+))/$', views.donaCanvi, name='donaCanvi'),
url(r'^donaCanvi/(?P<qtatDonada>(\d+\.\d+))/(?P<total>\d+)/$', views.donaCanvi, name='donaCanvi'),
)
|
983,894 | 2eea4b9930cfc8df01112f548bfcf88ef0dbece9 | # -*- coding: utf-8 -*-
from io import BytesIO
from struct import pack
from struct import unpack
from datetime import datetime
import time
def _calcCRC(crc, byte):
table = [0x0000, 0xCC01, 0xD801, 0x1400, 0xF001, 0x3C00, 0x2800, 0xE401,
0xA001, 0x6C00, 0x7800, 0xB401, 0x5000, 0x9C01, 0x8801, 0x4400]
# compute checksum of lower four bits of byte
tmp = table[crc & 0xF]
crc = (crc >> 4) & 0x0FFF
crc = crc ^ tmp ^ table[byte & 0xF]
# now compute checksum of upper four bits of byte
tmp = table[crc & 0xF]
crc = (crc >> 4) & 0x0FFF
crc = crc ^ tmp ^ table[(byte >> 4) & 0xF]
return crc
class FitBaseType(object):
"""BaseType Definition
see FIT Protocol Document(Page.20)"""
enum = {'#': 0, 'endian': 0, 'field': 0x00, 'name': 'enum', 'invalid': 0xFF, 'size': 1}
sint8 = {'#': 1, 'endian': 0, 'field': 0x01, 'name': 'sint8', 'invalid': 0x7F, 'size': 1}
uint8 = {'#': 2, 'endian': 0, 'field': 0x02, 'name': 'uint8', 'invalid': 0xFF, 'size': 1}
sint16 = {'#': 3, 'endian': 1, 'field': 0x83, 'name': 'sint16', 'invalid': 0x7FFF, 'size': 2}
uint16 = {'#': 4, 'endian': 1, 'field': 0x84, 'name': 'uint16', 'invalid': 0xFFFF, 'size': 2}
sint32 = {'#': 5, 'endian': 1, 'field': 0x85, 'name': 'sint32', 'invalid': 0x7FFFFFFF, 'size': 4}
uint32 = {'#': 6, 'endian': 1, 'field': 0x86, 'name': 'uint32', 'invalid': 0xFFFFFFFF, 'size': 4}
string = {'#': 7, 'endian': 0, 'field': 0x07, 'name': 'string', 'invalid': 0x00, 'size': 1}
float32 = {'#': 8, 'endian': 1, 'field': 0x88, 'name': 'float32', 'invalid': 0xFFFFFFFF, 'size': 2}
float64 = {'#': 9, 'endian': 1, 'field': 0x89, 'name': 'float64', 'invalid': 0xFFFFFFFFFFFFFFFF, 'size': 4}
uint8z = {'#': 10, 'endian': 0, 'field': 0x0A, 'name': 'uint8z', 'invalid': 0x00, 'size': 1}
uint16z = {'#': 11, 'endian': 1, 'field': 0x8B, 'name': 'uint16z', 'invalid': 0x0000, 'size': 2}
uint32z = {'#': 12, 'endian': 1, 'field': 0x8C, 'name': 'uint32z', 'invalid': 0x00000000, 'size': 4}
byte = {'#': 13, 'endian': 0, 'field': 0x0D, 'name': 'byte', 'invalid': 0xFF, 'size': 1} # array of byte, field is invalid if all bytes are invalid
@staticmethod
def get_format(basetype):
formats = {
0: 'B', 1: 'b', 2: 'B', 3: 'h', 4: 'H', 5: 'i', 6: 'I', 7: 's', 8: 'f',
9: 'd', 10: 'B', 11: 'H', 12: 'I', 13: 'c',
}
return formats[basetype['#']]
@staticmethod
def pack(basetype, value):
"""function to avoid DeprecationWarning"""
if basetype['#'] in (1,2,3,4,5,6,10,11,12):
value = int(value)
fmt = FitBaseType.get_format(basetype)
return pack(fmt, value)
class Fit(object):
HEADER_SIZE = 12
GMSG_NUMS = {
'file_id': 0,
'device_info': 23,
'weight_scale': 30,
'file_creator': 49,
}
class FitEncoder(Fit):
def timestamp(self, t):
"""the timestamp in fit protocol is seconds since
UTC 00:00 Dec 31 1989 (631065600)"""
if isinstance(t, datetime):
t = time.mktime(t.timetuple())
return t - 631065600
class FitEncoder_Weight(FitEncoder):
FILE_TYPE = 9
LMSG_TYPE_FILE_INFO = 0
LMSG_TYPE_FILE_CREATOR = 1
LMSG_TYPE_DEVICE_INFO = 2
LMSG_TYPE_WEIGHT_SCALE = 3
def __init__(self):
self.buf = BytesIO()
self.write_header() # create header first
self.device_info_defined = False
self.weight_scale_defined = False
def __str__(self):
orig_pos = self.buf.tell()
self.buf.seek(0)
lines = []
while True:
b = self.buf.read(16)
if not b:
break
lines.append(' '.join(['%02x' % ord(c) for c in b]))
self.buf.seek(orig_pos)
return '\n'.join(lines)
def write_header(self, header_size=Fit.HEADER_SIZE,
protocol_version=16,
profile_version=108,
data_size=0,
data_type=b'.FIT'):
self.buf.seek(0)
s = pack('BBHI4s', header_size, protocol_version, profile_version, data_size, data_type)
self.buf.write(s)
def _build_content_block(self, content):
field_defs = []
values = []
for num, basetype, value, scale in content:
s = pack('BBB', num, basetype['size'], basetype['field'])
field_defs.append(s)
if value is None:
# invalid value
value = basetype['invalid']
elif scale is not None:
value *= scale
values.append(FitBaseType.pack(basetype, value))
return (b''.join(field_defs), b''.join(values))
def write_file_info(self, serial_number=None, time_created=None, manufacturer=None, product=None, number=None):
if time_created is None:
time_created = datetime.now()
content = [
(3, FitBaseType.uint32z, serial_number, None),
(4, FitBaseType.uint32, self.timestamp(time_created), None),
(1, FitBaseType.uint16, manufacturer, None),
(2, FitBaseType.uint16, product, None),
(5, FitBaseType.uint16, number, None),
(0, FitBaseType.enum, self.FILE_TYPE, None), # type
]
fields, values = self._build_content_block(content)
# create fixed content
msg_number = self.GMSG_NUMS['file_id']
fixed_content = pack('BBHB', 0, 0, msg_number, len(content)) # reserved, architecture(0: little endian)
self.buf.write(b''.join([
# definition
self.record_header(definition=True, lmsg_type=self.LMSG_TYPE_FILE_INFO),
fixed_content,
fields,
#record
self.record_header(lmsg_type=self.LMSG_TYPE_FILE_INFO),
values,
]))
def write_file_creator(self, software_version=None, hardware_version=None):
content = [
(0, FitBaseType.uint16, software_version, None),
(1, FitBaseType.uint8, hardware_version, None),
]
fields, values = self._build_content_block(content)
msg_number = self.GMSG_NUMS['file_creator']
fixed_content = pack('BBHB', 0, 0, msg_number, len(content)) # reserved, architecture(0: little endian)
self.buf.write(b''.join([
# definition
self.record_header(definition=True, lmsg_type=self.LMSG_TYPE_FILE_CREATOR),
fixed_content,
fields,
#record
self.record_header(lmsg_type=self.LMSG_TYPE_FILE_CREATOR),
values,
]))
def write_device_info(self, timestamp, serial_number=None, cum_operationg_time=None, manufacturer=None,
product=None, software_version=None, battery_voltage=None, device_index=None,
device_type=None, hardware_version=None, battery_status=None):
content = [
(253, FitBaseType.uint32, self.timestamp(timestamp), 1),
(3, FitBaseType.uint32z, serial_number, 1),
(7, FitBaseType.uint32, cum_operationg_time, 1),
(8, FitBaseType.uint32, None, None), # unknown field(undocumented)
(2, FitBaseType.uint16, manufacturer, 1),
(4, FitBaseType.uint16, product, 1),
(5, FitBaseType.uint16, software_version, 100),
(10, FitBaseType.uint16, battery_voltage, 256),
(0, FitBaseType.uint8, device_index, 1),
(1, FitBaseType.uint8, device_type, 1),
(6, FitBaseType.uint8, hardware_version, 1),
(11, FitBaseType.uint8, battery_status, None),
]
fields, values = self._build_content_block(content)
if not self.device_info_defined:
header = self.record_header(definition=True, lmsg_type=self.LMSG_TYPE_DEVICE_INFO)
msg_number = self.GMSG_NUMS['device_info']
fixed_content = pack('BBHB', 0, 0, msg_number, len(content)) # reserved, architecture(0: little endian)
self.buf.write(header + fixed_content + fields)
self.device_info_defined = True
header = self.record_header(lmsg_type=self.LMSG_TYPE_DEVICE_INFO)
self.buf.write(header + values)
def write_weight_scale(self, timestamp, weight, percent_fat=None, percent_hydration=None,
visceral_fat_mass=None, bone_mass=None, muscle_mass=None, basal_met=None,
active_met=None, physique_rating=None, metabolic_age=None, visceral_fat_rating=None):
content = [
(253, FitBaseType.uint32, self.timestamp(timestamp), 1),
(0, FitBaseType.uint16, weight, 100),
(1, FitBaseType.uint16, percent_fat, 100),
(2, FitBaseType.uint16, percent_hydration, 100),
(3, FitBaseType.uint16, visceral_fat_mass, 100),
(4, FitBaseType.uint16, bone_mass, 100),
(5, FitBaseType.uint16, muscle_mass, 100),
(7, FitBaseType.uint16, basal_met, 4),
(9, FitBaseType.uint16, active_met, 4),
(8, FitBaseType.uint8, physique_rating, 1),
(10, FitBaseType.uint8, metabolic_age, 1),
(11, FitBaseType.uint8, visceral_fat_rating, 1),
]
fields, values = self._build_content_block(content)
if not self.weight_scale_defined:
header = self.record_header(definition=True, lmsg_type=self.LMSG_TYPE_WEIGHT_SCALE)
msg_number = self.GMSG_NUMS['weight_scale']
fixed_content = pack('BBHB', 0, 0, msg_number, len(content)) # reserved, architecture(0: little endian)
self.buf.write(header + fixed_content + fields)
self.weight_scale_defined = True
header = self.record_header(lmsg_type=self.LMSG_TYPE_WEIGHT_SCALE)
self.buf.write(header + values)
def record_header(self, definition=False, lmsg_type=0):
msg = 0
if definition:
msg = 1 << 6 # 6th bit is a definition message
return pack('B', msg + lmsg_type)
def crc(self):
orig_pos = self.buf.tell()
self.buf.seek(0)
crc = 0
while(True):
b = self.buf.read(1)
if not b:
break
crc = _calcCRC(crc, unpack('b', b)[0])
self.buf.seek(orig_pos)
return pack('H', crc)
def finish(self):
"""re-weite file-header, then append crc to end of file"""
data_size = self.get_size() - self.HEADER_SIZE
self.write_header(data_size=data_size)
crc = self.crc()
self.buf.seek(0, 2)
self.buf.write(crc)
def get_size(self):
orig_pos = self.buf.tell()
self.buf.seek(0, 2)
size = self.buf.tell()
self.buf.seek(orig_pos)
return size
def getvalue(self):
return self.buf.getvalue()
|
983,895 | f4835dc1927e53b58abded5a24781f26f640d3f4 | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Remember to add pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import csv
import time
class SchoolPipeline(object):
head_line = ["name","score","rate_type","overall_quality_score","level_of_difficulty_score",
"com","credit","attendance","textbook","would_take_again","grade","comment"]
def open_spider(self, spider):
self.f = open("./data2.csv", "a+", newline="", encoding="utf-8")
self.writer = csv.DictWriter(self.f, self.head_line)
self.writer.writeheader()
def process_item(self, item, spider):
self.writer.writerow(dict(item))
# time.sleep(2)
return item
def close_spider(self, spider):
self.f.close()
|
983,896 | f6010c7f3936a2ab1cf7c371b702e5a2ef4c70b5 | # Generated by Django 3.2 on 2021-06-25 18:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('evaluation', '0008_remove_teachersdetails_age'),
]
operations = [
migrations.CreateModel(
name='Stud',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Sub', models.CharField(max_length=30)),
],
),
]
|
983,897 | 974ee94ce484461cae64f6d4f465c14211cce729 | from django.db import models
from django.urls import exceptions
from django.db.models import Max
from django.shortcuts import resolve_url
from django.utils.translation import ugettext_lazy as _
class Menu(models.Model):
title = models.CharField(
_('title of menu'),
max_length=128,
unique=True
)
updated = models.DateTimeField(auto_now=True)
def __str__(self):
return self.title
class Meta:
verbose_name = _('Menu')
verbose_name_plural = _('Menus')
class MenuItem(models.Model):
parent = models.ForeignKey(
'self',
verbose_name=_('parent menu item'),
blank=True,
null=True,
related_name='children',
on_delete=models.CASCADE,
)
menu = models.ForeignKey(
Menu,
verbose_name=_('menu'),
related_name='menu_items',
on_delete=models.CASCADE
)
name = models.CharField(
_('name of menu item'),
max_length=128
)
raw_url = models.CharField(
_('url of menu item'),
help_text=_('can be url or named django app url'),
max_length=300
)
sort_order = models.PositiveIntegerField(_('order'), null=True, blank=True)
class Meta:
verbose_name = _('Menu Item')
verbose_name_plural = _('Menu Items')
ordering = ('sort_order', )
@property
def url(self):
try:
return resolve_url(self.raw_url)
except exceptions.NoReverseMatch:
return self.raw_url
def __str__(self):
return self.name
def save(self, *args, **kwargs):
if self.sort_order is None:
max_sort_order = self._meta.model.objects.filter(menu=self.menu).aggregate(max_order=Max('sort_order'))['max_order']
self.sort_order = max_sort_order + 1 if max_sort_order is not None else 0
super().save(*args, **kwargs) |
983,898 | 92374e5b65b23291bbc10ef20b8e3eedabf8f096 | DEBUG = True
ALLOWED_HOSTS = (
'app.captivise.com',
'.jpclients.com',
)
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'captivise',
'USER': 'jp74',
'PASSWORD': 'BbK8of$R9%rtCHf6D*RXwKse6pPf3!1e',
'HOST': '127.0.0.1',
'PORT': '',
},
}
# Email
EMAIL_HOST = 'mailtrap.io'
EMAIL_HOST_USER = '1999352a376c5581b'
EMAIL_HOST_PASSWORD = '3f2730828b18f1'
EMAIL_PORT = '2525'
# Googleads
ADWORDS_DEVELOPER_TOKEN = 'WyUU6lrzentIzanlF7BzQQ'
ADWORDS_CLIENT_ID = '451839149375-d7cujajfgfgb2g7s2abp5l7pnuvi2vn1.apps.googleusercontent.com'
ADWORDS_SECRET_KEY = 'PaZFjB-9PVaHG66tGJC1nNhM'
ANALYTICS_TID = 'UA-107932867-1'
# django-compressor
COMPRESS_ENABLED = True
# Payment settings
ECOM6_PAYMENT_OPTIONS = {
'default': {
'merchant_ID': 103237,
'secret_key': 'Agency12Also14Entity',
'country_code': 'gb',
},
'continuous_authority': {
'merchant_ID': 103237,
'secret_key': 'Agency12Also14Entity',
'country_code': 'gb',
},
}
ECOM6_CALLBACK_SCHEME = 'http'
ECOM6_CALLBACK_HOST = 'captivise.jpclients.com'
# Determines whether the environment should be able to make google ads
# changes.
SHOULD_MUTATE_GOOGLE_ADS = False
|
983,899 | 6ae7bbe4d4b282e2757d1a7a5ebc0e78f00b1a8c | from pynput.keyboard import Listener, Key
lista = []
def press(key):
lista.append(key)
def release(key):
if key == Key.shift:
print(lista)
with Listener(on_press=press, on_release=release) as listener:
listener.join()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.