text stringlengths 38 1.54M |
|---|
from django.shortcuts import render, redirect
from django.http import HttpResponse, JsonResponse
# Create your views here.
"""
request是httprequest的一个对象
request包含一些浏览器的提交信息
"""
def index(request):
    """Render the booktest index page."""
    # num = "1" + 1  # leftover debug line; would raise TypeError if re-enabled
    return render(request, 'booktest/index.html')
def show_arg(request, num):
    """Echo the URL-captured argument back as the HTTP response.

    BUG FIX: Django always passes the request object as a view's first
    positional argument; the original signature `show_arg(num)` would
    receive the request in `num` and never see the URL parameter.
    """
    return HttpResponse(num)
def login(request):
    """Show the login page, pre-filling the username from the cookie.

    An already-authenticated session is redirected straight to the index.
    """
    # FIX: `.has_key()` was removed in Python 3 / modern Django sessions;
    # the membership operator is the supported spelling.
    if 'islogin' in request.session:
        # User already logged in.
        return redirect('/index')
    # Pre-fill the username from the cookie when present ('' otherwise).
    username = request.COOKIES.get('username', '')
    return render(request, 'booktest/login.html', {'username': username})
def login_check(request):
    """Validate the submitted credentials and log the user in.

    POST params: username, password, remember ('on' when the
    "remember me" checkbox was ticked).
    Returns a redirect to /index on success, /login on failure.
    """
    # request.POST holds the POST parameters (QueryDict).
    username = request.POST.get('username')
    password = request.POST.get('password')
    remember = request.POST.get('remember')
    # NOTE(review): credentials are hard-coded for the tutorial; a real
    # application must check a user table with hashed passwords.
    if username == "admin" and password == "123456":
        # Login succeeded: redirect to the index page.
        # BUG FIX: the original built the response only inside the
        # `remember == 'on'` branch, so a successful login without
        # "remember me" fell off the function and returned None
        # (a 500 in Django).
        response = redirect('/index')
        if remember == 'on':
            # Remember the username for two weeks.
            response.set_cookie('username', username, max_age=14 * 24 * 3600)
        # Record the logged-in state in the session.
        request.session['islogin'] = True
        return response
    # Login failed: back to the login page.
    return redirect('/login')
def ajax_test(request):
    """Render the page that issues the ajax request."""
    return render(request, 'booktest/ajax.html')
def ajax_handle(request):
    """Answer the ajax request with the JSON payload {'res': 1}.

    BUG FIX: a Django view must accept the request as its first
    argument; the original `ajax_handle()` raised TypeError when routed.
    """
    return JsonResponse({'res': 1})
def login_ajax(request):
    """Render the ajax login page."""
    return render(request, 'booktest/login_ajax.html')
# login_ajax_check
def login_ajax_check(request):
    """Validate an ajax login attempt.

    Unlike the page-based flow, the answer is JSON rather than a
    rendered template: {'res': 1} on success, {'res': 0} on failure.
    """
    user = request.POST.get('username')
    pwd = request.POST.get('password')
    ok = user == "admin" and pwd == "123456"
    return JsonResponse({'res': 1 if ok else 0})
def set_cookie(request):
    """Set the demo cookie 'num' with a two-week lifetime.

    BUG FIX: a Django view must accept the request as its first
    argument; the original `set_cookie()` raised TypeError when routed.
    """
    response = HttpResponse('设置cookie')
    # Cookie lives for two weeks (14 days in seconds).
    response.set_cookie('num', 1, max_age=14 * 24 * 3600)
    return response
def get_cookie(request):
    """Return the value of the 'num' cookie ('' when it is not set).

    ROBUSTNESS FIX: the original indexed request.COOKIES directly and
    raised KeyError (a 500) when the cookie was absent.
    """
    num = request.COOKIES.get('num', '')
    return HttpResponse(num)
def set_session(request):
    """Store demo values (username, age) in the session."""
    request.session['username'] = 'zsk'
    request.session['age'] = 18
    return HttpResponse('设置session')
def get_session(request):
    """Read username and age back from the session and render them
    as 'username:age'. Raises KeyError if set_session never ran."""
    parts = (request.session['username'], str(request.session['age']))
    return HttpResponse(":".join(parts))
|
from fonctions.conn_liste import conn
from fonctions.fonction_print_armures import print_armure
import fonctions.fonction_lancement_menu_combat as jeu
def equiper_armure(user, armure):
    """Equip `armure` on `user`: write the armour id into the matching
    slot column of equipement_users, then return to the combat menu.

    Raises KeyError for an unknown armour type (the original if/elif
    chain raised UnboundLocalError instead, which hid the real cause).
    """
    # Whitelist mapping armour type -> column name.  A dict lookup
    # replaces the six-branch if/elif chain and guarantees that only
    # known column names ever reach the SQL string.
    colonnes = {
        "casque": "id_casque",
        "plastron": "id_plastron",
        "jambières": "id_jambières",
        "bottes": "id_bottes",
        "anneau": "id_anneau",
        "collier": "id_collier",
    }
    colonne = colonnes[armure.type_armure]
    # NOTE(review): the ids are interpolated into the SQL string as in
    # the original; armure.id / user.id are presumably integers coming
    # from the database.  Switching the values to driver parameter
    # binding would be safer — confirm the driver's paramstyle first.
    update_query = "UPDATE equipement_users SET {0} = {1} WHERE id_Users = {2}".format(
        colonne, armure.id, user.id)
    update_cursor = conn.cursor()
    update_cursor.execute(update_query)
    conn.commit()
    jeu.ShowMenu(user)
|
# добавим обработку исключений. ЭТО БАЗОВАЯ ГИГИЕНА ПРОГРАММИСТА, БЕЗ НЕЕ НЕ БЕРУТ НА РАБОТУ
# исключение надо добавлять перед той частью кода, которая выдает ошибку:
# 1) отсоединяемся от интернета ->
# Traceback (most recent call last):
# File "API_and_weather.py", line 31, in <module>
# print(weather_by_city('Moscow, Russia'))
# File "API_and_weather.py", line 18, in weather_by_city
# result = requests.get(weather_url, params= params)
# 2) отдаем неверный адрес сайта для обработки запроса (-> 404) ->
# json.decoder.JSONDecodeError: Expecting value...
# File "weatherweather.py", line 25, in weather_by_city
# weather = result.json()
from flask import current_app # так мы обращаемся к текущему flask-приолжению
import requests
def weather_by_city(city_name):
    """Query the weather API for *city_name*.

    Returns the first 'current_condition' entry from the JSON answer,
    or False on any network, HTTP, or parse problem.
    """
    weather_url = current_app.config['WEATHER_URL']
    params = {
        'key': current_app.config['WEATHER_API_KEY'],
        'q': city_name,
        'format': 'json',
        'num_of_days': 1,
        'lang': 'ru'
    }
    try:
        result = requests.get(weather_url, params=params)
        # A 4xx/5xx status from the weather server raises HTTPError
        # (a RequestException subclass) here.
        result.raise_for_status()
        # Malformed JSON raises a ValueError subclass here.
        weather = result.json()
        if 'data' in weather and 'current_condition' in weather['data']:
            try:
                return weather['data']['current_condition'][0]
            except (IndexError, TypeError):
                return False
    except (requests.RequestException, ValueError):
        print('Сетевая ошибка')  # logged for the developer
        return False             # signalled to the caller
    # Answer arrived but did not contain the expected keys.
    return False
# NOTE(review): running this module directly will fail, because
# weather_by_city reads current_app.config and therefore needs an
# active Flask application context — confirm before relying on it.
if __name__ == '__main__':
    print(weather_by_city('Moscow, Russia'))
# варианты ответов сервера на запросы клиента:
# 200 - все отлично (любые двухсотые коды)
# 301 - страница перемещена на другой адрес (коды 301, 302 и др)
# 401 - нужно авторизоваться
# 404 - страница не найдена
# 500 - на сервере произошла ошибка (можно его увидеть, если из application.py убрать debug=True,
# из weatherweather.py убрать ValueError и задать неверный адрес страницы для запроса)
# Ошибка 500 выглядит так: Internal Server Error
# The server encountered an internal error and was unable to complete your request.
# Either the server is overloaded or there is an error in the application (в браузере) |
from rest.api.views import CreateOrGetGame, CreateOrUpdatePlayer
from django.urls import path
# URL namespace for {% url 'rest:...' %} reversing.
app_name = "rest"
urlpatterns = [
    # NOTE(review): both route names start with 'post-create'; consider
    # more descriptive names (e.g. 'game', 'player') — confirm no
    # templates reverse these names before renaming.
    path('game/', CreateOrGetGame.as_view(), name='post-create'),
    path('player/', CreateOrUpdatePlayer.as_view(), name='post-create-1'),
]
|
import re
import sqlalchemy as sa
from sqlalchemy.ext.declarative import (
declarative_base,
declared_attr,
)
from sqlalchemy.orm import (
scoped_session,
sessionmaker,
relationship,
)
from zope.sqlalchemy import ZopeTransactionExtension
from horus.models import (
GroupMixin,
UserMixin,
UserGroupMixin,
ActivationMixin,
)
class BaseModel(object):
    """Base class which auto-generates tablename, and surrogate
    primary key column.
    """
    # InnoDB + utf8 for every table created from this base (MySQL only;
    # other backends ignore these keys).
    __table_args__ = {
        'mysql_engine': 'InnoDB',
        'mysql_charset': 'utf8'
    }

    @declared_attr
    def id(self):
        # Surrogate integer primary key shared by all models.
        return sa.Column(sa.Integer, primary_key=True)

    # Attribute used by traversal code to look instances up.
    _traversal_lookup_key = 'id'

    @declared_attr
    def __tablename__(cls):
        """Convert CamelCase class name to underscores_between_words
        table name."""
        # 'Mixin' is stripped so e.g. UserMixin subclasses map to 'user'.
        name = cls.__name__.replace('Mixin', '')
        return (
            name[0].lower() +
            re.sub(r'([A-Z])', lambda m: '_' + m.group(0).lower(), name[1:])
        )
# Thread-local session whose lifecycle is tied to Zope/pyramid_tm
# transactions (commit/abort handled by the transaction manager).
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
# Declarative base: every model inherits the BaseModel conveniences.
Base = declarative_base(cls=BaseModel)
class User(UserMixin, Base):
    """Application user (auth columns come from horus' UserMixin)."""
    # Display name, in addition to the mixin's credentials.
    name = sa.Column(sa.Unicode(255))
class Group(GroupMixin, Base):
    """User group; all columns come from horus' GroupMixin."""
    pass
class UserGroup(UserGroupMixin, Base):
    """User <-> group association; columns come from UserGroupMixin."""
    pass
class Activation(ActivationMixin, Base):
    """Account-activation token; columns come from ActivationMixin."""
    pass
class Badge(Base):
    """An achievement badge that can be linked to users via UserBadge."""
    name = sa.Column(sa.Unicode(255), nullable=False)
    # NOTE(review): the backref 'badges' is created ON UserBadge, so
    # user_badge.badges yields this Badge — the names look swapped
    # relative to their plurality; confirm intent before renaming.
    users = relationship('UserBadge', backref='badges')
class UserBadge(Base):
    """Association row linking a User to a Badge."""
    user_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)))
    badge_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Badge.__tablename__)))
class Video(Base):
    """A YouTube-hosted video that can appear inside a lesson Block."""
    name = sa.Column(sa.Unicode(255))
    # YouTube video identifier (the part after v= in the URL).
    youtube_id = sa.Column(sa.Unicode(255))
    # Per-user viewing records (see AccessedVideo).
    accesses = relationship('AccessedVideo', backref='access_videos')
    # The single Block (if any) that embeds this video.
    block = relationship('Block', uselist=False)

    def __json__(self, request):
        # Pyramid JSON renderer hook.
        return {u'youtube_id': self.youtube_id, u'name': self.name}
class AccessedVideo(Base):
    """One viewing session of a video by a user."""
    video_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Video.__tablename__)))
    user_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)))
    # When the user started and stopped watching.
    start = sa.Column(sa.DateTime())
    stop = sa.Column(sa.DateTime())
    # Playback position reached (units not shown here — presumably
    # seconds; confirm against the player code).
    position = sa.Column(sa.Integer())
class Course(Base):
    """Course catalogue entry with metadata, staff and enrollments."""
    # URL-friendly identifier.
    slug = sa.Column(sa.Unicode(255), nullable=False)
    name = sa.Column(sa.Unicode(255))
    description = sa.Column(sa.UnicodeText())
    abstract = sa.Column(sa.UnicodeText())
    knowledge_acquired = sa.Column(sa.UnicodeText())
    knowledge_required = sa.Column(sa.UnicodeText())
    professors = relationship('CourseProfessors', backref='course')
    # Optional introduction video.
    intro_video_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Video.__tablename__)))
    intro_video = relationship('Video')
    students = relationship('CourseStudents', backref='courses')
    status = sa.Column(sa.Unicode(255))
    # discuss with anderson how the estimated time will be represented
    time_estimated = sa.Column(sa.Unicode(255))
    # NOTE(review): 'extra_dadication' looks like a typo for
    # 'extra_dedication'; renaming requires a schema migration.
    extra_dadication = sa.Column(sa.Unicode(255))
    publication_date = sa.Column(sa.Date())

    def __unicode__(self):
        """This is used to render the model in a relation field. Must return an
        unicode string."""
        return self.name

    def __repr__(self):
        return '<Curso {0}>'.format(self.name)
    # wiki
    # forum
    # notes
class CourseStudents(Base):
    """Enrollment of a user (student) in a course."""
    course_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Course.__tablename__)))
    students_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)))
    # When the student enrolled.
    enrollment = sa.Column(sa.DateTime())
class CourseProfessors(Base):
    """Assignment of a user (professor) to a course."""
    course_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Course.__tablename__)), nullable=False)
    user_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)), nullable=False)
    user = relationship('User', backref='course')
    start = sa.Column(sa.DateTime())
    # Professor biography shown on the course page.
    biography = sa.Column(sa.UnicodeText())
    # course role
class Lesson(Base):
    """An ordered lesson inside a course, composed of content Blocks."""
    name = sa.Column(sa.Unicode(255))
    desc = sa.Column(sa.Unicode(255))
    # Ordering of the lesson within its course.
    position = sa.Column(sa.Integer())
    course_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Course.__tablename__)))
    course = relationship('Course', backref='lessons')
    students = relationship('LessonStudent', backref='lessons')
    # Content blocks via the lesson_block association table, ordered.
    blocks = relationship('Block', secondary='lesson_block', order_by="Block.position")
class LessonStudent(Base):
    """Per-student progress record for one lesson."""
    lesson_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Lesson.__tablename__)))
    user_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)))
    start = sa.Column(sa.DateTime())
    end = sa.Column(sa.DateTime())
    # Progress measure (scale not shown here — presumably percent;
    # confirm against the UI code).
    progress = sa.Column(sa.Integer())
class Note(Base):
    """A user's free-text note attached to a video within a lesson."""
    text = sa.Column(sa.UnicodeText())
    video_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Video.__tablename__)))
    lesson_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Lesson.__tablename__)))
    user_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)))
class Access(Base):
    """Bare record of a user access (only the user reference is stored)."""
    user_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)))
class Activity(Base):
    """Generic class to activities
    Data templates (data e type atributes):
    Multiple choice
        type: multiplechoice
        data: {question: "", choices: ["choice1", "choice2", ...]}
        expected_answer_data: {choices: [0, 2, 5]} # list of espected choices, zero starting
    Single choice
        type: singlechoice
        data: {question: "", choices: ["choice1", "choice2", ...]}
        expected_answer_data: {choice: 1}
    """
    title = sa.Column(sa.Unicode(255))
    # Discriminator string, e.g. 'multiplechoice' / 'singlechoice'.
    type = sa.Column(sa.Unicode(255))
    # JSON-encoded payloads per the templates in the class docstring.
    data = sa.Column(sa.UnicodeText())
    expected_answer_data = sa.Column(sa.UnicodeText())
    block = relationship('Block', uselist=False)

    def __json__(self, request):
        # Pyramid JSON renderer hook.
        return {
            u'title': self.title,
            u'type': self.type,
            u'data': self.data,
            u'expected_answer_data': self.expected_answer_data
        }
class Block(Base):
    """A positioned content unit of a lesson: an activity or a video."""
    activity_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Activity.__tablename__)))
    activity = relationship('Activity', uselist=False)
    video_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Video.__tablename__)))
    video = relationship('Video', uselist=False)
    lessons = relationship('Lesson', secondary='lesson_block')
    # Ordering of the block within its lesson.
    position = sa.Column(sa.Integer())

    def __json__(self, request):
        # Pyramid JSON renderer hook (nested objects use their __json__).
        return {u'activity': self.activity, u'video': self.video, u'position': self.position}
# Association table backing the Lesson <-> Block many-to-many.
lesson_block = sa.Table('lesson_block', Base.metadata,
    sa.Column(u'lesson_id', sa.Integer, sa.ForeignKey('{0}.id'.format(Lesson.__tablename__))),
    sa.Column(u'block_id', sa.Integer, sa.ForeignKey('{0}.id'.format(Block.__tablename__))),
)
class Answer(Base):
    """A user's submitted answer to an Activity."""
    activity_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(Activity.__tablename__)))
    activity = relationship('Activity', uselist=False)
    user_id = sa.Column(sa.Integer, sa.ForeignKey('{0}.id'.format(User.__tablename__)), nullable=False)
    user = relationship('User', uselist=False)
    # When the answer was submitted.
    timestamp = sa.Column(sa.DateTime())
    free_text_answer = sa.Column(sa.UnicodeText())
|
"""
Посчитать четные и нечетные цифры введенного натурального числа.
Например, если введено число 34560, то у него 3 четные цифры (4, 6 и 0) и 2 нечетные (3 и 5)
https://drive.google.com/file/d/1-c9vOMQsLOY7X0w8UF7x7N5d7shKWT_h/view?usp=sharing
"""
def recursion(n):
    """Return how many digits of the natural number *n* are even.

    Iterative equivalent of the recursive digit split: peel the last
    digit off with % 10 // 10 until a single digit remains.
    """
    # n % 2 tests the parity of the last digit.
    count = 1 if n % 2 == 0 else 0
    while n >= 10:
        n //= 10
        count += n % 2 == 0
    return count
# Read a natural number, count its even digits recursively, and derive
# the odd-digit count from the total number of digits.
number = int(input('Введите, пожалуста, натуральное число: '))
count = recursion(number)
print(f'Введенное число содержит {count} четных чисел и {len(str(number)) - count} нечетных чисел')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 23 17:19:26 2018
@author: kazuki.onodera
previous_application
"""
import numpy as np
import pandas as pd
import gc
from multiprocessing import Pool
from glob import glob
import utils
utils.start(__file__)
#==============================================================================
# Join key into the main application table.
KEY = 'SK_ID_CURR'
# Prefix for every generated feature column.
PREF = 'prev_102'
# Worker processes for the multiprocessing pool below.
NTHREAD = 16
# Numeric columns of previous_application to aggregate.
# NOTE(review): FLAG_LAST_APPL_PER_CONTRACT is listed among the numeric
# columns although its name suggests a flag/categorical — confirm dtype.
col_num = ['AMT_ANNUITY', 'AMT_APPLICATION', 'AMT_CREDIT', 'AMT_DOWN_PAYMENT',
           'AMT_GOODS_PRICE', 'HOUR_APPR_PROCESS_START',
           'FLAG_LAST_APPL_PER_CONTRACT', 'NFLAG_LAST_APPL_IN_DAY',
           'RATE_DOWN_PAYMENT', 'RATE_INTEREST_PRIMARY', 'RATE_INTEREST_PRIVILEGED',
           'CNT_PAYMENT',
           'DAYS_DECISION', 'DAYS_FIRST_DRAWING', 'DAYS_FIRST_DUE',
           'DAYS_LAST_DUE_1ST_VERSION', 'DAYS_LAST_DUE', 'DAYS_TERMINATION',
           'NFLAG_INSURED_ON_APPROVAL']
# Categorical columns (documented but unused below; kept for reference).
col_cat = ['NAME_CONTRACT_TYPE', 'WEEKDAY_APPR_PROCESS_START',
           'NAME_CASH_LOAN_PURPOSE', 'NAME_CONTRACT_STATUS', 'NAME_PAYMENT_TYPE',
           'CODE_REJECT_REASON', 'NAME_TYPE_SUITE', 'NAME_CLIENT_TYPE',
           'NAME_GOODS_CATEGORY', 'NAME_PORTFOLIO', 'NAME_PRODUCT_TYPE',
           'CHANNEL_TYPE', 'NAME_SELLER_INDUSTRY', 'NAME_YIELD_GROUP', 'PRODUCT_COMBINATION']
# Secondary group-by keys fanned out to the pool (one task per key).
col_group = ['SK_ID_PREV', 'NAME_CONTRACT_TYPE', 'WEEKDAY_APPR_PROCESS_START',
             'NAME_CASH_LOAN_PURPOSE', 'NAME_CONTRACT_STATUS', 'NAME_PAYMENT_TYPE',
             'CODE_REJECT_REASON', 'NAME_TYPE_SUITE', 'NAME_CLIENT_TYPE',
             'NAME_GOODS_CATEGORY', 'NAME_PORTFOLIO', 'NAME_PRODUCT_TYPE',
             'CHANNEL_TYPE', 'NAME_SELLER_INDUSTRY', 'NAME_YIELD_GROUP', 'PRODUCT_COMBINATION']
# =============================================================================
# feature
# =============================================================================
# Module-level data shared (copy-on-write via fork) with the workers.
prev = utils.read_pickles('../data/previous_application')
base = prev[[KEY]].drop_duplicates().set_index(KEY)
train = utils.load_train([KEY])
test = utils.load_test([KEY])
def nunique(x):
    """Number of distinct values in the iterable *x*."""
    return len(frozenset(x))
def multi_gr2(k):
    """Build two-level aggregation features grouped by (SK_ID_CURR, k)
    and write train/test feature pickles for this secondary key.

    Runs inside a multiprocessing worker; reads the module-level
    `prev`, `train`, `test` frames and mutates the module-level `base`.
    NOTE(review): `base` accumulates columns across every `k` a worker
    process handles, so later tasks in the same worker also carry the
    earlier tasks' columns — confirm this is intended.
    """
    gr2 = prev.groupby([KEY, k])
    gc.collect()
    print(k)
    keyname = 'gby-'+'-'.join([KEY, k])
    # group size statistics per SK_ID_CURR
    gr1 = gr2.size().groupby(KEY)
    name = f'{PREF}_{keyname}_size'
    base[f'{name}_min'] = gr1.min()
    base[f'{name}_max'] = gr1.max()
    base[f'{name}_max-min'] = base[f'{name}_max'] - base[f'{name}_min']
    base[f'{name}_mean'] = gr1.mean()
    base[f'{name}_std'] = gr1.std()
    base[f'{name}_sum'] = gr1.sum()
    # NOTE(review): this '_nunique' uses gr1.size(), i.e. the number of
    # (KEY, k) groups, not nunique of sizes as the name suggests; the
    # numeric columns below use gr1.apply(nunique) — confirm intent.
    base[f'{name}_nunique'] = gr1.size()
    for v in col_num:
        # per-group min, re-aggregated per SK_ID_CURR
        gr1 = gr2[v].min().groupby(KEY)
        name = f'{PREF}_{keyname}_{v}_min'
        base[f'{name}_max'] = gr1.max()
        base[f'{name}_mean'] = gr1.mean()
        base[f'{name}_std'] = gr1.std()
        base[f'{name}_sum'] = gr1.sum()
        base[f'{name}_nunique'] = gr1.apply(nunique)
        # per-group max
        gr1 = gr2[v].max().groupby(KEY)
        name = f'{PREF}_{keyname}_{v}_max'
        base[f'{name}_min'] = gr1.min()
        base[f'{name}_mean'] = gr1.mean()
        base[f'{name}_std'] = gr1.std()
        base[f'{name}_sum'] = gr1.sum()
        base[f'{name}_nunique'] = gr1.apply(nunique)
        # per-group mean
        gr1 = gr2[v].mean().groupby(KEY)
        name = f'{PREF}_{keyname}_{v}_mean'
        base[f'{name}_min'] = gr1.min()
        base[f'{name}_max'] = gr1.max()
        base[f'{name}_max-min'] = base[f'{name}_max'] - base[f'{name}_min']
        base[f'{name}_mean'] = gr1.mean()
        base[f'{name}_std'] = gr1.std()
        base[f'{name}_sum'] = gr1.sum()
        base[f'{name}_nunique'] = gr1.apply(nunique)
        # per-group std
        gr1 = gr2[v].std().groupby(KEY)
        name = f'{PREF}_{keyname}_{v}_std'
        base[f'{name}_min'] = gr1.min()
        base[f'{name}_max'] = gr1.max()
        base[f'{name}_max-min'] = base[f'{name}_max'] - base[f'{name}_min']
        base[f'{name}_mean'] = gr1.mean()
        base[f'{name}_std'] = gr1.std()
        base[f'{name}_sum'] = gr1.sum()
        base[f'{name}_nunique'] = gr1.apply(nunique)
        # per-group sum
        gr1 = gr2[v].sum().groupby(KEY)
        name = f'{PREF}_{keyname}_{v}_sum'
        base[f'{name}_min'] = gr1.min()
        base[f'{name}_max'] = gr1.max()
        base[f'{name}_max-min'] = base[f'{name}_max'] - base[f'{name}_min']
        base[f'{name}_mean'] = gr1.mean()
        base[f'{name}_std'] = gr1.std()
        base[f'{name}_nunique'] = gr1.apply(nunique)
    # align features onto train/test rows and persist
    base.reset_index(inplace=True)
    df = pd.merge(train, base, on=KEY, how='left').drop(KEY, axis=1)
    utils.to_pickles(df, f'../data/102_{k}_train', utils.SPLIT_SIZE)
    df = pd.merge(test, base, on=KEY, how='left').drop(KEY, axis=1)
    utils.to_pickles(df, f'../data/102_{k}_test', utils.SPLIT_SIZE)
    print(f'finish {k}')
    return
# =============================================================================
# gr2
# =============================================================================
# Fan one multi_gr2 task per secondary key out to NTHREAD processes.
pool = Pool(NTHREAD)
# multi_gr2 returns None; the result list is kept only to block until done.
callback = pool.map(multi_gr2, col_group)
pool.close()
#==============================================================================
utils.end(__file__)
|
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 8 23:42:51 2013
@author: olgis
Problem:
The series, 1**1 + 2**2 + 3**3 + ... + 10**10 = 10405071317.
Find the last ten digits of the series, 1**1 + 2**2 + 3**3 + ... + 1000**1000.
"""
import time
res = 0
for i in xrange(1, 1001):
res += pow(i,i)
start = time.time()
product = str(res)[-10:]
elapsed = time.time() - start
print "Last 10 digits is %s, found in %s sec." %(product, elapsed)
#Last 10 digits is 9110846700, found in 0.00121998786926 sec. |
# Demo data: look for chat_n inside search_list.
search_list = ["a", "b", "c", "d", "e", "f"]
chat_n = "a"
# NOTE(review): a lambda that merely forwards to find_num_fun adds
# nothing; PEP 8 prefers `find_num = find_num_fun` or a plain def.
find_num = lambda s_list, n: find_num_fun(s_list, n)
def find_num_fun(s_list, n):
    """Linear scan of s_list for n, printing 'ok' at the first match;
    always returns the string "fuck".

    NOTE(review): indentation was lost in this copy of the file; the
    if/else pairing below (printing 'not ok' for each non-matching item
    before the match) is the most plausible reading — confirm against
    the original source.
    """
    for item in s_list:
        if n == item:
            print("ok")
            break
        else:
            print("not ok")
    return "fuck"

print(find_num(search_list, chat_n))
# for n in search_list:
# if n is chat_n:
# print(id(n))
# print(id(chat_n))
# print("ok")
# break
# else:
# print("bad") |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-01 23:58
#
# Sites migrations because I can't figure out how to set Kloud51 to stop redirecting me to davehub.net
#
#############################################
from __future__ import unicode_literals
from django.db import migrations
import socket
from django.contrib.sites.models import Site
def changeHostname(apps, schema_editor) -> None:
    """Point the Sites-framework record at the right domain for this host.

    A hostname containing 'calligraphy' is treated as production
    (davehub.net); anything else is assumed to be a local dev box.

    FIXES: renamed the typo'd parameter `schemd_editor` (RunPython
    passes it positionally, so the rename is safe) and hoisted the
    duplicated save() out of both branches.

    NOTE(review): data migrations normally obtain the historical model
    via apps.get_model('sites', 'Site') instead of importing Site
    directly — confirm the Site schema is stable enough for this.
    """
    my_site = Site.objects.get_current()
    if 'calligraphy' in socket.gethostname():
        my_site.name = 'CaliSet'
        my_site.domain = 'davehub.net'
    else:
        my_site.name = 'CaliSet Local'
        my_site.domain = '127.0.0.1:8000'
    my_site.save()
class Migration(migrations.Migration):
    """Data migration: set the Site name/domain for the current host."""
    dependencies = [
        ('calligraphy', '0044_auto_20161121_1843'),
    ]
    # NOTE(review): no reverse_code is given, so this migration cannot
    # be unapplied cleanly — confirm that is acceptable.
    operations = [ migrations.RunPython(changeHostname) ]
|
import sys
# Fast stdin reader; deliberately shadows the builtin input().
input = lambda: sys.stdin.readline().rstrip()
n = int(input())
arr = list(map(int,input().split()))
arr.sort()
total = sum(arr)
# Baseline cost: presumably matching every element to 1 (1**i for any
# exponent), i.e. sum(arr[i] - 1) — valid when all arr[i] >= 1; confirm
# against the problem constraints.
ret = total-n
c = 2
# Try each base c; stop once the largest power alone exceeds the bound.
while c**(n-1)<=2*total:
    # Negated powers so that tmp[i] + arr[i] == arr[i] - c**i below.
    tmp = [-1*(c**i) for i in range(n)]
    cand = 0
    for i in range(n):
        cand += abs(tmp[i]+arr[i])  # |arr[i] - c**i|
    ret = min(ret,cand)
    c += 1
print(ret)
|
"""
本节用一个曲线拟合的例子体现普通训练的 overfitting以及 dropout 的优越性
"""
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
tf.set_random_seed(1)
np.random.seed(1)
# 超参数
N_SAMPLES = 20
N_HIDDEN = 300 # 大网络,过拟合
LR = 0.01
# training data
x = np.linspace(-1 , 1 , N_SAMPLES)[ : ,np.newaxis]
y = x + 0.3 * np.random.randn(N_SAMPLES)[ : ,np.newaxis] # 加白噪声
# test data
test_x = x.copy()
test_y = test_x + 0.3 * np.random.randn(N_SAMPLES)[ : ,np.newaxis]
# 展示数据
plt.scatter(x, y, c='magenta', s=50, alpha=0.5, label='train')
plt.scatter(test_x, test_y, c='cyan', s=50, alpha=0.5, label='test')
plt.legend(loc='upper left')
plt.ylim((-2.5, 2.5))
plt.show()
# 设置传入值
tf_x = tf.placeholder(tf.float32 , [None , 1])
tf_y = tf.placeholder(tf.float32 , [None , 1])
tf_is_training = tf.placeholder(tf.bool , None) # 训练时就有dropout,否则就没有
# 搭建overfitting网络:
# placeholder - 网络层 - 输出层 - loss - train_op
o1 = tf.layers.dense(tf_x , N_HIDDEN , tf.nn.relu)
o2 = tf.layers.dense(o1 , N_HIDDEN , tf.nn.relu)
o_out = tf.layers.dense(o2 , 1)
o_loss = tf.losses.mean_squared_error(tf_y , o_out)
o_train = tf.train.AdamOptimizer(LR).minimize(o_loss)
# 搭建dropout网络,就是每层中间夹一层dropout
d1 = tf.layers.dense(tf_x , N_HIDDEN , tf.nn.relu)
d1 = tf.layers.dropout(d1 , rate = 0.5 , training = tf_is_training) # dropout掉 50%的神经元
d2 = tf.layers.dense(d1 , N_HIDDEN , tf.nn.relu)
d2 = tf.layers.dropout(d2 , rate = 0.5 , training = tf_is_training)
d_out = tf.layers.dense(d2 , 1)
d_loss = tf.losses.mean_squared_error(tf_y , d_out)
d_train = tf.train.AdamOptimizer(LR).minimize(d_loss)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
plt.ion()
for i in range(500):
sess.run([o_train , d_train] , feed_dict={tf_x : x , tf_y : y , tf_is_training : True})
if i % 10 == 0:
plt.cla() # 清除轴
o_loss_ , d_loss_ , o_out_ , d_out_ = sess.run(
[o_loss , d_loss , o_out , d_out],
{tf_x : test_x , tf_y : test_y , tf_is_training : False}, # 测试的时候不需要dropout
)
# 画图部分,直接复制了
plt.scatter(x, y, c='magenta', s=50, alpha=0.3, label='train'); plt.scatter(test_x, test_y, c='cyan', s=50, alpha=0.3, label='test')
plt.plot(test_x, o_out_, 'r-', lw=3, label='overfitting'); plt.plot(test_x, d_out_, 'b--', lw=3, label='dropout(50%)')
plt.text(0, -1.2, 'overfitting loss=%.4f' % o_loss_, fontdict={'size': 20, 'color': 'red'}); plt.text(0, -1.5, 'dropout loss=%.4f' % d_loss_, fontdict={'size': 20, 'color': 'blue'})
plt.legend(loc='upper left'); plt.ylim((-2.5, 2.5)); plt.pause(0.1)
plt.ioff()
plt.show()
|
#671. Second Minimum Node In a Binary Tree
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    """LeetCode 671. Second Minimum Node In a Binary Tree.

    Each node has 0 or 2 children and node.val is the smaller of its
    children's values, so root.val is the global minimum; the answer is
    the smallest value strictly greater than it, or -1 if none exists.
    """

    def findSecondMinimumValue(self, root: 'TreeNode') -> int:
        # BUG FIX: the annotation must be a string — TreeNode is only
        # defined in a comment here, so the bare name raised NameError
        # the moment the class body was evaluated.
        # O(n) time, O(h) recursion stack.
        if not root:
            return -1
        bigger = []  # values strictly greater than the minimum (root.val)

        def traverse(node):
            if not node:
                return
            if node.val > root.val:
                bigger.append(node.val)
                return  # children are >= node.val, no need to descend
            traverse(node.left)
            traverse(node.right)

        traverse(root)
        return min(bigger) if bigger else -1
|
import scrapy
class JobsSpider(scrapy.Spider):
    """Crawl craigslist US regions for landscaping job postings."""
    name = 'jobs'
    # Maps region URL -> human-readable region name, filled in parse().
    # NOTE(review): this is a class attribute, shared by all instances.
    region_url_to_name = {}
    start_urls = ['https://geo.craigslist.org/iso/us']
    custom_settings = {
        'DOWNLOAD_TIMEOUT': 10,
    }

    def parse(self, response):
        """Collect every region link, then fan out one job search per region."""
        region_links = response.css('.geo-site-list a')
        for link in region_links:
            region_name = link.css('::text').get()
            region_url = link.css('::attr(href)').get()
            self.region_url_to_name[region_url] = region_name
        for (current_url, current_region_name) in self.region_url_to_name.items():
            print(current_url)
            # jjj = craigslist's jobs section; fixed query term.
            current_query_url = current_url + "/search/jjj?query=landscaping"
            cb_kwargs = {"region_name": current_region_name}
            yield response.follow(current_query_url, self.parse_job_search, cb_kwargs=cb_kwargs)

    def parse_job_search(self, response, region_name):
        """Follow every job posting found on a region's search results page."""
        job_page_links = response.css('.result-row a::attr(href)').getall()
        cb_kwargs = {"region_name": region_name}
        yield from response.follow_all(job_page_links, self.parse_job, cb_kwargs=cb_kwargs)

    @staticmethod
    def parse_job(response, region_name):
        """Scrape one job posting page into an item dict."""
        def extract_with_css(query):
            # First match, stripped; '' when nothing matches.
            return response.css(query).get(default='').strip()
        def extract_body(query):
            # Concatenate all text fragments of the posting body.
            return ''.join(response.css(query).getall())
        # The parenthesised location next to the title, if present.
        location_name = response.css('.postingtitle small::text').get()
        if location_name is not None:
            location_name = location_name.replace("(", "").replace(")", "").strip()
        yield {
            'title': extract_with_css('#titletextonly::text'),
            'description': extract_body('#postingbody::text'),
            # NOTE(review): 'lattitude' is a typo kept for output
            # compatibility; downstream consumers key on it.
            'lattitude': response.css('#map::attr(data-latitude)').get(),
            'longitude': response.css('#map::attr(data-longitude)').get(),
            'location_name': location_name,
            'region_name': region_name,
            'url': response.url,
        }
|
# Print 1..100, skipping multiples of 7, numbers ending in 7, and the
# whole 70-79 range.  (a - 7) % 10 != 0 is equivalent to a % 10 != 7.
for a in range(1, 101):
    if a % 7 and a % 10 != 7 and not 70 <= a <= 79:
        print(a)
|
# Copyright 2013
# Pramod Dematagoda <pmd.lotr.gandalf@gmail.com>
#
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
'''
Module that provides a class that helps working with the
victims hash collection
'''
import pymongo
import os
from datetime import datetime, timedelta
MTIME_FMT = "%j:%Y:%H:%M:%S"
CTIME_FMT = "%d:%m:%Y"
# The seconds in a day
DAY_SECONDS = 86400
VICTIM_HASHES = "hashes"
class VictimDB:
    """
    Class to provide an easy connection to the victims database
    """
    __hash_table = None       # Reference to the table in use
    __hash_table_name = None  # Name of the table in use
    __hash_db = None          # Reference to the DB in use
    __victim_table = None     # Reference to the victim hashes table

    def __init__ (self, db_name='victims',
                  host=os.getenv ("OPENSHIFT_MONGODB_DB_HOST"),
                  port=int (os.getenv ("OPENSHIFT_MONGODB_DB_PORT")),
                  table='submissions',
                  victim_conn=False):
        # NOTE(review): the port default crashes with TypeError at import
        # time when OPENSHIFT_MONGODB_DB_PORT is unset — confirm the
        # environment always provides it before hardening.
        try:
            '''
            If pymongo is version 2.3 or less we need
            to get a Connection object
            '''
            # NOTE(review): float() on a three-part version string such
            # as "3.11.0" raises ValueError — confirm the deployed
            # pymongo versions before relying on this comparison.
            if float (pymongo.version) <= 2.3:
                client = pymongo.Connection (host, port)
            else:
                client = pymongo.MongoClient (host, port)
        # BUG FIX: the bare names ConnectionFailure / InvalidName were
        # never imported, so these handlers themselves raised NameError;
        # they must be qualified via pymongo.errors.
        except pymongo.errors.ConnectionFailure:
            raise
        try:
            db = client[db_name]
            if (os.getenv ("OPENSHIFT_MONGODB_DB_USERNAME") and
                os.getenv ("OPENSHIFT_MONGODB_DB_PASSWORD")):
                '''
                Authenticate against the given database
                using the credentials in the environment
                '''
                db.authenticate (os.getenv ("OPENSHIFT_MONGODB_DB_USERNAME"),
                                 os.getenv ("OPENSHIFT_MONGODB_DB_PASSWORD"))
        except pymongo.errors.InvalidName:
            raise pymongo.errors.ConnectionFailure ()
        self.__hash_table = pymongo.collection.Collection (db, table)
        if victim_conn:
            self.__victim_table = pymongo.collection.Collection (db,
                                                                 VICTIM_HASHES)
        '''
        Save the table name and DB reference in case
        the table needs to be renewed.
        '''
        self.__hash_table_name = table
        self.__hash_db = db

    def add_victim (self, package_name,
                    package_version,
                    vendor,
                    cves,
                    package_format,
                    package_url,
                    state='REQUESTED'):
        """
        Adds a potential victim entry to the victims database.
        Inputs :
        package_name - name of the victim package
        package_version - version of the victim package
        vendor - vendor of the victim package
        cves - list of cves affecting the victim
        package_format - language of the package
        package_url - URL of the package
        state - the state of the victim entry
        Outputs :
        Returns True on success
        Returns False on failure
        """
        # Refuse duplicates in either the submissions or victims table.
        # NOTE(review): cursor.count() is deprecated in modern pymongo
        # (use count_documents) — keep in step with the pinned version.
        if self.__hash_table.find ({'name' : package_name,
                                    'version' : package_version}).count ():
            return False
        elif self.__victim_table:
            if self.__victim_table.find ({'name' : package_name,
                                          'version' : package_version}).count ():
                return False
        date = datetime.strftime (datetime.utcnow (),
                                  CTIME_FMT)
        self.__hash_table.insert ({
                'submitter' :
                    {'name' : "victims-ingestor"},
                'name' : package_name,
                'version' : package_version,
                'vendor' : vendor,
                'cves' : cves,
                'format' : package_format,
                'source' : package_url,
                'approval' :
                    {'date' : date, 'status' : state},
                'entry' : {}
                })
        return True

    def get_victim_entry (self, package_name, package_version):
        """
        Get a single victim entry that
        corresponds to the given parameters.
        Inputs :
        package_name - name of the victim to be fetched
        package_version - version of the victim to be fetched
        Outputs :
        Returns the matching table entry
        """
        return self.__hash_table.find_one ({'name' : package_name,
                                            'version' : package_version})

    def get_victim_entries (self, package_name, package_version):
        """
        Get a list of all victim entries that
        correspond to the given parameters.
        Inputs :
        package_name - name of the victim to be fetched
        package_version - version of the victim to be fetched
        Outputs :
        Returns the matching table entries
        """
        return self.__hash_table.find ({'name' : package_name,
                                        'version' : package_version})

    def create_cache (self, data):
        """
        Function that creates the cache from the given data
        dictionary.
        Inputs :
        data - dictionary, format specified in sources.py,
        to be added as the cache.
        """
        # NOTE(review): if 'vendor' is itself a key of data[p_name]
        # (as the lookup below implies), this loop also inserts a bogus
        # row with version == 'vendor' — confirm against sources.py.
        for p_name in data.keys ():
            for p_version in data[p_name].keys ():
                self.__hash_table.insert ({'name' : p_name,
                                           'version' : p_version,
                                           'cves' : data[p_name][p_version],
                                           'vendor' : data[p_name]['vendor']})

    def get_cache (self):
        """
        Returns the cache from the connected DB.
        Outputs :
        Returns a dictionary containing victim entries
        conforming to the format specified in sources.py.
        """
        entries = {}
        # Skip the mtime bookkeeping document ({'cache_att': True}).
        data = self.__hash_table.find ({'cache_att' : None})
        for entry in data:
            if entry['name'] not in entries:
                entries[entry['name']] = {}
                entries[entry['name']]['vendor'] = entry['vendor']
            entries[entry['name']][entry['version']] = entry['cves']
        return entries

    def renew_table (self):
        """
        Renew the table in use by recreating it from scratch.
        """
        self.__hash_table.drop ()
        self.__hash_table = pymongo.collection.Collection (self.__hash_db,
                                                           self.__hash_table_name)

    def add_mtime_stamp (self):
        """
        Add a modificiation time stamp to the database.
        """
        #Insert a new modified timestamp in to the cache collection
        mtimestr = datetime.strftime (datetime.utcnow (), MTIME_FMT)
        self.__hash_table.insert ({'cache_att' : True, 'mtime' : mtimestr})

    def check_mtime_within (self, d_seconds=DAY_SECONDS):
        """
        Check if the cache is up to date.
        Inputs :
        d_seconds - the delta to check for in seconds
        Outputs :
        Returns True on mtime stamp within d_seconds
        Returns False on mtime stamp outside d_seconds
        """
        if self.__hash_table.find_one ({'cache_att' : True}):
            mtimestr = self.__hash_table.find_one ({'cache_att' : True})['mtime']
            mtime = datetime.strptime (mtimestr, MTIME_FMT)
            if mtime >= (datetime.utcnow () - timedelta (seconds=d_seconds)):
                return True
        return False
|
#! /usr/bin/env python
#
#This file is used to get the flow and system parameters for the simulation.
import tkMessageBox as tkmb
from Tkinter import Frame, Label, Entry, OptionMenu, Button, Text, \
DoubleVar, StringVar, IntVar
from capsim_object_types import CapSimWindow, Chemical
from capsim_functions import get_superfont
from database import Database
class LayerEditor:
    """Tk editor window for creating or editing a single cap layer: its name,
    material, tortuosity correction, thickness, hydrodynamic dispersivity and
    dissolved organic matter concentration."""
    def __init__(self, master, system, layer, layers, editflag):
        """Constructor method. Defines the parameters to be obtained in this
        window.

        master   -- hosting CapSimWindow
        system   -- system object (units, fonts, matrices, chemicals, sorptions)
        layer    -- layer being edited (values ignored when creating a new one)
        layers   -- list of all existing layers, used to build the name options
        editflag -- 1 to edit an existing layer, 0 to create a new one
        """
        self.master = master
        self.fonttype = system.fonttype
        self.version = system.version
        self.superfont = get_superfont(self.fonttype) #superscript font
        self.tframe = Frame(master.tframe)
        self.frame = Frame(master.frame)
        self.bframe = Frame(master.bframe)
        self.top = None #flag for existence of toplevel#
        self.system = system
        self.lengthunit = system.lengthunit
        self.layer = layer
        self.layers = layers
        self.types = []              # material names available for this layer
        self.tort_types = []         # default tortuosity model per material
        self.torts = layer.torts     # all selectable tortuosity models
        self.h = DoubleVar(value = 10.0)     # thickness (default 10)
        self.alpha = DoubleVar(value = 1.0)  # hydrodynamic dispersivity
        self.doc = DoubleVar(value = 0.0)    # dissolved organic matter conc.
        for matrix in system.matrices:
            self.types.append(matrix.name)
            # default tortuosity is taken from the matrix's first component
            self.tort_types.append(matrix.components[0].tort)
        self.type = StringVar(value = self.types[0])
        self.tort = StringVar(value = self.tort_types[0])
        self.editflag = editflag
        self.cancelflag = 0
        if self.editflag == 1:
            # editing an existing layer: preload widgets with current values
            self.type.set(layer.type)
            self.tort.set(layer.tort)
            self.h.set(layer.h)
            self.alpha.set(layer.alpha)
            self.doc.set(layer.doc)
        self.names = []
        if len(layers) == 0:
            self.names.append('Deposition')
            self.names.append('Layer 1')
        else:
            for layer in self.layers: self.names.append(layer.name)
            self.names.append('Layer ' + str(layers[-1].number + 1))
            # at most one deposition layer per system: offer the option only
            # when it does not exist yet
            if self.names[0] == 'Deposition': self.names.remove(self.names[0])
            else: self.names.insert(0, 'Deposition')
        if self.editflag == 0: self.name = StringVar(value = self.names[-1])
        else: self.name = StringVar(value = self.layer.name)
        self.matrices = system.matrices
        self.chemicals = system.chemicals
        self.sorptions = system.sorptions
    def make_widgets(self):
        """Make the widgets for the window."""
        self.bgcolor = self.frame.cget('bg')
        self.instructions = Label(self.frame, text = ' Please input the following information about the layer properties: ')
        self.namelabel = Label(self.frame, text = 'Layer')
        self.typelabel = Label(self.frame, text = 'Material')
        self.tortlabel = Label(self.frame, text = 'Tortuosity Correction')
        self.hlabel = Label(self.frame, text = 'Thickness')
        self.alphalabel = Label(self.frame, text = 'Hydrodynamic\n' +'Dispersivity')
        self.doclabel = Label(self.frame, text = 'Dissolved organic\n'+'matter concentration ')
        # the layer name is selectable only when creating a new layer
        if self.editflag == 0: self.namewidget = OptionMenu(self.frame, self.name, *self.names, command = self.click_type)
        else: self.namewidget = Label(self.frame, textvariable = self.name, justify = 'center' )
        self.typewidget = OptionMenu(self.frame, self.type, *self.types, command = self.click_tortuosity)
        self.hwidget = Entry(self.frame, textvariable = self.h, width = 8, justify = 'center')
        self.alphawidget = Entry(self.frame, textvariable = self.alpha, width = 8, justify = 'center')
        self.docwidget = Entry(self.frame, textvariable = self.doc, width = 8, justify = 'center')
        self.thickunits = Label(self.frame, text = self.lengthunit)
        # deposition thickness is a rate, hence the '/yr' unit variant
        self.depthickunits = Label(self.frame, text = self.lengthunit + '/yr')
        self.alphaunits = Label(self.frame, text = self.lengthunit)
        self.docunits = Label(self.frame, text = 'mg/L')
        # blank labels used only to fix the column widths
        self.blankcolumn = Label(self.frame, text = ' ', width = 2)
        self.namecolumn = Label(self.frame, text = ' ', width = 14)
        self.typecolumn = Label(self.frame, text = ' ', width = 18)
        self.tortcolumn = Label(self.frame, text = ' ', width = 20)
        self.thickcolumn = Label(self.frame, text = ' ', width = 12)
        self.alphacolumn = Label(self.frame, text = ' ', width = 12)
        self.doccolumn = Label(self.frame, text = ' ', width = 12)
        self.okbutton = Button(self.frame, text = 'OK', width = 20, command = self.OK)
        self.cancelbutton = Button(self.frame, text = 'Cancel', width = 20, command = self.Cancel)
        self.blank1 = Label(self.frame, text = ' ')
        self.blank2 = Label(self.frame, text = ' ')
        #show the widgets on the grid
        self.instructions.grid( row = 0, column = 0, columnspan = 6, padx = 8, sticky = 'W')
        self.blankcolumn.grid( row = 1, column = 0)
        self.namecolumn.grid( row = 1, column = 1)
        self.typecolumn.grid( row = 1, column = 2)
        self.tortcolumn.grid( row = 1, column = 3)
        self.thickcolumn.grid( row = 1, column = 4)
        self.alphacolumn.grid( row = 1, column = 5)
        self.doccolumn.grid( row = 1, column = 6)
        self.namelabel.grid( row = 2, column = 1, sticky = 'WE', padx = 4, pady = 1)
        self.typelabel.grid( row = 2, column = 2, sticky = 'WE', padx = 1, pady = 1)
        self.tortlabel.grid( row = 2, column = 3, sticky = 'WE', padx = 1, pady = 1)
        self.hlabel.grid( row = 2, column = 4, sticky = 'WE', padx = 1, pady = 1)
        self.alphalabel.grid( row = 2, column = 5, sticky = 'WE', padx = 1, pady = 1)
        self.doclabel.grid( row = 2, column = 6, sticky = 'WE', padx = 1, pady = 1)
        self.namewidget.grid( row = 4, column = 1, sticky = 'WE', padx = 1, pady = 1)
        self.hwidget.grid( row = 4, column = 4, padx = 1, pady = 1)
        self.alphawidget.grid( row = 4, column = 5, padx = 1, pady = 1)
        self.docwidget.grid( row = 4, column = 6, padx = 1, pady = 1)
        self.thickunits.grid( row = 3, column = 4, sticky = 'WE', padx = 1, pady = 1)
        self.alphaunits.grid( row = 3, column = 5, sticky = 'WE', padx = 1, pady = 1)
        self.docunits.grid( row = 3, column = 6, sticky = 'WE', padx = 1, pady = 1)
        self.blank1.grid( row = 5)
        self.okbutton.grid( row = 6, columnspan = 11)
        self.cancelbutton.grid( row = 7, columnspan = 11)
        self.blank2.grid( row = 8)
        self.okbutton.bind('<Return>', self.OK)
        self.focusbutton = self.okbutton
        #if self.editflag == 0: self.click_type()
        self.click_type()
    def click_type(self, event = None):
        """Refresh the thickness unit label (rate for deposition) and the
        material/tortuosity widgets after the layer name changes."""
        try:
            self.thickunits.grid_forget()
            self.depthickunits.grid_forget()
        except:pass
        if self.name.get() == 'Deposition': self.depthickunits.grid( row = 3, column = 4, sticky = 'WE', padx = 1, pady = 1)
        else: self.thickunits.grid( row = 3, column = 4, sticky = 'WE', padx = 1, pady = 1)
        self.typewidget.grid( row = 4, column = 2, sticky = 'WE', padx = 1, pady = 1)
        # event = 1 tells click_tortuosity not to reset the user's selection
        self.click_tortuosity(event = 1)
    def click_tortuosity(self, event = None):
        """Give the default tortuosity model corresponding to the selected layer type."""
        #try:
        if event != 1: self.tort.set(self.tort_types[self.types.index(self.type.get())])
        # rebuild the option menu so it always reflects the available models
        self.tortwidget = OptionMenu(self.frame, self.tort, *self.torts)
        self.tortwidget.grid_forget()
        self.tortwidget.grid(row = 4, column = 3, padx =2, pady = 1, sticky = 'WE')
    def OK(self, event = None):
        """Validate the layer and close the window. Rejects a deposition layer
        whose material has any kinetically-sorbing chemical."""
        kinetic_sorption_check = 0
        # scan every component/chemical pair of the chosen material
        for component in self.matrices[self.types.index(self.type.get())].components:
            for chemical in self.chemicals:
                if self.sorptions[component.name][chemical.name].kinetic != 'Equilibrium':
                    kinetic_sorption_check = 1
        print(kinetic_sorption_check)  # NOTE(review): debug output left in
        if self.master.window.top is not None:
            self.master.open_toplevel()
        elif self.name.get() == 'Deposition' and kinetic_sorption_check == 1:
            self.layer_deposition_error()
        else: self.master.tk.quit()
    def layer_deposition_error(self):
        """Show the kinetic-sorption/deposition incompatibility error dialog."""
        tkmb.showerror(title = self.version, message = 'The deposition layer can not contain solid materials that have kinetic sorption!')
        self.focusbutton = self.okbutton
        self.master.tk.lift()
    def Cancel(self):
        """Restore the layer's stored values and close the window."""
        try:
            # NOTE(review): self.number is never set in __init__, so this
            # likely raises AttributeError and the except path runs — confirm
            self.number.set(self.layer.number)
            self.type.set(self.layer.type)
            self.tort.set(self.layer.tort)
            self.h.set(self.layer.h)
            self.alpha.set(self.layer.alpha)
            self.doc.set(self.layer.doc)
        except: self.cancelflag = 1
        if self.master.window.top is not None: self.master.open_toplevel()
        else: self.master.tk.quit()
class LayerDeleter:
    """Confirmation window that shows a layer's properties read-only and asks
    the user to confirm its deletion."""
    def __init__(self, master, system, layer):
        """Constructor method. Defines the parameters to be obtained in this
        window.

        master -- hosting CapSimWindow
        system -- system object (fonts, version)
        layer  -- the layer to be deleted
        """
        self.master = master
        self.fonttype = system.fonttype
        self.version = system.version
        self.superfont = get_superfont(self.fonttype) #superscript font
        self.tframe = Frame(master.tframe)
        self.frame = Frame(master.frame)
        self.bframe = Frame(master.bframe)
        self.top = None #flag for existence of toplevel#
        # plain copies (not Tk variables) — display only
        self.type = layer.type
        self.tort = layer.tort
        self.h = layer.h
        self.alpha = layer.alpha
        self.doc = layer.doc
        self.layer = layer
        self.cancelflag = 0
    def make_widgets(self):
        """Build and lay out the read-only summary widgets for the layer."""
        self.bgcolor = self.frame.cget('bg')
        self.instructions = Label(self.frame, text = ' Are you sure to delete the following layer? ')
        self.namelabel = Label(self.frame, text = 'Layer')
        self.typelabel = Label(self.frame, text = 'Material')
        self.tortlabel = Label(self.frame, text = 'Tortuosity Correction')
        self.hlabel = Label(self.frame, text = 'Thickness')
        self.alphalabel = Label(self.frame, text = 'Hydrodynamic\n' +'Dispersivity')
        self.doclabel = Label(self.frame, text = 'Dissolved organic\n'+'matter concentration ')
        # layer number 0 denotes the deposition layer
        if self.layer.number == 0: self.namewidget = Label(self.frame, text = 'Deposition')
        else: self.namewidget = Label(self.frame, text = 'Layer '+ str(self.layer.number))
        self.typewidget = Label(self.frame, text = self.type)
        self.tortwidget = Label(self.frame, text = self.tort)
        self.hwidget = Label(self.frame, text = self.h, width = 10)
        self.alphawidget = Label(self.frame, text = self.alpha, width = 10)
        self.docwidget = Label(self.frame, text = self.doc, width = 10)
        # NOTE(review): units hard-coded to cm here, unlike LayerEditor which
        # uses system.lengthunit — confirm intended
        self.thickunits = Label(self.frame, text = 'cm')
        self.depthickunits = Label(self.frame, text = 'cm/yr')
        self.alphaunits = Label(self.frame, text = 'cm')
        self.docunits = Label(self.frame, text = 'mg/L')
        # blank labels used only to fix the column widths
        self.blankcolumn = Label(self.frame, text = ' ', width = 2)
        self.namecolumn = Label(self.frame, text = ' ', width = 14)
        self.typecolumn = Label(self.frame, text = ' ', width = 18)
        self.tortcolumn = Label(self.frame, text = ' ', width = 20)
        self.thickcolumn = Label(self.frame, text = ' ', width = 12)
        self.alphacolumn = Label(self.frame, text = ' ', width = 12)
        self.doccolumn = Label(self.frame, text = ' ', width = 12)
        self.deletebutton = Button(self.frame, text = 'Delete', width = 20, command = self.Delete)
        self.cancelbutton = Button(self.frame, text = 'Cancel', width = 20, command = self.Cancel)
        self.blank1 = Label(self.frame, text = ' ')
        self.blank2 = Label(self.frame, text = ' ')
        #show the widgets on the grid
        self.instructions.grid(row = 0, column = 0, columnspan = 6, padx = 8, sticky = 'W')
        self.blankcolumn.grid( row = 1, column = 0)
        self.namecolumn.grid( row = 1, column = 1)
        self.typecolumn.grid( row = 1, column = 2)
        self.tortcolumn.grid( row = 1, column = 3)
        self.thickcolumn.grid( row = 1, column = 4)
        self.alphacolumn.grid( row = 1, column = 5)
        self.doccolumn.grid( row = 1, column = 6)
        self.namelabel.grid( row = 2, column = 1, sticky = 'WE', padx = 4, pady = 1)
        self.typelabel.grid( row = 2, column = 2, sticky = 'WE', padx = 1, pady = 1)
        self.tortlabel.grid( row = 2, column = 3, sticky = 'WE', padx = 1, pady = 1)
        self.hlabel.grid( row = 2, column = 4, sticky = 'WE', padx = 1, pady = 1)
        self.alphalabel.grid( row = 2, column = 5, sticky = 'WE', padx = 1, pady = 1)
        self.doclabel.grid( row = 2, column = 6, sticky = 'WE', padx = 1, pady = 1)
        if self.layer.number == 0: self.depthickunits.grid( row = 3, column = 4, sticky = 'WE', padx = 1, pady = 1)
        else: self.thickunits.grid( row = 3, column = 4, sticky = 'WE', padx = 1, pady = 1)
        self.alphaunits.grid( row = 3, column = 5, sticky = 'WE', padx = 1, pady = 1)
        self.docunits.grid( row = 3, column = 6, sticky = 'WE', padx = 1, pady = 1)
        self.namewidget.grid( row = 4, column = 1, sticky = 'WE', padx = 1, pady = 1)
        self.typewidget.grid( row = 4, column = 2, sticky = 'WE', padx = 1, pady = 1)
        self.tortwidget.grid( row = 4, column = 3, sticky = 'WE', padx = 1, pady = 1)
        self.hwidget.grid( row = 4, column = 4, sticky = 'WE', padx = 1, pady = 1)
        self.alphawidget.grid( row = 4, column = 5, sticky = 'WE', padx = 1, pady = 1)
        self.docwidget.grid( row = 4, column = 6, sticky = 'WE', padx = 1, pady = 1)
        self.blank1.grid( row = 5)
        self.deletebutton.grid( row = 6, columnspan = 11)
        self.cancelbutton.grid( row = 7, columnspan = 11)
        self.blank2.grid( row = 8)
        self.deletebutton.bind('<Return>', self.Delete)
        self.focusbutton = self.deletebutton
    def Delete(self, event = None):
        """Confirm the deletion and close the window (the caller performs the
        actual removal)."""
        if self.master.window.top is not None: self.master.open_toplevel()
        else: self.master.tk.quit()
    def Cancel(self):
        """Abort the deletion and close the window."""
        try:
            # NOTE(review): these attributes are plain values here, not Tk
            # variables, so .set() raises and the except path always runs
            self.number.set(self.layer.number)
            self.type.set(self.layer.type)
            self.tort.set(self.layer.tort)
            self.h.set(self.layer.h)
            self.alpha.set(self.layer.alpha)
            self.doc.set(self.layer.doc)
        except: self.cancelflag = 1
        if self.master.window.top is not None: self.master.open_toplevel()
        else: self.master.tk.quit()
|
#!/usr/bin/env python3
# Minimal tkinter demo: a window with a 'hi' button and a 'quit' button.
from tkinter import Tk, Button, messagebox
def say_hi():
    """Callback for the 'hi' button: log to stdout and show an info dialog."""
    print("They said hello!")
    messagebox.showinfo("title", "message")
    return 0
root = Tk()
# pass the function object itself (no parentheses) so Tk invokes it on click
hi = Button(text="hi", command=say_hi)  # say_hi: no parentheses
hi.pack(side="top")
quit_win = Button(text="quit", fg="red", command=root.destroy)
quit_win.pack(side="bottom")
root.mainloop()
|
from django.db import models
from django.db.models import fields
from django.contrib.auth.models import User
from rest_framework import serializers
from rest_framework.serializers import CurrentUserDefault
from rest_framework.authtoken.models import Token
from .models import Task
class TaskSerializer(serializers.ModelSerializer):
    """Serializer for Task objects.

    'name' and 'user' are optional at the field level so partial updates work;
    validate() still requires 'name' when creating a new Task.
    """
    name = serializers.CharField(max_length=64, required=False)
    user = serializers.PrimaryKeyRelatedField(queryset=User.objects.all(), required=False)

    class Meta:
        model = Task
        fields = '__all__'

    def validate(self, attrs):
        """Require a 'name' on creation (self.instance is None); updates may
        omit it. Raises ValidationError otherwise."""
        if not self.instance:
            if 'name' not in attrs:
                # fixed typos in the user-facing message ("Reauires"/"inialized")
                raise serializers.ValidationError("A Task requires a 'name' to be initialized")
        return attrs
class UserSerializer(serializers.ModelSerializer):
    """Serializer that registers a new User and returns an auth token.

    NOTE(review): user creation happens inside validate() as a side effect,
    so a validation pass already persists the user — conventionally this
    belongs in create(); confirm before relying on serializer.save().
    """
    username = serializers.CharField(write_only=True)
    password = serializers.CharField(write_only=True, trim_whitespace=False)
    token = serializers.CharField(read_only=True)
    class Meta:
        model = User
        fields = ('username', 'password', 'token')
    def validate_creds(self, username, password):
        # placeholder: always accepts — no real strength/format checks yet
        return True
    def validate(self, attrs):
        """Reject duplicate usernames, create the user, and attach a token."""
        username = attrs.get('username')
        password = attrs.get('password')
        if username and password:
            if User.objects.filter(username=username).exists() :
                mes = "A user of this 'Username' already exists. The 'Username' should be unique."
                raise serializers.ValidationError(mes)
            elif not self.validate_creds(username, password):
                mes = "You Must Provide a Reasonable 'Username' and Secure 'Password'."
                raise serializers.ValidationError(mes)
            # set_password hashes; never store the raw password
            user = User.objects.create(username=username)
            user.set_password(password)
            user.save()
            token, _ = Token.objects.get_or_create(user=user)
            # NOTE(review): this stores the Token object, not token.key; it
            # renders correctly only because Token.__str__ returns the key
            attrs['token'] = token
        else:
            mes = "You are required to provide a 'Username' and 'Password'."
            raise serializers.ValidationError(mes)
        return attrs
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 3 15:50:49 2019
@author: simsim
"""
from system import *
# choose the path with a failure at the beginning
def create(model):
    """Build the plant/environment system with transitions a, b and c, wire up
    its RNN, reset it, and return it. 'model' is forwarded unchanged to
    plant_environment."""
    proc = process("plant", ["s1", "s2", "s3"], [], [], "s1")
    env = process("environment", ["e1", "e2", "e3"], [], [], "e1")
    system = plant_environment("syst", proc, env, model = model)
    # transition a: s1->s2 in the plant, e1->e2 in the environment
    system.add_transition("a", ["plant", "environment"], [["s1"], ["e1"]], [["s2"], ["e2"]])
    # transition b: guarded on two states in each process
    system.add_transition("b", ["plant", "environment"], [["s1", "s3"], ["e2", "e3"]], [["s3", "s3"], ["e3", "e3"]])
    # transition c: self-loop on s2, no environment condition
    system.add_transition("c", ["plant", "environment"], [["s2"], []], [["s2"], []])
    system.create_RNN()
    system.reinitialize()
    return system
|
import tkinter
import tkinter.colorchooser
import tkinter.filedialog
class UserInput:
    """Toplevel dialog used either to sign up a new operator user
    (windowType == 'signUp') or to update an existing one."""
    def __init__(self,master,engine, username, updateWindow,windowType = None):
        # updateWindow: callback that refreshes the caller's user list
        self.updateWindow = updateWindow
        self.engine = engine
        self.userInput = tkinter.Toplevel(master)
        self.userInput.geometry("400x200")
        self.userInput.configure(background = 'white')
        self.userInput.resizable(0,0)
        self.oldUsername = username
        if(windowType == "signUp"):
            # window for registering a user and its attributes
            self.userInput.title("New user")
        else:
            self.userInput.title("Upgrade User")
        # username entry field
        self.userL = tkinter.Label(self.userInput,text='User: ',font = ('arial',15),bd=5,bg="white")
        self.userL.place(x=90,y=50)
        self.userE = tkinter.Entry(self.userInput)
        self.userE.place(x=150,y=50,height=30)
        if(windowType == "update"):
            # prefill with the current name when editing
            self.userE.insert(0,self.oldUsername)
        # password entry field (masked)
        self.passL = tkinter.Label(self.userInput,text='Password: ',font = ('arial',15),bd=5,bg="white")
        self.passL.place(x=46,y=100)
        self.passE = tkinter.Entry(self.userInput,show="*")
        self.passE.place(x=150,y=100,height=30)
        if(windowType == "signUp"):
            # button that triggers user creation
            self.loginButton = tkinter.Button(self.userInput,text="SIGN UP",cursor='hand2',command=self.newUser)
        else:
            self.loginButton = tkinter.Button(self.userInput,text="UPGRADE",cursor='hand2',command=self.updateUser)
        self.loginButton.place(x=150,y=150)
    def newUser(self):
        """Create a new operator user from the entry fields; warn if it exists."""
        # read username and password from the entries
        userAcc = self.userE.get()
        passwordAcc = self.passE.get()
        # engine returns truthy when the username is already taken
        exists = self.engine.createOperatorUser(userAcc,passwordAcc)
        if not exists:
            self.updateWindow()
            self.userInput.destroy()
        else:
            # user-facing message is Spanish: "the user already exists"
            tkinter.messagebox.showinfo(message="El usuario ya existe", title="Sign in error")
    def updateUser(self):
        """Rename/re-password the user being edited; warn on name collisions."""
        sameName = False
        userAcc = self.userE.get()
        passwordAcc = self.passE.get()
        usersList = self.engine.getOperatorUser()
        oldID = None
        # locate the id of the user currently being edited
        for userID, username in usersList:
            if username == self.oldUsername:
                oldID = userID
                sameName = True
                break
        exist = self.engine.updateOperatorUser(oldID,userAcc,passwordAcc)
        # keeping the same name is always allowed; otherwise the new name
        # must not already exist
        if(sameName or not exist):
            self.updateWindow()
            self.userInput.destroy()
        else:
            tkinter.messagebox.showinfo(message="El usuario ya existe", title="Update error")
|
"""
"""
from maya import cmds
from mamprefs import config
__all__ = ['script_output']
def script_output(direction):
    """
    Script output dock for layouts.

    direction -- dock area to attach the control to (e.g. 'left' or 'right').
    Returns the dockControl result when the dock already exists; otherwise
    builds the window, its reporter and context menu, then docks it.
    """
    dock_control = config['WINDOW_SCRIPT_OUTPUT_DOCK']
    dock_window = config['WINDOW_SCRIPT_OUTPUT']
    # dock already exists: just show it again (docked, not floating)
    if cmds.dockControl(dock_control, ex=True):
        return cmds.dockControl(dock_control, e=True, vis=True, fl=False)
    if cmds.window(dock_window, ex=True):
        main_win = dock_window
    else:
        # first run: build the window and its scroll-field reporter
        main_win = cmds.window(dock_window, title='Output Window')
        cmds.paneLayout(parent=main_win)
        # context menu
        output_win = cmds.cmdScrollFieldReporter(fst="")
        cmds.popupMenu(parent=output_win)
        cmds.menuItem(
            label='Clear Output',
            command=lambda c: cmds.cmdScrollFieldReporter(
                output_win, e=True, clear=True),
        )
        # Echo all commands toggle
        cmds.menuItem(
            label='Toggle Echo Commands',
            command=lambda c: cmds.commandEcho(
                state=not(cmds.commandEcho(q=True, state=True))),
        )
        # Go to python reference
        cmds.menuItem(
            label='Python Command Reference',
            command=lambda c: cmds.showHelp('DocsPythonCommands'),
        )
    # attach the window content as a dock control on the requested side
    cmds.dockControl(
        dock_control,
        content=main_win,
        label='Output Window',
        area=direction,
        height=500,
        floating=False,
        allowedArea=['left', 'right']
    )
if __name__ == '__main__':
    pass
|
import json
def error(message, code=500):
    """Log *message* to stdout and return it wrapped in a response envelope
    with the given error status code (default 500)."""
    text = "Error: %s" % message
    print(text)
    return response(message, code)
def response(message, code=200, data=None):
resp = {"statusCode": code, "body": {"message": message}}
if data:
resp["body"]["data"] = data
resp["body"] = json.dumps(resp["body"])
return resp
|
from django.contrib import admin
from leaflet.admin import LeafletGeoAdmin
from models import Message, Msgtype
# Register your models here.
class MessageAdmin(LeafletGeoAdmin):
    """Admin for Message with a Leaflet map widget for the location field."""
    # columns shown in the changelist
    list_display = ('name', 'email', 'show_location', 'created',)
    map_height = '300px'  # height of the Leaflet map widget
    zoom = 13             # default map zoom level
admin.site.register(Message, MessageAdmin)
admin.site.register(Msgtype)
|
from Rule import Rule
from Grammar import Grammar
def create_Grammar(file_name: str, type: str):
    """Parse a grammar definition file and build a Grammar.

    Expected file layout (lines 0-3 may carry '#' comments, which are
    stripped): terminals, non-terminals, start symbol, rule count, then —
    after one separator line — the production rules, one per line as
    whitespace-separated symbols. For type 'CFG' the last line of the file
    is a test string, returned alongside the grammar.

    file_name -- path of the grammar file
    type      -- 'CFG', 'right' (right-linear) or 'left' (left-linear)
    Returns Grammar, or (Grammar, string) when type == 'CFG'.
    """
    with open(file_name, 'r') as f:
        lines = f.readlines()
    # fixed: the redundant f.close() inside the with-block was removed —
    # the context manager already closes the file
    for i in range(4):
        lines[i] = lines[i][:lines[i].find('#')].strip()
    Vt = lines[0].split()
    Vn = lines[1].split()
    S = lines[2]
    P = []
    num_rules = int(lines[3])
    for i in range(num_rules):
        # rules start at index 5 — assumes one separator line after the
        # header (TODO confirm against the file format)
        temp = lines[i + 5].split()
        if type == 'CFG':
            P.append(Rule(temp[0], temp[1], '', type))
        elif type == 'right' and temp[-1] in Vn:
            # right-linear: trailing symbol is the non-terminal
            P.append(Rule(temp[0], ''.join(temp[1:-1]), temp[-1], type))
        elif type == 'left' and temp[1] in Vn:
            # left-linear: leading symbol after the head is the non-terminal
            P.append(Rule(temp[0], ''.join(temp[2:]), temp[1], type))
        else:
            P.append(Rule(temp[0], ''.join(temp[1:]), '', type))
    if type == 'CFG':
        string = lines[-1]  # NOTE(review): may retain its trailing newline
        return Grammar(Vt, Vn, S, P, type), string
    return Grammar(Vt, Vn, S, P, type)
|
import unittest
from conans.test.utils.tools import TestClient
from conans.util.files import load
from conans.paths import CONANINFO
import os
from conans.test.utils.conanfile import TestConanFile
class PackageIDTest(unittest.TestCase):
    """Tests for the package-ID requirement modes (semver, full_version,
    full_recipe, full_package, unrelated) and Visual Studio toolset
    compatibility, checking when dependents must or must not be rebuilt."""
    def setUp(self):
        self.client = TestClient()
    def _export(self, name, version, package_id_text=None, requires=None,
                channel=None, default_option_value="off", settings=None):
        """Create a test recipe and export it to the given channel
        (defaults to lasote/stable)."""
        conanfile = TestConanFile(name, version, requires=requires,
                                  options={"an_option": ["on", "off"]},
                                  default_options=[("an_option", "%s" % default_option_value)],
                                  package_id=package_id_text,
                                  settings=settings)
        self.client.save({"conanfile.py": str(conanfile)}, clean_first=True)
        self.client.run("export %s" % (channel or "lasote/stable"))
    @property
    def conaninfo(self):
        """Contents of conaninfo.txt in the client's current folder."""
        return load(os.path.join(self.client.current_folder, CONANINFO))
    def test_version_semver_schema(self):
        """semver(): only a major-version bump of the requirement changes
        Hello2's package id."""
        self._export("Hello", "1.2.0")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].semver()',
                     requires=["Hello/1.2.0@lasote/stable"])
        # Build the dependencies with --build missing
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install --build missing")
        self.assertIn("Hello2/2.Y.Z", [line.strip() for line in self.conaninfo.splitlines()])
        # Now change the Hello version and build it, if we install out requires should not be
        # needed the --build needed because Hello2 don't need to be rebuilt
        self._export("Hello", "1.5.0", package_id_text=None, requires=None)
        self.client.run("install Hello/1.5.0@lasote/stable --build missing")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].semver()',
                     requires=["Hello/1.5.0@lasote/stable"])
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install .")
        self.assertIn("Hello2/2.Y.Z", [line.strip() for line in self.conaninfo.splitlines()])
        # Try to change user and channel too, should be the same, not rebuilt needed
        self._export("Hello", "1.5.0", package_id_text=None, requires=None, channel="memsharded/testing")
        self.client.run("install Hello/1.5.0@memsharded/testing --build missing")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].semver()',
                     requires=["Hello/1.5.0@memsharded/testing"])
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install .")
        self.assertIn("Hello2/2.Y.Z", [line.strip() for line in self.conaninfo.splitlines()])
    def test_version_full_version_schema(self):
        """full_version_mode(): any version change of the requirement forces
        a rebuild; user/channel changes do not."""
        self._export("Hello", "1.2.0", package_id_text=None, requires=None)
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].full_version_mode()',
                     requires=["Hello/1.2.0@lasote/stable"])
        # Build the dependencies with --build missing
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install --build missing")
        self.assertIn("Hello2/2.3.8", self.conaninfo)
        # If we change the user and channel should not be needed to rebuild
        self._export("Hello", "1.2.0", package_id_text=None, requires=None, channel="memsharded/testing")
        self.client.run("install Hello/1.2.0@memsharded/testing --build missing")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].full_version_mode()',
                     requires=["Hello/1.2.0@memsharded/testing"])
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install .")
        self.assertIn("Hello2/2.3.8", self.conaninfo)
        # Now change the Hello version and build it, if we install out requires is
        # needed the --build needed because Hello2 needs to be build
        self._export("Hello", "1.5.0", package_id_text=None, requires=None)
        self.client.run("install Hello/1.5.0@lasote/stable --build missing")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].full_version_mode()',
                     requires=["Hello/1.5.0@lasote/stable"])
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        with self.assertRaises(Exception):
            self.client.run("install .")
        self.assertIn("Can't find a 'Hello2/2.3.8@lasote/stable' package", self.client.user_io.out)
    def test_version_full_recipe_schema(self):
        """full_recipe_mode(): user/channel changes of the requirement force
        a rebuild; its package-id (option) changes do not."""
        self._export("Hello", "1.2.0", package_id_text=None, requires=None)
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].full_recipe_mode()',
                     requires=["Hello/1.2.0@lasote/stable"])
        # Build the dependencies with --build missing
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install --build missing")
        self.assertIn("Hello2/2.3.8", self.conaninfo)
        # If we change the user and channel should be needed to rebuild
        self._export("Hello", "1.2.0", package_id_text=None, requires=None, channel="memsharded/testing")
        self.client.run("install Hello/1.2.0@memsharded/testing --build missing")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].full_recipe_mode()',
                     requires=["Hello/1.2.0@memsharded/testing"])
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        with self.assertRaises(Exception):
            self.client.run("install .")
        self.assertIn("Can't find a 'Hello2/2.3.8@lasote/stable' package", self.client.user_io.out)
        # If we change only the package ID from hello (one more defaulted option to True) should not affect
        self._export("Hello", "1.2.0", package_id_text=None, requires=None, default_option_value="on")
        self.client.run("install Hello/1.2.0@lasote/stable --build missing")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].full_recipe_mode()',
                     requires=["Hello/1.2.0@lasote/stable"])
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install .")
    def test_version_full_package_schema(self):
        """full_package_mode(): even a package-id (option) change of the
        requirement forces a rebuild."""
        self._export("Hello", "1.2.0", package_id_text=None, requires=None)
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].full_package_mode()',
                     requires=["Hello/1.2.0@lasote/stable"])
        # Build the dependencies with --build missing
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install --build missing")
        self.assertIn("Hello2/2.3.8", self.conaninfo)
        # If we change only the package ID from hello (one more defaulted option to True) should affect
        self._export("Hello", "1.2.0", package_id_text=None, requires=None, default_option_value="on")
        self.client.run("install Hello/1.2.0@lasote/stable --build missing")
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        with self.assertRaises(Exception):
            self.client.run("install .")
        self.assertIn("Can't find a 'Hello2/2.3.8@lasote/stable' package", self.client.user_io.out)
    def test_nameless_mode(self):
        """unrelated_mode(): no change of the requirement (even replacing it)
        affects Hello2's package id."""
        self._export("Hello", "1.2.0", package_id_text=None, requires=None)
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["Hello"].unrelated_mode()',
                     requires=["Hello/1.2.0@lasote/stable"])
        # Build the dependencies with --build missing
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        self.client.run("install --build missing")
        self.assertIn("Hello2/2.3.8", self.conaninfo)
        # If we change even the require, should not affect
        self._export("HelloNew", "1.2.0")
        self.client.run("install HelloNew/1.2.0@lasote/stable --build missing")
        self._export("Hello2", "2.3.8",
                     package_id_text='self.info.requires["HelloNew"].unrelated_mode()',
                     requires=["HelloNew/1.2.0@lasote/stable"])
        self.client.save({"conanfile.txt": "[requires]\nHello2/2.3.8@lasote/stable"}, clean_first=True)
        # Not needed to rebuild Hello2, it doesn't matter its requires
        self.client.run("install .")
    def test_toolset_visual_compatibility(self):
        """vs_toolset_compatible(): a binary built with native VS 14 satisfies
        VS 15 + toolset v140, but not an unrelated toolset."""
        # By default is the same to build with native visual or the toolchain
        for package_id in [None, "self.info.vs_toolset_compatible()"]:
            self._export("Hello", "1.2.0", package_id_text=package_id,
                         channel="user/testing",
                         settings='"compiler"')
            self.client.run('install Hello/1.2.0@user/testing '
                            ' -s compiler="Visual Studio" '
                            ' -s compiler.version=14 --build')
            # Should have binary available
            self.client.run('install Hello/1.2.0@user/testing'
                            ' -s compiler="Visual Studio" '
                            ' -s compiler.version=15 -s compiler.toolset=v140')
            # Should NOT have binary available
            error = self.client.run('install Hello/1.2.0@user/testing '
                                    '-s compiler="Visual Studio" '
                                    '-s compiler.version=15 -s compiler.toolset=v120',
                                    ignore_error=True)
            self.assertTrue(error)
            self.assertIn("Missing prebuilt package for 'Hello/1.2.0@user/testing'", self.client.out)
            # Specify a toolset not involved with the visual version is ok, needed to build:
            self.client.run('install Hello/1.2.0@user/testing'
                            ' -s compiler="Visual Studio" '
                            ' -s compiler.version=15 -s compiler.toolset=v141_clang_c2 '
                            '--build missing')
    def test_toolset_visual_incompatibility(self):
        """vs_toolset_incompatible(): native VS 14 binary does NOT satisfy
        VS 15 + toolset v140."""
        # By default is the same to build with native visual or the toolchain
        self._export("Hello", "1.2.0", package_id_text="self.info.vs_toolset_incompatible()",
                     channel="user/testing",
                     settings='"compiler"',
                     )
        self.client.run('install Hello/1.2.0@user/testing '
                        ' -s compiler="Visual Studio" '
                        ' -s compiler.version=14 --build')
        # Should NOT have binary available
        error = self.client.run('install Hello/1.2.0@user/testing'
                                ' -s compiler="Visual Studio" '
                                ' -s compiler.version=15 -s compiler.toolset=v140',
                                ignore_error=True)
        self.assertTrue(error)
        self.assertIn("Missing prebuilt package for 'Hello/1.2.0@user/testing'", self.client.out)
|
# Copyright 2014 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""One-off utilities."""
import logging
import time
from datetime import datetime
from pkg_resources import get_distribution, DistributionNotFound
import mutablerecords
def LogEveryNToLogger(n, logger, level, message, *args):  # pylint: disable=invalid-name
  """Logs the given message every n calls to a logger.

  Args:
    n: Number of calls before logging.
    logger: The logger to which to log (falls back to the root logger).
    level: The logging level (e.g. logging.INFO).
    message: A message to log
    *args: Any format args for the message.

  Returns:
    A method that logs and returns True every n calls.
  """
  logger = logger or logging.getLogger()
  def _gen():  # pylint: disable=missing-docstring
    while True:
      # yield False for n calls, then emit one log record and yield True
      # fixed: range instead of xrange — works on both Python 2 and 3
      for _ in range(n):
        yield False
      logger.log(level, message, *args)
      yield True
  gen = _gen()
  return lambda: next(gen)
def LogEveryN(n, level, message, *args):  # pylint: disable=invalid-name
  """Log *message* at *level* every *n* calls using the default logger.

  Thin convenience wrapper: see LogEveryNToLogger for the returned callable.
  """
  return LogEveryNToLogger(n, None, level, message, *args)
def TimeMillis():  # pylint: disable=invalid-name
  """Return the current Unix time, truncated to whole milliseconds."""
  millis = time.time() * 1000
  return int(millis)
def convert_to_dict(obj):
  """Recursively convert namedtuples (and mutablerecords) to dicts.

  Containers are rebuilt with converted values; anything else is
  stringified via str().
  """
  if hasattr(obj, '_asdict'):
    obj = obj._asdict()
  elif isinstance(obj, mutablerecords.records.RecordClass):
    obj = {attr: getattr(obj, attr)
           for attr in type(obj).all_attribute_names}
  # Recursively convert values in dicts, lists, and tuples.
  if isinstance(obj, dict):
    # fixed: items() instead of iteritems() — works on both Python 2 and 3
    obj = {k: convert_to_dict(v) for k, v in obj.items()}
  elif isinstance(obj, list):
    obj = [convert_to_dict(value) for value in obj]
  elif isinstance(obj, tuple):
    obj = tuple(convert_to_dict(value) for value in obj)
  else:
    obj = str(obj)
  return obj
def get_version():
  """Return the version string of the 'openhtf' package.

  Note: the version number doesn't seem to get properly set when using ipython.
  """
  try:
    return get_distribution('openhtf')
  except DistributionNotFound:
    return 'Unknown - Perhaps openhtf was not installed via setup.py or pip.'
|
# -*- coding: utf-8 -*-
# ---
# @Software: PyCharm
# @File: treading_run_sql.py
# @Author: Leslie Cheung
# @E-mail: leslieswr0820@gmail.com
# @Site:
# @Time: 2020/9/15 11:48
# ---
from threading import Thread
from NTRMYY.util.account import PgSQLContextManager
from NTRMYY.util.Dict_date import *
from NTRMYY.util.LogUitl import *
import datetime as dt
import pandas as pd
import os,sys
log = Logger('auto.log', logging.ERROR, logging.DEBUG)
def Run_Sql_Month(list_time,list_func_name):
    '''
    Execute each dwd warehouse function once per month in list_time,
    writing per-failure and per-function summary rows to dwd.run_table_log.

    :param list_time: list of month strings, e.g. '202001'
    :param list_func_name: list of function names, e.g. "fun_dw_inp_drgs_patient_m"
    :return: None
    '''
    # try:
    for f_name in list_func_name:
        for t_time in list_time:
            print(list_time.index(t_time))  # progress: index of current month
            try:
                with PgSQLContextManager() as db_cursor:
                    start_time = dt.datetime.now()
                    sql = ''' select dwd."{f_name}"('{day_id}','{day_id}'); '''.format(day_id=t_time,f_name = f_name)
                    log.info("执行sql日期为:{}".format(t_time))
                    log.info(sql)
                    db_cursor.execute(sql)
                    end_date = dt.datetime.now()
                    log.info(f'执行完成时间为:{(end_date-start_time).seconds}s')
            except:
                # deliberate best-effort: any failure logs a 'Failed' row for
                # this month and moves on to the next one
                with PgSQLContextManager() as db_cursor:
                    sql_log = '''
                    insert into dwd.run_table_log(function_name,insert_time,status,start_date,end_date)
                    values('{function_name}',{insert_time},'{status_code}','{start_date}','{end_date}');'''.format(
                        function_name=f_name, insert_time='now()', status_code='Failed',
                        start_date=t_time, end_date=t_time
                    )
                    db_cursor.execute(sql_log)
                continue
        # per-function summary row spanning the whole month range
        # NOTE(review): written as 'Successed' even if some months failed above
        with PgSQLContextManager() as db_cursor:
            sql_log = '''
            insert into dwd.run_table_log(function_name,insert_time,status,start_date,end_date)
            values('{function_name}',{insert_time},'{status_code}','{start_date}','{end_date}');'''.format(
                function_name=f_name, insert_time='now()', status_code='Successed',
                start_date=list_time[0], end_date=list_time[-1]
            )
            db_cursor.execute(sql_log)
if __name__ == '__main__':
    # build the list of months to process
    list_time = Get_Time_Qj_30(list_time=[])
    # fetch the names of all dwd functions matching the 'fun_dwd_DU' prefix
    with PgSQLContextManager() as db_cursor:
        sql = '''
        select
        -- substring(specific_name from '%#"fun_dwd_D_______#"%' FOR '#'),
        substring(specific_name from '^.{16}'),
        routine_schema, ---数据库名
        specific_name, ----函数事件名
        routine_definition ---函数内容
        from information_schema.routines
        where
        routine_schema ='dwd'
        GROUP BY
        routine_schema, ---数据库名
        specific_name, ----函数事件名
        routine_definition
        having substring(specific_name from '^.{10}') = 'fun_dwd_DU'
        '''
        db_cursor.execute(sql)
        result = db_cursor.fetchall()
    # df = pd.DataFrame(result)
    # df.columns = ['result', 'a', 'b', 'c']
    # list holding the function names to run
    list_function_name = []
    for i in range(len(result)):
        list_function_name.append(result[i][0])
    # for row in df.itertuples(index=True, name='Pandas'):
    #     print(getattr(row, 'result'))
    #     list_function_name.append(getattr(row, 'result'))
    n = 5  # batch size: functions handled per worker thread
    list_result = [list_function_name[i:i + n] for i in range(0, len(list_function_name), n)]
    p_lst = []
    # one thread per batch; each thread runs all months for its functions
    for arg in list_result:
        print(arg)
        p = Thread(target=Run_Sql_Month, args=(list_time,arg))
        p.start()
        p_lst.append(p)
# Call the given function twice with the same argument.
def do_twice(f, k):
    for _ in range(2):
        f(k)

# Demo callback: always prints 'spam', ignoring its argument.
def print_spam(v):
    print('spam')

# Demo: prints 'spam' two times.
do_twice(print_spam, 1)
# Runs a function twice
# func: functional object
# arg: argument passed to the function
def do_twice(func, arg):
    """Invoke *func* with *arg* two times."""
    for _ in (0, 1):
        func(arg)
import pytest
import pandas as pd
from sqlalchemy import create_engine
from pandas import DataFrame
# Load the movie metadata once at import time; the top_10_* helpers below all
# query the in-memory SQLite copy of it.
# (local variable renamed from `input`, which shadowed the builtin of the same name)
movies_df = pd.read_csv("../data/movie_metadata.csv")
# in-memory SQLite engine shared by the query functions below
engine = create_engine('sqlite://', echo=False)
# load the dataframe into a table named `movie`
db = movies_df.to_sql('movie', con=engine)
# smoke-check that the table is queryable
db_tables = engine.execute("SELECT * FROM movie").fetchall()
print("Your csv file has loaded to in-memory SQLite database, and the first three records")
def test_top_10_genres():
    """Cross-check top_10_genres() against an independent pandas computation."""
    # use pandas to select columns and do the data manipulation rather than sql
    df = pd.read_csv("../data/movie_metadata.csv")
    # .copy() so the profit column is added to a real frame, not a view
    # (avoids pandas SettingWithCopyWarning)
    df_genres_profit = df[['genres']].copy()
    df_genres_profit['profit'] = (df['gross'] - df['budget']).tolist()
    test_res = df_genres_profit.groupby(['genres']).sum().sort_values(by='profit', ascending=False).head(10)
    print(test_res)
    test_res.to_csv("./test-csv/top_10_genres_test.csv", encoding='utf-8')
    # BUG FIX: top_10_genres() returns None (DataFrame.to_csv with a path
    # returns None), and so did the old `testcsv`, so the original
    # `assert top_10_genres(df) == testcsv` compared None == None and could
    # never fail.  Compare the data written by the function instead.
    top_10_genres(df)
    actual = pd.read_csv("../result/top_10_genres.csv")
    expected = test_res.reset_index()
    expected.columns = actual.columns
    pd.testing.assert_frame_equal(actual, expected, check_dtype=False)
def test_top_10_directors_actors():
    """Cross-check top_10_directors_actors() against an independent pandas computation."""
    # select actor1, actor2, actor3 and director with profit separately with
    # pandas, then concatenate them before filtering and sorting
    df = pd.read_csv("../data/movie_metadata.csv")
    profit = (df['gross'] - df['budget']).tolist()

    def name_profit(column):
        # one (name, profit) frame per person column; .copy() avoids a
        # SettingWithCopyWarning when the profit column is added
        part = df[[column]].copy()
        part['profit'] = profit
        part.rename(columns={column: 'name'}, inplace=True)
        return part.dropna()

    # same concatenation order as the original: actor1, actor2, director, actor3
    df_actors_director_profit = pd.concat([
        name_profit('actor_1_name'),
        name_profit('actor_2_name'),
        name_profit('director_name'),
        name_profit('actor_3_name'),
    ])
    test_res = df_actors_director_profit.groupby(['name']).sum().sort_values(by='profit', ascending=False).head(10)
    print(test_res)
    test_res.to_csv("./test-csv/top_10_directors_actors_test.csv", encoding='utf-8')
    # BUG FIX: the original assert compared the None returned by to_csv on
    # both sides (None == None) and could never fail; compare the written
    # data instead.
    top_10_directors_actors(df)
    actual = pd.read_csv("../result/top_10_directors_actors.csv")
    expected = test_res.reset_index()
    expected.columns = actual.columns
    pd.testing.assert_frame_equal(actual, expected, check_dtype=False)
# the best actor, director pairs (up to 10) that have the highest IMDB ratings
# input: pandas dataframe from csv file
# checks the SQL-backed implementation against a pure-pandas computation
def test_top_10_actor_director_pair():
    """Cross-check top_10_actor_director_pair() against an independent pandas computation."""
    df = pd.read_csv("../data/movie_metadata.csv")
    # build one (actor_name, director_name, imdb_score) frame per actor
    # column (same order as the original concat), then combine
    parts = []
    for actor_col in ('actor_1_name', 'actor_2_name', 'actor_3_name'):
        # .copy() keeps rename off a view (avoids SettingWithCopyWarning)
        part = df[[actor_col, 'director_name', 'imdb_score']].copy()
        part.rename(columns={actor_col: 'actor_name'}, inplace=True)
        parts.append(part)
    df_actor_director_imdb = pd.concat(parts).dropna()
    test_res = df_actor_director_imdb.sort_values(by='imdb_score', ascending=False).drop_duplicates(
        subset=['actor_name', 'director_name'], keep='first').head(10)
    test_res.to_csv("./test-csv/top_10_actor_director_pair_test.csv", encoding='utf-8', index=False)
    # BUG FIX: the original assert compared None == None (to_csv with a path
    # returns None) and could never fail; compare the generated files' data.
    # NOTE(review): rows tied on imdb_score may legitimately order
    # differently between the SQL and pandas pipelines.
    top_10_actor_director_pair(df)
    actual = pd.read_csv("../result/top_10_actor_director_pair.csv")
    expected = pd.read_csv("./test-csv/top_10_actor_director_pair_test.csv")
    pd.testing.assert_frame_equal(actual, expected, check_dtype=False)
# the top 10 genres in decreasing order by their profitability
def top_10_genres(data):
    """Write the top 10 genres by total profit (gross - budget) to CSV.

    NOTE(review): `data` is unused -- the query runs against the
    module-level SQLite `engine`.  Returns the result of DataFrame.to_csv
    with a path, i.e. None.
    """
    # local variable renamed: the original clobbered the `data` parameter
    rows = engine.execute(
        "SELECT genres, SUM(gross-budget) FROM movie WHERE genres IS NOT NULL GROUP BY genres ORDER BY SUM(gross-budget) DESC LIMIT 10").fetchall()
    df = DataFrame.from_records(rows)
    df.columns = ['genres', 'profit']
    print("Top 10 genres by profit")
    print(df)
    res = df.to_csv("../result/top_10_genres.csv", encoding='utf-8', index=False)
    return res
# the top 10 directors/actors in decreasing order by their profitability
def top_10_directors_actors(data):
    """Write the 10 most profitable people (actors or directors) to CSV.

    Profit = gross - budget, summed over every movie a person appears in
    via any of the three actor columns or the director column.
    NOTE(review): `data` is unused -- the query runs against the
    module-level SQLite `engine`.  Returns the result of to_csv, i.e. None.
    """
    # local variable renamed: the original clobbered the `data` parameter
    rows = engine.execute(
        "SELECT actor_1_name as name, gross-budget FROM movie UNION ALL SELECT director_name as name, gross-budget FROM movie UNION ALL SELECT actor_2_name as name, gross-budget FROM movie UNION ALL SELECT actor_3_name as name, gross-budget FROM movie").fetchall()
    df = DataFrame.from_records(rows)
    df.columns = ['name', 'profit']
    print("Top 10 directors/actors by profit")
    top10_res = df.groupby(['name']).sum().sort_values(by='profit', ascending=False).head(10)
    print(top10_res)
    res = top10_res.to_csv("../result/top_10_directors_actors.csv", encoding='utf-8')
    return res
# the best actor, director pairs (up to 10) that have the highest IMDB ratings
def top_10_actor_director_pair(data):
    """Write the up-to-10 actor/director pairs with the highest IMDB score to CSV.

    NOTE(review): `data` is unused -- the query runs against the
    module-level SQLite `engine`.  Returns the result of to_csv, i.e. None.
    """
    # local variable renamed: the original clobbered the `data` parameter
    rows = engine.execute(
        "SELECT actor_1_name, director_name, imdb_score FROM movie UNION ALL SELECT actor_2_name, director_name, imdb_score FROM movie UNION ALL SELECT actor_3_name, director_name, imdb_score FROM movie").fetchall()
    df = DataFrame.from_records(rows)
    df.columns = ['actor_name', 'director_name','imdb_score']
    # keep the best-scoring row for each distinct (actor, director) pair
    top10_res = df.dropna().sort_values(by='imdb_score', ascending=False).drop_duplicates(subset=['actor_name', 'director_name'],keep='first').head(10)
    print("Top 10 actor-director pair by IMDB score")
    print(top10_res)
    res = top10_res.to_csv("../result/top_10_actor_director_pair.csv", encoding='utf-8', index=False)
    return res
|
# by Liana Hill
# last updated October 21, 2019
# this program plays a number guessing game with the user
import random
def main():
    """Play a 1-100 number guessing game repeatedly until the user declines."""
    # outer loop: one iteration per game, so the user can replay
    while True:
        # keep asking until a valid 'y'/'n' answer is given
        while True:
            answer = input("Do you want to play a game? Answer with 'y' for yes or 'n' for no")
            if answer == "y" or answer == "n":
                break
        if answer == "y":
            print("Great! Let's begin!")
        else:
            print("Okay. See you next time!")
            break
        computer_number = random.randint(1, 100)
        tries = 0
        # let the user guess until correct, answering high/low each time and
        # counting the number of tries
        while True:
            user_guess = int(input("Guess the computer's number."))
            tries += 1
            if user_guess == computer_number:
                print("Correct! Great job!")
                break
            # BUG FIX: the original used `>= 100 or <= 0`, which rejected the
            # valid guess 100 -- randint(1, 100) is inclusive of both ends.
            elif user_guess > 100 or user_guess < 1:
                print("Your number is not in the correct range of 1-100.")
            elif user_guess > computer_number:
                print("Your guess is too high.")
            else:
                print("Your guess is too low.")
        print("You guess the correct number in", tries, "tries")
# Guard the entry point so importing this module does not start the game.
if __name__ == "__main__":
    main()
|
from setuptools import setup, find_packages
# Obtain __version__ by exec'ing trix/version.py directly, so the version can
# be read at install time without importing the trix package itself.
with open('trix/version.py') as f:
    code = compile(f.read(), "trix/version.py", 'exec')
exec(code)
# Standard setuptools metadata; `version` comes from the exec above.
setup(
    name='trix',
    description='Next generation Trix. Detailed task control and statistics app for better'
                ' learning outcome.',
    version=__version__,  # noqa
    url='https://github.com/devilry/trix2',
    author='Tor Johansen, Espen Angell Kristiansen, Jonas Sandbekk',
    author_email='tor@appresso.no, espen@appresso.no, jonassandbekk@gmail.com',
    license='BSD',
    packages=find_packages(exclude=['ez_setup']),
    zip_safe=False,
    include_package_data=True,
    install_requires=[
        'setuptools',
        'Django==3.2.*',
        'django-crispy-forms<=1.14, >=1.13',
        'Markdown>=3.4.1',
        'PyYAML>=6.0',
        'django-extensions',
        'dj-database-url>=0.5.0',
        'cradmin_legacy>=4.1.2',
        'gunicorn',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Framework :: Django :: 3.2',
        'Intended Audience :: Developers',
        'License :: OSI Approved',
        'Operating System :: OS Independent',
        'Programming Language :: Python'
    ]
)
|
# food program
import random
from docx import Document
from datetime import date
import ast
import re
"""
This is a food menu program designed to create cooking menus, you can store recipes, add ingredients and more!
"""
class MenuMaker:
    """
    Weekly dinner-menu helper: stores recipes, builds menus (random or
    user-picked), saves them, and exports docx files and grocery lists.
    """

    def import_dinner_dict(self):
        '''
        Input : NA
        Output : res (Dictionary)
        Imports a dictionary of meals from data/dinners_dict.txt
        '''
        # the file holds the repr of a dict; literal_eval parses it safely
        with open("data/dinners_dict.txt", "r") as file:
            res = ast.literal_eval(file.read())
        return res

    def import_ingredient_profiles(self):
        '''
        Input : NA
        Output : res (dictionary)
        Imports a dictionary of ingredient flavor vectors from data/ingredients.txt
        '''
        with open('data/ingredients.txt', 'r') as f:
            res = ast.literal_eval(f.read())
        return res

    def __init__(self):
        # meal name -> list of ingredient names
        self.dinners = self.import_dinner_dict()
        self.days = ['Monday','Tuesday','Wednesday','Thursday','Friday','Saturday','Sunday']
        #not currently used
        #self.ingredient_profiles = self.import_ingredient_profiles()

    def return_menu(self):
        '''
        Input : NA
        Output : menu (list)
        Creates a list of randomly picked meals, one for each day in the days
        '''
        return [random.choice(list(self.dinners.keys())) for day in self.days]

    def add_meal(self):
        '''
        Input : NA
        Output : NA
        a function to manually add a meal/ingredients to "data/dinners_dict.txt"
        '''
        new_meal = input("what would you like to add?\n : ")
        ingredients = input("what ingredients do you use? (Please list as such ...,...,...)\n : ")
        ingredient_array = ingredients.split(",")
        print(ingredient_array)
        self.dinners[new_meal] = ingredient_array
        # persist the whole recipe dict back to disk
        with open("data/dinners_dict.txt", "w") as f:
            f.write(str(self.dinners))

    def generate_menu(self):
        '''
        Input : NA
        Output : NA
        a function to create your own weekly menu or randomly generate one.
        '''
        user_input = input("Do you want to create your own menu? (y/n)\n: ")
        if "y" in user_input.lower():
            menu = []
            for day in self.days:
                print(self.dinners.keys())
                user_input = input(f"type the recipe you want for {day}\n:")
                # re-prompt until a known recipe name is typed
                while user_input.lower() not in self.dinners.keys():
                    print(self.dinners.keys())
                    user_input = input(f"type the recipe you want for {day}\n:")
                menu.append(user_input)
        else:
            # BUG FIX: was a bare `return_menu()` (NameError); the method
            # lives on the instance.
            menu = self.return_menu()
        for x in range(len(menu)):
            print(self.days[x] + ': ' + menu[x])
        save = input('does this menu look good to you? (y/n) to save.')
        if save.lower() == "y":
            with open('data/saved_menus.txt', 'a') as f:
                f.write(str(menu) + '|' + str(date.today()) + "\n")

    def read_saved_menus(self):
        '''
        Input : NA
        Output : saved_menus (dictionary)
        function to read saved menus from saved_menus.txt return format {date-index: menu}
        '''
        saved_menus = {}
        index = 0
        # with-statement added: the original file handle was never closed
        with open('data/saved_menus.txt', 'r') as f:
            for line in f:
                menu = []
                seperate = line.split("|")
                if len(seperate) > 1:
                    # key is "<date>-<index>" so duplicate dates stay distinct
                    # (local renamed from `date`, which shadowed datetime.date)
                    # NOTE(review): [:-2] drops the newline plus the date's
                    # last character -- confirm that is intended.
                    if "\n" in seperate[1]:
                        date_key = seperate[1][:-2] + f"-{index}"
                    else:
                        date_key = seperate[1] + f"{index}"
                    index += 1
                    # seperate[0] is the str() of a list: strip the outer
                    # brackets and the per-item quotes/spaces
                    m = seperate[0][1:-1]
                    mm = m.split(',')
                    for item in mm:
                        menu.append(item.strip("' "))
                    saved_menus[date_key] = menu
        return saved_menus

    def pick_saved_menu(self):
        '''
        Input : NA
        Output : menu (list)
        Read saved menus and pick a key, then returns a list of meals
        '''
        # BUG FIX: was a bare `read_saved_menus()` (NameError).
        menus = self.read_saved_menus()
        menu_index = []
        i = 0
        for key, val in menus.items():
            print(i, key, val)
            menu_index.append(key)
            i += 1
        pick_meal = input('Which meal would you like to use? pick corrisponding number (0-x) \n: ')
        menu = menus[menu_index[int(pick_meal)]]
        return menu

    def create_docx(self, menu):
        '''
        Input : menu (dict of saved menus, as returned by read_saved_menus)
        Output : NA
        creates a word document (.docx) from a saved menu or generated one.
        '''
        # NOTE(review): `menu.values()` means this expects the dict from
        # read_saved_menus and only ever writes the FIRST saved menu
        # (menu[0]) -- confirm that is the intent.
        menu = [item for item in menu.values()]
        doc = Document()
        p = doc.add_paragraph()
        print(menu)
        # BUG FIX: was range(6), which silently dropped Sunday from the export
        for day in range(len(self.days)):
            p.add_run(self.days[day] + ": " + menu[0][day] + "\n")
        doc.save(str(date.today()) + "menu.docx")
        with open("data/menus.txt", "a") as f:
            f.write(str(date.today()) + str(menu))

    def create_grocery_list(self, menu):
        '''
        Input : menu (list of meal names)
        Output : grocery_list (list)
        Returns a list of all the ingredients from a selected menu.
        '''
        grocery_list = []
        for meal in menu:
            grocery_list += [ingredient for ingredient in self.dinners[meal]]
        return grocery_list

    def terminal_interface(self):
        """Minimal text UI dispatching to the menu features."""
        print('''
        Welcome to the Food Menu Program!!!
        type "menu" to create a menu
        type "docx" to create a word doc for a menu
        type "groceries" to create grocery list
        type "exit" to exit
        ''')
        user_input = input(": ")
        # BUG FIX: all three branches called the methods as bare names
        # (NameError); they must be invoked on self.
        if user_input.lower() == "menu":
            self.generate_menu()
        elif user_input.lower() == "docx":
            # NOTE(review): create_docx requires the saved-menus dict; the
            # original called it with no argument at all (TypeError).
            self.create_docx(self.read_saved_menus())
        elif user_input.lower() == "groceries":
            self.create_grocery_list(self.pick_saved_menu())

    # irrelevant stuff below
    # flavor dimensions for meal ingredients (class attribute)
    flavor_profile = ['sweet','sour','salt','bitter','acidic','basic','savory','hotness','spiciness','oily','minty'
                      ,'astringent','starchiness','horseradish','creamy','earthy']

    def meal_to_vec(self, meal):
        '''
        Convert a meal's ingredient list into one aggregate flavor vector.
        NOTE(review): relies on self.ingredient_profiles, which __init__
        leaves commented out -- enable it there before calling this.
        '''
        # BUG FIX: dinners / ingredient_profiles / flavor_profile were read as
        # bare names (NameError); they are instance/class attributes.
        res_profile = [0] * len(self.flavor_profile)
        profiles = []
        for ingredient in self.dinners[meal]:
            if ingredient in self.ingredient_profiles.keys():
                print(ingredient, self.ingredient_profiles[ingredient])
                profiles.append(self.ingredient_profiles[ingredient])
            else:
                # unknown ingredient: interactively build and persist a profile
                new_ingredient_profile = []
                print(ingredient)
                for flavor in range(len(self.flavor_profile)):
                    profile = input(f"how {self.flavor_profile[flavor]} is this? (0-1)")
                    new_ingredient_profile.append(float(profile))
                self.ingredient_profiles[ingredient] = new_ingredient_profile
                # BUG FIX: file.write needs a str, not a dict
                with open('data/ingredient.txt', 'w') as f:
                    f.write(str(self.ingredient_profiles))
        for item in range(len(profiles)):
            for flavor in range(len(res_profile)):
                res_profile[flavor] += profiles[item][flavor]
        # BUG FIX: the original `for item in res_profile: item = item/...`
        # rebound a loop variable and never changed the list; average properly.
        res_profile = [total / len(profiles) for total in res_profile]
        print(res_profile)
#meal_to_vec("testing")
#print(return_menu())
|
#am10182
#database project : please read documentaion.txt for more info
#IMPORTS
import operator
import sys
import re
import numpy as np
import time
from BTrees.OOBTree import *
# GLOBAL DB VARIABLES
tables = {} #entire database: table name -> {column name -> numpy array}
hashtIndexes = {} #global hashtable indexes: table name -> {column name -> {value -> [row positions]}}
btreeIndexes = {} #global btree indexes: table name -> {column name -> OOBTree of value -> [row positions]}
# HELPER DEFINITIONS
def removeColumn(tabl):
    """Return a copy of *tabl* with the entry at index 1 dropped from every column.

    Note: despite the name, this removes one row position (index 1) from each
    column array, not a whole column.
    """
    return {name: np.delete(values, 1) for name, values in tabl.items()}
def fileToStructND(fileName):
    """Load a '|'-delimited file into a numpy structured array.

    Column types are sniffed from the first data row: any field that parses
    as a number becomes float64 ('f8'), everything else a 25-char string
    ('U25').  The header row supplies the field names.
    """
    with open(fileName, 'r') as fh:
        fh.readline()  # skip the header row
        sample_fields = fh.readline().split('|')
    colTypes = []
    for field in sample_fields:
        # a field that int() accepts is also accepted by float(), so a single
        # float() probe classifies numerics exactly as the int/float pair did
        try:
            float(field)
            colTypes.append('f8')
        except ValueError:
            colTypes.append('U25')
    return np.genfromtxt(fileName, dtype=colTypes, delimiter='|', names=True)
def inputFromFile(fileName):
    """Read a '|'-delimited file and return it as {column name: ndarray}."""
    structured = fileToStructND(fileName)
    return {name: structured[name] for name in structured.dtype.names}
def writeToFile(fileName,tableName):
    """Write the global table `tables[tableName]` to *fileName* as '|'-delimited text.

    First line is the column names, then one line per row.
    NOTE(review): the signature is (fileName, tableName), but main's
    outputtofile branch calls writeToFile(tablename, filename) -- the
    arguments appear swapped at that call site; confirm which is intended.
    """
    tbl = tables[tableName]
    # BUG FIX 1: the per-row separator counter was reset inside the column
    # loop, so every data row ended with a trailing '|' (the header logic was
    # correct); '|'.join produces the intended layout for header and rows.
    # BUG FIX 2: the file handle was never closed; use a with-statement.
    with open(fileName, "w") as f:
        f.write("|".join(tbl.keys()))
        f.write("\n")
        columns = list(tbl.values())
        # all columns share the same length; guard the empty-table case,
        # which crashed the original on list(tbl.keys())[0]
        nRows = columns[0].size if columns else 0
        for i in range(nRows):
            f.write("|".join(str(col[i]) for col in columns))
            f.write("\n")
def select(tableVar,cond):
    """Evaluate one relational condition string against a table.

    Supported shapes (the caller strips all whitespace before calling):
      case 3:  col>5
      case 2:  col[+3]>5    -- arithmetic op in brackets applies to the column
      case 1:  7>col[+3]    -- constant on the left
    (The original sketched a "case 4" -- 5>col -- but its guard was the
    hard-coded constant True, so it was unreachable; bare conditions are
    treated as case 3 with the column on the left.)

    NOTE(review): as in the original, the comparison is always evaluated as
    `column RelOp constant`, even when the constant was written on the left
    ("7>x" is evaluated as x>7, not 7>x).  Preserved as-is.

    :param tableVar: dict mapping column name -> numpy ndarray
    :param cond: condition string, e.g. "qty[+3]>5"
    :return: boolean ndarray, True where a row satisfies the condition
    """
    # Locate the relational operator once.  Two-character operators are
    # listed first so ">=" is not mis-read as ">".
    # BUG FIX: the original's chained regexes sliced off only ONE character
    # for two-character operators (the unused `x = len(RelOp)` showed the
    # intent), producing values like "=5" that crashed float().
    relop_match = re.search(">=|<=|!=|>|<|=", cond)
    RelOp = relop_match.group(0)
    left = cond[:relop_match.start()]
    right = cond[relop_match.end():]

    ConOp = ""
    numInBracks = ""
    if "[" in right:    # case 1: constant RelOp col[arith]
        numMain = left
        bracket = right.index("[")
        col = right[:bracket]
        ConOp = right[bracket + 1]
        numInBracks = right[bracket + 2:right.index("]")]
        case = 1
    elif "[" in left:   # case 2: col[arith] RelOp constant
        numMain = right
        bracket = left.index("[")
        col = left[:bracket]
        ConOp = left[bracket + 1]
        numInBracks = left[bracket + 2:left.index("]")]
        case = 2
    else:               # case 3: col RelOp constant
        col = left
        numMain = right
        case = 3

    # Move the bracketed constant to the other side of the comparison:
    # "id[+4]>8" becomes id > 8-4.  The operator mapping ('\\' means
    # multiply, anything else e.g. '*' means divide) mirrors the `ops`
    # table used by the join command.
    if case == 1 or case == 2:
        if ConOp == "+":
            numMain = float(numMain) - float(numInBracks)
        elif ConOp == "-":
            numMain = float(numMain) + float(numInBracks)
        elif ConOp == "\\":
            numMain = float(numMain) * float(numInBracks)
        else:
            numMain = float(numMain) / float(numInBracks)
    numMain = float(numMain)

    column = tableVar[col]
    if RelOp == ">":
        return column > numMain
    elif RelOp == ">=":
        return column >= numMain
    elif RelOp == "<=":
        return column <= numMain
    elif RelOp == "<":
        return column < numMain
    elif RelOp == "!=":
        return column != numMain
    else:
        return column == numMain
def mask(boolArr, tableVar):
    """Return a copy of the table keeping only rows where *boolArr* is True.

    :param boolArr: boolean ndarray, one flag per row
    :param tableVar: dict mapping column name -> ndarray
    """
    return {name: tableVar[name][boolArr == True] for name in tableVar}
def joinHelp(tableVar1, tableVar2, tname1, tname2):
    """Build the skeleton of a joined table.

    Each column of either input appears under "<tableName>_<columnName>";
    the stored value is the column's FIRST element, which preserves the
    column's dtype for the join code that fills in real rows later.
    """
    skeleton = {}
    for source, prefix in ((tableVar1, tname1), (tableVar2, tname2)):
        for colName in source:
            skeleton[prefix + "_" + colName] = source[colName][0]
    return skeleton
def main():
#Reading from stdin
print("Welcome to database project. am10182 ddv246")
f= open("commands.txt","r")
for line in f:
print("\n")
print("-------------------------------------------------------------")
print("The Command Entered is: \n")
start = time.time()
mid = 0
print(line)
if line[0] == "\n":
print("Empty Command Entered \n") #do nothing, it's empty line, go back to stdin
elif "outputtofile(" in line:
line = line.replace(" ","")
temp = re.search("\(.+\)",line).group(0)
temp = temp[1:-1]
n = re.split(",",temp)
tablename = n[0]
filename = n[1]
writeToFile(tablename,filename)
mid = time.time()
elif "ash(" in line: #hashing implementation
#do hash
line = line.replace(" ","")
temp = re.search("\(.+\)",line).group(0)
temp = temp[1:-1]
n = re.split(",",temp)
tablename = n[0]
colname = n[1]
a = tables[tablename]
column = a[colname]
hashtable ={}
ct = 0
for k in column:
if k not in hashtable.keys():
lis = []
lis.append(ct)
hashtable[k] = lis
else:
lis = hashtable[k]
lis.append(ct)
hashtable[k] = lis
ct = ct+1
hashDirectory = {}
hashDirectory[colname] = hashtable
hashtIndexes[tablename] = hashDirectory
print("Status of Hash indexes are:")
for p in hashtable:
print(p," : ",hashtable[p])
elif "tree(" in line:
line = line.replace(" ","")
temp = re.search("\(.+\)",line).group(0)
temp = temp[1:-1]
n = re.split(",",temp)
tablename = n[0]
colname = n[1]
a = tables[tablename]
column = a[colname]
btree = OOBTree()
ct = 0
for k in column:
if k not in btree.keys():
lis = []
lis.append(ct)
btree[k] = lis
else:
lis = btree[k]
lis.append(ct)
btree[k] = lis
ct = ct+1
btreeDirectory = {}
btreeDirectory[colname] = btree
btreeIndexes[tablename] = btreeDirectory
print("Status of Btree table for each of key indexes are:")
for p in btree.keys():
print(p," : ",btree[p]) #do btree
else:
func = line.replace(" ", "")
func = func.replace("\n","")
func = re.sub('/.+', '/', func)
if "/" not in func[0:1]:
#doing a regexer
try:
tablename = re.search(".+:",func).group(0)[0:-1]
except:
print("Wrong format in command. pls retry")
continue
tablename = re.search(".+:",func).group(0)[0:-1]
command = re.search(':=.+?\(',func).group(0)[2:-1] # :to distinguish from other equal tos
params = re.search('\(.+\)',func).group(0)[1:-1]
if(command == "inputfromfile"): #done
newTable = inputFromFile(params)
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "select"):#done
if not (re.search(",",params)):
ans = tables[params] #means no condition like: R
tables[tablename] = ans
print(tables[tablename])
mid = time.time()
writeToFile(tablename,tablename)
elif not re.search("\(",params): #means one condition like : R,id = 4
tblName = re.search(".+,",params).group(0)[:-1]
tblVar = tables[tblName]
condn = re.search(",.+",params).group(0)[1:]
boolean = select(tblVar,condn)
ansTbl = mask(boolean,tblVar)
tables[tablename] = ansTbl
mid = time.time()
writeToFile(tablename,tablename)
elif re.search("\(.+\)",params): #means more than one condition
tblName = re.search(".+,",params).group(0)[:-1]
tblVar = tables[tblName]
conditions = re.findall("\(.+?\)",params)
booleans = []
for cond in conditions:
cond = cond[1:-1] #each of the conditions : time >30 after removing ()
boolean = select(tblVar,cond)
booleans.append(boolean)
finalBoolArr = booleans[0]
if re.search("or",params):
for x in booleans:
finalBoolArr = np.logical_or(x,finalBoolArr)
else:
for x in booleans:
finalBoolArr = np.logical_and(x,finalBoolArr)
ansTbl = mask(finalBoolArr,tblVar)
tables[tablename] = ansTbl
mid = time.time()
writeToFile(tablename,tablename)
else:
mid = time.time()
print("wrong format of select")
if(command == "project"): #done
temp = re.split(',',params)
sourceTable = tables[temp[0]]
temp.pop(0)
ansTable = {}
for x in temp:
col = sourceTable[x]
ansTable[x] = col
tables[tablename] = ansTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "avg"): #done
temp = re.split(",",params)
sourceTable = tables[temp[0]]
col = sourceTable[temp[1]]
avg = np.mean(col)
avg = np.array([avg],np.float)
ansTable = {}
newColName = "avg("+temp[1]+")"
ansTable[newColName] = avg
tables[tablename] = ansTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "sum"): #done
temp = re.split(",",params)
sourceTable = tables[temp[0]]
col = sourceTable[temp[1]]
avg = np.sum(col)
avg = np.array([avg],np.float)
ansTable = {}
newColName = "sum("+temp[1]+")"
ansTable[newColName] = avg
tables[tablename] = ansTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "count"): #done
temp = re.split(",",params)
sourceTable = tables[temp[0]]
col = sourceTable[temp[1]]
avg = col.size
avg = np.array([avg],np.float)
ansTable = {}
newColName = "count("+temp[1]+")"
ansTable[newColName] = avg
tables[tablename] = ansTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "sumgroup"): #done
temp = re.split(",",params)
sourceTable = tables[temp[0]]
sumColumn = sourceTable[temp[1]]
if len(temp) == 3:
groupbyColumn = sourceTable[temp[2]]
name_un=np.unique(groupbyColumn)
newgbc = []
newsc = []
for nm in name_un:
arr=np.array([(True if x==nm else False) for x in groupbyColumn])
if arr.any():
newgbc.append(nm)
newsc.append(np.sum(sumColumn[arr]))
newgbc = np.array(newgbc)
newsc = np.array(newsc)
sumColumn = "sum("+temp[1]+")"
newTable = {}
newTable[temp[2]] = newgbc
newTable[sumColumn] = newsc
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
else:
groupbyColumn = sourceTable[temp[2]]
groupbyColumn2 = sourceTable[temp[3]]
name_un=np.unique(groupbyColumn)
name2_un=np.unique(groupbyColumn2)
newgbc = []
newgbc2 = []
newsc = []
for nm in name_un:
for nm2 in name2_un:
arr=np.array([(True if x==nm and y==nm2 else False) for x,y in zip(groupbyColumn,groupbyColumn2)])
if arr.any():
newgbc.append(nm)
newgbc2.append(nm2)
newsc.append(np.sum(sumColumn[arr]))
newgbc = np.array(newgbc)
newgbc2 = np.array(newgbc2)
newsc = np.array(newsc)
sumColumn = "sum("+temp[1]+")"
newTable = {}
newTable[temp[2]] = newgbc
newTable[temp[3]] = newgbc2
newTable[sumColumn] = newsc
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "avggroup"):#done
temp = re.split(",",params)
sourceTable = tables[temp[0]]
sumColumn = sourceTable[temp[1]]
if len(temp) == 3: #oneparam
groupbyColumn = sourceTable[temp[2]]
name_un=np.unique(groupbyColumn)
newgbc = []
newsc = []
for nm in name_un:
arr=np.array([(True if x==nm else False) for x in groupbyColumn])
if arr.any():
newgbc.append(nm)
newsc.append(np.mean(sumColumn[arr]))
newgbc = np.array(newgbc)
newsc = np.array(newsc)
sumColumn = "avg("+temp[1]+")"
newTable = {}
newTable[temp[2]] = newgbc
newTable[sumColumn] = newsc
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
else: #means groupby 2 cols
groupbyColumn = sourceTable[temp[2]]
groupbyColumn2 = sourceTable[temp[3]]
name_un=np.unique(groupbyColumn)
name2_un=np.unique(groupbyColumn2)
newgbc = []
newgbc2 = []
newsc = []
for nm in name_un:
for nm2 in name2_un:
arr=np.array([(True if x==nm and y==nm2 else False) for x,y in zip(groupbyColumn,groupbyColumn2)])
if arr.any():
newgbc.append(nm)
newgbc2.append(nm2)
newsc.append(np.mean(sumColumn[arr]))
newgbc = np.array(newgbc)
newgbc2 = np.array(newgbc2)
newsc = np.array(newsc)
sumColumn = "mean("+temp[1]+")"
newTable = {}
newTable[temp[2]] = newgbc
newTable[temp[3]] = newgbc2
newTable[sumColumn] = newsc
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "countgroup"): #done
temp = re.split(",",params)
sourceTable = tables[temp[0]]
sumColumn = sourceTable[temp[1]]
if len(temp) == 3: #oneparam
groupbyColumn = sourceTable[temp[2]]
name_un=np.unique(groupbyColumn)
newgbc = []
newsc = []
for nm in name_un:
arr=np.array([(True if x==nm else False) for x in groupbyColumn])
if arr.any():
newgbc.append(nm)
newsc.append(len(sumColumn[arr]))
newgbc = np.array(newgbc)
newsc = np.array(newsc)
sumColumn = "count("+temp[1]+")"
newTable = {}
newTable[temp[2]] = newgbc
newTable[sumColumn] = newsc
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
else: #means groupby 2 cols
groupbyColumn = sourceTable[temp[2]]
groupbyColumn2 = sourceTable[temp[3]]
name_un=np.unique(groupbyColumn)
name2_un=np.unique(groupbyColumn2)
newgbc = []
newgbc2 = []
newsc = []
for nm in name_un:
for nm2 in name2_un:
arr=np.array([(True if x==nm and y==nm2 else False) for x,y in zip(groupbyColumn,groupbyColumn2)])
if arr.any():
newgbc.append(nm)
newgbc2.append(nm2)
newsc.append(len(sumColumn[arr]))
newgbc = np.array(newgbc)
newgbc2 = np.array(newgbc2)
newsc = np.array(newsc)
sumColumn = "count("+temp[1]+")"
newTable = {}
newTable[temp[2]] = newgbc
newTable[temp[3]] = newgbc2
newTable[sumColumn] = newsc
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "sort"): #done
sortTableName = re.split(",",params)[0]
sortByColumns = re.split(",",params)[1:]
sortTableND = fileToStructND(sortTableName)
sortTableND = np.sort(sortTableND, order=sortByColumns)
table = {}
for i in sortTableND.dtype.names:
table[i] = sortTableND[i]
tables[tablename] = table
mid = time.time()
writeToFile(tablename,tablename)
if(command == "movsum"): #done
temp = re.split(",",params)
movingTable = tables[temp[0]]
movingColumn = movingTable[temp[1]]
movingNo = int(temp[2])
x = np.cumsum(movingColumn)
y = np.zeros(x.size)
for i in range(0,movingNo):
y[i] = x[i]
for i in range(0,x.size):
if i >= movingNo:
y[i]=x[i]-x[i-movingNo]
movingTable["mov_sum"] = y
tables[tablename] = movingTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "movavg"): #done
temp = re.split(",",params)
movingTable = tables[temp[0]]
movingColumn = movingTable[temp[1]]
movingNo = int(temp[2])
x = np.cumsum(movingColumn)
y = np.zeros(x.size)
for i in range(0,movingNo):
y[i] = float(x[i])/float(i+1)
for i in range(0,x.size):
if i >= movingNo:
y[i]=float(x[i]-x[i-movingNo])/float(movingNo)
movingTable["mov_avg"] = y
tables[tablename] = movingTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "join"): #done
#first join the two tables.
temp = re.split(',',params)
table1 = tables[temp[0]]
table2 = tables[temp[1]]
conditions = temp[2]
newTable = joinHelp(table1,table2,temp[0],temp[1]) #skeleton of new table
#todo : get conditions
#(R1.qty[+3] > S.Q[-3]) and (R1.saleid = S.saleid)
#table.attribute [arithop constant] relop table.attribute [artithop con- stant]
t1columns = []
t2columns = []
relOperators = []
leftNums = []
rightNums = []
arithOpsL = []
arithOpsR = []
def helper(cond):
left = ""
right = ""
if re.search("!=",cond):
left = re.search(".+!=",cond).group(0)
left = left[:-1]
right = re.search("!=.+",cond).group(0)
right = right[1:]
relOperators.append("!=")
elif re.search(">=",cond):
relOperators.append(">=")
left = re.search(".+>=",cond).group(0)
left = left[:-1]
right = re.search(">=.+",cond).group(0)
right = right[1:]
elif re.search("<=",cond):
relOperators.append("<=")
left = re.search(".+<=",cond).group(0)
left = left[:-1]
right = re.search("<=.+",cond).group(0)
right = right[1:]
elif re.search("=",cond):
relOperators.append("=")
left = re.search(".+=",cond).group(0)
left = left[:-1]
right = re.search("=.+",cond).group(0)
right = right[1:]
elif re.search("<",cond):
relOperators.append("<")
left = re.search(".+<",cond).group(0)
left = left[:-1]
right = re.search("<.+",cond).group(0)
right = right[1:]
else:
relOperators.append(">")
left = re.search(".+>",cond).group(0)
left = left[:-1]
right = re.search(">.+",cond).group(0)
right = right[1:]
#R1.qty[+3] > S.Q[-3] and R1.saleid = S.saleid
# t1columns.append(re.search(".+\.",left).group(0)[:-1])
# t2columns.append(re.search(".+\.",right).group(0)[:-1])
if re.search("\[",left):
t1columns.append(re.search("\..+\[",left).group(0)[1:-1])
arithOpsL.append(re.search("\[.+\]",left).group(0)[1])
leftNums.append(re.search("\[.+\]",left).group(0)[2:-1])
else:
t1columns.append(re.search("\..+",left).group(0)[1:])
arithOpsL.append("+")
leftNums.append("0")
if re.search("\[",right):
t2columns.append(re.search("\..+\[",right).group(0)[1:-1])
arithOpsR.append(re.search("\[.+\]",right).group(0)[1])
rightNums.append(re.search("\[.+\]",right).group(0)[2:-1])
else:
t2columns.append(re.search("\..+",right).group(0)[1:])
arithOpsR.append("+")
rightNums.append("0")
if "and" in conditions:
conditions = re.split("and",temp[2])
for con in conditions:
con = con[1:-1] #removing bracks
helper(con)
else:
helper(conditions)
temp1 = table1[list(table1.keys())[0]]
temp2 = table2[list(table2.keys())[0]]
x = temp1.size
y = temp2.size
ops = { "+": operator.add, "-": operator.sub, "*":operator.truediv, "\\":operator.mul } # etc.
for i in range(0,x):
#get record of t1
rowt1 = {}
for k in table1.keys():
val = table1[k]
val = val[i]
rowt1[k] = val
# print("entering t2")
for j in range(0,y):
rowt2 = {}
for l in table2.keys():
val = table2[l]
val = val[j]
rowt2[l] = val
#need to check if conditions hold good for row 1 and row2
boolVal = True
count = 0
for op in relOperators:
templ1 = ops[arithOpsL[count]](float(rowt1[t1columns[count]]),float(leftNums[count]))
tempr1 = ops[arithOpsR[count]](float(rowt2[t2columns[count]]),float(rightNums[count]))
if(op==">"):
boolVal = boolVal and templ1 > tempr1
if(op=="<"):
boolVal = boolVal and templ1 < tempr1
if(op=="="):
boolVal = boolVal and templ1 == tempr1
if(op==">="):
boolVal = boolVal and templ1 >= tempr1
if(op=="<="):
boolVal = boolVal and templ1 <= tempr1
if(op=="!="):
boolVal = boolVal and templ1 != tempr1
count = count+1
if(boolVal == True):
listnames = list(newTable.keys())
listnamesrt1 = list(rowt1.keys())
listnamesrt2 = list(rowt2.keys())
ct = 0
r1ct = 0
r2ct = 0
# print(listnames)
# print(listnamesrt1)
# print(listnamesrt2)
for p in rowt1.keys():
newTable[listnames[ct]]=np.append(newTable[listnames[ct]],rowt1[listnamesrt1[r1ct]])
ct = ct+1
r1ct = r1ct+1
for q in rowt2.keys():
newTable[listnames[ct]]=np.append(newTable[listnames[ct]],rowt2[listnamesrt2[r2ct]])
ct = ct+1
r2ct = r2ct+1
newTable = removeColumn(newTable) #removing redundant record one used to preserve datatype
tables[tablename] = newTable
mid = time.time()
writeToFile(tablename,tablename)
if(command == "concat"): #Q5 := concat(Q4, Q2)
temp = re.split(",",params)
concatTable1 = tables[temp[0]]
concatTable2 = tables[temp[0]]
x = list(concatTable1.keys())
y = list(concatTable2.keys())
#checking if schema matches
if(x==y):
for key in x:
concatTable1[key] = np.append(concatTable1[key],concatTable2[key])
tables[tablename] = concatTable1
mid = time.time()
writeToFile(tablename,tablename)
else:
print("schema of given tables dont match")
else:
print("") #!/usr/bin/python
# -*- coding: utf-8 -*-
# am10182
# ddv246
# database project : please read documentaion.txt for more info
import operator
import sys
import re
import numpy as np
import time
from BTrees.OOBTree import *
tables = {} # entire database
hashtIndexes = {} # global hashtable indexes
btreeIndexes = {} # global btree indexes
def removeColumn(tabl):
    """Drop the dtype-preserving placeholder row from every column.

    joinHelp seeds each column of a join skeleton with one sample value
    at index 0 so numpy keeps the correct dtype; this strips that seed
    row. (The original deleted index 1 — the first *real* joined row —
    instead of the index-0 placeholder.)

    Args:
        tabl: dict mapping column name -> numpy array

    Returns:
        New dict with the same keys and the first element removed from
        every column.
    """
    newTbl = {}
    for name in tabl:
        # index 0 holds the placeholder inserted by joinHelp
        newTbl[name] = np.delete(tabl[name], 0)
    return newTbl
def fileToStructND(fileName):
    """Load a '|'-delimited text file (with a header row) into a NumPy
    structured array, inferring column types from the first data row.

    Columns whose first value parses as a number become 'f8' (float64);
    everything else becomes 'U25' (25-char unicode).

    Args:
        fileName: path to the '|'-delimited file.

    Returns:
        numpy structured array with fields named after the header row.
    """
    colTypes = []
    # Sniff types from the first record only. 'with' guarantees the
    # handle is closed even if parsing raises (the original leaked it
    # on any exception before file.close()).
    with open(fileName, 'r') as sniff:
        sniff.readline()  # skip header line
        firstRecord = sniff.readline().split('|')
    for field in firstRecord:
        # Any string accepted by int() is also accepted by float(), so a
        # single float() probe replaces the original int-then-float pair.
        try:
            float(field)
            colTypes.append('f8')
        except ValueError:
            colTypes.append('U25')
    # genfromtxt re-reads the whole file with the inferred dtypes.
    data = np.genfromtxt(fileName, dtype=colTypes, delimiter='|',
                         names=True)
    return data
def inputFromFile(fileName):
    """Read a '|'-delimited file and return it as a dict mapping each
    column name to its numpy array of values.
    """
    structured = fileToStructND(fileName)
    return {name: structured[name] for name in structured.dtype.names}
def writeToFile(fileName, tableName):
    """Serialize table *tableName* from the global `tables` registry to
    *fileName* as '|'-delimited text: one header row of column names,
    then one row per record with values rendered via str().

    Args:
        fileName: output path (overwritten).
        tableName: key into the global `tables` dict.
    """
    tbl = tables[tableName]
    # 'with' closes the handle even on error — the original never
    # closed it, so output could stay unflushed on interpreter exit.
    with open(fileName, 'w') as f:
        columns = list(tbl)
        # '|'.join replaces the original manual separator counting.
        f.write('|'.join(columns))
        f.write('\n')
        # All columns are parallel arrays; use the first for row count.
        rowCount = tbl[columns[0]].size
        for i in range(rowCount):
            f.write('|'.join(str(tbl[c][i]) for c in columns))
            f.write('\n')
def _relop(cond):
    """Return the relational operator embedded in *cond*.

    Two-character operators are tested before their one-character
    prefixes so '>=' is never misread as '>'; '=' is the fallback.
    """
    if re.search('>=', cond):
        return '>='
    elif re.search('<=', cond):
        return '<='
    elif re.search('!=', cond):
        return '!='
    elif re.search('<', cond):
        return '<'
    elif re.search('>', cond):
        return '>'
    return '='


def select(tableVar, cond):
    """Evaluate one condition against a table and return a boolean
    ndarray marking the matching rows.

    Supported shapes (whitespace already stripped by the caller):
        x > 5        column relop constant                 (case 3)
        x[+3] > 5    column-with-arithmetic relop constant (case 2)
        7 > x[+3]    constant relop column                 (case 1)

    Args:
        tableVar: dict mapping column name -> numpy array.
        cond: condition string, e.g. 'qty>=10' or 'qty[+3]>5'.

    Returns:
        numpy boolean array, True where the row satisfies *cond*.

    Fixes vs. the original:
      * two-character operators: the original computed x = len(RelOp)
        but sliced with [1:-1], leaving a stray '=' in the extracted
        column/number (so 'x>=5' crashed on float('=5'));
      * case 1 never mirrored the operator, inverting the result;
      * the unreachable 'case 4' (behind `if True:`) is removed.
    """
    RelOp = _relop(cond)
    ConOp = ''
    numInBracks = ''
    # Splitting on the detected operator replaces the fragile per-case
    # regex extraction of the original.
    lhs, rhs = cond.split(RelOp, 1)
    if re.search(".+>.+\[|.+<.+\[|.+=.+\[|.+!.+\[", cond):
        # case 1: constant relop column[arith], e.g. 7 > x[+3]
        case = 1
        numMain = lhs
        col = re.search(".+\[", rhs).group(0)[:-1]
        ConOp = re.search("\[.+\]", rhs).group(0)[1]
        numInBracks = re.search("\[.+\]", rhs).group(0)[2:-1]
        # The column is on the RIGHT here, but the comparison below
        # always computes `column RelOp numMain`, so mirror the operator
        # (7 > x  <=>  x < 7).
        RelOp = {'>': '<', '<': '>', '>=': '<=', '<=': '>='}.get(RelOp, RelOp)
    elif re.search("\]>|\]<|\]=|\]!", cond):
        # case 2: column[arith] relop constant, e.g. x[+3] > 5
        case = 2
        col = re.search(".+\[", lhs).group(0)[:-1]
        ConOp = re.search("\[.+\]", lhs).group(0)[1]
        numInBracks = re.search("\[.+\]", lhs).group(0)[2:-1]
        numMain = rhs
    else:
        # case 3: column relop constant, e.g. x > 5
        case = 3
        col = lhs
        numMain = rhs
    # Move the bracketed arithmetic onto the constant side:
    # x + 3 > 8  ->  x > 8 - 3. In the command syntax '\' denotes
    # division (so transposing multiplies) and '*' multiplication.
    if case == 1 or case == 2:
        if ConOp == '+':
            numMain = float(numMain) - float(numInBracks)
        elif ConOp == '-':
            numMain = float(numMain) + float(numInBracks)
        elif ConOp == '\\':
            numMain = float(numMain) * float(numInBracks)
        else:
            numMain = float(numMain) / float(numInBracks)
    numMain = float(numMain)
    column = tableVar[col]
    if RelOp == '>':
        column = column > numMain
    elif RelOp == '>=':
        column = column >= numMain
    elif RelOp == '<=':
        column = column <= numMain
    elif RelOp == '<':
        column = column < numMain
    elif RelOp == '!=':
        column = column != numMain
    else:
        column = column == numMain
    return column
def mask(boolArr, tableVar):
    """Apply a boolean row mask to every column of a table.

    Args:
        boolArr: numpy boolean array (one entry per row).
        tableVar: dict mapping column name -> numpy array.

    Returns:
        New dict with the same keys, keeping only rows where the mask
        is True.
    """
    return {name: tableVar[name][boolArr == True] for name in tableVar.keys()}
def sort(structArray):
    # NOTE(review): unused stub — the 'sort' command in main() calls
    # np.sort(...) directly instead of this function. The comments below
    # look like implementation notes that were never acted on, and the
    # body is a throwaway assignment.
    # order=['age', 'height']
    # myData.dtype.names
    # use list(tuple) to make list.
    c = 6
def joinHelp(
    tableVar1,
    tableVar2,
    tname1,
    tname2,
    ):
    """Build the skeleton of a join result table.

    Each output column is named '<tablename>_<column>' and is seeded
    with the source column's first element so that numpy keeps the
    correct dtype when rows are appended later (the seed is removed
    again by removeColumn after the join).

    Args:
        tableVar1, tableVar2: dicts mapping column name -> numpy array.
        tname1, tname2: table names used as column-name prefixes.

    Returns:
        dict mapping prefixed column name -> scalar seed value.
    """
    skeleton = {}
    for prefix, source in ((tname1, tableVar1), (tname2, tableVar2)):
        for colName in source:
            # scalar seed preserves the column dtype
            skeleton[prefix + '_' + colName] = source[colName][0]
    return skeleton
def main():
    """Command-line driver for the mini database.

    Reads commands.txt line by line and executes each command
    (inputfromfile, select, project, avg/sum/count[, group variants],
    sort, movsum, movavg, join, concat, Hash/Btree index builds and
    outputtofile) against the global `tables` registry, timing each
    query and writing the result table to a file named after it.
    """
    # Reading from stdin
    print 'Welcome to database project. am10182 ddv246'
    f = open('commands.txt', 'r')
    for line in f:
        print '\n'
        print '-------------------------------------------------------------'
        print 'The Command Entered is: \n'
        start = time.time()
        # mid stays 0 unless a command produced output; checked below
        mid = 0
        print line
        if line[0] == '\n':
            print 'Empty Command Entered \n' # do nothing, it's empty line, go back to stdin
        elif 'outputtofile(' in line:
            line = line.replace(' ', '')
            temp = re.search('\\(.+\\)', line).group(0)
            temp = temp[1:-1]
            n = re.split(',', temp)
            tablename = n[0]
            filename = n[1]
            writeToFile(tablename, filename)
            mid = time.time()
        # substring match so both 'Hash(' and 'hash(' reach this branch
        elif 'ash(' in line:
            # hashing implementation
            # do hash
            line = line.replace(' ', '')
            temp = re.search('\\(.+\\)', line).group(0)
            temp = temp[1:-1]
            n = re.split(',', temp)
            tablename = n[0]
            colname = n[1]
            a = tables[tablename]
            column = a[colname]
            hashtable = {}
            ct = 0
            # map each column value -> list of row indices where it occurs
            for k in column:
                if k not in hashtable.keys():
                    lis = []
                    lis.append(ct)
                    hashtable[k] = lis
                else:
                    lis = hashtable[k]
                    lis.append(ct)
                    hashtable[k] = lis
                ct = ct + 1
            hashDirectory = {}
            hashDirectory[colname] = hashtable
            hashtIndexes[tablename] = hashDirectory
            print 'Status of Hash indexes are:'
            for p in hashtable:
                print (p, ' : ', hashtable[p])
        # substring match so both 'Btree(' and 'btree(' reach this branch
        elif 'tree(' in line:
            line = line.replace(' ', '')
            temp = re.search('\\(.+\\)', line).group(0)
            temp = temp[1:-1]
            n = re.split(',', temp)
            tablename = n[0]
            colname = n[1]
            a = tables[tablename]
            column = a[colname]
            btree = OOBTree()
            ct = 0
            # same value -> row-index-list mapping, but kept in a BTree
            for k in column:
                if k not in btree.keys():
                    lis = []
                    lis.append(ct)
                    btree[k] = lis
                else:
                    lis = btree[k]
                    lis.append(ct)
                    btree[k] = lis
                ct = ct + 1
            btreeDirectory = {}
            btreeDirectory[colname] = btree
            btreeIndexes[tablename] = btreeDirectory
            print 'Status of Btree table for each of key indexes are:'
            for p in btree.keys():
                print (p, ' : ', btree[p]) # do btree
        else:
            func = line.replace(' ', '')
            func = func.replace('\n', '')
            # '/' starts an inline comment; drop everything after it
            func = re.sub('/.+', '/', func)
            if '/' not in func[0:1]:
                # doing a regexer
                # general command shape: <tablename> := <command>(<params>)
                try:
                    tablename = re.search('.+:', func).group(0)[0:-1]
                except:
                    print 'Wrong format in command. pls retry'
                    continue
                tablename = re.search('.+:', func).group(0)[0:-1]
                command = re.search(':=.+?\(', func).group(0)[2:-1] # :to distinguish from other equal tos
                params = re.search('\\(.+\\)', func).group(0)[1:-1]
                if command == 'inputfromfile': # done
                    newTable = inputFromFile(params)
                    tables[tablename] = newTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'select': # done
                    if not re.search(',', params):
                        ans = tables[params] # means no condition like: R
                        tables[tablename] = ans
                        print tables[tablename]
                        mid = time.time()
                        writeToFile(tablename, tablename)
                    elif not re.search("\(", params):
                        # means one condition like : R,id = 4
                        tblName = re.search('.+,', params).group(0)[:-1]
                        tblVar = tables[tblName]
                        condn = re.search(',.+', params).group(0)[1:]
                        boolean = select(tblVar, condn)
                        ansTbl = mask(boolean, tblVar)
                        tables[tablename] = ansTbl
                        mid = time.time()
                        writeToFile(tablename, tablename)
                    elif re.search('\\(.+\\)', params):
                        # means more than one condition
                        tblName = re.search('.+,', params).group(0)[:-1]
                        tblVar = tables[tblName]
                        conditions = re.findall("\(.+?\)", params)
                        booleans = []
                        for cond in conditions:
                            cond = cond[1:-1] # each of the conditions : time >30 after removing ()
                            boolean = select(tblVar, cond)
                            booleans.append(boolean)
                        finalBoolArr = booleans[0]
                        # NOTE(review): an 'or' anywhere in params ORs *all*
                        # conditions; mixed and/or is not supported
                        if re.search('or', params):
                            for x in booleans:
                                finalBoolArr = np.logical_or(x,
                                        finalBoolArr)
                        else:
                            for x in booleans:
                                finalBoolArr = np.logical_and(x,
                                        finalBoolArr)
                        ansTbl = mask(finalBoolArr, tblVar)
                        tables[tablename] = ansTbl
                        mid = time.time()
                        writeToFile(tablename, tablename)
                    else:
                        mid = time.time()
                        print 'wrong format of select'
                if command == 'project': # done
                    temp = re.split(',', params)
                    sourceTable = tables[temp[0]]
                    temp.pop(0)
                    ansTable = {}
                    for x in temp:
                        col = sourceTable[x]
                        ansTable[x] = col
                    tables[tablename] = ansTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'avg': # done
                    temp = re.split(',', params)
                    sourceTable = tables[temp[0]]
                    col = sourceTable[temp[1]]
                    avg = np.mean(col)
                    # NOTE(review): np.float was removed in NumPy 1.24+;
                    # plain float is needed here (same in sum/count below)
                    avg = np.array([avg], np.float)
                    ansTable = {}
                    newColName = 'avg(' + temp[1] + ')'
                    ansTable[newColName] = avg
                    tables[tablename] = ansTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'sum': # done
                    temp = re.split(',', params)
                    sourceTable = tables[temp[0]]
                    col = sourceTable[temp[1]]
                    avg = np.sum(col)
                    avg = np.array([avg], np.float)
                    ansTable = {}
                    newColName = 'sum(' + temp[1] + ')'
                    ansTable[newColName] = avg
                    tables[tablename] = ansTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'count': # done
                    temp = re.split(',', params)
                    sourceTable = tables[temp[0]]
                    col = sourceTable[temp[1]]
                    avg = col.size
                    avg = np.array([avg], np.float)
                    ansTable = {}
                    newColName = 'count(' + temp[1] + ')'
                    ansTable[newColName] = avg
                    tables[tablename] = ansTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'sumgroup': # done
                    temp = re.split(',', params)
                    sourceTable = tables[temp[0]]
                    sumColumn = sourceTable[temp[1]]
                    if len(temp) == 3:
                        # group by one column: for each unique key, sum the
                        # target column over the matching rows
                        groupbyColumn = sourceTable[temp[2]]
                        name_un = np.unique(groupbyColumn)
                        newgbc = []
                        newsc = []
                        for nm in name_un:
                            arr = np.array([(True if x
                                    == nm else False) for x in
                                    groupbyColumn])
                            if arr.any():
                                newgbc.append(nm)
                                newsc.append(np.sum(sumColumn[arr]))
                        newgbc = np.array(newgbc)
                        newsc = np.array(newsc)
                        sumColumn = 'sum(' + temp[1] + ')'
                        newTable = {}
                        newTable[temp[2]] = newgbc
                        newTable[sumColumn] = newsc
                        tables[tablename] = newTable
                        mid = time.time()
                        writeToFile(tablename, tablename)
                    else:
                        # group by two columns: iterate the cross product of
                        # unique keys, keeping only combinations that occur
                        groupbyColumn = sourceTable[temp[2]]
                        groupbyColumn2 = sourceTable[temp[3]]
                        name_un = np.unique(groupbyColumn)
                        name2_un = np.unique(groupbyColumn2)
                        newgbc = []
                        newgbc2 = []
                        newsc = []
                        for nm in name_un:
                            for nm2 in name2_un:
                                arr = np.array([(True if x == nm and y
                                        == nm2 else False) for (x,
                                        y) in zip(groupbyColumn,
                                        groupbyColumn2)])
                                if arr.any():
                                    newgbc.append(nm)
                                    newgbc2.append(nm2)
                                    newsc.append(np.sum(sumColumn[arr]))
                        newgbc = np.array(newgbc)
                        newgbc2 = np.array(newgbc2)
                        newsc = np.array(newsc)
                        sumColumn = 'sum(' + temp[1] + ')'
                        newTable = {}
                        newTable[temp[2]] = newgbc
                        newTable[temp[3]] = newgbc2
                        newTable[sumColumn] = newsc
                        tables[tablename] = newTable
                        mid = time.time()
                        writeToFile(tablename, tablename)
                if command == 'avggroup': # done
                    temp = re.split(',', params)
                    sourceTable = tables[temp[0]]
                    sumColumn = sourceTable[temp[1]]
                    if len(temp) == 3: # oneparam
                        groupbyColumn = sourceTable[temp[2]]
                        name_un = np.unique(groupbyColumn)
                        newgbc = []
                        newsc = []
                        for nm in name_un:
                            arr = np.array([(True if x
                                    == nm else False) for x in
                                    groupbyColumn])
                            if arr.any():
                                newgbc.append(nm)
                                newsc.append(np.mean(sumColumn[arr]))
                        newgbc = np.array(newgbc)
                        newsc = np.array(newsc)
                        sumColumn = 'avg(' + temp[1] + ')'
                        newTable = {}
                        newTable[temp[2]] = newgbc
                        newTable[sumColumn] = newsc
                        tables[tablename] = newTable
                        mid = time.time()
                        writeToFile(tablename, tablename)
                    else:
                        # means groupby 2 cols
                        groupbyColumn = sourceTable[temp[2]]
                        groupbyColumn2 = sourceTable[temp[3]]
                        name_un = np.unique(groupbyColumn)
                        name2_un = np.unique(groupbyColumn2)
                        newgbc = []
                        newgbc2 = []
                        newsc = []
                        for nm in name_un:
                            for nm2 in name2_un:
                                arr = np.array([(True if x == nm and y
                                        == nm2 else False) for (x,
                                        y) in zip(groupbyColumn,
                                        groupbyColumn2)])
                                if arr.any():
                                    newgbc.append(nm)
                                    newgbc2.append(nm2)
                                    newsc.append(np.mean(sumColumn[arr]))
                        newgbc = np.array(newgbc)
                        newgbc2 = np.array(newgbc2)
                        newsc = np.array(newsc)
                        # NOTE(review): one-column variant names this column
                        # 'avg(...)' but this branch uses 'mean(...)' —
                        # inconsistent output schema; confirm intended
                        sumColumn = 'mean(' + temp[1] + ')'
                        newTable = {}
                        newTable[temp[2]] = newgbc
                        newTable[temp[3]] = newgbc2
                        newTable[sumColumn] = newsc
                        tables[tablename] = newTable
                        mid = time.time()
                        writeToFile(tablename, tablename)
                if command == 'countgroup': # done
                    temp = re.split(',', params)
                    sourceTable = tables[temp[0]]
                    sumColumn = sourceTable[temp[1]]
                    if len(temp) == 3: # oneparam
                        groupbyColumn = sourceTable[temp[2]]
                        name_un = np.unique(groupbyColumn)
                        newgbc = []
                        newsc = []
                        for nm in name_un:
                            arr = np.array([(True if x
                                    == nm else False) for x in
                                    groupbyColumn])
                            if arr.any():
                                newgbc.append(nm)
                                newsc.append(len(sumColumn[arr]))
                        newgbc = np.array(newgbc)
                        newsc = np.array(newsc)
                        sumColumn = 'count(' + temp[1] + ')'
                        newTable = {}
                        newTable[temp[2]] = newgbc
                        newTable[sumColumn] = newsc
                        tables[tablename] = newTable
                        mid = time.time()
                        writeToFile(tablename, tablename)
                    else:
                        # means groupby 2 cols
                        groupbyColumn = sourceTable[temp[2]]
                        groupbyColumn2 = sourceTable[temp[3]]
                        name_un = np.unique(groupbyColumn)
                        name2_un = np.unique(groupbyColumn2)
                        newgbc = []
                        newgbc2 = []
                        newsc = []
                        for nm in name_un:
                            for nm2 in name2_un:
                                arr = np.array([(True if x == nm and y
                                        == nm2 else False) for (x,
                                        y) in zip(groupbyColumn,
                                        groupbyColumn2)])
                                if arr.any():
                                    newgbc.append(nm)
                                    newgbc2.append(nm2)
                                    newsc.append(len(sumColumn[arr]))
                        newgbc = np.array(newgbc)
                        newgbc2 = np.array(newgbc2)
                        newsc = np.array(newsc)
                        sumColumn = 'count(' + temp[1] + ')'
                        newTable = {}
                        newTable[temp[2]] = newgbc
                        newTable[temp[3]] = newgbc2
                        newTable[sumColumn] = newsc
                        tables[tablename] = newTable
                        mid = time.time()
                        writeToFile(tablename, tablename)
                if command == 'sort': # done
                    sortTableName = re.split(',', params)[0]
                    sortByColumns = re.split(',', params)[1:]
                    # NOTE(review): re-reads the table from disk (first param
                    # is treated as a file name), not from `tables`
                    sortTableND = fileToStructND(sortTableName)
                    sortTableND = np.sort(sortTableND,
                            order=sortByColumns)
                    table = {}
                    for i in sortTableND.dtype.names:
                        table[i] = sortTableND[i]
                    tables[tablename] = table
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'movsum': # done
                    temp = re.split(',', params)
                    movingTable = tables[temp[0]]
                    movingColumn = movingTable[temp[1]]
                    movingNo = int(temp[2])
                    # moving sum via differences of the cumulative sum:
                    # y[i] = x[i] for i < window, else x[i] - x[i - window]
                    x = np.cumsum(movingColumn)
                    y = np.zeros(x.size)
                    for i in range(0, movingNo):
                        y[i] = x[i]
                    for i in range(0, x.size):
                        if i >= movingNo:
                            y[i] = x[i] - x[i - movingNo]
                    movingTable['mov_sum'] = y
                    tables[tablename] = movingTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'movavg': # done
                    temp = re.split(',', params)
                    movingTable = tables[temp[0]]
                    movingColumn = movingTable[temp[1]]
                    movingNo = int(temp[2])
                    # same cumulative-sum trick, divided by the (partial)
                    # window length to get the moving average
                    x = np.cumsum(movingColumn)
                    y = np.zeros(x.size)
                    for i in range(0, movingNo):
                        y[i] = float(x[i]) / float(i + 1)
                    for i in range(0, x.size):
                        if i >= movingNo:
                            y[i] = float(x[i] - x[i - movingNo]) \
                                / float(movingNo)
                    movingTable['mov_avg'] = y
                    tables[tablename] = movingTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'join': # done
                    # first join the two tables.
                    temp = re.split(',', params)
                    table1 = tables[temp[0]]
                    table2 = tables[temp[1]]
                    conditions = temp[2]
                    newTable = joinHelp(table1, table2, temp[0],
                            temp[1]) # skeleton of new table
                    # todo : get conditions
                    # (R1.qty[+3] > S.Q[-3]) and (R1.saleid = S.saleid)
                    # table.attribute [arithop constant] relop table.attribute [artithop con- stant]
                    t1columns = []
                    t2columns = []
                    relOperators = []
                    leftNums = []
                    rightNums = []
                    arithOpsL = []
                    arithOpsR = []
                    # helper parses one condition into the parallel lists
                    # above: column names, relop, arith op and constant for
                    # each side (defaults '+' / '0' when no bracket given)
                    def helper(cond):
                        left = ''
                        right = ''
                        if re.search('!=', cond):
                            left = re.search('.+!=', cond).group(0)
                            left = left[:-1]
                            right = re.search('!=.+', cond).group(0)
                            right = right[1:]
                            relOperators.append('!=')
                        elif re.search('>=', cond):
                            relOperators.append('>=')
                            left = re.search('.+>=', cond).group(0)
                            left = left[:-1]
                            right = re.search('>=.+', cond).group(0)
                            right = right[1:]
                        elif re.search('<=', cond):
                            relOperators.append('<=')
                            left = re.search('.+<=', cond).group(0)
                            left = left[:-1]
                            right = re.search('<=.+', cond).group(0)
                            right = right[1:]
                        elif re.search('=', cond):
                            relOperators.append('=')
                            left = re.search('.+=', cond).group(0)
                            left = left[:-1]
                            right = re.search('=.+', cond).group(0)
                            right = right[1:]
                        elif re.search('<', cond):
                            relOperators.append('<')
                            left = re.search('.+<', cond).group(0)
                            left = left[:-1]
                            right = re.search('<.+', cond).group(0)
                            right = right[1:]
                        else:
                            relOperators.append('>')
                            left = re.search('.+>', cond).group(0)
                            left = left[:-1]
                            right = re.search('>.+', cond).group(0)
                            right = right[1:]
                        # NOTE(review): left[:-1] for '>=', '<=', '!=' strips
                        # only one of the two operator chars; confirm inputs
                        if re.search("\[", left):
                            t1columns.append(re.search("\..+\[",
                                    left).group(0)[1:-1])
                            arithOpsL.append(re.search("\[.+\]",
                                    left).group(0)[1])
                            leftNums.append(re.search("\[.+\]",
                                    left).group(0)[2:-1])
                        else:
                            t1columns.append(re.search("\..+",
                                    left).group(0)[1:])
                            arithOpsL.append('+')
                            leftNums.append('0')
                        if re.search("\[", right):
                            t2columns.append(re.search("\..+\[",
                                    right).group(0)[1:-1])
                            arithOpsR.append(re.search("\[.+\]",
                                    right).group(0)[1])
                            rightNums.append(re.search("\[.+\]",
                                    right).group(0)[2:-1])
                        else:
                            t2columns.append(re.search("\..+",
                                    right).group(0)[1:])
                            arithOpsR.append('+')
                            rightNums.append('0')
                    if 'and' in conditions:
                        conditions = re.split('and', temp[2])
                        for con in conditions:
                            con = con[1:-1] # removing bracks
                            helper(con)
                    else:
                        helper(conditions)
                    temp1 = table1[list(table1.keys())[0]]
                    temp2 = table2[list(table2.keys())[0]]
                    x = temp1.size
                    y = temp2.size
                    # NOTE(review): '*' maps to truediv and '\' to mul here,
                    # the opposite of select()'s transpose logic — confirm
                    # which symbol means multiply in the command syntax
                    ops = { # etc.
                        '+': operator.add,
                        '-': operator.sub,
                        '*': operator.truediv,
                        '\\': operator.mul,
                        }
                    # nested-loop join: compare every row of table1 against
                    # every row of table2 (O(x*y))
                    for i in range(0, x):
                        rowt1 = {}
                        for k in table1.keys():
                            val = table1[k]
                            val = val[i]
                            rowt1[k] = val
                        for j in range(0, y):
                            rowt2 = {}
                            for l in table2.keys():
                                val = table2[l]
                                val = val[j]
                                rowt2[l] = val
                            # need to check if conditions hold good for row 1 and row2
                            boolVal = True
                            count = 0
                            for op in relOperators:
                                templ1 = \
                                    ops[arithOpsL[count]](float(rowt1[t1columns[count]]),
                                        float(leftNums[count]))
                                tempr1 = \
                                    ops[arithOpsR[count]](float(rowt2[t2columns[count]]),
                                        float(rightNums[count]))
                                if op == '>':
                                    boolVal = boolVal and templ1 \
                                        > tempr1
                                if op == '<':
                                    boolVal = boolVal and templ1 \
                                        < tempr1
                                if op == '=':
                                    boolVal = boolVal and templ1 \
                                        == tempr1
                                if op == '>=':
                                    boolVal = boolVal and templ1 \
                                        >= tempr1
                                if op == '<=':
                                    boolVal = boolVal and templ1 \
                                        <= tempr1
                                if op == '!=':
                                    boolVal = boolVal and templ1 \
                                        != tempr1
                                count = count + 1
                            if boolVal == True:
                                # all conditions hold: append the combined
                                # row to the skeleton, column by column
                                listnames = list(newTable.keys())
                                listnamesrt1 = list(rowt1.keys())
                                listnamesrt2 = list(rowt2.keys())
                                ct = 0
                                r1ct = 0
                                r2ct = 0
                                for p in rowt1.keys():
                                    newTable[listnames[ct]] = \
                                        np.append(newTable[listnames[ct]], rowt1[listnamesrt1[r1ct]])
                                    ct = ct + 1
                                    r1ct = r1ct + 1
                                for q in rowt2.keys():
                                    newTable[listnames[ct]] = \
                                        np.append(newTable[listnames[ct]], rowt2[listnamesrt2[r2ct]])
                                    ct = ct + 1
                                    r2ct = r2ct + 1
                    newTable = removeColumn(newTable) # removing redundant record one used to preserve datatype
                    tables[tablename] = newTable
                    mid = time.time()
                    writeToFile(tablename, tablename)
                if command == 'concat': # Q5 := concat(Q4, Q2)
                    temp = re.split(',', params)
                    concatTable1 = tables[temp[0]]
                    # NOTE(review): this reads temp[0] again — almost
                    # certainly should be tables[temp[1]]; as written the
                    # table is concatenated with itself
                    concatTable2 = tables[temp[0]]
                    x = list(concatTable1.keys())
                    y = list(concatTable2.keys())
                    # checking if schema matches
                    if x == y:
                        # print("schema matches")
                        for key in x:
                            concatTable1[key] = \
                                np.append(concatTable1[key],
                                    concatTable2[key])
                        tables[tablename] = concatTable1
                        mid = time.time()
                        writeToFile(tablename, tablename)
                    else:
                        print 'schema of given tables dont match'
            else:
                print '' # its a comment found after '/'
        end = time.time()
        if mid != 0:
            print '---------------------------'
            print 'Query output written to file'
            print 'Query Execution time is ' + str(mid - start)
            print 'Time taken to write to file is' + str(end - mid)
            print '---------------------------\n'
# Guard the entry point so importing this module doesn't run the CLI.
if __name__ == '__main__':
    main()
end = time.time()
if(mid!=0):
print("---------------------------")
print("Query output written to file")
print("Query Execution time is "+str(mid-start))
print("Time taken to write to file is"+str(end-mid))
print("---------------------------\n")
main()
|
from formula_gen import FormulaGen
from formula import Formula
class DPLL():
    """Driver for a DPLL/CDCL-style SAT procedure over a CNF input file."""

    def __init__(self, target_file):
        self.target_file = target_file

    def dpll(self, mode):
        """Run the solver on self.target_file.

        Args:
            mode: decision heuristic — "dlis", "prop", or anything else
                for random decisions.

        Returns:
            ("SATISFIABLE" | "UNSATISFIABLE", variable list) tuple.
        """
        gen = FormulaGen(self.target_file)
        gen.gen_formula()
        gen.make_variables()
        gen.make_clauses()
        formula = Formula(gen.formula_str, gen.var_lst, gen.clause_lst)
        while True:
            # While there is a unit clause {L} in F|A, add L -> 1 to A.
            formula.unit_propagation()
            if formula.valuation() == True:
                # F|A contains no clauses: current assignment is a model.
                return ("SATISFIABLE", gen.var_lst)
            elif formula.valuation() == False:
                # Conflict: learn a clause; an empty learned clause
                # proves unsatisfiability.
                learned = formula.learning_procedure()
                if len(learned.clause) == 0:
                    return ("UNSATISFIABLE", gen.var_lst)
                formula.clause_lst.append(learned)
                formula.formula_str.append(learned.clause)
                # Backtrack to the point where the learned clause is unit.
                formula.backtrack(learned)
            elif mode == "dlis":
                formula.decision_dlis()
            elif mode == "prop":
                formula.decision_proportional()
            else:
                formula.decision_random()
|
import tkinter as tk
from api import AppCache
# 用户信息
class UserDialog(tk.Toplevel):
    """Popup window showing the current user's profile fields.

    Reads user data from AppCache.user and renders one labeled row per
    field, positioned relative to the parent window.
    """

    def __init__(self, root, line=None):
        # `line` is unused here but kept for caller compatibility.
        super().__init__()
        self.title('我的信息')
        self.attributes("-topmost", True)  # keep the dialog above the main window
        self.resizable(False, False)
        # Mapping from user-info keys to their display labels.
        self.desc = {
            "uuid": "uuid",
            "username": "用户名",
            "email": "邮箱",
            "phone": "电话",
            "customNumbers": "自建账号",
            "coinlistNumbers": "集成账号",
        }
        self.userInfo = AppCache.user
        # 弹窗界面
        self.setup_UI()
        # Place the dialog at one third of the parent window's extent.
        x = root.winfo_x()
        y = root.winfo_y()
        w = root.winfo_width()
        h = root.winfo_height()
        self.geometry("+%d+%d" % (x + w / 3, y + h / 3))

    def setup_UI(self):
        """Build one '<label>: <value>' row per user-info entry."""
        for key, value in self.userInfo.items():
            row = tk.Frame(self)
            row.pack(fill="x")
            # .get(key, key) falls back to the raw key name when the
            # backend returns a field missing from self.desc — the
            # original raised KeyError there. (Debug prints removed.)
            tk.Label(row, text="%s:" % self.desc.get(key, key), width=10,
                     anchor="w").pack(side=tk.LEFT)
            tk.Label(row, textvariable=tk.StringVar(value=value), width=20,
                     anchor="w").pack(side=tk.LEFT)
|
import asyncio
import pytest
import aiotools
@pytest.mark.asyncio
async def test_timer():
    """
    Test basic timer functionality: the callback receives the interval
    and fires once per (virtual) 0.1 s tick, with and without the
    CANCEL delay policy.
    """
    vclock = aiotools.VirtualClock()
    with vclock.patch_loop():
        # Single initialization — the original set `count = 0` both here
        # and again after defining the callback.
        count = 0

        async def counter(interval):
            assert interval == 0.1
            nonlocal count
            await asyncio.sleep(0)
            count += 1

        timer = aiotools.create_timer(counter, 0.1)
        await asyncio.sleep(0.22)  # fires at t = 0, 0.1, 0.2 -> 3 times
        timer.cancel()
        await timer
        assert count == 3
        count = 0
        # CANCEL policy gives the same result because counter finishes
        # within a single interval (sleep(0)).
        timer = aiotools.create_timer(counter, 0.1, aiotools.TimerDelayPolicy.CANCEL)
        await asyncio.sleep(0.22)
        timer.cancel()
        await timer
        # should have same results
        assert count == 3
@pytest.mark.asyncio
async def test_timer_leak_default():
    """
    Test if the timer-fired tasks are cleaned up properly
    even when each timer-fired task takes longer than the timer interval.
    (In this case they will accumulate indefinitely!)
    """
    vclock = aiotools.VirtualClock()
    with vclock.patch_loop():
        spawn_count = 0
        cancel_count = 0
        done_count = 0

        async def delayed(interval):
            # Each fired task deliberately outlives the 1 s timer interval.
            nonlocal spawn_count, cancel_count, done_count
            spawn_count += 1
            try:
                await asyncio.sleep(5)
                done_count += 1
            except asyncio.CancelledError:
                cancel_count += 1

        task_count = len(aiotools.compat.all_tasks())
        timer = aiotools.create_timer(delayed, 1)
        await asyncio.sleep(9.9)
        timer.cancel()
        await timer
        # No lingering tasks beyond the baseline (plus the timer itself).
        assert task_count + 1 >= len(aiotools.compat.all_tasks())
        # Every spawned task either finished or was cancelled — none leaked.
        assert spawn_count == done_count + cancel_count
        assert spawn_count == 10
        assert cancel_count == 5
@pytest.mark.asyncio
async def test_timer_leak_cancel():
    """
    Test the effect of TimerDelayPolicy.CANCEL which always
    cancels any pending previous tasks on each interval.
    """
    vclock = aiotools.VirtualClock()
    with vclock.patch_loop():
        spawn_count = 0
        cancel_count = 0
        done_count = 0

        async def delayed(interval):
            # Sleeps far longer than the 0.01 s interval, so every task is
            # still pending when the next tick arrives.
            nonlocal spawn_count, cancel_count, done_count
            spawn_count += 1
            try:
                await asyncio.sleep(1)
            except asyncio.CancelledError:
                cancel_count += 1
            else:
                done_count += 1

        task_count = len(aiotools.compat.all_tasks())
        timer = aiotools.create_timer(
            delayed,
            0.01,
            aiotools.TimerDelayPolicy.CANCEL,
        )
        await asyncio.sleep(0.1)
        timer.cancel()
        await timer
        await asyncio.sleep(0)
        # No lingering tasks beyond the baseline (plus the timer itself).
        assert task_count + 1 >= len(aiotools.compat.all_tasks())
        assert spawn_count == cancel_count + done_count
        # With CANCEL policy every pending task was cancelled; none finished.
        assert cancel_count == 10
        assert done_count == 0
@pytest.mark.asyncio
async def test_timer_leak_nocancel():
    """
    Counterpart to test_timer_leak_cancel: every fired task completes
    immediately (sleep(0)), so even under TimerDelayPolicy.CANCEL there
    is never a pending task left to cancel on the next tick.
    (The original docstring was copied verbatim from the CANCEL test.)
    """
    vclock = aiotools.VirtualClock()
    with vclock.patch_loop():
        spawn_count = 0
        cancel_count = 0
        done_count = 0

        async def delayed(interval):
            nonlocal spawn_count, cancel_count, done_count
            spawn_count += 1
            try:
                # Finishes within the same tick, before the next interval.
                await asyncio.sleep(0)
            except asyncio.CancelledError:
                cancel_count += 1
            else:
                done_count += 1

        task_count = len(aiotools.compat.all_tasks())
        timer = aiotools.create_timer(
            delayed,
            0.01,
            aiotools.TimerDelayPolicy.CANCEL,
        )
        await asyncio.sleep(0.096)
        timer.cancel()
        await timer
        await asyncio.sleep(0)
        # No lingering tasks beyond the baseline (plus the timer itself).
        assert task_count + 1 >= len(aiotools.compat.all_tasks())
        assert spawn_count == cancel_count + done_count
        # All tasks finished on their own; nothing was cancelled.
        assert cancel_count == 0
        assert done_count == 10
|
from .my_plots import *
from .latex2png import latex2png
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.offsetbox import TextArea, DrawingArea, OffsetImage, AnnotationBbox
import pyvista as pv
from io import BytesIO
from PIL import Image
class EnergyLevels:
"""
Generates energy level diagram with annotation.
"""
    def __init__(self):
        """Create an empty diagram with no levels, state, or arrows."""
        self.label = []      # one text label per energy level (parallel lists)
        self.locationX = []  # level center x positions
        self.locationY = []  # level center y positions
        self.state = None    # optional complex amplitudes, set via setState
        self.arrows = []     # entries: [fromIdx, toIdx, label, style, color, strength, detuning]
def add(self, label, locationX, locationY):
"""
Adds energy level
Args:
label: label of the energy level
locationX: center position on plot axis
locationY: center position on plot axis
"""
self.label.append(label)
self.locationX.append(locationX)
self.locationY.append(locationY)
def setState(self, state):
"""
Adds current state representation to level diagram.
State will be represented as blobs of different sizes and
colors plotted on corresponding energy levels.
Blobs size correspond to the amplitude of that basis state
in the total state, while their colour is mapped based
on the complex colour wheel scheme defined in my_plots.
Args:
state : complex number array decomposing state in the
basis of the previously added energy levels
"""
assert len(state) == len(self.label), "state array lenght should be the same as number of states"
self.state = np.array(state)
    def clearState(self):
        """
        Clears system state from the energy level diagram.

        After this call plot() draws levels and arrows only, no blobs.
        """
        self.state = None
    def getTotalStates(self):
        """
        Total number of states on the energy level diagram.

        Returns:
            int: number of levels added via add() so far.
        """
        return len(self.locationX)
def addArrow(self, fromStateIndex, toStateIndex, label="", style="<->", color="k", strength=1,
detuning = None):
"""
Adds arrow to the energy level diagram.
Args:
fromStateIndex (int): index of the first state
toStateINdex (int): index of the second state it points to
style (string): style of arrow, accepted values are
'<-', '->' or '<->' . Default is '<->'
detuning: None by default. Or (relativeValue, "label") tuple
"""
assert fromStateIndex < len(self.locationX), "fromStateIndex should be in range 0, getTotalStates - 1"
assert toStateIndex < len(self.locationX), "toStateIndex should be in range 0, getTotalStates - 1"
# if arrow exists, replace label; if it doesn't exist add new arrow
for i in range(len(self.arrows)):
if self.arrows[i][0] == fromStateIndex and self.arrows[i][1] == toStateIndex:
self.arrows[i][2] = label
self.arrows[i][3] = style
self.arrows[i][4] = color
self.arrows[i][5] = strength
self.arrows[i][6] = detuning
self.arrows.append([fromStateIndex, toStateIndex, label, style, color,
strength, detuning])
def plot(self, axis, labels=False, linewidth=4, length=0.7, stateBlob=500, fontsize=14, arrowLabelSize=12,
         debug=False, dpi=100, drivingStrenghtToWidth=True):
    """
    Plots energy level diagram on the given figure axis.

    Args:
        axis: matplotlib axis to draw on
        labels (bool): if True, each level's label is printed next to it
        linewidth (float): energy level line width
        length (float): energy level line length
        stateBlob (float): maximum blob size for a system state,
            corresponding to the unit amplitude for the system to
            be in a given energy level
        fontsize (float): font size of the level labels
        arrowLabelSize (float): font size of the rendered arrow labels
        debug (bool): turns on and off plot axis, useful for precise
            positioning.
        dpi (int): resolution used when rendering LaTeX arrow labels
        drivingStrenghtToWidth (bool): Should arrows correspond to
            driving strengths. True by default.
    """
    # one horizontal line segment per energy level
    for i in range(len(self.label)):
        axis.plot([self.locationX[i] - length / 2,
                   self.locationX[i] + length / 2],
                  [self.locationY[i], self.locationY[i]],
                  "-",
                  color="k",
                  lw=linewidth,
                  solid_capstyle='round',
                  zorder=1)
    if labels:
        # level labels sit just past the right end of each line
        for i in range(len(self.label)):
            axis.text(self.locationX[i] + 0.55 * length, self.locationY[i],
                      self.label[i],
                      fontsize = fontsize,
                      verticalalignment='center')
    if self.state is not None:
        # blob area tracks the amplitude; colour encodes the complex phase
        for i in range(len(self.state)):
            amplitude = np.abs(self.state[i])
            # max(..., 1e-15) * 1.00001 keeps the colour normalisation
            # strictly above |amplitude| even for zero amplitudes
            color = getComplexColor(self.state[i], max(amplitude, 1e-15) * 1.00001)
            axis.scatter([self.locationX[i]],
                         [self.locationY[i]],
                         s=[amplitude * stateBlob],
                         c=[color],
                         zorder=2)
    # widths are normalised against the strongest drive
    normArrows = -1
    for i in range(len(self.arrows)):
        normArrows = max(self.arrows[i][5], normArrows)
    # arrow record layout: [from, to, label, style, color, strength, detuning]
    for i in range(len(self.arrows)):
        xStart = self.locationX[self.arrows[i][0]]
        yStart = self.locationY[self.arrows[i][0]]
        xEnd = self.locationX[self.arrows[i][1]]
        yEnd = self.locationY[self.arrows[i][1]]
        if self.arrows[i][6] is not None:
            detuning, label = self.arrows[i][6]
        else:
            detuning = 0
        # arrows to a detuned virtual level end above/below the real level
        yEnd += detuning
        vector = np.array([xEnd - xStart, yEnd - yStart])
        middle = np.array([xStart,yStart]) + vector/2
        unitVector = vector / np.linalg.norm(vector)
        # shrink the arrow by 0.5 at both ends so it doesn't touch the levels
        dv = 0.5 * unitVector
        xStart += dv[0]
        yStart += dv[1]
        xEnd -= dv[0]
        yEnd -= dv[1]
        vector = vector - 2 * dv
        if drivingStrenghtToWidth:
            # map relative strenghts of driving fields to widths of the arrows
            width = 0.05 * self.arrows[i][5] / normArrows
        else:
            width = 0.05
        if self.arrows[i][3] == "<->":
            # double-headed: draw two half-arrows outward from the midpoint
            axis.arrow(middle[0], middle[1], vector[0]/2, vector[1]/2,
                       length_includes_head=True,
                       width=width,
                       edgecolor=self.arrows[i][4],
                       facecolor=self.arrows[i][4],
                       capstyle="round")
            axis.arrow(middle[0], middle[1], -vector[0]/2, -vector[1]/2,
                       length_includes_head=True,
                       width=width,
                       edgecolor=self.arrows[i][4],
                       facecolor=self.arrows[i][4],
                       capstyle="round")
        elif self.arrows[i][3] == "->":
            axis.arrow(xStart, yStart, vector[0], vector[1],
                       length_includes_head=True,
                       width=width,
                       edgecolor=self.arrows[i][4],
                       facecolor=self.arrows[i][4],
                       capstyle="round")
        else:
            # self.arrows[i][3] == "<-"
            axis.arrow(xEnd, yEnd, -vector[0], -vector[1],
                       length_includes_head=True,
                       width=width,
                       edgecolor=self.arrows[i][4],
                       facecolor=self.arrows[i][4],
                       capstyle="round")
            # NOTE(review): this annotate call draws a fixed arrow at
            # (5,0)-(9,0) and passes size=width -- looks like leftover
            # debug code; confirm whether it can be removed.
            plt.annotate('',
                         xytext=(5,0),
                         xy=(9 ,0),
                         arrowprops=dict(arrowstyle="simple", color=self.arrows[i][4]),
                         size=width
                         )
        # add annotation if existing: LaTeX label rendered to a PNG and
        # pasted at the arrow midpoint
        if self.arrows[i][2] != "":
            generator = latex2png()
            file = generator.make_png(self.arrows[i][2],
                                      fontsize=arrowLabelSize, dpi=dpi,
                                      border=[5,5,5,5])
            arr_image = plt.imread(file, format='png')
            imagebox = OffsetImage(arr_image)
            # axis.plot([middle[0]],[middle[1]], "bo")
            ab = AnnotationBbox(imagebox, xy=(middle[0], middle[1]), pad=0, frameon=debug)
            axis.add_artist(ab)
        # add detuning if existing: dotted virtual level at the target state
        if self.arrows[i][6] is not None:
            detuning, label = self.arrows[i][6]
            fromState = self.arrows[i][1]
            axis.plot([self.locationX[fromState] - length / 2,
                       self.locationX[fromState] + length / 2],
                      [self.locationY[fromState] + detuning,
                       self.locationY[fromState] + detuning],
                      ":",
                      color="k",
                      lw=linewidth,
                      solid_capstyle='round',
                      zorder=-1)
    if not debug:
        axis.set_axis_off()
def blobAnnotate(axis,
                 blobX, blobY,
                 textX, textY,
                 text,
                 blobSize=100, linewidth=3, fontsize=12,
                 color=cDUbb, curvatureSign="+", zorder=-1):
    """
    Cartoon style blob annotation to highlight different parts of plot.

    Args:
        axis : figure axis where we do blob annotation
        blobX (float) : X position of blob highlight on axis
        blobY (float) : Y position of blob highlight on axis
        textX (float) : X position of corresponding annotation
        textY (float) : Y position of corresponding annotation
        text (string) : annotation
        blobSize (float) : scatter marker size of the highlight blob
        linewidth (float) : width of the connecting arc
        fontsize (float) : annotation font size
        color : blob/arc/box colour
        curvatureSign (str) : "+" or "-", bending direction of the arc
        zorder (int) : drawing order for blob and annotation
    """
    arc_style = "arc3,rad=%s0.05" % curvatureSign
    connector = dict(arrowstyle="-",
                     fc=color, ec=color, lw=linewidth, zorder=zorder,
                     connectionstyle=arc_style)
    text_box = dict(boxstyle="round,pad=0.3", fc=color, ec=color, lw=2)
    axis.scatter([blobX], [blobY], s=blobSize, c=color)
    axis.annotate(text, (blobX, blobY), (textX, textY),
                  ha="center", va="center",
                  size=fontsize,
                  arrowprops=connector,
                  zorder=zorder,
                  bbox=text_box)
def xAnnotate(axis, fromX, toX, color=cDUy, zorder=-2):
    """Highlight the vertical band between fromX and toX on the axis."""
    axis.axvspan(fromX, toX, color=color, zorder=zorder)
def yAnnotate(axis, fromY, toY, color=cDUy, zorder=-2):
    """Highlight the horizontal band between fromY and toY on the axis."""
    axis.axhspan(fromY, toY, color=color, zorder=zorder)
def equation(latex, axis, fontsize=10, dpi=100, border=[4,4,4,4], debug=False,
             x=0.1, y=1):
    """
    Adds equations on the given axis plot (and turns off axis).

    The LaTeX string is rendered to a PNG via latex2png and pasted at
    axis coordinates (x, y); when debug is True the axis and the image
    frame stay visible for positioning.
    """
    png_path = latex2png().make_png(latex, fontsize=fontsize, dpi=dpi, border=border)
    rendered = plt.imread(png_path, format='png')
    box = AnnotationBbox(OffsetImage(rendered), xy=(x, y), pad=0, frameon=debug)
    axis.add_artist(box)
    if not debug:
        axis.set_axis_off()
class BlochSphere:
    """
    Utilities for plotting Bloch Sphere.

    Builds an off-screen pyvista scene containing a translucent sphere
    and its three principal great circles; state blobs, state arrows
    and time trajectories can then be added before rendering with
    plot().
    """
    def __init__(self, r=3, resolution=3):
        # r: sphere radius in scene units;
        # resolution: scales the off-screen render (resolution*600 px square)
        self.p = pv.Plotter(shape=(1, 1),
                            multi_samples=1,
                            window_size=(resolution * 600, resolution * 600),
                            off_screen=True,
                            notebook=False)
        self.p.set_background(cDUsky, top="white")
        self.resolution = resolution
        # draw cross section of sphere with three principal coordinate planes
        num = 50
        theta = np.linspace(-1 * np.pi, 1 * np.pi, num)
        self.r = r
        phi = 0 * np.pi / 60
        # circle in the x-y plane (z identically zero)
        z = 0 * self.r * np.cos(theta)
        x = self.r * np.cos(theta)
        y = self.r * np.sin(theta)
        rpts = np.column_stack((x, y, z))
        spline = pv.Spline(rpts, 1000)
        rxy_tube = spline.tube(radius=0.05)
        # circle in the x-z plane
        z = self.r * np.cos(theta)
        x = self.r * np.sin(theta) * np.cos(phi - np.pi / 2)
        y = self.r * np.sin(theta) * np.sin(phi - np.pi / 2)
        rpts = np.column_stack((x, y, z))
        spline = pv.Spline(rpts, 1000)
        rxz_tube = spline.tube(radius=0.05)
        # circle in the y-z plane
        z = self.r * np.cos(theta)
        x = self.r * np.sin(theta) * np.cos(phi)
        y = self.r * np.sin(theta) * np.sin(phi)
        rpts = np.column_stack((x, y, z))
        spline = pv.Spline(rpts, 1000)
        ryz_tube = spline.tube(radius=0.05)
        # add sphere
        big = pv.Sphere(center=(0, 0, 0), radius=self.r)
        self.p.add_mesh(big, opacity=0.4,
                        color="w", specular=0.85, smooth_shading=True)
        # add cross-sections
        self.p.add_mesh(rxy_tube, opacity=0.1, smooth_shading=True, color=cDUkk)
        self.p.add_mesh(rxz_tube, opacity=0.1, smooth_shading=True, color=cDUkk)
        self.p.add_mesh(ryz_tube, opacity=0.1, smooth_shading=True, color=cDUkk)
    def state2XYZ(complexVectorState):
        # TO-Do: map a complex state vector onto Bloch coordinates.
        # NOTE(review): unimplemented stub; it is also missing `self`
        # (or a @staticmethod decorator) -- fix when implementing.
        #x =
        #y =
        #z =
        #return x,y,z
        pass
    def addStateBlob(self, x,y,z, color=cDUrr, radius=0.2):
        """
        Adds highlighted Blob on or inside the Bloch sphere.

        (x, y, z) is scaled by the sphere radius, so unit-length input
        lands on the surface; radius is relative to the sphere size.
        """
        small = pv.Sphere(center=np.array([x, y, z])*self.r,
                          radius=self.r / 3 * radius)
        self.p.add_mesh(small, opacity=1.0, color=color, smooth_shading=True)
        pass
    def addStateArrow(self, x,y,z, color=cDUbbbb):
        """
        Adds state arrow to the Bloch sphere, given the tip position.

        The arrow starts at the origin; its length is |(x, y, z)|
        scaled by the sphere radius.
        """
        length = np.sqrt(x*x + y*y + z*z)
        arrow=pv.Arrow(start=(0.0, 0.0, 0.0), direction=np.array([x,y,z]) * self.r,
                       tip_length=0.25, tip_radius=0.1, tip_resolution=20,
                       shaft_radius=0.05, shaft_resolution=20, scale=length * self.r)
        self.p.add_mesh(arrow, opacity=1.0, color=color, smooth_shading=True)
    def addTrajectory(self, trajectoryXYZ):
        """
        Adds trajectory in time, with time shown with viridis colouring.

        trajectoryXYZ: array of points on the unit sphere over time;
        the point index is used as the colour scalar.
        """
        spline = pv.Spline(trajectoryXYZ * self.r, 1000)
        spline["scalars"] = np.arange(spline.n_points)
        tubes=spline.tube(radius=0.1)
        self.p.add_mesh(tubes, smooth_shading=True, show_scalar_bar=False)
    def plot(self, axis=None, debug=False,
             cameraPosition=[(12.2, 4.0, 4.0),
                             (0.0, 0.0, 0.0),
                             (0.0, 0.0, 1)],
             labelAxis=True,
             labelSize=12,
             dpi=100,
             label=[r"$|e\rangle$",
                    r"$|g\rangle$",
                    r"$\frac{|e\rangle+|g\rangle}{\sqrt{2}}$",
                    r"$\frac{|e\rangle+i|g\rangle}{\sqrt{2}}$"
                    ],
             labelOffset=None
             ):
        """
        Plots Bloch sphere on the given axis.

        If axis is None, the pyvista window is shown directly (labels
        are skipped).  Otherwise the rendered image is pasted into the
        matplotlib axis; when labelAxis is True, the LaTeX labels are
        rendered and pasted at labelOffset pixel positions.
        """
        self.p.enable_depth_peeling(10)
        self.p.camera_position = cameraPosition
        # [(13.0, 0.0, 1.0),
        # (0.0, 0.0, 0.0),
        # (0.1, 0.0, 0.1)]
        if axis==None:
            if labelAxis:
                print("Note: Bloch sphere axis are labeled only if axis"
                      " argument is passed to plot function")
            self.p.show()
        else:
            png_output = BytesIO()
            self.p.show(screenshot=png_output)
            if labelAxis:
                generator = latex2png()
                im = Image.fromarray(self.p.image)
                im = im.convert("RGBA")
                if labelOffset == None:
                    # default pixel anchors for the four default labels
                    labelOffset = [(int(im.size[0]*0.53), 10),
                                   (int(im.size[0]*0.53), int(im.size[1]*0.87)),
                                   (int(im.size[0]*0.35), int(im.size[1]*0.6)),
                                   (int(im.size[0]*0.86), int(im.size[1]*0.54))]
                for i in range(len(labelOffset)):
                    labelLatex = generator.make_png(label[i],
                                                    fontsize=labelSize,
                                                    dpi=dpi * self.resolution*2)
                    #white_to_transparency(Image.open(labelLatex))
                    l = Image.open(labelLatex)
                    im.paste(l, labelOffset[i], l.convert('RGBA'))
                axis.imshow(im)
            else:
                axis.imshow(self.p.image)
        # NOTE(review): when axis is None this raises AttributeError for
        # the default debug=False -- presumably this line belongs in the
        # else branch; confirm against callers.
        if not debug:
            axis.set_axis_off()
|
# Interactive walkthrough of Python's built-in conversion functions
# (type, int, float, str); the explanatory output is in Portuguese.
print('Chamamos as funções pelo nome seguido de parenteses com ou sem argumentos.')
print('a função type(42) retornará o tipo do parâmetro passado.')
print(type(42))
print()
# BUG FIX: "inteito" -> "inteiro" (typo in user-facing text)
print('Outro exemplo é a função int() que retornará o valor inteiro relativo a um parâmetro.')
print('int ("32") retornará:')
print(int("32"))
print('int (3.27) retornará:')
print(int(3.27))
print('Também temos o float retorna o ponto flutuante referente a um parâmetro.')
print('por exemplo, float(32) retornará:')
print(float(32))
print("float('3.14') retornará:")
print(float('3.14'))
print('Por fim temos o str() trazendo o parâmetro passado em formato de string.')
print("um str(32) então retorna:")
print(str(32))
|
from turtle import *
#funkcja testuj() do testowania rozwiązania
def testuj(n):
    """Test harness for the turtle exercises: draws solution n (1-3),
    overlays a red reference frame centred on home, and adds a
    draggable turtle showing the expected result image (zadN.gif)."""
    # reference frame dimensions in pixels: a = height, b = width
    a=499
    b=796
    reset()
    tracer(0)
    # draw the requested solution
    # NOTE(review): tetki and motyw are not defined in this file --
    # presumably supplied by the exercise template; confirm before
    # calling testuj(2) or testuj(3).
    if n==1:
        kwiat()
    if n==2:
        tetki(2)
    if n==3:
        motyw(3)
    # draw the red b x a reference rectangle centred on home
    pu(); home(); pd()
    pu();fd(b/2);pd()
    lt(90)
    color("red")
    # each pass draws half the rectangle: a/2 up, b across, a/2 down
    for i in range(2):
        fd(a//2);lt(90)
        fd(b);lt(90)
        fd(a//2)
    rt(90)
    pu();bk(b/2);pd()
    color("black")
    update()
    tracer(1)
    # register the reference image for task n
    if n==1:
        addshape("zad1.gif")
    if n==2:
        addshape("zad2.gif")
    if n==3:
        addshape("zad3.gif")
    # draggable turtle displaying the reference image for comparison
    zolw_test = Turtle()
    if n==1:
        zolw_test.shape("zad1.gif")
    if n==2:
        zolw_test.shape("zad2.gif")
    if n==3:
        zolw_test.shape("zad3.gif")
    zolw_test.speed(0)
    zolw_test.pu()
    zolw_test.ondrag(zolw_test.goto)
#tutaj jest miejsce na Twoje rozwiązania zadania
#zadanie 1 miniLOGIA 14
def kwiat():
    """Main routine: draw the flower -- six petals around the centre
    plus one petal in the middle (bok is the base side length)."""
    bok = 8
    lt(90)
    for _ in range(6):
        pu(); fd(19*bok); pd()
        platek(bok)
        pu(); bk(19*bok); pd()
        rt(60)
    # the central petal
    platek(bok)
def platek(bok):
    """Draw one petal: a ring of six hexagons around the current spot."""
    for _ in range(6):
        pu(); fd(bok); pd()
        szesciokat(4*bok)
        pu(); bk(bok); pd()
        rt(60)
def szesciokat(bok):
    """Draw a yellow-filled hexagon outline with side length bok."""
    turns = ((lt, 60), (rt, 120), (rt, 60), (rt, 60), (rt, 60), (rt, 120))
    fillcolor("yellow")
    begin_fill()
    for turn, angle in turns:
        fd(bok)
        turn(angle)
    end_fill()
|
import time
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
class Sensor:
    """Driver for up to three HC-SR04-style ultrasonic distance sensors
    wired to Raspberry Pi GPIO pins (BCM numbering, configured at
    module level)."""
    def __init__(self, echo_l, echo_m, echo_r, trig_l, trig_m, trig_r):
        """Store the pin numbers and configure echo pins as inputs and
        trigger pins as outputs."""
        self.echo_l = echo_l
        self.echo_m = echo_m
        self.echo_r = echo_r
        self.trig_l = trig_l
        self.trig_m = trig_m
        self.trig_r = trig_r
        GPIO.setup(self.echo_l, GPIO.IN)
        GPIO.setup(self.echo_m, GPIO.IN)
        GPIO.setup(self.echo_r, GPIO.IN)
        GPIO.setup(self.trig_l, GPIO.OUT)
        GPIO.setup(self.trig_m, GPIO.OUT)
        GPIO.setup(self.trig_r, GPIO.OUT)
    def distance_us(self, l = True, m = True, r = True):
        '''
        :param l: distance sensor l, default True
        :param m: distance sensor m, default True
        :param r: distance sensor r, default True
        :return: distances aggregated as List [l,m,r], each the (upper)
            median of 6 measurements
        '''
        m_l = []
        m_m = []
        m_r = []
        # BUG FIX: the loop previously ran 4 times (while count <= 3)
        # although the docstring promises 6 measurements per sensor --
        # which also made output[i][3] the maximum of 4 values rather
        # than a median.
        for _ in range(6):
            if l and m and r:
                m_l.append(self.d_measurement(self.trig_l, self.echo_l))
                m_m.append(self.d_measurement(self.trig_m, self.echo_m))
                m_r.append(self.d_measurement(self.trig_r, self.echo_r))
            # in case you only want left and right
            elif l and not m and r:
                pass
                '''
                you get the idea, in case you only have a certain amount of sensors or more
                '''
            else:
                return "something is wrong"
        output = [m_l, m_m, m_r]
        # take the upper median (4th of 6 sorted values) per sensor
        for i in range(len(output)):
            output[i].sort()
            output[i] = output[i][3]
        return output
    def d_measurement(self, t, e):
        '''
        The measurement function itself.
        :param t: trigger pin
        :param e: echo pin
        :return: distance in cm
        '''
        # send a 0.1 ms trigger pulse
        GPIO.output(t, True)
        time.sleep(0.0001)
        GPIO.output(t, False)
        # BUG FIX: initialise both timestamps so a missed edge cannot
        # raise UnboundLocalError below.
        start = end = time.time()
        # measure the time between the echo line rising and falling
        while GPIO.input(e) == False:
            start = time.time()
        while GPIO.input(e) == True:
            end = time.time()
        sig_time = end - start
        # time difference divided by constant is equal to distance in cm
        return sig_time / 0.000058
|
def condicion(num):
    """Return True when, for a 4-digit number, the sum of the units and
    hundreds digits equals the sum of the thousands and tens digits."""
    digits = []
    for divisor in (1000, 100, 10, 1):
        digits.append(num // divisor)
        num %= divisor
    mil, centena, decena, unidad = digits
    if unidad + centena == mil + decena:
        return True
    return False
def main():
    """Print every 4-digit number that satisfies condicion."""
    for candidate in range(1000, 10000):
        if condicion(candidate):
            print(candidate)
main() |
import numpy as np
from scipy.sparse import csr_matrix
from sklearn.linear_model import LogisticRegression
from GNNs import GNN
def load_data(data_file, graphs=None, device="cpu"):
    """Load pickled graphs and build the tensors needed by the GNN.

    Args:
        data_file: path to a pickle holding a list of
            (edges, nodes, label) tuples, where edges is an iterable of
            (i, j) index pairs and nodes an iterable of (index, type).
            Ignored when `graphs` is supplied.
        graphs: optional pre-loaded list of graphs (lets callers and
            tests skip the pickle file).
        device: torch device for the feature tensors (the original code
            referenced an undefined global `device`; defaults to CPU).

    Returns:
        (adj_lists, features, labels, node2index, label2index) -- the
        original version built these but never returned them.
    """
    # local imports: pickle/defaultdict/torch were used but never
    # imported in this module
    import pickle
    from collections import defaultdict
    import torch

    if graphs is None:
        with open(data_file, 'rb') as fh:
            graphs = pickle.load(fh)
    # first pass: collect the vocabularies of node types and labels
    node_types = set()
    label_types = set()
    for graph in graphs:
        nodes = list(graph[1])
        label = graph[2]
        node_types |= set(x[1] for x in nodes)
        label_types.add(label)
    node2index = {n: i for i, n in enumerate(node_types)}
    label2index = {l: i for i, l in enumerate(label_types)}
    # second pass: build adjacency sets, one-hot features and labels
    adj_lists = []
    features = []
    labels = torch.zeros(len(graphs), len(label_types))
    for graph in graphs:
        edges = list(graph[0])
        nodes = list(graph[1])
        label = graph[2]
        adjlist = defaultdict(set)
        feature = torch.zeros(len(nodes), len(node_types))
        for i, j in edges:
            adjlist[i].add(j)
            adjlist[j].add(i)
        for i, n in nodes:
            feature[i, node2index[n]] = 1
        # len(adj_lists) is the row for the current graph (append below)
        labels[len(adj_lists), label2index[label]] = 1
        adj_lists.append(adjlist)
        features.append(feature.to(device))
    return adj_lists, features, labels, node2index, label2index
if __name__ == "__main__":
load_data("graphsage/data/fda_data.pkl") |
import numpy as np
import pandas as pd
from ols import OLS
# from ..utils.log import logger
def load_data():
    """Read the regression train/test CSVs and split off the OUTCOME target.

    Returns:
        (X_train, y_train, X_test, y_test)
    """
    train = pd.read_csv("data/reg_train.csv")
    test = pd.read_csv("data/reg_test.csv")
    X_train = train.drop("OUTCOME", axis=1)
    y_train = train["OUTCOME"]
    X_test = test.drop("OUTCOME", axis=1)
    y_test = test["OUTCOME"]
    return X_train, y_train, X_test, y_test
def mse(y, y_hat):
    """Calculates the mean squared error between targets y and predictions y_hat."""
    residuals = y - y_hat
    return (np.transpose(residuals) @ residuals) / len(y)
def test_ols(X_train, y_train, X_test, y_test):
    """Fit an OLS model on the training split and print the test-set MSE."""
    model = OLS(X_train, y_train)
    model.fit()
    predictions = model.predict(X_test)
    print("Mean Squared Error:", mse(y_test, predictions))
if __name__ == "__main__":
X_train, y_train, X_test, y_test = load_data()
test_ols(X_train, y_train, X_test, y_test) |
from django.shortcuts import render
from contatos.models import ContatoModel
from contatos.forms import AgendaForm
def contato(request):
    """List contacts and, on POST, create one from the submitted form.

    GET renders the contact list; POST validates AgendaForm, re-rendering
    with the bound form (and its errors) when invalid, or persisting a
    new ContatoModel when valid.
    """
    if request.method != 'POST':
        return render(request, 'contato.html',
                      {'contatos': ContatoModel.objects.all()})
    form = AgendaForm(request.POST)
    if form.is_valid():
        ContatoModel.objects.create(**form.cleaned_data)
        return render(request, 'contato.html',
                      {'contatos': ContatoModel.objects.all()})
    # invalid submission: include the bound form so errors are shown
    return render(request, 'contato.html',
                  {'contatos': ContatoModel.objects.all(), 'form': form})
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
newServer.py
The server is started when newServer.py is run from the command line:
> python newServer.py
See the usage string for more details.
> python newServer.py --help
@author: Yuhan Liu 802997 University of Melbourne
@version: 1.41
@Date: March - May, 2018
"""
import socket
import thread
import almath
import random
import sys
from naoqi import ALProxy
from naoqi import ALBroker
from naoqi import ALModule
import vision_definitions
import numpy as np
from CryClassifier import CryClassifier
from optparse import OptionParser
class OpenCVModule():
def __init__(self, IP, PORT, CameraID = 0, X=240, Y=320):
self._x = X
self._y = Y
self.ip= IP
self.port = PORT
self._videoProxy = None
self._cameraID = CameraID
#Configure YUV422 images
if self._x == 30 and self._y == 40:
self._resolution = vision_definitions.kQQQQVGA
self._fps = 30
elif self._x == 60 and self._y == 80:
self._resolution = vision_definitions.kQQQVGA
self._fps = 30
elif self._x == 120 and self._y == 160:
self._resolution = vision_definitions.kQQVGA
self._fps = 30
elif self._x == 240 and self._y == 320:
self._resolution = vision_definitions.kQVGA
self._fps = 11
elif self._x == 480 and self._y == 640:
self._resolution = vision_definitions.kVGA
self._fps = 2.5
elif self._x == 960 and self._y == 1280:
self._resolution = vision_definitions.k4VGA
self._fps = 0.5
else:
self._resolution = vision_definitions.kQVGA
self._fps = 30
self._colorSpace = vision_definitions.kBGRColorSpace
self._imgClient = ""
self._imgData = None
self._img = np.zeros((self._x, self._y, 3), np.uint8)
self._registerImageClient(IP, PORT)
def _registerImageClient(self, IP, PORT):
self._videoProxy = ALProxy("ALVideoDevice", IP, PORT)
for i in self._videoProxy.getSubscribers():
print self._videoProxy.unsubscribe(i)
self._imgClient = self._videoProxy.subscribeCamera("OpenCVModule", self._cameraID, self._resolution,
self._colorSpace, self._fps)
def unregisterImageClient(self):
if self._imgClient != "":
self._videoProxy.unsubscribe(self._imgClient)
def getImage(self):
try:
self._imgData = self._videoProxy.getImageRemote(self._imgClient)
b = np.asarray(bytearray(self._imgData[6]), dtype="uint8")
tmp_b = b.reshape((self._x, self._y, 3))
tmp_b[:,:,[0,2]] = tmp_b[:,:,[2,0]]
tmp_b = tmp_b.reshape(self._x*self._y*3)
return bytearray(tmp_b)
#self._img = b.reshape((self._x, self._y, 3))
except:
return 'ERROR'
def updateResolution(self,newX,newY):
self._x = newX
self._y = newY
if newX == 30 and newY == 40:
self._resolution = vision_definitions.kQQQQVGA
self._fps = 30
elif newX == 60 and newY == 80:
self._resolution = vision_definitions.kQQQVGA
self._fps = 30
elif newX == 120 and newY == 160:
self._resolution = vision_definitions.kQQVGA
self._fps = 30
elif newX == 240 and newY == 320:
self._resolution = vision_definitions.kQVGA
self._fps = 11
elif newX == 480 and newY == 640:
self._resolution = vision_definitions.kVGA
self._fps = 2.5
elif newX == 960 and newY == 1280:
self._resolution = vision_definitions.k4VGA
self._fps = 0.5
else:
self._resolution = vision_definitions.kQVGA
self._fps = 30
self.unregisterImageClient()
self._registerImageClient(self.ip,self.port)
class reactToTouchModule(ALModule):
    """NAO touch handler: maps head/hand touch events to playback of
    pre-loaded lullaby and fairy-tale MP3s.

    Relies on module-level globals created in mainProcess: `memory`
    (ALMemory) and `player` (ALAudioPlayer).
    """
    def __init__(self, name):
        ALModule.__init__(self, name)
        # register the touch-event callbacks on this module instance
        memory.subscribeToEvent("FrontTactilTouched", "reactToTouch", "onHeadFrontTouched")
        memory.subscribeToEvent("MiddleTactilTouched", "reactToTouch", "onHeadMiddleTouched")
        memory.subscribeToEvent("RearTactilTouched", "reactToTouch", "onHeadRearTouched")
        memory.subscribeToEvent("HandRightBackTouched", "reactToTouch", "onRightTouched")
        memory.subscribeToEvent("HandLeftBackTouched", "reactToTouch", "onLeftTouched")
        # counters advance by 0.5 per callback -- presumably each touch
        # fires twice (press + release); TODO confirm on hardware
        self.leftCount = 0
        self.rightCount = 0
        self.frontCount = 0
        self.middleCount = 0
        self.rearCount = 0
        # ALAudioPlayer file ids for the two playlists
        self.fairyList = []
        self.lullabyList = []
        player.unloadAllFiles()
        # fairy tales 01.mp3 .. 09.mp3 (zero-padded names)
        for i in range(9):
            i = i + 1
            filename= "/home/nao/fairy/0"+i.__str__()+".mp3"
            tmp = player.loadFile(filename)
            self.fairyList.append(tmp)
        # fairy tales 10.mp3 .. 15.mp3
        for i in range(10,16):
            filename= "/home/nao/fairy/"+i.__str__()+".mp3"
            tmp = player.loadFile(filename)
            self.fairyList.append(tmp)
        # lullabies "Lullaby (1).mp3" .. "Lullaby (10).mp3"
        for i in range(10):
            i = i + 1
            filename= "/home/nao/lullaby/Lullaby ("+i.__str__()+").mp3"
            tmp = player.loadFile(filename)
            self.lullabyList.append(tmp)
        #print self.fairyList
        #print self.lullabyList
        self.lullabyPlayID = self.lullabyList[0]
        self.fairyPlayID = self.fairyList[0]
    def onRightTouched(self, strVarName, value):
        """Right-hand touch: pick a random fairy tale and play it.

        NOTE(review): increments leftCount although this is the
        right-hand callback (and onLeftTouched uses rightCount) --
        possibly intentional mirroring; confirm.
        """
        player.stopAll()
        index = random.randint(0,len(self.fairyList) - 1)
        self.fairyPlayID = self.fairyList[index]
        #print self.fairyPlayID
        self.leftCount += 0.5
        if self.leftCount % 1 == 0:
            player.play(self.fairyPlayID)
    def onLeftTouched(self, strVarName, value):
        """Left-hand touch: toggle play/pause of the current fairy tale."""
        self.rightCount += 0.5
        #print self.rightCount
        if self.rightCount % 2 == 0:
            player.play(self.fairyPlayID)
        else:
            player.pause(self.fairyPlayID)
    def onHeadMiddleTouched(self, strVarName, value):
        """Middle head touch: toggle play/pause of the current lullaby."""
        self.middleCount += 0.5
        if self.middleCount % 2 == 1:
            player.play(self.lullabyPlayID)
        elif self.middleCount % 2 == 0:
            player.pause(self.lullabyPlayID)
    def onHeadFrontTouched(self, strVarName, value):
        """Front head touch: step to the previous lullaby and play it.

        NOTE(review): playback only starts when the new index is even --
        odd-numbered lullabies are selected but never started; confirm
        whether that is intended.
        """
        player.stopAll()
        pre_index = self.lullabyList.index(self.lullabyPlayID)
        index = 0
        if pre_index > 0:
            index = pre_index - 1
        self.lullabyPlayID = self.lullabyList[index]
        self.middleCount = 1
        #print self.lullabyPlayID
        if index % 2 == 0:
            player.play(self.lullabyPlayID)
    def onHeadRearTouched(self, strVarName, value):
        """Rear head touch: step to the next lullaby and play it.

        Same even-index playback caveat as onHeadFrontTouched.
        """
        player.stopAll()
        pre_index = self.lullabyList.index(self.lullabyPlayID)
        index = len(self.lullabyList) - 1
        if pre_index < len(self.lullabyList) - 1:
            index = pre_index + 1
        self.lullabyPlayID = self.lullabyList[index]
        self.middleCount = 1
        #print self.lullabyPlayID
        if index % 2 == 0:
            player.play(self.lullabyPlayID)
    def playASong(self,index):
        """Play lullaby number `index`, clamped to the playlist range."""
        player.stopAll()
        if index<0:
            index = 0
        if index>len(self.lullabyList) - 1:
            index = len(self.lullabyList) - 1
        try:
            self.lullabyPlayID = self.lullabyList[index]
            player.play(self.lullabyPlayID)
        except BaseException,err:
            print str(err)
    def playAStory(self,index):
        """Play fairy tale number `index`, clamped to the playlist range."""
        player.stopAll()
        if index<0:
            index = 0
        if index>len(self.fairyList) - 1:
            index = len(self.fairyList) - 1
        try:
            self.fairyPlayID = self.fairyList[index]
            player.play(self.fairyPlayID)
        except BaseException, err:
            print str(err)
    def stopAll(self):
        """Stop every track the player is currently playing."""
        player.stopAll()
    def pauseSong(self):
        """Pause the current lullaby (errors are logged, not raised)."""
        try:
            player.pause(self.lullabyPlayID)
        except BaseException, err:
            print str(err)
    def pauseStory(self):
        """Pause the current fairy tale (errors are logged, not raised)."""
        try:
            player.pause(self.fairyPlayID)
        except BaseException, err:
            print str(err)
    def replaySong(self):
        """Resume/replay the current lullaby (errors are logged, not raised)."""
        try:
            player.play(self.lullabyPlayID)
        except BaseException, err:
            print str(err)
    def replayStory(self):
        """Resume/replay the current fairy tale (errors are logged, not raised)."""
        try:
            player.play(self.fairyPlayID)
        except BaseException, err:
            print str(err)
class SoundReceiverModule(ALModule):
    """Streams robot microphone audio and runs cry detection.

    Buffers 100 audio callbacks' worth of channel-0 samples, classifies
    them with CryClassifier, and reacts through the module-level
    globals `tts`, `motionProxy` and `audio` set up in mainProcess.
    """
    def __init__( self, strModuleName):
        try:
            ALModule.__init__(self, strModuleName )
            self.BIND_PYTHON( self.getName(),"callback")
            self.outfile = None
            self.aOutfile = [None]*(4-1) # ASSUME max nbr channels = 4
            self.cryClf = CryClassifier()
            # number of processRemote callbacks accumulated so far
            self.count = 0
            # buffered channel-0 samples awaiting classification
            self.tmpAudioData = np.empty(0)
        except BaseException, err:
            print str(err)
    def __del__( self ):
        #print "INF: abcdk.SoundReceiverModule.__del__: cleaning everything"
        self.stop()
    def start( self ):
        """Subscribe to ALAudioDevice (front channel, 48 kHz, interleaved)."""
        nNbrChannelFlag = 3 # ALL_Channels: 0, AL::LEFTCHANNEL: 1, AL::RIGHTCHANNEL: 2; AL::FRONTCHANNEL: 3 or AL::REARCHANNEL: 4.
        nDeinterleave = 0
        self.nSampleRate = 48000
        audio.setClientPreferences( self.getName(), self.nSampleRate, nNbrChannelFlag, nDeinterleave )
        audio.subscribe( self.getName() )
        print( "INF: SoundReceiver: started!" )
    def stop( self ):
        """Unsubscribe from the audio device and close any open output file."""
        #print( "INF: SoundReceiver: stopping..." )
        audio.unsubscribe( self.getName() )
        #print( "INF: SoundReceiver: stopped!" )
        if( self.outfile != None ):
            self.outfile.close()
    def processRemote( self, nbOfChannels, nbrOfSamplesByChannel, aTimeStamp, buffer ):
        """Audio callback: accumulate channel-0 samples; every 100 calls
        classify the buffer and raise/lower the right arm accordingly.

        NOTE(review): the classifier is invoked with sample rate 8000
        although the subscription requests 48000 -- confirm intended.
        """
        # deinterleave into (channels, samples); 'F' order matches device layout
        aSoundDataInterlaced = np.fromstring( str(buffer), dtype=np.int16 )
        aSoundData = np.reshape( aSoundDataInterlaced, (nbOfChannels, nbrOfSamplesByChannel), 'F' )
        self.count += 1
        self.tmpAudioData = np.append(self.tmpAudioData, aSoundData[0].astype(float))
        if self.count == 100:
            try:
                result = self.cryClf.detectCryByRawData(self.tmpAudioData, 8000)
                if result == "Cry":
                    tts.say("Do not cry, baby, mom is coming. What if I play a song for you?")
                    tts.say("Or you can touch my right arm. I can tell you a fairy tale.")
                    # raise the right arm as an invitation gesture
                    motionProxy.setAngles('RShoulderPitch', 10.0 * almath.TO_RAD, 0.1)
                    motionProxy.setAngles('RWristYaw', -70.0 * almath.TO_RAD, 0.1)
                else:
                    # relax the arm back to a neutral pose
                    motionProxy.setAngles('RShoulderPitch', 75.0 * almath.TO_RAD, 0.1)
                    motionProxy.setAngles('RWristYaw', 0.0 * almath.TO_RAD, 0.1)
            except BaseException, err:
                print str(err)
            # reset the accumulation window
            self.count = 0
            self.tmpAudioData = np.empty(0)
def child_connection(index, sock, connection):
    """Handle one client connection in its own thread.

    Reads a single command (<= 1024 bytes) and dispatches on its prefix:
      Monitor          - send one camera frame, zero-padded to a
                         multiple of 1460 bytes
      Story<i>         - play fairy tale i
      Song<i>          - play lullaby i
      Volume<v>        - set TTS/player volume to v percent
      Resolution<Y*X>  - switch the camera resolution
      Stop             - stop all playback
      Say:<text>       - speak <text>

    Uses the module-level globals videoStream, reactToTouch, tts and
    player; closes the connection and exits the thread when done.
    """
    buf = connection.recv(1024)
    print ("Get value %s from connection %d: " % (buf, index))
    buf = str(buf)
    if buf=='Monitor':
        for i in range(1):
            tmp = videoStream.getImage()
            length = len(tmp) #230400
            # pad so the payload is an exact multiple of the 1460-byte
            # segment size the client reads
            remainder = length % 1460 #1180
            complement = 1460 - remainder #280
            tmp += bytearray('0' * complement)
            print len(tmp)
            connection.send(tmp)
        #videoStream.unregisterImageClient()
    elif buf.startswith("Story"):
        tmpIndex = int(buf[5:])
        reactToTouch.playAStory(tmpIndex)
        print 'Play Story'
    elif buf.startswith('Song'):
        tmpIndex = int(buf[4:])
        reactToTouch.playASong(tmpIndex)
        print 'Play Song'
    elif buf.startswith('Volume'):
        try:
            newVolume = int(buf[6:])
            volumeFloat = float(newVolume) / 100.0
            tts.setVolume(volumeFloat)
            player.setVolume(volumeFloat)
        except BaseException,err:
            print str(err)
        print 'Volume'
    elif buf.startswith('Resolution'):
        # payload format "<cols>*<rows>", e.g. "320*240"
        newValue=buf[10:]
        valueList = newValue.split('*')
        newY= int(valueList[0])
        newX= int(valueList[1])
        videoStream.updateResolution(newX,newY)
        print 'Resolution'
    elif buf=='Stop':
        reactToTouch.stopAll()
        print 'Stop'
    elif buf.startswith('Stop'):
        print 'Stop?'
    elif buf.startswith('Say:'):
        print 'Say'
        try:
            tts.say(buf[4:])
        except BaseException,err:
            print str(err)
    connection.close()
    thread.exit_thread()
def mainProcess(robotIP,robotPort = 9559,hostIP,hostPort = 8000):
#HOST_IP = "192.168.1.13"
#HOST_IP = "192.168.1.100"
HOST_IP = hostIP
#HOST_PORT = 8000
HOST_PORT = hostPort
#ROBOT_IP = "192.168.1.27"
#ROBOT_IP = "192.168.1.103"
ROBOT_IP = robotIP
#ROBOT_PORT = 9559
ROBOT_PORT = robotPort
global tts
tts = ALProxy("ALTextToSpeech", ROBOT_IP, ROBOT_PORT)
global player
player = ALProxy("ALAudioPlayer", ROBOT_IP, ROBOT_PORT)
global videoStream
videoStream = OpenCVModule(ROBOT_IP, ROBOT_PORT, CameraID=0, X=240, Y=320)
global motionProxy
motionProxy = ALProxy("ALMotion", ROBOT_IP, ROBOT_PORT)
motionProxy.wakeUp()
global memory
memory = ALProxy("ALMemory", ROBOT_IP, ROBOT_PORT)
global audio
audio = ALProxy("ALAudioDevice", ROBOT_IP, ROBOT_PORT)
myBroker = ALBroker("myBroker", "0.0.0.0", 0, ROBOT_IP, ROBOT_PORT)
global reactToTouch
reactToTouch = reactToTouchModule("reactToTouch")
global SoundReceiver
SoundReceiver = SoundReceiverModule("SoundReceiver")
SoundReceiver.start()
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind((HOST_IP, HOST_PORT))
sock.listen(10)
print "listening"
index = 0
try:
while True:
connection, address = sock.accept()
index += 1
thread.start_new_thread(child_connection, (index, sock, connection))
except KeyboardInterrupt:
sock.close()
videoStream.unregisterImageClient()
myBroker.shutdown()
sys.exit(0)
def readCommand(argv):
    """Parse the command line into the keyword-argument dict for mainProcess.

    Raises:
        Exception: if unrecognized positional arguments remain.
    """
    usageStr = """
    USAGE: python newServer.py <options>
    OPTIONS: --robotIP the IP said by robot.
             --robotPort int value, typically 9559.
             --hostIP the IP of the server, try ipconfig on Windows or ifconfig on Mac.
             --hostPort int value, can be specified, typically 8000.
    """
    parser = OptionParser(usageStr)
    # (flag, extra add_option kwargs); dest is derived from the flag name
    option_specs = (
        ('--robotIP', dict(default="192.168.0.0")),
        ('--robotPort', dict(type='int', default=9559)),
        ('--hostIP', dict(default="192.168.0.1")),
        ('--hostPort', dict(type='int', default=8000)),
    )
    for flag, extra in option_specs:
        parser.add_option(flag, dest=flag[2:], **extra)
    options, otherjunk = parser.parse_args(argv)
    if len(otherjunk) != 0:
        raise Exception('Command line input not understood: ' + str(otherjunk))
    return {key: getattr(options, key)
            for key in ('robotIP', 'robotPort', 'hostIP', 'hostPort')}
if __name__ == '__main__':
args = readCommand( sys.argv[1:] )
mainProcess( **args )
pass |
#!/usr/bin/python
"""
This code parses the MUD JSON file and extracts the ACL.
"""
import json
import socket
def ACL():
"Parse the JSON MUD file to extract Match rules"
with open('/usr/local/etc/controller/lighting-example.json') as data_file:
d = json.load(data_file)
#print(d)
acl = d["ietf-access-control-list:access-lists"]["acl"]
#print(acl)
#inbound rules#################################################################
idirection = str(acl[0]["acl-name"])
#print(idirection)
iace = acl[0]["access-list-entries"]["ace"][0]
#print(iace)
#input action
iact = str(iace["actions"]["permit"])
print(iact)
if iact == '[None]':
iact = 'ACCEPT'
print(iact)
# Matching rules
imatch = iace["matches"]
#print(imatch)
#inbound port
iport = str(imatch["destination-port-range"]["lower-port"])
#print(iport)
#Source IP
sip = imatch["ietf-acl-dnsname:source-hostname"]
#print(sip)
host = sip.split("//",1)[1]
host = host.split("/", 1)[0]
#print(host)
TranslatedIp = socket.gethostbyname(host)
#print(TranslatedIp)
#protocol
iproto = str(imatch["protocol"])
#print(iproto)
################################################################################
#print("Direction: "+ idirection)
#print("Drop Action: " + act)
#print("Port: " + iport)
#print("Source IP: " + TranslatedIp)
#print("Protocol:" + iproto)
#outbound rules#################################################################
odirection = str(acl[1]["acl-name"])
#print(odirection)
oace = acl[1]["access-list-entries"]["ace"][0]
#print(oace)
#action
oact = str(iace["actions"]["permit"])
#print(oact)
if oact == '[None]':
oact = 'ACCEPT'
#print(oact)
# Matching rules
omatch = oace["matches"]
#print(omatch)
#outbound port
oport = str(omatch["source-port-range"]["lower-port"])
#print(oport)
#Destination IP
dip = omatch["ietf-acl-dnsname:destination-hostname"]
#print(dip)
host = dip.split("//",1)[1]
host = host.split("/", 1)[0]
#print(host)
TranslatedIp = str(socket.gethostbyname(host))
#print(TranslatedIp)
#protocol
oproto = str(omatch["protocol"])
#print(oproto)
#################################################################################
#print("------------------------------------------------------------------------")
#print("Direction: "+ odirection)
#print("Drop Action: " + act)
#print("Port: " + oport)
#print("Destination IP: " + str(TranslatedIp))
#print("Protocol:" + oproto)
#return (TranslatedIp,iport,act,iproto)
inbound = []
inbound.append(TranslatedIp)
inbound.append(iport)
inbound.append(iact)
inbound.append(iproto)
#print(inbound)
outbound = []
outbound.append(TranslatedIp)
outbound.append(oport)
outbound.append(oact)
outbound.append(oproto)
#print(outbound)
return inbound,outbound
if __name__ == '__main__':
    # BUG FIX: removed the call to setLogLevel('info') -- that name is
    # never imported or defined in this file (it is a Mininet helper),
    # so running the script raised NameError before ACL() was reached.
    ACL()
|
from glob import glob
import os
import shutil as sh
import numpy as np
from tqdm import tqdm
from bs4 import BeautifulSoup as bs
import argparse
# Command-line interface: both paths are required positional arguments,
# so importing this script without them exits with a usage error.
DESCRIPTION = ("Convert the HTML generated by the Data 8 textbook repository "
               "into a form that can be hosted with the Data 8 course website "
               "template.")
parser = argparse.ArgumentParser(description=DESCRIPTION)
parser.add_argument("path_textbook", help="Path to the Data 8 textbook folder.")
parser.add_argument("path_course_website", help="Path to the Data 8 course website you've forked.")
args = parser.parse_args()
path_textbook = args.path_textbook
# all generated files are written under the forked course-website tree
path_out = args.path_course_website
def insert_meta(lines, vals):
    """Prepend a Jekyll front-matter block built from vals to lines.

    Mutates lines in place (each inserted entry gets a trailing
    newline, followed by one blank line) and returns the same list.
    """
    header = ['---'] + vals + ['---', '']
    lines[:0] = [entry + '\n' for entry in header]
    return lines
def take_inbetween(line, bw='()'):
    """Return the text between the last bw[0] and the next bw[1] in line."""
    opener, closer = bw[0], bw[1]
    tail = line.rsplit(opener, 1)[-1]
    return tail.split(closer, 1)[0]
def create_hub_link(filepath, binder=False):
    """Build an interact URL for *filepath*.

    With binder=True the link targets mybinder.org directly; otherwise a
    Jekyll-templated JupyterHub git-pull redirect is produced.
    """
    if binder is True:
        return ('https://mybinder.org/v2/gh/data-8/textbook/gh-pages?'
                + 'filepath={}'.format(filepath))
    base = '{{ site.hub_url }}/hub/user-redirect/git-pull?repo={{ site.textbook_url }}'
    suffix = '&branch={{ site.textbook_branch }}' + '&subPath={}'.format(filepath)
    return base + suffix
# Copy over readme -> textbook/index.md with Jekyll front matter prepended.
print('Copying course readme...')
path_readme = os.path.join(path_textbook, 'README.md')
with open(path_readme, 'r') as ff:
    lines = ff.readlines()
meta_add = ['layout: single', 'title: Introduction', 'sidebar:', '  title: "Textbook Navigation"', '  nav: sidebar-textbook']
lines = insert_meta(lines, meta_add)
with open(os.path.join(path_out, 'textbook', 'index.md'), 'w') as ff:
    ff.writelines(lines)
# Copy over summary (skip the first two lines of SUMMARY.md -- header lines).
print('Copying course summary...')
path_summary = os.path.join(path_textbook, 'SUMMARY.md')
with open(path_summary, 'r') as ff:
    lines = ff.readlines()
lines = lines[2:]
from collections import OrderedDict
# Copy over the summary into a Jekyll sidebar structure.
# `summary` maps section header -> list of (title, link) tuples; the header's
# own entry is stored first and later becomes the parent item (vals[0]).
print('Generating textbook sidebar structure...')
summary = OrderedDict()
for line in lines:
    title = take_inbetween(line, '[]')
    link = take_inbetween(line, '()')
    # A '*' at column 0 marks a top-level (section header) entry.
    level = line.find('*')
    if level == 0:
        header = title
        # Create a new title section
        if not summary.get(header):
            summary[header] = []
    new_link = link.replace('chapters/', '/textbook/')
    new_link = os.path.splitext(new_link)[0]
    summary[header].append((title, new_link))
# Emit the sidebar as navigation.yml lines: parent entry + indented children.
navigation_yml = ['sidebar-textbook:']
for key, vals in summary.items():
    title, link = vals[0]
    navigation_yml += ['  - title: "{}"'.format(title.replace('"', "'")),
                       '    url: {}'.format(link),
                       '    children:']
    for title, link in vals[1:]:
        # Nesting depth from the "1.2.3" numbering prefix of the title.
        level = len(title.split(' ')[0].split('.')) - 1
        to_add = ['      - title: "{}"'.format(title.replace('"', "'")),
                  '        url: {}'.format(link),
                  '        class: "level_{}"'.format(level)]
        navigation_yml += to_add
navigation_yml = [ii+'\n' for ii in navigation_yml]
# Replace old sidebar section in navigation: everything from the
# 'sidebar-textbook' marker onward is dropped and regenerated.
print('Updating the course website sidebar...')
with open(os.path.join(path_out, '_data', 'navigation.yml'), 'r') as ff:
    lines = ff.readlines()
ix_nav = np.where(['sidebar-textbook' in line for line in lines])[0][0]
lines = lines[:ix_nav]
lines += navigation_yml
with open(os.path.join(path_out, '_data', 'navigation.yml'), 'w') as ff:
    ff.writelines(lines)
# Copy over markdown files, rewriting chapter paths and image links for Jekyll.
print('Copying over textbook markdown files and modifying for Jekyll...')
markdown_files = glob(os.path.join(path_textbook, './chapters/**/*.md'), recursive=True)
for i_md in markdown_files:
    path_new = os.path.join(path_out, i_md.replace('chapters/', 'textbook/'))
    with open(i_md, 'r') as ff:
        lines = ff.readlines()
    # Create metadata
    insert_meta(lines, ['layout: single', 'sidebar:', '  nav: sidebar-textbook'])
    # Path for includes
    for ii, line in enumerate(lines):
        if 'notebooks-html' in line:
            # Update textbook jupyter notebook images
            new_line = '{% include ' + os.path.join('notebooks-html', line.split('notebooks-html/')[-1])
            lines[ii] = new_line.replace('"', '')
        elif '](/images' in line:
            # Update textbook images
            new_line = line.replace('](/images', ']({{ site.baseurl }}/images')
            lines[ii] = new_line
    if not os.path.isdir(os.path.dirname(path_new)):
        os.makedirs(os.path.dirname(path_new))
    with open(path_new, 'w') as ff:
        ff.writelines(lines)
# -- Copy over the notebooks-html pages --
print('Copying over generated HTML notebook pages...')
if os.path.exists(os.path.join(path_out, '_includes', 'notebooks-html')):
    sh.rmtree(os.path.join(path_out, '_includes', 'notebooks-html'))
sh.copytree(os.path.join(path_textbook, 'notebooks-html/'),
            os.path.join(path_out, '_includes', 'notebooks-html'))
# Update the HTML to use the site URL
print('Modifying generated notebook HTML for Jekyll...')
html_files = glob(os.path.join(path_out, '_includes', 'notebooks-html', '*.html'))
for i_html in html_files:
    with open(i_html, 'r') as ff:
        text = ff.read()
    newtext = text.replace('src="/notebooks-images', 'src="{{ site.baseurl }}/notebooks-images')
    newtext = newtext.replace('src="/images', 'src="{{ site.baseurl }}/images')
    with open(i_html, 'w') as ff:
        ff.write(newtext)
# Notebook Images: mirror the generated image folders into the site root.
print('Copying generated notebook images...')
path_out_ntbk_img = os.path.join(path_out, 'notebooks-images')
if os.path.exists(path_out_ntbk_img):
    sh.rmtree(path_out_ntbk_img)
sh.copytree(os.path.join(path_textbook, 'notebooks-images'), path_out_ntbk_img)
# Images
print('Copying embedded markdown images...')
path_out_text_img = os.path.join(path_out, 'images')
if os.path.exists(path_out_text_img):
    sh.rmtree(path_out_text_img)
sh.copytree(os.path.join(path_textbook, 'images'), path_out_text_img)
# Replace interact links with jekyll-specific ones
print('Modifying interact links to work with Jekyll...')
for i_html in tqdm(html_files):
    with open(i_html, 'r') as ff:
        html_bs = bs(ff.read(), 'html5lib')
    # Find the interact link and skip if it's not there
    interact = html_bs.find_all('a', attrs={'class': 'interact-button'})
    if len(interact) == 0:
        continue
    interact = interact[0]
    # Grab the filepath, then replace the link with a jupyterhub jekyll link
    filepath = interact.attrs['href'].split('filepath=')[-1]
    new_link = create_hub_link(filepath)
    interact.attrs['href'] = new_link
    # Write the updated HTML
    with open(i_html, 'w') as ff:
        ff.write(str(html_bs))
print('Done!')
|
import app
import ui
import os
# UI colors (ARGB hex).
COLOR_NORMAL = 0xffa08784
COLOR_HOVER = 0xfff8d090
COLOR_LOGIN_TEXT = 0xffcbab9d
# Login redirect links.
# NOTE: the former module-scope `global REGBUTTON` / `global FORGOTPASS`
# statements were removed -- `global` is a no-op at module level.
REGBUTTON = "https://just4metin.ro/"
FORGOTPASS = "https://twix-work.com/"
# Login interface placeholder/label strings (Romanian UI text -- do not translate).
ID_LOGIN = "Numele Contului / ID"
PW_LOGIN = "Parola Contului / PW"
LOGIN_NOACCOUNT = "Nu ai cont?"
SAVE_EMPTY = "Camp Gol"
# Save feedback messages.
SAVE_SUCCES = "Informatiile au fost salvate"
SAVE_FAIL = "Informatiile nu au fost salvate"
# Delete feedback message.
DELETE_FAIL = "Informatiile nu au fost sterse"
# Tooltip texts.
SAVE_BUTTON = "Mai intai trebuie sa introduceti datele, dupa apasati pe acest buton pentru a fi salvate."
DELETE_BUTTON = "Daca doriti sa faceti loc pentru alt cont, apasati pe acest buton."
PRESS_KEY = "Apasa tasta indicata din imagine pentru a te conecta mai repede!."
# Login server connection settings (game server IP, channel and auth ports).
IP = "188.212.103.71"
CH1 = 21000
CH2 = 22000
CH3 = 24000
CH4 = 25000
AUTH = 31009
# End of login connection settings.
import logging
import operator as op
import typing as t
from functools import reduce
from itertools import product, filterfalse, starmap, chain, combinations
from warnings import warn
import numpy as np
import pandas as pd
from protmc.common.base import AminoAcidDict
from protmc.common.utils import scale
from .base import EdgeGene, ParsingParams, ParsingResult, SeqGene
def _filter_bounds(df: pd.DataFrame, var_name: str, bound: t.Optional[float] = None, lower: bool = True):
if bound is None:
return np.ones(len(df)).astype(bool)
idx = df[var_name] > bound if lower else df[var_name] < bound
sign = '>' if lower else '<'
logging.info(f'There are {idx.sum()} observations with {var_name} {sign} {bound}')
return idx
def filter_bounds(df: pd.DataFrame, params: ParsingParams) -> pd.DataFrame:
    """
    Filter the "affinity" DataFrame based on stability and affinity bounds provided in `params`.

    Per-variable lower/upper bounds are applied first, then the joint stability
    bounds (which require both apo and holo stability to pass).
    """
    cols = params.Results_columns
    df = df.copy()
    specs = [(cols.affinity, params.Affinity_bounds),
             (cols.stability_apo, params.Stability_apo_bounds),
             (cols.stability_holo, params.Stability_holo_bounds)]
    masks = [_filter_bounds(df, col, bounds.lower, True) for col, bounds in specs]
    masks.extend(_filter_bounds(df, col, bounds.upper, False) for col, bounds in specs)
    idx = reduce(op.and_, masks)
    df = df[idx]
    logging.info(f'Filtered to {idx.sum()} observations according to single-variable bounds')
    # Filter by joint stability bounds
    lo, hi = params.Stability_joint_bounds
    if lo is not None:
        idx = (df[cols.stability_apo] > lo) & (df[cols.stability_holo] > lo)
        df = df[idx]
        logging.info(f'Filtered to {idx.sum()} records with stability_apo & stability_holo > {lo}')
    if hi is not None:
        idx = (df[cols.stability_apo] < hi) & (df[cols.stability_holo] < hi)
        df = df[idx]
        logging.info(f'Filtered to {idx.sum()} records with stability_apo & stability_holo < {hi}')
    return df
def map_proto_states(df: pd.DataFrame, params: ParsingParams) -> pd.DataFrame:
    """
    Replace sequences in `df` by mapping protonated amino acids to their unprotonated versions.

    Rows that collapse onto the same (position, sequence) pair after the mapping
    are aggregated by averaging their stability and affinity values.
    """
    df = df.copy()
    proto_map = AminoAcidDict().proto_mapping
    cols = params.Results_columns

    def _map(seq: str):
        # Characters without a proto mapping pass through unchanged.
        return "".join(proto_map.get(c, c) for c in seq)

    df[cols.seq_subset] = df[cols.seq_subset].apply(_map)
    agg_spec = {col: 'mean' for col in (cols.stability_apo, cols.stability_holo, cols.affinity)}
    df = df.groupby([cols.pos, cols.seq_subset], as_index=False).agg(agg_spec)
    return df
def prepare_df(params: ParsingParams) -> t.Tuple[pd.DataFrame, pd.DataFrame]:
    """
    Parses a DataFrame, typically an output of AffinitySearch, to be used in the GA.
    The workflow (therefore, the end result) depends entirely on the provided params.
    See the `ParsingParams` documentation for more details.
    :param params: `ParsingParams` dataclass instance.
    :return: Parsed df ready to be sliced into a `GenePool`. The second element is the DataFrame with singletons.
    """
    cols = params.Results_columns
    # Accept either an in-memory DataFrame or a path to a TSV file.
    if isinstance(params.Results, pd.DataFrame):
        df = params.Results[list(cols)].dropna().copy()
    elif isinstance(params.Results, str):
        df = pd.read_csv(params.Results, sep='\t')[list(cols)].dropna()
    else:
        raise TypeError('Unsupported type of the `Results` attribute')
    logging.info(f'Read initial DataFrame with {len(df)} (non-NaN) records')
    # Which positions were already present? We must know before any filtering
    pos_covered = set(df[cols.pos])
    if params.Exclude_pairs is not None:
        pos_covered |= {f'{p1}-{p2}' for p1, p2 in params.Exclude_pairs}
    # Drop records whose (position, amino acid type) combination is excluded
    # at either the first or the second position of the pair.
    if params.Exclude_types:
        ps = {str(x[0]) for x in params.Exclude_types}
        ts = {x[1] for x in params.Exclude_types}
        p1_, p2_ = map(
            lambda i: list(zip(df[cols.pos].apply(lambda x: x.split('-')[i]),
                               df[cols.seq_subset].apply(lambda x: x[i]))),
            [0, 1])
        idx1, idx2 = map(
            lambda p: np.array([x in ps and y in ts for x, y in p]),
            [p1_, p2_])
        df = df[~(idx1 | idx2)]
    df = map_proto_states(df, params)
    logging.info(f'Mapped proto states. Records: {len(df)}')
    # Filter pairs
    # A "singleton" record has the same position on both sides, e.g. "5-5".
    def is_singleton(p: str):
        return len(set(p.split('-'))) == 1
    singletons_idx = df[cols.pos].apply(is_singleton)
    pairs = df[~singletons_idx].copy()
    singletons = df[singletons_idx].copy()
    pairs['is_original'] = True
    logging.info(f'Identified {len(singletons)} singletons records and {len(pairs)} pairs records')
    if params.Use_singletons:
        if not len(singletons):
            warn('No singletons; check the input table with the results')
            df = pairs
        else:
            # Derive synthetic pairs from all ordered singleton combinations:
            # scores are additive; only pairs not already covered are kept.
            derived_pairs = pd.DataFrame(  # Wrap into df
                filterfalse(  # Leave only new pairs
                    lambda r: r[0] in pos_covered,
                    starmap(  # Wrap into columns
                        lambda p1, p2: (f'{p1[0]}-{p2[0]}', f'{p1[1][0]}{p2[1][0]}',
                                        p1[2] + p2[2], p1[3] + p2[3], p1[4] + p2[4]),
                        filter(  # First position is, by convention, lower than the second
                            lambda x: int(x[0][0]) < int(x[1][0]),
                            product(zip(  # Make combinations of singletons' with themselves
                                singletons[cols.pos].apply(lambda x: x.split('-')[0]),
                                singletons[cols.seq_subset],
                                singletons[cols.stability_apo],
                                singletons[cols.stability_holo],
                                singletons[cols.affinity]), repeat=2)))),
                columns=[cols.pos, cols.seq_subset, cols.stability_apo, cols.stability_holo, cols.affinity])
            derived_pairs['is_original'] = False
            logging.info(f'Derived {len(derived_pairs)} pairs from singletons.')
            df = pd.concat([derived_pairs, pairs]).sort_values(list(cols))
            logging.info(f'Merged {len(pairs)} existing and {len(derived_pairs)} derived pairs. Records: {len(df)}')
    else:
        df = pairs
    # Coupling = |pair affinity - sum of the two singleton affinities|;
    # NaN when either singleton score is missing.
    if params.Use_couplings and len(singletons):
        score_mapping = {(pos.split('-')[0], seq[0]): aff for _, pos, seq, aff in singletons[
            [cols.pos, cols.seq_subset, cols.affinity]].itertuples()}
        df['coupling'] = [
            round(abs(aff - score_mapping[(pos.split('-')[0], seq[0])] - score_mapping[(pos.split('-')[1], seq[1])]), 4)
            if (pos.split('-')[0], seq[0]) in score_mapping and (pos.split('-')[1], seq[1]) in score_mapping
            else np.nan
            for _, pos, seq, aff in df[[cols.pos, cols.seq_subset, cols.affinity]].itertuples()]
        failed_idx = df['coupling'].isna()
        num_failed = int(failed_idx.sum())
        if num_failed:
            failed_pairs = ",".join(f'{x.pos}_{x.seq_subset}' for x in df[failed_idx].itertuples())
            logging.warning(f'There are {num_failed} pairs with no singleton(s) score(s): {failed_pairs}')
            if params.Default_coupling is not None:
                df.loc[failed_idx, 'coupling'] = params.Default_coupling
                logging.info(f'Set default value {params.Default_coupling} on '
                             f'{num_failed} pairs with no singleton(s) score(s)')
            else:
                df = df[~failed_idx]
                logging.info(f'Excluded {num_failed} pairs with no singleton(s) score(s)')
    else:
        df['coupling'] = np.nan
    # Only now we exclude positions; this solves the issue of using singletons
    # in the context of possibly failed affinity calculations. Indeed, if the
    # calculation has failed, using pairs derived from singletons for such
    # positions would be wrong.
    if params.Exclude_pairs:
        df = df[df[cols.pos].apply(
            lambda p: tuple(map(int, p.split('-'))) not in params.Exclude_pairs)]
        logging.info(f'Excluded pairs {params.Exclude_pairs}. Records: {len(df)}')
    df = filter_bounds(df, params)
    # Cap affinity at certain values
    l, h = params.Affinity_cap
    if l is not None:
        idx = df[cols.affinity] < l
        df.loc[idx, cols.affinity] = l
        logging.info(f'Capped {idx.sum()} affinity records at lower bound {l}')
    if h is not None:
        idx = df[cols.affinity] > h
        logging.info(f'Capped {idx.sum()} affinity records at upper bound {h}')
        df.loc[idx, cols.affinity] = h
    # Convert scores: optional sign flip, then optional min-max scaling.
    scores = np.array(df[cols.affinity])
    if params.Reverse_score:
        scores = -scores
        logging.info(f'Reversed affinity scores sign')
    l, h = params.Scale_range
    if l is not None and h is not None:
        df[cols.affinity] = np.round(scale(scores, l, h), 4)
        logging.info(f'Scaled affinity scores between {l} and {h}')
    return df, singletons
def prepare_graph_pool(df: pd.DataFrame, params: ParsingParams) -> t.Tuple[EdgeGene, ...]:
    """
    Prepare the gene pool -- a tuple of `EdgeGene`s.

    Each row contributes one gene: the "P1-P2" position string is split into
    two ints, the two-letter sequence into its characters.
    """
    cols = params.Results_columns
    genes = []
    for _, pos, seq, score, coupling in df[
            [cols.pos, cols.seq_subset, cols.affinity, 'coupling']].itertuples():
        p1, p2 = pos.split('-')
        genes.append(EdgeGene(int(p1), int(p2), seq[0], seq[1], score, coupling))
    return tuple(genes)
def _estimate(seq: t.Tuple[t.Tuple[str, str], ...],
mapping: t.Mapping[t.Tuple[t.Tuple[str, str], ...], float],
params: ParsingParams,
size: int) -> float:
"""
:param seq: A sequence in the form of (('AA', 'Pos'), ...)
:param mapping: Mapping of the sequences to energies.
:param params: A dataclass holding parsing parameters.
:param size: Max size of larger sequences to start the recursion.
:return: Sum (!) of sub-sequences' energies.
"""
if len(seq) == 1:
try:
return mapping[seq]
except KeyError:
warn(f'Seq {seq} could not be estimated')
return 0
combs = combinations(seq, size)
s = 0
for c in combs:
c = tuple(c)
try:
s += mapping[c]
except KeyError:
s += _estimate(c, mapping, params, size - 1)
return s
def _aff_mapping(df: pd.DataFrame, params: ParsingParams) -> t.Dict[t.Tuple[t.Tuple[str, str], ...], float]:
"""
Create the mapping from sequences with lengths <= `params.Seq_size_threshold`
in the form of (('AA', 'Pos'), ...)to their affinities.
"""
cols = params.Results_columns
df = df[df[cols.seq_subset].apply(
lambda s: len(s) <= params.Seq_size_threshold)][
[cols.seq_subset, cols.pos, cols.affinity]]
return {tuple(zip(seq, pos.split('-'))): s for _, seq, pos, s in df.itertuples()}
def estimate_seq_aff(df, params):
    """
    Recursively estimate larger sequence's energy from energies of smaller sequences (up to singletons).
    Warning! The current estimation strategy has been verified only on singletons.
    :param df: A `DataFrame` complying the same standards as required by `params`
        (i.e., having the columns specified in the `Results_columns` attribute).
    :param params: A dataclass holding parsing parameters.
    :return: A copy of the `DataFrame` with a new column "affinity_estimate".
    """
    df = df.copy()
    cols = params.Results_columns
    mapping = _aff_mapping(df, params)
    estimates = []
    for _, s, p in df[[cols.seq_subset, cols.pos]].itertuples():
        key = tuple(zip(s, p.split('-')))
        estimates.append(_estimate(key, mapping, params, params.Seq_size_threshold))
    df['affinity_estimate'] = estimates
    return df
def prepare_singletons(df, params):
    """
    Change "seq" and "pos" columns of singletons from the form "AA", "P-P" to the form "A", "P".
    :param df: A `DataFrame` with columns specified in `params.Results_columns`.
    :param params: A dataclass holding parsing parameters.
    :return: A copy of the `DataFrame`, with changed "seq" and "pos" columns for singletons (if any).
    """
    df = df.copy()
    cols = params.Results_columns

    def _is_singleton(pos: str) -> bool:
        # "1-1" -> {"1"} -> singleton; "1-2" -> {"1", "2"} -> pair.
        return len(set(pos.split('-'))) == 1

    mask = df[cols.pos].apply(_is_singleton)
    df.loc[mask, cols.seq_subset] = df.loc[mask, cols.seq_subset].apply(lambda s: s[0])
    df.loc[mask, cols.pos] = df.loc[mask, cols.pos].apply(lambda p: p.split('-')[0])
    return df
def prepare_seq_df(params):
    """
    Prepares a "seq" `DataFrame` with rows ready to be wrapped into `SeqGene`s.
    The workflow (therefore, the end result) depends entirely on the provided params.
    See the `ParsingParams` documentation for more details.
    :param params: A dataclass holding parsing parameters; `params.Results` must be a `DataFrame`.
    :return: The filtered `DataFrame`.
    """
    df = params.Results
    cols = params.Results_columns
    assert isinstance(df, pd.DataFrame)
    df = df.copy().dropna().drop_duplicates()
    logging.info(f'Initial df size: {len(df)}')
    # Collapse protonation states so equivalent sequences aggregate together.
    df = map_proto_states(df, params)
    logging.info(f'Mapped proto states. Records: {len(df)}')
    df = prepare_singletons(df, params)
    logging.info(f'Prepared singletons')
    df = estimate_seq_aff(df, params)
    logging.info(f'Estimated affinity from {params.Seq_size_threshold}-sized seqs')
    # Drop sequences longer than the threshold whose affinity is already
    # well-approximated by the sum of their sub-sequences' affinities.
    idx = df[cols.seq_subset].apply(
        lambda x: len(x) > params.Seq_size_threshold)
    idx &= abs(df[cols.affinity] - df['affinity_estimate']) < params.Affinity_diff_threshold
    df = df[~idx]
    logging.info(f'Filtered out {idx.sum()} records due to estimation '
                 f'being accurate to the point of {params.Affinity_diff_threshold}. '
                 f'Records: {len(df)}')
    df = filter_bounds(df, params)
    logging.info(f'Filtered by affinity and stability bounds. Records: {len(df)}')
    n = params.Top_n_seqs
    if n is not None and n > 0:
        # BUGFIX: use the column names configured in `params.Results_columns`
        # instead of the hard-coded 'affinity'/'pos' literals -- consistent with
        # the rest of this module and correct when the columns are named differently.
        df = df.sort_values(cols.affinity, ascending=True).groupby(cols.pos).head(n)
        logging.info(f'Selected {n} best records per position set. Records: {len(df)}')
    return df
def prepare_seq_pool(df, params):
    """Wraps an output of the `prepare_seq_df` into a pool of `SeqGene`s.

    The affinity score is negated so that lower affinity maps to higher gene score.
    """
    cols = params.Results_columns
    genes = []
    for _, seq, p, s in df[[cols.seq_subset, cols.pos, cols.affinity]].itertuples():
        positions = tuple(int(x) for x in p.split('-'))
        genes.append(SeqGene(seq, positions, -s))
    return tuple(genes)
def prepare_data(params: ParsingParams) -> ParsingResult:
    """
    A primary interface function.
    If `params.Seq_df` is `True`, prepares the `DataFrame` provided via `params.Results`
    and creates a pool of `SeqGene`s from it. Otherwise, prepares a pool of `EdgeGene`s
    for graph-based optimization. The filtering workflow can be inferred from `params`,
    `prepare_df` / `prepare_seq_df`, and their `logging` output.
    :param params: A dataclass holding parsing parameters.
    :return: A `ParsingResult` namedtuple with three elements:
        (1) a prepared `DataFrame`, (2) a `DataFrame` with singletons
        (without any filtering applied; `None` in case `params.Seq_df` is `True`), and
        (3) a pool of genes for the GA.
    """
    if not params.Seq_df:
        df, singletons = prepare_df(params)
        return ParsingResult(df, singletons, prepare_graph_pool(df, params))
    seq_df = prepare_seq_df(params)
    return ParsingResult(seq_df, None, prepare_seq_pool(seq_df, params))
# This module is a library and must not be executed directly.
if __name__ == '__main__':
    raise RuntimeError
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def findFrequentTreeSum(self, root):
        """
        Return all subtree sums that occur with the highest frequency.
        :type root: TreeNode
        :rtype: List[int]
        """
        freq = {}

        def subtree_sum(node):
            # Post-order: children first, then record this subtree's total.
            if not node:
                return 0
            total = node.val + subtree_sum(node.left) + subtree_sum(node.right)
            freq[total] = freq.get(total, 0) + 1
            return total

        subtree_sum(root)
        if not freq:
            return []
        best = max(freq.values())
        return [total for total, count in freq.items() if count == best]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from click.testing import CliRunner
import pytest
from archery.crossbow.cli import crossbow
from archery.utils.git import git
@pytest.mark.integration
def test_crossbow_submit(tmp_path):
    """Smoke-test `crossbow check-config` and a dry-run `submit` against a fresh repo."""
    cli = CliRunner()

    def run(*args):
        return cli.invoke(crossbow, ['--queue-path', str(tmp_path), *args])

    # Initialize an empty crossbow git repository with a dummy origin and
    # a first (empty) commit, as the CLI expects.
    repo = str(tmp_path)
    git.run_cmd("init", repo)
    git.run_cmd("-C", repo, "remote", "add", "origin",
                "https://github.com/dummy/repo")
    git.run_cmd("-C", repo, "commit", "-m", "initial",
                "--allow-empty")

    for cmd in (('check-config',),
                ('submit', '--no-fetch', '--no-push', '-g', 'wheel')):
        result = run(*cmd)
        assert result.exit_code == 0
|
# SMTP发送邮件
# SMTP是发送邮件的协议,Python内置对SMTP的支持,可以发送纯文本邮件、HTML邮件以及带附件的邮件。
# Python对SMTP支持有smtplib和email两个模块,email负责构造邮件,smtplib负责发送邮件。
# # 首先,我们来构造一个最简单的纯文本邮件:
# from email.mime.text import MIMEText
# msg = MIMEText('Hello, send by Python...', 'plain', 'utf-8')
# # 注意到构造MIMEText对象时,第一个参数就是邮件正文,第二个参数是MIME的subtype,
# # 传入'plain'表示纯文本,最终的MIME就是'text/plain',最后一定要用utf-8编码保证多语言兼容性。
#
# # 然后,通过SMTP发出去:
# # 输入Email地址和口令:
# from_addr = input('From:')
# password = input('Password:') # QQ:zdldlsodjbiabdii
# # 输入收件人地址:
# to_addr = input('To:')
# # 输入SMTP服务器地址:
# smtp_server = input('SMTP server:')
#
# import smtplib
# server = smtplib.SMTP_SSL(smtp_server, 465) # SMTP协议默认端口是25
# server.set_debuglevel(1)
# server.login(from_addr, password)
# server.sendmail(from_addr, [to_addr], msg.as_string())
# server.quit()
# 我们用set_debuglevel(1)就可以打印出和SMTP服务器交互的所有信息。SMTP协议就是简单的文本命令
# 和响应。login()方法用来登录SMTP服务器,sendmail()方法就是发邮件,由于可以一次发给多个人,
# 所以传入一个list,邮件正文是一个str,as_string()把MIMEText对象变成str。
# 如果一切顺利,就可以在收件人信箱中收到我们刚发送的Email:
#
# 仔细观察,发现如下问题:
# 邮件没有主题;
# 收件人的名字没有显示为友好的名字,比如Mr Green <green@example.com>;
# 明明收到了邮件,却提示不在收件人中。
# 这是因为邮件主题、如何显示发件人、收件人等信息并不是通过SMTP协议发给MTA,而是包含在
# 发给MTA的文本中的,所以,我们必须把From、To和Subject添加到MIMEText中,才是一封完整的邮件:
from email import encoders
from email.header import Header
from email.mime.text import MIMEText
from email.utils import parseaddr, formataddr
import smtplib
def _format_addr(s):
name, addr = parseaddr(s)
return formataddr((Header(name, 'utf-8').encode(), addr))
# Interactive variants (kept for reference):
# from_addr = input('From:')
# password = input('Password:')
# to_addr = input('To:')
# smtp_server = input('SMTP server:')
# NOTE(review): real-looking credentials are hard-coded and committed to
# source control -- move them to environment variables / a secrets store
# and rotate the exposed password.
from_addr = '869609651@qq.com'
password = 'zdldlsodjbiabdii'
to_addr = 'wyc869609651@163.com'
smtp_server = 'smtp.qq.com'
# msg = MIMEText('Hello, send by python...', 'plain', 'utf-8')
# msg = MIMEText('<html><body><h1>Hello</h1>' +
# '<p>send by <a href="http://www.python.org">Python</a>...</p>' +
# '</body></html>', 'html', 'utf-8')
# msg['From'] = _format_addr('Python 爱好者<%s>' % from_addr)
# msg['To'] = _format_addr('管理员 <%s>' % to_addr)
# msg['Subject'] = Header('来自SMTP的问候......', 'utf-8').encode()
#
# server = smtplib.SMTP_SSL(smtp_server, 465)
# server.set_debuglevel(1)
# server.login(from_addr, password)
# server.sendmail(from_addr, [to_addr], msg.as_string())
# server.quit()
# 我们编写了一个函数_format_addr()来格式化一个邮件地址。注意不能简单地传入
# name <addr@example.com>,因为如果包含中文,需要Header对象进行编码。
# msg['To']接收的是字符串而不是list,如果有多个邮件地址,用,分隔即可。
# 再发一遍邮件,就可以在收件人邮箱中看到正确的标题、发件人和收件人:
# 你看到的收件人的名字很可能不是我们传入的“管理员”,因为很多邮件服务商在显示邮件时,
# 会把收件人名字自动替换为用户注册的名字,但是其他收件人名字的显示不受影响。
# 如果我们查看Email的原始内容,可以看到如下经过编码的邮件头:
# From: =?utf-8?b?UHl0aG9u54ix5aW96ICF?= <xxxxxx@163.com>
# To: =?utf-8?b?566h55CG5ZGY?= <xxxxxx@qq.com>
# Subject: =?utf-8?b?5p2l6IeqU01UUOeahOmXruWAmeKApuKApg==?=
# 这就是经过Header对象编码的文本,包含utf-8编码信息和Base64编码的文本。如果我们自己来
# 手动构造这样的编码文本,显然比较复杂。
# 发送HTML邮件
# 如果我们要发送HTML邮件,而不是普通的纯文本文件怎么办?方法很简单,在构造MIMEText对象
# 时,把HTML字符串穿进去,再把第二个参数由plain变为html就可以了:
# msg = MIMEText('<html><body><h1>Hello</h1>' +
# '<p>send by <a href="http://www.python.org">Python</a>...</p>' +
# '</body></html>', 'html', 'utf-8')
# 发送附件
# 如果Email中要加上附件怎么办?带附件的邮件可以看做包含若干个部分的邮件:文本和各个附件本身,
# 所以,可以构造一个MIMEMultipart对象代表邮件本身,然后往里面加上一个MIMEText作为邮件正文,
# 再继续往里面加上表示附件的MIMEBase对象即可:
# 邮件对象:
# Build the multipart message object.
# NOTE(review): MIMEBase is imported via email.mime.multipart (which re-exports
# it); its canonical home is email.mime.base.
from email.mime.multipart import MIMEMultipart, MIMEBase
msg = MIMEMultipart()
msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr)
msg['To'] = _format_addr('管理员 <%s>' % to_addr)
msg['Subject'] = Header('来自SMTP的问候......', 'utf-8').encode()
# The message body is a MIMEText part; the HTML variant below embeds the
# attached image through its Content-ID ("cid:0").
# msg.attach(MIMEText('send with file...', 'plain', 'utf-8'))
msg.attach(MIMEText('<html><body><h1>Hello</h1>' +
                    '<p><img src="cid:0"></p>' +
                    '</body></html>', 'html', 'utf-8'))
# Adding an attachment means adding a MIMEBase part; read an image from disk:
with open('test.jpg', 'rb') as f:
    # Set the attachment's MIME type and filename (JPEG here).
    mime = MIMEBase('image', 'jpg', filename='test.jpg')
    # Required headers: disposition, plus Content-ID / X-Attachment-Id so the
    # HTML body can reference this part as cid:0.
    mime.add_header('Content-Disposition', 'attachment', filename='test.jpg')
    mime.add_header('Content-ID', '<0>')
    mime.add_header('X-Attachment-Id', '0')
    # Read the file contents into the payload:
    mime.set_payload(f.read())
    # Base64-encode the payload:
    encoders.encode_base64(mime)
    # Attach the part to the multipart message:
    msg.attach(mime)
# 然后,按正常发送流程把msg(注意类型已改变为MIMEMultipart)发送出去,就可以收到如下
# 带附件的邮件:
# 发送图片
# 如果要把一个图片嵌入到邮件正文中怎么做?直接在HTML邮件中链接图片地址行不行?答案是,
# 大部分邮件服务商都会自动屏蔽带有外链的图片,因为不知道这些链接是否指向恶意网站。
# 要把图片嵌入到邮件正文中,我们只需按照发送附件的方式,先把邮件作为附件添加进去,然后,
# 在HTML中通过引用src="cid:0"就可以把附件作为图片嵌入了。如果有多个图片,给它们依次编号,
# 然后引用不同的cid:x即可。
# 把上面代码加入MIMEMultipart的MIMEText从plain改为html,然后在适当的位置引用图片:
# msg.attach(MIMEText('<html><body><h1>Hello</h1>' +
# '<p><img src="cid:0"></p>' +
# '</body></html>', 'html', 'utf-8'))
# 再次发送,就可以看到图片直接嵌入到邮件正文的效果:
# 同时支持HTML和Plain格式
# 如果我们发送HTML邮件,收件人通过浏览器或者Outlook之类的软件是可以正常浏览邮件内容的,
# 但是,如果收件人使用的设备太古老,查看不了HTML邮件怎么办?
# 办法是在发送HTML的同时再附加一个纯文本,如果收件人无法查看HTML格式的邮件,就可以自动降级查看纯文本邮件
# 利用MIMEMultipart就可以组合一个HTML和Plain,要注意指定subtype是alternative:
# msg = MIMEMultipart('alternative')
# msg['From'] = ...
# msg['To'] = ...
# msg['Subject'] = ...
#
# msg.attach(MIMEText('hello', 'plain', 'utf-8'))
# msg.attach(MIMEText('<html><body><h1>Hello</h1></body></html>', 'html', 'utf-8'))
# 正常发送msg对象...
# 加密SMTP
# 使用标准的25端口连接SMTP服务器时,使用的是明文传输,发送邮件的整个过程可能会被窃听。要更安全地发送邮件,可以加密SMTP会话,实际上就是先创建SSL安全连接,然后再使用SMTP协议发送邮件。
# 某些邮件服务商,例如Gmail,提供的SMTP服务必须要加密传输。我们来看看如何通过Gmail提供的安全SMTP发送邮件。
# 必须知道,Gmail的SMTP端口是587,因此,修改代码如下:
# smtp_server = 'smtp.gmail.com'
# smtp_port = 587
# server = smtplib.SMTP(smtp_server, smtp_port)
# server.starttls()
# # 剩下的代码和前面的一模一样:
# server.set_debuglevel(1)
# ...
#
# 只需要在创建SMTP对象后,立刻调用starttls()方法,就创建了安全连接。后面的代码和前面的发送邮件代码完全一样。
# 如果因为网络问题无法连接Gmail的SMTP服务器,请相信我们的代码是没有问题的,你需要对你的网络设置做必要的调整。
# 小结
# 使用Python的smtplib发送邮件十分简单,只要掌握了各种邮件类型的构造方法,正确设置好邮件头,就可以顺利发出。
# 构造一个邮件对象就是一个Message对象,如果构造一个MIMEText对象,就表示一个文本邮件对象,
# 就表示一个作为附件的图片,要把多个对象组合起来,就用MIMEMultipart对象,而MIMEBase可以表示任何对象。
# 它们的继承关系如下:
# Message
# +- MIMEBase
# +- MIMEMultipart
# +- MIMENonMultipart
# +- MIMEMessage
# +- MIMEText
# +- MIMEImage
# 这种嵌套关系就可以构造出任意复杂的邮件。你可以通过email.mime文档查看它们所在的包以及详细的用法。
# Send the message over an implicit-TLS SMTP connection (port 465).
server = smtplib.SMTP_SSL(smtp_server, 465)
server.set_debuglevel(1)  # echo the full SMTP dialogue for debugging
server.login(from_addr, password)
server.sendmail(from_addr, [to_addr], msg.as_string())
server.quit()
|
import entityx
import math
from mouse import MouseFollower
from _entityx_components import Destroyed, Renderable, Body, Physics, Stats, b2BodyType, CollisionCategory, Sound
from gamemath import vector2
from follower import Orbital
from spawner import MagicSpawner
from eventur import EventController, Event, EVENT_TEXTS
from fire import Flicker
Vector2 = vector2.Vector2
TILESIZE_X = 80
TILESIZE_Y = 60
class Button(entityx.Entity):
    """Clickable in-game upgrade button with a cost, a recurring timer, and two text labels."""

    def __init__(self):
        self.body = self.Component(Body)
        self.physics = self.Component(Physics)
        self.physics.size.x = TILESIZE_X*3
        self.physics.size.y = TILESIZE_Y*1
        # Collide only with other CATEGORY_16 objects (mouse interactables).
        self.physics.category = CollisionCategory.CATEGORY_16
        self.physics.mask.bits = CollisionCategory.CATEGORY_16
        self.rend = self.Component(Renderable)
        self.rend.texture = "./images/button.png"
        half_w = self.physics.size.x/2
        half_h = self.physics.size.y/2
        self.center = Vector2(self.body.position.x + half_w,
                              self.body.position.y + half_h)
        self.updated = False
        self.enabled = False
        self.cost_to_click = 0
        self.timer = 0
        self.current_timer = 0
        self.recurring_value = 0
        self.button_text = ButtonText()
        self.button_cost_text = ButtonText()
        self.click_count = 0

    def enable(self):
        """Activate the button (idempotent) and refresh both of its labels."""
        if not self.enabled:
            self.enabled = True
            self.button_text.rend.fontString = self.button_text.rend.base_text + ": 0"
            self.button_text.rend.dirty = True
            self.button_cost_text.rend.fontString = f"Cost: {self.cost_to_click} (0 h/s)"
            self.button_cost_text.rend.dirty = True

    @classmethod
    def increase_sticks(self, obj):
        """Stick-button cost curve: flat cost of 1 for the first 10 clicks, then +1 per click."""
        return 1 if obj.click_count < 10 else obj.cost_to_click + 1

    def increaseClickCost(self, cost_inc_func = lambda x: x.cost_to_click + 1 ):
        """Raise the click cost via *cost_inc_func* and refresh the cost label."""
        if not self.enabled:
            return
        self.cost_to_click = cost_inc_func(self)
        rate = round(self.click_count * self.recurring_value / self.timer, 2)
        self.button_cost_text.rend.fontString = f"Cost: {self.cost_to_click} ({rate} h/s)"
        self.button_cost_text.rend.dirty = True

    def update(self, dt):
        """Advance the recurring timer by the elapsed frame time."""
        self.current_timer += dt
        self.updated = True
class ButtonText(entityx.Entity):
    """Renderable text entity used as a button label."""

    def __init__(self):
        self.body = self.Component(Body)
        self.rend = self.Component(Renderable)
        self.base_text = ""
        self.updated = False

    def update(self, dt):
        # No per-frame behaviour; just record that the update ran.
        self.updated = True
class ButtonController(entityx.Entity):
    def __init__(self):
        """Set up the HUD score label, resource thresholds, and the column of upgrade buttons."""
        self.body = self.Component(Body)
        self.physics = self.Component(Physics)
        self.center = Vector2(self.body.position.x + self.physics.size.x/2,
                              self.body.position.y + self.physics.size.y/2)
        self.init = False
        self.current_score = 0
        # HUD score text (greenish, semi-transparent).
        self.rend = self.Component(Renderable)
        self.rend.font = "./fonts/arial.ttf"
        self.rend.fontString = "Heat: 0"
        self.rend.r = 78
        self.rend.g = 190
        self.rend.b = 78
        self.rend.a = 190
        # Per-resource thresholds -- presumably clicks needed before the next
        # tier unlocks; TODO confirm against update() logic.
        self.STICK_COUNT = 10
        self.TREE_COUNT = 10
        self.PEOPLE_COUNT = 10
        self.BUILDING_COUNT = 10
        self.CITY_COUNT = 10
        self.CONTINENT_COUNT = 10
        self.PLANET_COUNT = 10
        self.time_count = 0
        self.ending_done = False
        self.ending_start = 0
        # One fired-flag per scripted event index.
        self.events_fired = [False] * 100
        self.event_game_ending = False
        self.event_game_done = False
        self.mouse = MouseFollower()
        # Vertical column of upgrade buttons; args are
        # (x, y, label, event index, cost, recurring value, enabled).
        self.button1 = self.createButton(TILESIZE_X*0,TILESIZE_Y*2, "Sticks", 0, 1, 1, False)
        self.button2 = self.createButton(TILESIZE_X*0,TILESIZE_Y*3, "Trees", 6, 2, 2, False)
        self.button3 = self.createButton(TILESIZE_X*0,TILESIZE_Y*4, "People", 7, 3, 3, False)
        self.button4 = self.createButton(TILESIZE_X*0,TILESIZE_Y*5, "Buildings", 8, 4, 4, False)
        self.button5 = self.createButton(TILESIZE_X*0,TILESIZE_Y*6, "Cities", 9, 5, 5, False)
        self.button6 = self.createButton(TILESIZE_X*0,TILESIZE_Y*7, "Continent", 10, 6, 6, False)
        self.button7 = self.createButton(TILESIZE_X*0,TILESIZE_Y*8, "Planets", 11, 7, 7, False)
        self.button8 = self.createButton(TILESIZE_X*0,TILESIZE_Y*9, "Galaxies", 12, 8, 8, False)
        self.flushed = False
        # Scripted event text box, placed to the right of the button column.
        self.events = EventController()
        newBody = self.events.Component(Body)
        newBody.position.x = 3 * TILESIZE_X + 5
        newBody.position.y = 8 * TILESIZE_Y + 5
def fireEvent(self, numbah, length = 3, repeat = False):
if self.events_fired[numbah] == False:
e = Event(EVENT_TEXTS[numbah+1])
e.repeat = repeat
e.event_final = length
self.events.playEvent(e)
self.events_fired[numbah] = True
def update(self, dt):
self.time_count += dt
# FIRE OFF THE INTRODUCTION.
self.fireEvent(0)
if (self.time_count >= 4):
self.fireEvent(1)
if (self.time_count >= 8):
self.fireEvent(2)
if (self.time_count >= 12):
self.fireEvent(3)
if (self.time_count >= 16):
self.fireEvent(4)
if (self.init == False):
self.light = Flicker()
rend = self.light.Component(Renderable)
body = self.light.Component(Body)
body.position.x = 440
body.position.y = 130
self.light.center = Vector2(body.position.x + self.light.size.x/2,
body.position.y + self.light.size.y/2)
rend.texture = "./images/Light.png"
self.box = entityx.Entity()
newBody = self.box.Component(Body)
newBody.position.x = 480
newBody.position.y = 180
self.box.Component(Stats)
newPhysics = self.box.Component(Physics)
newPhysics.bodyType = b2BodyType.STATIC
newPhysics.size.x = 80
newPhysics.size.y = 60
newPhysics.category = CollisionCategory.CATEGORY_16
# Colldie with mouse interactables and orbital junk
newPhysics.mask.bits = CollisionCategory.CATEGORY_16 | CollisionCategory.CATEGORY_15
newRenderable = self.box.Component(Renderable)
newRenderable.texture = "./images/FlameOn.png"
self.spawner = Orbital()
# HIDES BUG WHERE WHITE BOX APPEARS RANDMMLYD
rend = self.spawner.Component(Renderable)
rend.font = "./fonts/arial.ttf"
rend.fontString = ""
self.spawner.center = Vector2(newBody.position)
self.spawner.physics.size.x = 50
self.spawner.physics.size.y = 50
self.spawner.r = 200
self.spawner.totalTime = 5
self.init = True
if self.event_game_ending == False:
self.process_button(self.button1)
self.process_button(self.button2)
self.process_button(self.button3)
self.process_button(self.button4)
self.process_button(self.button5)
self.process_button(self.button6)
self.process_button(self.button7)
self.process_button(self.button8)
if (self.box in self.mouse.physics.currentCollisions and self.mouse.is_clicking == True):
self.current_score += 1
# Make a sound on click
e = entityx.Entity()
e.death = e.Component(Destroyed)
e.death.timer = 2
sound = e.Component(Sound)
sound.name = "sounds/Explode.wav"
# SCALING LOGIC GOES HERE:
if(self.current_score > 75 and self.time_count >= 12):
self.fireEvent(5, length=1.5)
# Time lock the first unlock
if(self.current_score > 100 and self.time_count >= 19):
self.button1.enable()
self.events.playEvent(Event("The flame begins to draws sticks on its own"))
self.events.setColor(1)
if (self.button1.click_count > self.STICK_COUNT):
self.button2.enable()
self.events.playEvent(Event("The flame consumes forests with ease "))
self.events.setColor(2)
if (self.button2.click_count > self.TREE_COUNT):
self.button3.enable()
self.events.playEvent(Event("The flame draws once carbon life into it"))
self.events.setColor(3)
if (self.button3.click_count > self.PEOPLE_COUNT):
self.button4.enable()
self.events.playEvent(Event("The flame grows to consumes homes"))
self.events.setColor(4)
if (self.button4.click_count > self.BUILDING_COUNT):
self.button5.enable()
self.events.playEvent(Event("Entire cities collapse under the flame"))
self.events.setColor(5)
if (self.button5.click_count > self.CITY_COUNT):
self.button6.enable()
self.events.playEvent(Event("There is nothing left but masses of land"))
self.events.setColor(6)
if (self.button6.click_count > self.CONTINENT_COUNT):
self.button7.enable()
self.events.playEvent(Event("The flame pulls planets into its gravity"))
self.events.setColor(7)
if (self.button7.click_count > self.PLANET_COUNT):
self.button8.enable()
if not self.flushed:
self.events.flushEvents()
self.flushed = True
self.events.playEvent(Event("Once distant galaxies are drawn into the flame"))
self.events.setColor(8)
if (self.button8.click_count >= 10):
self.events.playEvent(Event("The flame grows as it devours it new neighbors."))
self.button8.timer = 2
if (self.button8.click_count >= 30):
self.events.playEvent(Event("Expanding faster as each galaxy joins the flame."))
self.button8.timer = 0.5
if (self.button8.click_count >= 40):
self.events.playEvent(Event("Little can the universe do to stop the flame now."))
self.button8.timer = 0.25
if (self.button8.click_count >= 50):
self.events.playEvent(Event("The flame continues to grow to pull more galaxies."))
self.button8.timer = 0.175
if (self.button8.click_count >= 60):
self.events.playEvent(Event("You are filled with a feeling of delight."))
self.button8.timer = 0.1
if (self.button8.click_count >= 70):
self.events.playEvent(Event("You've reach euphoria, the flame expands rapidly"))
if (self.button8.click_count >= 80):
self.events.playEvent(Event("Is this what you were waiting for?"))
if (self.button8.click_count >= 90):
self.events.playEvent(Event("There isn't many galaxies left outside of the flame."))
if (self.button8.click_count >= 95):
self.events.playEvent(Event(" "))
if (self.button8.click_count == 100 and self.event_game_done == False):
self.event_game_ending = True
self.do_ending(dt)
if self.ending_done == True and self.event_game_done == False:
self.do_credits()
self.rend.fontString = "Heat: " + str(self.current_score)
self.rend.dirty = True
def do_ending(self, dt):
if self.ending_start == 0:
self.ending_start = self.time_count
self.ending_score = self.current_score
heat_loss_time = 45
self.current_score = int(self.ending_score - self.ending_score*min( heat_loss_time,self.time_count-self.ending_start)/ heat_loss_time)
self.events.setColor(1)
self.fireEvent(6, length = 2) # "The flame consumed the entire universe.",
if (self.time_count >= self.ending_start+4):
self.fireEvent(7, length = 2) # "There is nothing left to consume.",
if (self.time_count >= self.ending_start+8):
# "The flame not satisfied.. but theres nothing left",
self.fireEvent(8, length = 2)
if (self.time_count >= self.ending_start+12):
# "...",
self.fireEvent(9, length = 6)
if (self.time_count >= self.ending_start+20):
# "...",
self.fireEvent(10, length=6)
if (self.time_count >= self.ending_start+21+9):
# "The world once is void of any life.",
self.fireEvent(11,length=6)
if (self.time_count >= self.ending_start+28+9):
# "...",
self.fireEvent(12,length=1)
if (self.time_count >= self.ending_start+31+9):
# "There is only you, in this cold dark room.",
if self.light != None:
self.light.Component(Destroyed)
self.light = None
self.fireEvent(13, length = 8)
if (self.time_count >= self.ending_start+43+9):
# "...",
self.fireEvent(14, length = 3)
if (self.time_count >= self.ending_start+46+9):
# "...",
self.fireEvent(16, length = 2)
if (self.time_count >= self.ending_start+50+9):
# "There is only you, and these sticks.",
self.fireEvent(17)
if (self.time_count >= self.ending_start+54+9):
# "You desire to be reunited with that feeling again...",
self.fireEvent(18)
if (self.time_count >= self.ending_start+58+9):
# "You put some sticks together to start the flame."
self.fireEvent(19)
if (self.time_count >= self.ending_start+64+9):
self.ending_done = True
def do_credits(self):
self.event_game_done = True
gameOverBox = entityx.Entity()
newBody = gameOverBox.Component(Body)
newBody.position.x = 260
newBody.position.y = 180
gameOverBox.Component(Stats)
newPhysics = gameOverBox.Component(Physics)
newPhysics.bodyType = b2BodyType.STATIC
newPhysics.size.x = 200
newPhysics.size.y = 300
newPhysics.category = CollisionCategory.CATEGORY_1
newPhysics.mask.bits = CollisionCategory.CATEGORY_1
newRenderable = gameOverBox.Component(Renderable)
newRenderable.font = "./fonts/arial.ttf"
newRenderable.fontSize = 30
newRenderable.fontString ="""The flame continues in our hearts.
Thank you for playing!
Follow us on twitter
@tehPHEN
@mitchcraig311"""
newRenderable.r = 255
newRenderable.g = 244
newRenderable.b = 255
e = entityx.Entity()
sound = e.Component(Sound)
e.death = e.Component(Destroyed)
e.death.timer = 2
sound.name = "sounds/tada.wav"
def process_button(self, button):
if(button.enabled == True and button.cost_to_click <= self.current_score and button in self.mouse.physics.currentCollisions and self.mouse.is_clicking == True):
button.click_count = button.click_count + 1
button.button_text.rend.fontString = button.button_text.rend.base_text + ": " + str(button.click_count)
button.button_text.rend.dirty = True
button.button_cost_text.rend.dirty = True
self.current_score -= button.cost_to_click
if(button.button_text.rend.base_text == "Sticks"):
MagicSpawner.spawnStix(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost(Button.increase_sticks)
if(button.button_text.rend.base_text == "Trees"):
MagicSpawner.spawnTree(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost()
if(button.button_text.rend.base_text == "People"):
MagicSpawner.spawnPeople(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost()
if(button.button_text.rend.base_text == "Buildings"):
MagicSpawner.spawnBuilding(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost()
if(button.button_text.rend.base_text == "Cities"):
MagicSpawner.spawnCity(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost()
if(button.button_text.rend.base_text == "Continent"):
MagicSpawner.spawnContinent(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost()
if(button.button_text.rend.base_text == "Planets"):
MagicSpawner.spawnPlanet(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost()
if(button.button_text.rend.base_text == "Galaxies"):
MagicSpawner.spawnGalaxy(Vector2(self.spawner.body.position), self.spawner.center, 50, 250)
button.increaseClickCost()
# Make a sound on click
e = entityx.Entity()
e.death = e.Component(Destroyed)
e.death.timer = 2
sound = e.Component(Sound)
sound.name = "sounds/Explode.wav"
if (button.current_timer >= button.timer):
button.current_timer = 0
self.current_score += button.click_count * button.recurring_value
def createButton(self, x, y, text, cost, value, timer, enabled):
e = Button()
e.enabled = enabled
e.cost_to_click = cost
e.recurring_value = value
e.timer = timer
e.body.position.x = x
e.body.position.y = y
e.button_text.body.position.x = x + 15
e.button_text.body.position.y = y
e.button_text.rend.font = "./fonts/arial.ttf"
if (enabled == True):
e.button_text.rend.fontString = str(text) + ": 0"
else:
e.button_text.rend.fontString = "???"
e.button_text.rend.base_text = str(text)
e.button_text.rend.r = 78
e.button_text.rend.g = 190
e.button_text.rend.b = 78
e.button_text.rend.a = 190
e.button_cost_text.body.position.x = x + 15
e.button_cost_text.body.position.y = y + 32
e.button_cost_text.rend.font = "./fonts/arial.ttf"
e.button_cost_text.rend.fontSize = 15
if (enabled == True):
e.button_cost_text.rend.fontString = "Cost: " + str(cost) + " (0 h/s)"
else:
e.button_cost_text.rend.fontString = ""
e.button_cost_text.rend.base_text = str(text)
e.button_cost_text.rend.r = 78
e.button_cost_text.rend.g = 190
e.button_cost_text.rend.b = 78
e.button_cost_text.rend.a = 190
return e
|
from app import app, socketio
if __name__ == '__main__':
    # Serve the Flask-SocketIO app on all interfaces. Debug mode follows the
    # app's ENV config; the reloader is disabled so only one process runs.
    socketio.run(app,
                 host="0.0.0.0",
                 debug=app.config["ENV"] == "development",
                 use_reloader=False,
                 log_output=True)
|
def missing_char(word):
    """Return every variant of *word* with exactly one character removed,
    in order of the removed character's position."""
    variants = []
    for index in range(len(word)):
        variants.append(word[:index] + word[index + 1:])
    return variants
def main():
    """Demo: print each one-character-deleted variant of a sample word."""
    word = "ornery"
    print(f"\nWord: {word}")
    print("\nWith chars missing: ")
    for variant in missing_char(word):
        print(variant)
main()
import cProfile
import time
import gc
import globals
# BUG FIX: `print globals()` is Python 2 statement syntax and a SyntaxError
# under Python 3, which the print() calls later in this file target.
print(globals())
def a():
    """Trivial workload function used as a profiling target.

    BUG FIX: the original body was `i = i + 5` with no prior binding, which
    raises UnboundLocalError on every call; initialize `i` first.
    """
    i = 0
    i = i + 5
def tester():
    """Spin a long counting loop, then call a() — a CPU-bound cProfile target."""
    counter = 0
    idx = 0
    while idx < 1000000000:
        counter = idx + 1
        idx += 1
    a()
#These profile function use exec
#This is the recommended function
#cProfile.run("tester")
def timeme(method):
    """Decorator: print each call's wall-clock duration in milliseconds."""
    def wrapper(*args, **kw):
        started = int(round(time.time() * 1000))
        outcome = method(*args, **kw)
        finished = int(round(time.time() * 1000))
        print(finished - started, 'ms')
        return outcome
    return wrapper
# You don't want to average timing samples
# You want to take the minimum
# Theres no real variability in python time; its whether some other python code gets involved
# You don't want to average timing samples
# You want to take the minimum
# Theres no real variability in python time; its whether some other python code gets involved
def multiple_timers(method):
    """Decorator: run *method* num_samples times, print the minimum duration
    in ms, and return the last call's result.

    The minimum is reported (not the mean) because timing noise is one-sided:
    interference only ever makes a sample slower.
    """
    num_samples = 1000
    # NOTE(review): GC is disabled at decoration time and never re-enabled;
    # this leaks the disabled state to the rest of the program — confirm intent.
    gc.disable() #disabling garbage collection
    def wrapper(*args, **kw):
        time_measurement = float("inf")
        for _ in range(num_samples):
            startTime = time.time()
            result = method(*args, **kw)
            endTime = time.time()
            difference = (endTime - startTime) * 1000
            time_measurement = min(difference, time_measurement)
        # BUG FIX: was a Python 2 print statement (`print "..."`), a
        # SyntaxError under Python 3 which the rest of this file uses.
        print("time measurement", time_measurement)
        return result
    return wrapper
@multiple_timers
def func1(a,b,c = 'c',sleep = 1):
    # NOTE(review): the `sleep` parameter is unused — presumably intended for
    # a time.sleep(sleep) workload; confirm before removing.
    print(a,b,c)
# Runs the benchmark at import time: 1000 timed calls of func1.
func1(1,2,3)
|
# Demonstrates a bare `raise`: inside an except block it re-raises the
# exception currently being handled, preserving its traceback.
try:
    raise IndexError('spam')
except IndexError:
    print('except IndexError')
    print('raise with nothing')
    raise  # re-raises the IndexError; the script exits with a traceback
|
"""
Just a regular `setup.py` file.
Author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
# The long description mirrors the README so PyPI shows the repo page content.
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name='gpn',
    version='0.0.1',
    description='Generative Predictive Networks (GPN)',
    long_description=long_description,
    url='https://github.com/Nikolay-Lysenko/gpn',
    author='Nikolay Lysenko',
    author_email='nikolay-lysenco@yandex.ru',
    license='MIT',
    keywords='generative_models neural_networks gan',
    packages=find_packages(exclude=['tests', 'docs']),
    python_requires='>=3.6',
    install_requires=['tensorflow', 'numpy', 'PyYAML', 'Pillow']
)
|
"""Enables the command line execution of multiple modules within src/
This module combines the argparsing of each module within src/ and enables the execution of the corresponding scripts
so that all module imports can be absolute with respect to the main project directory.
Current commands enabled:
To create a database for Exchange Rates:
`python run.py create_db`
To acquire the exchange rate data:
`python run.py acquire`
"""
import argparse
import logging.config
logging.config.fileConfig("config/logging/local.conf")
logger = logging.getLogger("xchangeratepred")
from src.create_dataset import create_db
from src.acquire_data import acquire_rates
from src.train_model import train_model
from src.score_model import score_model
from app.app import app
def run_app(args):
    """Entry point for `python run.py app`: serve the Flask app.

    :param args: argparse.Namespace — unused, present only to satisfy the
        subparser set_defaults(func=...) calling convention.
    """
    app.run(debug=app.config["DEBUG"], port=app.config["PORT"], host=app.config["HOST"])
if __name__ == '__main__':
    # Each subcommand maps to a handler via set_defaults(func=...); the chosen
    # handler is invoked with the parsed namespace at the bottom.
    parser = argparse.ArgumentParser(description="Run components of the model source code")
    subparsers = parser.add_subparsers()
    # Sub-parser for acquiring exchange rate data
    sb_acquire = subparsers.add_parser("acquire", description="Acquire exchange rate data")
    sb_acquire.set_defaults(func=acquire_rates)
    # Sub-parser for creating a database
    sb_create = subparsers.add_parser("create_db", description="Create rates database")
    sb_create.set_defaults(func=create_db)
    # Sub-parser for training the model
    sb_train = subparsers.add_parser("train", description="Train ARIMA models")
    sb_train.set_defaults(func=train_model)
    # Sub-parser for scoring the model
    sb_score = subparsers.add_parser("score", description="Score Predictions")
    sb_score.set_defaults(func=score_model)
    # Sub-parser for running flask app
    sb_run = subparsers.add_parser("app", description="Run Flask app")
    sb_run.set_defaults(func=run_app)
    args = parser.parse_args()
    # NOTE(review): invoking without a subcommand leaves `func` unset and
    # raises AttributeError here — consider parser.set_defaults or required=True.
    args.func(args)
|
import os
import random
# Lowercase a-z, used for random filenames and link targets.
# Replaces the hand-typed 26-element literal with the stdlib constant.
import string
charList = list(string.ascii_lowercase)
# HTML template for a page on the flag path: placeholder {0[0]} is the random
# link label, {0[1]}..{0[10]} are link targets, and the trailing HTML comment
# embeds flag part {0[11]} with text {0[12]}.
# NOTE(review): leading whitespace inside these templates may have been lost in
# transit — confirm against the original file before relying on exact bytes.
FlagText = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Find The Flag</title>
<link href="https://fonts.googleapis.com/css2?family=Electrolize&display=swap" rel="stylesheet">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css" integrity="sha384-Vkoo8x4CGsO3+Hhxv8T/Q5PaXtkKtu6ug5TOeNV6gBiFeWPGFN9MuhOf23Q9Ifjh" crossorigin="anonymous">
<link rel="stylesheet" href="styles.css">
</head>
<body>
<div class="container">
<div id="content">
<h1>Find The Flag</h1>
<hr>
<p>There's A Part Of The Flag In Here But Where Is It ?</p>
<div>
<a href="{0[1]}.html"> {0[0]}</a><br>
<a href="{0[2]}.html"> {0[0]}</a><br>
<a href="{0[3]}.html"> {0[0]}</a><br>
<a href="{0[4]}.html"> {0[0]}</a><br>
<a href="{0[5]}.html"> {0[0]}</a><br>
<a href="{0[6]}.html"> {0[0]}</a><br>
<a href="{0[7]}.html"> {0[0]}</a><br>
<a href="{0[8]}.html"> {0[0]}</a><br>
<a href="{0[9]}.html"> {0[0]}</a><br>
<a href="{0[10]}.html"> {0[0]}</a><br>
</div>
</div>
</div>
</body>
<!-- Flag Text Part {0[11]} Of 6 : {0[12]} -->
</html>
"""
# Decoy page template: identical layout but no flag comment.
NoFlagText = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Find The Flag</title>
<link href="https://fonts.googleapis.com/css2?family=Electrolize&display=swap" rel="stylesheet">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css" integrity="sha384-Vkoo8x4CGsO3+Hhxv8T/Q5PaXtkKtu6ug5TOeNV6gBiFeWPGFN9MuhOf23Q9Ifjh" crossorigin="anonymous">
<link rel="stylesheet" href="styles.css">
</head>
<body>
<div class="container">
<div id="content">
<h1>Find The Flag</h1>
<hr>
<p>
But Then There's No Flag In Here
</p>
<div>
<a href="{0[1]}.html"> {0[0]}</a><br>
<a href="{0[2]}.html"> {0[0]}</a><br>
<a href="{0[3]}.html"> {0[0]}</a><br>
<a href="{0[4]}.html"> {0[0]}</a><br>
<a href="{0[5]}.html"> {0[0]}</a><br>
<a href="{0[6]}.html"> {0[0]}</a><br>
<a href="{0[7]}.html"> {0[0]}</a><br>
<a href="{0[8]}.html"> {0[0]}</a><br>
<a href="{0[9]}.html"> {0[0]}</a><br>
<a href="{0[10]}.html"> {0[0]}</a><br>
</div>
</div>
</div>
</body>
<!-- Wassup -->
</html>
"""
# The six two-letter segments that make up the hidden flag path, in order.
FlagList = ['co', 'rr', 'ec', 'tt', 'pa', 'th']
# Every other two-letter combination becomes a decoy page name.
# Same iteration order as the original nested loops, expressed as a comprehension.
NoFlagList = [i + j for i in charList for j in charList if i + j not in FlagList]
i = 0   # pages written so far
j = 1   # index (1-based) of the next flag chunk to drop
k = 0
FlagChunks = ["ieee_", "nitc{", "w3bcR4w", "ling_", "i5_such_", "4_p4In}"]
print("".join(FlagChunks))
# First pass: write decoy pages until every flag segment has been teased.
# Every 20th page links to the current flag-path page; every 60th advances
# to the next flag segment.
while FlagList != []:
    i += 1
    randomText = ""
    for k in range(10):
        randomText += random.choice(charList)
    # Formatter[0] is the visible link label; [1..10] are link targets.
    Formatter = [randomText]
    for k in range(10):
        Formatter.append(random.choice(NoFlagList))
    if i % 20 == 0:
        x = FlagList[0]
        Formatter[random.randint(1, 10)] = x
        Formatter.append(j)
        Formatter.append(FlagChunks[j-1])
        if i % 60 == 0:
            FlagList.remove(x)
            j += 1
    Formatter = tuple(Formatter)
    File = open("Created/"+NoFlagList[i-1]+".html", "w")
    File.write(NoFlagText.format(Formatter))
    File.close()
# Second pass: fill out the remaining decoy pages with random-only links.
while i < len(NoFlagList):
    i += 1
    randomText = ""
    for k in range(10):
        randomText += random.choice(charList)
    Formatter = [randomText]
    for k in range(10):
        Formatter.append(random.choice(NoFlagList))
    Formatter = tuple(Formatter)
    File = open("Created/"+NoFlagList[i-1]+".html", "w")
    File.write(NoFlagText.format(Formatter))
    File.close()
# Third pass: write the actual flag-path pages. Page x carries flag chunk j
# and one link to the NEXT segment y (the last page wraps back to "co").
FlagList = ['co', 'rr', 'ec', 'tt', 'pa', 'th']
FlagChunks = ["ieee_", "nitc{", "w3bcR4w", "ling_", "i5_such_", "4_p4In}"]
j = 1
i = 0
while FlagList != []:
    i += 1
    randomText = ""
    for k in range(10):
        randomText += random.choice(charList)
    Formatter = [randomText]
    for k in range(10):
        Formatter.append(random.choice(NoFlagList))
    x = FlagList[0]
    try:
        y = FlagList[1]
    except:
        y = "co"
    Formatter[random.randint(1, 10)] = y
    Formatter.append(j)
    Formatter.append(FlagChunks[j-1])
    FlagList.remove(x)
    j += 1
    print(x, y)
    Formatter = tuple(Formatter)
    File = open("Created/"+x+".html", "w")
    File.write(FlagText.format(Formatter))
    File.close()
|
# Amount of food and number of people
tons_of_food = float(input("How many tons of food are available?"))
num_people = int(float(input("How many people are there?")))
# Determine how much food each person gets
tons_of_food_per_person = tons_of_food / num_people
print(tons_of_food_per_person)
# Ask the user how much food they took
tons_taken = float(input("How many tons of food did you take? "))
#checks to 4 places
# Rounding both sides to 4 decimals avoids float-equality false negatives.
if round(tons_taken,4) == round(tons_of_food_per_person,4):
    print("Good job, you took the right amount of food!")
else:
    print("You took the wrong amount of food!")
"""
Returns number cubed
:param num: int
:return: int result of num to the power of 3
"""
def cubed(num):
return num ** 3
|
import random
options2 = ['r', 'p', 's']  # legal moves: rock, paper, scissors
max_games = 3               # rounds per session
games_played = 0
player_score = 0            # player wins so far (updated by updateScore)
my_choice = ""              # player's last move (set globally by combat)
def pc():
    """Return the machine's move: a uniformly random 'r', 'p' or 's'."""
    return random.choice(options2)
def myoption():
    """Prompt for 'rock, paper or scissors' and return the lowercased choice,
    re-prompting until the input is one of options2."""
    rps = str(input("rock, paper or scissors?")).lower()
    if rps in options2:
        return rps
    else:
        print("invalid, try again")
        # BUG FIX: the retry's value was discarded (the function implicitly
        # returned None after invalid input); propagate the recursive result.
        return myoption()
# Define a function that resolves a combat.
# Returns 0 if there is a tie, 1 if the machine wins, 2 if the human player wins
def combat(): # the core of the game
    """Play one round: read the player's move, roll the machine's move once,
    print the outcome, and return it (0 tie, 1 machine wins, 2 player wins)."""
    global my_choice
    my_choice = myoption()
    # BUG FIX: pc() was re-rolled for every comparison, so the machine's move
    # could differ between branches and rounds could fall through with no
    # message at all. Roll exactly once per round.
    machine = pc()
    if my_choice == machine:
        print("0, it is a tie")
        return 0
    beats = {'p': 'r', 'r': 's', 's': 'p'}  # key beats value
    if beats[my_choice] == machine:
        print("2, you win")
        return 2
    # BUG FIX: the original returned None implicitly, so the caller's win
    # check could never succeed; return the documented codes instead.
    print("1, you lose")
    return 1
# Define a function that shows the choice of each player and the state of the game
# This function should be used every time accumulated points are updated
# Create a loop that iterates while no player reaches the minimum of wins
# necessary to win. Inside the loop solves the play of the
# machine and ask the player's. Compare them and update the value of the variables
# that accumulate the wins of each participant.
# Print by console the winner of the game based on who has more accumulated wins
def updateScore(): # keep track of W and L
    """Add one win to the player's global tally."""
    global player_score
    player_score = player_score + 1
# Play max_games rounds, counting the player's wins, then report the score.
while games_played < max_games:
    games_played += 1  # Add 1 to current value of games_played
    # BUG FIX: the original tested `combat() == True`; combat reports a
    # player win as 2 (and previously returned None), so wins were never
    # counted. Compare against the documented win code.
    if combat() == 2:  # Play a game. If player wins...
        updateScore()  # ...add 1 to player_score
print("You've won", str(player_score), "out of", str(games_played) + ".")
print("Game over.")
|
import pytest
from includer import IncluderMixin, _IncluderWrapper
@pytest.fixture(scope='function')
def obj():
    """Fresh list-backed IncluderMixin instance for each test; include()
    calls append (name, wrapper) tuples onto it."""
    class Obj(IncluderMixin, list):
        pass
    return Obj()
def test__includeme(obj):
    # Including a module path falls back to its `includeme` entry point.
    obj.include('tests.for_include')
    assert len(obj) == 1
    assert obj[0][0] == 'includeme'
    assert isinstance(obj[0][1], _IncluderWrapper)
    assert obj[0][1]._include_object is obj
    assert obj[0][1]._include_module == 'tests.for_include'
def test__func(obj):
    # A dotted path to a specific function includes only that function.
    obj.include('tests.for_include.func')
    assert len(obj) == 1
    assert obj[0][0] == 'func'
    assert isinstance(obj[0][1], _IncluderWrapper)
    assert obj[0][1]._include_module == 'tests.for_include'
def test___real_func(obj):
    # Passing the function object directly behaves like the dotted path.
    from .for_include import func
    obj.include(func)
    assert len(obj) == 1
    assert obj[0][0] == 'func'
    assert isinstance(obj[0][1], _IncluderWrapper)
    assert obj[0][1]._include_module == 'tests.for_include'
def test__pkg_includeme(obj):
    # Including a package uses the includeme defined in its __init__.
    obj.include('tests')
    assert len(obj) == 1
    assert obj[0][0] == 'init_includeme'
    assert isinstance(obj[0][1], _IncluderWrapper)
    assert obj[0][1]._include_module == 'tests'
def test__pkg_func(obj):
    # A package-level function may itself include more targets (3 entries).
    obj.include('tests.func')
    assert len(obj) == 3
    assert obj[0][0] == 'init_func'
    assert isinstance(obj[0][1], _IncluderWrapper)
    assert obj[0][1]._include_module == 'tests'
def test_deeper(obj):
    # Nested includes record both the outer and the transitively included func.
    obj.include('tests.for_include.func_func')
    assert len(obj) == 2
    assert obj[0][0] == 'func_func'
    assert isinstance(obj[0][1], _IncluderWrapper)
    assert obj[0][1]._include_module == 'tests.for_include'
    assert obj[1][0] == 'func'
    assert isinstance(obj[1][1], _IncluderWrapper)
    assert obj[1][1]._include_module == 'tests.for_include'
def test_setattr(obj):
    # Attributes set on the wrapper are mirrored onto the wrapped object.
    obj.include('tests.for_include')
    wrap = obj[0][1]
    wrap.val = 'var'
    assert wrap.val == 'var'
    assert obj.val == 'var'
|
from django.shortcuts import render
from rest_framework_simplejwt.views import TokenObtainPairView
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from .serializers import CustomUserSerializer
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework.permissions import AllowAny
from rest_framework import generics
from django.contrib.auth.models import User
from .serializers import ChangePasswordSerializer
from rest_framework.permissions import IsAuthenticated
# Registration endpoint for the custom user model (JWT-based auth flow).
class CustomUserCreate(APIView):
    """POST a user payload to create an account; open to anonymous clients."""
    permission_classes = [AllowAny]

    def post(self, request, format='json'):
        # BUG FIX: the handler was named `product`, which DRF's dispatch never
        # routes to — POST requests returned 405. HTTP-verb handlers must be
        # named after the method (`post`).
        serializer = CustomUserSerializer(data=request.data)
        if serializer.is_valid():
            user = serializer.save()
            if user:
                json = serializer.data
                return Response(json, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
# Blacklist an active refresh token (logout).
class BlacklistTokenUpdateView(APIView):
    """Revoke a refresh token so it can no longer mint access tokens.

    Expects {"refresh_token": "..."} in the request body.
    """
    permission_classes = [AllowAny]
    authentication_classes = ()
    def post(self, request):
        try:
            refresh_token = request.data["refresh_token"]
            token = RefreshToken(refresh_token)
            token.blacklist()
            return Response(status=status.HTTP_205_RESET_CONTENT)
        except Exception as e:
            # Any failure (missing key, malformed/expired token) maps to 400.
            return Response(status=status.HTTP_400_BAD_REQUEST)
# change the password of user
class ChangePasswordView(generics.UpdateAPIView):
    """
    An endpoint for changing password.

    PUT/PATCH with old_password and new_password; operates on the
    authenticated user only.
    """
    serializer_class = ChangePasswordSerializer
    model = User
    permission_classes = (IsAuthenticated,)
    def get_object(self, queryset=None):
        # Always the requesting user — clients cannot change other accounts.
        obj = self.request.user
        return obj
    def update(self, request, *args, **kwargs):
        self.object = self.get_object()
        serializer = self.get_serializer(data=request.data)
        # NOTE(review): reads serializer.data after is_valid();
        # serializer.validated_data is the conventional accessor — confirm.
        if serializer.is_valid():
            # Check old password
            if not self.object.check_password(serializer.data.get("old_password")):
                return Response({"old_password": ["Wrong password."]}, status=status.HTTP_400_BAD_REQUEST)
            # set_password also hashes the password that the user will get
            self.object.set_password(serializer.data.get("new_password"))
            self.object.save()
            response = {
                'status': 'success',
                'code': status.HTTP_200_OK,
                'message': 'Password updated successfully',
                'data': []
            }
            return Response(response)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
# Name: Ryan Gelston (rgelston)
# Filename: WriteToFile.py
# Assignment: Term Project
# Description: Outputs various data structures to a file
import numpy as np
def token_count(tokenCounter, outFile):
    """ Writes the token counts to a csv file.

    tokenCounter may be a mapping {token: count} or a list of
    (token, count) pairs; output is sorted most-frequent first
    (ties broken by token, descending).
    """
    # isinstance is the idiomatic type test (handles list subclasses too).
    if not isinstance(tokenCounter, list):
        tokenCounter = [(k, v) for k, v in tokenCounter.items()]
    tokenCounter.sort(key=lambda t: (t[1], t[0]), reverse=True)
    with open(outFile, 'w') as f:
        f.write("%d # Number of token counts\n" % (len(tokenCounter)))
        for tup in tokenCounter:
            f.write("%s, %d\n" % tup)
def prof_vects(profVects, pidsNotIncluded, tokenSchema, outFileName):
    """ Writes profVects to outFileName as annotated CSV.

    profVects: iterable of dicts with keys 'pid', 'rating_difficulty',
        'rating_overall' and 'token_vect' (sequence aligned with tokenSchema).
    pidsNotIncluded: pids dropped because their token vectors were all zero.
    """
    # `with` guarantees the handle is closed even if a vector dict is
    # malformed (the original leaked the handle on any exception).
    with open(outFileName, 'w') as f:
        f.write("%d # Number of vectors\n" % (len(profVects)))
        # +3 accounts for the pid and the two rating columns.
        f.write("%d # Vector Length\n" % (len(tokenSchema) + 3))
        f.write("%d # Num pids not included\n" % len(pidsNotIncluded))
        f.write(','.join([str(pid) for pid in pidsNotIncluded])
                + " # Pids not included due to zero vectors\n")
        f.write("%s,%s,%s,%s\n" %
                ("pid", "rating_difficulty", "rating_overall", ','.join(tokenSchema)))
        for vect in profVects:
            f.write("%d,%f,%f,%s\n" %
                    (vect['pid'],
                     vect['rating_difficulty'],
                     vect['rating_overall'],
                     ','.join([str(c) for c in vect['token_vect']])))
def similarity_matrix(simMat, outFile):
    """Dump the similarity matrix as comma-separated floats, one row per line."""
    np.savetxt(outFile,
               simMat,
               fmt='%f',
               delimiter=',',
               newline='\n')
def knn_predictions(preds, outFile):
    """Dump KNN predictions as comma-separated floats, one row per line."""
    np.savetxt(outFile,
               preds,
               fmt='%f',
               delimiter=',',
               newline='\n')
def pids_file(pids, outFile):
    """Write the pids to outFile as a single comma-separated line."""
    with open(outFile, 'w') as f:
        f.write(','.join(str(pid) for pid in pids))
def token_correlations(corrTups, outFile):
    """Write (token1, token2, count, pearson) tuples to outFile as CSV
    with a header row."""
    with open(outFile, 'w') as f:
        f.write("token1,token2,num_occurances,pearson_correlation\n")
        rows = []
        for first, second, count, pearson in corrTups:
            rows.append(','.join([first, second, str(count), str(pearson)]))
        f.write('\n'.join(rows))
|
# ---- 1. 엑셀 파일 만들고 저장하기 ----
#
# import openpyxl
#
# wb = openpyxl.Workbook()
# wb.save('text.xlsx')
# ---- 2. 엑셀 시트 & 셀에 접근하고 수정하기 ----
# import openpyxl
#
# wb = openpyxl.Workbook()
# sheet = wb.active
#
# sheet['D5'] = 'hello world'
# sheet.cell(row=2,column=2).value= '3, 3'
# sheet.append([1,2,3,4,5])
#
#
# wb.save('test2.xlsx')
# ---- 3. 시트 변환 ----
# import openpyxl
#
# wb = openpyxl.Workbook()
# sheet1 = wb.active
# sheet1.title = '수집 데이터'
# sheet1['A1'] = '첫번째 시트'
#
# sheet2 = wb.create_sheet('새 시트')
# sheet2['A1'] = '두번째 시트'
#
# sheet1['A2'] = '다시 첫번째 시트'
#
# wb.save('test3.xlsx')
# ---- 4. Load an existing workbook ----
import openpyxl
wb = openpyxl.load_workbook('test2.xlsx')
# sheet1 = wb.active
sheet1 = wb['새 시트']        # look up the sheet named '새 시트' ("new sheet")
sheet1.title = "이름 변경"    # rename it to "name changed"
sheet1.append(range(10))      # append 0..9 as one row
wb.save('test2.xlsx')
# Read four whitespace-separated integers a x c d and print a*x^2 + c*x + d.
tokens = input().split()
x = int(tokens[1])
result = int(tokens[0]) * x * x + int(tokens[2]) * x + int(tokens[3])
print(result)
|
# -*- coding: utf-8 -*-
"""
Created on Tue May 12 14:34:37 2020
@author: ajaybkumar
"""
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 15 15:37:54 2020
@author: ajaybkumar
"""
import sys
from tika import parser
import re
import requests
from bs4 import BeautifulSoup
# from urllib.request import urlopen
# from nltk.tokenize import sent_tokenize
# import nltk
import cx_Oracle
import pandas as pd
from urlextract import URLExtract
#scrapy
#output handler for blob
def OutputTypeHandler(cursor, name, defaultType, size, precision, scale):
    """cx_Oracle output type handler: fetch CLOB/BLOB columns as long
    string/binary buffers instead of LOB locators (avoids per-row LOB reads)."""
    if defaultType == cx_Oracle.CLOB:
        return cursor.var(cx_Oracle.LONG_STRING, arraysize=cursor.arraysize)
    if defaultType == cx_Oracle.BLOB:
        return cursor.var(cx_Oracle.LONG_BINARY, arraysize=cursor.arraysize)
#Database Connection
# SECURITY(review): host, SID and credentials are hard-coded in source;
# move them to environment variables or a config file.
dsn_tns = cx_Oracle.makedsn('10.118.62.195', '1521', 'db1') # if needed, place an 'r' before any parameter in order to address special characters such as '\'.
conn = cx_Oracle.connect(user='product_ors', password='pr0duct_ors', dsn=dsn_tns) # if needed, place an 'r' before any parameter in order to address special characters such as '\'. For example, if your user name contains '\', you'll need to place 'r' before the user name: user=r'User Name'
conn.outputtypehandler = OutputTypeHandler
c = conn.cursor()
print ("Opened database successfully")
def Find(string):
    """Extract URLs from *string* with URLExtract; print each one and
    return them all as a list.

    BUG FIX: the original returned the bare loop variable `url`, which is a
    NameError when no URL is found and only the LAST URL otherwise; collect
    and return the full list instead.
    """
    extractor = URLExtract()
    urls = []
    for url in extractor.gen_urls(string):
        print(url)
        urls.append(url)
    return urls
# Document path comes from the command line: argv[1].
file = str(sys.argv[1])
# Parse data from file
file_data = parser.from_file(file)
# Get files text content
text = file_data['content']
#print(text)
#print("Urls: ", Find(text))
# Extract (and print) every URL found in the document text.
DellUrl = Find(text)
print(DellUrl)
#extracting code beautifulsoup for html parser
def readwebdata():
    """Scrape a Flipkart product page, extract the specification name/value
    table, and insert each row into Additional_Features, keyed by the
    interaction id passed as argv[2].

    NOTE: the CSS class names are Flipkart-generated and brittle; the scrape
    silently does nothing on a non-200 response.
    """
    # the target we want to open
    url = 'https://www.flipkart.com/dell-vostro-14-3000-core-i5-8th-gen-8-gb-1-tb-hdd-linux-2-gb-graphics-vos-3480-laptop/p/itmf1a0a2f37df6d'
    resp = requests.get(url)
    # http_respone 200 means OK status
    if resp.status_code == 200:
        soup = BeautifulSoup(resp.content, 'html.parser')
        # "Specifications" section container on the product page.
        Specifications = soup.find(class_="_2RngUh")
        ExtractSpecName = Specifications.find_all(class_="_3-wDH3 col col-3-12")
        NameList = [word.get_text() for word in ExtractSpecName]
        ExtractSpecValue = Specifications.find_all(class_="_2k4JXJ col col-9-12")
        ValueList = [word.get_text() for word in ExtractSpecValue]
        # FIX: renamed from `dict`, which shadowed the builtin.
        spec_table = {'name': NameList, 'value': ValueList}
        df = pd.DataFrame(spec_table)
        INTRCTID = sys.argv[2]
        for index, row in df.iterrows():
            if index < 1:
                # First row is the section header, not a real spec.
                print("skip this")
            else:
                c.execute("INSERT INTO Additional_Features (FEATURE_NAME,FEATURE_VALUE,INTERACTION_ID) VALUES (:ID,:VAL,:INTR)", ID = row['name'],VAL = row['value'],INTR=INTRCTID)
                print(row['name'], row['value'])
        conn.commit()
        c.close()
readwebdata()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
FlyScan for Sector 2-BM
'''
from __future__ import print_function
import sys
import json
import time
from epics import PV
import h5py
import shutil
import os
import imp
import traceback
from datetime import datetime
import numpy as np
import pathlib
import libs.aps2bm_lib as aps2bm_lib
import libs.scan_lib as scan_lib
import libs.log_lib as log_lib
import libs.dm_lib as dm_lib
# Scan configuration.  aps2bm_lib.update_variable_dict() may override these
# entries at run time, so the values below are defaults.
global variableDict
variableDict = {
    'SampleXIn': 0.0,
    'SampleXOut': 1.0,
    'SampleRotStart': 0.0,
    'SampleRotEnd': 180.0,
    'Projections': 1500,
    'NumWhiteImages': 20,
    'NumDarkImages': 20,
    # ####################### DO NOT MODIFY THE PARAMETERS BELOW ###################################
    'Station': '2-BM-B',
    'ExposureTime': 0.01,  # to use this as default value comment the variableDict['ExposureTime'] = global_PVs['Cam1_AcquireTime'].get() line
    # 'roiSizeX': 2448,
    # 'roiSizeY': 2048,
    'SlewSpeed': 5.0,  # to use this as default value comment the calc_blur_pixel(global_PVs, variableDict) function below
    # 'AcclRot': 10.0,
    'IOC_Prefix': '2bmbSP1:',  # options: 1. PointGrey: '2bmbPG3:', 2. Gbe '2bmbSP1:'
    'FileWriteMode': 'Stream',
    'CCD_Readout': 0.005,  # options: 1. 8bit: 0.005, 2. 16-bit: 0.01
    'ShutterOpenDelay': 0.00,
    'Recursive_Filter_Enabled': False,
    'Recursive_Filter_N_Images': 4,
}

# Mapping of PV name -> epics.PV object, populated by init_general_PVs().
global_PVs = {}


def getVariableDict():
    """Return the module-level scan-configuration dictionary."""
    global variableDict
    return variableDict
def main():
    """Run a dummy tomography fly scan at sector 2-BM.

    Sets up per-run file logging, connects the EPICS PVs, verifies the
    camera is reachable, then drives the (dummy) fly scan.  The order of
    the PV operations below matters — do not reorder casually.
    """
    # create logger
    # # python 3.5+
    # home = str(pathlib.Path.home())
    home = os.path.expanduser("~")
    logs_home = home + '/logs/'
    # make sure logs directory exists
    if not os.path.exists(logs_home):
        os.makedirs(logs_home)
    # One time-stamped log file per run.
    lfname = logs_home + datetime.strftime(datetime.now(), "%Y-%m-%d_%H:%M:%S") + '.log'
    log_lib.setup_logger(lfname)

    tic = time.time()
    aps2bm_lib.update_variable_dict(variableDict)
    aps2bm_lib.init_general_PVs(global_PVs, variableDict)
    try:
        # A None/'Unknown' serial number means the camera IOC is down.
        detector_sn = global_PVs['Cam1_SerialNumber'].get()
        if ((detector_sn == None) or (detector_sn == 'Unknown')):
            log_lib.info('*** The Point Grey Camera with EPICS IOC prefix %s is down' % variableDict['IOC_Prefix'])
            log_lib.info('  *** Failed!')
        else:
            log_lib.info('*** The Point Grey Camera with EPICS IOC prefix %s and serial number %s is on' \
                         % (variableDict['IOC_Prefix'], detector_sn))
            # calling global_PVs['Cam1_AcquireTime'] to replace the default 'ExposureTime' with the one set in the camera
            variableDict['ExposureTime'] = global_PVs['Cam1_AcquireTime'].get()
            # calling calc_blur_pixel() to replace the default 'SlewSpeed'
            blur_pixel, rot_speed, scan_time = aps2bm_lib.calc_blur_pixel(global_PVs, variableDict)
            variableDict['SlewSpeed'] = rot_speed
            # moved pgInit() here from tomo_fly_scan()
            aps2bm_lib.pgInit(global_PVs, variableDict)
            # get sample file name
            fname = global_PVs['HDF1_FileName'].get(as_string=True)
            log_lib.info(' *** Moving rotary stage to start position')
            global_PVs["Motor_SampleRot"].put(0, wait=True, timeout=600.0)
            log_lib.info(' *** Moving rotary stage to start position: Done!')
            scan_lib.dummy_tomo_fly_scan(global_PVs, variableDict, fname)
            log_lib.info(' ')
            log_lib.info(' *** Total scan time: %s minutes' % str((time.time() - tic)/60.))
            log_lib.info(' *** Data file: %s' % global_PVs['HDF1_FullFileName_RBV'].get(as_string=True))
            log_lib.info(' *** Done!')
    except KeyError:
        # A PV name missing from global_PVs — init_general_PVs() failed.
        log_lib.error('  *** Some PV assignment failed!')
        pass


if __name__ == '__main__':
    main()
|
#!/usr/bin/python3
if __name__ == "__main__":
    import hidden_4

    # Print every public (non-dunder) name exposed by hidden_4,
    # one per line.
    for name in dir(hidden_4):
        if not name.startswith("__"):
            print(name)
|
import concurrent
import socket
import threading
import time
from abc import ABC
from concurrent.futures import ThreadPoolExecutor
from typing import Optional, Dict, Tuple
import pygame
from requests import get
from database.server_communicator import ServerCommunicator
from menus.button import Button
from menus.text_box import TextBox
from tetris.colors import Colors
class MenuScreen(ABC):
    """Base class for every menu screen in the game.

    Owns the pygame window plus the screen's buttons and textboxes and
    implements the shared event loop: clicks, hover highlighting,
    textbox editing, the loading spinner and server-data caching.
    Subclasses add their widgets and may override drawings().
    """

    # Custom event fired on a timer while a delete key is held in a textbox.
    REMOVE_EVENT = pygame.USEREVENT + 1
    BUTTON_PRESS = pygame.MOUSEBUTTONDOWN
    # Shared UI sound effects, loaded once per process.
    SOUNDS = {
        "click": pygame.mixer.Sound("sounds/se_sys_select.wav"),
        "hover": pygame.mixer.Sound("sounds/se_sys_cursor2.wav"),
        "popup": pygame.mixer.Sound("sounds/se_sys_alert.wav"),
        "typing": pygame.mixer.Sound("sounds/typing_sound.mp3"),
    }
    SOUNDS["click"].set_volume(0.05)
    SOUNDS["hover"].set_volume(0.05)
    SOUNDS["popup"].set_volume(0.2)
    SOUNDS["typing"].set_volume(0.2)
    # Alpha delta applied to a button's color while hovered.
    HOVER_ALPHA = 5

    def __init__(
        self,
        width: int,
        height: int,
        server_communicator: ServerCommunicator,
        refresh_rate: int = 60,
        background_path: Optional[str] = None,
    ):
        self.width, self.height = width, height
        self.refresh_rate = refresh_rate
        self.server_communicator = server_communicator
        self.screen = pygame.display.set_mode((self.width, self.height))
        self.background_image = (
            pygame.image.load(background_path) if background_path else None
        )
        self.background_path = background_path
        self.running = True
        self.loading = False
        self.inside_button = False
        # (button, original_color) of the currently hovered button, or ().
        self.hovered_btn_and_color = ()
        # Each button maps to its (callback, args) pair.
        # (Annotation fixed: values are tuples, not bare callables.)
        self.buttons: Dict[Button, Tuple] = {}
        self.textboxes: Dict[TextBox, str] = {}
        self.actions = {}
        self.mouse_pos: Optional[Tuple[int, int]] = None
        self.deleting = False
        # Caret position inside the active textbox's text.
        self.text_offset = 0

    def run_once(self, event_handler=None):
        """Draw one frame and dispatch pending pygame events to *event_handler*."""
        if not event_handler:
            event_handler = self.handle_events
        self.update_screen()
        for event in pygame.event.get():
            # Only dispatch once the mouse position has been initialized
            # by the update_mouse_pos thread.
            if self.mouse_pos:
                event_handler(event)

    def handle_events(self, event):
        """Default event handler: quit, key repeat, typing, clicks, hover."""
        if event.type == pygame.QUIT:
            self.quit()
            pygame.quit()
            quit()

        if event.type == self.REMOVE_EVENT:
            self.removing()

        if event.type == pygame.KEYUP:
            self.deleting = False

        # If the user typed something, forward it to the active textbox.
        if event.type == pygame.KEYDOWN:
            for textbox in self.textboxes.keys():
                if textbox.active:
                    self.textbox_key_actions(textbox, event)
                    break

        # In case the user pressed the (left) mouse button.
        if event.type == self.BUTTON_PRESS and event.button == 1:
            # Reversed so the most recently added (topmost) button wins.
            for button in reversed(list(self.buttons)):
                # Check if the click is inside the button area (i.e. the button was clicked)
                # Otherwise skip
                if not button.inside_button(self.mouse_pos):
                    continue
                # Change the button color
                button.button_action(self.screen)
                # Get the response registered for the button
                func, args = self.buttons[button]
                # User pressed a button with no response function
                if not func:
                    continue
                self.SOUNDS["click"].play(0)
                threading.Thread(target=self.show_loading, daemon=True).start()
                func(*args)
                self.loading = False
                break

            for textbox in self.textboxes.keys():
                # Check if the click is inside the textbox area (i.e. whether the textbox was clicked)
                if textbox.inside_button(self.mouse_pos):
                    # Caret goes to the end of any user-entered text,
                    # or to 0 when the box still shows its placeholder.
                    self.text_offset = (
                        0
                        if self.textboxes[textbox] == textbox.text
                        else len(self.textboxes[textbox])
                    )
                    # Make the textbox writeable
                    textbox.active = True
                else:
                    textbox.active = False

        # Find if we're hovered over a button
        for button in self.buttons:
            # Mouse over a hoverable button
            if (
                button.inside_button(self.mouse_pos)
                and button.clickable
                and not button.text_only
                and not button.transparent
            ):
                # We were hovering over an adjacent button, and never left, just moved to this button
                button_changed = (
                    self.hovered_btn_and_color
                    and self.hovered_btn_and_color[0] != button
                )
                if (
                    not self.hovered_btn_and_color
                    or self.hovered_btn_and_color[0] != button
                ):
                    # Reverse the last button's color
                    if button_changed:
                        self.hovered_btn_and_color[
                            0
                        ].color = self.hovered_btn_and_color[1]
                    # Play sound
                    self.SOUNDS["hover"].play(0)
                    # Save old button color
                    self.hovered_btn_and_color = (button, button.color)
                    # Update button
                    button.button_action(
                        self.screen, alpha=self.HOVER_ALPHA, reset=False
                    )
                break
        # Mouse isn't hovered over any button
        else:
            if self.hovered_btn_and_color:
                self.hovered_btn_and_color[0].button_action(
                    self.screen, alpha=-self.HOVER_ALPHA, reset=False
                )
                self.hovered_btn_and_color = ()

    def quit(self):
        """Stop this screen's loop and reset the textbox caret."""
        self.text_offset = 0
        self.running = False

    @staticmethod
    def get_outer_ip():
        """Return this machine's public IP as reported by ipify."""
        return get("https://api.ipify.org").text

    @staticmethod
    def get_inner_ip():
        """Return this machine's LAN IP."""
        return socket.gethostbyname(socket.gethostname())

    def create_button(
        self,
        starting_pixel: Tuple[int, int],
        width: int,
        height: int,
        color: Dict,
        text: str,
        text_size: int = 45,
        text_color: Tuple[int, int, int] = Colors.WHITE,
        transparent: bool = False,
        func: callable = None,
        text_only: bool = False,
        args: Tuple = (),
        border_size: int = 10,
        clickable: bool = True,
        info_text: str = "",
        info_size: int = 27,
    ):
        """Creates a new button and appends it to the button dict.

        When *info_text* is given, an extra "ⓘ" button is placed in the
        top-right corner of the new button; clicking it pops up
        *info_text*.  Returns the created (main) button.
        """
        button = Button(
            starting_pixel,
            width,
            height,
            color,
            text,
            text_size,
            text_color,
            transparent,
            text_only,
            border_size,
            clickable,
        )
        self.buttons[button] = (func, args)
        if info_text:
            info_button_width = 50
            info_button_height = 50
            info_button = Button(
                (starting_pixel[0] + width - info_button_width, starting_pixel[1]),
                info_button_width,
                info_button_height,
                Colors.BLACK_BUTTON,
                "ⓘ",
                35,
                text_only=True,
            )
            self.buttons[info_button] = (
                self.create_popup_button,
                (info_text, info_size, Colors.BLUE),
            )
        return button

    def create_textbox(
        self,
        starting_pixel: Tuple[int, int],
        width: int,
        height: int,
        color: Dict,
        text: str,
        text_size: int = 45,
        text_color: Tuple[int, int, int] = Colors.WHITE,
        transparent: bool = False,
        text_only: bool = False,
        is_pass: bool = False,
    ) -> TextBox:
        """Creates a new textbox and appends it to the textbox dict."""
        box = TextBox(
            starting_pixel,
            width,
            height,
            color,
            text,
            text_size,
            text_color,
            transparent,
            text_only,
            is_pass,
        )
        self.textboxes[box] = ""
        return box

    def create_popup_button(self, text, size=38, color=Colors.RED):
        """Show a dismissable popup in the middle of the screen.

        Red popups (the default) are treated as alerts and play the
        popup sound.  Clicking the popup removes it (popitem pops the
        most recently added button).
        """
        if color == Colors.RED:
            self.SOUNDS["popup"].play(0)
        button_width = self.width // 2
        button_height = self.height // 3
        # Place the button in the middle of the screen
        mid_x_pos = self.width // 2 - (button_width // 2)
        self.create_button(
            (mid_x_pos, self.height // 2 - button_height),
            button_width,
            button_height,
            Colors.BLACK_BUTTON,
            text,
            size,
            text_color=color,
            func=self.buttons.popitem,
        )

    def removing(self):
        """Timer callback: keep deleting while a delete key is held."""
        for textbox in self.textboxes:
            if textbox.active and self.deleting:
                # Delete from the textbox
                self.textbox_key_actions(
                    textbox, pygame.event.Event(pygame.KEYDOWN, key=pygame.K_BACKSPACE)
                )

    def textbox_key_actions(self, textbox: TextBox, event: pygame.event.EventType):
        """Apply one key press to *textbox*: delete, move caret, jump box, or type."""
        # The user-entered text, or "" while the placeholder is shown.
        textbox_text = (
            self.textboxes[textbox] if self.textboxes[textbox] != textbox.text else ""
        )
        # BACKSPACE/DELETE
        if event.key == pygame.K_BACKSPACE or event.key == pygame.K_DELETE:
            # We haven't entered any text
            if len(textbox_text) == 0:
                return
            # Don't play the typing sound for pure modifier keys.
            # (Fixed: the original listed K_RCTRL twice; the second entry
            # was clearly meant to be the left ctrl key.)
            if event.key not in [
                pygame.K_RSHIFT,
                pygame.K_LSHIFT,
                pygame.K_CAPSLOCK,
                pygame.K_RCTRL,
                pygame.K_LCTRL,
            ]:
                self.SOUNDS["typing"].play(0)
            # Just regular deleting
            if self.text_offset != 0:
                # Delete the letter at the current offset
                self.textboxes[textbox] = (
                    textbox_text[: self.text_offset - 1]
                    + textbox_text[self.text_offset :]
                )
                # We've deleted everything - show the placeholder again
                if self.textboxes[textbox] == "":
                    self.textboxes[textbox] = textbox.text
                # Update the offset
                self.text_offset -= 1
            # Start deletion timer for fast deletion on hold
            pygame.time.set_timer(self.REMOVE_EVENT, 300 if not self.deleting else 30)
            self.deleting = True
        # ENTER/TAB
        elif event.key == 13 or event.key == pygame.K_TAB:
            try:
                # Move to the next textbox
                self.textboxes[textbox] = self.textboxes[textbox].rstrip()
                textbox.active = False
                next_textbox = self.get_next_in_dict(self.textboxes, textbox)
                self.text_offset = (
                    len(self.textboxes[next_textbox])
                    if self.textboxes[next_textbox] != next_textbox.text
                    else 0
                )
                next_textbox.active = True
            # In case there aren't any more textboxes
            except Exception:
                pass
        # Moving in the text
        elif event.key == pygame.K_RIGHT or event.key == pygame.K_LEFT:
            old_offset = self.text_offset
            # Offset the text as requested
            self.text_offset += 1 if event.key == pygame.K_RIGHT else -1
            # Invalid offset - outside of text bounds
            if self.text_offset < 0 or self.text_offset > len(textbox_text):
                self.text_offset = old_offset
            else:
                self.SOUNDS["typing"].play(0)
            textbox.show_text_in_textbox(
                textbox_text, self.screen, self.text_offset, True
            )
        # TEXT
        else:
            self.SOUNDS["typing"].play(0)
            # Insert the typed character at the caret.
            self.textboxes[textbox] = (
                textbox_text[: self.text_offset]
                + event.unicode
                + textbox_text[self.text_offset :]
            )
            self.text_offset += 1

    def display_buttons(self):
        """Display all buttons on the screen"""
        for button in self.buttons.keys():
            if not button.transparent:
                if not button.text_only:
                    button.color_button(self.screen)
                button.show_text_in_button(self.screen)

    @staticmethod
    def get_next_in_dict(dict: Dict, given_key):
        """Return the key that follows *given_key* in *dict*'s insertion
        order, or None when *given_key* is last (or absent).

        NOTE(review): the parameter shadows the builtin ``dict``; kept
        for signature compatibility.
        """
        key_index = -999
        for index, key in enumerate(dict.keys()):
            if key == given_key:
                key_index = index
            if index == key_index + 1:
                return key

    def display_textboxes(self):
        """Display all textboxes on the screen"""
        for textbox in self.textboxes.keys():
            if not textbox.transparent:
                if not textbox.text_only:
                    textbox.color_button(self.screen)
                self.textboxes[textbox] = textbox.show_text_in_textbox(
                    self.textboxes[textbox], self.screen, self.text_offset
                )

    def show_text_in_buttons(self):
        """Display the button's text for each of the buttons we have"""
        for button in self.buttons.keys():
            button.show_text_in_button(self.screen)

    def reset_textboxes(self):
        """Clear every textbox back to its placeholder text."""
        for textbox in self.textboxes:
            self.textboxes[textbox] = ""
            textbox.rendered_text = textbox.render_button_text(
                textbox.text, textbox.text_size, textbox.text_color
            )

    def update_screen(self, flip=True):
        """Displays everything needed to be displayed on the screen"""
        # Display the background image in case there is one
        if self.background_image:
            self.screen.blit(self.background_image, (0, 0))
        self.display_textboxes()
        self.display_buttons()
        self.drawings()
        if flip:
            pygame.display.flip()

    def drawings(self):
        """Hook for subclasses to draw extra widgets each frame."""
        pass

    def update_mouse_pos(self):
        """Track the mouse position; meant to run in its own thread."""
        while self.running:
            self.mouse_pos = pygame.mouse.get_pos()

    def show_loading(self):
        """Animate a quarter-circle loading spinner until self.loading clears.

        Runs in a daemon thread started from handle_events.
        """
        self.loading = True
        # Variables for circle drawing
        offset = 15
        radius = 200
        cycle_len = 6
        width = 15
        base_x = self.width // 2 - radius // 3
        base_y = self.height // 2 - radius // 3
        time.sleep(1)
        runs = 0
        # NOTE(review): last_updated is never reassigned, so the
        # runs != last_updated guard is always true — confirm intent.
        last_updated = -999
        # Draw the circles as long as we're loading
        while self.loading and self.running:
            if (
                runs % cycle_len == cycle_len - 1 or runs % cycle_len == 0
            ) and runs != last_updated:
                self.update_screen(flip=False)
                self.fade(flip=False)
            fill = runs % cycle_len == cycle_len - 2
            self.draw_3d_circle(
                base_x, base_y, radius, width, draw_top_right=True, fill=fill
            )
            if runs % cycle_len > 0:
                self.draw_3d_circle(
                    base_x,
                    base_y + offset,
                    radius,
                    width,
                    draw_bottom_right=True,
                    fill=fill,
                )
            if runs % cycle_len > 1:
                self.draw_3d_circle(
                    base_x - offset,
                    base_y + offset,
                    radius,
                    width,
                    draw_bottom_left=True,
                    fill=fill,
                )
            if runs % cycle_len > 2:
                self.draw_3d_circle(
                    base_x - offset,
                    base_y,
                    radius,
                    width,
                    draw_top_left=True,
                    fill=fill,
                )
            pygame.display.flip()
            runs += 1
            time.sleep(1)

    def draw_3d_circle(
        self,
        base_x,
        base_y,
        radius,
        width,
        draw_top_right=False,
        draw_bottom_right=False,
        draw_bottom_left=False,
        draw_top_left=False,
        fill=False,
    ):
        """Draw one quadrant of the spinner as three concentric arcs
        (main, highlight, shadow) to fake a 3D look."""
        pygame.draw.circle(
            self.screen,
            Colors.WHITE_BUTTON["button"],
            (base_x, base_y),
            radius,
            width,
            draw_top_right=draw_top_right,
            draw_bottom_right=draw_bottom_right,
            draw_bottom_left=draw_bottom_left,
            draw_top_left=draw_top_left,
        )
        pygame.draw.circle(
            self.screen,
            Colors.WHITE_BUTTON["upper"],
            (base_x, base_y),
            radius + width // 3,
            width // 3,
            draw_top_right=draw_top_right,
            draw_bottom_right=draw_bottom_right,
            draw_bottom_left=draw_bottom_left,
            draw_top_left=draw_top_left,
        )
        # A zero width makes pygame fill the innermost arc.
        width = 0 if fill else width
        pygame.draw.circle(
            self.screen,
            Colors.WHITE_BUTTON["bottom"],
            (base_x, base_y),
            radius - width // 3 * 2,
            width // 3,
            draw_top_right=draw_top_right,
            draw_bottom_right=draw_bottom_right,
            draw_bottom_left=draw_bottom_left,
            draw_top_left=draw_top_left,
        )

    def fade(self, alpha=100, flip=True):
        """Fade the screen"""
        fade = pygame.Surface((self.screen.get_rect()[2], self.screen.get_rect()[3]))
        fade.fill((0, 0, 0))
        fade.set_alpha(alpha)
        self.screen.blit(fade, (0, 0))
        if flip:
            pygame.display.update()

    def cache_stats(self, username):
        """Fetch leaderboards, rooms and the user's profile concurrently.

        Returns a dict keyed by data-set name ('apm_leaderboard', 'rooms',
        'user', ...) with each future's result.
        """
        start_time = time.time()
        cache = {}
        with ThreadPoolExecutor() as executor:
            futures = []
            cur_future = executor.submit(self.server_communicator.get_apm_leaderboard)
            futures.append(cur_future)
            cache[cur_future] = "apm_leaderboard"
            cur_future = executor.submit(
                self.server_communicator.get_marathon_leaderboard
            )
            futures.append(cur_future)
            cache[cur_future] = "marathon_leaderboard"
            cur_future = executor.submit(
                self.server_communicator.get_sprint_leaderboard, 20
            )
            futures.append(cur_future)
            cache[cur_future] = "20l_leaderboard"
            cur_future = executor.submit(
                self.server_communicator.get_sprint_leaderboard, 40
            )
            futures.append(cur_future)
            cache[cur_future] = "40l_leaderboard"
            cur_future = executor.submit(
                self.server_communicator.get_sprint_leaderboard, 100
            )
            futures.append(cur_future)
            cache[cur_future] = "100l_leaderboard"
            cur_future = executor.submit(
                self.server_communicator.get_sprint_leaderboard, 1000
            )
            futures.append(cur_future)
            cache[cur_future] = "1000l_leaderboard"
            cur_future = executor.submit(self.server_communicator.get_rooms)
            futures.append(cur_future)
            cache[cur_future] = "rooms"
            cur_future = executor.submit(
                self.server_communicator.get_user_profile, username
            )
            futures.append(cur_future)
            cache[cur_future] = "user"
            new_cache = {}
            for future in concurrent.futures.as_completed(futures):
                new_cache[cache[future]] = future.result()
        # print(new_cache)
        # print(f"it took: {time.time() - start_time}secs")
        return new_cache

    def change_binary_button(self, button):
        """Toggle a ❌/✔ button between its two states (text and color)."""
        if button.text == "❌":
            button.text_color = Colors.GREEN
            button.text = "✔"
        elif button.text == "✔":
            button.text_color = Colors.RED
            button.text = "❌"
        button.rendered_text = button.render_button_text()
|
import os, sys
import subprocess
def bash_command(cmd):
    # Fire-and-forget: run cmd through bash without waiting for completion.
    # NOTE(review): the Popen handle is discarded, so failures go unnoticed.
    subprocess.Popen(['/bin/bash', '-c', cmd])
# Build one long shell command that moves the contents of each
# output_8_10_3..10 and output_8_11_0..10 directory into the matching
# directory under outputfiles/, then run it all in a single bash call.
cmd = 'echo hi'
for i in range(3, 11):
    cmd += '; mv output_8_10_%d' % i + '/* outputfiles/output_8_10_%d' % i
for i in range(0, 11):
    cmd += '; mv output_8_11_%d' % i + '/* outputfiles/output_8_11_%d' % i
# Fixed: "print cmd" is Python-2-only syntax (SyntaxError on Python 3);
# print(cmd) behaves identically on both.
print(cmd)
bash_command(cmd)
|
def find(idx):
    """Flood-fill (BFS) the connected component containing *idx*.

    Marks every node reachable from idx in the global ``applications``
    adjacency matrix as visited in the global ``selected`` list, then
    counts the component by incrementing the global ``ans``.

    Improvements over the original: the duplicated neighbor-scan that
    preceded the BFS loop is folded into the loop by seeding the queue
    with idx itself, and the O(n) ``list.pop(0)`` is replaced with a
    deque's O(1) ``popleft``.
    """
    from collections import deque

    global ans
    selected[idx] = 1
    queue = deque([idx])
    while queue:
        cur = queue.popleft()
        # Enqueue every not-yet-visited direct neighbor of cur.
        for nxt in range(1, n + 1):
            if applications[cur][nxt] == 1 and selected[nxt] == 0:
                selected[nxt] = 1
                queue.append(nxt)
    ans += 1
# One test case per iteration: n nodes, m undirected edges given as
# 2*m numbers on one line.  Prints "#tc <component count>".
for tc in range(1, int(input())+1):
    n, m = map(int, input().split())
    nums = list(map(int, input().split()))
    # Symmetric (1-indexed) adjacency matrix of the graph.
    applications = [[0]*(n+1) for _ in range(n+1)]
    for i in range(0, m*2, 2):
        applications[nums[i]][nums[i+1]] = 1
        applications[nums[i+1]][nums[i]] = 1
    ans = 0
    selected = [0] * (n+1)
    # Count connected components: flood-fill from every unvisited node.
    for i in range(1, n+1):
        if selected[i] == 1: continue
        else:
            find(i)
    print('#{} {}'.format(tc, ans))
"""
Miscellaneous utilities
"""
import sys
from ..exceptions import GMTOSError, GMTCLibError
def clib_extension(os_name=None):
    """
    Return the extension for the shared library for the current OS.

    .. warning::
        Currently only works for OSX and Linux.

    Parameters
    ----------
    os_name : str or None
        The operating system name as given by ``sys.platform``
        (the default if None).

    Returns
    -------
    lib_ext : str
        The extension ('so', 'dylib', etc).

    Raises
    ------
    GMTOSError
        If the operating system is not recognized.
    """
    if os_name is None:
        os_name = sys.platform
    # Set the shared library extension in a platform independent way
    if os_name.startswith('linux'):
        lib_ext = 'so'
    elif os_name == 'darwin':
        # Darwin is OSX
        lib_ext = 'dylib'
    else:
        # Report the name that was actually checked (the original used
        # sys.platform here, which is wrong when os_name is passed in).
        raise GMTOSError('Unknown operating system: {}'.format(os_name))
    return lib_ext
def check_status_code(status, function):
    """
    Raise if *status* is not the zero success code.

    Parameters
    ----------
    status : int or None
        The status code returned by a GMT C API function.
    function : str
        The name of the GMT function (used in the exception message).

    Raises
    ------
    GMTCLibError
        If the status code is None or non-zero.
    """
    if status == 0:
        return
    raise GMTCLibError(
        'Failed {} with status code {}.'.format(function, status))
|
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 14 19:13:12 2019
"""
num1 = 4
if(num1>=0 and num1<=5):
print("Buen ingreso")
else:
print("Mal ingreso")
if(num1<0 or num1>5):
print("Mal ingreso")
else:
print("Buen ingreso") |
'''
Python 9일차 실습#1
1. 50개 이내의 단어, 품사, 뜻을 저장한 단어장을 구성한 후
단어장을 출력하는 프로그램을 class를 이용하여 작성한다.
( 단어가 '0'이면 단어 입력 종료 )
( 단어는 최대 20자, 품사는 최대 10자, 뜻은 최대 50자 )
'''
class Wordbook1():
    """One wordbook entry: a word, its part of speech, and its meaning."""

    def __init__(self, word_name=None, word_class=None, word_meaning=None):
        self.word_name = word_name
        self.word_class = word_class
        self.word_meaning = word_meaning

    def getWord_name(self):
        """Return the stored word."""
        return self.word_name

    def getWord_class(self):
        """Return the stored part of speech."""
        return self.word_class

    def getWord_meaning(self):
        """Return the stored meaning."""
        return self.word_meaning

    def writeWordbook(self, word_name, word_class, word_meaning):
        """Overwrite all three fields of this entry at once."""
        self.word_name, self.word_class, self.word_meaning = (
            word_name,
            word_class,
            word_meaning,
        )

    def readWordbookInfo(self):
        """Return the entry as a (word, part-of-speech, meaning) tuple."""
        return (self.word_name, self.word_class, self.word_meaning)
from ingestion.cleansing import Cleansing
from ingestion.validation import Validation
class EnforcementActionReport:
    """Skeleton for an Enforcement Action Report (EAR).

    All methods are unimplemented stubs; each is intended to gather one
    section of the report for a given ingestion *source*.
    """

    def __init__(self):
        pass

    # Get log files that should be displayed in this EAR
    def get_log_files(self, source):
        pass

    # Get cleansing status to display in EAR
    def get_cleansing_status(self, source):
        pass

    # Get validation status to display in the EAR
    def get_validation_status(self, source):
        pass

    # Get ingestion status to display in this EAR
    def get_ingestion_status(self, source):
        pass

    # Get error description to show in EAR
    def get_error_description(self, source, log_error):
        pass
|
# Date and text manipulation
import time
from coltrane import utils
from django.utils.encoding import smart_unicode
# Local application
from coltrane.models import Link
import logging
logger = logging.getLogger(__name__)
class DiggClient(object):
    """
    A minimal Digg client.

    Fetches a user's recent diggs from the Digg API and syncs them into
    the local Link model.
    """

    def __init__(self, username, count=10):
        self.username = username
        self.count = count

    def __getattr__(self, name):
        # Fixed: __getattr__ is called as __getattr__(self, name); the
        # original omitted the name parameter, so any missing-attribute
        # access raised TypeError instead of returning a client.
        # NOTE(review): returning a fresh client for *every* unknown
        # attribute is unusual — confirm this is intentional.
        return DiggClient(self.username, self.count)

    def __repr__(self):
        return "<DiggClient: %s>" % self.username

    def get_latest_data(self):
        """Return a list of dicts describing the user's recent diggs."""
        # Get the user's most recent diggs
        self.base_url = 'http://services.digg.com/1.0/endpoint?method=user.getDiggs&username=%s&count=%s'
        self.url = self.base_url % (self.username, self.count)
        self.xml = utils.getxml(self.url)
        # Parse out the story_id and datetime
        self.diggs = [(i.get('story'), i.get('date')) for i in self.xml.getchildren()]
        # The list we'll ultimately pass out
        self.link_list = []
        # Now loop through the diggs
        for story, date in self.diggs:
            # And pull information about the stories
            story_url = 'http://services.digg.com/2.0/story.getInfo?story_ids=%s' % str(story)
            story_json = utils.getjson(story_url)
            story_obj = story_json['stories'][0]
            # A dict to stuff all the good stuff in
            story_dict = {
                # Since the digg_date is expressed in epoch seconds,
                # we can start like so...
                'date': utils.parsedate(time.ctime(float(date))),
            }
            # Get the link
            story_dict['url'] = smart_unicode(story_obj.get('url'))
            # Get the title
            story_dict['title'] = smart_unicode(story_obj.get('title'))
            story_dict['description'] = smart_unicode(story_obj.get('description'))
            # Get the topic
            story_dict['topic'] = smart_unicode(story_obj.get("topic").get('name'))
            # Pass the dict out to our list
            self.link_list.append(story_dict)
        return self.link_list

    def sync(self):
        """
        When executed, will collect update your database with the latest diggs.
        """
        [self._handle_digg(d) for d in self.get_latest_data()]

    def _handle_digg(self, d):
        """
        Accepts a data dictionary harvest from Digg's API and logs any new ones the database.
        """
        try:
            # Just test the URL in case it's already been logged.
            l = Link.objects.get(url=d['url'])
            # And just quit out silently if it already exists.
            logger.debug("Digg already exists for %s" % d["title"])
        except Link.DoesNotExist:
            # If it doesn't exist, add it fresh.
            logger.debug("Adding link to %s" % d["title"])
            l = Link(
                url = d['url'],
                title = d['title'],
                description = d['description'],
                pub_date = d['date'],
                tags = d['topic'],
            )
            l.save()
|
#!/usr/bin/env python
# MATH 481 HW4 problem 2
import numpy as np
import matplotlib
import matplotlib.pylab as plt
import sys
def compute(N):
    """Solve y' on [0, 2] with a 4th-order Taylor-series step, N steps.

    Returns (t, u): the grid points t[n] = n*k with k = 2/N, and the
    approximate solution values u[n], starting from u[0] = 0.
    """
    step = 2 / N
    grid = np.zeros(N + 1)
    vals = np.zeros(N + 1)

    # Derivatives of the solution used by the Taylor expansion.
    def deriv1(x):
        return -(np.e ** x) + 2 * x + 2

    def deriv2(x):
        return -(np.e ** x) + 2

    def deriv3(x):
        return -(np.e ** x)

    def deriv4(x):
        return -(np.e ** x)

    # Precompute the grid once for readability.
    for idx in range(N + 1):
        grid[idx] = step * idx

    for idx in range(N):
        x = grid[idx]
        increment = (
            step * deriv1(x)
            + (1 / 2) * (step ** 2) * deriv2(x)
            + (1 / 6) * (step ** 3) * deriv3(x)
            + (1 / 24) * (step ** 4) * deriv4(x)
        )
        vals[idx + 1] = vals[idx] + increment
    return grid, vals
def main():
    """Plot the exact solution and print the Taylor-method error table.

    For N = 2**3 .. 2**10 computes the end-point error |U[N] - y(2)| and
    the ratio between consecutive errors (which should approach 2**4 for
    a 4th-order method).  Returns 0 for use as a process exit code.
    """
    matplotlib.rcParams.update({
        'font.size': '14',
        'font.family': 'serif',
        'text.usetex': 'true',
    })

    # Exact solution of the ODE, for reference and error measurement.
    def y(x):
        return -(np.e ** x) + x ** 2 + 2 * x + 1

    xorig = np.linspace(0, 2, 100)
    plt.figure(1)
    plt.clf()
    plt.grid()
    plt.title('Taylor series method plots')
    plt.plot(xorig, y(xorig), 'r-')
    plt.xlabel('x')
    plt.ylabel('y')
    results = []
    for j in range(3, 11):
        N = 2**j
        t, U = compute(N)
        err = abs(U[N] - y(2))
        results.append({
            'k': 2/N,
            'error': err,
            # Ratio to the previous (coarser) run; '-' for the first row.
            'ratio': '-' if j == 3 else results[-1]['error'] / err,
        })
    for i in results:
        print('k: {0}, err: {1}, ratio: {2}'.format(i['k'], i['error'], i['ratio']))
    return 0


if __name__ == '__main__':
    sys.exit(main())
|
from bs4 import BeautifulSoup
from contracts import contract
from contracts.utils import check_isinstance, raise_desc, indent
from mcdp.exceptions import DPInternalError
from mcdp_report.html import ATTR_WHERE_CHAR, ATTR_WHERE_CHAR_END
class NoLocationFound(DPInternalError):
    """Raised when no HTML element covers the requested source location."""
    pass
@contract(html=bytes, returns=bytes)
def html_mark(html, where, add_class, tooltip=None):
    """
    Takes a utf-8 encoded string and returns another html string.

    Finds the smallest <span> whose character range (recorded in the
    ATTR_WHERE_CHAR / ATTR_WHERE_CHAR_END attributes) encloses *where*,
    and appends *add_class* to that span's class list.

    The tooltip functionality is disabled for now.

    Raise NoLocationFound if the location is not found.

    NOTE(review): this is Python 2 code (uses ``unicode``); the bytes/str
    concatenation below would need revisiting for Python 3.
    """
    check_isinstance(html, bytes)
    # Wrap in a sentinel element so the fragment survives the round trip
    # through BeautifulSoup; the wrapper is stripped again at the end.
    html = '<www><pre>' + html + '</pre></www>'
    soup = BeautifulSoup(html, 'lxml', from_encoding='utf-8')
    elements = soup.find_all("span")
    found = []
    for e in elements:
        if e.has_attr(ATTR_WHERE_CHAR):
            character = int(e[ATTR_WHERE_CHAR])
            character_end = int(e[ATTR_WHERE_CHAR_END])
            #print (where.character, character, character_end, where.character_end)
            # inside = where.character <= character <= character_end <= where.character_end
            # The span must fully enclose the requested range.
            inside = character <= where.character <= where.character_end <= character_end
            if inside:
                found.append(e)
    if not found:
        msg = 'Cannot find any html element for this location:\n\n%s' % where
        msg += '\nwhere start: %s end: %s' % (where.character, where.character_end)
        msg += '\nwhere.string = %r' % where.string
        msg += '\n' + indent(html.__repr__(), 'html ')
        raise_desc(NoLocationFound, msg)

    # find the smallest one (tightest enclosing span)
    def e_size(e):
        character = int(e[ATTR_WHERE_CHAR])
        character_end = int(e[ATTR_WHERE_CHAR_END])
        l = character_end - character
        return l
    ordered = sorted(found, key=e_size)
    e2 = ordered[0]
    e2['class'] = e2.get('class', []) + [add_class]
    if tooltip is not None:
        script = 'show_tooltip(this, %r);' % tooltip
        tooltip_u = unicode(tooltip, 'utf-8')
        e2['onmouseover'] = script
        e2['title'] = tooltip_u
    # Strip the sentinel wrapper before returning the fragment.
    pre = soup.body.www
    s = str(pre)
    s = s.replace('<www><pre>', '')
    s = s.replace('</pre></www>', '')
    assert isinstance(s, str)
    return s
def html_mark_syntax_error(string, e):
    """Wrap everything from the error position onward in a red <span>."""
    pos = e.where.character
    head = string[:pos]
    tail = string[pos:]
    return '%s<span style="color:red">%s</span>' % (head, tail)
|
from string import Template
from graphene.test import Client
from django.test import TestCase
from ipam.models import VLAN
from netbox_graphql.schema import schema
from netbox_graphql.tests.utils import obj_to_global_id
from netbox_graphql.tests.factories.ipam_factories import VLANFactory, RoleFactory
from netbox_graphql.tests.factories.tenant_factories import TenantFactory
class CreateTestCase(TestCase):
    """Tests for the newVlan GraphQL mutation."""

    @classmethod
    def setUpTestData(cls):
        cls.tenant = TenantFactory()
        cls.role = RoleFactory()
        cls.query = Template('''
        mutation{
          newVlan(input: { tenant: "$tenantId", role: "$roleId", vid: 2, name: "New Vlan"}) {
            vlan{
              name
              vid
              tenant{
                name
              }
              role{
                name
              }
            }
          }
        }
        ''').substitute(tenantId=obj_to_global_id(cls.tenant),
                        roleId=obj_to_global_id(cls.role))

    def test_creating_returns_no_error(self):
        result = schema.execute(self.query)
        assert not result.errors

    def test_creating_returns_data(self):
        expected = {'newVlan':
                    {'vlan': {'name': 'New Vlan',
                              'vid': 2,
                              'tenant': {'name': self.tenant.name},
                              'role': {'name': self.role.name}
                              }}}
        result = schema.execute(self.query)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(result.data, expected)

    def test_creating_creates_it(self):
        oldCount = VLAN.objects.all().count()
        schema.execute(self.query)
        self.assertEqual(VLAN.objects.all().count(), oldCount + 1)
class QueryMultipleTestCase(TestCase):
    """Tests for querying all VLANs."""

    @classmethod
    def setUpTestData(cls):
        cls.first = VLANFactory()
        cls.second = VLANFactory()
        cls.query = '''
        {
          vlans {
            edges {
              node {
                id
              }
            }
          }
        }
        '''

    def test_querying_all_returns_no_error(self):
        result = schema.execute(self.query)
        assert not result.errors

    def test_querying_all_returns_two_results(self):
        result = schema.execute(self.query)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(len(result.data['vlans']['edges']), 2)
class QuerySingleTestCase(TestCase):
    """Tests for querying a single VLAN by global id."""

    @classmethod
    def setUpTestData(cls):
        cls.first = VLANFactory()
        cls.second = VLANFactory()
        cls.query = Template('''
        {
          vlans(id: "$id") {
            edges {
              node {
                name
                vid
                tenant {
                  name
                }
                role {
                  name
                }
              }
            }
          }
        }
        ''').substitute(id=obj_to_global_id(cls.second))

    def test_querying_single_returns_no_error(self):
        result = schema.execute(self.query)
        assert not result.errors

    def test_querying_single_returns_result(self):
        result = schema.execute(self.query)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(len(result.data['vlans']['edges']), 1)

    def test_querying_single_returns_expected_result(self):
        result = schema.execute(self.query)
        expected = {'vlans':
                    {'edges': [
                        {'node': {'name': self.second.name,
                                  'vid': self.second.vid,
                                  'tenant': {'name': self.second.tenant.name},
                                  'role': {'name': self.second.role.name}}}
                    ]}}
        self.assertEqual(result.data, expected)
class UpdateTestCase(TestCase):
    """Tests for the updateVlan GraphQL mutation."""

    @classmethod
    def setUpTestData(cls):
        cls.first = VLANFactory()
        cls.tenant = TenantFactory()
        cls.query = Template('''
        mutation{
          updateVlan(input: { id: "$id", vid: 10, name: "New Name", tenant: "$tenantId"}) {
            vlan{
              name
              vid
              tenant {
                name
              }
            }
          }
        }
        ''').substitute(id=obj_to_global_id(cls.first),
                        tenantId=obj_to_global_id(cls.tenant))

    def test_updating_returns_no_error(self):
        result = schema.execute(self.query)
        assert not result.errors

    def test_updating_doesnt_change_count(self):
        oldCount = VLAN.objects.all().count()
        schema.execute(self.query)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(VLAN.objects.all().count(), oldCount)

    def test_updating_returns_updated_data(self):
        expected = {'updateVlan':
                    {'vlan': {'name': 'New Name',
                              'vid': 10,
                              'tenant': {'name': self.tenant.name}}}}
        result = schema.execute(self.query)
        self.assertEqual(result.data, expected)

    def test_updating_alters_data(self):
        schema.execute(self.query)
        vlan = VLAN.objects.get(id=self.first.id)
        self.assertEqual(vlan.name, 'New Name')
        self.assertEqual(vlan.vid, 10)
        self.assertEqual(vlan.tenant.name, self.tenant.name)
class DeleteTestCase(TestCase):
    """Tests for the deleteVlan GraphQL mutation."""

    @classmethod
    def setUpTestData(cls):
        cls.first = VLANFactory()
        cls.query = Template('''
        mutation{
          deleteVlan(input: { id:"$id"}) {
            vlan{
              id
            }
          }
        }
        ''').substitute(id=obj_to_global_id(cls.first))

    def test_deleting_returns_no_error(self):
        result = schema.execute(self.query)
        assert not result.errors

    def test_deleting_removes_a_type(self):
        oldCount = VLAN.objects.all().count()
        schema.execute(self.query)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(VLAN.objects.all().count(), oldCount - 1)
|
# Smoke-test driver: feed a sample program through the hand-written
# lexer/parser pair (Portuguese module names: "analisador" = analyzer).
from analisador_lexico import Analisador_Lexico
from analisador_sintatico import Parser

# Sample source in the course's toy language.  The '%' prefix appears to
# mark a comment line — TODO confirm against the lexer's token rules.
text_input = """
program teste1
declare
integer a := 2;
begin
write(3 / 2);
write(3 * 3);
write(9 - 5);
%write(a);
if(9 = 1) then
write(3);
else
if(9 <> 3) then
write(10);
end
end
end
"""

pg = Parser()
# NOTE(review): presumably parse() builds the grammar/production rules and
# get_parser() runs the generated parser over the input — confirm the API.
pg.parse()
parser = pg.get_parser(text_input)
|
# -*- coding: utf-8 -*-
# author: ysoftman
# python version : 3.x
# desc : pandas test — exploring the Olympics medal table with boolean
#        masking and index manipulation.
import numpy as np
import pandas as pd

print("load olympics.csv ... ")
print()

# Read the CSV, skipping the first line (skiprows counts from line index 0)
# and using the first column (country name) as the index.
df = pd.read_csv('olympics.csv', skiprows=1, index_col=0)
print("df.head()=\n", df.head(), sep="")
print()

# Current column names.
print(df.columns)
print()

# Rename the columns: the raw header encodes the medal type as a numeric
# prefix ('01' gold, '02' silver, '03' bronze) and uses '№' for counts.
for col in df.columns:
    if col[:2] == '01':
        df.rename(columns={col: 'Gold' + col[4:]}, inplace=True)
    if col[:2] == '02':
        df.rename(columns={col: 'Silver' + col[4:]}, inplace=True)
    if col[:2] == '03':
        df.rename(columns={col: 'Bronze' + col[4:]}, inplace=True)
    if col[:1] == '№':
        df.rename(columns={col: '#' + col[1:]}, inplace=True)
print("df.head()=\n", df.head(), sep="")
print()

# Boolean mask per country (index): won at least one gold medal?
print(df['Gold'] > 0)
# Countries with gold as a separate dataframe (non-matches become NaN).
# NOTE(review): "only_god" in the printed labels below is a typo for
# "only_gold"; left untouched because it is runtime output text.
only_gold = df.where(df['Gold'] > 0)
print("only_god =\n", only_gold)
print("df['Gold'].count = ", df['Gold'].count())
print("only_gold['Gold'].count = ", only_gold['Gold'].count())
# Drop NaN rows; dropna(1) would drop NaN columns instead.
only_gold = only_gold.dropna()
print("only_god =\n", only_gold)
print()

# Countries with a gold in the summer (Gold) or winter (Gold.1) games.
print("df[(df['Gold'] > 0) | (df['Gold.1'] > 0)] = \n",
      df[(df['Gold'] > 0) | (df['Gold.1'] > 0)])
print()

# Countries with no summer gold (Gold) but at least one winter gold (Gold.1).
print("df[(df['Gold'] == 0) & (df['Gold.1'] > 0)] = \n",
      df[(df['Gold'] == 0) & (df['Gold.1'] > 0)])
print()

# Changing the index:
# first preserve the current index (country names) as a regular column.
df['Country'] = df.index
# Any column can become the index; 'Gold' becomes both the index values and
# the index's name.
df = df.set_index('Gold')
print("df.set_index('Gold') = \n", df)
print()

# Resetting the index falls back to the default integer range index.
df = df.reset_index()
print("df.reset_index() = \n", df)
print()
|
from flask import Flask, render_template, request, redirect
import os
from selenium import webdriver
import time
from flask import Blueprint
from bs4 import BeautifulSoup as soup
import logging
from logging.handlers import RotatingFileHandler
from flask import current_app
# Blueprint holding all routes of this module; registered on the app elsewhere.
flask_app = Blueprint('flask_app', __name__)


@flask_app.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')
# # Distance Page
@flask_app.route('/distance/<fr>/<to>', methods=['GET', 'POST'])
def distance(fr, to):
    """Return the driving distance between two addresses.

    Special cases short-circuit to the string ``'0'``: Moscow→Moscow,
    identical addresses, or a missing address.  Otherwise the distance is
    scraped from Google Maps via :func:`from_to`.
    """
    # NOTE(review): configuring logging on every request is wasteful; kept
    # here to preserve the existing log-file behaviour.
    logging.basicConfig(filename='example.log', level=logging.DEBUG)
    logging.debug('This message should go to the log file')
    logging.info('So should this')
    logging.warning('And this, too')
    # Guard clauses replace the original nested if/else pyramid.
    if fr == 'Moscow' and to == 'Moscow':
        # Fixed a broken '%s' placeholder that had no matching argument.
        current_app.logger.info(
            'The specified address is located inside the MKAD, '
            'the distance does not need to be calculated')
        return '0'
    if fr == to:
        current_app.logger.info('Cannot calculate in the same address')
        return '0'
    if fr == '' or to == '':
        current_app.logger.info('Error')
        return '0'
    result = from_to(fr, to)
    # Lazy %-formatting: the original message had no placeholder for result.
    current_app.logger.info('The result is %s', result)
    return result
def from_to(fr, to):
    """Scrape the Google Maps driving distance between *fr* and *to*.

    Returns a dict ``{"distance": "<value> <unit>"}``.
    """
    url = 'https://www.google.fr/maps/dir/{}/{}/data=!4m2!4m1!3e0'.format(fr, to)
    # NOTE(review): hard-coded Windows chromedriver path — should be made
    # configurable (env var / settings).
    driver = webdriver.Chrome(executable_path=r'D:/chromedriver/chromedriver.exe')
    try:
        driver.get(url)
        # Crude wait for the page to render; an explicit WebDriverWait on the
        # distance element would be more reliable.
        time.sleep(1)
        page_soup = soup(driver.page_source, "html.parser")
        # Distance value + unit, e.g. "12.3 km".
        css_dist = "div[class^='section-directions-trip-distance'] > div"
        distance = page_soup.select_one(css_dist).text
        return {"distance": distance}
    finally:
        # Bug fix: the browser was never closed, leaking a Chrome process
        # per request.
        driver.quit()
if __name__ == '__main__':
    # Bug fix: Blueprint objects have no run(); mount the blueprint on a
    # real Flask application before serving it.
    app = Flask(__name__)
    app.register_blueprint(flask_app)
    # Threaded option to enable multiple instances for multiple user access support
    app.run(debug=True, port=5000)
# Pipeline driver: pre-process dataset 'abcd', then build and fit a classifier.
import preprocessing
import classification

# Perform the standard pre-processing algorithm.
# NOTE(review): assumes execute() returns an object exposing X_train/y_train —
# confirm against the preprocessing module.
result = preprocessing.execute('abcd',
                               features_to_remove=[0, 1, 2],
                               features_to_extract=[4, 5])
# Build a network sized for the 11 features left after removal/extraction.
classifier = classification.build(dataset_id='abcd',
                                  input_dimensions=11,
                                  output_dimensions=1,
                                  additional_hidden_layers=0)
classification.fit(X=result.X_train, y=result.y_train, classifier=classifier)
|
import lxml.html as html
import scrapy

# XPath selectors used by the spider below.  The notes record the
# post-processing each raw value receives before export.

# One <a> element per card on a set's text-view page.
CARDS_ITEMS = '//a[@class="card-text text-grid-item"]'
# Link to each set (collection) from the sets index table.
COLLECTIONS_URLS = '//tbody/tr/td[1]/a/@href'
# Set title; cleaned with .replace('\n', '').strip().
COLLECTION_NAME = '//h1[@class="set-header-title-h1"]/text()'
# Card name; cleaned with .replace('\n', '').strip(); may contain empty items.
NAMES = '//h6[@class="card-text-title"][1]/text()'
# Card type line; cleaned with .replace('\n', '').strip().
TYPES = '//p[@class="card-text-type-line"]/text()'
DESCRIPTIONS = '//div[@class="card-text-oracle"]/p/text()'  # joined with spaces
# Power/toughness stats; may be empty for non-creature cards.
STATS = '//a/div[@class="card-text-stats"]/text()'
# Relative URL of the card's detail page.
CARDS_LINKS = '//a[@class="card-text text-grid-item"]/@href'
class MagicSpider(scrapy.Spider):
    """Crawl scryfall.com set pages and export every card as a CSV row."""
    name = 'magic'
    start_urls = [
        'https://scryfall.com/sets?lang=en'
    ]
    # Feed export plus throttling/memory settings scoped to this spider.
    custom_settings = {
        'FEEDS': {
            'cards.csv': {
                'format': 'csv',
                'encoding': 'utf-8',
                'indent': 4,
                'overwrite': True
            }
        },
        'USER_AGENT': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36',
        'CONCURRENT_REQUESTS': 20,
        'CONCURRENT_REQUESTS_PER_DOMAIN': 12,
        'MEMUSAGE_WARNING_MB': 1000,
        'MEMUSAGE_NOTIFY_MAIL': ['francobonfigliovazquez@gmail.com']
    }

    def parse_cards(self, response):
        """Parse one collection's text-view page and yield a dict per card."""
        cards = response.xpath(CARDS_ITEMS).getall()
        names = []
        typess = []
        stats = []
        cards_links = []
        descriptions = []
        # Collection (set) title, shared by every card on this page.
        collection = response.xpath(
            COLLECTION_NAME).get().replace('\n', '').strip()
        # First pass: re-parse each card's HTML snippet with lxml and collect
        # raw field lists in parallel.
        for card in cards:
            card = html.fromstring(card)
            # TODO: indexing [0] here would avoid the [0] selections in the
            # yield below.
            names.append(card.xpath(NAMES))
            typess.append(card.xpath(TYPES))  # raw list; cleaned in the yield
            stats.append(card.xpath(STATS))   # may be empty (non-creatures)
            cards_links.append(card.xpath(CARDS_LINKS))
            descriptions.append(' '.join(card.xpath(DESCRIPTIONS)))
        # Second pass: walk the parallel lists in lockstep, one item per card.
        for name, typee, stat, description, card_link in zip(names, typess, stats, descriptions, cards_links):
            if description == '':
                description = '-'
            if stat == []:
                stat.append('-')  # placeholder so stat[0] below is safe
            yield {
                'name': name[0].replace('\n', '').strip(),
                'type': typee[0].replace('\n', '').replace('—', '|').strip(),
                'stats': stat[0].replace('\n', '').strip(),
                'description': description,
                # NOTE(review): 'colection' is a typo, but it is the CSV
                # column name consumers already see — renaming it would
                # change the output schema.
                'colection': collection,
                'card_link': card_link
            }

    def parse(self, response):
        """Entry point: follow each collection link in its text-grid view."""
        collections_url = response.xpath(COLLECTIONS_URLS).getall()
        for collection_url in collections_url:
            # '?as=text' switches the set page to the text layout that the
            # XPath selectors above were written for.
            collection_url += '?as=text'
            yield response.follow(collection_url, callback=self.parse_cards)
|
# Minimum acceptable password length.
Min = 6


def main():
    """Prompt for a password and print a masked version of it."""
    secret = get_password(Min)
    print('*' * len(secret))
def get_password(Min):
    """Prompt until the user enters a password of at least *Min* characters.

    Returns the accepted password string.
    """
    # Hoisted so the prompt is built once; also fixes the user-facing
    # typo "charaters" -> "characters".
    prompt = "Enter password with {} or more characters".format(Min)
    password = input(prompt)
    while len(password) < Min:
        print("Enter again")
        password = input(prompt)
    return password
# Guard the entry point so importing this module does not prompt for input.
if __name__ == '__main__':
    main()
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import sys
from pathlib import Path
HERE = Path(__file__).parent.parent
sys.path.insert(0, str(HERE))
# -- Project information -----------------------------------------------------
project = 'MiScan'
copyright = '2019, Qulab USTC'
author = 'Qulab USTC'
version = '1.0.2'
# The full version, including alpha/beta/rc tags
release = '1.0.2'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.napoleon',
'sphinx.ext.intersphinx'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
master_doc = 'index'
# other
source_suffix = {
'.rst': 'restructuredtext',
}
source_encoding = 'utf-8-sig'
pygments_style = 'sphinx'
|
#!/usr/bin/python
# -*- coding: utf8 -*-
import datetime
import time
import tushare as ts
import pandas as pd
'''
https://www.jb51.net/article/213955.htm
日K 转换为 周K
'''
def test1():
    """Convert daily K-line bars of 300068.SZ to weekly bars and dump to CSV.

    Weekly open = first trading day of the week, close = last, high/low =
    the week's extremes.
    """
    df = ts.pro_bar(ts_code='300068.SZ', start_date='20190628', end_date='20210823', ma=[50, 300])
    # Parse trade_date strings like '20191122' into datetimes
    # (invalid values become NaT).
    df['trade_date'] = pd.to_datetime(df['trade_date'], format='%Y%m%d', errors='coerce')
    # Use the date as the index so resample() can group by calendar week.
    df.set_index('trade_date', inplace=True)
    # Build the weekly bars.
    df_week = pd.DataFrame()
    df_week['open'] = df['open'].resample('W').first()
    df_week['close'] = df['close'].resample('W').last()
    df_week['high'] = df['high'].resample('W').max()
    df_week['low'] = df['low'].resample('W').min()
    # Bug fix: the Python 2 print statement was a SyntaxError under Python 3.
    print(df_week[0:10])
    df_week.to_csv('1.csv')
def convert_date(d):
    """Convert a 'YYYYMMDD' date string (e.g. '20210820') to 'DD/MM/YYYY'.

    No validation is performed; the input is assumed to be 8 digits.
    """
    # Removed a stray no-op string literal left over from debugging.
    year = d[0:4]
    month = d[4:6]
    day = d[6:8]
    return day + '/' + month + '/' + year
# Run the daily-to-weekly K-line conversion demo when executed as a script.
if __name__ == '__main__':
    test1()
#coding: utf-8
from casino.models import Casino
from common.models import Language
from django.db import models
from django.utils.translation import ugettext as _
# from utilites.funcs import easy_upload_path
class News(models.Model):
    """
    News item attached to a casino, belonging to one fixed category.
    """
    # Bug fix: "Турниры" previously reused value 3, colliding with "Акции";
    # every choice now has a distinct integer key.
    CATEGORY = ((1, _(u"Общее"),), (2, _(u"Бонусы"),), (3, _(u"Акции"),), (4, _(u"Турниры"),), )
    language = models.ForeignKey(Language, verbose_name=_(u"Язык"))
    casino = models.ForeignKey(Casino, verbose_name=_(u"Относится к казино"))  # , blank=True
    urlkey = models.SlugField(_(u"URL ключ"), max_length=250)
    name = models.CharField(_(u"Заголовок новости"), max_length=250)
    category = models.SmallIntegerField(_(u"Категория"), choices=CATEGORY)
    description = models.TextField(_(u"Краткое описание"))
    text = models.TextField(_(u"Текст"))
    # Stamped automatically on first save.
    date = models.DateTimeField(_(u"Дата публикации"), auto_now_add=True)

    def __unicode__(self):
        """
        Return the headline as the object's display name.
        """
        return self.name

    class Meta:
        verbose_name = _(u"Новость о казино")
        verbose_name_plural = _(u"Новости о казино")
class Article(models.Model):
    """
    Article model. Simple free-standing site text content, one per language.
    """
    language = models.ForeignKey(Language, verbose_name=_(u"Язык"))
    urlkey = models.SlugField(_(u"URL ключ"), max_length=250)
    name = models.CharField(_(u"Название"), max_length=250)
    description = models.CharField(_(u"Краткое описание"), max_length=250)
    text = models.TextField(_(u"Подробный текст"))
    # Free-form tag string; no structured tagging model is used here.
    tags = models.CharField(_(u"Тэги"), max_length=100)
    # Stamped automatically on first save.
    date = models.DateTimeField(_(u"Дата публикации"), auto_now_add=True)

    def __unicode__(self):
        """
        Return the article's name as its display representation.
        """
        return self.name

    class Meta:
        verbose_name = _(u"Статья")
        verbose_name_plural = _(u"Статьи")
|
# Static-typing guard: the typing names are visible to annotation tooling
# but the block never executes at runtime (avoids the import cost).
if False:
    from typing import Dict, List, Tuple, Union, Optional
def MFnParticleSystem_lifespan(*args, **kwargs): pass
def uIntPtr_assign(*args, **kwargs): pass
def MFnAirField_inheritVelocity(*args, **kwargs): pass
def MFnFluid_toGridIndex(*args, **kwargs): pass
def MnCloth_setInputMeshAttractDamping(*args, **kwargs): pass
def MnParticle_setSelfCollisionSoftness(*args, **kwargs): pass
def MRenderLine_getFlatness(*args, **kwargs): pass
def new_intPtr(*args, **kwargs): pass
def MDynamicsUtil_addNodeTypeToRunup(*args, **kwargs): pass
def MFnVolumeAxisField_turbulence(*args, **kwargs): pass
def MnCloth_setSelfCrossoverPush(*args, **kwargs): pass
def MnRigid_createNRigid(*args, **kwargs): pass
def MnCloth_setShearResistance(*args, **kwargs): pass
def MDynamicsUtil_swigregister(*args, **kwargs): pass
def MDynSweptTriangle_vertex(*args, **kwargs): pass
def delete_MFnFluid(*args, **kwargs): pass
def MFnParticleSystem_setPerParticleAttribute(*args, **kwargs): pass
def MFnVolumeAxisField_detailTurbulence(*args, **kwargs): pass
def MnCloth_setBendAngleScale(*args, **kwargs): pass
def new_MnSolver(*args, **kwargs): pass
def MFnParticleSystem_particleName(*args, **kwargs): pass
def floatPtr_swigregister(*args, **kwargs): pass
def MFnField_maxDistance(*args, **kwargs): pass
def MFnFluid_getCoordinates(*args, **kwargs): pass
def new_MFnNewtonField(*args, **kwargs): pass
def delete_MFnRadialField(*args, **kwargs): pass
def MHairSystem_registeringCallableScript(*args, **kwargs): pass
def MnParticle_setTopology(*args, **kwargs): pass
def MnSolver_removeAllCollisions(*args, **kwargs): pass
def delete_MFnVortexField(*args, **kwargs): pass
def new_charPtr(*args, **kwargs): pass
def MHairSystem_className(*args, **kwargs): pass
def MFnAirField_setSpeed(*args, **kwargs): pass
def MFnFluid_getForceAtPoint(*args, **kwargs): pass
def MFnParticleSystem_renderType(*args, **kwargs): pass
def delete_MFnUniformField(*args, **kwargs): pass
def MnCloth_setInputMeshAttractPositions(*args, **kwargs): pass
def MnParticle_setMaxSelfCollisionIterations(*args, **kwargs): pass
def MRenderLine_getWidth(*args, **kwargs): pass
def MDynamicsUtil_runupIfRequired(*args, **kwargs): pass
def MFnDragField_direction(*args, **kwargs): pass
def MFnGravityField_swigregister(*args, **kwargs): pass
def MFnParticleSystem_age(*args, **kwargs): pass
def MFnVolumeAxisField_directionalSpeed(*args, **kwargs): pass
def MnCloth_setSelfTrappedCheck(*args, **kwargs): pass
def MnRigid_setTopology(*args, **kwargs): pass
def MnCloth_getInverseMass(*args, **kwargs): pass
def intPtr_value(*args, **kwargs): pass
def MFnInstancer_particleCount(*args, **kwargs): pass
def MFnFluid_type(*args, **kwargs): pass
def MFnNIdData_swigregister(*args, **kwargs): pass
def MFnParticleSystem_getPerParticleAttribute(*args, **kwargs): pass
def MFnVolumeAxisField_setTurbulenceOffset(*args, **kwargs): pass
def MnCloth_setStretchAndCompressionResistance(*args, **kwargs): pass
def MnRigid_swigregister(*args, **kwargs): pass
def delete_uIntPtr(*args, **kwargs): pass
def floatPtr_frompointer(*args, **kwargs): pass
def MFnField_setAttenuation(*args, **kwargs): pass
def MFnFluid_fuel(*args, **kwargs): pass
def MFnNewtonField_className(*args, **kwargs): pass
def MFnRadialField_type(*args, **kwargs): pass
def MHairSystem_setRegisteringCallableScript(*args, **kwargs): pass
def MnParticle_createNParticle(*args, **kwargs): pass
def MnSolver_makeAllCollide(*args, **kwargs): pass
def charPtr_swigregister(*args, **kwargs): pass
def new_uIntPtr(*args, **kwargs): pass
def MFnParticleSystem_emission(*args, **kwargs): pass
def MFnFluid_getColorMode(*args, **kwargs): pass
def MFnParticleSystem_setCount(*args, **kwargs): pass
def MFnUniformField_type(*args, **kwargs): pass
def MnCloth_setLinksRestLengthFromPositions(*args, **kwargs): pass
def MnParticle_setMaxIterations(*args, **kwargs): pass
def MRenderLine_getTwist(*args, **kwargs): pass
def MDynamicsUtil_inRunup(*args, **kwargs): pass
def new_MFnDragField(*args, **kwargs): pass
def MFnGravityField_setDirection(*args, **kwargs): pass
def MFnParticleSystem_particleIds(*args, **kwargs): pass
def MFnVolumeAxisField_speedAwayFromCenter(*args, **kwargs): pass
def MnCloth_setSelfCollisionSoftness(*args, **kwargs): pass
def delete_MnRigid(*args, **kwargs): pass
def MnRigid_getPositions(*args, **kwargs): pass
def MFnFluid_gridSize(*args, **kwargs): pass
def delete_intPtr(*args, **kwargs): pass
def intPtr_assign(*args, **kwargs): pass
def new_MDynSweptTriangle(*args, **kwargs): pass
def MDynSweptLine_normal(*args, **kwargs): pass
def MFnNIdData_getObjectPtr(*args, **kwargs): pass
def MFnParticleSystem_receiveShadows(*args, **kwargs): pass
def MFnVolumeAxisField_setTurbulenceFrequency(*args, **kwargs): pass
def MnRigid_getFriction(*args, **kwargs): pass
def SWIG_PyInstanceMethod_New(*args, **kwargs): pass
def floatPtr_cast(*args, **kwargs): pass
def MFnField_attenuation(*args, **kwargs): pass
def MFnFluid_temperature(*args, **kwargs): pass
def delete_MFnNewtonField(*args, **kwargs): pass
def MFnPfxGeometry_swigregister(*args, **kwargs): pass
def delete_MnParticle(*args, **kwargs): pass
def MnSolver_setStartTime(*args, **kwargs): pass
def boolPtr_swigregister(*args, **kwargs): pass
def MFnAirField_setDirection(*args, **kwargs): pass
def MFnDynSweptGeometryData_lineCount(*args, **kwargs): pass
def MFnParticleSystem_count(*args, **kwargs): pass
def MFnTurbulenceField_swigregister(*args, **kwargs): pass
def MnCloth_setBendRestAngleFromPositions(*args, **kwargs): pass
def MnParticle_setSurfaceTension(*args, **kwargs): pass
def MDynamicsUtil_evalDynamics2dTexture(*args, **kwargs): pass
def MFnDragField_className(*args, **kwargs): pass
def MFnGravityField_direction(*args, **kwargs): pass
def MFnParticleSystem_visibleInRefractions(*args, **kwargs): pass
def MFnParticleSystem_tailSize(*args, **kwargs): pass
def MnCloth_setMaxSelfCollisionIterations(*args, **kwargs): pass
def new_MnRigid(*args, **kwargs): pass
def MFnUniformField_className(*args, **kwargs): pass
def MDynSweptLine_swigregister(*args, **kwargs): pass
def MFnDynSweptGeometryData_create(*args, **kwargs): pass
def MFnFluid_getTemperatureMode(*args, **kwargs): pass
def MFnParticleSystem_castsShadows(*args, **kwargs): pass
def MFnVolumeAxisField_setTurbulenceSpeed(*args, **kwargs): pass
def MnCloth_setBendAngleDropoff(*args, **kwargs): pass
def MnRigid_getBounce(*args, **kwargs): pass
def floatPtr_value(*args, **kwargs): pass
def MFnField_setMagnitude(*args, **kwargs): pass
def MFnFluid_pressure(*args, **kwargs): pass
def MFnNewtonField_type(*args, **kwargs): pass
def MFnPfxGeometry_getBoundingBox(*args, **kwargs): pass
def MHairSystem_getCollisionObject(*args, **kwargs): pass
def new_MnParticle(*args, **kwargs): pass
def MnSolver_setMaxIterations(*args, **kwargs): pass
def boolPtr_frompointer(*args, **kwargs): pass
def MFnAirField_direction(*args, **kwargs): pass
def MFnParticleSystem_emit(*args, **kwargs): pass
def MnCloth_setVelocities(*args, **kwargs): pass
def MnParticle_setViscosity(*args, **kwargs): pass
def MRenderLine_assign(*args, **kwargs): pass
def delete_MDynSweptTriangle(*args, **kwargs): pass
def delete_MFnDragField(*args, **kwargs): pass
def new_MFnGravityField(*args, **kwargs): pass
def MFnVolumeAxisField_speedAroundAxis(*args, **kwargs): pass
def MnCloth_setMaxIterations(*args, **kwargs): pass
def MnParticle_swigregister(*args, **kwargs): pass
def MFnParticleSystem_disableCloudAxis(*args, **kwargs): pass
def new_MFnNIdData(*args, **kwargs): pass
def MFnFluid_getFuelMode(*args, **kwargs): pass
def MFnVolumeAxisField_setTurbulence(*args, **kwargs): pass
def MnRigid_getInverseMass(*args, **kwargs): pass
def floatPtr_assign(*args, **kwargs): pass
def MFnField_magnitude(*args, **kwargs): pass
def MFnFluid_getVelocity(*args, **kwargs): pass
def MFnNObjectData_swigregister(*args, **kwargs): pass
def MFnPfxGeometry_getLineData(*args, **kwargs): pass
def MHairSystem_unregisterCollisionSolverPreFrame(*args, **kwargs): pass
def MnCloth_swigregister(*args, **kwargs): pass
def MnSolver_setSubsteps(*args, **kwargs): pass
def MRenderLineArray_className(*args, **kwargs): pass
def boolPtr_cast(*args, **kwargs): pass
def new_MFnAirField(*args, **kwargs): pass
def MFnFluid_setCoordinateMode(*args, **kwargs): pass
def MFnParticleSystem_isValid(*args, **kwargs): pass
def MFnDynSweptGeometryData_sweptTriangle(*args, **kwargs): pass
def MnCloth_setPositions(*args, **kwargs): pass
def MnParticle_setLiquidRadiusScale(*args, **kwargs): pass
def delete_MRenderLine(*args, **kwargs): pass
def uCharPtr_swigregister(*args, **kwargs): pass
def MFnDragField_type(*args, **kwargs): pass
def MFnGravityField_className(*args, **kwargs): pass
def MFnParticleSystem_flatShaded(*args, **kwargs): pass
def MFnVolumeAxisField_speedAlongAxis(*args, **kwargs): pass
def MnParticle_getFriction(*args, **kwargs): pass
def MRenderLine_getIncandescence(*args, **kwargs): pass
def MDynSweptLine_className(*args, **kwargs): pass
def MFnNIdData_className(*args, **kwargs): pass
def MFnParticleSystem_visibleInReflections(*args, **kwargs): pass
def MFnFluid_getCoordinateMode(*args, **kwargs): pass
def MnCloth_setAirTightness(*args, **kwargs): pass
def MnRigid_getThickness(*args, **kwargs): pass
def delete_floatPtr(*args, **kwargs): pass
def MFnField_getForceAtPoint(*args, **kwargs): pass
def MFnFluid_density(*args, **kwargs): pass
def MFnNObjectData_setCached(*args, **kwargs): pass
def new_MFnPfxGeometry(*args, **kwargs): pass
def MHairSystem_unregisterCollisionSolverCollide(*args, **kwargs): pass
def MnCloth_getFriction(*args, **kwargs): pass
def MFnTurbulenceField_phase(*args, **kwargs): pass
def MnSolver_setDisabled(*args, **kwargs): pass
def boolPtr_value(*args, **kwargs): pass
def MFnAirField_className(*args, **kwargs): pass
def MFnParticleSystem_evaluateDynamics(*args, **kwargs): pass
def MFnTurbulenceField_setFrequency(*args, **kwargs): pass
def MFnVolumeAxisField_setDirectionalSpeed(*args, **kwargs): pass
def MnParticle_setRestDensity(*args, **kwargs): pass
def new_MRenderLine(*args, **kwargs): pass
def uCharPtr_frompointer(*args, **kwargs): pass
def MFnAirField_swigregister(*args, **kwargs): pass
def delete_MFnGravityField(*args, **kwargs): pass
def MFnParticleSystem_betterIllum(*args, **kwargs): pass
def MnCloth_setCollisionFlags(*args, **kwargs): pass
def MnParticle_getBounce(*args, **kwargs): pass
def MFnDynSweptGeometryData_type(*args, **kwargs): pass
def MDynSweptLine_length(*args, **kwargs): pass
def MFnDynSweptGeometryData_sweptLine(*args, **kwargs): pass
def MFnParticleSystem_primaryVisibility(*args, **kwargs): pass
def MFnVolumeAxisField_setSpeedAwayFromCenter(*args, **kwargs): pass
def MRenderLineArray_swigregister(*args, **kwargs): pass
def MnRigid_getVelocities(*args, **kwargs): pass
def new_floatPtr(*args, **kwargs): pass
def new_MFnField(*args, **kwargs): pass
def MFnAirField_speed(*args, **kwargs): pass
def MFnFluid_falloff(*args, **kwargs): pass
def MFnNObjectData_isCached(*args, **kwargs): pass
def MFnPfxGeometry_className(*args, **kwargs): pass
def MHairSystem_registerCollisionSolverPreFrame(*args, **kwargs): pass
def MnCloth_getBounce(*args, **kwargs): pass
def MnSolver_setWindNoiseIntensity(*args, **kwargs): pass
def boolPtr_assign(*args, **kwargs): pass
def delete_MFnAirField(*args, **kwargs): pass
def MFnFluid_setFuelMode(*args, **kwargs): pass
def MFnParticleSystem_saveInitialState(*args, **kwargs): pass
def MFnTurbulenceField_frequency(*args, **kwargs): pass
def MnCloth_createNCloth(*args, **kwargs): pass
def MnCloth_setAddCrossLinks(*args, **kwargs): pass
def uCharPtr_cast(*args, **kwargs): pass
def MFnAirField_setEnableSpread(*args, **kwargs): pass
def MFnGravityField_type(*args, **kwargs): pass
def MFnParticleSystem_threshold(*args, **kwargs): pass
def MFnVolumeAxisField_invertAttenuation(*args, **kwargs): pass
def MnCloth_setSelfCollisionFlags(*args, **kwargs): pass
def MnParticle_getInverseMass(*args, **kwargs): pass
def MFnTurbulenceField_setPhase(*args, **kwargs): pass
def MDynSweptLine_tangent(*args, **kwargs): pass
def MFnDynSweptGeometryData_triangleCount(*args, **kwargs): pass
def delete_MFnNIdData(*args, **kwargs): pass
def MFnVolumeAxisField_setSpeedAwayFromAxis(*args, **kwargs): pass
def MnCloth_setPressureDamping(*args, **kwargs): pass
def MFnNObjectData_type(*args, **kwargs): pass
def shortPtr_swigregister(*args, **kwargs): pass
def MFnField_className(*args, **kwargs): pass
def MFnFluid_velocityGridSizes(*args, **kwargs): pass
def MFnNObjectData_getCollide(*args, **kwargs): pass
def delete_MFnPfxGeometry(*args, **kwargs): pass
def MHairSystem_registerCollisionSolverCollide(*args, **kwargs): pass
def MnSolver_setWindDir(*args, **kwargs): pass
def delete_boolPtr(*args, **kwargs): pass
def MFnAirField_type(*args, **kwargs): pass
def new_MFnTurbulenceField(*args, **kwargs): pass
def delete_MnCloth(*args, **kwargs): pass
def MnParticle_setLiquidSimulation(*args, **kwargs): pass
def MFnParticleSystem_position(*args, **kwargs): pass
def uCharPtr_value(*args, **kwargs): pass
def new_MFnVolumeAxisField(*args, **kwargs): pass
def MnParticle_getThickness(*args, **kwargs): pass
def MFnVolumeAxisField_setSpeedAroundAxis(*args, **kwargs): pass
def MFnNIdData_type(*args, **kwargs): pass
def MFnParticleSystem_hasEmission(*args, **kwargs): pass
def MnCloth_setIncompressibility(*args, **kwargs): pass
def MnRigid_getNumVertices(*args, **kwargs): pass
def MRenderLine_getLine(*args, **kwargs): pass
def shortPtr_frompointer(*args, **kwargs): pass
def delete_MFnField(*args, **kwargs): pass
def MFnNObjectData_setObjectPtr(*args, **kwargs): pass
def MFnPfxGeometry_type(*args, **kwargs): pass
def MFnVortexField_swigregister(*args, **kwargs): pass
def MnCloth_getThickness(*args, **kwargs): pass
def MnSolver_setWindSpeed(*args, **kwargs): pass
def new_boolPtr(*args, **kwargs): pass
def MFnField_swigregister(*args, **kwargs): pass
def MFnFluid_setTemperatureMode(*args, **kwargs): pass
def MFnParticleSystem_create(*args, **kwargs): pass
def MFnTurbulenceField_className(*args, **kwargs): pass
def new_MnCloth(*args, **kwargs): pass
def MnParticle_setDisableGravity(*args, **kwargs): pass
def MRenderLineArray_renderLine(*args, **kwargs): pass
def MFnAirField_componentOnly(*args, **kwargs): pass
def charPtr_value(*args, **kwargs): pass
def uCharPtr_assign(*args, **kwargs): pass
def MFnAirField_setSpread(*args, **kwargs): pass
def MFnFluid_expandToInclude(*args, **kwargs): pass
def MFnParticleSystem_radius1(*args, **kwargs): pass
def MFnVolumeAxisField_className(*args, **kwargs): pass
def MFnDynSweptGeometryData_swigregister(*args, **kwargs): pass
def MnParticle_getVelocities(*args, **kwargs): pass
def MDynSweptLine_vertex(*args, **kwargs): pass
def new_MFnDynSweptGeometryData(*args, **kwargs): pass
def MFnNObjectData_create(*args, **kwargs): pass
def MFnInstancer_swigregister(*args, **kwargs): pass
def MFnParticleSystem_hasOpacity(*args, **kwargs): pass
def MnRigid_setCollisionFlags(*args, **kwargs): pass
def MFnFluid_index(*args, **kwargs): pass
def shortPtr_cast(*args, **kwargs): pass
def MFnField_type(*args, **kwargs): pass
def MFnFluid_setSize(*args, **kwargs): pass
def MFnNObjectData_getParticleObjectPtr(*args, **kwargs): pass
def MFnParticleSystem_swigregister(*args, **kwargs): pass
def MFnVortexField_setAxis(*args, **kwargs): pass
def MnCloth_getVelocities(*args, **kwargs): pass
def MnSolver_setAirDensity(*args, **kwargs): pass
def doublePtr_swigregister(*args, **kwargs): pass
def MFnField_isFalloffCurveConstantOne(*args, **kwargs): pass
def MFnFluid_getVelocityMode(*args, **kwargs): pass
def new_MFnParticleSystem(*args, **kwargs): pass
def delete_MFnTurbulenceField(*args, **kwargs): pass
def MnObject_swigregister(*args, **kwargs): pass
def MnParticle_setDamping(*args, **kwargs): pass
def MRenderLineArray_length(*args, **kwargs): pass
def MFnParticleSystem_position1(*args, **kwargs): pass
def charPtr_assign(*args, **kwargs): pass
def delete_uCharPtr(*args, **kwargs): pass
def MFnAirField_spread(*args, **kwargs): pass
def MFnFluid_isResizeToEmitter(*args, **kwargs): pass
def MFnParticleSystem_radius0(*args, **kwargs): pass
def delete_MFnVolumeAxisField(*args, **kwargs): pass
def MnCloth_setBounce(*args, **kwargs): pass
def MnParticle_getPositions(*args, **kwargs): pass
def new_MFnFluid(*args, **kwargs): pass
def delete_MDynSweptLine(*args, **kwargs): pass
def MFnDynSweptGeometryData_className(*args, **kwargs): pass
def MFnInstancer_allInstances(*args, **kwargs): pass
def MFnVolumeAxisField_speedAwayFromAxis(*args, **kwargs): pass
def MFnParticleSystem_hasRgb(*args, **kwargs): pass
def MFnVolumeAxisField_setSpeedAlongAxis(*args, **kwargs): pass
def MnCloth_setTrackVolume(*args, **kwargs): pass
def MFnUniformField_swigregister(*args, **kwargs): pass
def shortPtr_value(*args, **kwargs): pass
def MDynSweptTriangle_swigregister(*args, **kwargs): pass
def MFnFluid_getDimensions(*args, **kwargs): pass
def MFnNObjectData_getRigidObjectPtr(*args, **kwargs): pass
def MFnParticleSystem_originalParticleShape(*args, **kwargs): pass
def MFnVortexField_axis(*args, **kwargs): pass
def MnCloth_getPositions(*args, **kwargs): pass
def MnSolver_setGravityDir(*args, **kwargs): pass
def MnCloth_setTopology(*args, **kwargs): pass
def doublePtr_frompointer(*args, **kwargs): pass
def MFnField_falloffCurve(*args, **kwargs): pass
def MFnFluid_setVelocityMode(*args, **kwargs): pass
def MFnParticleSystem_className(*args, **kwargs): pass
def MFnTurbulenceField_type(*args, **kwargs): pass
def delete_MnObject(*args, **kwargs): pass
def MnParticle_setFriction(*args, **kwargs): pass
# NOTE(review): SWIG-style stub signatures — apparently for Autodesk Maya's
# OpenMayaFX module (MFnParticleSystem, MnCloth, MRenderLine, ...).  Every
# function accepts any arguments and does nothing; presumably placeholders
# for static analysis / IDE autocompletion.  Confirm before editing by hand.
def MRenderLineArray_deleteArray(*args, **kwargs): pass
def MFnAirField_enableSpread(*args, **kwargs): pass
def new_uCharPtr(*args, **kwargs): pass
def MFnAirField_setComponentOnly(*args, **kwargs): pass
def MFnFluid_isAutoResize(*args, **kwargs): pass
def MFnParticleSystem_radius(*args, **kwargs): pass
def MFnVolumeAxisField_type(*args, **kwargs): pass
def MnCloth_setInverseMass(*args, **kwargs): pass
def MnParticle_getNumVertices(*args, **kwargs): pass
def MRenderLine_className(*args, **kwargs): pass
def new_MDynSweptLine(*args, **kwargs): pass
def delete_MFnDynSweptGeometryData(*args, **kwargs): pass
def MFnInstancer_instancesForParticle(*args, **kwargs): pass
def MFnParticleSystem_hasLifespan(*args, **kwargs): pass
def MnCloth_setFriction(*args, **kwargs): pass
def MFnVolumeAxisField_setDirection(*args, **kwargs): pass
def MnCloth_setPressure(*args, **kwargs): pass
def MnRigid_setFriction(*args, **kwargs): pass
def MFnFluid_swigregister(*args, **kwargs): pass
def shortPtr_assign(*args, **kwargs): pass
def MDynSweptTriangle_className(*args, **kwargs): pass
def MFnFluid_getResolution(*args, **kwargs): pass
def MFnNObjectData_getClothObjectPtr(*args, **kwargs): pass
def MFnParticleSystem_deformedParticleShape(*args, **kwargs): pass
def new_MFnVortexField(*args, **kwargs): pass
def MnCloth_getNumVertices(*args, **kwargs): pass
def MnSolver_setGravity(*args, **kwargs): pass
def MFnFluid_setColorMode(*args, **kwargs): pass
def doublePtr_cast(*args, **kwargs): pass
def MFnField_setUseMaxDistance(*args, **kwargs): pass
def MFnFluid_getDensityMode(*args, **kwargs): pass
def delete_MFnParticleSystem(*args, **kwargs): pass
def MFnRadialField_swigregister(*args, **kwargs): pass
def new_MnObject(*args, **kwargs): pass
def MnParticle_setBounce(*args, **kwargs): pass
def MRenderLineArray_assign(*args, **kwargs): pass
def MFnParticleSystem_surfaceShading(*args, **kwargs): pass
def uIntPtr_swigregister(*args, **kwargs): pass
def MnCloth_setThickness(*args, **kwargs): pass
def MnParticle_setSelfCollideWidth(*args, **kwargs): pass
def MRenderLine_getTransparency(*args, **kwargs): pass
def MFnVolumeAxisField_setInvertAttenuation(*args, **kwargs): pass
def MnCloth_setRestitutionTension(*args, **kwargs): pass
def MnCloth_setSealHoles(*args, **kwargs): pass
def MnRigid_setBounce(*args, **kwargs): pass
def MDynamicsUtil_hasValidDynamics2dTexture(*args, **kwargs): pass
def delete_shortPtr(*args, **kwargs): pass
def MDynSweptTriangle_area(*args, **kwargs): pass
def MFnFluid_create2D(*args, **kwargs): pass
def MFnParticleSystem_isDeformedParticleShape(*args, **kwargs): pass
def MnCloth_setBendResistance(*args, **kwargs): pass
def MnSolver_removeNObject(*args, **kwargs): pass
def doublePtr_value(*args, **kwargs): pass
def MFnField_useMaxDistance(*args, **kwargs): pass
def MFnFluid_setDensityMode(*args, **kwargs): pass
def MFnParticleSystem_type(*args, **kwargs): pass
def MHairSystem_swigregister(*args, **kwargs): pass
def MnParticle_setInverseMass(*args, **kwargs): pass
def delete_MRenderLineArray(*args, **kwargs): pass
def MRenderLine_swigregister(*args, **kwargs): pass
def charPtr_cast(*args, **kwargs): pass
def MFnAirField_setInheritRotation(*args, **kwargs): pass
def MFnFluid_emitIntoArrays(*args, **kwargs): pass
def MFnParticleSystem_position0(*args, **kwargs): pass
def MFnUniformField_setDirection(*args, **kwargs): pass
def MnCloth_setComputeRestAngles(*args, **kwargs): pass
def MFnRadialField_setType(*args, **kwargs): pass
def MnParticle_setSelfCollide(*args, **kwargs): pass
def delete_MDynamicsUtil(*args, **kwargs): pass
def MnCloth_setPumpRate(*args, **kwargs): pass
def MFnDragField_swigregister(*args, **kwargs): pass
def new_MFnInstancer(*args, **kwargs): pass
def MFnParticleSystem_mass(*args, **kwargs): pass
def MFnVolumeAxisField_turbulenceOffset(*args, **kwargs): pass
def MnCloth_setStartPressure(*args, **kwargs): pass
def MHairSystem_getFollicle(*args, **kwargs): pass
def MnRigid_setThickness(*args, **kwargs): pass
def MFnVolumeAxisField_direction(*args, **kwargs): pass
def new_shortPtr(*args, **kwargs): pass
def MDynSweptTriangle_uvPoint(*args, **kwargs): pass
def MFnFluid_create3D(*args, **kwargs): pass
def new_MFnNObjectData(*args, **kwargs): pass
def MFnParticleSystem_isPerParticleVectorAttribute(*args, **kwargs): pass
def MnCloth_setSelfCollideWidth(*args, **kwargs): pass
def MnSolver_addNObject(*args, **kwargs): pass
def doublePtr_assign(*args, **kwargs): pass
def MFnField_setPerVertex(*args, **kwargs): pass
def MFnFluid_getFalloffMode(*args, **kwargs): pass
def MFnNewtonField_swigregister(*args, **kwargs): pass
def MFnRadialField_radialType(*args, **kwargs): pass
def delete_MHairSystem(*args, **kwargs): pass
def MnParticle_setThickness(*args, **kwargs): pass
def new_MRenderLineArray(*args, **kwargs): pass
def uIntPtr_frompointer(*args, **kwargs): pass
def delete_charPtr(*args, **kwargs): pass
def uIntPtr_cast(*args, **kwargs): pass
def MFnAirField_inheritRotation(*args, **kwargs): pass
def MFnFluid_updateGrid(*args, **kwargs): pass
def MFnParticleSystem_acceleration(*args, **kwargs): pass
def MFnUniformField_direction(*args, **kwargs): pass
def MnCloth_setComputeRestLength(*args, **kwargs): pass
def MnParticle_setCollide(*args, **kwargs): pass
def MRenderLine_getColor(*args, **kwargs): pass
def MFnNIdData_create(*args, **kwargs): pass
def charPtr_frompointer(*args, **kwargs): pass
def new_MDynamicsUtil(*args, **kwargs): pass
def MFnDragField_setUseDirection(*args, **kwargs): pass
def MFnInstancer_className(*args, **kwargs): pass
def MFnParticleSystem_opacity(*args, **kwargs): pass
def MFnVolumeAxisField_turbulenceFrequency(*args, **kwargs): pass
def MnCloth_setTangentialDrag(*args, **kwargs): pass
def MnRigid_setVelocities(*args, **kwargs): pass
def MnCloth_setDamping(*args, **kwargs): pass
def MnCloth_setDisableGravity(*args, **kwargs): pass
def intPtr_swigregister(*args, **kwargs): pass
def MDynSweptTriangle_normalToPoint(*args, **kwargs): pass
def MFnNObjectData_className(*args, **kwargs): pass
def MFnParticleSystem_isPerParticleDoubleAttribute(*args, **kwargs): pass
def MFnVortexField_type(*args, **kwargs): pass
def MnSolver_createNSolver(*args, **kwargs): pass
def delete_doublePtr(*args, **kwargs): pass
def MFnField_perVertex(*args, **kwargs): pass
def MFnFluid_setFalloffMode(*args, **kwargs): pass
def MFnNewtonField_setMinDistance(*args, **kwargs): pass
def new_MFnRadialField(*args, **kwargs): pass
def new_MHairSystem(*args, **kwargs): pass
def MnParticle_setVelocities(*args, **kwargs): pass
def MnSolver_swigregister(*args, **kwargs): pass
def MFnDragField_setDirection(*args, **kwargs): pass
def uIntPtr_value(*args, **kwargs): pass
def MFnAirField_setInheritVelocity(*args, **kwargs): pass
def MFnFluid_voxelCenterPosition(*args, **kwargs): pass
def MFnParticleSystem_velocity(*args, **kwargs): pass
def new_MFnUniformField(*args, **kwargs): pass
def MnCloth_setInputMeshAttractAndRigidStrength(*args, **kwargs): pass
def MnParticle_setDragAndLift(*args, **kwargs): pass
def intPtr_cast(*args, **kwargs): pass
def MRenderLine_getParameter(*args, **kwargs): pass
def MDynamicsUtil_removeNodeTypeFromRunup(*args, **kwargs): pass
def MFnDragField_useDirection(*args, **kwargs): pass
def delete_MFnInstancer(*args, **kwargs): pass
def MFnParticleSystem_rgb(*args, **kwargs): pass
def MFnVolumeAxisField_turbulenceSpeed(*args, **kwargs): pass
def MnCloth_setDragAndLift(*args, **kwargs): pass
def MnRigid_setPositions(*args, **kwargs): pass
def MFnVortexField_className(*args, **kwargs): pass
def MFnInstancer_type(*args, **kwargs): pass
def intPtr_frompointer(*args, **kwargs): pass
def MDynSweptTriangle_normal(*args, **kwargs): pass
def MFnFluid_className(*args, **kwargs): pass
def delete_MFnNObjectData(*args, **kwargs): pass
def MFnParticleSystem_isPerParticleIntAttribute(*args, **kwargs): pass
def MFnVolumeAxisField_swigregister(*args, **kwargs): pass
def MnCloth_setRestitutionAngle(*args, **kwargs): pass
def delete_MnSolver(*args, **kwargs): pass
def new_doublePtr(*args, **kwargs): pass
def MFnField_setMaxDistance(*args, **kwargs): pass
def MFnFluid_getColors(*args, **kwargs): pass
def MFnNewtonField_minDistance(*args, **kwargs): pass
def MFnRadialField_className(*args, **kwargs): pass
def MnParticle_setIncompressibility(*args, **kwargs): pass
def MnParticle_setPositions(*args, **kwargs): pass
def MnSolver_solve(*args, **kwargs): pass
# Enumeration constants mirroring the wrapped API.  Note that several
# differently-named constants deliberately alias the value 0 through
# MFnFluid_kConstant.
MFnFluid_kNegXGradient = 4
MFnFluid_kGrid = 1
MFnParticleSystem_kStreak = 9
MFnParticleSystem_kSprites = 8
MFnFluid_kCenterGradient = 7
MFnFluid_kNegZGradient = 6
MFnFluid_kNegYGradient = 5
MFnFluid_kGradient = 3
MFnFluid_kDynamicColorGrid = 2
MFnFluid_kConstant = 0
MFnFluid_kFixed = MFnFluid_kConstant
MFnFluid_kNoFalloffGrid = MFnFluid_kConstant
MFnFluid_kUseShadingColor = MFnFluid_kConstant
MFnFluid_kZero = MFnFluid_kConstant
MFnParticleSystem_kCloud = MFnFluid_kConstant
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Identity converter. Useful for testing and diagnostic."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.contrib.py2tf.pyct import anno
from tensorflow.contrib.py2tf.pyct import templates
class SymbolNamer(object):
  """Interface expected of the namer passed to ControlFlowTransformer."""

  def new_symbol(self, name_root, reserved_locals):
    """Produce a fresh, unused symbol name.

    Args:
      name_root: String, the stem the new name is derived from.
      reserved_locals: Set(string), extra local symbols that must not be
        reused.

    Returns:
      String.
    """
    raise NotImplementedError()
class ControlFlowTransformer(gast.NodeTransformer):
  """Transforms control flow structures like loops and conditionals."""

  def __init__(self, namer):
    # Object implementing the SymbolNamer interface; supplies fresh names
    # for the generated loop test/body functions.
    self.namer = namer

  # pylint:disable=invalid-name

  def _tuple_or_item(self, elts):
    """Collapse a 1-element iterable to its single item, else to a tuple."""
    elts = tuple(elts)
    if len(elts) == 1:
      return elts[0]
    return elts

  def _ast_tuple_or_item(self, elts, ctx):
    """Like _tuple_or_item, but 2+ items become a gast.Tuple AST node."""
    elts = list(elts)
    if len(elts) == 1:
      return elts[0]
    return gast.Tuple(elts, ctx)

  def visit_If(self, node):
    # Conversion of conditionals is not implemented yet.
    raise NotImplementedError()

  def visit_While(self, node):
    """Lower a Python `while` loop into a tf.while_loop call.

    Relies on static-analysis annotations previously attached to the node:
    'body_scope' (symbols read/modified/created by the loop body) and
    'parent_scope_values' (symbol values in the enclosing scope).
    """
    self.generic_visit(node)
    # Scrape out the data flow analysis
    body_scope = anno.getanno(node, 'body_scope')
    parent_scope_values = anno.getanno(node, 'parent_scope_values')
    # Symbols modified by the body but defined outside it form the loop state.
    body_closure = tuple(body_scope.modified - body_scope.created)

    # This function is never called: templates.replace reads its source/AST
    # and substitutes the placeholder names with real nodes.
    def template(
        state_args,  # pylint:disable=unused-argument
        state_locals,
        state_results,  # pylint:disable=unused-argument
        test_name,
        test,  # pylint:disable=unused-argument
        body_name,
        body,
        state_init):

      def test_name(state_args):  # pylint:disable=function-redefined,unused-argument
        return test

      def body_name(state_args):  # pylint:disable=function-redefined,unused-argument
        body  # pylint:disable=pointless-statement
        return state_locals

      state_results = tf.while_loop(test_name, body_name, [state_init])  # pylint:disable=undefined-variable

    test_name = self.namer.new_symbol('loop_test', body_scope.used)
    body_name = self.namer.new_symbol('loop_body', body_scope.used)
    node = templates.replace(
        template,
        state_args=self._tuple_or_item(
            gast.Name(n, gast.Param(), None) for n in body_closure),
        state_locals=self._ast_tuple_or_item(
            (gast.Name(n, gast.Load(), None) for n in body_closure),
            gast.Load()),
        state_results=self._ast_tuple_or_item(
            (gast.Name(n, gast.Store(), None) for n in body_closure),
            gast.Store()),
        test_name=gast.Name(test_name, gast.Load(), None),
        test=node.test,
        body_name=gast.Name(body_name, gast.Load(), None),
        body=node.body,
        state_init=[parent_scope_values.getval(n) for n in body_closure])
    return node

  # pylint:enable=invalid-name
def transform(node, namer):
  """Apply ControlFlowTransformer to `node` and return the rewritten AST."""
  return ControlFlowTransformer(namer).visit(node)
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 2 07:20:02 2020
@author: Aniket Maity
"""
def _encode(digits):
    """Encode a digit string: the zero-padded products of the first two
    adjacent digit pairs, followed by the reversed zero-padded sum of
    those products.

    Assumes `digits` has at least 3 characters, all numeric — the
    terminating "0" line never reaches this function.
    """
    ab = int(digits[0]) * int(digits[1])
    bc = int(digits[1]) * int(digits[2])
    return '%02d%02d' % (ab, bc) + ('%02d' % (ab + bc))[::-1]

# Read numbers from stdin until a 0 terminator, then print every encoding.
# (The previous version carried dead assignments: an unused initial
# N = 9999 and a redundant newStr = '' that was immediately overwritten.)
arr = []
T = input()
while int(T) != 0:
    arr.append(_encode(T))
    T = input()
for item in arr:
    print(item)
'''
The MIT License (MIT)
Copyright (c) 2017 Thunderclouding.com - exhesham
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
from OpenSSL.crypto import X509
import os, time, base64, sys
from M2Crypto import X509, EVP, RSA, ASN1, m2
import argparse
# Initialize command line.
# NOTE: every value option uses nargs=1, so each parsed attribute is a
# one-element list — hence the [0] indexing in the globals below.
parser = argparse.ArgumentParser(description='my flags')
parser.add_argument('--root-c',action='store',dest='root_issuer_c', help='The root issuer country',default=['IL'], nargs=1, required=False)
parser.add_argument('--root-cn',action='store',dest='root_issuer_cn', help='The root issuer common name',default=['Hesham Authoritah'], nargs=1, required=False)
parser.add_argument('--inter-c',action='store',dest='intermediate_issuer_c', help='The intermediate country name',default=['IL'], nargs=1, required=False)
parser.add_argument('--inter-cn',action='store',dest='intermediate_issuer_cn', help='The intermediate issuer common name',default=['Hisham Intermediate Authoritah'], nargs=1, required=False)
parser.add_argument('--cn',action='store',dest='cert_cn', help='Common name of the certificate',default=['localhost'], nargs=1, required=False)
parser.add_argument('--c',action='store',dest='cert_c', help='Country of the certificate',default=['IL'], nargs=1, required=False)
parser.add_argument('--root-key-file',action='store',dest='root_key_file', help='The output root certificate file. if file exists, it will be overwritten',default=['root.key'], nargs=1, required=False)
parser.add_argument('--root-crt-file',action='store',dest='root_crt_file', help='The output root key certificate file. if file exists, it will be overwritten',default=['root.crt'], nargs=1, required=False)
parser.add_argument('--inter-key-file',action='store',dest='intermediate_key_file', help='The output intermediate certificate file. if file exists, it will be overwritten',default=['inter.key'], nargs=1, required=False)
parser.add_argument('--inter-crt-file',action='store',dest='intermediate_crt_file', help='The output intermediate key certificate file. if file exists, it will be overwritten',default=['inter.crt'], nargs=1, required=False)
parser.add_argument('--key-file',action='store',dest='key_file', help='The output certificate file. if file exists, it will be overwritten',default=['cert.key'], nargs=1, required=False)
parser.add_argument('--crt-file',action='store',dest='crt_file', help='The output key certificate file. if file exists, it will be overwritten',default=['cert.crt'], nargs=1, required=False)
parser.add_argument('--version', action='version', version=' 1.0')
# Action flags: clean generated files / create leaf cert / create CA chain.
parser.add_argument('--clean',dest='clean', help='Clean output files', required=False, action="store_true")
parser.add_argument('--cert',dest='create_cert', help='Create certificate', required=False, action="store_true")
parser.add_argument('--ca', dest='create_root', help='Create root certificate - if intermediate is not available, it will be created', required=False, action="store_true")
# NOTE: parsing happens at import time, so importing this module with
# unrelated sys.argv flags will fail.
args = parser.parse_args()
# Global params (unwrapped from the nargs=1 lists above)
root_issuer_c = args.root_issuer_c[0]
root_issuer_cn = args.root_issuer_cn[0]
intermediate_issuer_c = args.intermediate_issuer_c[0]
intermediate_issuer_cn = args.intermediate_issuer_cn[0]
cert_cn = args.cert_cn[0]
cert_c = args.cert_c[0]
cert_key_file = args.key_file[0]
cert_file = args.crt_file[0]
root_key_file = args.root_key_file [0]
root_crt_file = args.root_crt_file[0]
intermediate_key_file = args.intermediate_key_file[0]
intermediate_crt_file = args.intermediate_crt_file[0]
def callback(*args):
    """No-op progress callback handed to RSA.gen_key."""
    return None
def mkreq(bits, ca=0, cn=cert_cn, c=cert_c):
    """Build an X509 certificate request together with a fresh RSA key.

    Args:
        bits: RSA key size in bits.
        ca: truthy for a CA request; leaf requests (ca falsy) additionally
            get subjectAltName and nsComment extensions.
        cn: subject common name.  NOTE: defaults bind the module-level
            cert_cn/cert_c values at import time.
        c: subject country code.

    Returns:
        (X509.Request, EVP.PKey) tuple; the request is signed with SHA-256
        and verified against both the original and the extracted key.
    """
    pk = EVP.PKey()
    x = X509.Request()
    # 65537 is the conventional RSA public exponent (F4).
    rsa = RSA.gen_key(bits, 65537, callback)
    pk.assign_rsa(rsa)
    x.set_pubkey(pk)
    name = x.get_subject()
    name.C = c
    name.CN = cn
    if not ca:
        # Leaf-only extensions.
        ext1 = X509.new_extension('subjectAltName', 'DNS:' + cn)
        ext2 = X509.new_extension('nsComment', 'Hello there')
        extstack = X509.X509_Extension_Stack()
        extstack.push(ext1)
        extstack.push(ext2)
        x.add_extensions(extstack)
    x.sign(pk, 'sha256')
    assert x.verify(pk)
    pk2 = x.get_pubkey()
    assert x.verify(pk2)
    return x, pk
def generate_and_sign_cert(req, pk, sign_key, issuer_cn, issuer_c):
    """Turn the request *req* into a one-year X509 certificate.

    Args:
        req: X509.Request whose subject and public key are copied in.
        pk: EVP.PKey of the requester (returned unchanged).
        sign_key: EVP.PKey used to sign the certificate.
        issuer_cn: issuer common name.
        issuer_c: issuer country code.

    Returns:
        (certificate, pk, requester public key) tuple.
    """
    pkey = req.get_pubkey()
    sub = req.get_subject()
    cert = X509.X509()
    # NOTE(review): a fixed serial number is used for every certificate —
    # confirm this is acceptable for the intended use.
    cert.set_serial_number(1)
    cert.set_version(2)  # the version field is zero-based; 2 selects X509v3
    cert.set_subject(sub)
    # Validity window: now .. now + 365 days.
    t = long(time.time()) + time.timezone  # `long` keeps this Python-2 only
    now = ASN1.ASN1_UTCTIME()
    now.set_time(t)
    nowPlusYear = ASN1.ASN1_UTCTIME()
    nowPlusYear.set_time(t + 60 * 60 * 24 * 365)
    cert.set_not_before(now)
    cert.set_not_after(nowPlusYear)
    issuer = X509.X509_Name()
    issuer.C = issuer_c
    issuer.CN = issuer_cn
    cert.set_issuer(issuer)
    cert.set_pubkey(pkey)
    # NOTE(review): basicConstraints CA:TRUE is added to *every* certificate
    # produced here, including leaf certs issued via sign_cert — confirm
    # this is intended.
    ext = X509.new_extension('basicConstraints', 'CA:TRUE')
    cert.add_ext(ext)
    cert.sign(sign_key, 'sha256')
    return cert, pk, pkey
def create_root_cert():
    """Create a self-signed 4096-bit root CA and save key and cert to disk.

    Returns:
        (cert, private key, public key), or (None, None, None) when the
        linked OpenSSL is older than 0.9.8 and the purpose checks are
        unavailable (in which case nothing is written to disk).
    """
    # Start from a clean slate: remove any previous root files.
    if os.path.exists(root_crt_file):
        os.remove(root_crt_file)
    if os.path.exists(root_key_file):
        os.remove(root_key_file)
    req, pk = mkreq(4096, ca=1, cn=root_issuer_cn, c=root_issuer_c)
    # Self-signed: the request's own key signs the certificate.
    cert, pk, pkey = generate_and_sign_cert(req, pk, sign_key=pk, issuer_cn=root_issuer_cn, issuer_c=root_issuer_c)
    # The `L` literal suffix below makes this file Python-2 only.
    if m2.OPENSSL_VERSION_NUMBER >= 0x0090800fL:
        assert cert.check_ca()
        assert cert.check_purpose(m2.X509_PURPOSE_SSL_SERVER, 1)
        assert cert.check_purpose(m2.X509_PURPOSE_NS_SSL_SERVER, 1)
        assert cert.check_purpose(m2.X509_PURPOSE_ANY, 1)
        assert cert.check_purpose(m2.X509_PURPOSE_SSL_SERVER, 0)
        assert cert.check_purpose(m2.X509_PURPOSE_NS_SSL_SERVER, 0)
        assert cert.check_purpose(m2.X509_PURPOSE_ANY, 0)
    else:
        return None, None, None
    # TODO:self.assertRaises(AttributeError, cert.check_ca)
    cert.save(root_crt_file)
    pk.save_key(root_key_file, cipher=None)  # cipher=None writes the key unencrypted
    return cert, pk, pkey
def create_intermediate_cert(root_pkey=None):
    """Create an intermediate CA certificate signed by the root key.

    Args:
        root_pkey: optional pre-loaded root EVP key; loaded from
            root_key_file when omitted.

    Returns:
        (cert, private key, public key) on success.
        NOTE(review): on a missing root key this instead returns the
        2-tuple (1, 'create root certificate') — callers must handle
        both shapes.
    """
    if not os.path.exists(root_key_file):
        return 1, 'create root certificate'
    if not root_pkey:
        root_pkey = EVP.load_key(root_key_file)
    # Clean intermediate cert
    if os.path.exists(intermediate_crt_file):
        os.remove(intermediate_crt_file)
    if os.path.exists(intermediate_key_file):
        os.remove(intermediate_key_file)
    req, pk = mkreq(2048, ca=1, cn=intermediate_issuer_cn, c=intermediate_issuer_c)
    cert, pk, pkey = generate_and_sign_cert(req, pk, sign_key=root_pkey, issuer_cn=root_issuer_cn,
                                            issuer_c=root_issuer_c)
    pk.save_key(intermediate_key_file, cipher=None)
    cert.save(intermediate_crt_file)
    return cert, pk, pkey
def create_chain():
    """Build the CA chain: a root certificate, then an intermediate
    certificate signed by that root."""
    create_root_cert()
    create_intermediate_cert()
def clean_files():
    """Remove every generated key/certificate output file."""
    for output_path in (root_crt_file, root_key_file,
                        intermediate_crt_file, intermediate_key_file,
                        cert_file, cert_key_file):
        remove_file(output_path)
def sign_cert(cn = cert_cn, c = cert_c):
    """Issue a certificate for *cn*/*c* signed by the intermediate CA.

    Recreates the whole root/intermediate chain first when any of its
    four files is missing.

    Returns:
        (certificate, private key, public key) tuple.
    """
    if not os.path.exists(root_crt_file) \
            or not os.path.exists(root_key_file) \
            or not os.path.exists(intermediate_crt_file) \
            or not os.path.exists(intermediate_key_file):
        create_chain()
    inter_pkey = EVP.load_key(intermediate_key_file)
    # NOTE(review): ca=1 makes this *leaf* request skip the subjectAltName
    # extension (see mkreq) — confirm that is intended.
    req, pk = mkreq(2048, ca=1, cn=cn, c=c)
    cert, pk, pkey = generate_and_sign_cert(req, pk, sign_key=inter_pkey, issuer_cn=intermediate_issuer_cn,
                                            issuer_c=intermediate_issuer_c)
    return cert, pk, pkey
def save_to_text_file(text, filename):
    """Write *text* to *filename*, replacing any existing content.

    Opening with mode "w" already truncates/overwrites an existing file,
    so the previous explicit delete-before-write was redundant work.
    """
    with open(filename, "w") as text_file:
        text_file.write(text)
def remove_file(filename):
    """Delete *filename*, tolerating a file that is already gone.

    Uses EAFP instead of the previous exists()-then-remove() pair, which
    had a race: the file could disappear between the check and the
    removal.  Any error other than "no such file" still propagates.
    """
    import errno
    try:
        os.remove(filename)
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
def create_ca_signed_certificiate():
    """Issue a leaf certificate via the intermediate CA and persist the
    certificate and its private key as PEM files.

    (The misspelling in the function name is kept: it is the public name
    callers use.)
    """
    signed_cert, signed_key, _pubkey = sign_cert()
    save_to_text_file(signed_cert.as_pem(), cert_file)
    save_to_text_file(str(signed_key.as_pem(cipher=None)), cert_key_file)
if __name__ == '__main__':
    # Dispatch on the action flags parsed at import time.  print(...) is
    # valid in Python 2 as well (a parenthesized expression statement).
    if args.clean:
        print("Will clean files")
        clean_files()
    if args.create_root:
        print("Will create root certificate")
        create_root_cert()
    if args.create_cert:
        print("Will create a certificate")
        create_ca_signed_certificiate()
|
import sys
import math
import numpy
from PyQt5 import QtWidgets, QtGui
from PyQt5.QtWidgets import QMessageBox
from behinesazan.gas.station.software.view.GasInformationInputForm.base import BaseGasInformationInputForm
from behinesazan.gas.station.software.model.gas.Gas import Gas
# from behinesazan.gas.station.software.model.gas.Gas import Gas
# from AgaQt import Gas
class GasInformationInputForm(QtWidgets.QWidget, BaseGasInformationInputForm.Ui_Form):
    """Form that collects gas-station and gas-composition data from the user.

    Validated values are accumulated in ``self.data``; the gas composition
    ends up on ``self.g`` (a ``Gas`` instance).  Temperatures are entered
    in deg C and stored in K; pressures are apparently entered in psi and
    stored in kPa (given the 6.89476 conversion factor) — TODO confirm.
    """

    # NOTE(review): these are *class* attributes, shared by every instance
    # of the form — confirm a single shared dict/Gas object is intended.
    data = {}
    g = Gas()

    def __init__(self, parent=None):
        super(GasInformationInputForm, self).__init__(parent)
        self.setupUi(self)
        # Error-indicator labels start hidden; they are shown when the
        # corresponding field fails validation.
        self.label_37.setVisible(False)
        self.label_38.setVisible(False)
        self.label_39.setVisible(False)
        self.label_40.setVisible(False)
        self.label_23.setVisible(False)
        self.address_input  # NOTE(review): no-op attribute access — leftover?
        # TODO create clear button
        self.pushButton_2.clicked.connect(self.cancel)
        self.pushButton.clicked.connect(self.datagather)
        # Clears the shared data dict only; the input widgets keep their
        # text (see the commented-out clear_btn_clicked sketch removed here).
        self.clear_button.clicked.connect(self.data.clear)

    def datagather(self):
        """Validate every input field and populate ``self.data``.

        Shows a localized error dialog and returns early on the first
        invalid field.  On success the window is closed and
        ``self.gasCheck`` is set True; on any unexpected error the outer
        except shows all error labels and sets ``self.gasCheck`` False.
        """
        self.toutCheck = False
        self.windCheck = False
        self.humidityCheck = False
        self.stationCapacityCheck = False
        # Standard reference conditions: 0 deg C in K and 1 atm in kPa; the
        # stored T_Standard is 15 deg C expressed in K.
        self.tStandard = 273.15
        self.data["T_Standard"] = 273.15 + 15
        self.pStandard = 101.325
        self.data["P_Standard"] = 101.325
        self.data["address"] = self.address_input.toPlainText()
        self.data["province"] = self.province_input.text()
        self.data["city"] = self.city_input.text()
        self.data["nominal_capacity"] = self.station_nominal_capacity.text()
        self.data['area'] = self.area_input.text()
        try:
            # Inlet gas temperature (deg C -> K); must be above absolute zero.
            if self.lineEdit_25.text() != "":
                try:
                    if float(self.lineEdit_25.text()) < -273.15:
                        QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                          "دمای گاز ورودی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                        self.label_37.setVisible(True)
                        self.label_23.setVisible(True)
                        return
                    else:
                        self.Tin = float(self.lineEdit_25.text()) + 273.15
                        self.data["T_input"] = self.Tin
                        self.label_37.setVisible(False)
                        self.label_23.setVisible(False)
                except:  # NOTE(review): bare except — narrows poorly; confirm
                    QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                      "دمای گاز ورودی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                    self.label_37.setVisible(True)
                    self.label_23.setVisible(True)
                    return
            else:
                self.label_37.setVisible(True)
                self.label_23.setVisible(True)
            # Inlet gas pressure; 6.89476 converts psi -> kPa, and the
            # resulting absolute pressure (gauge + standard) must be positive.
            if self.lineEdit_24.text() != "":
                try:
                    if float(self.lineEdit_24.text()) * 6.89476 + self.pStandard <= 0:
                        QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                          "فشار گاز ورودی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                        self.label_38.setVisible(True)
                        self.label_23.setVisible(True)
                        return
                    else:
                        self.Pin = float(self.lineEdit_24.text()) * 6.89476
                        self.data["P_input"] = self.Pin
                        self.label_38.setVisible(False)
                        self.label_23.setVisible(False)
                except:
                    QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                      "فشار گاز ورودی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                    self.label_38.setVisible(True)
                    self.label_23.setVisible(True)
                    return
            else:
                self.label_38.setVisible(True)
                self.label_23.setVisible(True)
            # Outlet gas temperature (deg C -> K).
            if self.lineEdit_26.text() != "":
                try:
                    if float(self.lineEdit_26.text()) < -273.15:
                        QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                          "دمای گاز خروجی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                        self.label_39.setVisible(True)
                        self.label_23.setVisible(True)
                        return
                    else:
                        self.toutStation = float(self.lineEdit_26.text()) + 273.15
                        self.data["T_station_out"] = self.toutStation
                        self.label_39.setVisible(False)
                        self.label_23.setVisible(False)
                except:
                    QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                      "دمای گاز خروجی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                    self.label_39.setVisible(True)
                    self.label_23.setVisible(True)
                    return
            else:
                self.label_39.setVisible(True)
                self.label_23.setVisible(True)
            # Outlet gas pressure (psi -> kPa, absolute must stay positive).
            if self.lineEdit_27.text() != "":
                try:
                    if float(self.lineEdit_27.text()) * 6.89476 + self.pStandard <= 0:
                        QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                          "فشار گاز خروجی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                        self.label_40.setVisible(True)
                        self.label_23.setVisible(True)
                        return
                    else:
                        self.poutStation = float(self.lineEdit_27.text()) * 6.89476
                        self.data["P_station_out"] = self.poutStation
                        self.label_40.setVisible(False)
                        self.label_23.setVisible(False)
                except:
                    QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                      "فشار گاز خروجی به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                    self.label_40.setVisible(True)
                    self.label_23.setVisible(True)
                    return
            else:
                self.label_40.setVisible(True)
                self.label_23.setVisible(True)
            # Check the ambient temperature (optional field).
            if self.lineEdit_28.text() != "":
                try:
                    if float(self.lineEdit_28.text()) < - 273.15:
                        QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                          "اطلاعات دمای محیط به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                        return
                    else:
                        self.outTemperature = float(self.lineEdit_28.text()) + 273.15
                        self.data["T_environment"] = self.outTemperature
                        self.toutCheck = True
                except:
                    QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                      "اطلاعات دمای محیط به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                    return
            # Humidity input (lineEdit_29) is currently commented out /
            # ignored; humidityCheck therefore stays False.
            # Wind velocity (optional; clamped to a minimum of 0.5).
            if self.lineEdit_30.text() != "":
                try:
                    if float(self.lineEdit_30.text()) < 0:
                        QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                          "سرعت باد نمی تواند کوچکتر از صفر باشد. لطفاً اطلاعات صحیح وارد نمایید.")
                        return
                    else:
                        self.windVelocity = float(self.lineEdit_30.text())
                        if self.windVelocity < 0.5:
                            self.windVelocity = 0.5
                        self.data["Wind_velocity"] = self.windVelocity
                except:
                    QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                      "اطلاعاتی صحبحی برای سرعت باد وارد نشده است. لطفاً اطلاعات صحیح وارد فرمایید")
                    return
            else:
                self.data["Wind_velocity"] = 0.5  # set wind velocity to 0.5 if nothing is entered
            # Station capacity (optional; must be strictly positive).
            if self.lineEdit_31.text() != "":
                try:
                    if float(self.lineEdit_31.text()) <= 0:
                        QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                          "ظرفیت ایستگاه باید از صفر بزرگتر باشد. لطفاً اطلاعات صحیح وارد نمایید.")
                        return
                    else:
                        self.stationCapacity = float(self.lineEdit_31.text())
                        self.data["Station_Capacity"] = self.stationCapacity
                        self.stationCapacityCheck = True
                except:
                    QMessageBox.about(self, "خطا در اطلاعات ورودی",
                                      "ظرفیت ایستگاه به درستی وارد نشده است. لطفاً اطلاعات صحیح وارد نمایید.")
                    return
            else:
                self.stationCapacity = 0
                self.data["Station_Capacity"] = self.stationCapacity
                pass
            # Gas composition: one entry per component field (empty -> 0).
            component = []
            component.append(self.input_var_check(self.lineEdit.text()))
            component.append(self.input_var_check(self.lineEdit_2.text()))
            component.append(self.input_var_check(self.lineEdit_3.text()))
            component.append(self.input_var_check(self.lineEdit_4.text()))
            component.append(self.input_var_check(self.lineEdit_5.text()))
            component.append(self.input_var_check(self.lineEdit_6.text()))
            component.append(self.input_var_check(self.lineEdit_7.text()))
            component.append(self.input_var_check(self.lineEdit_8.text()))
            component.append(self.input_var_check(self.lineEdit_9.text()))
            component.append(self.input_var_check(self.lineEdit_10.text()))
            component.append(self.input_var_check(self.lineEdit_11.text()))
            component.append(self.input_var_check(self.lineEdit_12.text()))
            component.append(self.input_var_check(self.lineEdit_13.text()))
            component.append(self.input_var_check(self.lineEdit_14.text()))
            component.append(self.input_var_check(self.lineEdit_15.text()))
            component.append(self.input_var_check(self.lineEdit_16.text()))
            component.append(self.input_var_check(self.lineEdit_17.text()))
            component.append(self.input_var_check(self.lineEdit_18.text()))
            component.append(self.input_var_check(self.lineEdit_19.text()))
            component.append(self.input_var_check(self.lineEdit_20.text()))
            component.append(self.input_var_check(self.lineEdit_21.text()))
            # Mass-percent mode: convert to mole proportions via the molar
            # masses.  NOTE(review): any scalar denominator here cancels in
            # the renormalisation below, so only component / M_i matters.
            if self.comboBox.currentText() == "درصد جرمی":
                component = numpy.divide(numpy.divide(component, self.g.M_i), numpy.dot(component, self.g.M_i))
                pass
            self.g.component = component
            # Normalise so the fractions sum to 1.
            self.g.component = numpy.divide(self.g.component, math.fsum(self.g.component))
            for comp in self.g.component:
                if comp < 0:
                    QMessageBox.about(self, "خطا در اطلاعات ورودی", "لطفاً اطلاعات صحیح وارد فرمایید")
                    return
            self.data["gas"] = self.g
            # label_23 visible means some earlier field was flagged invalid.
            if self.label_23.isVisible():
                return
            else:
                self.close()
            self.g.p_theta = self.pStandard
            self.g.T_theta = self.tStandard
            self.gasCheck = True
            print(self.data)
        except:  # NOTE(review): bare except hides the real failure; it is
            # logged via sys.exc_info and surfaced as a generic dialog.
            print(sys.exc_info()[0])
            print(sys.exc_info()[1])
            self.label_37.setVisible(True)
            self.label_38.setVisible(True)
            self.label_39.setVisible(True)
            self.label_40.setVisible(True)
            self.label_23.setVisible(True)
            self.gasCheck = False
            QMessageBox.about(self, "خطا در اطلاعات ورودی", "لطفاً اطلاعات صحیح وارد فرمایید")
            return

    def input_var_check(self, text):
        """Return *text* as a float, treating an empty field as 0."""
        if text == "":
            return 0
        return float(text)

    def cancel(self):
        """Hide all error indicators and close the form without saving."""
        self.label_37.setVisible(False)
        self.label_38.setVisible(False)
        self.label_39.setVisible(False)
        self.label_40.setVisible(False)
        self.label_23.setVisible(False)
        self.close()
        # self.gasProperty = None
        pass
if __name__ == "__main__":
    # Run the form standalone.  The previous version also instantiated an
    # unused bare QWidget (`Form`), which has been removed.
    app = QtWidgets.QApplication(sys.argv)
    ui = GasInformationInputForm()
    ui.show()
    sys.exit(app.exec_())
|
import random
from allauth.account.models import EmailAddress
from allauth.account.utils import sync_user_email_addresses
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand, CommandError
from faker import Factory
from xSACdb.roles.groups import *
from xsd_members.models import MemberProfile
from xsd_sites.models import Site
from xsd_training.models import *
from xsd_trips.models import Trip
from xsd_trips.models.trip_member import TripMember
from xsd_trips.models.trip_state import STATE_COMPLETED, STATE_CANCELLED
class Command(BaseCommand):
    """Populate the database with fake members, training and trips.

    Intended for tests, demo sites and local development only: handle()
    refuses to run unless ``settings.DEBUG`` or ``settings.STAGING`` is
    set. All randomness is seeded from ``settings.RANDOM_SEED`` so
    repeated runs produce the same dataset.
    """
    help = 'Generates fake data for testing, demo site and development'
    # Faker instance; created in handle() with the configured locale.
    fake = None
    # Volume knobs for the generated dataset.
    FLUFFY_USER_COUNT = 150
    TG_COUNT = 15
    TG_MAX_SIZE = 25
    SS_COUNT = 50
    PL_COUNT = 500   # NOTE(review): not referenced in the visible code
    PSDC_COUNT = 50  # NOTE(review): not referenced in the visible code
    TRIP_COUNT = 100

    def setUp(self):
        """Cache the Qualification, Lesson and SDC rows used throughout."""
        # Personal (diver-grade) qualifications.
        self.OD = Qualification.objects.get(code="OD")
        self.SD = Qualification.objects.get(code="SD")
        self.DL = Qualification.objects.get(code="DL")
        self.AD = Qualification.objects.get(code="AD")
        self.FC = Qualification.objects.get(code="FC")
        self.PERSONAL_QUALS = [self.OD, self.SD, self.DL, self.AD, self.FC]
        # Instructor qualifications.
        self.ADI = Qualification.objects.get(code="ADI")
        self.PI = Qualification.objects.get(code="PI")
        self.TI = Qualification.objects.get(code="TI")
        self.AOWI = Qualification.objects.get(code="AOWI")
        self.OWI = Qualification.objects.get(code="OWI")
        self.AI = Qualification.objects.get(code="AI")
        self.NI = Qualification.objects.get(code="NI")
        self.INSTRUCTOR_QUALS = [self.ADI, self.PI, self.TI, self.AOWI, self.OWI,
                                 self.AI, self.NI]
        # Lessons and skill-development courses used by the generators below.
        self.OO1 = Lesson.objects.get(code="OO1", qualification__active=True)
        self.OO2 = Lesson.objects.get(code="OO2", qualification__active=True)
        self.SO1 = Lesson.objects.get(code="SO1")
        self.BOAT_HANDLING = SDC.objects.get(title="Boat Handling")
        self.WRECK_APPRECIATION = SDC.objects.get(title="Wreck Appreciation")

    def handle(self, *args, **options):
        """Entry point: guard against production, seed RNGs, generate data.

        Everything runs inside one transaction so a failure leaves the
        database untouched.
        """
        from django.conf import settings
        # Safety guard: never pollute a production database.
        if not (settings.DEBUG or settings.STAGING):
            raise CommandError('You cannot run this command in production')
        self.setUp()
        self.fake = Factory.create(settings.FAKER_LOCALE)
        # Manually force seed, otherwise it's done by time, which could lead to inconsistent tests
        self.fake.seed(settings.RANDOM_SEED)
        random.seed(settings.RANDOM_SEED)
        # NOTE(review): `transaction` is not imported explicitly here; it is
        # presumably re-exported by one of the wildcard imports above -- confirm.
        with transaction.atomic():
            self.stdout.write('Generating fake data...')
            # Members
            self.generateUsefulUsers()
            self.generateFluffyUsers()
            # Training
            self.generateTrainingGroups()
            self.generateSessions()
            self.generatePerformedLessons()
            # Trips
            self.generateTrips()
            self.fillTrips()
        self.stdout.write('Done')

    def status_write(self, message):
        """Write an indented progress line to stdout."""
        self.stdout.write('  {}'.format(message))

    def verifyEmail(self, user):
        """Mark the user's primary allauth email address as verified."""
        sync_user_email_addresses(user)
        ea = EmailAddress.objects.get_for_user(user, user.email)
        ea.verified = True
        ea.save()

    def generateUsefulUsers(self):
        """Create the named accounts used by tests and demo logins.

        Builds a superuser, one officer per role, and a pair of divers at
        each grade, then records them in ``self.usefulUsers`` (by handle),
        ``self.usefulUsersArray`` (ordered) and ``self.memberActionUsers``
        (accounts allowed to approve/archive members).
        """
        U = get_user_model()
        # Role groups (pks come from xSACdb.roles.groups).
        groupAdmin = Group.objects.get(pk=GROUP_ADMIN)
        groupTraining = Group.objects.get(pk=GROUP_TRAINING)
        # NOTE(review): groupTrips and groupSites are fetched but never
        # assigned to any user below -- confirm whether that is intended.
        groupTrips = Group.objects.get(pk=GROUP_TRIPS)
        groupSites = Group.objects.get(pk=GROUP_SITES)
        groupMembers = Group.objects.get(pk=GROUP_MEMBERS)
        groupDO = Group.objects.get(pk=GROUP_DO)
        # Superuser: full admin, training for Ocean Diver.
        superUser = U.objects.create_superuser(
            username="su",
            email="superuser@xsacdb.wjdp.uk",
            password="su",
            first_name="SUPER",
            last_name="USER",
        )
        superUser.save()
        superUser.groups.add(groupAdmin)
        superUser.save()
        superUser.memberprofile.fake(self.fake)
        superUser.memberprofile.training_for = self.OD
        superUser.memberprofile.save()
        self.verifyEmail(superUser)
        # Diving Officer: Advanced Diver + Open Water Instructor.
        divingOfficer = U.objects.create_user(
            email="do@xsacdb.wjdp.uk",
            password="do",
            first_name=self.fake.first_name(),
            last_name="Divingofficer",
        )
        divingOfficer.username = "do"
        divingOfficer.save()
        divingOfficer.groups.add(groupDO)
        divingOfficer.save()
        divingOfficer.memberprofile.approve(superUser)
        divingOfficer.memberprofile.fake(self.fake)
        divingOfficer.memberprofile.set_qualification(self.AD)
        divingOfficer.memberprofile.set_qualification(self.OWI)
        divingOfficer.memberprofile.save()
        self.verifyEmail(divingOfficer)
        # Training Officer: Dive Leader + Open Water Instructor.
        trainingOfficer = U.objects.create_user(
            email="to@xsacdb.wjdp.uk",
            password="to",
            first_name=self.fake.first_name(),
            last_name="Trainingofficer",
        )
        trainingOfficer.username = "to"
        trainingOfficer.save()
        trainingOfficer.groups.add(groupTraining)
        trainingOfficer.save()
        trainingOfficer.memberprofile.approve(superUser)
        trainingOfficer.memberprofile.fake(self.fake)
        trainingOfficer.memberprofile.set_qualification(self.DL)
        trainingOfficer.memberprofile.set_qualification(self.OWI)
        trainingOfficer.memberprofile.save()
        self.verifyEmail(trainingOfficer)
        # Members Officer: Sports Diver + Theory Instructor.
        membersOfficer = U.objects.create_user(
            email="mo@xsacdb.wjdp.uk",
            password="mo",
            first_name=self.fake.first_name(),
            last_name="Membersofficer",
        )
        membersOfficer.username = "mo"
        membersOfficer.save()
        membersOfficer.groups.add(groupMembers)
        membersOfficer.save()
        membersOfficer.memberprofile.approve(superUser)
        membersOfficer.memberprofile.fake(self.fake)
        membersOfficer.memberprofile.set_qualification(self.SD)
        membersOfficer.memberprofile.set_qualification(self.TI)
        membersOfficer.memberprofile.save()
        self.verifyEmail(membersOfficer)
        # Two plain Ocean Divers.
        od1 = U.objects.create_user(
            email="od1@xsacdb.wjdp.uk",
            password="od1",
            first_name=self.fake.first_name(),
            last_name="Oceandiver",
        )
        od1.username = "od1"
        od1.save()
        od1.memberprofile.approve(membersOfficer)
        od1.memberprofile.fake(self.fake)
        od1.memberprofile.set_qualification(self.OD)
        od1.memberprofile.save()
        self.verifyEmail(od1)
        od2 = U.objects.create_user(
            email="od2@xsacdb.wjdp.uk",
            password="od2",
            first_name=self.fake.first_name(),
            last_name="Oceandiver",
        )
        od2.username = "od2"
        od2.save()
        od2.memberprofile.approve(membersOfficer)
        od2.memberprofile.fake(self.fake)
        od2.memberprofile.set_qualification(self.OD)
        od2.memberprofile.save()
        self.verifyEmail(od2)
        # Two Sports Divers.
        sd1 = U.objects.create_user(
            email="sd1@xsacdb.wjdp.uk",
            password="sd1",
            first_name=self.fake.first_name(),
            last_name="Sportsdiver",
        )
        sd1.username = "sd1"
        sd1.save()
        sd1.memberprofile.approve(membersOfficer)
        sd1.memberprofile.fake(self.fake)
        sd1.memberprofile.set_qualification(self.SD)
        sd1.memberprofile.save()
        self.verifyEmail(sd1)
        sd2 = U.objects.create_user(
            email="sd2@xsacdb.wjdp.uk",
            password="sd2",
            first_name=self.fake.first_name(),
            last_name="Sportsdiver",
        )
        sd2.username = "sd2"
        sd2.save()
        sd2.memberprofile.approve(membersOfficer)
        sd2.memberprofile.fake(self.fake)
        sd2.memberprofile.set_qualification(self.SD)
        sd2.memberprofile.save()
        self.verifyEmail(sd2)
        # Two Dive Leaders.
        dl1 = U.objects.create_user(
            email="dl1@xsacdb.wjdp.uk",
            password="dl1",
            first_name=self.fake.first_name(),
            last_name="Diveleader",
        )
        dl1.username = "dl1"
        dl1.save()
        dl1.memberprofile.approve(membersOfficer)
        dl1.memberprofile.fake(self.fake)
        dl1.memberprofile.set_qualification(self.DL)
        dl1.memberprofile.save()
        self.verifyEmail(dl1)
        dl2 = U.objects.create_user(
            email="dl2@xsacdb.wjdp.uk",
            password="dl2",
            first_name=self.fake.first_name(),
            last_name="Diveleader",
        )
        dl2.username = "dl2"
        dl2.save()
        dl2.memberprofile.approve(membersOfficer)
        dl2.memberprofile.fake(self.fake)
        dl2.memberprofile.set_qualification(self.DL)
        dl2.memberprofile.save()
        self.verifyEmail(dl2)
        # One Open Water Instructor (also a Dive Leader).
        owi1 = U.objects.create_user(
            email="owi@xsacdb.wjdp.uk",
            password="owi1",
            first_name=self.fake.first_name(),
            last_name="Openwaterinstructor",
        )
        owi1.username = "owi1"
        owi1.save()
        owi1.memberprofile.approve(membersOfficer)
        owi1.memberprofile.fake(self.fake)
        owi1.memberprofile.set_qualification(self.DL)
        owi1.memberprofile.set_qualification(self.OWI)
        owi1.memberprofile.save()
        self.verifyEmail(owi1)
        # Lookup table by short handle.
        self.usefulUsers = {
            'su': superUser,
            'do': divingOfficer,
            'to': trainingOfficer,
            'mo': membersOfficer,
            'od1': od1,
            'od2': od2,
            'sd1': sd1,
            'sd2': sd2,
            'dl1': dl1,
            'dl2': dl2,
            'owi1': owi1,
        }
        # Ordered list; generateTrips slices this by index, so order matters.
        self.usefulUsersArray = [
            superUser,
            divingOfficer,
            trainingOfficer,
            membersOfficer,
            od1, od2,
            sd1, sd2,
            dl1, dl2,
            owi1,
        ]
        # Accounts used as actors for member approval/archival.
        self.memberActionUsers = [
            superUser,
            divingOfficer,
            membersOfficer
        ]
        self.status_write('Generated useful users')

    def _awardQualification(self, mp, qual):
        """Award `qual` to member profile `mp` with a random award mode.

        When the randomly chosen mode is 'XO' (presumably a cross-over
        from another agency -- confirm), a fake source agency string is
        attached.
        """
        pq = PerformedQualification(trainee=mp, qualification=qual)
        pq.mode = random.choice(PerformedQualification.MODE_CHOICES)[0]
        if pq.mode == 'XO':
            pq.xo_from = '{} {}'.format(
                random.choice(['PADI', 'SSI', 'NAUI', 'CMAS', 'FLOPS']),
                random.choice(['Dive-maestro', 'Fin-flapper', 'Fish', 'Snorkel-sucker', 'Coral-sniffer']),
            )
        mp.award_qualification(pq, actor=self.usefulUsers['do'])

    def generateFluffyUsers(self):
        """Create FLUFFY_USER_COUNT anonymous filler members.

        Each member gets a weighted-random mix of: a faked profile,
        verified email, approval, personal/instructor qualifications,
        and (10% chance) archival.
        """
        U = get_user_model()
        for i in range(0, self.FLUFFY_USER_COUNT):
            u = U.objects.create_user(
                email=self.fake.email(),
                password="guest",
                first_name=self.fake.first_name(),
                last_name=self.fake.last_name(),
            )
            u.save()
            if self.fake.boolean(chance_of_getting_true=90):
                u.memberprofile.fake(self.fake)
            if self.fake.boolean(chance_of_getting_true=80):
                self.verifyEmail(u)
            else:
                # Still register the address with allauth, just unverified.
                sync_user_email_addresses(u)
            if self.fake.boolean(chance_of_getting_true=80):
                u.memberprofile.approve(random.choice(self.memberActionUsers))
            # NOTE(review): the inner loops reuse `i`, shadowing the outer
            # counter; harmless for `for` loops but worth renaming.
            if self.fake.boolean(chance_of_getting_true=90):
                for i in range(random.randint(1,4)):
                    self._awardQualification(u.memberprofile, random.choice(self.PERSONAL_QUALS))
            if self.fake.boolean(chance_of_getting_true=10):
                for i in range(random.randint(1, 4)):
                    self._awardQualification(u.memberprofile, random.choice(self.INSTRUCTOR_QUALS))
            if self.fake.boolean(chance_of_getting_true=10):
                # Archive some
                u.memberprofile.archive(random.choice(self.memberActionUsers))
            u.memberprofile.save()
        self.status_write('Generated {} fluffy users'.format(self.FLUFFY_USER_COUNT))

    def generateTrainingGroups(self):
        """Create the named trainee groups plus TG_COUNT random ones."""
        def fill_group(group, qual, count):
            # Group will contain no more than count trainees. It may contain less.
            # (Duplicates drawn by random.choice are skipped, not retried.)
            ts = []
            for i in range(0, count):
                t = random.choice(MemberProfile.objects.filter(top_qual_cached=qual))
                if t not in ts:
                    ts.append(t)
                    group.trainees.add(t)
            group.save()
        # NOTE(review): `datetime` is not imported explicitly in this file's
        # visible imports; presumably re-exported by a wildcard import -- confirm.
        y = datetime.date.today().year
        self.tg_od1 = TraineeGroup.objects.create(name="Ocean Diver {}".format(y))
        self.tg_od1.trainees.add(self.usefulUsers['su'].profile)  # SU should be an ocean diver
        fill_group(self.tg_od1, None, random.randint(8, self.TG_MAX_SIZE))
        self.tg_od2 = TraineeGroup.objects.create(name="Ocean Diver {}".format(y - 1))
        fill_group(self.tg_od2, None, random.randint(8, self.TG_MAX_SIZE))
        self.tg_sd1 = TraineeGroup.objects.create(name="Sports Diver {}".format(y))
        fill_group(self.tg_sd1, self.OD, random.randint(8, self.TG_MAX_SIZE))
        self.tg_sd2 = TraineeGroup.objects.create(name="Sports Diver {}".format(y - 1))
        fill_group(self.tg_sd2, self.OD, random.randint(8, self.TG_MAX_SIZE))
        self.tg_dl1 = TraineeGroup.objects.create(name="Dive Leader {}".format(y))
        fill_group(self.tg_dl1, self.SD, random.randint(8, self.TG_MAX_SIZE))
        self.status_write('Generated useful training groups')
        # Random filler groups with word-salad names.
        for i in range(0, self.TG_COUNT):
            g = TraineeGroup.objects.create(name=' '.join(self.fake.words(nb=random.randint(2, 4))))
            fill_group(g, random.choice([None, self.OD, self.SD, self.DL, self.AD, self.FC]), random.randint(1, 15))
        self.status_write('Generated {} fluffy training groups'.format(self.TG_COUNT))

    def generateSessions(self):
        """Create SS_COUNT training sessions with performed lessons attached."""
        instructors = MemberProfile.objects.filter(is_instructor_cached=True)
        sites = Site.objects.filter(type='TR')
        def session_name(mode, qual):
            # 'AS' (assessment) sessions get a descriptive exam name.
            if mode == 'AS':
                return "{} Theory Exam".format(qual.title)
            else:
                return self.fake.word()
        for i in range(0, self.SS_COUNT):
            mode = random.choice(['TH', 'SW', 'OW', 'AS'])
            # Pick a (trainee group, qualification) pair to draw trainees from.
            g = random.choice([
                (self.tg_od1, self.OD),
                (self.tg_od2, self.OD),
                (self.tg_sd1, self.SD),
                (self.tg_sd2, self.SD),
                # (self.tg_dl1, self.DL),
            ])
            s = Session.objects.create(
                name=session_name(mode, g[1]),
                when=self.fake.date_time_between(start_date="-3y", end_date="+120d"),
                where=random.choice(sites),
                notes='\n\n'.join(self.fake.paragraphs(nb=random.randint(0, 3)))
            )
            # Attach a random prefix of the group's trainees as performed lessons.
            ts = g[0].trainees.all()
            for j in range(0, random.randint(1, min(len(ts), self.TG_MAX_SIZE))):
                pl = PerformedLesson.objects.create(
                    session=s,
                    lesson=random.choice(Lesson.objects.filter(
                        qualification=g[1], mode=mode,
                    )),
                    instructor=random.choice(instructors),
                    trainee=ts[j],
                )
        self.status_write('Generated {} sessions'.format(self.SS_COUNT))

    def generatePerformedLessons(self):
        """Generate performed lessons per trainee.

        NOTE(review): this body looks incomplete -- generateTraineePLs is
        defined but never called and stops after fetching theory lessons.
        Confirm against version control whether code was lost here.
        """
        instructors = MemberProfile.objects.filter(is_instructor_cached=True)
        def generateTraineePLs(trainee, qual, level=0.5, previous=False):
            # Theory
            ths = Lesson.objects.filter(qualification=qual, mode='TH')

    def generateTrips(self):
        """Create TRIP_COUNT fake trips, ~80% of them in the past.

        Owners are drawn from usefulUsersArray slices so the owner holds
        at least the trip's minimum qualification (indices 6+ are SD or
        higher, 8+ are DL or higher).
        """
        for i in range(0, self.TRIP_COUNT):
            trip = Trip()
            trip.fake(fake=self.fake, quals=self.PERSONAL_QUALS, past=self.fake.boolean(chance_of_getting_true=80))
            if trip.min_qual == self.SD:
                trip.owner = random.choice(self.usefulUsersArray[6:]).get_profile()
            elif trip.min_qual == self.DL:
                trip.owner = random.choice(self.usefulUsersArray[8:]).get_profile()
            else:
                trip.owner = random.choice(self.usefulUsersArray).get_profile()
            trip.save()
        self.status_write('Generated {} trips'.format(self.TRIP_COUNT))

    def fillTrips(self):
        """Add members to existing trips (skipping ~10% of trips entirely)."""
        membership = MemberProfile.objects.filter(archived=False)
        # Every trip at or past the cancelled state in the state ordering.
        trips_to_fill = Trip.objects.filter(state__gte=STATE_CANCELLED)
        for trip in trips_to_fill:
            if self.fake.boolean(chance_of_getting_true=10):
                continue
            # Overbook by up to 5 when the trip declares spaces.
            if trip.spaces:
                fill_max = random.randint(0, trip.spaces + 5)
            else:
                fill_max = random.randint(0, 12)
            # Weight the trip owner 10:1 against the DO/SU as the acting user.
            actors = [trip.owner.user] * 10 + [self.usefulUsers['do'], self.usefulUsers['su']]
            already_on_trip = []
            # NOTE(review): the rejection loop below never terminates if
            # fill_max exceeds the active membership count -- unlikely with
            # the generated data, but worth a guard.
            for i in range(0, fill_max):
                member = random.choice(membership)
                while member in already_on_trip:
                    member = random.choice(membership)
                already_on_trip.append(member)
                if trip.state in (STATE_COMPLETED, STATE_CANCELLED):
                    # Not allowed to do this via normal methods, do manually
                    TripMember.objects.create(
                        trip=trip,
                        member=member,
                        state=TripMember.STATE_ACCEPTED,
                    )
                else:
                    trip.add_members(members=[member], actor=random.choice(actors))
        self.status_write('Filled {} trips'.format(trips_to_fill.count()))
|
import pandas as pd
import os.path
# Workbook we probe for in the current working directory.
xlfile = "sample.xlsx"

# Emit the marker only when the workbook is actually present.
workbook_present = os.path.exists(xlfile)
if workbook_present:
    print("dd")
from flask import request, render_template, jsonify, url_for, redirect, g
from flask_socketio import SocketIO, emit
from .models import User
from index import app, db
import redis
from sqlalchemy.exc import IntegrityError
import time
from .utils.auth import generate_token, requires_auth, verify_token
# Shared Redis connection holding the live market data hashes.
# NOTE(review): the ElastiCache hostname is hard-coded; consider moving it
# to configuration so other environments can run this module.
r = redis.StrictRedis(host='redis-group.v7ufhi.ng.0001.use1.cache.amazonaws.com', port=6379, db=0)
# Wrap the Flask app so it can serve WebSocket events.
socketio = SocketIO(app)
def emit_data_from_redis():
    """Snapshot every hash stored in Redis and push it to the connected
    client as a single 'liveData' event.

    Each hash is expected to hold 'bid', 'ask', 'avg_spread' and an
    optional 'spread' field -- TODO confirm against the producer process.
    Must be called from within a Socket.IO event context (uses emit()).
    """
    data = {}
    for key in r.scan_iter():
        if r.type(key) == b'hash':
            # Fetch 'spread' once. The original read it twice (a None check
            # and then a second HGET to decode), costing an extra round-trip
            # and racing with any concurrent delete between the two calls.
            spread = r.hget(key, 'spread')
            data[key.decode()] = {
                'bid': r.hget(key, 'bid').decode(),
                'ask': r.hget(key, 'ask').decode(),
                'spread': spread.decode() if spread is not None else None,
                'avg_spread': r.hget(key, 'avg_spread').decode(),
            }
    print('data: ', data)
    emit('liveData', data)
# Handle the webapp connecting to the websocket
@socketio.on('connect')
def test_connect():
    """On client connect: log the event and immediately push a data snapshot."""
    print('someone connected to websocket')
    emit_data_from_redis()
@socketio.on('next')
def next_data(success):
    """Client acknowledgement handler: wait a second, then re-emit data.

    `success` is the payload sent by the client; it is not inspected here.
    """
    print('next_data')
    time.sleep(1)  # crude rate limit: at most ~1 push per second per client
    emit_data_from_redis()
@app.route('/', methods=['GET'])
def index():
    """Serve the single-page application shell."""
    return render_template('index.html')
@app.route('/<path:path>', methods=['GET'])
def any_root_path(path):
    """Catch-all route: client-side routing owns the path, so every
    unmatched URL serves the same SPA shell."""
    return render_template('index.html')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.