text stringlengths 38 1.54M |
|---|
from rest_framework import serializers
from .models import *
class ConvoSerializer(serializers.ModelSerializer):
    """Serializes Convo model instances to and from primitive representations."""

    class Meta:
        # Defines which model this serializer maps to.
        model = Convo
        # With '__all__', every model field is exposed and must be supplied on
        # instantiation; only many-to-many fields and fields with defaults may
        # be omitted.
        fields = '__all__'
        # Alternative: blacklist specific fields instead of exposing all.
        # exclude = ['users', 'otherField']
class UserSerializer(serializers.ModelSerializer):
    """Serializes User model instances; exposes every model field."""

    class Meta:
        model = User
        fields = '__all__'
class RoomSerializer(serializers.ModelSerializer):
    """Serializes Room model instances; exposes every model field."""

    class Meta:
        model = Room
        fields = '__all__'
class TagSerializer(serializers.ModelSerializer):
    """Serializes Tag instances, nesting the tag's convos read-only.

    `convos` is rendered with ConvoSerializer, so each related Convo is
    embedded in full rather than as a primary key; it is never writable.
    """

    convos = ConvoSerializer(many=True, read_only=True)

    class Meta:
        model = Tag
        fields = '__all__'
class PostSerializer(serializers.ModelSerializer):
    """Serializes Post model instances; exposes every model field."""

    class Meta:
        model = Post
        fields = '__all__'
|
#-*- coding: utf-8
import wx
class StreamPropertiesWindow(wx.Dialog):
    """Modal dialog that lists the properties of a single stream.

    The stream's ``str()`` text is parsed into a grid of static labels; an
    OK button ends the modal loop.
    """

    def __init__(self, parent, stream):
        super().__init__(parent=parent, title="Stream properties")
        panel = wx.Panel(self)
        generalSizer = wx.BoxSizer(wx.VERTICAL)
        mainPanel = wx.BoxSizer(wx.VERTICAL)
        # Populate the top of the dialog with the parsed property grid.
        self.__printStreamInfo(panel, mainPanel, stream)
        okButton = wx.Button(panel, label="OK")
        # NOTE(review): the modal result is wx.OK (the button-flag constant),
        # not wx.ID_OK — callers of ShowModal() must compare against wx.OK.
        self.Bind(wx.EVT_BUTTON, lambda event: self.EndModal(wx.OK), okButton)
        mainPanel.Add(okButton, 0, wx.ALIGN_CENTRE_HORIZONTAL)
        generalSizer.Add(mainPanel, 1, wx.ALL|wx.EXPAND, 10)
        panel.SetSizer(generalSizer)
        generalSizer.Fit(self)
        self.Centre(wx.BOTH)

    def __printStreamInfo(self, parentPanel, sizer, stream):
        # Assumes str(stream) yields a header line (discarded) followed by
        # tab-separated property rows — TODO confirm against the stream type
        # actually passed by callers.
        streamProperties = str(stream).split("\n")[1:]
        # Three columns, 5px gaps; the third (value) column stretches.
        infoSizer = wx.FlexGridSizer(3, (5, 5))
        infoSizer.AddGrowableCol(2, 1)
        for streamProperty in streamProperties:
            for propertyField in streamProperty.split("\t"):
                propertyField = propertyField.strip()
                # Skip empty cells produced by consecutive tabs.
                if propertyField != "":
                    window = wx.StaticText(parentPanel, label=propertyField)
                    infoSizer.Add(window, 0, wx.ALIGN_LEFT)
        sizer.Add(infoSizer, 1, wx.BOTTOM|wx.EXPAND, 10)
'''A simple calculator module for learning tdd in python.'''
def add(x, y):
    '''Return the sum of x and y.'''
    total = x + y
    return total
def subtract(x, y):
    '''Return x minus y.'''
    difference = x - y
    return difference
def multiply(x, y):
    '''Return the product of x and y.'''
    product = x * y
    return product
def devide(x, y):
    '''Return x divided by y.

    The function name keeps its historic misspelling ("devide") so existing
    callers continue to work; only the error-message spelling is corrected.

    Raises:
        ValueError: if y is zero.
    '''
    if y == 0:
        raise ValueError('Can not divide by zero!')
    return x / y
|
# Trains a random-forest regressor on ICS density-contrast features plus the
# true log halo mass as an extra "truth" feature, then predicts on a held-out
# test set and plots the comparison against a density-only run.
import sys
sys.path.append("/home/lls/mlhalos_code")
import numpy as np
from mlhalos import machinelearning as ml

# Get training set
saving_path = "/share/data2/lls/regression/test_truth_feature/"
path_features = "/share/data2/lls/features_w_periodicity_fix/"
training_ids = np.load("/share/data1/lls/regression/in_halos_only/log_m_output/even_radii_and_random/training_ids.npy")
testing_ids = np.load("/share/data1/lls/regression/in_halos_only/log_m_output/even_radii_and_random/testing_ids.npy")

# Memory-map the (256**3 particles x 50 smoothing scales) feature matrix,
# slice out only the rows needed, then drop the memmap to release it.
ics_den_features = np.lib.format.open_memmap(path_features + "ics_density_contrasts.npy", mode="r", shape=(256**3, 50))
ics_den_training = ics_den_features[training_ids]
ics_den_testing = ics_den_features[testing_ids]
del ics_den_features

halo_mass = np.load("/home/lls/stored_files/halo_mass_particles.npy")
log_mass = np.log10(halo_mass[training_ids])
# NOTE: despite the name, this holds log10 of the test-set halo masses.
halo_mass_testing = np.log10(halo_mass[testing_ids])
del halo_mass

# train on density + true halo mass
# log_mass appears twice on purpose: once as the "truth" feature and once as
# the regression target — MLAlgorithm presumably treats the last column as
# the label (consistent with `shape[1] - 1` below and with X_test having
# exactly one column fewer). TODO confirm against mlhalos.machinelearning.
training_features = np.column_stack((ics_den_training, log_mass, log_mass))
print(training_features.shape)

cv = True
# One third of the predictors (excluding the label column) for max_features.
third_features = int((training_features.shape[1] -1)/3)
param_grid = {"n_estimators": [1000, 1300],
              "max_features": [third_features, "sqrt", 25, 40],
              "min_samples_leaf": [5, 15],
              #"criterion": ["mse", "mae"],
              }
clf = ml.MLAlgorithm(training_features, method="regression", cross_validation=cv, split_data_method=None, n_jobs=60,
                     save=True, path=saving_path + "classifier/classifier.pkl", param_grid=param_grid)
if cv is True:
    print(clf.best_estimator)
    print(clf.algorithm.best_params_)
    print(clf.algorithm.best_score_)
np.save(saving_path + "f_imp.npy", clf.feature_importances)

# test
# X_test = 50 density features + the truth feature (no label column).
X_test = np.column_stack((ics_den_testing, halo_mass_testing))
y_predicted = clf.algorithm.predict(X_test)
np.save(saving_path + "predicted_log_halo_mass.npy", y_predicted)

###### PLOT RESULTS ######
# NOTE(review): the plotting stage reads /Users/... paths, so it was
# evidently run on a different machine after copying the outputs over.
from regression.plots import plotting_functions as pf

y_w_truth_feat = np.load("/Users/lls/Documents/mlhalos_files/truth_feature/predicted_log_halo_mass.npy")
y_den = np.load("/Users/lls/Documents/mlhalos_files/den_only_periodicity_fix/predicted_log_halo_mass.npy")
x = np.load("/Users/lls/Documents/mlhalos_files/lowz_density/true_mass_test_set.npy")
bins_plotting = np.linspace(x.min(), x.max(), 15, endpoint=True)

# VIOLINS
pf.compare_violin_plots(y_w_truth_feat, x, y_den, x,
                        bins_plotting, label1="ics density + truth", label2="ics density", color1="g",
                        color2="r")
# 2D HISTOGRAM
pf.compare_2d_histograms(x, y_w_truth_feat, x, y_den,
                         title1="ics density + truth", title2="ics density", save_path=None)
"""
You are the manager of a supermarket.
You have a list of N items together with their prices that consumers bought on a particular day.
Your task is to print each item_name and net_price in order of its first occurrence.
"""
# Enter your code here. Read input from STDIN. Print output to STDOUT
from collections import OrderedDict
if __name__ == "__main__":
N = int(input())
item = OrderedDict()
for i in range(N):
x = input().split()
item_name = (" "). join(x[:-1])
price = x[-1]
if item_name in item.keys():
item[item_name] += int(price)
else:
item[item_name] = int(price)
for key in item.keys():
print(key, item[key], sep=" ")
|
import json
from django.core.paginator import Paginator
from django.http import HttpResponse, JsonResponse
from django.shortcuts import render
# Create your views here.
from mutagen.mp3 import MP3
from albumapp.models import Album, Chapter
def getAllAlbum(request):
    """Return one page of albums as a JSON page envelope.

    Query params: ``page`` (1-based page number) and ``rows`` (page size).
    Response body: {"total", "records", "page", "rows"} where each row is an
    Album instance serialized by the local ``myDefault`` fallback.
    """
    page_num = request.GET.get('page')
    row_num = request.GET.get('rows')
    albums = Album.objects.all().order_by('id')
    # Build the paginator once; the original constructed it twice, which
    # re-ran the COUNT query for no benefit.
    paginator = Paginator(albums, row_num)
    page = paginator.page(page_num)
    page_data = {
        "total": paginator.num_pages,
        "records": paginator.count,
        "page": page_num,
        "rows": list(page),
    }

    def myDefault(u):
        # json.dumps fallback: serialize Album model instances field by field.
        if isinstance(u, Album):
            return {
                "author": u.author,
                "brief": u.brief,
                "broadcast": u.broadcast,
                "count": u.count,
                "cover": u.cover,
                "createDate": u.create_date,
                "id": u.id,
                "publishDate": u.publish_date,
                "score": u.score,
                "status": u.status,
            "title": u.title,
            }
        # Non-Album objects fall through and serialize as null (returns None),
        # matching the original behavior.

    data = json.dumps(page_data, default=myDefault)
    return HttpResponse(data)
def getChapterByAlbumId(request):
    """Return one page of the given album's chapters as a JSON page envelope.

    Query params: ``albumId``, ``page`` (1-based) and ``rows`` (page size).
    """
    album_id = request.GET.get('albumId')
    page_num = request.GET.get('page')
    row_num = request.GET.get('rows')
    chapters = Chapter.objects.all().filter(album_id=album_id).order_by('id')
    # Build the paginator once; the original constructed it twice, which
    # re-ran the COUNT query for no benefit.
    paginator = Paginator(chapters, row_num)
    page = paginator.page(page_num)
    page_data = {
        "total": paginator.num_pages,
        "records": paginator.count,
        "page": page_num,
        "rows": list(page),
    }

    def myDefault(u):
        # json.dumps fallback: serialize Chapter model instances field by field.
        if isinstance(u, Chapter):
            return {
                "albumId": u.album_id,
                "createDate": u.create_date.strftime("%Y-%m-%d"),
                "duration": u.duration,
                "id": u.id,
                "size": u.size,
                "title": u.title,
                "url": u.url,
            }
        # Non-Chapter objects fall through and serialize as null (returns
        # None), matching the original behavior.

    data = json.dumps(page_data, default=myDefault)
    return HttpResponse(data)
def getChapter(request):
    """Return every Chapter row (as plain dicts) in a single JSON array."""
    chapters = list(Chapter.objects.all().values())
    return HttpResponse(json.dumps(chapters))
def addalbum(request):
    """Create an Album row from query-string parameters and answer 'OK'.

    NOTE(review): a state-changing operation driven by GET (no CSRF
    protection, no input validation) — should be a POST; flagged here,
    not changed.
    """
    title = request.GET.get("title")
    score = request.GET.get("score")
    author = request.GET.get("author")
    broadcast = request.GET.get("broadcast")
    count = request.GET.get("count")
    brief = request.GET.get("brief")
    publish_date = request.GET.get("publish_date")
    cover = request.GET.get("cover")
    # NOTE(review): reads the "state" query key into the model's `status`
    # field — confirm the client really sends "state" and not "status".
    status = request.GET.get("state")
    create_date = request.GET.get("create_date")
    # Debug trace left in place.
    print(title,status,score,publish_date)
    Album.objects.create(title=title, score=score, author=author, create_date=create_date,
                         publish_date=publish_date,broadcast=broadcast,count=count,
                         brief=brief,cover=cover,status=status)
    return HttpResponse('OK')
def delete(request):
    """Delete the Album whose id arrives in the query string.

    Responds with {"error": "0"} on success, or an error payload when no
    matching album exists.
    """
    album_id = request.GET.get('id')
    print(album_id)
    matching = Album.objects.filter(id=album_id)
    if not matching:
        return JsonResponse({"error": 1, "msg": "删除失败!"})
    matching.delete()
    return JsonResponse({"error": "0"})
def updata(request):
    # TODO: not implemented. "updata" is presumably a typo for "update",
    # but the name may be wired into URL config — keep it as-is.
    pass
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from PyQt5.QtWidgets import *
from PyQt5 import QtCore
from PyQt5.QtGui import *
import Parameters as Parameters
import SendEmail as SendEmail
import pyperclip
# Класс кнопки для копирования ответа (часто используется, поэтому был создан отдельный класс)
class CopyButton():
    """Factory for the "copy answer text" button shared by answer windows.

    NOTE(review): ``button`` is always invoked unbound as
    ``CopyButton.button(self)`` with the calling QWidget as ``self`` (see
    the Tema* classes), so the button is parented to the caller's window
    rather than to a CopyButton instance — confirm before converting this
    into a regular method.
    """

    def button(self):
        # `self` is the widget that will own the button (see class note).
        copy_button = QPushButton('Скопировать текст', self)
        button_color = Parameters.Color().whatcolor()
        copy_button.setStyleSheet("background-color: {0}".format(button_color))
        return copy_button
# Класс кнопки для отправки письма (часто используется, поэтому был создан отдельный класс)
class SendEmailButton():
    """Factory for the "send letter" button shared by answer windows.

    NOTE(review): ``button`` is always invoked unbound as
    ``SendEmailButton.button(self)`` with the calling QWidget as ``self``
    (see the Tema* classes), so the button is parented to the caller's
    window rather than to a SendEmailButton instance.
    """

    def button(self):
        # `self` is the widget that will own the button (see class note).
        send_button = QPushButton('Отправить письмо', self)
        button_color = Parameters.Color().whatcolor()
        send_button.setStyleSheet("background-color: {0}".format(button_color))
        return send_button
# Класс окна отправки письма
class SendEmailWindow(QWidget):
    """Letter-sending window: collects the sender's address and password plus
    the recipient's address, then delegates to SendEmail.send_letter."""

    def __init__(self):
        # Fixed: the original called super(QWidget, self).__init__(), which
        # starts the MRO lookup *after* QWidget and so skips QWidget.__init__.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowTitle('Отправка письма')
        windoww1 = Parameters.ParameterSize().ww()
        windowh1 = Parameters.ParameterSize().wh()
        button_color = Parameters.Color().whatcolor()
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(windoww1 * 0.2395833333333333, windowh1 * 0.4259259259259259)
        log_pas = QHBoxLayout()
        btns = QHBoxLayout()
        vbox = QVBoxLayout()
        # Sender's e-mail field.
        self.uremail_line = QLineEdit()
        self.uremail_line.setPlaceholderText('Введите ваш @mail')
        # Sender's password field (masked input).
        self.urpassword_line = QLineEdit()
        self.urpassword_line.setPlaceholderText('Введите ваш пароль')
        self.urpassword_line.setEchoMode(QLineEdit.Password)
        # Recipient's e-mail field.
        self.address_line = QLineEdit()
        self.address_line.setPlaceholderText('Введите @mail получателя')
        # Send button.
        btn_send = QPushButton('Отправить', self)
        btn_send.setStyleSheet("background-color: {0}".format(button_color))
        log_pas.addWidget(self.uremail_line)
        log_pas.addWidget(self.urpassword_line)
        btns.addWidget(btn_send)
        vbox.addLayout(log_pas)
        vbox.addWidget(self.address_line, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(btns)
        self.setLayout(vbox)
        btn_send.clicked.connect(self.send)

    def send(self):
        """Send the letter with the entered credentials, then close."""
        uremail = self.uremail_line.text()
        urpassword = self.urpassword_line.text()
        address = self.address_line.text()
        # The original compared the result against QMessageBox.Ok, but both
        # branches closed the window identically — the call is kept only for
        # its side effect.
        SendEmail.send_letter(uremail, urpassword, address)
        self.close()
'''
Далее идут шаблонные ответы. Они занимают порядка 2000 строк, поэтому рекомендую не тратить время на чтение их всех,
так как структура каждого из них идентична. Предлагаю рассмотреть реализацию одного из ответов на примере ниже.
'''
# Класс первого раздела
class Ans1(QWidget):
    """First-section window: questions about ordering products.

    Shows one button per sub-topic; clicking a button opens the window with
    the templated answer for that sub-topic.
    """

    def __init__(self):
        # Fixed: super(QWidget, self).__init__() skipped QWidget.__init__.
        super().__init__()
        # Window chrome and geometry.
        button_color = Parameters.Color().whatcolor()
        self.setFont(QFont('Century Gothic', 10))
        windoww1 = Parameters.ParameterSize().ww()
        windowh1 = Parameters.ParameterSize().wh()
        self.setWindowTitle('Вопросы по заказу продукции')
        self.setFixedSize(windoww1*0.625, windowh1*0.7407407407407407)
        self.setWindowIcon(QIcon('logo.png'))
        # One button per sub-topic of this section; clicking opens the
        # matching answer window.
        topleft1 = QPushButton("Обращается новый партнёр "
                               "\nи хочет узнать, каким образом "
                               "\nможно приобрести продукцию Остеомеда.", self)
        topleft1.setStyleSheet("background-color: {0}".format(button_color))
        topleft1.clicked.connect(self.show_tema13)
        topleft2 = QPushButton("Партнёр хочет сделать заказ "
                               "\nне стандартной продукции"
                               "\n(с индивидуальными характеристиками).", self)
        topleft2.setStyleSheet("background-color: {0}".format(button_color))
        topleft2.clicked.connect(self.show_tema14)
        topleft3 = QPushButton("Партнёр просит принять "
                               "\nего заказ по телефону,"
                               "\nт.к. нет возможности написать письмо. ", self)
        topleft3.setStyleSheet("background-color: {0}".format(button_color))
        topleft3.clicked.connect(self.show_tema18)
        topleft4 = QPushButton("Партнёр просит уточнить, "
                               "\nвозможна ли срочная поставка товара"
                               "\n(день в день) и что для этого требуется. ", self)
        topleft4.setStyleSheet("background-color: {0}".format(button_color))
        topleft4.clicked.connect(self.show_tema12)
        topleft5 = QPushButton("Партнёр просит скорректировать счёт на продукцию"
                               "\nпо прошествии более трёх дней с даты полной или частичной оплаты"
                               "\n(продукции нет в наличии,"
                               "\nона будет специально производиться для партнёра).", self)
        topleft5.setStyleSheet("background-color: {0}".format(button_color))
        topleft5.clicked.connect(self.show_tema11)
        # Arrange the sub-topic buttons in the window.
        vbox = QVBoxLayout()
        vbox.addWidget(topleft1)
        vbox.addWidget(topleft2)
        vbox2 = QVBoxLayout()
        vbox2.addWidget(topleft3)
        vbox2.addWidget(topleft4)
        hbox = QHBoxLayout()
        hbox.addLayout(vbox)
        hbox.addLayout(vbox2)
        hbox.addWidget(topleft5)
        self.setLayout(hbox)

    # Handlers that open the answer windows. Each window is kept as an
    # attribute so it is not garbage-collected immediately after show().
    def show_tema13(self):
        self.w11 = Tema13()
        self.w11.show()

    def show_tema14(self):
        self.w12 = Tema14()
        self.w12.show()

    def show_tema18(self):
        # BUG FIX: the original assigned to a *local* `w13` and then
        # dereferenced the never-created attribute `self.w13`, raising
        # AttributeError on every click.
        self.w13 = Tema18()
        self.w13.show()

    def show_tema12(self):
        self.w14 = Tema12()
        self.w14.show()

    def show_tema11(self):
        self.w15 = Tema11()
        self.w15.show()
# Ответ на первый подраздел
class Tema13(QWidget):
    """Answer window: how a new partner can purchase products."""

    def __init__(self):
        super(QWidget, self).__init__()
        # Window chrome and geometry.
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        self.title = 'Приобретение продукции'
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle(self.title)
        self.setFont(QFont('Century Gothic', 10))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # The templated answer text (Russian, user-facing — do not edit here).
        self.txt = "Последовательность действий следующая:" \
                   "\n1) Необходимо уточнить название или регион конечного клиента." \
                   "\n2) Перенаправить к менеджеру по указанному региону, что бы менеджер решил:" \
                   "\n -Самому работать с данной больницей" \
                   "\n (работа с больницей напрямую – скорее исключение, при особой необходимости для компании)." \
                   "\n -Отдать уже существующему дилеру (чаще всего просто передаётся существующему дилеру)." \
                   "\n -Если в этом регионе нет работающего дилера, то необходимо" \
                   "\n запрашивать у нового дилера все документы для заключения с ним договора поставки." \
                   "\nВажно так же учитывать о какой сумме идет речь, " \
                   "\n насколько долгосрочное это будет сотрудничество " \
                   "\n и имеет ли смысл вообще заключать с новым юр. лицом какое-либо соглашение. "
        answer1_1 = QLabel(self.txt, self)
        # Send-by-email button (shared factory; parented to this window).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        # Copy-to-clipboard button.
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        # Lay the answer and the two buttons out.
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_1, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        # Stage the answer body in a temp file (cp1251), presumably read by
        # SendEmail.send_letter — confirm against SendEmail.
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        # Stage the answer title the same way.
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema14(QWidget):
    """Answer window: ordering non-standard (custom-spec) products."""

    def __init__(self):
        # Window chrome and geometry.
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setWindowIcon(QIcon('logo.png'))
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Заказ нестандартной продукции'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # The templated answer text (Russian, user-facing — do not edit here).
        self.txt = 'Последовательность действий следующая:' \
                   '\n1) Стараемся предложить все-таки то,' \
                   '\nчто имеется на складе с пояснением преимуществ стандартного изделия ' \
                   '\n(особенно указывая на то, что оно либо уже есть на складах, ' \
                   '\nлибо будет сделано гораздо быстрее и дешевле индивидуального заказа).' \
                   '\n2) Уточняем у руководства и производства, ' \
                   '\nможно ли сделать такое индивидуальное изделие для партнёра.' \
                   '\n3) Если партнёр на 100% хочет индивидуальное изделие - уточняем конечного клиента,' \
                   '\nинформируем о большем времени изготовления и большей стоимости изготовления ' \
                   '\n(а также предупреждаем о том, что на продукцию будет распространяться ставка НДС 18%),' \
                   '\nинформируем о необходимости 100% предоплаты. ' \
                   '\n4) Ждём предоплаты и после согласовываем заказ с производством.'
        answer1_2 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_2, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema18(QWidget):
    """Sub-menu: order placed by phone — pick which situation applies."""

    def __init__(self):
        button_color = Parameters.Color().whatcolor()
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setWindowIcon(QIcon('logo.png'))
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowTitle('Заказ по телефону')
        self.setFixedSize(window_w1 * 0.5208333333333333, window_h1 * 0.6481481481481481)
        # Two side-by-side case buttons, each opening its answer window.
        row = QHBoxLayout()
        for caption, handler in (
            ("Надёжный, проверенный партнёр и ситуация исключительной важности:", self.show_tema181),
            ("Во всех остальных случаях:", self.show_tema182),
        ):
            option = QPushButton(caption, self)
            option.setStyleSheet("background-color: {0}".format(button_color))
            option.clicked.connect(handler)
            row.addWidget(option)
        self.setLayout(row)

    def show_tema181(self):
        self.w11 = Tema181()
        self.w11.show()

    def show_tema182(self):
        self.w12 = Tema182()
        self.w12.show()
class Tema181(QWidget):
    """Answer window: phone order from a trusted partner (special case)."""

    def __init__(self):
        # Window chrome and geometry.
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.title = "Заказ по телефону (надежный парнер, частный случай)"
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # The templated answer text (Russian, user-facing — do not edit here).
        self.txt = 'Возможно исключение в виде заказа по телефону ' \
                   '\n(с одновременной трансляцией письма партнёру с указанием того, ' \
                   '\nчто заказ принят по телефону).'
        answer1_4 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema182(QWidget):
    """Answer window: phone orders in all other cases."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.title = 'Заказ по телефону (остальные случаи)'
        self.setWindowTitle('Остальные случаи')
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Принятие заявки на продукцию по телефону не представляется возможным.'
        answer_label = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        send_btn = SendEmailButton.button(self)
        send_btn.clicked.connect(self.show_send)
        clip_btn = CopyButton.button(self)
        clip_btn.clicked.connect(self.copy)
        buttons_row = QHBoxLayout()
        buttons_row.addWidget(send_btn)
        buttons_row.addWidget(clip_btn)
        column = QVBoxLayout()
        column.addWidget(answer_label, alignment=QtCore.Qt.AlignHCenter)
        column.addLayout(buttons_row)
        self.setLayout(column)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        for path, content in (("Text.txt", self.txt), ("Title.txt", self.title)):
            with open(path, "w", encoding='windows-1251') as textfile:
                textfile.write(content)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema12(QWidget):
    """Sub-menu: urgent (same-day) delivery — pick the stock situation."""

    def __init__(self):
        button_color = Parameters.Color().whatcolor()
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Срочная доставка')
        self.setFixedSize(window_w1 * 0.5208333333333333, window_h1 * 0.6481481481481481)
        # Two side-by-side case buttons, each opening its answer window.
        row = QHBoxLayout()
        for caption, handler in (
            ("Товар есть на складе в г. Москва:", self.show_tema121),
            ("Товара нет на складе в г. Москва:", self.show_tema122),
        ):
            option = QPushButton(caption, self)
            option.setStyleSheet("background-color: {0}".format(button_color))
            option.clicked.connect(handler)
            row.addWidget(option)
        self.setLayout(row)

    def show_tema121(self):
        self.w11 = Tema121()
        self.w11.show()

    def show_tema122(self):
        self.w12 = Tema122()
        self.w12.show()
class Tema121(QWidget):
    """Answer window: urgent delivery when the goods are in the Moscow stock."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Срочная доставка (товар есть)'
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # NOTE(review): "можна" looks like a typo (likely "возможна"), but it
        # is user-facing runtime text — flagged here, left unchanged.
        self.txt = 'Срочная поставка можна при заказе до 15:00 и 100% оплате выставленного счёта.'
        answer1_4 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema122(QWidget):
    """Answer window: urgent delivery when the goods are NOT in Moscow stock."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Срочная доставка (товара нет)'
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # The templated answer text (Russian, user-facing — do not edit here).
        self.txt = 'Объяснить клиенту, что невозможно день-в-день доставить товар с производства в Москву.' \
                   '\nМинимальный срок доставки - воспользоваться услугами транспортной компании.'
        answer1_4 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema11(QWidget):
    """Sub-menu: invoice corrections — pick the production status."""

    def __init__(self):
        button_color = Parameters.Color().whatcolor()
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: super(QWidget, self).__init__() skipped QWidget.__init__.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowTitle('Корректировки по счёту')
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.6481481481481481)
        topleft1 = QPushButton("Заказ уже производится или произведён:", self)
        topleft1.setStyleSheet("background-color: {0}".format(button_color))
        topleft1.clicked.connect(self.show_tema111)
        # BUG FIX: the original label read "не не передан" — duplicated word
        # removed from the user-facing text.
        topleft2 = QPushButton("Заказ ещё не передан на изготовление:", self)
        topleft2.setStyleSheet("background-color: {0}".format(button_color))
        topleft2.clicked.connect(self.show_tema112)
        hbox = QHBoxLayout()
        hbox.addWidget(topleft1)
        hbox.addWidget(topleft2)
        self.setLayout(hbox)

    def show_tema111(self):
        self.w11 = Tema111()
        self.w11.show()

    def show_tema112(self):
        self.w12 = Tema112()
        self.w12.show()
class Tema111(QWidget):
    """Answer window: the order is already in production or produced."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Заказ производится или произведен'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # The templated answer text (Russian, user-facing — do not edit here).
        self.txt = 'Никаких корректировок уже быть не может. ' \
                   '\nПартнёр обязан забрать изготовленный для него товар.'
        answer1_4 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema112(QWidget):
    """Answer window: the order has not been handed to production yet."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: super(QWidget, self).__init__() skipped QWidget.__init__.
        super().__init__()
        self.setWindowIcon(QIcon('logo.png'))
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Заказ не передан на изготовление'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # BUG FIX: user-facing spelling corrected ("корретировка" ->
        # "корректировка").
        self.txt = 'Возможна оперативная корректировка заказа ' \
                   '\n(с обязательным уточнением информации по срокам передачи в производство в Рыбинске).'
        answer1_4 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Ans2(QWidget):
    """Second-section window: questions about product delivery.

    One button per sub-topic; clicking opens the matching answer window.
    """

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        button_color = Parameters.Color().whatcolor()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Вопросы по поставке продукции')
        self.setFixedSize(window_w1 * 0.625, window_h1 * 0.7407407407407407)
        # Sub-topic buttons (labels are user-facing — do not edit here).
        topcenter1 = QPushButton("Становится известно о переносе "
                                 "\nсрока изготовления продукции в Рыбинске, "
                                 "\nза которую уже была внесена 100% предоплата партнёром.", self)
        topcenter1.setStyleSheet("background-color: {0}".format(button_color))
        topcenter1.clicked.connect(self.show_tema8)
        topcenter2 = QPushButton("Партнёр интересуется о сроках поставки товара,"
                                 "\nт.к. ранее уже была частичная отгрузка; "
                                 "\nпросит поторопить производство.", self)
        topcenter2.setStyleSheet("background-color: {0}".format(button_color))
        topcenter2.clicked.connect(self.show_tema9)
        topcenter3 = QPushButton("Партнёр просит уточнить причину задержки товара"
                                 "\nне в официальный срок (30 дней)"
                                 "\nпри условии полной или частичной предоплаты.", self)
        topcenter3.setStyleSheet("background-color: {0}".format(button_color))
        topcenter3.clicked.connect(self.show_tema10)
        topcenter4 = QPushButton("При пересчёте полученного товара на складе,"
                                 "\nпартнёр выявил факт пересорта "
                                 "\nпродукции/ошибки в товарной накладной.", self)
        topcenter4.setStyleSheet("background-color: {0}".format(button_color))
        topcenter4.clicked.connect(self.show_tema6)
        # Arrange the sub-topic buttons in two columns.
        vbox = QVBoxLayout()
        vbox.addWidget(topcenter1)
        vbox.addWidget(topcenter2)
        vbox2 = QVBoxLayout()
        vbox2.addWidget(topcenter3)
        vbox2.addWidget(topcenter4)
        hbox = QHBoxLayout()
        hbox.addLayout(vbox)
        hbox.addLayout(vbox2)
        self.setLayout(hbox)

    # Handlers that open the answer windows; each is kept as an attribute so
    # the window is not garbage-collected right after show().
    def show_tema8(self):
        self.w21 = Tema8()
        self.w21.show()

    def show_tema9(self):
        self.w22 = Tema9()
        self.w22.show()

    def show_tema10(self):
        self.w23 = Tema10()
        self.w23.show()

    def show_tema6(self):
        self.w24 = Tema6()
        self.w24.show()
class Tema8(QWidget):
    """Answer window: production deadline postponed after 100% prepayment."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Перенос сроков изготовления'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # The templated answer text (Russian, user-facing — do not edit here).
        self.txt = 'Последовательность действий следующая:' \
                   '\n 1) Связаться с партнёром, извиниться' \
                   '\nи сообщить о переносе сроков изготовления продукции' \
                   '\n(предварительно уточнив их в Рыбинске).' \
                   '\n 2) Если это поставка срочно нужна партнёру,' \
                   '\nто попробовать отгрузить со склада в Москве' \
                   '\n(при условии наличия товара).' \
                   '\nВАЖНО! Относиться это должно только к группам товара А и В.' \
                   '\n 3) Если партнёр просит заменить часть продукции' \
                   '\n(и изготовление товара в Рыбинске ещё не началось) – ' \
                   '\nвозможно предоставление замены' \
                   '\n(только в случае крайней необходимости для партнёра).' \
                   '\nНеобходимости провести корректировку в 1С,' \
                   '\nсделав корректировку в Заказе Покупателя.'
        answer2_1 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer2_1, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema9(QWidget):
    """Answer window: partner asks about delivery dates after partial shipment."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        super(QWidget, self).__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Информация о сроках поставки'
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        # The templated answer text (Russian, user-facing — do not edit here).
        self.txt = 'Последовательность действий следующая:' \
                   '\n 1) Уточнить данную информацию для партнёра' \
                   '\nна производстве в Рыбинске' \
                   '\n(вовремя будет сдан товар или есть задержки).' \
                   '\n 2) Если ещё не вышел срок изготовления продукции – ' \
                   '\nсообщить через сколько будет товар.' \
                   '\n 3) Если срок изготовления продукции уже прошёл -' \
                   '\nизвиниться и сообщить о переносе сроков изготовления продукции.' \
                   '\nЕсли товар срочно нужен партнёру -' \
                   '\nто попробовать отгрузить со склада в Москве' \
                   '\n(при условии наличия товара).' \
                   '\nВНИМАНИЕ! Это относится только к продукции категории А и В. '
        answer2_2 = QLabel(self.txt, self)
        # Send-by-email and copy-to-clipboard buttons (shared factories).
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer2_2, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the credentials window and stage the answer on disk (cp1251)."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the system clipboard."""
        pyperclip.copy(self.txt)
class Tema10(QWidget):
    """Menu window: delay of goods — choose by payment status."""

    def __init__(self):
        button_color = Parameters.Color().whatcolor()
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Информация о задержке товара')
        self.setFixedSize(window_w1 * 0.5208333333333333, window_h1 * 0.6481481481481481)
        topleft1 = QPushButton("Товар был оплачен партнёром на 100%, либо 50%/50% "
                               "\n(всё было оплачено ДО момента предполагаемой отгрузки)", self)
        topleft1.setStyleSheet("background-color: {0}".format(button_color))
        topleft1.clicked.connect(self.show_tema101)
        topleft2 = QPushButton("Товар был оплачен партнёром только на 50%", self)
        topleft2.setStyleSheet("background-color: {0}".format(button_color))
        topleft2.clicked.connect(self.show_tema102)
        hbox = QHBoxLayout()
        hbox.addWidget(topleft1)
        hbox.addWidget(topleft2)
        self.setLayout(hbox)

    def show_tema101(self):
        """Open the Tema101 answer window."""
        self.w11 = Tema101()  # reference kept to avoid garbage collection
        self.w11.show()

    def show_tema102(self):
        """Open the Tema102 answer window."""
        self.w12 = Tema102()  # reference kept to avoid garbage collection
        self.w12.show()
class Tema101(QWidget):
    """Answer window: goods fully (or 50/50 pre-shipment) paid."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.title = 'Товар оплачен на 100% или 50% (До момента предполагаемой отгрузки)'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Необходимо объяснить,' \
                   '\nчто была большая загруженность производства,' \
                   '\nприкладываются все усилия,' \
                   '\nчтобы ускорить выпуск партии товара партнёра.' \
                   '\nВНИМАНИЕ! Если партнёр не проблемный и всегда вовремя платит' \
                   '\n(держит свои обещания),' \
                   '\nто можно попробовать отгрузить часть продукции со склада' \
                   '\nпри его наличии – это право остаётся за КП.' \
                   '\nОтгрузить можно только продукцию группы А и В.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema102(QWidget):
    """Answer window: goods only 50% paid."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Товар оплачен только на 50%'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Чаще всего вторые 50% (отсрочку) не оплачивают,' \
                   '\nпока нет сообщения от КП о том,' \
                   '\nчто товар готов к отгрузке.' \
                   '\nЕсли так случилось, повторяем действия из первого пункта' \
                   '\nи напоминаем об условиях оплаты отсрочки.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema6(QWidget):
    """Menu window: warehouse mix-up/error — choose by document state."""

    def __init__(self):
        button_color = Parameters.Color().whatcolor()
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Факт пересорта/ошибки на складе')
        self.setFixedSize(window_w1 * 0.5208333333333333, window_h1 * 0.6481481481481481)
        topleft1 = QPushButton("Документы сделаны верно", self)
        topleft1.setStyleSheet("background-color: {0}".format(button_color))
        topleft1.clicked.connect(self.show_tema61)
        topleft2 = QPushButton("Ошибка в документах, но товар отгружен правильно", self)
        topleft2.setStyleSheet("background-color: {0}".format(button_color))
        topleft2.clicked.connect(self.show_tema62)
        topleft3 = QPushButton("Ошибка в документах и товар отгружен неправильно", self)
        topleft3.setStyleSheet("background-color: {0}".format(button_color))
        topleft3.clicked.connect(self.show_tema63)
        hbox = QHBoxLayout()
        vbox = QVBoxLayout()
        vbox.addWidget(topleft1)
        vbox.addWidget(topleft2)
        vbox.addWidget(topleft3)
        hbox.addLayout(vbox)
        self.setLayout(hbox)

    def show_tema61(self):
        """Open the Tema61 answer window."""
        self.w11 = Tema61()  # reference kept to avoid garbage collection
        self.w11.show()

    def show_tema62(self):
        """Open the Tema62 answer window."""
        self.w12 = Tema62()  # reference kept to avoid garbage collection
        self.w12.show()

    def show_tema63(self):
        """Open the Tema63 answer window."""
        self.w13 = Tema63()  # reference kept to avoid garbage collection
        self.w13.show()
class Tema61(QWidget):
    """Answer window: documents are correct, goods shipped wrongly."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.title = 'Документы верны'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Просим со следующим забором груза привезти' \
                   '\nне верно отгруженный товар в московский офис' \
                   '\nи меняем его на необходимый партнёру' \
                   '\n(строго в соответствии с ТН),' \
                   '\nесли отгрузка была со склада в Москве.' \
                   '\nЕсли отгрузка была со склада в Рыбинске,' \
                   '\nто предлагается отправить партнёру нужный товар со следующей отгрузкой,' \
                   '\nлибо оформить срочную доставку его в офис в Москве.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema62(QWidget):
    """Answer window: documents wrong, goods shipped correctly."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.title = 'Ошибка в документах'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Делается корректировка в Заказе Покупателю,' \
                   '\nреализации (всех задействованных документах),' \
                   '\nпосле чего готовится новый комплект документов' \
                   '\nи о них сообщается партнёру' \
                   '\n(док-ты высылаются по почте в электронном виде)' \
                   '\nс просьбой подписать и передать при первой возможности.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema63(QWidget):
    """Answer window: documents wrong AND goods shipped wrongly."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setWindowIcon(QIcon('logo.png'))
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Ошибка в документах, товар отгружен неправильно'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Делается корректировка в Заказе Покупателю,' \
                   '\nреализации (всех задействованных документах),' \
                   '\nпосле чего готовится новый комплект документов' \
                   '\nи о них сообщается партнёру (док-ты высылаются по почте в электронном виде).' \
                   '\nПросим со следующим забором груза привезти' \
                   '\nне верно отгруженный товар в московский офис и меняем его на необходимый партнёру' \
                   '\n(строго в соответствии с ТН), если отгрузка была со склада в Москве.' \
                   '\nЕсли отгрузка была со склада в Рыбинске,' \
                   '\nто предлагается отправить партнёру нужный товар со следующей отгрузкой,' \
                   '\nлибо оформить срочную доставку его в офис в Москве. '
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Ans3(QWidget):
    """Menu window: payment-related questions."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        button_color = Parameters.Color().whatcolor()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Вопросы по оплате')
        self.setFixedSize(window_w1 * 0.625, window_h1 * 0.7407407407407407)
        topcenter1 = QPushButton("У Партнёра есть просрочка оплаты по предыдущим счетам,"
                                 "\nон хочет продлить срок оплаты по ним"
                                 "\nс возможностью дальнейших отгрузок по новым счетам.", self)
        topcenter1.setStyleSheet("background-color: {0}".format(button_color))
        topcenter1.clicked.connect(self.show_tema1)
        topcenter2 = QPushButton("Партнёр хочет отгрузку ранее оплаченного товара,"
                                 "\nно в данный момент просрочил даты оплаты других счетов", self)
        topcenter2.setStyleSheet("background-color: {0}".format(button_color))
        topcenter2.clicked.connect(self.show_tema4)
        vbox = QVBoxLayout()
        vbox.addWidget(topcenter1)
        vbox.addWidget(topcenter2)
        hbox = QHBoxLayout()
        hbox.addLayout(vbox)
        self.setLayout(hbox)

    def show_tema1(self):
        """Open the Tema1 menu window."""
        self.w21 = Tema1()  # reference kept to avoid garbage collection
        self.w21.show()

    def show_tema4(self):
        """Open the Tema4 answer window."""
        self.w22 = Tema4()  # reference kept to avoid garbage collection
        self.w22.show()
class Tema1(QWidget):
    """Menu window: overdue payments — choose by debt amount."""

    def __init__(self):
        button_color = Parameters.Color().whatcolor()
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Просрочка оплаты по прошлым счетам')
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.6481481481481481)
        topleft1 = QPushButton("При сумме долга более 500 000 рублей", self)
        topleft1.setStyleSheet("background-color: {0}".format(button_color))
        topleft1.clicked.connect(self.show_tema1_1)
        topleft2 = QPushButton("При сумме долга менее 500 000 рублей", self)
        topleft2.setStyleSheet("background-color: {0}".format(button_color))
        topleft2.clicked.connect(self.show_tema1_2)
        hbox = QHBoxLayout()
        hbox.addWidget(topleft1)
        hbox.addWidget(topleft2)
        self.setLayout(hbox)

    def show_tema1_1(self):
        """Open the TemA11 answer window."""
        self.w11 = TemA11()  # reference kept to avoid garbage collection
        self.w11.show()

    def show_tema1_2(self):
        """Open the TemA12 answer window."""
        self.w12 = TemA12()  # reference kept to avoid garbage collection
        self.w12.show()
class TemA11(QWidget):
    """Answer window: debt over 500 000 roubles."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Сумма долга более 500 000 рублей'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Должна быть приостановка всех отгрузок до момента оплаты старых долгов.' \
                   '\nНа усмотрение КП (при срочной или сверх важной операции)' \
                   '\nможно предложить Партнёру 100% предоплату' \
                   '\nпо счёту с уточнением информации по срокам оплаты прошлых счетов.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class TemA12(QWidget):
    """Answer window: debt under 500 000 roubles."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.title = 'Сумма долга менее 500 000 рублей'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Партнёр может написать гарантийное письмо с точными сроками оплаты' \
                   '\n(в разумных пределах) и номерами счетов.' \
                   '\nПо новым счетам предусматривается оплата 100%.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema4(QWidget):
    """Answer window: early shipment of pre-paid goods."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Досрочная отгрузка'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Несмотря на то, что из-за неоплаты старых счетов партнёр' \
                   '\nможет находится в «стоп-листе», оплаченный товар должен быть передан покупателю.' \
                   '\nНо необходимо уведомить партнёра о том,' \
                   '\nчто данная отгрузка не означает отгрузку неоплаченных и новых счетов до того момента, ' \
                   '\nпока отсрочка не будет погашена.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Ans4(QWidget):
    """Menu window: product-selection questions."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        button_color = Parameters.Color().whatcolor()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Вопросы по подбору продукции')
        self.setFixedSize(window_w1 * 0.625, window_h1 * 0.7407407407407407)
        topcenter1 = QPushButton("Партнёр просит проконсультировать его относительно артикула продукции"
                                 "\n(например, озвучить длину штифта)"
                                 "\nи просит подобрать к нему артикулы для составления типового комплекта"
                                 "\n(Допустим комплект для операции на прокс. бедро). ", self)
        topcenter1.setStyleSheet("background-color: {0}".format(button_color))
        topcenter1.clicked.connect(self.show_tema16)
        topcenter2 = QPushButton("Партнёр хочет уточнить,"
                                 "\nподходит ли та или иная продукция для совмещения с продукцией другого бренда"
                                 "\n(Например штифты -Остеомед, а винты – Синтез).", self)
        topcenter2.setStyleSheet("background-color: {0}".format(button_color))
        topcenter2.clicked.connect(self.show_tema15)
        vbox = QVBoxLayout()
        vbox.addWidget(topcenter1)
        vbox.addWidget(topcenter2)
        hbox = QHBoxLayout()
        hbox.addLayout(vbox)
        self.setLayout(hbox)

    def show_tema16(self):
        """Open the Tema16 answer window."""
        self.w41 = Tema16()  # reference kept to avoid garbage collection
        self.w41.show()

    def show_tema15(self):
        """Open the Tema15 answer window."""
        self.w42 = Tema15()  # reference kept to avoid garbage collection
        self.w42.show()
class Tema15(QWidget):
    """Answer window: combining products with another brand."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Совмещение с продукицей другого бренда'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Последовательность действий следующая:' \
                   '\n 1) В первую очередь необходимо уточнить,' \
                   '\nс чем связана такая ситуация, почему необходимо совмещать продукцию.' \
                   '\n 2) Уточнить ифнормацию по данной продукции у Главного Менеджера/Складовщика ' \
                   '\n(это же касается и вопросов: подходит ли чей-либо другой инструмент ' \
                   '\nи импланты к нашим имплантам и инструментам).' \
                   '\n 3) Можно сообщить, что товар более дорогих производителей (Синтез)' \
                   '\nможно использовать для прод. «Остеомед-М».' \
                   '\nВНИМАНИЕ! Единолично такую информацию лучше не предоставлять во избежание спорных ситуаций,' \
                   '\nстоит запросить письменное подтверждение опытных коллег.'
        answer3_2 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer3_2, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema16(QWidget):
    """Answer window; NOTE(review): title/text duplicate Tema4 (shipment
    before payment) although the Ans4 button is about product selection —
    confirm which content was intended."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Отгрузка до оплаты, имеется просрочка'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Несмотря на то, что из-за неоплаты старых счетов партнёр может находится в «стоп-листе»,' \
                   '\nоплаченный товар должен быть передан покупателю.' \
                   '\nНо необходимо уведомить партнёра о том,' \
                   '\nчто данная отгрузка не означает отгрузку неоплаченных и новых счетов до того момента,' \
                   '\nпока отсрочка не будет погашена.'
        answer3_2 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer3_2, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Ans5(QWidget):
    """Menu window: instrument-rental questions."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        button_color = Parameters.Color().whatcolor()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Вопросы по аренде инструмента')
        self.setFixedSize(window_w1 * 0.625, window_h1 * 0.7407407407407407)
        topcenter1 = QPushButton("Партнёр хочет заказать продление аренды инструмента."
                                 "\nНо при этом у него есть долги по счетам/проблемы"
                                 "\nс регулярными оплатами/иные невыполненные финансовые обязательства.", self)
        topcenter1.setStyleSheet("background-color: {0}".format(button_color))
        topcenter1.clicked.connect(self.show_tema2)
        topcenter2 = QPushButton("Срочно (день в день) нужен инструмент под операцию,"
                                 "\nно его нет на складе компании в Москве.", self)
        topcenter2.setStyleSheet("background-color: {0}".format(button_color))
        topcenter2.clicked.connect(self.show_tema5)
        topcenter3 = QPushButton("Партнёр сообщает, что с Главным Менеджером было оговорено"
                                 "\nдобавление инструмента в счёт за импланты "
                                 "\n(т.н. равномерное «размазывание» стоимости инструмента по всему счёту).", self)
        topcenter3.setStyleSheet("background-color: {0}".format(button_color))
        topcenter3.clicked.connect(self.show_tema20)
        vbox = QVBoxLayout()
        vbox.addWidget(topcenter1)
        vbox.addWidget(topcenter2)
        vbox.addWidget(topcenter3)
        hbox = QHBoxLayout()
        hbox.addLayout(vbox)
        self.setLayout(hbox)

    def show_tema2(self):
        """Open the Tema2 menu window."""
        self.w51 = Tema2()  # reference kept to avoid garbage collection
        self.w51.show()

    def show_tema5(self):
        """Open the Tema5 answer window."""
        self.w52 = Tema5()  # reference kept to avoid garbage collection
        self.w52.show()

    def show_tema20(self):
        """Open the Tema20 answer window."""
        self.w53 = Tema20()  # reference kept to avoid garbage collection
        self.w53.show()
class Tema2(QWidget):
    """Menu window: rental extension with outstanding debts."""

    def __init__(self):
        button_color = Parameters.Color().whatcolor()
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowTitle('Продление аренды при имеющихся долгах')
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.6481481481481481)
        topleft1 = QPushButton("Если у партнёра имеются неиспользованные штифты для инструмента", self)
        topleft1.setStyleSheet("background-color: {0}".format(button_color))
        topleft1.clicked.connect(self.show_tema21)
        topleft2 = QPushButton("Если у партнёра не имеются неиспользованные штифты для инструмента", self)
        topleft2.setStyleSheet("background-color: {0}".format(button_color))
        topleft2.clicked.connect(self.show_tema22)
        hbox = QHBoxLayout()
        vbox = QVBoxLayout()
        vbox.addWidget(topleft1)
        vbox.addWidget(topleft2)
        hbox.addLayout(vbox)
        self.setLayout(hbox)

    def show_tema21(self):
        """Open the Tema21 answer window."""
        self.w11 = Tema21()  # reference kept to avoid garbage collection
        self.w11.show()

    def show_tema22(self):
        """Open the Tema22 answer window."""
        self.w12 = Tema22()  # reference kept to avoid garbage collection
        self.w12.show()
class Tema21(QWidget):
    """Answer window: partner has unused pins for the instrument."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Есть неиспользованные штифты'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Если Партнёр обладает неиспользованными штифтами,' \
                   '\nто продление инструмента можно согласовать. ' \
                   '\nВместе с этим необходимо запросить ГП' \
                   '\nоб оплате долгов/выполнении просроченных финансовых обязательств.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema22(QWidget):
    """Answer window: partner has no unused pins for the instrument."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Нет неиспользованных штифтов'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Если Партнёр не обладает неиспользованными штифтами,' \
                   '\nто в продлении инструмента необходимо отказать.' \
                   '\nИсключение - отдельный запрос важной для нас больницы.'
        answer1_4 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer1_4, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema5(QWidget):
    """Answer window: instrument urgently needed but not in stock."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Срочно нужен инструмент, но его нет на складе'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Последовательность действий следующая:' \
                   '\n 1) Уточнить в каких больницах/у каких дилеров Москвы' \
                   '\nнаходится необходимый инструмент' \
                   '\n(поиск ведётся по дилерам и больницам с помощью информации из 1С).' \
                   '\n 2) Попробовать согласовать его передачу на 1 срочную операцию.' \
                   '\n 3) В случае, если свободного инструмента нет,' \
                   '\nпопробовать подыскать аналог инструмента Синтеза и т.д.' \
                   '\n 4) Если нет абсолютно никаких вариантов -' \
                   '\nто необходимо сообщить об этом партнёру и предложить перенести операцию.' \
                   '\nВыяснить и сообщить партнёру, когда инструмент будет на складе в Москве.'
        answer5_2 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer5_2, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema20(QWidget):
    """Answer window: instrument cost agreed to be added to the invoice."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Добавление инструмента оговорено'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Последовательность действий следующая:' \
                   '\n 1) Необходимо написать письмо партнёру (Главного Менеджера в копию)' \
                   '\nпо уточнению счёта на импланты' \
                   '\n(согласовать необходимое кол-во, артикулы) и комплектацию предполагаемого инструмента' \
                   '\n(комплектация выбирается КП из Ф/З и указывается в письме).' \
                   '\n 2) После этого ожидается согласование итогового счёта с партнёром,' \
                   '\nв котором уже сделана наценка за инструмент.' \
                   '\n 3) После согласования и оплаты – счёт отправляется на производство,' \
                   '\nсписок необходимого инструмента в формате Excel пересылается по почте в Рыбинск' \
                   '\n(используется документ внутреннего заказа).'
        answer5_3 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer5_3, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)

    def show_send(self):
        """Open the e-mail window; stage text/title for it via files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)

    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Ans6(QWidget):
    """Menu window: miscellaneous questions."""

    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        button_color = Parameters.Color().whatcolor()
        # Fixed: was `super(QWidget, self).__init__()`, which skips
        # QWidget's own initializer in the MRO.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.setWindowIcon(QIcon('logo.png'))
        self.setWindowTitle('Прочие вопросы')
        self.setFixedSize(window_w1 * 0.625, window_h1 * 0.7407407407407407)
        topcenter1 = QPushButton("Партнёр просит уточнить, можно ли делать МРТ,"
                                 "\nхочет сертификат на разрешение МРТ исследования.", self)
        topcenter1.setStyleSheet("background-color: {0}".format(button_color))
        topcenter1.clicked.connect(self.show_tema3)
        topcenter2 = QPushButton("Партнёр убеждает о сторонних устных договорённостях с Главным Менеджером,"
                                 "\nдающих ему некоторое преимущество в спорных ситуациях"
                                 "\n(аренда/срочная отгрузка/другое).", self)
        topcenter2.setStyleSheet("background-color: {0}".format(button_color))
        topcenter2.clicked.connect(self.show_tema7)
        topcenter3 = QPushButton("Партнёр просит сообщить реальное наличие товара на складе для срочной операции"
                                 "\n(файл наличия на складах ему ранее с утра отправлялся),"
                                 "\nу него есть подозрения относительно актуальности информации.", self)
        topcenter3.setStyleSheet("background-color: {0}".format(button_color))
        topcenter3.clicked.connect(self.show_tema19)
        vbox = QVBoxLayout()
        vbox.addWidget(topcenter1)
        vbox.addWidget(topcenter2)
        vbox.addWidget(topcenter3)
        hbox = QHBoxLayout()
        hbox.addLayout(vbox)
        self.setLayout(hbox)

    def show_tema3(self):
        """Open the Tema3 answer window."""
        self.w61 = Tema3()  # reference kept to avoid garbage collection
        self.w61.show()

    def show_tema7(self):
        """Open the Tema7 answer window."""
        self.w62 = Tema7()  # reference kept to avoid garbage collection
        self.w62.show()

    def show_tema19(self):
        """Open the Tema19 answer window."""
        self.w63 = Tema19()  # reference kept to avoid garbage collection
        self.w63.show()
class Tema3(QWidget):
    """Answer window 'МРТ': the MRI-certificate reply with send/copy actions."""
    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # BUG FIX: super(QWidget, self).__init__() skipped QWidget.__init__
        # in the MRO; use the standard zero-argument form.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'МРТ'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Необходимо предоставить Партнёру подобный сертификат,' \
                   '\nкоторый был составлен и выслан КП в 2018 г.' \
                   '\nВ нём содержится подробная информация о том,' \
                   '\nчто с имплантатами ООО "ОСТЕОМЕД-М" можно проводить МРТ.'
        answer6_1 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer6_1, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
    def show_send(self):
        """Open the send-mail window; pass text/title via windows-1251 files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)
    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema7(QWidget):
    """Answer window 'Есть договоренность': handling claimed verbal agreements."""
    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # BUG FIX: super(QWidget, self).__init__() skipped QWidget.__init__
        # in the MRO; use the standard zero-argument form.
        super().__init__()
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Есть договоренность'
        self.setWindowTitle(self.title)
        self.setWindowIcon(QIcon('logo.png'))
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Последовательность действий следующая' \
                   '\n 1) Узнать у Главного Менеджера, правда ли это. ' \
                   'Написать, позвонить, другим способом связаться с ним,' \
                   '\nпостараться выяснить о чём именно была договорённость.' \
                   '\n 2) В случае, если Главный Менеджер недоступен,' \
                   '\nпопросить партнёра предоставить письменное подтверждение' \
                   '\nмежду руководителями о данной договоренности' \
                   '\n(что бы иметь на руках какое-то подтверждение для руководства при запросе обоснования действий).'
        answer6_2 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer6_2, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
    def show_send(self):
        """Open the send-mail window; pass text/title via windows-1251 files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)
    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
class Tema19(QWidget):
    """Answer window 'Реальное наличие товара': stock-availability reply."""
    def __init__(self):
        window_w1 = Parameters.ParameterSize().ww()
        window_h1 = Parameters.ParameterSize().wh()
        # BUG FIX: super(QWidget, self).__init__() skipped QWidget.__init__
        # in the MRO; use the standard zero-argument form.
        super().__init__()
        self.setWindowIcon(QIcon('logo.png'))
        self.setFont(QFont('Century Gothic', 10))
        self.title = 'Реальное наличие товара'
        self.setWindowTitle(self.title)
        self.setFixedSize(window_w1 * 0.46875, window_h1 * 0.4259259259259259)
        self.txt = 'Последовательность действий следующая:' \
                   '\n 1) Если это небольшой кол-во продукции – можно проверить наличие на складе в Москве.' \
                   '\n 2) Если это большое кол-во продукции (от 7-10 поз.) – говорим,' \
                   '\nчто информация актуальна и верна, что необходимо пользоваться ею для заказа.'
        answer6_3 = QLabel(self.txt, self)
        btn = SendEmailButton.button(self)
        btn.clicked.connect(self.show_send)
        copy_button = CopyButton.button(self)
        copy_button.clicked.connect(self.copy)
        hbox = QHBoxLayout()
        hbox.addWidget(btn)
        hbox.addWidget(copy_button)
        vbox = QVBoxLayout()
        vbox.addWidget(answer6_3, alignment=QtCore.Qt.AlignHCenter)
        vbox.addLayout(hbox)
        self.setLayout(vbox)
    def show_send(self):
        """Open the send-mail window; pass text/title via windows-1251 files."""
        self.send_mail = SendEmailWindow()
        self.send_mail.show()
        with open("Text.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.txt)
        with open("Title.txt", "w", encoding='windows-1251') as textfile:
            textfile.write(self.title)
    def copy(self):
        """Copy the answer text to the clipboard."""
        pyperclip.copy(self.txt)
|
# Generated by Django 2.1.2 on 2019-01-30 08:39
from django.db import migrations
class Migration(migrations.Migration):
    # Drops the ProfileUpdateRequest model. The three many-to-many fields
    # must be removed before DeleteModel, hence the operation order.

    dependencies = [
        ('extrequests', '0001_initial'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='profileupdaterequest',
            name='domains',
        ),
        migrations.RemoveField(
            model_name='profileupdaterequest',
            name='languages',
        ),
        migrations.RemoveField(
            model_name='profileupdaterequest',
            name='lessons',
        ),
        migrations.DeleteModel(
            name='ProfileUpdateRequest',
        ),
    ]
|
from __future__ import annotations
import pytest
from pre_commit_hooks.detect_private_key import main
# Each entry is (file contents, expected return value from main):
# 1 = private-key material detected, 0 = clean.
TESTS = (
    (b'-----BEGIN RSA PRIVATE KEY-----', 1),
    (b'-----BEGIN DSA PRIVATE KEY-----', 1),
    (b'-----BEGIN EC PRIVATE KEY-----', 1),
    (b'-----BEGIN OPENSSH PRIVATE KEY-----', 1),
    (b'PuTTY-User-Key-File-2: ssh-rsa', 1),
    (b'---- BEGIN SSH2 ENCRYPTED PRIVATE KEY ----', 1),
    (b'-----BEGIN ENCRYPTED PRIVATE KEY-----', 1),
    (b'-----BEGIN OpenVPN Static key V1-----', 1),
    (b'ssh-rsa DATA', 0),
    (b'ssh-dsa DATA', 0),
    # Some arbitrary binary data
    (b'\xa2\xf1\x93\x12', 0),
)
@pytest.mark.parametrize(('input_s', 'expected_retval'), TESTS)
def test_main(input_s, expected_retval, tmpdir):
    """Write the sample bytes to a temp file and check main's exit code."""
    path = tmpdir.join('file.txt')
    path.write_binary(input_s)
    assert main([str(path)]) == expected_retval
|
import datetime
import os, subprocess
from hashlib import sha256
from flask import Flask, redirect, url_for, render_template, request, jsonify
from werkzeug.utils import secure_filename
import random
import re
import json
from eventrecorder import EventRecorder
from Speakerbot import Speakerbot
from speaker_db import SpeakerDB
from speakerlib import *
from speakonomy import Speakonomy
from macros import Macro
from util.words import parse_and_fill_mad_lib
from config import config
print "loading speakerbot"
sb = Speakerbot()
print "loading speakonomy"
speakonomy = Speakonomy(sb)
print "loading speakerdb"
db = SpeakerDB()
print "initializing event recorder"
evr = EventRecorder(db=db)
def stub_interrogator(*args, **kwargs):
    """No-op interrogator hook: approves every request unconditionally."""
    del args, kwargs  # accepted only to satisfy the hook signature
    return True
def stub_mangler(*args, **kwargs):
    """No-op mangler hook: hands the call arguments back untouched."""
    passthrough = (args, kwargs)
    return passthrough
sb.attach_mangler("say", evr.censor)
sb.attach_listener("say", evr.queue_speech_for_tweet)
sb.attach_listener("say", evr.record_utterance)
sb.attach_listener("say", evr.post_to_slack)
sb.attach_listener("say", speakonomy.sell_saying)
sb.attach_listener("play", speakonomy.sell_sound)
sb.attach_listener("play", evr.record_sound_event)
sb.attach_interrogator("play", speakonomy.check_affordability)
#sb.attach_mangler("say_classy", stub_mangler)
app = Flask(__name__)
app.config['DEBUG'] = config.get("debug", True)
app.config['UPLOAD_FOLDER'] = os.path.relpath('sounds')
print "I'm ready"
@app.context_processor
def inject_global_params():
    """Expose the economy, config and current balance to every template."""
    return dict(speakonomy=speakonomy, config=config, current_speakerbuck_balance=speakonomy.get_speakerbuck_balance())
@app.route('/')
@app.route('/home/<image>')
def home(image=None):
    """Main page: sound board plus a (random or requested) image with its
    votes, NSFW flag, comments and the current economy state."""
    message = request.args.get('message', None)
    if not image:
        image = db.get_random_image()
    last_withdrawal_time, speakerbucks_per_minute = speakonomy.get_last_withdrawal_time(include_sbpm=True)
    return render_template(
        "home.html",
        sounds=sb.load_sounds(score_cutoff=-50),
        image=image,
        message=message,
        votes=db.get_image_votes(image),
        nsfw=db.check_nsfw(image),
        comments=db.get_image_comments(image),
        last_withdrawal_time=last_withdrawal_time,
        speakerbucks_per_minute=speakerbucks_per_minute,
        random_title="The !adjective !noun !adverb !verb the !noun."
    )
@app.route('/upload', methods=["GET", "POST"])
def upload_sound():
    """Upload form (GET) / accept a new sound file (POST).

    Rejects duplicate names and clips longer than 15 seconds; otherwise
    prices the sound, registers it in the DB and volume-normalizes the
    file with mp3gain before redirecting home.
    """
    #TODO: Put the sounds into their own class
    message = None
    if request.method == 'POST':
        name = request.form.get('sound_name')
        f = request.files['file']
        filename = secure_filename(f.filename)
        sound_fp = os.path.join(app.config['UPLOAD_FOLDER'], filename)
        if os.path.exists(sound_fp):
            message = 'That sound name already exists.'
        else:
            f.save(sound_fp)
            sound_seconds = get_mp3_seconds(sound_fp)
            if sound_seconds > 15:
                # NOTE(review): the too-long file stays on disk here -- confirm
                # whether it should be deleted on rejection.
                message = 'This sound is {} seconds! Must be 15 seconds or less.'.format(sound_seconds)
            else:
                base_cost = speakonomy.get_sound_base_cost(sound_fp)
                sb.add_sound_to_db(name, filename, base_cost)
                full_sound_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), sound_fp)
                subprocess.call(['mp3gain','-r', '{}'.format(full_sound_path)])
                return redirect(url_for("home", message="Your sound's been uploaded. Keep the change, ya filthy animal."))
    return render_template(
        "upload.html",
        message=message,
    )
@app.route('/theme-songs', methods=["GET", "POST"])
def theme_songs():
    """Assign theme songs to people.

    POST checks a validator token (config['unfucktion'] -- presumably an
    anti-bot challenge; confirm) and refuses songs already claimed.
    """
    message = request.args.get('message', None)
    if request.method == 'POST':
        if request.form['user'] and request.form['song']:
            if request.form['validator'] != config['unfucktion'](request.form['song']):
                message = 'Validation failed'
            elif db.get_person_for_song(request.form['song']):
                message = 'That song is already assigned to a user.'
            else:
                db.execute("update person set theme_song=? where name=?", [request.form['song'], request.form['user']])
        else:
            message = 'Invalid parameters'
        return redirect(url_for("theme_songs", message=message))
    return render_template(
        "themesongs.html",
        sounds=sorted(sb.load_sounds(base_cost_cutoff=1000).keys()),
        people=db.get_people(),
        message=message,
    )
@app.route('/spadmin', methods=["GET", "POST"])
def admin():
    """Minimal admin page: add or remove people, then list everyone."""
    if request.form.get('person-name'):
        db.add_person(request.form['person-name'])
        return redirect(url_for("admin"))
    elif request.form.get('delete-person'):
        db.remove_person(request.form['delete-person'])
        return redirect(url_for("admin"))
    return render_template(
        "admin.html",
        people=db.get_people(),
    )
@app.route('/image/<image>/upboat')
def upvote_image(image):
    """Increment an image's vote count; voting pays out 5 speakerbucks."""
    votes = db.get_image_votes(image)
    votes += 1
    db.execute("update images set votes=? where file_name=?", [votes, image])
    speakonomy.deposit_funds(5)
    return jsonify(votes=votes, speakerbuck_balance=speakonomy.get_speakerbuck_balance())
@app.route('/image/<image>/downgoat')
def downvote_image(image):
    """Decrement an image's vote count; still pays out 5 speakerbucks."""
    votes = db.get_image_votes(image)
    votes -= 1
    db.execute("update images set votes=? where file_name=?", [votes, image])
    speakonomy.deposit_funds(5)
    return jsonify(votes=votes, speakerbuck_balance=speakonomy.get_speakerbuck_balance())
@app.route('/new-image')
def new_image():
    """Return a fresh random image and its vote count as JSON."""
    image = db.get_random_image()
    return jsonify(image=image, votes=db.get_image_votes(image))
@app.route('/downvote-sound', methods=["POST"])
def downvote_sound():
    """Record a downvote for the named sound, then bounce back home."""
    if request.form.get('downvote_sound'):
        sound_name = request.form.get('downvote_sound')
        db.execute("update sounds set downvotes=downvotes+1 where name=?", [sound_name])
        return redirect(url_for("home", message="Thank you for downvoting, you little twerp."))
    # NOTE(review): no return value when the form field is missing -- Flask
    # would raise on the implicit None; confirm whether that is intended.
@app.route('/image/<image>/nsfw')
def toggle_nsfw(image):
    """Flip the image's NSFW flag, then re-render it on the home page."""
    nsfw_flag = 1
    if db.check_nsfw(image):
        nsfw_flag = 0
    db.execute("update images set nsfw=? where file_name=?", [nsfw_flag, image])
    return redirect(url_for("home", image=image))
@app.route('/comment/<image>', methods=["POST"])
def comment_image(image):
    """Attach a non-empty comment to an image; commenting pays 10 bucks."""
    comment = request.form["image-comment"]
    if comment.strip() != '':
        db.add_comment(image, comment)
        speakonomy.deposit_funds(10)
    return jsonify(speakerbuck_balance=speakonomy.get_speakerbuck_balance())
@app.route('/images/nsfw')
def nsfw_images():
    """Gallery of every image flagged NSFW."""
    images = db.get_nsfw_images()
    return render_template("images.html", images=images, speakonomy=speakonomy)
@app.route('/images/top')
def top_images():
    """Gallery of the highest-voted images (?num=, default 25)."""
    num_images = request.args.get("num", 25)
    images = db.get_top_images(num_images=num_images)
    return render_template("images.html", images=images, speakonomy=speakonomy)
@app.route('/images/worst')
def worst_images():
    """Gallery of the lowest-voted images (?num=, default 25)."""
    num_images = request.args.get("num", 25)
    images = db.get_top_images(num_images=num_images, order="asc")
    return render_template("images.html", images=images, speakonomy=speakonomy)
@app.route('/spinstats')
def spinstats():
    """Wheel-spin statistics dashboard: all-time and today's aggregates."""
    now = datetime.datetime.now()
    # Midnight of the current local day as a Unix timestamp string.
    midnight = datetime.datetime(now.year, now.month, now.day).strftime("%s")
    today_aggregate_stats = db.get_aggregate_wager_stats(start=midnight)
    return render_template("spinstats.html",
        aggregate_stats=db.get_aggregate_wager_stats(),
        today_aggregate_stats=today_aggregate_stats,
        recent_spins=db.get_wager_history(20),
        number_occurence=db.get_number_occurence(),
        multiplier_occurence=db.get_multiplier_occurence(),
        wagers_and_outcomes=db.get_wagers_and_outcomes_by_day(),
        wagers_by_outcome=db.get_wagers_by_outcome(),
        lucky_numbers=db.get_lucky_numbers(),
        number_cooccurrence=db.get_lucky_and_chosen_cooccurence()
    )
@app.route('/play_sound/<sound_name>')
def play_sound(sound_name):
    """Play a sound by name and report the new balance as JSON.

    Easter egg: 'rebecca-black' only plays on Fridays (weekday 4) and then
    unlocks 5 minutes of free plays; any other day the request is swapped
    for a random sound.
    """
    if sound_name == "rebecca-black" and datetime.datetime.today().weekday() != 4:
        sound_name = random.choice(sb.sounds.keys())
    sb.play(sound_name)
    if sound_name == "rebecca-black":
        speakonomy.set_free_play_timeout(minutes=5)
        sb.say("It's Friday. Friday. So all sounds are free for the next 5 minutes.")
    return jsonify(speakerbuck_balance=speakonomy.get_speakerbuck_balance(),
        played_sound=sound_name,
        played_sound_cost=sb.sounds[sound_name].cost
    )
@app.route('/say/', methods=["POST", "GET"])
@app.route('/say/<text>')
def say(text=None):
    """Speak the given text (path segment, query or form parameter).

    Empty or >100-char input silently redirects home. Easter egg: texts
    mentioning 'george'/'jorge' are rewritten, half of the time, to a
    random teammate name.
    """
    if not text:
        text = request.args.get('speech-text', None) or request.form.get("speech-text", None)
    if not text or len(text) > 100:
        return redirect(url_for("home"))
    if request.args.get('record_utterance', "false") == "true" or request.form.get('record_utterance', "false") == "true":
        record_utterance = True
    else:
        record_utterance = False
    if ('george' in text.lower() or 'jorge' in text.lower()) and random.randint(1,2) == 1:
        text = re.sub('jorge|jorj', 'george', text, flags=re.I)
        text = re.sub('george', lambda x:random.choice(['alejandro','alex','dickbutt','eric','george','gianni','greg','josh','matt','ross','tim','trevor','tuan']), text, flags=re.I)
    sb.say(text, record_utterance=record_utterance)
    return jsonify(speakerbuck_balance=speakonomy.get_speakerbuck_balance())
@app.route('/play-macro/<macro_name>')
def play_macro(macro_name):
    """Trigger a stored macro through the '!macro <name>' speech command."""
    #TODO: Full ajax for macros
    say('!macro {}'.format(macro_name))
    return redirect('/macros')
@app.route('/macros')
def macros():
    """List every stored macro (name + JSON manifest) for playback."""
    macros_list = []
    # Presumably refreshes sb's sound cache before the Macros resolve
    # their sounds -- confirm against Macro's implementation.
    sb.load_sounds()
    results = db.execute("SELECT name, manifest FROM macros").fetchall()
    for result in results:
        macros_list.append(Macro(sb, result['name'], result['manifest']))
    return render_template(
        "macros.html",
        macros=macros_list
    )
@app.route('/macros/create', methods=["GET", "POST"])
def create_macro():
    """Render the macro-maker form (GET) or persist a new macro (POST).

    POST stores the macro name plus the ordered sound list as a JSON
    manifest in the macros table, then redirects back to /macros.
    """
    message = request.args.get('message', None)
    if request.method == 'POST':
        macro_name = request.form.get('name')
        macro_sounds = json.dumps(request.form.getlist('macro_sound[]'))
        if macro_name and macro_sounds:
            # NOTE(review): validation is deliberately disabled by the
            # `1==2 and ...` guard, so the else branch always runs.
            if 1==2 and request.form['validator'] != config['unfucktion'](request.form['song']):
                message = 'Validation failed'
            else:
                db.execute("INSERT INTO macros (name, manifest) VALUES (?, ?)", [macro_name, macro_sounds])
        else:
            message = 'Invalid parameters'
        return redirect(url_for("macros", message=message))
    # BUG FIX: removed an unreachable second `if request.method == 'POST'`
    # block (the branch above always returns) that referenced the undefined
    # name `macro_components` and would have raised NameError if reached.
    sounds = sb.load_sounds()
    template_sounds = [sounds[k] for k in sorted(sounds.keys())]
    return render_template(
        "macromaker.html",
        sounds=template_sounds,
    )
def default_redirect():
    """Send non-AJAX callers back to the referrer (or home); AJAX callers
    just receive the string "true"."""
    if not request.is_xhr:
        redir = request.referrer or url_for("home")
        return redirect(redir)
    else:
        return "true"
if __name__ == '__main__':
    # Listen on all interfaces; debug mode comes from app.config above.
    app.run('0.0.0.0',port=8080)
|
#!/usr/bin/env python3
import math
# Numeric defaults shared by the helpers below.
EPSILON = 1e-5   # root-finding tolerance (solve / inverse)
DELTA = 1e-3     # step for the forward-difference derivative
SEGMENTS = 100   # default number of line segments / integration bins
LOW_LIM = -1     # initial search bracket for inverse()
HIGH_LIM = 1
DIVISOR = 2      # midpoint divisor
ENLARGE = 2      # bracket growth factor in inverse()
def plot_func(graph, f, x0, x1, num_of_segments=100, c='black'):
    """Draw f over [x0, x1] as num_of_segments straight segments in color c.

    graph only needs a plot_line((x, y), (x, y), color) method.
    (The default 100 mirrors the module-level SEGMENTS constant.)
    """
    step = (x1 - x0) / num_of_segments
    left = x0
    for _ in range(num_of_segments):
        right = left + step
        graph.plot_line((left, f(left)), (right, f(right)), c)
        left = right
def const_function(c):
    """Return the constant mathematical function f with f(x) = c.

    >>> const_function(2)(2)
    2
    >>> const_function(4)(2)
    4
    """
    return lambda x: c
def identity():
    """Return the identity function f(x) = x.

    >>> identity()(3)
    3
    """
    return lambda x: x
def sin_function():
    """Return the mathematical function f(x) = sin(x).

    >>> sin_function()(math.pi/2)
    1.0
    """
    # math.sin already has exactly the required signature and behaviour.
    return math.sin
def sum_functions(g, h):
    """Return the pointwise sum: f(x) = g(x) + h(x)."""
    return lambda x: g(x) + h(x)
def sub_functions(g, h):
    """Return the pointwise difference: f(x) = g(x) - h(x)."""
    return lambda x: g(x) - h(x)
def mul_functions(g, h):
    """Return the pointwise product: f(x) = g(x) * h(x)."""
    return lambda x: g(x) * h(x)
def div_functions(g, h):
    """Return the pointwise quotient: f(x) = g(x) / h(x)."""
    return lambda x: g(x) / h(x)
def solve(f, x0=-1000, x1=1000, epsilon=1e-5):
    """Find an approximate root of the monotonic function f in [x0, x1].

    Returns a point where |f| < epsilon, or None when f has the same sign
    at both endpoints. (The default 1e-5 mirrors the module EPSILON.)
    """
    for endpoint in (x0, x1):
        if abs(f(endpoint)) < epsilon:
            return endpoint
    if f(x0) * f(x1) >= 0:
        return None
    # Orient the bracket so f(neg_side) < 0 < f(pos_side), then bisect.
    neg_side, pos_side = (x0, x1) if f(x0) < f(x1) else (x1, x0)
    midpoint = (neg_side + pos_side) / 2
    while abs(f(midpoint)) >= epsilon:
        if f(midpoint) > 0:
            pos_side = midpoint
        else:
            neg_side = midpoint
        midpoint = (neg_side + pos_side) / 2
    return midpoint
def inverse(g, epsilon=1e-5):
    """Return an approximate inverse of the monotonic, continuous g.

    The returned f satisfies g(f(y)) ~= y within epsilon; the search
    bracket starts at [-1, 1] and doubles until it straddles the answer.
    (The default 1e-5 mirrors the module EPSILON.)
    """
    def f(y):
        shifted = sub_functions(g, const_function(y))
        lo, hi = -1, 1
        while shifted(lo) * shifted(hi) > 0:
            lo *= 2
            hi *= 2
        return solve(shifted, lo, hi, epsilon)
    return f
def compose(g, h):
    """Return the composition f(x) = g(h(x))."""
    return lambda x: g(h(x))
def derivative(g, delta=1e-3):
    """Return a forward-difference approximation of g'.

    (The default 1e-3 mirrors the module DELTA constant.)
    """
    def approx(x):
        return (g(x + delta) - g(x)) / delta
    return approx
def definite_integral(f, x0, x1, num_of_segments=100):
    """Approximate the integral of f over [x0, x1] by the midpoint rule.

    (The default 100 mirrors the module SEGMENTS constant.)

    >>> round(definite_integral(const_function(3),-2,3),6)
    15.0
    """
    step = (x1 - x0) / num_of_segments
    total = 0
    left = x0
    for _ in range(num_of_segments):
        right = left + step
        total += f((left + right) / 2) * step
        left = right
    return total
def integral_function(f, delta=0.01):
    """Return an antiderivative F of f with F(0) = 0.

    F(x) integrates f from 0 to x using ceil(|x| / delta) midpoint bins.
    """
    def F(x):
        if x == 0:
            return 0
        num_of_segments = int(math.ceil(math.fabs(x) / delta))
        if x > 0:
            return definite_integral(f, 0, x, num_of_segments)
        return -definite_integral(f, x, 0, num_of_segments)
    return F
def ex11_func_list():
    """Return the list of the eight functions required in question 13,
    each built only from the combinators defined above."""
    def f0():
        """return f s.t f(x) = 4"""
        return const_function(4)
    def f1():
        """return f s.t f(x) = 3-sin(x)"""
        return sub_functions(const_function(3),sin_function())
    def f2():
        """return f s.t f(x) = sin(x-2)"""
        return compose(sin_function(),(sub_functions(identity(),
                                                     const_function(2))))
    def f3():
        """return f s.t f(x) = 10/[2+sin(x)+x**2]"""
        inner1 = sum_functions(const_function(2),sin_function())
        inner2 = mul_functions(identity(),identity())
        return div_functions(const_function(10),sum_functions(inner1,inner2))
    def f4():
        """return f s.t f(x) = cos(x)/(sin(x)-2)"""
        # cos is obtained numerically as the derivative of sin.
        cos = derivative(sin_function())
        return div_functions(cos,sub_functions(sin_function(),
                                               const_function(2)))
    def f5():
        """return f s.t f(x) = -0.1*integral(0.3x**2+0.7x-1)"""
        a = mul_functions(const_function(0.3),mul_functions(identity(),
                                                            identity()))
        b = mul_functions(const_function(0.7),identity())
        c = const_function(1)
        # (a + b) - c == 0.3x**2 + 0.7x - 1
        integral = integral_function(sub_functions(sum_functions(a,b),c))
        return mul_functions(const_function(-0.1),integral)
    def f6():
        """return f s.t f(x) = [cos(sin(x))-0.3*cos(x)]*2"""
        trig_part = compose(derivative(sin_function()), sin_function())
        extra_part = mul_functions(const_function(0.3),
                                   derivative(sin_function()))
        return mul_functions(const_function(2),
                             sub_functions(trig_part, extra_part))
    def f7():
        """return f s.t f is the inverse function to g(x) = 2-x^3"""
        x_power_3 = mul_functions(mul_functions(identity(),identity()),
                                  identity())
        g = sub_functions(const_function(2),x_power_3)
        return inverse(g)
    func_list = [f0(),f1(),f2(),f3(),f4(),f5(),f6(),f7()]
    return func_list
# func that generates the figure in the ex description
def example_func(x):
    """Cubic sample curve used for the figure in the exercise description."""
    scaled = x / 5
    return scaled ** 3
if __name__ == "__main__":
import tkinter as tk
from ex11helper import Graph
# un-tag the following lines to activate the doctests
# import doctest
# doctest.testmod()
master = tk.Tk()
graph = Graph(master, -10, -10, 10, 10)
# un-tag the line below after implementation of plot_func
plot_func(graph,example_func,-10,10,SEGMENTS,'red')
color_arr = ['black', 'blue', 'red', 'green', 'brown', 'purple',
'dodger blue', 'orange']
# un-tag the lines below after implementation of ex11_func_list
for f in ex11_func_list():
plot_func(graph, f, -10, 10, SEGMENTS, color_arr[0])
color_arr.remove(color_arr[0])
master.mainloop()
|
import pygad as pg
import pygad.plotting
import matplotlib.pyplot as plt
import numpy as np
from scipy import stats
import utils
import glob
from multiprocessing import Pool
# Script path; used below to derive the output .png file name.
filename = __file__
def plot(args):
    """Render the ejection-temperature scatter maps for one halo.

    args is a (halo, definition) pair (Python 2 module; used with
    Pool.map below). Loads the latest snapshot for the halo, masks gas
    particles with a positive ejection temperature, and writes a two-panel
    figure (colored by ejection time, then by metallicity) to a .png named
    after this script, the halo and the definition.
    """
    halo = args[0]
    definition = args[1]
    print args
    # Highest available snapshot number for this halo.
    path = '/ptmp/mpa/naab/REFINED/%s/SF_X/4x-2phase/out/snap_%s_4x_???' % (halo, halo)
    max = int(sorted(glob.glob(path))[-1][-3:])
    s, h, g = pg.prepare_zoom('/ptmp/mpa/naab/REFINED/%s/SF_X/4x-2phase/out/snap_%s_4x_%s' % (halo, halo, max), gas_trace='/u/mihac/data/%s/4x-2phase/gastrace_%s' % (halo, definition), star_form=None)
    # Keep only gas that was ever ejected (positive ejection temperature).
    mask = np.max(s.gas['T_at_ejection'], axis=-1) > 0
    s_masked = s.gas[mask]
    f, ax = plt.subplots(2, figsize=utils.figsize)
    # Top panel: colored by ejection time.
    _, _, _, cbar = pg.plotting.scatter_map(np.log10(s_masked['cycle_r_max'].flatten()),
                                            np.log10(s_masked['T_at_ejection'].flatten()), s=s_masked,
                                            extent=[[.5, 3.5], [3, 8]], logscale=True,
                                            qty=s_masked['mass_at_ejection'].flatten(),
                                            colors='ejection_time.flatten()',
                                            colors_av=s_masked['mass_at_ejection'].flatten(),
                                            clogscale=False, clim=[2, s.cosmic_time()],
                                            zero_is_white=True, ax=ax[0])
    cbar_ax = cbar.ax
    cbar_ax.set_xlabel('Ejection time [Gyr]', fontsize=utils.axes_labelsize*.8)
    plt.setp(cbar_ax.get_xticklabels(), fontsize=utils.tick_labelsize*.8)
    ax[0].set_ylabel(r'$T_\mathrm{ejection}\ \mathrm{[K]}$', fontsize=utils.axes_labelsize)
    ax[0].tick_params(labelbottom='off')
    # Bottom panel: colored by metallicity in solar units.
    _, _, _, cbar = pg.plotting.scatter_map(np.log10(s_masked['cycle_r_max'].flatten()),
                                            np.log10(s_masked['T_at_ejection'].flatten()), s=s_masked,
                                            extent=[[.5, 3.5], [3, 8]], logscale=True,
                                            qty=s_masked['mass_at_ejection'].flatten(),
                                            colors=s_masked['metals_at_ejection'].flatten()/s_masked['mass_at_ejection'].flatten()/pg.solar.Z(),
                                            colors_av=s_masked['mass_at_ejection'].flatten(),
                                            clogscale=True, clim=[10**-1.5, 10**.5],
                                            zero_is_white=True, ax=ax[1])
    cbar_ax = cbar.ax
    cbar_ax.set_xlabel(r'$\log_{10} [ Z ]_\odot $', fontsize=utils.axes_labelsize*.8)
    plt.setp(cbar_ax.get_xticklabels(), fontsize=utils.tick_labelsize*.8)
    ax[1].set_xlabel(r'$\log_{10}\left(\mathrm{cycle}\ r_\mathrm{max}\ \mathrm{[kpc]}\right)$',
                     fontsize=utils.axes_labelsize)
    ax[1].set_ylabel(r'$\log_{10}\left(T_\mathrm{ejection}\ \mathrm{[K]}\right)$',
                     fontsize=utils.axes_labelsize)
    f.tight_layout()
    plt.subplots_adjust(top=0.93)
    f.suptitle('%s - %s' % (halo, definition))
    plt.savefig(filename.split("/")[-1][:-3] + '_' + halo + '_' + definition + ".png", bbox_inches='tight')
# Fan the (halo, definition) combinations out over 7 worker processes.
# NOTE(review): there is no `if __name__ == '__main__'` guard, so importing
# this module runs the whole pipeline; fine with fork-based multiprocessing
# on Linux, but it would break under the spawn start method -- confirm.
p = Pool(7)
p.map(plot, utils.combinations)
|
# JavaScript source fragments kept as Python strings, presumably injected
# into an embedded JS engine or web page by other modules -- confirm usage.
# NOTE(review): Factory.build runs eval() on the type name; safe only while
# `type` never comes from untrusted input.
FACTORY = '''
class Factory
{
}
Factory.build = function (type)
{
    return eval("new " + type + "()");
};
'''
# JS helpers for (de)serializing command objects to/from JSON payloads.
COMMON = '''
create_command_from_json = function (payload)
{
    let json = JSON.parse(payload);
    for(let key in json)
    {
        let command = Factory.build(key);
        if(command)
        {
            command.deserialize_json(json[key]);
        }
        return command;
    }
};
serialize_command_to_json = function (command)
{
    let json = {};
    json[command.get_type()] = {};
    command.serialize_json(json[command.get_type()]);
    return JSON.stringify(json);
};
clone_object = function (obj)
{
    let payload = serialize_command_to_json(obj);
    return create_command_from_json(payload);
};
'''
from plotly import graph_objects as go
import pandas as pd
import numpy as np
from scipy import stats
def _infer_strftime_format(data: pd.DatetimeIndex):
unique_index = np.unique(data.values)
unique_index.sort()
time_delta = unique_index[1:] - unique_index[:-1]
# get mode using scipy
td = stats.mode(time_delta)[0][0]
td = td.astype('timedelta64[m]')
day = td / np.timedelta64('1', 'D')
if day < 1:
return '%Y/%m/%d %H:%M:%S'
else:
return '%Y/%m/%d'
def net_value_line(net_value: pd.Series, color='#00477D', name='net value', fill=None):
    """Build a plotly line trace from a datetime-indexed net-value series."""
    fmt = _infer_strftime_format(net_value.index)
    labels = net_value.index.strftime(fmt)
    return go.Scatter(x=labels, y=net_value.values,
                      mode='lines', line_color=color,
                      name=name, fill=fill)
def returns_distribution(returns: 'pd.DataFrame | pd.Series'):
    """Build a plotly histogram trace of the return distribution."""
    # (Annotation fixed: `pd.DataFrame or pd.Series` evaluated to just
    # pd.DataFrame at runtime; a union is what was meant.)
    return go.Histogram(x=returns)
def entry_exit_dot(traded: pd.DataFrame, long=True, long_marker='#3283FE', short_marker='#FF0000'):
    """Build a plotly marker trace of executed deals.

    traded needs columns update_time, dealt_price, dealt_qty and
    order_direction; an optional remarks column is added to the hover
    text. `long` selects the trace name and marker colour.
    """
    traded_time = traded['update_time'].apply(lambda x: x.strftime('%Y/%m/%d %H:%M:%S'))
    traded_price = traded['dealt_price']
    if 'remarks' in traded.columns:
        hover_text = traded.apply(lambda x: 'deal time: {} <br>deal price: {} <br>deal qty: {} <br>'
                                            'direction: {} <br>remarks: {}'
                                  .format(x['update_time'], x['dealt_price'], x['dealt_qty'], x['order_direction'],
                                          x['remarks']),
                                  axis=1)
    else:
        hover_text = traded.apply(lambda x: 'deal time: {} <br>deal price: {} <br>deal qty: {} <br>'
                                            'direction: {}'
                                  .format(x['update_time'], x['dealt_price'], x['dealt_qty'], x['order_direction']),
                                  axis=1)
    # The long/short branches only differed in name and colour -- collapsed.
    name, marker = ('long', long_marker) if long else ('short', short_marker)
    return go.Scatter(x=traded_time, y=traded_price,
                      mode='markers', name=name, hovertext=hover_text,
                      marker_color=marker, marker_size=8)
def entrust_dot(traded: pd.DataFrame, long=True, long_marker='#30D5C8', short_marker='#FF0DA6'):
    """Build a plotly marker trace of entrusted (placed) orders.

    traded needs columns order_time, order_price, order_qty and
    order_direction; an optional remarks column is added to the hover
    text. `long` selects the trace name and marker colour.
    """
    order_time = traded['order_time'].apply(lambda x: x.strftime('%Y/%m/%d %H:%M:%S'))
    order_price = traded['order_price']
    if 'remarks' in traded.columns:
        hover_text = traded.apply(lambda x: 'order time: {} <br>order price: {} <br>order qty: {} <br>'
                                            'direction: {} <br>remarks: {}'
                                  .format(x['order_time'], x['order_price'], x['order_qty'], x['order_direction'],
                                          x['remarks']),
                                  axis=1)
    else:
        hover_text = traded.apply(lambda x: 'order time: {} <br>order price: {} <br>order qty: {} <br>'
                                            'direction: {}'
                                  .format(x['order_time'], x['order_price'], x['order_qty'], x['order_direction']),
                                  axis=1)
    # The long/short branches only differed in name and colour -- collapsed.
    name, marker = ('long entrust', long_marker) if long else ('short entrust', short_marker)
    return go.Scatter(x=order_time, y=order_price,
                      mode='markers', name=name, hovertext=hover_text,
                      marker_color=marker, marker_size=4)
|
"""
This module decorates the django templatize function to parse haml templates
before the translation utility extracts tags from it.
"""
try:
from django.utils.translation import trans_real
_django_available = True
except ImportError, e:
_django_available = False
import hamlpy
def decorate_templatize(func):
    """Wrap Django's templatize so HAML sources are compiled to HTML first.

    The returned function keeps templatize's (src, origin) signature and
    delegates to *func* with the compiled, re-encoded markup.
    """
    def templatize(src, origin=None):
        compiler = hamlpy.Compiler()
        rendered = compiler.process(src.decode('utf-8'))
        return func(rendered.encode('utf-8'), origin)
    return templatize
# Monkey-patch Django's message extractor only when Django imported cleanly.
if _django_available:
    trans_real.templatize = decorate_templatize(trans_real.templatize)
|
f = open("items.py")
items = {}
total = 0
for line in f:
info = line.split()
items[info[0]]= {"price":float(info[1]), "tax":float(info[2])}
price = float(info[1])*float(info[2])
total += price
print(items)
print("The total price is:", total, "€")
|
from configparser import ConfigParser
def config(filename='database.ini', section='postgresql'):
    """Read one section of an .ini file into a plain dict.

    Raises Exception when the requested section is missing from the file.
    """
    parser = ConfigParser()
    parser.read(filename)
    if not parser.has_section(section):
        raise Exception('Section {0} not found in {1} file'.format(section, filename))
    return dict(parser.items(section))
|
"""
Loads different file types for use in analysis
Created on 03.09.2019
"""
#Import required packages
import pickle
import os
import sys
import numpy as np
import pandas as pd
def readChannelRawData(filepath):
    """
    Read a channel recording stored as binary float64 values.
    Inputs:
        filepath: The path to the .dat file to be read
    Outputs:
        numpy array containing the samples from the .dat file (in uV)
    """
    # np.fromfile accepts a path directly; no explicit handle needed.
    return np.fromfile(filepath, np.float64)
def readTrigger(filepath):
    """
    Read a trigger channel stored as binary float64 values.
    Inputs:
        filepath: The path to the .dat file to be read
    Outputs:
        numpy array containing the trigger samples from the .dat file
    """
    with open(filepath, 'rb') as handle:
        return np.fromfile(handle, dtype=np.float64)
def loadLabviewOutput(folderPath, outputpathFolder, stim_timestamps):
    """
    Locate the Labview 'StimSeq' file in folderPath and parse the whisker
    sequence; when no such file exists, fall back to an all-zero array
    with one entry per stimulus timestamp.
    """
    LVfilepath = ''
    for candidate in os.listdir(folderPath):
        if 'StimSeq' in candidate:
            LVfilepath = folderPath + '/' + candidate  # last match wins
    if LVfilepath == '':
        return np.zeros(len(stim_timestamps))
    return readLabviewOuput(LVfilepath, outputpathFolder)
def readLabviewOuput(filepath, paradigm):
    """
    Parse the Labview stimulus-sequence file and map numeric codes to
    whisker labels.
    Inputs:
        filepath: The path to the .txt file to be read
        paradigm: paradigm name (kept for the commented-out PSC reshaping)
    Outputs:
        list of 'C1'/'C2'/'D1'/'B1' labels; unmapped codes pass through
    """
    with open(filepath) as stimseq:
        whiskers = np.loadtxt(stimseq)
    #if 'PSC' in paradigm:
    #    whiskers = np.asarray(whiskers)
    #    whiskers = whiskers.reshape((40,100))
    #    whiskers = whiskers[:,0:72]
    #    whiskers = whiskers.flatten()
    # Code -> whisker mapping for PSC1 (PSC2 swaps the 0/1 assignments).
    labels = []
    for code in whiskers:
        if code == 1:
            labels.append('C2')
        elif code == 0:
            labels.append('C1')
        elif code == 2:
            labels.append('D1')
        elif code == 3:
            labels.append('B1')
        else:
            labels.append(code)
    return labels
def loadCSDAssignment(p, filepath):
    """
    Read the user-created csv assigning electrode channels to cortical regions.
    Inputs:
        p: parameter dict; p['nr_of_shanks'] (1 or 2) selects the columns
        filepath: The path to the .csv file to be read
    Outputs:
        (shank1electrodes, shank1assignments) for one shank, or the four
        series (shank1..., shank2...) for two shanks, as pandas Series
    """
    table = pd.read_csv(filepath)
    n_shanks = p['nr_of_shanks']
    if n_shanks == 1:
        return table.shank1electrodes, table.shank1assignments
    if n_shanks == 2:
        return (table.shank1electrodes, table.shank1assignments,
                table.shank2electrodes, table.shank2assignments)
def headerID(p, paradigm):
if p['evoked_pre'] == 0.05 and p['evoked_post'] == 0.25 and p['binsize'] == 0.05 and paradigm == 'PSC' :
headers = np.vstack((['Electrode', 'C1_PSC_-50-0ms', 'C1_PSC_0-50ms', 'C1_PSC_50-100ms', 'C1_PSC_100-150ms', \
'C1_PSC_150-200ms', 'C1_PSC_200-250ms', 'C2_PSC_-50-0ms', 'C2_PSC_0-50ms', 'C2_PSC_50-100ms', \
'C2_PSC_100-150ms', 'C2_PSC_150-200ms', 'C2_PSC_200-250ms', 'B1_PSC_-50-0ms', 'B1_PSC_0-50ms', \
'B1_PSC_50-100ms', 'B1_PSC_100-150ms', 'B1_PSC_150-200ms', 'B1_PSC_200-250ms', \
'D1_PSC_-50-0ms', 'D1_PSC_0-50ms', 'D1_PSC_50-100ms', 'D1_PSC_100-150ms', 'D1_PSC_150-200ms', \
'D1_PSC_200-250ms', 'C1_PSCC_-50-0ms', 'C1_PSCC_0-50ms', 'C1_PSCC_50-100ms', 'C1_PSCC_100-150ms', \
'C1_PSCC_150-200ms', 'C1_PSCC_200-250ms', 'C2_PSCC_-50-0ms','C2_PSCC_0-50ms', 'C2_PSCC_50-100ms', \
'C2_PSCC_100-150ms', 'C2_PSCC_150-200ms', 'C2_PSC_200-250ms', 'B1_PSCC_-50-0ms', 'B1_PSCC_0-50ms', \
'B1_PSCC_50-100ms', 'B1_PSCC_100-150ms', 'B1_PSCC_150-200ms', 'B1_PSC_200-250ms', \
'D1_PSCC_-50-0ms', 'D1_PSCC_0-50ms', 'D1_PSCC_50-100ms', 'D1_PSCC_100-150ms', \
'D1_PSCC_150-200ms', 'D1_PSC_200-250ms']))
if p['evoked_pre'] == 0.05 and p['evoked_post'] == 0.25 and p['binsize'] == 0.05 and paradigm == 'CompPSC':
headers = np.vstack((['Electrode', 'LayerAssignment', 'C1_-50-0ms', 'C1_0-50ms', 'C1_50-100ms', 'C1_100-150ms', \
'C1_150-200ms', 'C1_200-250ms',\
'C2_-50-0ms', 'C2_0-50ms', 'C2_50-100ms', 'C2_100-150ms', 'C2_150-200ms', 'C2_200-250ms',\
'B1_-50-0ms', 'B1_0-50ms', 'B1_50-100ms', 'B1_100-150ms', 'B1_150-200ms', 'B1_200-250ms',\
'D1-50-0ms', 'D1_0-50ms', 'D1_50-100ms', 'D1_100-150ms', 'D1_150-200ms', 'D1_200-250ms']))
if p['evoked_pre'] == 0.05 and p['evoked_post'] == 0.20 and p['binsize'] == 0.05 and paradigm == 'Spikes':
headers = (['ElectrodeChannel', 'NegSpikes_-50-0ms', 'NegSpikes_0-50ms', 'NegSpikes_50-100ms', \
'NegSpikes_100-150ms', 'NegSpikes_150-200ms', 'PosSpikes_-50-0ms', \
'PosSpikes_0-50ms', 'PosSpikes_50-100ms', 'PosSpikes_100-150ms', \
'PosSpikes_150-200ms'])
if p['evoked_pre'] == 0.01 and p['evoked_post'] == 0.05 and p['binsize'] == 0.01 and paradigm == 'Spikes':
headers = (['ElectrodeChannel', 'NegSpikes_-10-0ms', 'NegSpikes_0-10ms', 'NegSpikes_10-20ms', \
'NegSpikes_20-30ms', 'NegSpikes_30-40ms', 'NegSpikes_40-50ms', 'PosSpikes_-10-0ms', \
'PosSpikes_0-10ms', 'PosSpikes_10-20ms', 'PosSpikes_20-30ms', \
'PosSpikes_30-40ms', 'PosSpikes_40-50ms']).flatten()
return headers
|
def solution(n, control):
    """Apply the movement commands in *control* to the start value *n*.

    'w' adds 1, 's' subtracts 1, 'd' adds 10, 'a' subtracts 10; any other
    character is ignored (same as the original if/elif chain).
    """
    deltas = {'w': 1, 's': -1, 'd': 10, 'a': -10}
    return n + sum(deltas.get(ch, 0) for ch in control)
import importlib
from urllib import parse
import tldextract
from core import client
from pyquery import PyQuery as pq
def get_host(url, path=''):
    """Return the scheme://netloc root of *url*, optionally joined with *path*."""
    parts = parse.urlparse(url)
    base = '{0}://{1}'.format(parts.scheme, parts.netloc)
    return parse.urljoin(base, path)
class KeyListStorage(dict):
    """A dict whose values are lists, with append-style upsert semantics.

    BUG fix: the original kept the first inserted sequence in its native type
    (tuple/set), so a later upsert concatenating a list with a tuple/set
    raised TypeError.  Values are now always normalized to lists.
    """

    def upsert(self, key, value):
        """Append *value* (scalar or sequence) to the list stored under *key*."""
        if isinstance(value, (list, set, tuple)):
            value = list(value)
        else:
            value = [value]
        existing = self.get(key)
        if existing:
            value = list(existing) + value
        self[key] = value

    def query(self, query):
        """Return the values of every key for which *query(key)* is truthy."""
        return [self[key] for key in self.keys() if query(key)]
class SpiderBase(object):
    """Base class for site-specific video-page spiders.

    Subclasses set `name` and implement the `extract_*` hooks; `info()`
    assembles the extracted fields into a single dict.
    """
    name = None

    def __init__(self, url):
        self.url = url
        self._html = None                 # lazily fetched page source (cached)
        self._images = KeyListStorage()

    def __repr__(self):
        return self.name

    @property
    def html(self):
        """Page HTML, fetched via `client.html` once on first access."""
        if self._html is None:
            self._html = client.html(self.url)
        return self._html

    @property
    def doc(self):
        """PyQuery document built from the cached HTML."""
        return pq(self.html or None)

    @property
    def ok(self):
        """True when the page was fetched and parsed into a non-empty doc."""
        return bool(self.doc)

    def info(self):
        """Download the page's images and return all extracted fields as a dict."""
        # BUG fix: extract_image_urls() was called twice (once for the image
        # download loop, once for the returned dict), repeating the extraction
        # work; extract once and reuse the result.
        image_urls = self.extract_image_urls()
        for url in image_urls or []:
            client.img(url)
        return dict(
            url=self.url,
            name=self.extract_name(),
            tags=self.extract_tags(),
            divas=self.extract_divas(),
            duration=self.extract_duration(),
            title=self.extract_title(),
            content=self.extract_content(),
            embed_code=self.extract_embed_code(),
            image_urls=image_urls,
        )

    def extract_name(self):
        """Return the spider's display name (defaults to the class `name`)."""
        return self.name

    def extract_tags(self):
        """Extract tags; must be implemented by subclasses."""
        raise NotImplementedError

    def extract_divas(self):
        """Extract performers; optional — defaults to an empty list."""
        return []

    def extract_duration(self):
        """Extract the video duration; must be implemented by subclasses."""
        raise NotImplementedError

    def extract_title(self):
        """Extract the page title; must be implemented by subclasses."""
        raise NotImplementedError

    def extract_content(self):
        """Extract the description/content; must be implemented by subclasses."""
        raise NotImplementedError

    def extract_embed_code(self):
        """Extract the player embed code; must be implemented by subclasses."""
        raise NotImplementedError

    def extract_image_urls(self):
        """Extract the image URLs; must be implemented by subclasses."""
        raise NotImplementedError
def import_spider(name):
    """Import the spider class for *name* from core.video.spiders.

    The module name keeps underscores ('foo-bar' -> 'foo_bar'); the class
    name is the CamelCased variant ('foo-bar' -> 'FooBar').  Returns None
    when either the module or the class is missing.
    """
    class_name = name.title().replace('-', '').replace('_', '')
    module_name = 'core.video.spiders.%s' % name.replace('-', '_')
    try:
        module = importlib.import_module(module_name)
    except ImportError:
        return None
    return getattr(module, class_name, None)
def get_spider(url):
    """Instantiate the spider matching *url*'s registered domain, or None."""
    domain = tldextract.extract(url).domain
    spider_class = import_spider(domain)
    if not spider_class:
        return None
    return spider_class(url)
|
import bs4 as bs
import urllib
from urllib import request
import os.path
import ast
from Python.help_me import *
class RecipeParse(object):
    """Base class that scrapes a recipe web page into structured fields.

    Subclasses are expected to fill in title/ingredients/instructions and to
    override __str__ with a markdown rendering of the recipe.
    """

    def __init__(self, url):
        """
        Generates generic RecipeParse object
        :param url: Input url
        :return: None
        """
        self.url = url
        self.soup = self.lets_get_soup()
        self.title = ''
        self.img_url = ''
        self.recipe_yield = ''
        self.ingredients = {}
        self.instructions = []

    def __str__(self):
        """
        Generates markdown styled string
        :return: str
        """
        # BUG fix: the original body was the bare statement "return: None",
        # which is a SyntaxError (the docstring's ':return:' tag leaked into
        # the code).  This base method is meant to be overridden by
        # site-specific subclasses, so make that explicit.
        raise NotImplementedError

    def lets_get_soup(self):
        """
        Gets BeautifulSoup object from url
        :return: False or BeautifulSoup object
        """
        try:
            # pretend to be Firefox
            req = urllib.request.Request(self.url,
                                         headers={'User-Agent': 'Mozilla/5.0'})
            with urllib.request.urlopen(req) as url_file:
                url_byte = url_file.read()
        except urllib.request.HTTPError as e:  # HTTP status code
            print(e.__str__())
            return False
        except urllib.request.URLError as e:  # General Error
            print(e.__str__())
            return False
        except OSError as e:  # Vague Error
            print(e.__str__())
            return False
        except Exception as e:  # Anything
            print(e.__str__())
            return False
        try:
            # NOTE(review): decoding latin1 then re-encoding utf-8 hands bytes
            # to BeautifulSoup — presumably to normalize odd encodings; kept.
            url_string = url_byte.decode(encoding='latin1').encode(
                encoding='utf-8')
        except UnicodeDecodeError as e:
            print(e.__str__())
            return False
        except Exception as e:
            print(e.__str__())
            return False
        return bs.BeautifulSoup(url_string, "html.parser")

    def make_markdown(self):
        """
        Creates and writes markdown styled recipe to a file
        :return: True or IOError is raised
        """
        new_file = ''
        directory = os.path.dirname(os.path.dirname(__file__)) + "/Recipes/"
        if not os.path.exists(directory):
            os.makedirs(directory)
        try:
            # Strip non-ASCII characters so the title is a safe file name
            self.title = ''.join(c for c in self.title if 0 < ord(c) < 127)
            if os.path.isfile(directory + self.title + ".md"):
                raise FileExistsError
            x = str(directory + self.title + ".md")
            new_file = open(x, "w")
            new_file.write(self.__str__())
        except FileExistsError:
            raise FileExistsError(directory + self.title + ".md")
        except IOError:
            raise IOError
        except Exception:
            # BUG fix: the original bare "except: raise Exception" replaced
            # the real error with an empty Exception, losing the traceback;
            # re-raise the original exception instead.
            raise
        finally:
            if new_file:
                try:
                    new_file.close()
                except IOError:
                    raise IOError
                except Exception:
                    raise
        return True
|
import argparse
import random
from progress.bar import Bar
from mechanics import Mechanics
import utils
# Command-line interface for the field-rescoring tool.
parser = argparse.ArgumentParser(description='Rescore fields')
parser.add_argument('--ai', dest='ai', choices=["random", "sklearn_mlp", "torch"],
                    required=True,
                    help='AI player that should play')
parser.add_argument('--ai-data', dest='ai_data',
                    help='Saved data for ai, such as trained models')
parser.add_argument('--score', dest='score', choices=[None, "future", "neighbor"],
                    help='Rescoring strategy for games.')
parser.add_argument('--future-len', dest='future_len', type=int, default=5,
                    help='How many moves to look for in the future, only active if score is "future"')
parser.add_argument('--future-replay', dest='future_replay', type=int, default=5,
                    help='How many random games in the future, only active if score is "future"')
parser.add_argument('--dump', dest="dump_path", required=True)
parser.add_argument('--fields', dest="fields", required=True,
                    help="File containing list of start fields for rescoring")
parser.add_argument('--nr-fields', dest="nr_fields", type=int, default=10_000,
                    help="How many fields should be rescored")
#parser.add_argument('--nr-games', dest="nr_games", default=-1,
#                    help="Number of games to play, -1 means indefinetly.")
# NOTE: arguments are parsed at import time — this module is a script, not a library.
args = parser.parse_args()
# Game mechanics and the AI player under evaluation.
tetris = Mechanics()
ai_player = utils.get_ai_player(args.ai, tetris, args.ai_data)
class GameOverScore:
    """Sentinel score for a game that ended before the horizon.

    float(instance) yields the worst score seen so far (a shared class
    attribute, updated by aggregate_future_scores), so game-over outcomes
    always rank at the bottom.
    """
    worst_score = 0

    def __float__(self):
        # BUG fix: __float__ must return a real float; worst_score starts as
        # the int 0, and returning it made float(GameOverScore()) raise
        # TypeError ("__float__ returned non-float").
        return float(GameOverScore.worst_score)
def score_future(ai_player, input_field, nr_moves, nr_replay):
    """Estimate a field's value by replaying random futures.

    Plays `nr_replay` games of `nr_moves` moves from `input_field` using the
    AI's own placement policy, scoring the resulting field each time (or
    recording a GameOverScore sentinel when no piece fits).

    Returns (input_field, current model score, list of future scores).
    """
    baseline = ai_player.score_field(input_field)
    mech = ai_player.mechanics
    future_scores = []
    for _ in range(nr_replay):
        field = input_field.copy()
        for move_idx, piece in enumerate(mech.piece_stream()):
            if move_idx == nr_moves:
                # Horizon reached — score the field as it stands now.
                future_scores.append(ai_player.score_field(field))
                break
            if not mech.can_place_piece(field, piece, mech.start_placement):
                # Game over before the horizon.
                future_scores.append(GameOverScore())
                break
            placement, _ = ai_player.choose_placement(field, piece)
            field = mech.place_piece(field, piece, placement)
    return (input_field, baseline, future_scores)
def aggregate_future_scores(future_score_lists):
    """Collapse (field, old_score, future_scores) triples into one
    normalized score per field.

    :param future_score_lists: list of (field, old_score, new_scores) where
        new_scores may contain floats and GameOverScore sentinels.
    :return: list of (field, rescaled_mean_future_score) tuples.
    """
    # First pass: track the global minimum so GameOverScore sentinels (whose
    # float() reads GameOverScore.worst_score) convert to the worst score
    # seen so far.  NOTE: order-sensitive — float(s) here uses worst_score
    # as already updated by earlier iterations of this loop.
    for field, old_score, new_scores in future_score_lists:
        GameOverScore.worst_score = min(min(float(s) for s in new_scores), GameOverScore.worst_score)
    # Mean of each field's sampled future scores.
    new_scores_aggregated = [sum(float(s) for s in ns)/len(ns) for _, _, ns in future_score_lists]
    # Rescale so the new scores keep the same total mass as the old ones.
    # NOTE(review): raises ZeroDivisionError when the aggregated sum is 0 or
    # any ns is empty — presumably inputs are always non-empty; confirm.
    normalization_factor = (sum(os for _, os, _ in future_score_lists) /
                            sum(new_scores_aggregated))
    return [(field, new_score*normalization_factor) for (field, _, _), new_score in
            zip(future_score_lists, new_scores_aggregated)]
def score_neighbors(ai_player, input_field):
    """Score every one-piece successor of `input_field`.

    For each piece type, either records the AI's score of the field after
    its chosen placement, or a GameOverScore sentinel when the piece cannot
    be placed at the start position.

    Returns (input_field, current model score, list of neighbor scores).
    """
    mech = ai_player.mechanics
    baseline = ai_player.score_field(input_field)
    neighbor_scores = []
    for piece in mech.piece_types:
        if not mech.can_place_piece(input_field, piece, mech.start_placement):
            neighbor_scores.append(GameOverScore())
            continue
        placement, _ = ai_player.choose_placement(input_field, piece)
        successor = mech.place_piece(input_field, piece, placement)
        neighbor_scores.append(ai_player.score_field(successor))
    return (input_field, baseline, neighbor_scores)
# Load the candidate start fields (first whitespace-separated token per line)
# and sample the requested number of them for rescoring.
with open(args.fields, "r") as infile:
    fields = [utils.deserialize_field(line.split()[0]) for line in infile]
    fields = random.sample(fields, args.nr_fields)
print("Loaded fields")
if args.score in ["future", "neighbor"]:
    score_lists = []
    # Progress bar over the sampled fields; each field is rescored with the
    # strategy selected on the command line.
    for field in Bar("Playing", suffix='%(index)d/%(max)d %(eta)ds').iter(fields):
        if args.score == "future":
            score_lists.append(score_future(ai_player, field,
                                            args.future_len, args.future_replay))
        elif args.score == "neighbor":
            score_lists.append(score_neighbors(ai_player, field))
    new_scores = aggregate_future_scores(score_lists)
    # Append results: one "serialized_field score" pair per line.
    with open(args.dump_path, "a") as dump_file:
        for field, score in new_scores:
            dump_file.write(f"{utils.serialize_field(field)} {score}\n")
|
def who_win(c1, c2):
for x in range(3, -1, -1):
# 카운트 함수 사용 x
if c1[x] > c2[x]:
return 'A'
elif c1[x] < c2[x]:
return 'B'
else:
pass
return 'D'
# Read N test cases; each case is two lines: "count card card ..." for each
# child.  Tally the cards per picture and let who_win decide the round.
N = int(input())
for turn in range(N):
    tmp1 = list(map(int, input().split()))
    tmp2 = list(map(int, input().split()))
    n1, child1 = tmp1[0], tmp1[1:]
    n2, child2 = tmp2[0], tmp2[1:]
    # Per-picture card counts; index 0 is unused so pictures 1-4 index directly.
    p1 = [0]*5
    p2 = [0]*5
    # First child's cards.
    for i in range(n1):
        # child1[i] is the picture printed on the card, used directly as index.
        p1[child1[i]] += 1
    # Second child's cards.
    for i in range(n2):
        p2[child2[i]] += 1
    # Drop the unused slot 0 before comparing.
    print(who_win(p1[1:], p2[1:]))
'''
给定一个正整数 num,编写一个函数,如果 num 是一个完全平方数,则返回 True,否则返回 False。
说明:不要使用任何内置的库函数,如 sqrt。
示例 1:
输入:16
输出:True
示例 2:
输入:14
输出:False
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/valid-perfect-square
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
'''
class Solution:
    def isPerfectSquare(self, num: int) -> bool:
        """Return True when *num* is a perfect square, without math.sqrt.

        Binary-searches the smallest integer whose square is >= num, then
        checks whether it squares exactly to num.
        """
        lo, hi = 1, num
        while lo < hi:
            mid = (lo + hi) // 2
            if mid * mid < num:
                lo = mid + 1
            else:
                hi = mid
        return lo * lo == num
# Quick manual check when the module is run directly.
obj = Solution()
print(obj.isPerfectSquare(16))
|
from ..base import CowSay
class CowSayNsfw(CowSay):
# Plugin
def handleInitialize(self):
super(CowSayNsfw, self).handleInitialize()
self.registerCommand("dergsay", self.handleDragonSay).setDescription("Rawr.")
self.registerCommand("ponysay", self.handlePonySay).setDescription("mcd1992 is a faggot.")
self.registerCommand("sheepsay", self.handleSheepSay).setDescription("Baaa.")
for _, command in self.commands.items():
command.addParameter("message")
# CowSayNsfw
# Internal
def handleDragonSay(self, command, commandInvocation, message):
self.handleSay(command, commandInvocation, message, commandInvocation.fullArguments, "dragon-and-cow")
def handlePonySay(self, command, commandInvocation, message):
self.handleSay(command, commandInvocation, message, commandInvocation.fullArguments, "unipony-smaller")
def handleSheepSay(self, command, commandInvocation, message):
self.handleSay(command, commandInvocation, message, commandInvocation.fullArguments, "sodomized-sheep")
|
#coding=utf-8
# Simple TCP connection example (Python 2: print statement, byte-string sockets).
__author__ = 'xuxin'
import socket
s = socket.socket()
s.connect(('192.168.1.153',7000)) # must match the server program's IP address and port
s.send('hello is me')
# Block until the server replies (up to 512 bytes).
data = s.recv(512)
s.send('hihi')
s.close()
print 'the data received is',data
|
from flask_restful import Resource, Api, reqparse
from repository.models import Item
from services.service import Service
|
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
from django.contrib import messages
from . models import Categories, Register
def index(request):
    """Render the site landing page."""
    template_name = 'spendoApp/home.html'
    return render(request, template_name)
def home(request):
    """Plain-text placeholder for the home page."""
    cuerpo = 'Página de inicio'
    return HttpResponse(cuerpo)
def contact_us(request):
    """Render the contact page."""
    template_name = 'spendoApp/contactUs.html'
    return render(request, template_name)
# ---------------Categories-----------------------
def category(request, categoryId):
    """Show the detail page for one category."""
    cat = Categories.objects.get(pk=categoryId)
    return render(request, 'spendoApp/category.html', {'category': cat})
def categoriesListar(request):
    """List every category, ordered alphabetically by name."""
    ordenadas = Categories.objects.order_by('nombre')
    return render(request, 'spendoApp/categories.html', {'categories': ordenadas})
def categoryUpdateForm(request, id):
    """Render the edit form pre-filled with the selected category."""
    cat = Categories.objects.get(pk=id)
    return render(request, 'spendoApp/categoryEditar.html', {'category': cat})
def categoryUpdate(request, id):
    """Persist POSTed edits to a category and return to the listing."""
    try:
        q = Categories.objects.get(pk = id)
        q.nombre = request.POST['name']
        q.medida = request.POST['medida']
        q.save()
        return HttpResponseRedirect(reverse('spendoApp:categoriesListar'))
    except Exception as e:
        # BUG fix: the original did "..." % e on a string with no %s
        # placeholder, which raises TypeError instead of showing the error.
        return HttpResponse("Hubo un error intentando editar el programa: <br>%s" % e)
def categoryDelete(request, categoryId):
    """Delete a category and redirect back to the category listing."""
    try:
        Categories.objects.get(pk=categoryId).delete()
        messages.add_message(request, messages.SUCCESS, "The category has been deleted!")
        return HttpResponseRedirect('/spendoApp/categories')
    except Exception as err:
        return HttpResponse("Hubo un error intentando eliminar: <br>%s" % err)
def categoryAgregarForm(request):
    """Render the blank category-creation form."""
    template_name = 'spendoApp/categoryAgregar.html'
    return render(request, template_name)
def categoryAgregar(request):
    """Create a category from POSTed data and return to the listing."""
    try:
        nueva = Categories(
            medida=request.POST['medida'],
            nombre=request.POST['name']
        )
        nueva.save()
        messages.add_message(request, messages.SUCCESS, "The category has been created!")
        return HttpResponseRedirect(reverse('spendoApp:categoriesListar'))
    except Exception:
        # Error detail is deliberately not echoed back here (matches original).
        return HttpResponse("Hubo un error intentando guardar el programa")
#------------------------Registers-------------------------------
def register(request, categoryId):
    """Show the detail page for one register entry."""
    reg = Register.objects.get(pk=categoryId)
    return render(request, 'spendoApp/register.html', {'register': reg})
def registersListar(request):
    """List every register entry, ordered alphabetically by name."""
    registers = Register.objects.order_by('nombre')
    # BUG fix: the original built the context from an undefined variable
    # 'cats' (NameError) and rendered the categories template.
    # TODO(review): confirm 'spendoApp/registers.html' matches the actual
    # template name (registerDelete redirects to /spendoApp/registers).
    context = { 'registers': registers }
    return render(request, 'spendoApp/registers.html', context)
def registerUpdateForm(request, id):
    """Render the edit form pre-filled with the selected register."""
    reg = Register.objects.get(pk=id)
    return render(request, 'spendoApp/registerEditar.html', {'register': reg})
def registerUpdate(request, id):
    """Persist POSTed edits to a register and return to the listing."""
    try:
        q = Register.objects.get(pk = id)
        q.nombre = request.POST['name']
        q.medida = request.POST['medida']
        q.save()
        # NOTE(review): url name 'registerListar' — confirm against urls.py
        # (the category views use the plural 'categoriesListar').
        return HttpResponseRedirect(reverse('spendoApp:registerListar'))
    except Exception as e:
        # BUG fix: the original did "..." % e on a string with no %s
        # placeholder, which raises TypeError instead of showing the error.
        return HttpResponse("Hubo un error intentando editar el programa: <br>%s" % e)
def registerDelete(request, registerId):
    """Delete a register entry and redirect back to the register listing."""
    try:
        Register.objects.get(pk=registerId).delete()
        messages.add_message(request, messages.SUCCESS, "The register has been deleted!")
        return HttpResponseRedirect('/spendoApp/registers')
    except Exception as err:
        return HttpResponse("Hubo un error intentando eliminar: <br>%s" % err)
def registerAgregarForm(request):
    """Render the blank register-creation form."""
    template_name = 'spendoApp/registerAgregar.html'
    return render(request, template_name)
def registerAgregar(request):
    """Create a register entry from POSTed data and return to the listing."""
    try:
        nuevo = Register(
            medida=request.POST['medida'],
            nombre=request.POST['name']
        )
        nuevo.save()
        messages.add_message(request, messages.SUCCESS, "The register has been created!")
        return HttpResponseRedirect(reverse('spendoApp:registerListar'))
    except Exception:
        # Error detail is deliberately not echoed back here (matches original).
        return HttpResponse("Hubo un error intentando guardar el programa")
|
#-*- coding:utf-8 -*-
import random
import MySQLdb
# TODO (unfinished):
# 1) User registration/login so results are tracked per user
# 2) Store each user's game records in the database
# 3) Query and display a user's records from the database
class GuessNumber(object):
    # Number-guessing game (Python 2).  A MySQL connection is opened for the
    # planned per-user score tracking; records currently go to a text file.
    def __init__(self):
        """
        Connect to the database during initialization.
        Database name: GuessNumber
        """
        try:
            self.conn = MySQLdb.connect(
                host='localhost',
                user='root',
                passwd='433280',
                db='GuessNumber',
                charset='utf8',
                use_unicode=True
            )
            self.cur = self.conn.cursor()
        except MySQLdb.Error as err:
            print('唉呀,连接数据库出错了!')
            print('Error %d : %s ' % (err.args[0], err.args[1]))

    def close_DB(self):
        """
        Close the database: release the connection and cursor opened in __init__.
        :return: None
        """
        self.cur.close()
        self.conn.close()
        return None

    def ui(self):
        """
        Main menu.
        :return: function code chosen by the user, func: string
        """
        print('*' * 80)
        print('*\t\t\t\t猜数小游戏\t\t\t')
        print('*' * 80)
        print('*\t\t\t\t1)开始游戏')
        print('*\t\t\t\t2)我的战绩')
        print('*\t\t\t\tq)离开游戏')
        print('*' * 80)
        # Re-prompt until the user picks a valid menu option.
        while True:
            func = raw_input('想要干啥:')
            if func not in ('1', '2', 'q'):
                print('没有这个选项啊,换一个呗~')
            else:
                break
        return func

    def start_game_ui(self):
        """
        Difficulty-selection menu.
        :return: function code chosen by the user, func: string
        """
        print('*' * 80)
        print('*\t\t\t\t选择游戏难度\t\t\t')
        print('*' * 80)
        print('*\t\t\t\t1)简单(0 - 10)')
        print('*\t\t\t\t2)一般(0 - 1000)')
        print('*\t\t\t\t3)困难(0 - 100000)')
        print('*\t\t\t\tq)不玩了~')
        print('*' * 80)
        # Re-prompt until the user picks a valid difficulty.
        while True:
            func = raw_input('选个难度呗:')
            if func not in ('1','2','3','q'):
                print('没有这个选项啊,换一个呗~')
            else:
                break
        return func

    def guess(self,func):
        """
        Guessing round: generate a random target for the chosen difficulty
        and loop until the user's input matches it.
        :param func: the difficulty chosen by the user, string
        :return: number of guesses used this round, count: num
        """
        start = 0  # lower bound
        # The upper bound of the random target depends on the chosen difficulty
        if func == '1':
            end = 10  # upper bound
            file_difficulty = '简单'
        elif func == '2':
            end = 1000  # upper bound
            file_difficulty = '一般'
        else:
            end = 100000  # upper bound
            file_difficulty = '困难'
        target = random.randint(start,end)  # generate the target value
        count = 0  # number of guesses so far
        # NOTE(review): this debug print reveals the answer — presumably left
        # in for testing; confirm it should stay.
        print('target = %d' % target)
        while True:
            input_num = int(raw_input('来来来猜一个:'))
            count += 1
            if input_num == target:
                print('居然猜中了!')
                print('答案就是%d!!!' % target)
                break
            elif input_num > target:
                print('大了大了~~')
            else:
                print('小了小了~~')
        print('猜了%d次终于猜对咯,不容易不容易。。' % count)
        # Append this round's result to the plain-text record file.
        file_record = '难度:%s \t 目标数:%d \t 猜测次数:%d\n' % (file_difficulty,target,count)
        with open('my_record.txt','a') as f:
            f.write(file_record)
        return count

    def one_more_try(self):
        """
        Ask whether to play another round.
        :return: True to keep playing, False otherwise, func: bool
        """
        print('*' * 80)
        print('*\t\t\t\t再来一次?\t\t\t')
        print('*' * 80)
        print('*\t\t\t\t1)来来来,再战三百回!')
        print('*\t\t\t\tq)累了累了。。。。')
        # Re-prompt until the user picks a valid option.
        while True:
            func = raw_input('怎么说?')
            if func not in ('1','q'):
                print('没有这个选项啊,换一个呗~')
            else:
                break
        if func == '1':
            return(True)
        else:
            return(False)
if __name__ == '__main__':
    guess_number = GuessNumber()
    round_number = 0  # number of rounds played
    round_count = 0  # total guesses across all rounds
    while True:
        main_func = guess_number.ui()
        if main_func == 'q':  # quit the game
            break
        elif main_func == '1':  # start the game
            more_times = True  # "play again" flag, initialized to True
            while more_times:
                round_number += 1
                start_func = guess_number.start_game_ui()  # choose difficulty
                if start_func != 'q':
                    round_count += guess_number.guess(start_func)
                more_times = guess_number.one_more_try()  # play again?
        elif main_func == '2':  # my records
            with open('my_record.txt','r') as f:
                records = f.readlines()
            for each in records:
                print(each),  # trailing ',' suppresses print's newline (Python 2)
        else:  # leave the game
            break
    if round_count:
        round_average = float(round_count) / round_number  # average guesses per round, as float
        round_average = round(round_average,2)  # round to two decimals
    else:
        round_average = 0
    print('本次游戏共猜了%d轮,平均每轮%f次猜中!' % (round_number,round_average))
    guess_number.close_DB()  # close the database
    print('游戏结束~')
import logging
import os
import numpy
import torch
import machine
from machine.util import DictList, ReasonLabeler
from machine.models import PolicyMapping, SigmoidTermination
from machine.util.callbacks import EpisodeLogger
# Use the GPU when available; every tensor in this module is created on this device.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class ReinforcementTrainer(object):
"""
The ReinforcementTrainer class helps in setting up a training framework for
reinforcement learning.
Largely inspired by babyAI repo code for PPOAlgo
"""
    def __init__(self, envs, opt, model, model_name, obs, reshape_reward, algo_name='ppo', reasoning=False):
        """
        Set up the trainer: copy hyper-parameters from `opt`, create the
        model output directory, the logging callback, the optimizer (PPO
        only) and the experience buffers.

        :param envs: (vectorized) environment(s) providing reset()/step()
        :param opt: parsed command-line options (hyper-parameters)
        :param model: policy/value model to train
        :param model_name: directory name for checkpoints under opt.output_dir
        :param obs: observation preprocessing callable
        :param reshape_reward: optional per-transition reward-shaping callable
        :param algo_name: RL algorithm; only 'ppo' is implemented
        :param reasoning: whether to train the auxiliary reasoning head
        """
        self._trainer = f"Reinforcement Trainer - algorithm: {algo_name}"
        self._algo = algo_name
        self.env = envs
        self.preprocess_obss = obs
        self.reshape_reward = reshape_reward
        self.logger = logging.getLogger(__name__)
        self.model_path = os.path.join(opt.output_dir, model_name)
        if not os.path.exists(self.model_path):
            self.logger.info(f"Created model path: {self.model_path}")
            os.mkdir(self.model_path)
        # Copy command-line arguments to class
        self.frames = opt.frames
        self.frames_per_proc = opt.frames_per_proc
        self.num_procs = opt.num_processes
        self.discount = opt.gamma
        self.lr = opt.lr
        self.gae_lambda = opt.gae_lambda
        self.recurrence = opt.recurrence
        self.batch_size = opt.batch_size
        assert opt.batch_size % opt.recurrence == 0
        self.clip_eps = opt.clip_eps
        self.entropy_coef = opt.entropy_coef
        self.value_loss_coef = opt.value_loss_coef
        self.max_grad_norm = opt.max_grad_norm
        self.num_frames = self.frames_per_proc * self.num_procs
        assert self.frames_per_proc % opt.recurrence == 0
        # Arguments for disruptiveness
        self.explore_for = opt.explore_for
        self.disrupt_mode = opt.disrupt
        self.disrupt_coef = opt.disrupt_coef
        # Arguments for reasoning
        self.reasoning = reasoning
        if self.reasoning:
            self.delay_reason = opt.delay_reason
            self.sparse_reason = opt.sparse_diag
            self.reason_coef = opt.reason_coef
            self.reason_criterion = torch.nn.CrossEntropyLoss()
            if "GoToObjThrees" in opt.env_name:
                self.num_subtasks = 3
            else:
                self.num_subtasks = 2
            # NOTE(review): replace_instruction is only bound for GoTo/Pickup
            # env names; any other env_name would raise UnboundLocalError in
            # the ReasonLabeler call below — confirm env names are restricted.
            if "GoTo" in opt.env_name:
                replace_instruction = r"go to (the|a)"
            elif "Pickup" in opt.env_name:
                replace_instruction = r"pick up (the|a)"
            if "Transfer" in opt.env_name:
                transfer_type = int(opt.env_name.split("-")[1][-1])
            else:
                transfer_type = None
            self.reason_labeler = ReasonLabeler(self.num_procs, self.num_subtasks, tt=transfer_type, replace_instr=replace_instruction)
        # Initialize observations
        self.obs, self.obs_info = self.env.reset()
        self.obss = [None] * self.frames_per_proc
        # Initialize log variables
        self.init_log_vars()
        # Initialize callbacks
        self.callback = EpisodeLogger(
            opt.print_every, opt.save_every, model_name, opt.tb, opt.explore_for, opt.reasoning)
        self.callback.set_trainer(self)
        # Set parameters for specific algorithms
        if algo_name == 'ppo':
            self.epochs = opt.ppo_epochs
            self.model = model
            self.model.train()
            self.optimizer = torch.optim.Adam(self.model.parameters(
            ), self.lr, (opt.beta1, opt.beta2), eps=opt.optim_eps)
        else:
            raise ValueError("Not a valid implemented RL algorithm!")
        # Initialize experience matrices
        self.init_experience_matrices()
        self.logger.info(
            f"Setup {self._trainer}, with model_name: {model_name}")
    def collect_experiences(self, intrinsic_reward=False):
        """
        Collect actions, observations and rewards over multiple concurrent
        environments.
        Taken from babyAI repo
        Args:
            intrinsic_reward (bool): Whether to use intrinsic motivation, in
                the form of the disruptiveness metric. If False, get reward
                from the environment and compute advantage.
        """
        for i in range(self.frames_per_proc):
            # Do one agent-environment interaction
            preprocessed_obs = self.preprocess_obss(self.obs, device=device)
            with torch.no_grad():
                # Forward pass only — no gradients during experience collection
                model_results = self.model(
                    preprocessed_obs, self.memory * self.mask.unsqueeze(1))
                dist = model_results['dist']
                value = model_results['value']
                memory = model_results['memory']
            action = dist.sample()
            if self.reasoning and self.callback.cycle > self.delay_reason:
                # Save task status for every frame
                task_status = self.reason_labeler.annotate_status(self.obs, self.obs_info)
                self.task_status = task_status
            obs, reward, done, env_info = self.env.step(action.cpu().numpy())
            # Update experiences values
            self.update_memory(i, action, value, obs, reward, done)
            self.obs = obs
            self.obs_info = env_info
            self.memory = memory
            # Mask is 0 for processes whose episode just ended (resets memory)
            self.mask = 1 - \
                torch.tensor(done, device=device, dtype=torch.float)
            # Update log values
            self.update_log_values(i, dist, action, reward, done)
        # Add advantage and return to experiences
        if intrinsic_reward:
            self.compute_disruptiveness()
        else:
            self.compute_advantage()
        if self.reasoning:
            # Fill in the correct reasons over the observed frames
            self.reasons = self.reason_labeler.compute_reasons(self.status, self.obs)
        # Flatten the data correctly, making sure that each episode's data is
        # a continuous chunk.
        exps = self.flatten_data()
        # Log some values
        log = self.log_output()
        return exps, log
    def update_model_parameters(self, exps, logs):
        """
        Perform gradient update on the model using the gathered experience.
        Taken from babyAI repo

        Runs `self.epochs` PPO epochs; each batch is unrolled over
        `self.recurrence` consecutive sub-steps to propagate the recurrent
        memory, accumulating the clipped policy/value losses (plus optional
        disruptiveness weighting and reasoning loss) before one optimizer step.
        """
        for e_i in range(self.epochs):
            self.callback.on_epoch_begin(e_i)
            # Randomly pick the single recurrence step at which the sparse
            # reasoning loss is asked for in this epoch
            ask_reason = numpy.random.randint(0,self.recurrence)
            for inds in self._get_batches_starting_indexes():
                batch_logs = self.callback.on_batch_begin(None)
                batch_loss = 0
                memory = exps.memory[inds]
                for i in range(self.recurrence):
                    # Create a sub-batch of experience
                    sb = exps[inds + i]
                    # Compute loss
                    model_results = self.model(sb.obs, memory * sb.mask)
                    dist = model_results['dist']
                    value = model_results['value']
                    memory = model_results['memory']
                    disrupt_val = torch.tensor(1)
                    if self.disrupt_mode > 0:
                        if i < self.recurrence - 1:
                            # Disruptiveness: log-count of pixels changed
                            # between consecutive observations, clamped
                            s1 = sb.obs.image
                            s2 = exps[inds + i + 1].obs.image
                            disrupt_val = torch.sum(
                                s1 != s2, dtype=torch.float)
                            disrupt_val = torch.log(disrupt_val)
                            disrupt_val = torch.clamp(
                                disrupt_val, min=.01, max=10)
                            disrupt_val *= self.disrupt_coef
                    entropy = dist.entropy().mean()
                    # PPO clipped surrogate objective
                    ratio = torch.exp(dist.log_prob(
                        sb.action) - sb.log_prob)
                    surr1 = ratio * sb.advantage
                    surr2 = torch.clamp(
                        ratio, 1.0 - self.clip_eps, 1.0 + self.clip_eps) * sb.advantage
                    policy_loss = -torch.min(surr1, surr2).mean()
                    # Clipped value loss
                    value_clipped = sb.value + \
                        torch.clamp(value - sb.value, -
                                    self.clip_eps, self.clip_eps)
                    surr1 = (value - sb.returnn).pow(2)
                    surr2 = (value_clipped - sb.returnn).pow(2)
                    value_loss = torch.max(surr1, surr2).mean()
                    if self.disrupt_mode == 1:
                        # Disruptiveness scales the policy loss
                        loss = (policy_loss * disrupt_val) - self.entropy_coef * \
                            entropy + (self.value_loss_coef * value_loss)
                    elif self.disrupt_mode == 2:
                        # Disruptiveness scales the value loss
                        loss = policy_loss - self.entropy_coef * \
                            entropy + (self.value_loss_coef *
                                       (value_loss * disrupt_val))
                    elif self.reasoning and self.callback.cycle > self.delay_reason and ((not self.sparse_reason) or ask_reason == i):
                        # Auxiliary reasoning head: cross-entropy on frames
                        # with a valid (non-negative) reason label
                        a = sb.reasons.type(torch.long).to(device)
                        zero_mask = (a >= 0).type(torch.long)
                        val, idx = model_results['reason'].max(dim=1)
                        if torch.sum(zero_mask) > 0:
                            correct = torch.sum(a == idx).item()
                            summ = torch.sum(zero_mask).item()
                            acc = correct / summ
                        else:
                            acc = 0
                        self.log_reason_correct.append(acc)
                        rr = model_results['reason'].size()
                        # Zero out logits of frames without a valid label
                        zz = zero_mask.repeat(rr[-1], 1).transpose(0,1)
                        a1 = model_results['reason'].type(torch.float) * zz.type(torch.float)
                        a2 = a * zero_mask
                        reason_loss = self.reason_criterion(a1, a2)
                        loss = policy_loss - self.entropy_coef * \
                            entropy + (self.value_loss_coef * value_loss) + \
                            self.reason_coef * reason_loss
                    else:
                        loss = policy_loss - self.entropy_coef * \
                            entropy + (self.value_loss_coef * value_loss)
                    # Update loss
                    batch_loss += loss
                    # Update batch logging values
                    batch_logs['entropy'] += entropy.item()
                    batch_logs['value'] += value.mean().item()
                    batch_logs['policy_loss'] += policy_loss.item()
                    batch_logs['value_loss'] += value_loss.item()
                    batch_logs['disrupt'] += disrupt_val.item()
                    if self.reasoning and self.callback.cycle > self.delay_reason and ((not self.sparse_reason) or ask_reason == i):
                        batch_logs['reason_loss'] += reason_loss.item()
                    # Update memories for next epoch
                    if i < self.recurrence - 1:
                        exps.memory[inds + i + 1] = memory.detach()
                # Update loss
                batch_loss /= self.recurrence
                # Update batch logging values
                batch_logs['entropy'] /= self.recurrence
                batch_logs['value'] /= self.recurrence
                batch_logs['policy_loss'] /= self.recurrence
                batch_logs['value_loss'] /= self.recurrence
                batch_logs['disrupt'] /= self.recurrence
                if self.reasoning:
                    # NOTE(review): the accumulation above writes to
                    # 'reason_loss' but this divides 'reason' — looks like a
                    # key mismatch; confirm which keys
                    # EpisodeLogger.on_batch_begin initializes.
                    batch_logs['reason'] /= self.recurrence
                # Update actor-critic
                self.optimizer.zero_grad()
                batch_loss.backward()
                grad_norm = sum(p.grad.data.norm(
                    2) ** 2 for p in self.model.parameters() if p.grad is not None) ** 0.5
                torch.nn.utils.clip_grad_norm_(
                    self.model.parameters(), self.max_grad_norm)
                self.optimizer.step()
                # Update log values
                batch_logs['grad_norm'] = grad_norm.item()
                self.callback.on_batch_end(batch_loss.item(), batch_logs)
            logs = self.callback.on_epoch_end(logs)
        return logs
def train(self):
"""
Perform a series on training steps as configured.
"""
# Start training model
self.callback.on_train_begin()
num_frames = 0
while num_frames < self.frames:
self.callback.on_cycle_start()
# Create experiences and update the training status
exps, logs = self.collect_experiences(
intrinsic_reward=(num_frames < self.explore_for)
)
# Use experience to update policy
logs = self.update_model_parameters(exps, logs)
num_frames += logs['num_frames']
self.callback.on_cycle_end(logs)
self.callback.on_train_end()
    def init_experience_matrices(self):
        """
        Initialize matrices used in the storing of observations.
        Per-frame buffers have shape (frames_per_proc, num_procs[, ...]);
        per-process buffers have shape (num_procs[, ...]).  All tensors are
        created on the module-level `device`.
        """
        shape = (self.frames_per_proc, self.num_procs)
        if self._algo == 'ppo':
            # Recurrent-state buffers are only needed for the PPO model
            memsize = self.model.memory_size
            self.memory = torch.zeros(shape[1], memsize, device=device)
            self.memories = torch.zeros(*shape, memsize, device=device)
        self.mask = torch.ones(shape[1], device=device)
        self.masks = torch.zeros(*shape, device=device)
        self.actions = torch.zeros(*shape, device=device, dtype=torch.int)
        self.values = torch.zeros(*shape, device=device)
        self.rewards = torch.zeros(*shape, device=device)
        self.advantages = torch.zeros(*shape, device=device)
        self.log_probs = torch.zeros(*shape, device=device)
        if self.reasoning:
            # Reasoning labels plus per-frame sub-task status
            self.reason = torch.zeros(shape[1], device=device)
            self.reasons = torch.zeros(*shape, device=device)
            self.status = torch.zeros(*shape, self.num_subtasks, device=device)
    def init_log_vars(self):
        """
        Initialize the variables used for logging training progress.
        Per-episode accumulators are tensors (one slot per process); the
        history lists start with one zero per process.
        """
        self.log_episode_return = torch.zeros(self.num_procs, device=device)
        self.log_episode_reshaped_return = torch.zeros(
            self.num_procs, device=device)
        self.log_episode_num_frames = torch.zeros(
            self.num_procs, device=device)
        self.log_done_counter = 0
        self.log_return = [0] * self.num_procs
        self.log_reshaped_return = [0] * self.num_procs
        self.log_num_frames = [0] * self.num_procs
        if self.reasoning:
            # One sub-task status row per process; per-process reasoning accuracy
            self.task_status = torch.as_tensor([[0] * self.num_subtasks] * self.num_procs, dtype=torch.float)
            self.log_reason_correct = [0] * self.num_procs
    def update_memory(self, i, action, value, obs, reward, done):
        """
        Store frame `i`'s transition (previous observation, recurrent memory,
        mask, action, value and reward — optionally reshaped) in the
        experience buffers.
        """
        self.obss[i] = self.obs
        self.memories[i] = self.memory
        self.masks[i] = self.mask
        self.actions[i] = action
        self.values[i] = value
        if self.reshape_reward is not None:
            # Apply per-process reward shaping before storing
            self.rewards[i] = torch.tensor([
                self.reshape_reward(obs_, action_, reward_, done_)
                for obs_, action_, reward_, done_ in zip(obs, action, reward, done)
            ], device=device)
        else:
            self.rewards[i] = torch.tensor(reward, device=device)
        if self.reasoning:
            self.status[i,:,:] = self.task_status
def update_log_values(self, i, dist, action, reward, done):
    """
    Accumulate per-step logging statistics and flush per-episode totals
    for every process whose episode just finished.
    """
    self.log_probs[i] = dist.log_prob(action)
    self.log_episode_return += torch.tensor(reward, device=device, dtype=torch.float)
    self.log_episode_reshaped_return += self.rewards[i]
    self.log_episode_num_frames += torch.ones(self.num_procs, device=device)
    for proc, finished in enumerate(done):
        if not finished:
            continue
        self.log_done_counter += 1
        self.log_return.append(self.log_episode_return[proc].item())
        self.log_reshaped_return.append(self.log_episode_reshaped_return[proc].item())
        self.log_num_frames.append(self.log_episode_num_frames[proc].item())
    # mask is 0 for processes whose episode ended, zeroing their totals.
    self.log_episode_return *= self.mask
    self.log_episode_reshaped_return *= self.mask
    self.log_episode_num_frames *= self.mask
def compute_advantage(self):
    """
    Run the advantage estimation from [1].
    A_t = delta_t + (gamma * lambda) delta_(t+1) ...
    with
    delta_t = reward_t + gamma V(s_(t+1)) - V(s_t)
    [1]: Mnih et al. (2016) "Asynchronous methods for deep reinforcement learning"
    """
    # Bootstrap value for the state following the last collected frame.
    preprocessed_obs = self.preprocess_obss(self.obs, device=device)
    with torch.no_grad():
        next_value = self.model(
            preprocessed_obs, self.memory * self.mask.unsqueeze(1))['value']
    # Walk backwards so advantages[i + 1] is already filled in when used.
    for i in reversed(range(self.frames_per_proc)):
        next_mask = self.masks[i + 1] if i < self.frames_per_proc - 1 else self.mask
        next_value = self.values[i + 1] if i < self.frames_per_proc - 1 else next_value
        next_advantage = self.advantages[i + 1] if i < self.frames_per_proc - 1 else 0
        # TD residual; next_mask zeroes the bootstrap across episode ends.
        delta = self.rewards[i] + self.discount * \
            next_value * next_mask - self.values[i]
        self.advantages[i] = delta + self.discount * \
            self.gae_lambda * next_advantage * next_mask
def compute_disruptiveness(self):
    """
    Compute an intrinsic reward based on the disruptiveness metric.

    Disruptiveness of step i is the count of entries of the 'image'
    observation that changed between step i and step i + 1; the result
    overwrites self.advantages.
    """
    preprocessed_obs = self.preprocess_obss(self.obs, device=device)
    with torch.no_grad():
        # NOTE(review): next_value is computed but never used below —
        # this forward pass looks like leftover from compute_advantage.
        next_value = self.model(
            preprocessed_obs, self.memory * self.mask.unsqueeze(1))['value']
    for i in range(self.frames_per_proc):
        s_t = self.obss[i]
        # Last frame compares against the current (not yet stored) obs.
        s_t1 = self.obss[i +
                         1] if i < (self.frames_per_proc - 1) else self.obs
        # Binary difference
        state_t = torch.Tensor([s['image'] for s in s_t])
        state_t1 = torch.Tensor([s['image'] for s in s_t1])
        val = torch.nonzero(state_t - state_t1).size()[0]
        # Normalize over max change
        # (assumes a 7x7 observation grid — TODO confirm against the env)
        self.advantages[i] = val / (7 * 7 * self.num_procs)
def flatten_data(self):
    """
    Flatten the memory such that it is a continuous chunk of data. This is
    required by the PyTorch optimization step.

    Returns a DictList of experiences with leading dimension P * T
    (process-major), plus the return target (value + advantage).
    """
    exps = DictList()
    # Reorder observations process-major to match the tensors below.
    exps.obs = [self.obss[i][j]
                for j in range(self.num_procs)
                for i in range(self.frames_per_proc)]
    # In commments below T is self.frames_per_proc, P is self.num_procs,
    # D is the dimensionality
    # T x P x D -> P x T x D -> (P * T) x D
    exps.memory = self.memories.transpose(
        0, 1).reshape(-1, *self.memories.shape[2:])
    # T x P -> P x T -> (P * T) x 1
    exps.mask = self.masks.transpose(0, 1).reshape(-1).unsqueeze(1)
    # for all tensors below, T x P -> P x T -> P * T
    exps.action = self.actions.transpose(0, 1).reshape(-1)
    exps.value = self.values.transpose(0, 1).reshape(-1)
    exps.reward = self.rewards.transpose(0, 1).reshape(-1)
    exps.advantage = self.advantages.transpose(0, 1).reshape(-1)
    # Bootstrapped return estimate used as the critic target.
    exps.returnn = exps.value + exps.advantage
    exps.log_prob = self.log_probs.transpose(0, 1).reshape(-1)
    if self.reasoning:
        exps.reasons = self.reasons.transpose(0, 1).reshape(-1)
    # Preprocess experiences
    exps.obs = self.preprocess_obss(exps.obs, device=device)
    return exps
def log_output(self):
    """
    Build the dict of training-progress statistics for this update, then
    trim the per-episode histories to the last num_procs entries.
    """
    # Report at least num_procs episodes so the window is never empty.
    keep = max(self.log_done_counter, self.num_procs)
    log = {
        "return_per_episode": self.log_return[-keep:],
        "reshaped_return_per_episode": self.log_reshaped_return[-keep:],
        "num_frames_per_episode": self.log_num_frames[-keep:],
        "num_frames": self.num_frames,
        "episodes_done": self.log_done_counter,
    }
    if self.reasoning:
        log['correct_reasons'] = self.log_reason_correct[-keep:]
        self.log_reason_correct = self.log_reason_correct[-self.num_procs:]
    # Reset the episode counter and keep only the most recent history.
    self.log_done_counter = 0
    self.log_return = self.log_return[-self.num_procs:]
    self.log_reshaped_return = self.log_reshaped_return[-self.num_procs:]
    self.log_num_frames = self.log_num_frames[-self.num_procs:]
    return log
def _get_batches_starting_indexes(self):
"""
Gives, for each batch, the indexes of the observations given to
the model and the experiences used to compute the loss at first.
Returns
-------
batches_starting_indexes : list of list of int
the indexes of the experiences to be used at first for each batch
Taken from babyAI repo
"""
indexes = numpy.arange(0, self.num_frames, self.recurrence)
indexes = numpy.random.permutation(indexes)
num_indexes = self.batch_size // self.recurrence
batches_starting_indexes = [indexes[i:i + num_indexes]
for i in range(0, len(indexes), num_indexes)]
return batches_starting_indexes |
#!/usr/bin/env python
# coding: utf-8
# created by hevlhayt@foxmail.com
# Date: 2016/10/26
# Time: 19:09
import hashlib
import re
from LineMe.settings import DEPLOYED_LANGUAGE
def get_template_dir(appname):
    """Return the language-specific template directory for *appname*."""
    # Simplified-Chinese deployments use the zh_cn templates; any other
    # configured language falls back to US English.
    suffix = '/zh_cn/' if DEPLOYED_LANGUAGE == 'zh-cn' else '/us_en/'
    return appname + suffix
def logger_join(*args, **kwargs):
    """
    Join positional arguments with spaces, then append any non-None
    keyword arguments as 'KEY:value' pairs (newlines in values removed).
    Returns '' when no positional arguments are given.
    """
    if not args:
        return ''
    joined = ' '.join(str(a) for a in args)
    if not kwargs:
        return joined
    extras = ' '.join(key.upper() + ':' + str(value).replace('\n', '')
                      for key, value in kwargs.items() if value is not None)
    return joined + ' ' + extras
def md5(s):
    """
    Return the hex MD5 digest of *s* (str or bytes), or '' for any
    other type.

    Fixes the original check ``type(s) is str or unicode``, which was
    always truthy (and raised NameError for non-str input on Python 3),
    and encodes text before hashing since hashlib requires bytes.
    """
    if isinstance(s, bytes):
        data = s
    elif isinstance(s, str):
        data = s.encode('utf-8')
    else:
        return ''
    m = hashlib.md5()
    m.update(data)
    return m.hexdigest()
def input_filter(arg):
    """
    Strip every character that is not an ASCII letter, digit, or CJK
    ideograph (U+4E00..U+9FA5) from *arg*; return None for falsy or
    non-string input.

    Fixes the Python-2-only ``ur"..."`` literal (a SyntaxError on
    Python 3) and the always-true check ``type(arg) is str or unicode``.
    """
    if arg and isinstance(arg, str):
        return re.sub(r"[^a-zA-Z0-9\u4e00-\u9fa5]", '', arg)
    return None
|
# Test your code incrementally — errors are easier to locate that way.
def _final_price(preco):
    """Apply a 5% discount when the purchase total is 200 or more."""
    if preco >= 200:
        return preco - preco * 5 / 100
    return preco

preco = float(input("qual o valor da compra? "))
print(round(_final_price(preco), 2))
import re
from os import listdir
from os.path import isfile, join, isdir
from sklearn.feature_extraction.text import strip_accents_unicode
# Directory names (one per source-code format) expected under the training path.
fmt_dirs = ['c', 'h', 'cpp', 'js', 'java', 'css', 'tsv', 'csv', 'py', 'f90']
#fmt_dirs = [f for f in listdir(training_path) if isdir(join(training_path, f))]

# Maybe map multiple formats like this instead?
# NOTE(review): this mapping is defined but not used anywhere in this module.
fmts = {
    'c++': ['cc', 'cxx', 'cpp', 'c++'],
    'h++': ['hxx', 'hpp', 'h++'],
    'csv': ['csv', 'tsv'] # i.e. treat all XSV as the same format (with internal variations the classifier should cope with)?
}
# See http://gcc.gnu.org/onlinedocs/gcc-4.3.6/gcc/Overall-Options.html
def obliviate(text):
    """
    Reduce *text* to its typographic 'shape': accents stripped, then each
    digit run -> '0', upper-case run -> 'A', lower-case run -> 'a', and
    every newline prefixed with an 'N' marker.
    """
    # Strip accents first (sklearn's NFKD-based helper).
    text = strip_accents_unicode(text)
    substitutions = (
        (r"[0-9]+", "0"),    # collapse numbers
        (r"[A-Z]+", "A"),    # collapse upper-case runs
        (r"[a-z]+", "a"),    # collapse lower-case runs
        (r"\n", "N\n"),      # make line breaks visible as tokens
    )
    for pattern, replacement in substitutions:
        text = re.sub(pattern, replacement, text)
    return text
def unrepeat(text):
    """
    Append a run-collapsed copy of *text* (consecutive duplicate
    characters reduced to one) after a '---\\n' separator.  Strings of
    length <= 1 are returned unchanged.
    """
    if len(text) <= 1:
        return text
    # Keep the first character plus every character that differs from
    # its immediate predecessor.
    collapsed = text[0] + ''.join(
        cur for prev, cur in zip(text, text[1:]) if cur != prev)
    return text + '---\n' + collapsed
def load_fmt_files(source_path):
    """
    Read every training file under source_path/<fmt> for each format in
    fmt_dirs.  Files longer than 32 characters are normalized with
    obliviate().  Returns (contents, numeric format labels, file paths).
    """
    data, target, names = [], [], []
    for label, fmt in enumerate(fmt_dirs):
        fmt_dir = join(source_path, fmt)
        for entry in listdir(fmt_dir):
            path = join(fmt_dir, entry)
            if not isfile(path):
                continue
            with open(path, 'r') as handle:
                content = handle.read()
            # Skip near-empty files; they carry no useful shape signal.
            if len(content) > 32:
                data.append(obliviate(content))
                target.append(label)
                names.append(path)
    return data, target, names
import asyncio
from aiohttp import ClientSession
import time
async def fetch(url, session):
    """POST to *url* using *session* and print the raw response body."""
    async with session.post(url) as resp:
        # NOTE(review): the response object is rebound to its body bytes here.
        resp = await resp.read()
        print(resp)
# asyncio.run(fetch("http://httpbin.org/headers"))
async def run(n):
    """Fire *n* concurrent fetch() calls against httpbin's /headers."""
    url = "http://httpbin.org/headers"
    async with ClientSession() as session:
        tasks = [asyncio.ensure_future(fetch(url, session))
                 for _ in range(n)]
        await asyncio.gather(*tasks)
# asyncio.run(run(4))
async def bound_fetch(sem, url, session):
    """Run fetch() while holding *sem* to cap request concurrency."""
    async with sem:
        await fetch(url, session)
#-------------------------------
# high performance client
#-------------------------------
async def run_fast(n):
    """
    Issue *n* requests against the local introspection endpoint, keeping
    at most 1000 requests in flight at any moment.
    """
    url = 'http://127.0.0.1:8080/api/introspect'
    limiter = asyncio.Semaphore(1000)
    async with ClientSession() as session:
        pending = [asyncio.ensure_future(bound_fetch(limiter, url, session))
                   for _ in range(n)]
        await asyncio.gather(*pending)
# Benchmark: 1000 concurrent requests, reporting wall-clock time.
# NOTE(review): this runs at import time and requires a local server
# listening on 127.0.0.1:8080.
start = time.time()
asyncio.run(run_fast(1000))
print(f'\nTook: {time.time() - start}')
|
import taichi as ti
import numpy as np
from mpm_solver import MPMSolver
write_to_disk = False  # when True, gui.show() saves numbered PNG frames

# Try to run on GPU
ti.init(arch=ti.cuda)

gui = ti.GUI("Taichi MLS-MPM-99", res=512, background_color=0x112F41)

# 64^3 grid in a 10-unit domain; three 1x1x3 bars of different materials
# stacked above one another so they interact while falling.
mpm = MPMSolver(res=(64, 64, 64), size=10)
mpm.add_cube(lower_corner=[2, 4, 3], cube_size=[1, 1, 3], material=MPMSolver.material_snow)
mpm.add_cube(lower_corner=[2, 6, 3], cube_size=[1, 1, 3], material=MPMSolver.material_elastic)
mpm.add_cube(lower_corner=[2, 8, 3], cube_size=[1, 1, 3], material=MPMSolver.material_water)
mpm.set_gravity((0, -50, 0))
for frame in range(1500):
    mpm.step(4e-3)
    # One GUI color per material (snow, elastic, water).
    colors = np.array([0x068587, 0xED553B, 0xEEEEF0], dtype=np.uint32)
    np_x, np_v, np_material = mpm.particle_info()
    np_x = np_x / 10.0  # normalize positions from domain units to [0, 1]
    # simple camera transform: project (x, z) onto a diagonal screen axis
    screen_x = ((np_x[:, 0] + np_x[:, 2]) / 2 ** 0.5) - 0.2
    screen_y = (np_x[:, 1])
    screen_pos = np.stack([screen_x, screen_y], axis=-1)
    gui.circles(screen_pos, radius=1.5, color=colors[np_material])
    gui.show(f'{frame:06d}.png' if write_to_disk else None)
|
from django_grpc_framework import proto_serializers
from algorithms.models import Message
import resource_pb2
class MessageProtoSerializer(proto_serializers.ModelProtoSerializer):
    """Serializes the Message model to/from its generated protobuf message."""

    class Meta:
        model = Message
        # Fixed: the option is spelled ``proto_class`` (the original
        # ``proto_clas`` was silently ignored) and must reference the
        # generated message class, not the whole *_pb2 module.
        proto_class = resource_pb2.Message  # TODO confirm generated class name
        fields = ['name']
import requests
from pyquery import PyQuery as pq
import json
import os
import re
import pdfkit
from PyPDF2 import PdfFileMerger
import time
from selenium import webdriver
from selenium.webdriver.firefox.options import Options
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
import config
from news import News
from utils import *
options = Options()
options.add_argument('-headless')
profile = FirefoxProfile()
# Enable manual proxy configuration (sets the preference in the profile).
profile.set_preference("network.proxy.type", 1)
# Route HTTP traffic through the configured proxy host and port.
profile.set_preference("network.proxy.http", config.firefox_proxy)
profile.set_preference("network.proxy.http_port", config.firefox_port)
profile.set_preference("network.proxy.ssl", config.firefox_proxy)
profile.set_preference("network.proxy.ssl_port", config.firefox_port)
# All protocols share one proxy host/port; the option below defaults to
# False, so it only needs setting when proxies are configured per-protocol.
# profile.set_preference("network.proxy.share_proxy_settings", True)
# By default localhost bypasses the proxy; list additional hosts to
# exclude with a preference like the following.
# profile.set_preference("network.proxy.no_proxies_on", "localhost")
profile.update_preferences()
driver = webdriver.Firefox(executable_path=config.firefox_executable_path, firefox_profile=profile, firefox_options=options)
# driver.close()
class CnBBC(News):
    """
    Scraper for the BBC Chinese site: fetches the "most popular" story
    list via the proxied headless Firefox above, saves each story as
    HTML plus images, renders per-story PDFs, and merges them into one
    dated PDF.
    """

    def __init__(self):
        # Per-day output layout; merged PDFs live in a shared folder.
        date = time.strftime("%Y-%m-%d")
        self.html_dir = config.save_folder+'/cn_bbc/'+date+'/html'
        self.img_dir = config.save_folder+'/cn_bbc/'+date+'/html/img'
        self.single_pdf_dir = config.save_folder+'/cn_bbc/'+date+'/single_pdf_dir'
        self.muti_pdf_dir = config.save_folder+'/muti_pdf_dir'
        make_dirs(self.html_dir)
        make_dirs(self.img_dir)
        make_dirs(self.single_pdf_dir)
        make_dirs(self.muti_pdf_dir)

    @classmethod
    def _parse_page(cls, page):
        """Extract title, story HTML and image URLs from one article page."""
        e = pq(page)
        data = {}
        data['title'] = e('title').text()
        data['content'] = e('.story-body').remove('.with-extracted-share-icons').html()
        data['imgs'] = [e(i).attr('src') for i in e('.story-body img')]
        # print(data['imgs'])
        return data

    @classmethod
    def _single_page(cls, url):
        """Download one story, save it as HTML + PDF; False when empty."""
        print('getting url ...', url)
        page = get_html(url)
        data = cls._parse_page(page)
        if data['content'] is None:
            return False
        title = data['title']
        content = replace_img_url(data['content'])
        # NOTE(review): cls() builds a fresh instance (re-creating all
        # output directories) on each access — consider caching one.
        html_path = os.path.join(cls().html_dir, title+'.html')
        # print('html_path', html_path)
        save_html(html_path, content)
        down_imgs(data['imgs'], cls().img_dir)
        pdf_path = os.path.join(cls().single_pdf_dir, title+'.pdf')
        save_pdf(html_path, pdf_path)

    @classmethod
    def _parse_list(cls, page):
        """Extract absolute article URLs from the most-popular list page."""
        e = pq(page)
        # print('')
        news_list = [config.cn_bbc_head_url + e(i).attr('href') for i in e('.most-popular__list-container a')]
        return news_list

    @classmethod
    def _get_most_popular(cls, url):
        """Render *url* in headless Firefox and return the article URLs."""
        driver.get(url)
        time.sleep(5)  # give the JS-rendered list time to load
        page = driver.page_source
        driver.close()
        # print(page)
        # save_html('bbc.html',page)
        news_list = cls._parse_list(page)
        return news_list

    def main(self):
        """Scrape every most-popular story and merge them into one PDF."""
        main_url = config.cn_bbc_main_url
        most_popular_news = self._get_most_popular(main_url)
        print('most_popular_news', most_popular_news)
        for url in most_popular_news:
            # print('item', item)
            self._single_page(url)
        pdfs = get_all_file(self.single_pdf_dir)
        filename = 'cn_bbc_{}.{}'.format(time.strftime("%Y-%m-%d"),'pdf')
        out_path = os.path.join(self.muti_pdf_dir, filename)
        merger_pdf(pdfs, out_path)
        # self._single_page('https://www.bbc.com/zhongwen/simp/world-46533135')
import pandas as pd
import numpy as np
import string
import re
import os
import nltk
from nltk.corpus import stopwords
from string import punctuation
from gensim.models import Word2Vec
import spacy
# Project root, used to locate saved embedding files.
root_folder = os.path.realpath('.')

EMBEDDING_DIM = 300   # dimensionality of the word vectors
TEXT_WINDOW = 100     # word2vec context window size
MIN_WORD_COUNT = 50   # ignore words rarer than this
def word2vec_training():
    """
    Train a Word2Vec embedding over two fake-news datasets (the second
    dataset extends the vocabulary of the model built on the first) and
    save it to saved_embeddings/w2v_fake_news.model.

    Relies on module-level helpers clean_text() and lemmatizer().
    """
    # Dataset 1: separate fake/true CSVs, labelled target=0/1.
    data_fake = pd.read_csv('datasets/1/Fake.csv', encoding='utf-8')
    data_true = pd.read_csv('datasets/1/True.csv', encoding='utf-8')
    data_fake['target'] = 0
    data_true['target'] = 1
    data_fake = data_fake.replace(to_replace='None', value=np.nan).dropna()
    data_true = data_true.replace(to_replace='None', value=np.nan).dropna()
    dataset1 = pd.concat([data_true, data_fake]).reset_index(drop=True)

    # Dataset 2: one CSV with a textual FAKE/REAL label column.
    dataset2 = pd.read_csv('datasets/2/fake_or_real_news.csv', encoding='utf-8', sep=",")
    dataset2['target'] = 1
    dataset2.loc[dataset2['label'] == 'FAKE', ['target']] = 0

    datasets = [dataset1, dataset2]
    ds_count = 0
    for dataset in datasets:
        print("="*50)
        print("Dataset numero "+str(ds_count))
        news = dataset
        print("** Initial ** Dataframe Size: ")
        print(np.shape(news))
        print("="*50)
        # Drop missing/empty rows, clean and lemmatize the text, and keep
        # only articles longer than 50 characters.
        news = news.replace(to_replace='None', value=np.nan).dropna()
        news = news.replace(to_replace='', value=np.nan).dropna()
        news['text'] = pd.DataFrame(news.text.apply(lambda x: clean_text(x)))
        news = news.replace(to_replace='None', value=np.nan).dropna()
        news = news[news['text'].map(lambda x: len(x.strip())>50)]
        news['text'] = news.apply(lambda x: lemmatizer(x['text']), axis=1)
        # Remove spaCy's pronoun lemma placeholder.
        news['text'] = news['text'].str.replace('-PRON-', '')
        print("**After Preprocessing** Dataframe Size: ")
        print(np.shape(news))
        print("="*50)
        print("Class Population: ")
        count_fake = news[news["target"]==0].count()["text"]
        count_true = news[news["target"]==1].count()["text"]
        print("Real (True) News Samples: "+str(count_true)+" / "+str(len(news)))
        print("Fake News Samples: "+str(count_fake)+" / "+str(len(news)))
        documents = [row.split() for row in news['text']]
        print("Word2Vec Building Vocabulary...")
        if(ds_count == 0):
            # First dataset: build the model and its initial vocabulary.
            w2v_model = Word2Vec(documents,
                                 min_count=MIN_WORD_COUNT,
                                 window=TEXT_WINDOW,
                                 size=EMBEDDING_DIM,
                                 workers=6)
        else:
            # Later datasets: extend the existing vocabulary in place.
            w2v_model.build_vocab(documents, update = True)
        print("Word2Vec Training Embedding Model...")
        w2v_model.train(documents, total_examples=len(documents), epochs=10)
        print("Word2Vec Dictionary Current Length: "+str(len(w2v_model.wv.vocab)))
        ds_count += 1
    print("Word2Vec Word Embedding Training Ended. ")
    # Precompute L2-normalised vectors; the model can no longer be trained.
    w2v_model.init_sims(replace=True)
    w2v_model.save("saved_embeddings/w2v_fake_news.model")
def load_saved_embedding():
    """
    Load the Word2Vec model written by word2vec_training().

    Fixed: the load path now matches the save path used in
    word2vec_training() ("saved_embeddings/w2v_fake_news.model"); the
    original read from a non-existent "embeddings/saved/" directory.
    """
    return Word2Vec.load(
        os.path.join(root_folder, "saved_embeddings", "w2v_fake_news.model"))
|
import xgboost as xgb
from xgboost.sklearn import XGBClassifier
import matplotlib.pylab as plt
from sklearn import cross_validation, metrics #Additional scklearn functions
from sklearn.grid_search import GridSearchCV #Perforing grid search
import pandas as pd
import numpy as np
import matplotlib.pylab as plt
from matplotlib.pylab import rcParams
import operator
# Default figure size for the importance plots produced below.
rcParams['figure.figsize'] = 12, 4

'''
Utility Library for XG Boost
-----------------------------
Source =>
https://www.analyticsvidhya.com/blog/2016/03/complete-guide-parameter-tuning-xgboost-with-codes-python/
'''
def modelfit(alg, X, Y, useTrainCV=True, cv_folds=5, early_stopping_rounds=50, feature_index=None):
    """
    Fit an XGBoost classifier, optionally choosing n_estimators via
    xgb.cv early stopping, then print train accuracy/AUC and plot
    feature importances.  (Python 2 / pre-1.0 xgboost API.)
    """
    if useTrainCV:
        # Cross-validate to pick the best number of boosting rounds.
        xgb_param = alg.get_xgb_params()
        xgtrain = xgb.DMatrix(X, label=Y)
        cvresult = xgb.cv(xgb_param, xgtrain, num_boost_round=alg.get_params()['n_estimators'], nfold=cv_folds,
            metrics='auc', early_stopping_rounds=early_stopping_rounds, verbose_eval=True)
        alg.set_params(n_estimators=cvresult.shape[0])

    #Fit the algorithm on the data
    alg.fit(X, Y, eval_metric='auc')

    #Predict training set:
    dtrain_predictions = alg.predict(X)
    dtrain_predprob = alg.predict_proba(X)[:,1]

    #Print model report:
    print "\nModel Report"
    print "Accuracy : %.4g" % metrics.accuracy_score(Y, dtrain_predictions)
    print "AUC Score (Train): %f" % metrics.roc_auc_score(Y, dtrain_predprob)

    # Feature importance (f-score), highest first.
    # NOTE(review): alg.booster() is the old xgboost API — newer
    # releases spell it get_booster().
    importance = alg.booster().get_fscore()
    importance = sorted(importance.items(), key=operator.itemgetter(1),reverse=True)
    for s in importance:
        if(feature_index is not None):
            # Map the generic 'fNN' feature name back to its real name.
            _id = int(s[0][1:])
            print('%s:%d',feature_index[_id],s[1])

    feat_imp = pd.Series(alg.booster().get_fscore()).sort_values(ascending=False)
    feat_imp.plot(kind='bar', title='Feature Importances')
    plt.ylabel('Feature Importance Score')
    plt.show()
|
from contextlib import contextmanager

try:
    from contextlib import nullcontext  # Python 3.7+
except ImportError:
    # Backport for Python < 3.7: a no-op context manager that simply
    # yields enter_result, mirroring contextlib.nullcontext.
    @contextmanager
    def nullcontext(enter_result=None):
        yield enter_result
|
# Generated by Django 3.2.6 on 2021-09-13 14:44
from django.db import migrations
class Migration(migrations.Migration):
    """Set default ordering (newest first) and verbose names on Post."""

    dependencies = [
        ('feed', '0006_alter_post_author'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='post',
            options={'ordering': ('-datetime',), 'verbose_name': 'Post', 'verbose_name_plural': 'Posts'},
        ),
    ]
|
import random
def findHighest3(l):
    """
    Return the indices (in l) of the three largest distinct values.
    Mirrors the original scan exactly: values equal to an already-found
    maximum are skipped, and a round that finds nothing yields value 0,
    so l.index(0) may raise ValueError just like the original.
    """
    found = []
    for _ in range(3):
        best = 0
        for value in l:
            if value > best and value not in found:
                best = value
        found.append(best)
    return tuple(l.index(v) for v in found)
def main(a):
board=[]
for x in range(40):
board.append(0)
pos=0
count=0
dub=0
for x in range(a):
r1=int(random.random()*4)+1
r2=int(random.random()*4)+1
#print r1+r2
pos+=r1+r2
if pos>39:
pos-=40
if r1==r2:
dub+=1
else:
dub=0
if pos==2 or pos==17 or pos==33:
r=int(random.random()*16)
if r==0:
pos=0
elif r==1:
pos=10
elif pos==7 or pos==2 or pos==36:
r=int(random.random()*16)
if r==0:
pos=0
if r==1:
pos=10
if r==2:
pos=11
if r==3:
pos=24
if r==4:
pos=39
if r==5:
pos=5
if r==7 or r==8:
if pos==7:
pos=15
if pos==22:
pos=25
if pos==36:
pos=5
if r==9:
if pos==7 or pos==12:
pos=12
if pos==22:
pos=28
if r==10:
pos-=3
if pos==30 or dub==3:
pos=10
dub=0
board[pos]+=1
print board
print findHighest3(board)
main(10**4)
|
from django.contrib import admin
from .models import Post,Contact
# Register your models here.
class BlogAdmin(admin.ModelAdmin):
    """Admin options for Post: list by title, filter by title and disc."""
    list_display = ('title',)
    list_filter = ("title", "disc",)

# Fixed: the original registered Post without BlogAdmin, leaving the
# options class above unused.
admin.site.register(Post, BlogAdmin)
admin.site.register(Contact)
import inspect
import json
import logging
import os
import random
from importlib import import_module
import re
import sys
class BadArgumentError(Exception):
    """Raised when a test, check or transition gets invalid constructor arguments."""
    pass
class StateTransitionBase(object):
    """Common base for StateCheck/StateTransition: a named callable step."""

    def __init__(self, name, func):
        self.name = name        # human-readable label
        self.function = func    # callable invoked with the TestContext
class StateCheck(StateTransitionBase):
    """
    A named check of one application state.  check() runs the supplied
    function against a fresh HTTP session on the given TestContext.
    """

    def __init__(self, name, func):
        if not name:
            raise BadArgumentError("All StateChecks must have a name.")
        if not isinstance(name, str):
            raise BadArgumentError("A StateCheck\'s name must be a string.")
        # Fixed: the original tested the undefined name ``function``,
        # which raised NameError instead of the intended error below.
        if not func:
            raise BadArgumentError('All StateChecks must have a check function.')
        if not callable(func):
            # (Message retained from the Perl harness this was ported from.)
            raise BadArgumentError('A StateCheck\'s check function must be a Perl subroutine.')
        StateTransitionBase.__init__(self, name, func)

    def check(self, c):
        """Run the check function against TestContext *c* on a clean session."""
        c.clear_http_session()
        try:
            self.function(c)
        except Exception as e:
            # NOTE(review): failures are swallowed here, as in the
            # original; consider logging the exception.
            pass
class WebAppTest(object):
    """
    Describes one application test: the package under test, optional
    accessories and repositories, customization-point values, and an
    alternating sequence of StateChecks and StateTransitions.
    """

    def __init__(self, app_to_test=None, accessories_to_test: list=[], name=None, fixed_test_context=None,
                 description="", customization_point_values=[], checks=None, package_dbs_to_add: dict = {}):
        """Validate all arguments; raises BadArgumentError on malformed input.

        (The mutable default arguments are never mutated here.)
        """
        app_package_name = app_to_test
        accessory_package_names = accessories_to_test
        if not app_package_name:
            raise BadArgumentError('AppTest must identify the application package being tested. Use parameter named '
                                   '"app_to_test".')
        if accessory_package_names and not isinstance(accessory_package_names, list):
            raise BadArgumentError('If accessoriesToTest is given, it must be an array of package names')
        if not isinstance(name, str):
            raise BadArgumentError('AppTest name name must be a string.')
        if fixed_test_context:
            if not isinstance(fixed_test_context, str):
                raise BadArgumentError('AppTest testContext name must be a string.')
            # Fixed: compare strings with != (``is not`` tests identity).
            if fixed_test_context != "" and not re.match("^/[-_.a-z0-9%]+$", fixed_test_context):
                raise BadArgumentError('AppTest fixedTestContext must be a single-level relative path starting with a '
                                       'slash, or be empty')
        if not isinstance(description, str):
            raise BadArgumentError("AppTest description name must be a string.")
        if package_dbs_to_add and not isinstance(package_dbs_to_add, dict):
            raise BadArgumentError("AppTest packageDbsToAdd must be a hash, mapping section name to URL")
        if customization_point_values:
            if not isinstance(customization_point_values, dict):
                raise BadArgumentError("CustomizationPointValues must be a hash")
            for package in customization_point_values:
                values_for_package = customization_point_values[package]
                if not isinstance(values_for_package, dict):
                    raise BadArgumentError("Entries in CustomizationPointValues reference a hash of name-value paris")
        if checks is None:
            raise BadArgumentError('AppTest must provide at least a StateCheck for the virgin state')
        # checks must alternate StateCheck, StateTransition, StateCheck, ...
        # Fixed: the original advanced its counter with ``i += i+1``, so
        # every element after the first was treated as an odd position.
        for i, candidate in enumerate(checks):
            if i % 2:
                if not isinstance(candidate, StateTransitionBase):
                    raise BadArgumentError('Array of StateChecks and StateTransitions must alternate: expected '
                                           'StateTransition')
            else:
                if not isinstance(candidate, StateCheck):
                    raise BadArgumentError('Array of StateChecks and StateTransitions must alternate: expected '
                                           'StateCheck')
        if not len(checks) % 2:
            raise BadArgumentError('Array of StateChecks and StateTransitions must alternate and end with a '
                                   'StateCheck.')
        self.name = name
        self.description = description
        self.app_package_name = app_package_name
        self.accessory_package_names = accessory_package_names
        self.package_dbs_to_add = package_dbs_to_add
        self.fixed_test_context = fixed_test_context
        self.customization_point_values = customization_point_values
        self.states_transitions = checks

    def get_virgin_state_test(self):
        """Return the StateCheck for the pristine (just-deployed) state."""
        return self.states_transitions[0]

    def get_transition_from(self, to_state):
        """
        Return (transition, next_state) following *to_state*, or None if
        *to_state* is unknown or is the final state.

        Fixed: the original never advanced ``counter`` (it incremented
        an undefined name ``i``), so every lookup returned the pair at
        positions 1 and 2.
        """
        for counter, state in enumerate(self.states_transitions):
            if state == to_state:
                if counter + 2 < len(self.states_transitions):
                    return self.states_transitions[counter + 1], self.states_transitions[counter + 2]
                return None
        return None
class AbstractScaffold(object):
    """
    Base class for deployment scaffolds.  Subclasses implement
    invoke_on_target() and the file/IP helpers; this class builds the
    ubos-admin command lines for deploy/undeploy/update/backup/restore.
    """

    def __init__(self):
        self.verbose = 0       # verbosity level forwarded to ubos-admin
        self._is_ok = False    # set by subclasses once setup succeeds

    def setup(self, options):
        """Remember the requested verbosity from *options* (a dict or None)."""
        if options and "verbose" in options:
            self.verbose = options["verbose"]
        else:
            self.verbose = 0

    def is_ok(self):
        """Return whether the scaffold initialized successfully."""
        return self._is_ok

    def name(self):
        """Return a package-qualified name for this scaffold.

        Fixed: concatenating a str with a class object raised TypeError.
        """
        return __package__ + str(self.__class__)

    def deploy(self, site):
        """Deploy *site* (a site JSON dict) on the target; True on success."""
        # Fixed: json.dump writes to a file object; dumps returns the string.
        json_string = json.dumps(site)
        logging.info("Site JSON: %s", json_string)
        cmd = "sudo ubos-admin deploy --stdin "
        cmd += "--verbose" if self.verbose == 1 else ""
        return not self.invoke_on_target(cmd, json_string)

    def undeploy(self, site):
        """Undeploy *site* from the target; True on success."""
        json_string = json.dumps(site)
        logging.info("Site JSON: %s", json_string)
        cmd = "sudo ubos-admin undeploy "
        cmd += "--verbose " if self.verbose == 1 else ""
        cmd += "--siteid " + site["siteid"]
        return not self.invoke_on_target(cmd)

    def update(self):
        """Run 'ubos-admin update' on the target; True on success."""
        cmd = "sudo ubos-admin update "
        cmd += "--verbose" if self.verbose == 1 else ""
        return not self.invoke_on_target(cmd)

    def switch_channel_update(self, new_channel, verbose, cmd):
        """Switch the target's release channel, then run *cmd* (or an update)."""
        if not cmd:
            cmd = "sudo ubos-admin update "
            cmd += "--verbose" if self.verbose == 1 else ""
        # Fixed: the update command was appended directly to the channel
        # write on the same shell line; separate the two statements.
        script = "echo " + new_channel + " > /etc/ubos/channel\n"
        script += cmd
        # NOTE(review): out_p is a None placeholder — output capture via
        # an out-parameter cannot work in Python; confirm what subclass
        # implementations of invoke_on_target actually do with it.
        out_p = None
        ext = self.invoke_on_target("sudo /bin/bash ", script, out_p, out_p)
        if ext:
            if verbose:
                logging.error("Channel switch failed: script:\n" + script + "\n")
            else:
                logging.error("Channel switch failed: script:\n" + script + "\n" + str(out_p) + "\n")
        return not ext

    def backup(self, site):
        """Back up *site* on the target; return the remote file name or 0."""
        # Fixed: missing spaces and ';' produced an invalid shell command
        # ("backup--verbose ... $F echo $F"), and ``site-["siteid"]`` was a
        # subtraction, not a subscript.
        cmd = 'F=$(mktemp webapptest-XXXXX.ubos-backup);'
        cmd += ' sudo ubos-admin backup '
        cmd += "--verbose " if self.verbose == 1 else ""
        cmd += '--siteid ' + site["siteid"] + ' --force --out $F;'
        cmd += ' echo $F'
        # NOTE(review): passing a str as an output buffer cannot work in
        # Python (strings are immutable); subclasses must return output.
        file = ""
        ext = self.invoke_on_target(cmd, None, file)
        if not ext:
            file = file.strip()
            return file
        else:
            logging.error("Backup failed")
            return 0

    def backup_to_local(self, site, file_name):
        raise NotImplementedError("Must be overridden by child")

    def restore(self, site, identifier):
        """Restore *site* from backup *identifier*; True on success."""
        # Fixed: missing spaces between command words and options.
        cmd = 'sudo ubos-admin restore '
        cmd += "--verbose " if self.verbose == 1 else ""
        cmd += '--siteid ' + site["siteid"] + ' --in ' + identifier
        return not self.invoke_on_target(cmd)

    def restore_from_local(self, site, file_name):
        raise NotImplementedError("Must be overridden by child")

    def destroy_backup(self, site, identifier):
        """Delete backup file *identifier* on the target; True on success."""
        return not self.invoke_on_target('rm ' + identifier)

    def teardown(self):
        """Tear down the scaffold; default implementation does nothing."""
        return 0

    def invoke_on_target(self, cmd, stdin=None, stdout=None, stderr=None):
        """Run *cmd* on the target; return its exit code.  Must be overridden.

        Fixed: defaults added — callers throughout this class invoke it
        with one to four arguments.
        """
        raise NotImplementedError("Must be overridden by child")

    def get_target_ip(self):
        raise NotImplementedError("Must be overridden by child")

    def get_file_info(self, file_name, make_content_available):
        raise NotImplementedError("Must be overridden by child")

    def handle_impersonate_depot(self, ip):
        """
        Add (ip given) or remove (ip None) a depot.ubos.net entry in the
        target's /etc/hosts via a Perl script run on the target.
        Returns 1 on success, 0 on failure.
        """
        # Fixed: the conditional expression needed parentheses — with the
        # original precedence, a non-None ip dropped the whole script tail
        # and a None ip dropped the script header.
        cmd = """use strict;
use warnings;

use Ubos::Utils;

my $ip=""" + (ip if ip is not None else "undef") + """;
unless( -r '/etc/hosts' ) {
    print STDERR "Cannot read /etc/hosts\\n";
    exit 1;
}
my $etchosts = UBOS::Utils::slurpFile( '/etc/hosts' );
if( $etchosts ) {
    if( defined( $ip )) {
        unless( $etchosts =~ m!depot\.ubos\.net! ) {
            $etchosts .= <<ADD;
# webapptest added
$ip depot.ubos.net
ADD
            UBOS::Utils::saveFile( '/etc/hosts', $etchosts, 0644, 'root', 'root' );
        }
    } else {
        my $changed = 0;
        if( $etchosts =~ s!# webapptest added\s*!! ) {
            $changed = 1;
        }
        my $ipEsc = quotemeta( $ip );
        if( $etchosts =~ s!$ipEsc\s+depot\.ubos\.net!! ) {
            $changed = 1;
        }
        if( $changed ) {
            UBOS::Utils::saveFile( '/etc/hosts', $etchosts, 0644, 'root', 'root' );
        }
    }
    exit 0;
} else {
    print STDERR "/etc/hosts is empty. Not changing\n";
    exit 1;
}
1;"""
        # NOTE(review): out/err are opaque placeholders; output capture
        # must come from the subclass's invoke_on_target — confirm.
        out = object()
        err = object()
        if self.invoke_on_target('sudo /bin/bash -c /usr/bin/perl', cmd, out, err):
            logging.error("Failed to edit /etc/hosts file to add depot.ubos.net:\nout:{out}\nerr:{err}\ncmd:{cmd}"
                          .format(out=out, err=err, cmd=cmd))
            return 0
        #elif re.compile("/Respect the privacy of others/").match(err):
        #    logging.error("Failed to edit /etc/hosts file to add depot.ubos.net. sudo problem:",
        #                  out, err)
        #    return 0
        return 1

    def install_additional_package_dbs(self, repos: dict):
        """Add pacman repository entries for *repos* (name -> URL) and update."""
        # Fixed: compare small ints with == (``is`` is unreliable).
        if len(repos) == 0:
            return 1
        # Fixed: logging uses lazy %s formatting, not extra positional args.
        logging.info("Installing additional package dbs: %s", list(repos.keys()))
        # Fixed: missing ';' after "use strict".
        cmd = """
use strict;
use warnings;

use UBOS::Utils;
"""
        for name in repos.keys():
            url = repos[name]
            # Fixed: the file name used Perl-style "$name" inside a Python
            # str.format template; use the {name} placeholder instead.
            cmd += """
UBOS::Utils::saveFile( '/etc/pacman.d/repositories.d/{name}', <<'DATA' );
[{name}]
Server = {url}
DATA
""".format(name=name, url=url)
        cmd += """
UBOS::Utils::myexec( "ubos-admin update" );

1;
"""
        out = object()
        err = object()
        if self.invoke_on_target('sudo /bin/bash -c /usr/bin/perl', cmd, out, err):
            logging.error("Failed to add repositories:\nout:{out}\nerr:{err}\ncmd:{cmd}"
                          .format(out=out, err=err, cmd=cmd))
            return 0
        return 1
class AbstractTestPlan(object):
    """Base class for test plans; holds the WebAppTest being executed."""

    def __init__(self, test, options):
        # *options* is accepted for subclass use; only *test* is stored here.
        if not test:
            raise Exception('Must provide test')
        self.test = test

    def run(self, scaffold=False, interactive=False, verbose=False):
        """Execute the plan; subclasses must override."""
        raise NotImplementedError("Must override ubos.webapptest.AbstractTestPlan::run")

    def get_test(self):
        """Return the WebAppTest this plan executes."""
        return self.test
class AbstractSingleSiteTestPlan(AbstractTestPlan):
    """
    Test plan that deploys exactly one site: either accepts ready-made
    site/appconfig JSON via *options*, or synthesizes them from the
    WebAppTest (random ids, generated or fixed context path).
    """

    def __init__(self, test, options, tls_data):
        # Fixed: the original forgot to pass self to the base initializer.
        AbstractTestPlan.__init__(self, test, options)
        hostname = None
        if "siteJson" in options:
            if "appConfigJson" not in options:
                raise Exception("If specifying siteJson, you also need to specify appConfigJson")
            if "hostname" not in options:
                raise Exception("If specifying siteJson, you also need to specify hostname")
            if "context" not in options:
                raise Exception("If specifying siteJson, you also need to specify context")
            self.site_json = options['siteJson']
            # Fixed: the key validated above is 'appConfigJson', not 'siteConfigJson'.
            self.app_config_json = options['appConfigJson']
        elif "appConfigJson" in options:
            raise Exception("If specifying appConfigJson, you also need to specify siteJson")
        else:
            if "hostname" in options:
                # Fixed: Perl-style m!...! delimiters are not Python regex syntax.
                if options["hostname"] != "*" and not re.match(r"^[-.a-z0-9_]+$", options["hostname"]):
                    raise Exception("Test plan hostname parameter must be a valid hostname, or *")
                hostname = options["hostname"]

            # Fixed throughout this branch: WebAppTest exposes plain
            # attributes, which the original (ported from Perl) called
            # as getter methods.
            context = test.fixed_test_context
            if context:
                if "context" in options:
                    logging.warning("Context " + options["context"] +
                                    " provided as argument to test plan ignored: WebAppTest requires fixed test context"
                                    )
            elif "context" in options:
                context = options["context"]
            else:
                context = "/ctxt-" + random_hex(8)
            if context != '' and not re.match(r"^/[-_.a-z0-9%]+$", context):
                raise Exception("'Context parameter must be a single-level relative path starting with a slash, "
                                "or be empty'")

            self.app_config_json = {
                'context': context,
                'appconfigid': "a" + random_hex(40),
                'appid': test.app_package_name
            }
            if test.accessory_package_names:
                # Fixed: the original wrapped the list in another list.
                self.app_config_json['accessoryids'] = list(test.accessory_package_names)

            cust_point_values = test.customization_point_values
            if cust_point_values:
                # Fixed: list.append returns None, so the original package
                # list was always None; also merge per-package entries
                # instead of overwriting the whole dict each iteration.
                packages = list(test.accessory_package_names) + [test.app_package_name]
                for package in packages:
                    if package in cust_point_values:
                        json_hash = {name: {"value": value}
                                     for name, value in cust_point_values[package].items()}
                        self.app_config_json.setdefault("customizationpoints", {})[package] = json_hash

            admin = {
                "userid": "testuser",
                "username": "Test User",
                "credential": 's3cr3t',
                "email": 'testing.ignore.ubos.net'
            }
            self.site_json = {
                'siteid': 's' + random_hex(40),
                'hostname': hostname,
                'admin': admin,
                'appconfigs': [self.app_config_json]
            }
        if tls_data is not None:
            self.site_json["tls"] = tls_data

    def run(self, scaffold=False, interactive=False, verbose=False):
        raise NotImplementedError("Must override ubos.webapptest.AbstractSingleSiteTestPlan::run")

    def protocol(self):
        """Return 'https' when the site carries TLS data, else 'http'."""
        if "tls" in self.site_json:
            return "https"
        else:
            return "http"

    def hostname(self):
        return self.site_json["hostname"]

    def context(self):
        return self.app_config_json["context"]

    def site_id(self):
        # Fixed: the original had no return statement (always None) and
        # read the appconfig id; the site id lives in the site JSON.
        return self.site_json["siteid"]

    def get_site_json(self):
        return self.site_json

    def set_site_json(self, json):
        self.site_json = json

    def get_app_config_json(self):
        return self.app_config_json

    def set_app_config_json(self, json):
        self.app_config_json = json

    def get_admin_data(self):
        """Return the admin-credentials dict of the generated site."""
        return self.site_json["admin"]
class TestContext(object):
    """Carries the scaffold, test plan and verbosity through a test run."""

    # Seconds to wait for the deployed site to become ready.
    max_wait_till_ready = 60

    def __init__(self, scaffold, test_plan, verbose):
        self.scaffold = scaffold
        self.test_plan = test_plan
        self.verbose = verbose
# TestUtils
def random_hex(length):
    """
    Return *length* random lowercase hex characters (length <= 30).

    Fixed: the original computed the string but never returned it,
    so every caller received None.
    """
    return ('%030x' % random.randrange(16 ** 30))[:length]
def find_app_tests_in_directory(directory):
    """
    Import every *.py file in *directory* and collect the classes that
    directly subclass AbstractTestPlan, keyed by class name.
    """
    if not os.path.isdir(directory):
        raise Exception('Not a directory')
    app_test_candidates = os.listdir(directory)
    sys.path.append(directory)
    # Fixed: the unescaped '.' made the pattern match any file whose name
    # merely ends in "py" (e.g. "happy"); require a literal ".py" suffix.
    pysearchre = re.compile(r'\.py$', re.IGNORECASE)
    pluginfiles = filter(pysearchre.search, app_test_candidates)
    plugins = map(lambda fp: os.path.splitext(fp)[0], pluginfiles)
    modules = dict()
    for plugin in plugins:
        if not plugin.startswith("__"):
            import_module(plugin)
            for member, cls in inspect.getmembers(sys.modules[plugin], inspect.isclass):
                # Only direct subclasses of AbstractTestPlan count.
                for base in cls.__bases__:
                    if base is AbstractTestPlan:
                        modules[member] = cls
    # (Leftover debug print of `modules` removed.)
    return modules
def class_info_to_dict(info):
    """Convert inspect.getmembers-style (name, class) pairs into a dict."""
    return {class_name: cls for class_name, cls in info}
def find_commands():
    """Return every class defined in the ubos.commands package, keyed by name."""
    import ubos.commands
    members = inspect.getmembers(sys.modules["ubos.commands"], inspect.isclass)
    return class_info_to_dict(members)
def find_testplans():
    """Return every class defined in the ubos.testplans package, keyed by name."""
    import ubos.testplans
    members = inspect.getmembers(sys.modules["ubos.testplans"], inspect.isclass)
    return class_info_to_dict(members)
def find_scaffolds():
    """Return every class defined in the ubos.scaffolds package, keyed by name."""
    import ubos.scaffolds
    members = inspect.getmembers(sys.modules["ubos.scaffolds"], inspect.isclass)
    return class_info_to_dict(members)
def find_testplan(name):
    """Look up a test-plan class by *name*; None when unknown."""
    return find_testplans().get(name)
def find_scaffold(name: str):
    """Look up a scaffold class by *name*; None when unknown."""
    return find_scaffolds().get(name)
def find_app_test_in_directory(directory, name):
    """Return the app-test class called *name* discovered in *directory*, or None."""
    return find_app_tests_in_directory(directory).get(name)
def ask_user(question=None, interactive=True, success_of_last_step=True, success_of_plan_so_far=True):
    """Ask the user how to proceed after a test step.

    Args:
        question: optional text shown before the step outcome
        interactive: when False, no prompt is shown and defaults are returned
        success_of_last_step: outcome of the step just executed
        success_of_plan_so_far: unused; kept for interface compatibility

    Returns:
        (repeat, abort, quit) flags as 0/1 ints. Non-interactively this is
        (0, 0, 0) on success and (0, 0, 1) on failure.
    """
    repeat = 0
    abort = 0
    qt = int(not success_of_last_step)
    if interactive:
        outcome = "success" if success_of_last_step else "failure"
        # BUG FIX: the original built this text with `+` around a bare
        # ternary; operator precedence made the conditional swallow most
        # of the string, producing a truncated prompt.
        if question:
            full_question = "{} ({}).".format(question, outcome)
        else:
            full_question = "Last step {}.".format(outcome)
        full_question += " C(ontinue)/R(epeat)/A(bort)/Q(uit)? "
        user_input = input(full_question)
        answer = user_input.strip().lower()
        # BUG FIX: `user_input.lower() is "c"` compared object identity,
        # not content; use equality. Whitespace tolerance now matches the
        # regex-based branches of the original.
        if answer == "c":
            repeat, abort, qt = 0, 0, 0
        elif answer == "r":
            repeat, abort, qt = 1, 0, 0
        elif answer == "a":
            repeat, abort, qt = 0, 1, 0
        elif answer == "q":
            repeat, abort, qt = 0, 0, 1
    return repeat, abort, qt
|
from django.shortcuts import render, redirect
from .models import League, Team, Player
from django.db.models import Q, Count
from . import team_maker
def index(request):
    """Render the leagues index page with a battery of ORM practice queries.

    Each context entry is an exercise query over League / Team / Player.
    """
    context = {
        # Leagues by exact sport / name substring matches.
        "baseball_leagues": League.objects.filter(sport="Baseball"),
        "womens_leagues": League.objects.filter(name__contains="Women"),
        "sport_hockey": League.objects.filter(sport__contains="Hockey"),
        # "sport_not_soccer": League.objects.exclude(sport__icontains="Football", sport__icontains="Soccer"),
        # Intersection of two exclusions: leagues that are neither Football nor Soccer.
        "sport_not_soccer": League.objects.exclude(sport="Football") & League.objects.exclude(sport="Soccer"),
        "Conference_leagues": League.objects.filter(name__icontains="conference"),
        # NOTE(review): case-sensitive __contains -- "atlantic" will not match
        # "Atlantic"; confirm whether __icontains was intended.
        "atlantic_leagues": League.objects.filter(name__contains="atlantic"),
        "dallas_teams": Team.objects.filter(location="Dallas"),
        "raptor_teams": Team.objects.filter(team_name="Raptors"),
        "city_teams": Team.objects.filter(location__contains="City"),
        "t_teams": Team.objects.filter(team_name__startswith="T"),
        # Alphabetical / reverse-alphabetical orderings.
        "az_teams": Team.objects.all().order_by("location"),
        "za_teams": Team.objects.all().order_by("-team_name"),
        "cooper_players": Player.objects.filter(last_name="Cooper"),
        "joshua_players": Player.objects.filter(first_name="Joshua"),
        "cooper_not_josh": Player.objects.filter(last_name="Cooper").exclude(first_name="Joshua"),
        # Q objects express OR conditions.
        "alex_or_wyatt": Player.objects.filter(Q(first_name="Alexander") | Q(first_name="Wyatt")),
        # PART 2 -- queries spanning relations #
        "ateams": Team.objects.filter(league__name='Atlantic Soccer Conference'),
        "Bplayers": Player.objects.filter(curr_team__team_name='Penguins'),
        "Iplayers": Player.objects.filter(curr_team__league_id=2),
        "Lplayers": Player.objects.filter(curr_team__league_id=7) & Player.objects.filter(last_name="Lopez"),
        "fplayers": Player.objects.filter(curr_team__league_id=7) | Player.objects.filter(curr_team__league_id=9),
        'fteams': Team.objects.filter(league__sport="Football"),
        'steams': Team.objects.filter(all_players__first_name="Sophia"),
        'sleague': League.objects.filter(teams__id=25) | League.objects.filter(teams__id=4) | League.objects.filter(teams__id=32),
        "notfplayers": Player.objects.filter(last_name='Flores') & Player.objects.filter(~Q(curr_team_id=10)),
        'seteams': Team.objects.filter(all_players__id=115),
        "maniplayers": Player.objects.filter(all_teams__id=4),
        "vikiplayers": Player.objects.filter(all_teams__id=40),
        'jacteams': Team.objects.filter(all_players__id=151)[:3],
        "atplayers": Player.objects.filter(first_name="Joshua") & Player.objects.filter(all_teams__league_id=3),
        # Aggregations: teams with more than 12 players / teams ordered by roster size.
        "playerNums": Team.objects.annotate(nplayer=Count('all_players')).filter(nplayer__gt=12),
        "allplayerteam": Team.objects.annotate(nplayer=Count('all_players')).order_by('nplayer')
    }
    return render(request, "leagues/index.html", context)
def make_data(request):
    """Populate the database with random leagues, teams and players, then go home."""
    for generate, count in ((team_maker.gen_leagues, 10),
                            (team_maker.gen_teams, 50),
                            (team_maker.gen_players, 200)):
        generate(count)
    return redirect("index")
|
from datetime import datetime, timedelta
import unittest
from app import app, db
from app.models import User, Party
class UserModelCase(unittest.TestCase):
    """Unit tests for the User and Party models (hashing, avatar, follow graph)."""

    def setUp(self):
        # BUG FIX: the key was misspelt 'SQL_ALCHEMY_DATABASE_URI', so the
        # setting was ignored and the tests ran against the app's real
        # configured database instead of an in-memory SQLite one.
        app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()

    def test_password_hashing(self):
        # The constructor hashes the supplied password.
        u = User({'username':'jawnboy', 'email':'jawnboy@yahoo.com','password':'iamjawnboy'})
        self.assertFalse(u.check_password('dog'))
        self.assertTrue(u.check_password('iamjawnboy'))

    def test_avatar(self):
        # Gravatar URL is the MD5 of the email with identicon fallback.
        u=User({'username':'john', 'email':'john@example.com','password':'iamjohn'})
        self.assertEqual(u.avatar(128), ('https://www.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?d=identicon&s=128'))

    def test_follow(self):
        u1 = User({'username':'jawnboy', 'email':'jawnboy@yahoo.com','password':'iamjawnboy'})
        u2 = User({'username':'nicky6', 'email':'nicky6@yahoo.com','password':'iamnicky6'})
        db.session.add(u1)
        db.session.add(u2)
        self.assertEqual(u1.followed.all(), [])
        self.assertEqual(u1.followers.all(), [])
        u1.follow(u2)
        db.session.commit()
        self.assertTrue(u1.is_following(u2))
        self.assertEqual(u1.followed.count(),1)
        self.assertEqual(u1.followed.first().username, 'nicky6')
        self.assertEqual(u2.followers.count(),1)
        self.assertEqual(u2.followers.first().username, 'jawnboy')
        # Unfollowing fully reverses the relationship.
        u1.unfollow(u2)
        db.session.commit()
        self.assertFalse(u1.is_following(u2))
        self.assertEqual(u1.followed.count(), 0)
        self.assertEqual(u2.followers.count(), 0)

    def test_follow_posts(self):
        # create four users
        u1 = User({'username':'jawnboy', 'email':'jawnboy@yahoo.com','password':'iamjawnboy'})
        u2 = User({'username':'nicky6', 'email':'nicky6@yahoo.com','password':'iamnicky6'})
        u3 = User({'username':'john', 'email':'john@yahoo.com','password':'iamjohn'})
        u4 = User({'username':'susan', 'email':'susan@yahoo.com','password':'iamsusan'})
        db.session.add_all([u1, u2, u3, u4])
        # create four parties, one owned by each user
        p1 = Party({'title':'jawnboy with it','owner_id':1})
        p2 = Party({'title':'nicky6 with it','owner_id':2})
        p3 = Party({'title':'john with it','owner_id':3})
        p4 = Party({'title':'susan with it', 'owner_id':4})
        db.session.add_all([p1,p2,p3,p4])
        db.session.commit()
        # set up the followers
        u1.follow(u2)  # jawnboy follows nicky6
        u1.follow(u4)  # jawnboy follows susan
        u2.follow(u3)  # nicky6 follows john
        u3.follow(u4)  # john follows susan
        db.session.commit()
        # check the followed parties of each user
        f1 = u1.followed_parties().all()
        f2 = u2.followed_parties().all()
        f3 = u3.followed_parties().all()
        f4 = u4.followed_parties().all()
        # make sure they follow the right parties (own party included, newest first)
        self.assertEqual(f1, [p4,p2,p1])
        self.assertEqual(f2, [p3,p2])
        self.assertEqual(f3, [p4,p3])
        self.assertEqual(f4, [p4])
if __name__ == '__main__':
    # verbosity=2 lists every test by name as it runs
    unittest.main(verbosity=2)
|
# Generated by Django 2.2.5 on 2020-03-05 14:02
from django.conf import settings
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated by Django 2.2.5 -- creates the NotificationsDetail table.

    Generated code: avoid hand-editing beyond comments.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('notifications', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='NotificationsDetail',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Single-character code for the notification category.
                ('notification_type', models.CharField(choices=[('1', 'POST_INVOLVEMENT'), ('2', 'POST_COMMENT'), ('3', 'POST_LIKE'), ('4', 'POST_NEW'), ('5', 'QUESTION_NEW'), ('6', 'UPDATE_FOLLOWED_USER'), ('7', 'Chat')], max_length=1)),
                # Arbitrary JSON payload describing the notification.
                ('notification_context', django.contrib.postgres.fields.jsonb.JSONField()),
                ('notification_sender_model_name', models.CharField(max_length=50)),
                # Sender and recipient, both FKs to the user model.
                ('notification_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notification_by_user', to=settings.AUTH_USER_MODEL)),
                ('notification_for', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notification_for_users', to=settings.AUTH_USER_MODEL)),
            ],
        )
    ]
|
from sortImpl.merge_sort import MergeSortStategy
from doSort import DoSort
def main():
    """Run a sort through the DoSort context using the merge-sort strategy."""
    sorter = DoSort(MergeSortStategy())
    sorter.do_sort()


if __name__ == '__main__':
    main()
#!/usr/local/bin/python2.7
# encoding: utf-8
import cv2
# Haar-cascade face and eye detection on the default webcam.
faceDetector = cv2.CascadeClassifier("../../data/opencv/haarcascades/haarcascade_frontalface_default.xml")
eyeDetector = cv2.CascadeClassifier("../../data/opencv/haarcascades/haarcascade_eye.xml")
cap = cv2.VideoCapture(0)
while True:
    ret, img = cap.read()
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    faces = faceDetector.detectMultiScale(gray, 1.3, 5)
    for (x, y, w, h) in faces:
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
        # BUG FIX: the column slice must use the width w, not the height h
        # (the original used x:x+h, cropping a wrong region for non-square
        # detections).
        roiGray = gray[y:y + h, x:x + w]
        roiColor = img[y:y + h, x:x + w]
        # Search for eyes only inside the detected face region.
        eyes = eyeDetector.detectMultiScale(roiGray)
        for (ex, ey, ew, eh) in eyes:
            cv2.rectangle(roiColor, (ex, ey), (ex + ew, ey + eh), (0, 255, 0), 2)
    cv2.imshow('img', img)
    # Esc (27) quits the loop.
    k = cv2.waitKey(30) & 0xff
    if k == 27:
        break
cap.release()
cv2.destroyAllWindows()
# Session 13A post-class test problem 2
# http://people.stern.nyu.edu/adamodar/pdfiles/eqnotes/postclass/session13btest.pdf
def market_value(shares):
    """Return the total market value of a set of holdings.

    Args:
        shares: iterable of (number_of_shares, price_per_share) pairs

    Returns:
        The sum of count * price over all holdings.

    The original body was not syntactically valid (``s[][1]``) and
    returned an undefined name ``vps``; this implements the evident
    intent of summing quantity times price.
    """
    return sum(count * price for count, price in shares)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(
description="Estimate the value per share using the option value approach.")
parser.add_argument("-e", "--equity", help="Equity value",
type=float, default=500)
parser.add_argument("-so", "--shares_outstanding",
help="Number of shares outstanding",
type=int, default=100)
# parser.add_argument("-sp", "--share_price",
# help="Share price",
# type=float, default=5)
parser.add_argument("-os", "--options_outstanding",
help="Number options outsanding",
type=int, default=25)
parser.add_argument("-oep", "--options_exercise_price",
help="Exercise price of the options",
type=int, default=1)
args = parser.parse_args()
print(vps_options_ov(args.equity, args.shares_outstanding,
args.options_outstanding, args.options_exercise_price))
# 4.75
|
from PIL import Image
from InstagramAPI import InstagramAPI
# NOTE(review): account credentials are hard-coded in source -- rotate them
# and load from environment variables or a config file instead.
user,pwd = 'the_fourth_sangam', '@wes0meme-insta'
# Rebinds the imported class name to the client instance.
InstagramAPI = InstagramAPI(user,pwd)
InstagramAPI.login() # login
# Re-encode the PNG as JPEG, which the upload endpoint expects.
im = Image.open('custom2.png')
im.save('poem1.jpg')
photo_path ='poem1.jpg'
caption = """
Haiku by Kobayashi Issa
Dew evaporates
And all our world
is dew
So dear,
So fresh,
So fleeting
Kobayashi Issa (1763–1828) was a Japanese poet and lay Buddhist priest known for his haiku poems and journals. He is better known as simply Issa, a pen name meaning Cup-of-tea. He is regarded as one of the four haiku masters in Japan, along with Basho, Buson and Shiki.
He lost two children at a young age, and many of his poems reflect the sorrow and fleeting nature of our existence.
#Kobayashi #Issa #haiku #japan #poetry #poetsofinstagram #poem #instapic #love #instagood #photooftheday #beautiful #art #instadaily #life #amazing #instamood #instagram #motivation
Photo by @ShyamValsan"""
InstagramAPI.uploadPhoto(photo_path, caption = caption)
# NOTE(review): Python 2 print statement -- this script only runs on Python 2.
print InstagramAPI.LastResponse.text
InstagramAPI.logout() # logout
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-06-10 11:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated by Django 1.9.6 -- adds Author and links it to Reference.

    Generated code: avoid hand-editing beyond comments.
    """

    dependencies = [
        ('mozartweb', '0014_event_bc_key'),
    ]

    operations = [
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=200, verbose_name='Pr\xe9nom')),
                ('last_name', models.CharField(max_length=200, verbose_name='Nom')),
            ],
            options={
                'ordering': ('last_name',),
                'verbose_name': 'Auteur',
                'verbose_name_plural': 'Auteurs',
            },
        ),
        migrations.AddField(
            model_name='reference',
            name='author',
            # NOTE(review): null=True has no effect on ManyToManyField (Django
            # warns about it) -- harmless, kept exactly as generated.
            field=models.ManyToManyField(blank=True, null=True, to='mozartweb.Author', verbose_name='Auteur(s)'),
        ),
    ]
|
from collections import defaultdict
# Group 1-based line numbers by name, then answer membership queries.
total_names, total_queries = map(int, input().split())
positions = defaultdict(list)
for index in range(total_names):
    name = input()
    positions[name].append(str(index + 1))
for _ in range(total_queries):
    query = input()
    # An unknown name joins to the empty string, so -1 is printed instead.
    print(' '.join(positions[query]) or -1)
|
s1 = "hello"
s2 = input()
count=0
for i in s2:
if(count<=4 and i==s1[count]):
count+=1
if(count==5):
print("YES")
else:
print("NO")
|
__author__ = 'Grzegorz'
# unused
# class PlayerInput():
# def __init__(self, neighbours = None):
# """
# Args:
# neighbours: set of AgarPlayer objects
# """
# self.neighbourhood = neighbours if neighbours else set()
#
# def add_neighbour(self, neighbour_player):
# self.neighbourhood.add(neighbour_player)
|
import plotly.plotly as py
import plotly.graph_objs as go
import plotly.offline as pltoff
# Two line traces over the same x axis; the first bridges its None gaps,
# the second leaves them as breaks in the line.
first_trace = go.Scatter(
    x=[1, 2, 3, 4, 5, 6, 7],
    y=[10, None, 2, 15, None, 26, 11],
    name='first',
    connectgaps=True,
)
second_trace = go.Scatter(
    x=[1, 2, 3, 4, 5, 6, 7],
    y=[5, 10, None, 7, None, 14, 6],
    name='second',
)
pltoff.plot([first_trace, second_trace], filename='my scatter.html')
|
# Generated by Django 2.1.1 on 2018-09-29 20:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated by Django 2.1.1 -- shortens joueur.nom to 30 characters.

    Generated code: avoid hand-editing beyond comments.
    """

    dependencies = [
        ('forum', '0012_joueur'),
    ]

    operations = [
        migrations.AlterField(
            model_name='joueur',
            name='nom',
            field=models.CharField(max_length=30),
        ),
    ]
|
#!/usr/bin/env python3
# So program can be run from Brickman
from ev3dev.ev3 import *
from time import sleep
# Drive the motor on outB in 1000-degree relative moves while the flag is set.
m = LargeMotor('outB')
l = LargeMotor('outA')
btn = 1
# BUG FIX: the loop condition tested an undefined name `tmp`, which raised
# NameError immediately; the flag actually defined above is `btn`.
while (btn == 1):
    m.run_to_rel_pos(position_sp=1000, speed_sp=900, stop_action="hold")
    print("set speed (speed_sp) = " + str(m.speed_sp))
    print("actual speed = " + str(m.speed))
|
from .db import db
class Transaction(db.Model):
    """A buy/sell order of planet shares placed by a user."""

    __tablename__ = "transactions"

    id = db.Column(db.Integer, primary_key=True)
    # Owning user (FK to users.id).
    userId = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False)
    # Planet being traded (FK to planets.id).
    planetId = db.Column(db.Integer, db.ForeignKey(
        "planets.id"), nullable=False)
    # Order kind, at most 4 chars -- presumably "buy"/"sell"; confirm with callers.
    orderType = db.Column(db.String(4), nullable=False)
    shares = db.Column(db.Integer, nullable=False)
    # asdecimal=False makes SQLAlchemy return plain floats instead of Decimal.
    price_paid = db.Column(db.Numeric(asdecimal=False), nullable=False)

    user = db.relationship("User", back_populates="transactions")
    planets = db.relationship("Planet", back_populates="transactions")

    def to_dict(self):
        """Return a JSON-serializable representation of this transaction."""
        return {
            "id": self.id,
            "userId": self.userId,
            "planetId": self.planetId,
            "orderType": self.orderType,
            "shares": self.shares,
            "price_paid": self.price_paid,
        }
|
# Generated by Django 2.0.2 on 2018-02-26 01:03
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated by Django 2.0.2 -- adds AlphaVantageCurrencySource and
    drops the `function` field from alphavantagestocksource.

    Generated code: avoid hand-editing beyond comments.
    """

    dependencies = [
        ('datasource', '0004_auto_20180225_1628'),
    ]

    operations = [
        migrations.CreateModel(
            name='AlphaVantageCurrencySource',
            fields=[
                ('datasourcemixin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='datasource.DataSourceMixin')),
                # NOTE(review): a live-looking API key is committed as the
                # default value here -- rotate it and source it from settings.
                ('api_key', models.CharField(default='P38D2XH1GFHST85V', max_length=32)),
                ('from_symbol', models.CharField(max_length=32)),
                ('to_symbol', models.CharField(default='CAD', max_length=32)),
            ],
            options={
                'abstract': False,
                'base_manager_name': 'objects',
            },
            bases=('datasource.datasourcemixin',),
        ),
        migrations.RemoveField(
            model_name='alphavantagestocksource',
            name='function',
        ),
    ]
|
import logging
import socket
import stat
from os.path import expanduser
import paramiko
from antareslauncher.remote_environnement.iconnection import IConnection
class SshConnection(IConnection):
    """SSH/SFTP access to a remote server (thin wrapper around paramiko)."""

    def __init__(self, config: dict = None):
        """
        Args:
            config: Dictionary containing the "hostname" (name or IP), "username", "port" (default is 22),
            "password" (not compulsory if private_key_file is given), "private_key_file": path to private rsa key

        Raises:
            IOError: if no config dictionary is given
        """
        super(SshConnection, self).__init__()
        self.logger = logging.getLogger(__name__ + "." + __class__.__name__)
        self.__client = None
        self.__home_dir = None
        self.timeout = 10
        # BUG FIX: always define private_key. With password-only
        # authentication the original never set this attribute, and
        # _connect() crashed with AttributeError on `if self.private_key:`.
        self.private_key = None
        if config:
            self.logger.info("Loading ssh connection from config dictionary")
            self.__init_from_config(config)
        else:
            raise IOError
        self.initialize_home_dir()
        self.logger.info(
            f"Connection created with host = {self.host} and username = {self.username}"
        )

    def __initialise_public_key(self, key_file_name, key_password):
        """Initialises self.private_key from a key file.

        Tries Ed25519 first, then RSA (with the optional passphrase).

        Args:
            key_file_name: The file name of the private key
            key_password: Optional passphrase protecting the key file

        Returns:
            True if a valid key was found, False otherwise
        """
        try:
            self.private_key = paramiko.Ed25519Key.from_private_key_file(
                filename=key_file_name
            )
            return True
        except paramiko.SSHException:
            try:
                self.private_key = paramiko.RSAKey.from_private_key_file(
                    filename=key_file_name, password=key_password
                )
                return True
            except paramiko.SSHException:
                self.private_key = None
                return False

    def __init_from_config(self, config: dict):
        """Populate connection settings from the config dictionary.

        Raises:
            ValueError: if neither a key file nor a password is supplied
        """
        self.host = config.get("hostname", "")
        self.username = config.get("username", "")
        self.port = config.get("port", 22)
        self.password = config.get("password", None)
        key_password = config.get("key_password", None)
        key_file = config.get("private_key_file", None)
        if key_file:
            key_file_path = expanduser(key_file)
            self.__initialise_public_key(
                key_file_name=key_file_path, key_password=key_password
            )
        elif self.password is None:
            self.logger.debug("self.password is None")
            raise ValueError

    def initialize_home_dir(self):
        """Initializes self.__home_dir with the home directory retrieved by running "echo $HOME" on the
        remote server
        """
        output, _ = self.execute_command("echo $HOME")
        self.__home_dir = str(output).split()[0]

    @property
    def home_dir(self):
        """
        Returns:
            The home directory of the remote server
        """
        return self.__home_dir

    def _connect(self):
        """Opens the connection to remote server

        Returns:
            True if the connection was established, False otherwise
        """
        try:
            # Paramiko.SSHClient can be used to make connections to the remote server and transfer files
            self.__client = paramiko.SSHClient()
            # Parsing an instance of the AutoAddPolicy to set_missing_host_key_policy() changes it to allow any host.
            self.__client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            # Connect to the server, preferring key-based authentication.
            if self.private_key:
                self.__client.connect(
                    hostname=self.host,
                    port=self.port,
                    username=self.username,
                    pkey=self.private_key,
                    timeout=self.timeout,
                    allow_agent=False,
                )
            # look_for_keys=False disable searching for discoverable private key files in ~/.ssh/
            else:
                self.__client.connect(
                    hostname=self.host,
                    port=self.port,
                    username=self.username,
                    password=self.password,
                    timeout=self.timeout,
                    allow_agent=False,
                    look_for_keys=False,
                )
        except paramiko.AuthenticationException:
            self.logger.exception(
                f"paramiko.AuthenticationException: {paramiko.AuthenticationException}"
            )
            result_flag = False
            # BUG FIX: close the half-open client on every failure path --
            # the original closed it only in the socket.error handler.
            self.__client.close()
        except paramiko.SSHException:
            self.logger.exception(f"paramiko.SSHException: {paramiko.SSHException}")
            result_flag = False
            self.__client.close()
        except socket.timeout:
            self.logger.exception(f"socket.timeout: {socket.timeout}")
            result_flag = False
            self.__client.close()
        except socket.error:
            self.logger.exception(f"socket.error: {socket.error}")
            result_flag = False
            self.__client.close()
        else:
            result_flag = True
        return result_flag

    def execute_command(self, command: str):
        """Executes a command on the remote host. Puts stderr and stdout in
        self.ssh_error and self.ssh_output respectively

        Args:
            command: String containing the command that will be executed through the ssh connection

        Returns:
            output: The standard output of the command

            error: The standard error of the command
        """
        output = None
        error = None
        self.logger.info(f"Executing command on remote server: {command}")
        try:
            if self._connect():
                stdin, stdout, stderr = self.__client.exec_command(command, timeout=30)
                output = stdout.read().decode("utf-8")
                error = stderr.read().decode("utf-8")
                self.__client.close()
        except socket.timeout:
            error = f"Command timed out {command}"
            self.__client.close()
        except paramiko.SSHException:
            error = f"Failed to execute the {command}"
            self.__client.close()
        self.logger.info(f"Command output:\nOutput: {output}\nError: {error}")
        return output, error

    def upload_file(self, src: str, dst: str):
        """Uploads a file to a remote server via sftp protocol

        Args:
            src: Local file to upload

            dst: Remote directory where the file will be uploaded

        Returns:
            True if the file has been uploaded, False otherwise
        """
        result_flag = True
        try:
            if self._connect():
                self.logger.info(f"Uploading file {src} to remote directory {dst}")
                sftp_client = self.__client.open_sftp()
                sftp_client.put(src, dst)
                sftp_client.close()
            else:
                result_flag = False
        except paramiko.SSHException:
            self.logger.debug("Paramiko SSH Exception")
            result_flag = False
        except IOError:
            self.logger.debug("IO Error")
            result_flag = False
        finally:
            # BUG FIX: close the connection on failure paths too (the
            # original closed it only when no exception was raised).
            self.__client.close()
        return result_flag

    def download_file(self, src: str, dst: str):
        """Downloads a file from a remote server via sftp protocol

        Args:
            src: Remote file to download

            dst: Local directory where the file will be downloaded, dst must be full path + filename.txt

        Returns:
            True if the file has been downloaded, False otherwise
        """
        result_flag = False
        try:
            if self._connect():
                self.logger.info(
                    f'Downloading remote file "{src}" to directory "{dst}"'
                )
                sftp_client = self.__client.open_sftp()
                sftp_client.get(src, dst)
                sftp_client.close()
                result_flag = True
        except paramiko.SSHException:
            self.logger.debug("Paramiko SSH Exception")
            result_flag = False
        finally:
            # BUG FIX: release the connection on failure paths as well.
            self.__client.close()
        return result_flag

    def check_remote_dir_exists(self, dir_path):
        """Checks if a remote path is a directory

        Args:
            dir_path: Remote path

        Returns:
            True if the directory exists, False otherwise

        Raises:
            IOError if the path exists and is a file
        """
        if self._connect():
            sftp_client = self.__client.open_sftp()
            try:
                self.logger.info(f"Checking remote dir {dir_path} exists")
                sftp_stat = sftp_client.stat(dir_path)
                if stat.S_ISDIR(sftp_stat.st_mode):
                    return True
                else:
                    raise IOError
            except FileNotFoundError:
                self.logger.debug("FileNotFoundError")
                return False
            finally:
                sftp_client.close()
                # BUG FIX: also close the ssh client itself; every call
                # opens a fresh connection, so nothing relies on it staying open.
                self.__client.close()
        else:
            return False

    def check_file_not_empty(self, file_path):
        """Checks if a remote file exists and is not empty

        Args:
            file_path: Path on the remote server

        Returns:
            True if file exists and is not empty, False otherwise

        Raises:
            IOError if path exists and it is a directory
        """
        if self._connect():
            sftp_client = self.__client.open_sftp()
            try:
                self.logger.info(f'Checking remote file "{file_path}" not empty')
                sftp_stat = sftp_client.stat(file_path)
                if stat.S_ISREG(sftp_stat.st_mode):
                    if sftp_stat.st_size > 0:
                        return True
                    else:
                        return False
                else:
                    raise IOError
            except FileNotFoundError:
                self.logger.debug("FileNotFoundError")
                return False
            finally:
                sftp_client.close()
                self.__client.close()
        else:
            return False

    def make_dir(self, dir_path):
        """Creates a remote directory if it does not exist yet

        Args:
            dir_path: Remote path of the directory that will be created

        Returns:
            True if path exists or the directory is successfully created, False otherwise
        """
        try:
            if self._connect():
                sftp_client = self.__client.open_sftp()
                try:
                    self.logger.info(f"Checking if remote directory {dir_path} exists")
                    sftp_stat = sftp_client.stat(dir_path)
                    if stat.S_ISDIR(sftp_stat.st_mode):
                        result_flag = True
                    else:
                        # Path exists but is a regular file.
                        result_flag = False
                except FileNotFoundError:
                    self.logger.info(f"Creating remote directory {dir_path}")
                    sftp_client.mkdir(dir_path)
                    result_flag = True
                finally:
                    sftp_client.close()
            else:
                result_flag = False
        except paramiko.SSHException:
            self.logger.debug("Paramiko SSHException")
            result_flag = False
        finally:
            # BUG FIX: release the connection on failure paths as well.
            self.__client.close()
        return result_flag

    def remove_file(self, file_path):
        """Removes a remote file

        Args:
            file_path: Path on the remote server

        Returns:
            True if file is successfully removed (or was already absent), False otherwise

        Raises:
            IOError if path exists and it is a directory
        """
        if self._connect():
            sftp_client = self.__client.open_sftp()
            try:
                self.logger.info(f"Removing remote file {file_path}")
                sftp_stat = sftp_client.stat(file_path)
                if stat.S_ISREG(sftp_stat.st_mode):
                    try:
                        sftp_client.remove(file_path)
                    except IOError:
                        # Best-effort removal; a failure here is ignored.
                        pass
                    return True
                else:
                    raise IOError
            except FileNotFoundError:
                self.logger.debug("FileNotFoundError")
                # An already-absent file counts as removed.
                return True
            finally:
                sftp_client.close()
                self.__client.close()
        else:
            return False

    def remove_dir(self, dir_path):
        """Removes a remote directory

        Args:
            dir_path: Path on the remote server

        Returns:
            True if the directory is successfully removed, False otherwise

        Raises:
            IOError if path exists and it is a file
        """
        if self._connect():
            sftp_client = self.__client.open_sftp()
            try:
                self.logger.info(f"Removing remote directory {dir_path}")
                sftp_stat = sftp_client.stat(dir_path)
                if stat.S_ISDIR(sftp_stat.st_mode):
                    try:
                        sftp_client.rmdir(dir_path)
                    except IOError:
                        pass
                    return True
                else:
                    raise IOError
            except FileNotFoundError:
                self.logger.debug("FileNotFoundError")
                # NOTE: unlike remove_file, a missing directory reports False;
                # preserved as-is since callers may rely on the asymmetry.
                return False
            finally:
                sftp_client.close()
                self.__client.close()
        else:
            return False

    def test_connection(self):
        """Return True if an SSH connection can be established, else False."""
        if self._connect():
            self.__client.close()
            return True
        return False
|
import csv
from datetime import datetime
with open("/Users/apple/Downloads/785492_1361825_bundle_archive/covid.csv",'r') as covid:
reader = csv.reader(covid)
x = []
col = []
disease = {}
for data in reader:
x.append(data)
fields = x[0]
x[0].append("disease")
x[0].append("linage")
# print(x[0])
# exit()
col.append(x[0][0])
col.append(x[0][1])
col.append(x[0][2])
col.append(x[0][3])
col.append(x[0][23])
col.append(x[0][24])
# print(col)
# exit()
# print(fields)
rows = x[1:]
# dict = dict(zip(x[0],x[1]))
# print(dict.keys())
# print(dict)
# exit()
# print(fields)
# print(rows)
updated_list = []
for row in rows:
l = []
date = datetime.strptime(row[3],'%d-%m-%Y')
formated_date = date.strftime('%m/%d/%Y')
# print(formated_date)
if row[1] == '1':
row[1] = "male"
if row[1] == '2':
row[1] = "female"
if row[2] == '1':
row[2] = "positive"
if row[2] == '2':
row[2] ="negative"
row[8] = int(row[8])
row[9] = int(row[9])
# print(l)
l.append(row[1])
l.append(row[2])
l.append(row[3])
l.append(row[8])
l.append(row[9])
sum = (row[8]+row[9])
l.append(sum)
disease[x[0][7]] = {row[7]}
disease[x[0][10]] = {row[10]}
# print(list)
# print(disease)
l.append(disease)
list = [x[0][11], x[0][12], x[0][13],x[0][14]]
l.append(list)
print(l)
exit()
updated_list.append(l)
# print(updated_list)
# exit()
# print(row[0],row[1],row[2],row[8])
with open('covid1','w') as c1:
writer = csv.writer(c1)
writer.writerow(col)
writer.writerows(updated_list)
# sex,patient_type,entry_date,diseases,linage
# female,positive,04-05-2020,27,97,124,{"peneumonia":1,"diabates":0},[]
# female,positive,19-03-2020,24,97,121,{"peneumonia":1,"diabates":0},[]
# male,negative,06-04-2020,54,2,56,{"peneumonia":1,"diabates":0},[]
# female,negative,17-04-2020,30,97,127,{"peneumonia":1,"diabates":0},[]
|
import sys
import os
import re
import requests
import bs4
import warnings
from importlib import reload
from astropy.io import fits
from astropy.nddata import Cutout2D
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord, Angle
from astropy import units as u
from astropy.utils.exceptions import AstropyWarning
from astropy.wcs.utils import pixel_to_skycoord
#sys.path.append(os.path.abspath(
# os.path.join(os.getcwd(), "../../DAGN-Blindtest")
#))
warnings.simplefilter('ignore', category=AstropyWarning)
import sdss_scrape as scrap
import plane_coods as pc
scrap = reload(scrap)
tolNeighs = lambda pt, t : pc.tolNeighs(pt, t) + [pt]
def cutout (fitsPath, cood, rad) :
    """
    Performs cutout of FITS file -
        fitsPath        - Directory where FITS file is present
        cood            - (ra, dec) of the object, in degrees
        rad             - Radius (in arcseconds) of the cutout

    Returns the astropy Cutout2D, or None if the FITS file cannot be opened.
    """
    try :
        hdu = fits.open(fitsPath, memmap=False)[0]
    except Exception :
        # Best-effort: an unreadable or absent FITS file yields None.
        # (Dropped the unused `as e` binding of the original.)
        return None

    wcs = WCS(hdu.header)
    position = SkyCoord(ra=Angle(cood[0], unit=u.deg),
                    dec=Angle(cood[1], unit=u.deg))
    size = u.Quantity((rad, rad), u.arcsec)
    return Cutout2D(hdu.data, position, size, wcs=wcs)
def parse_type(typestr):
    """Map a galaxy type string to its peak count: DOUBLE -> 2, SINGLE -> 1, else 0."""
    peak_counts = {"DOUBLE": 2, "SINGLE": 1}
    return peak_counts.get(typestr, 0)
def parse_peaks(peakstr, nums):
    """Parse the reported peak string into 2-D integer coordinates.

    Args:
        peakstr: text of the form "(12, 34) (56, 78) ..."
        nums: number of peaks reported; falsy means no peaks

    Returns:
        list of (int, int) tuples, in order of appearance
    """
    if not nums:
        return []
    # Raw-string regex (the original's unescaped '\(' literals raise
    # DeprecationWarnings on modern Python). Unlike the original index
    # arithmetic, this also tolerates variable whitespace and negative
    # coordinates while giving identical results on well-formed input.
    return [
        (int(a), int(b))
        for a, b in re.findall(r'\(\s*(-?\d+)\s*,\s*(-?\d+)\s*\)', peakstr)
    ]
def cood_to_objid (cood) :
    """ Takes (ra, dec) coordinates and tries to return an SDSS objid
    if an object is catalogued at that coordinate """
    # Queries the SDSS DR12 SkyServer "explore" page for this coordinate.
    link = "https://skyserver.sdss.org/dr12/en/tools/explore/summary.aspx?ra={}&dec={}"\
        .format(cood.ra.deg, cood.dec.deg)
    soup = bs4.BeautifulSoup(requests.get(link).text, features='lxml')
    # A ".nodatafound" element means no catalogued object at this position.
    if len(soup.select(".nodatafound")) == 1 :
        return None
    # NOTE(review): scrapes the 7th <td class="t"> cell for the objid and
    # strips the surrounding tags -- brittle against SkyServer layout changes.
    st = str(soup.findAll("td", {"class": "t"})[6])
    return st[st[:-1].rfind('>')+1 : st.rfind('<')]
def peak_to_objid (objid, cood, fitsPath, plist) :
    """ Takes an SDSS objid, path to the FITS file
    and returns the SDSS objids (if they exist) corresponding
    to the pixel coordinates of the peaks in 'plist' """
    # Cut out a 40-arcsec field around the object to obtain a local WCS.
    ct = cutout(fitsPath, cood, 40)
    # Convert the first two pixel peaks to sky coordinates.
    # NOTE(review): assumes plist has at least two entries and that ct is
    # not None (cutout may return None) -- confirm with callers.
    cood1 = pixel_to_skycoord(plist[0][0], plist[0][1], ct.wcs)
    cood2 = pixel_to_skycoord(plist[1][0], plist[1][1], ct.wcs)
    # The FITS file is no longer needed once its WCS has been used.
    os.remove(fitsPath)
    return cood_to_objid(cood1), cood_to_objid(cood2)
def double_peak_ids (objid, cood, band, plist) :
    """
    Returns the object ids corresponding to
    the double peaks in a band -
        objid       - objid of double detection
        cood        - coordinate of the object
        band        - one of 'ugri'
        plist       - peak list [(p1x, p1y), (p2x, p2y)]
    """
    fits_fold = os.path.join(os.getcwd(), "FITS")
    fits_path = os.path.join(fits_fold, objid + "-{}.fits".format(band))
    # Download and extract the band image only when it is not cached locally.
    if not os.path.exists(fits_path) :
        repoLink = scrap.scrapeRepoLink(objid)
        dlinks = scrap.scrapeBandLinks(repoLink)
        scrap.downloadExtract(objid,
                            band,
                            dlinks[band],
                            fits_fold,
                            fits_path)
    # peak_to_objid deletes fits_path after use, so the cache is one-shot.
    return peak_to_objid(objid,
                        cood,
                        fits_path,
                        plist)
|
import math
INF = 1000000
money = []
def LSOne(k):
    """Return the lowest set bit of k (the Fenwick-tree step size)."""
    return k & -k
def update(k, v):
    """Fenwick-tree point update on the global `money`: add v at index k.

    BUG FIX: the loop bound must be `< len(money)` -- the original's
    `<=` let k reach len(money) and raised IndexError one slot past
    the end of the array.
    """
    while k < len(money):
        money[k] += v
        k += LSOne(k)
def range_update(i, j, v):
    """Add v to every index in [i, j] via two point updates (difference trick)."""
    update(j + 1, -v)
    update(i, v)
def construct_array(l,r,c,p,q,s,n,m):
    """Apply m pseudo-random range updates to the Fenwick tree `money`.

    The first round uses (l, r, c) directly; every later round derives its
    bounds and amount from the previous round via p, q, s (mod n / mod 1e6).
    """
    for i in range(0,m):
        # Guarded ternaries: on i == 0 the prev_* names are not yet bound,
        # but `if i > 0` short-circuits before they are read.
        l_val = ((prev_l * p) + prev_r)%n + 1 if i > 0 else l
        r_val = ((prev_r * q) + prev_l)%n + 1 if i > 0 else r
        c_val = (prev_c * s) % 1000000 + 1 if i > 0 else c
        # Normalise so the range is always low-to-high.
        if (l_val > r_val): l_val, r_val = r_val, l_val
        # +1 shifts to the tree's 1-based indexing.
        range_update(l_val+1, r_val+1, c_val)
        prev_l, prev_r, prev_c = l_val, r_val, c_val
if __name__ == "__main__":
t = int(raw_input())
while t:
n,m = raw_input().split()
n,m = long(n), long(m)
l,r,c,p,q,s = raw_input().split()
money = [0 for i in range(INF+1)]
construct_array(long(l), long(r), long(c), long(p), long(q), long(s),n,m)
maximum, index = 0, -1
for i in range(0, len(money)):
if money[i] > maximum:
maximum = money[i]
index = i
print index-1, maximum
t = t - 1 |
#!/usr/bin/env python3
def lower_en(char, k):
    """Caesar-shift a lower-case letter by k positions, wrapping z back to a."""
    shifted = (ord(char) - ord('a') + k) % 26
    return chr(ord('a') + shifted)
def upper_en(char, k):
    """Caesar-shift an upper-case letter by k positions, wrapping Z back to A."""
    shifted = (ord(char) - ord('A') + k) % 26
    return chr(ord('A') + shifted)
def main(s, k):
    """Caesar-encrypt string s with shift k; non-letters pass through unchanged."""
    encrypted_chars = []
    for char in s:
        if 'A' <= char <= 'Z':
            encrypted_chars.append(upper_en(char, k))
        elif 'a' <= char <= 'z':
            encrypted_chars.append(lower_en(char, k))
        else:
            encrypted_chars.append(char)
    return "".join(encrypted_chars)
|
import blog
from django.shortcuts import get_object_or_404, render
from .models import Post, Author, Tag
from django.views.generic import ListView, DetailView
from .form import CommentForm
from django.views import View
from django.http import HttpResponseRedirect
from django.urls import reverse
# Create your views here.
class StatingPageView(ListView):
    """Landing page: renders the three most recent posts."""
    template_name = "blog/index.html"
    model = Post
    ordering = ["-posted_date"]  # newest first
    context_object_name = "posts"  # name of the queryset in the template

    def get_queryset(self):
        # Take only the three latest posts from the ordered queryset.
        return super().get_queryset()[:3]
# def starting_page(request):
# latest_posts = Post.objects.all().order_by('-posted_date')[:3]
# return render(request, "blog/index.html", {"posts":latest_posts})
class AllPostsView(ListView):
    """List every post, newest first."""
    template_name = "blog/all-posts.html"
    model = Post
    ordering = ["-posted_date"]  # newest first
    context_object_name = "all_posts"  # name of the queryset in the template
# def posts(request):
# all_posted_posts = Post.objects.all().order_by('-posted_date')
# return render(request, "blog/all-posts.html",{"all_posts": all_posted_posts})
# class SinglePostView(DetailView): #detailview is allow us to create view that show details , detail views can search data by slug or ID if we sepcify it as dynamic segment in urls so not need to specify this and if tenresult doent mucht it will serve the 404page
# template_name = "blog/post-detail.html"
# model = Post
# def get_context_data(self, **kwargs) :
# context = super().get_context_data(**kwargs)
# context ["post_tags"] = self.object.tags.all() #This will take the selected object and wll tale all his tags in the object
# context["comment_form"] = CommentForm() # This will create a CommentForm
# return context
class SinglePostView(View):
    """Detail view for a single post, addressed by slug.

    GET renders the post with its tags, comments and a blank comment form.
    POST validates a submitted comment, attaches it to the post and redirects
    back to the detail page; on validation failure the page is re-rendered
    with a fresh (unbound) form, exactly as before.
    """

    def is_stored_posts(self, request, post_id):
        # A post counts as "saved for later" when its id is in the session list.
        stored_posts = request.session.get("stored_posts")
        if stored_posts is not None:
            is_saved_for_later = post_id in stored_posts
        else:
            is_saved_for_later = False
        return is_saved_for_later

    def _build_context(self, request, post):
        # Shared context for GET and the failed-validation POST path;
        # extracted to remove the two duplicated dict literals.
        return {
            "post": post,
            "post_tags": post.tags.all(),
            "comment_form": CommentForm(),
            # "-id" orders comments newest first.
            "comments": post.comments.all().order_by("-id"),
            "saved_for_later": self.is_stored_posts(request, post.id),
        }

    def get(self, request, slug):
        post = Post.objects.get(slug=slug)
        return render(request, "blog/post-detail.html", self._build_context(request, post))

    def post(self, request, slug):
        comment_form = CommentForm(request.POST)
        post = Post.objects.get(slug=slug)
        if comment_form.is_valid():
            comment = comment_form.save(commit=False)  # build instance without hitting the DB yet
            comment.post = post  # attach the post excluded from the form's fields
            comment.save()
            return HttpResponseRedirect(reverse("post-detail-page", args=[slug]))
        return render(request, "blog/post-detail.html", self._build_context(request, post))
# def post_detail(reqeust, slug):
# selected_posts = get_object_or_404(Post, slug = slug)
# return render(reqeust, "blog/post-detail.html", {"post": selected_posts,
# "post_tags": selected_posts.tags.all()})
class ReadLaterView(View):
    """Session-backed "read later" list: GET shows it, POST toggles membership."""

    def get(self, request):
        stored_posts = request.session.get("stored_posts")
        context = {}
        # Treat a missing key and an empty list identically.
        if stored_posts:
            context["posts"] = Post.objects.filter(id__in=stored_posts)
            context["has_posts"] = True
        else:
            context["posts"] = []
            context["has_posts"] = False
        return render(request, "blog/stored-posts.html", context)

    def post(self, request):
        stored_posts = request.session.get("stored_posts")
        if stored_posts is None:
            stored_posts = []
        post_id = int(request.POST["post_id"])
        # Toggle membership: remove if present, otherwise add.
        if post_id in stored_posts:
            stored_posts.remove(post_id)
        else:
            stored_posts.append(post_id)
        # Persist the updated list back into the session.
        request.session["stored_posts"] = stored_posts
        return HttpResponseRedirect("/")
#!/usr/bin/env python
# -*- coding=utf8 -*-
"""
# Author: luning
# Created Time : 一 11/25 20:09:40 2019
# File Name: src/render/draw.py
# Description:
"""
from base import *
def drawPoint(canvas, point, color="#FFFFFF"):
    """Plot a single pixel by drawing a zero-length line at the point's position."""
    x, y = point.getX(), point.getY()
    canvas.create_line(x, y, x, y, fill=color)
|
# NESTED STATEMENTS (if / elif / else tutorial; user-facing strings stay Russian)
color = 'red'
if color == 'blue':
    print('синий')
# elif is short for "else if"
elif color == 'red':
    print('красный')
elif color == 'green':
    print('зеленый')
# else runs only when every preceding check returned False
else:
    print('неизвесеный цвет')
"""
Оператор elif переводится как «иначе если».
Логическое выражение, стоящее после оператора elif, проверяется,
только если все вышестоящие условия ложные.
То есть в этой схеме может выполниться только один блок кода:
первый, второй, третий или четвёртый.
Если одно из выражений истинно, то нижестоящие условия проверяться не будут.
"""
# ******* Better approach ******
"""Чтобы проверялись все условия, независимо от результата предыдущего,
следует использовать несколько независимых операторов if
"""
numb_1 = int(input("Введите первое целое число: "))
numb_2 = int(input("Введите второе целое число: "))
if numb_1 != numb_2:
    print("Числа не равны")
if numb_1 > numb_2:
    print("Первое число больше второго")
elif numb_1 < numb_2:
    print("Первое число меньше второго")
elif numb_1 == numb_2:
    print("Числа равны")
"""
Числа с точкой (ПЛАВАЮЩИЕ ЧИСЛА) FLOAT,пример: 3.14
"""
numb_1 = float(input("Введите первое вещественное число: "))
numb_2 = float(input("Введите второе вещественное число: "))
if numb_1 >= numb_2:
    print("Первая ветвь")
    if numb_1 > numb_2:
        print("Первое число больше второго")
    else:
        print("Числа равны")
elif numb_1 <= numb_2:
    print("Вторая ветвь")
    if numb_1 < numb_2:
        print("Первое число меньше второго")
    else:
        print("Числа равны")
import rethinkdb as r
import common
import time
# Pipeline stage toggles: re-import raw log lines, parse them with the regex
# below, and build the secondary indexes.
reimport = True
parse = True
index = True
conn = r.connect('localhost', 28015)
conn.use("logs")
if reimport:
    # Recreate the 'logs' database and the 'haproxy' table from scratch.
    if "logs" in r.db_list().run(conn):
        print("Dropping database 'logs'")
        r.db_drop("logs").run(conn)
    print("Creating database 'logs'")
    r.db_create("logs").run(conn)
    print("Creating table haproxy")
    r.table_create("haproxy").run(conn)
    import_start = time.time()
    total_count = 0
    # Bulk-insert raw lines in 10k-record batches; durability="soft" defers
    # disk acknowledgement until the explicit sync() below.
    for lines_buffer in common.chunk_lines(10000):
        total_count += len(lines_buffer)
        print("Inserting {0} more records, {1:,} total inserted".format(len(lines_buffer), total_count))
        r.table("haproxy").insert([{"_raw": l} for l in lines_buffer], durability="soft").run(conn)
    print("SYNC")
    r.table("haproxy").sync().run(conn)
    import_end = time.time()
    print(
        "Imported {0:,} records in {1:.2f} seconds, {2:.2f} records / second".format(
            total_count, import_end - import_start, total_count / (import_end - import_start)))
# HAProxy httplog-format pattern.  Written multi-line for readability; the
# newlines are stripped before use (see the parse section).  Each named group
# captures one field of the standard haproxy HTTP log line.
regex = r"""
^(?P<log_month>[A-Za-z]{3}) (?P<log_day>[0-9]{2}) (?P<log_hour>[0-9]{2}):(?P<log_minute>[0-9]{2}):(?P<log_second>[0-9]{2})
(?P<log_ip>[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})
(?P<process_name>[A-Za-z]+)\[(?P<pid>[0-9]+)\]:
(?P<client_ip>[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+):(?P<client_port>[0-9]+)
\[(?P<accept_day>[0-9]{2})/(?P<accept_month>[A-Za-z]{3})/(?P<accept_year>[0-9]{4}):(?P<accept_hour>[0-9]{2}):(?P<accept_minute>[0-9]{2}):(?P<accept_second>[0-9]{2}\.[0-9]{3})\]
(?P<frontend_name>[^ ]+)
(?P<backend_name>[^/]+)/(?P<server_name>[^ ]+)
(?P<Tq>[^/]+)/(?P<Tw>[^/]+)/(?P<Tc>[^/]+)/(?P<Tr>[^/]+)/(?P<Tt>[^ ]+)
(?P<status_code>[^ ]+)
(?P<bytes_read>[^ ]+)
(?P<captured_request_cookie>[^ ]+)
(?P<captured_response_cookie>[^ ]+)
(?P<termination_state>[^ ]+)
(?P<actconn>[^/]+)/(?P<feconn>[^/]+)/(?P<beconn>[^/]+)/(?P<srv_conn>[^/]+)/(?P<retries>[^ ]+)
(?P<srv_queue>[^/]+)/(?P<backend_queue>[^ ]+)
.*
\"((?P<http_verb>[^ ]+) (?P<request_uri>[^ ]+) ?(?P<http_version>HTTP/[0-9]\.[0-9])?|<BADREQ>)\"?
"""
if parse:
    # Collapse the multi-line pattern into a single line before matching.
    regex = regex.replace("\n", "")
    print(repr(regex))
    parse_start = time.time()
    def regex_parse(rec):
        # ReQL row transform: store the server-side match result (named groups
        # under "groups") in "_match"; presumably null for non-matching lines
        # per ReQL match() semantics — confirm against the RethinkDB docs.
        return { "_match": rec["_raw"].match(regex) }
    def parse_month(month):
        # Server-side "Jan".."Dec" -> 1..12 mapping.  ReQL evaluates this as a
        # query expression, so a nested r.branch cascade stands in for a dict
        # lookup.
        return \
        r.branch(month == "Jan", 1,
        r.branch(month == "Feb", 2,
        r.branch(month == "Mar", 3,
        r.branch(month == "Apr", 4,
        r.branch(month == "May", 5,
        r.branch(month == "Jun", 6,
        r.branch(month == "Jul", 7,
        r.branch(month == "Aug", 8,
        r.branch(month == "Sep", 9,
        r.branch(month == "Oct", 10,
        r.branch(month == "Nov", 11, 12)))))))))))
    def extract(rec):
        # Expand the positional regex groups stored in "_match" into typed,
        # named fields.  Group indices follow the order the named groups
        # appear in `regex`; coerce_to("number") parses numerics server-side.
        match = rec["_match"]
        return {
            #"log_month": match["groups"][0]["str"],
            #"log_day": match["groups"][1]["str"].coerce_to("number"),
            #"log_hour": match["groups"][2]["str"].coerce_to("number"),
            #"log_minute": match["groups"][3]["str"].coerce_to("number"),
            #"log_second": match["groups"][4]["str"].coerce_to("number"),
            "log_time": r.time(
                r.now().year(), # not part of the log format, weird
                parse_month(match["groups"][0]["str"]),
                match["groups"][1]["str"].coerce_to("number"),
                match["groups"][2]["str"].coerce_to("number"),
                match["groups"][3]["str"].coerce_to("number"),
                match["groups"][4]["str"].coerce_to("number"),
                "-06:00"
            ),
            "log_ip": match["groups"][5]["str"],
            "process_name": match["groups"][6]["str"],
            "pid": match["groups"][7]["str"].coerce_to("number"),
            "client_ip": match["groups"][8]["str"],
            "client_port": match["groups"][9]["str"].coerce_to("number"),
            #"accept_day": match["groups"][10]["str"].coerce_to("number"),
            #"accept_month": match["groups"][11]["str"],
            #"accept_year": match["groups"][12]["str"].coerce_to("number"),
            #"accept_hour": match["groups"][13]["str"].coerce_to("number"),
            #"accept_minute": match["groups"][14]["str"].coerce_to("number"),
            #"accept_second": match["groups"][15]["str"].coerce_to("number"),
            "accept_time": r.time(
                match["groups"][12]["str"].coerce_to("number"),
                parse_month(match["groups"][11]["str"]),
                match["groups"][10]["str"].coerce_to("number"),
                match["groups"][13]["str"].coerce_to("number"),
                match["groups"][14]["str"].coerce_to("number"),
                match["groups"][15]["str"].coerce_to("number"),
                "-06:00"
            ),
            "frontend_name": match["groups"][16]["str"],
            "backend_name": match["groups"][17]["str"],
            "server_name": match["groups"][18]["str"],
            "Tq": match["groups"][19]["str"].coerce_to("number"),
            "Tw": match["groups"][20]["str"].coerce_to("number"),
            "Tc": match["groups"][21]["str"].coerce_to("number"),
            "Tr": match["groups"][22]["str"].coerce_to("number"),
            "Tt": match["groups"][23]["str"].coerce_to("number"),
            "status_code": match["groups"][24]["str"].coerce_to("number"),
            "bytes_read": match["groups"][25]["str"].coerce_to("number"),
            "captured_request_cookie": match["groups"][26]["str"],
            "captured_response_cookie": match["groups"][27]["str"],
            "termination_state": match["groups"][28]["str"],
            "actconn": match["groups"][29]["str"].coerce_to("number"),
            "feconn": match["groups"][30]["str"].coerce_to("number"),
            "beconn": match["groups"][31]["str"].coerce_to("number"),
            "srv_conn": match["groups"][32]["str"].coerce_to("number"),
            "retries": match["groups"][33]["str"].coerce_to("number"),
            "srv_queue": match["groups"][34]["str"].coerce_to("number"),
            "backend_queue": match["groups"][35]["str"].coerce_to("number"),
            # 36 - full
            # 37 - null or http verb
            # 38 - null or request uri
            # 39 - null or http version
            "http_verb": r.branch(match["groups"][37] == None, match["groups"][36]["str"], match["groups"][37]["str"]),
            "request_uri": r.branch(match["groups"][38] == None, None, match["groups"][38]["str"]),
            "http_version": r.branch(match["groups"][39] == None, None, match["groups"][39]["str"]),
        }
    # Pass 1: run the regex server-side and stash the match object.
    print("Beginning parse pass")
    retval = r.table("haproxy").update(regex_parse, durability="soft").run(conn)
    if retval["errors"] != 0:
        print("Errors during update")
        print(repr(retval))
        import sys; sys.exit(1)
    print("Parse pass complete")
    print("SYNC")
    r.table("haproxy").sync().run(conn)
    # Pass 2: expand the stored match object into named, typed fields.
    print("Beginning extract pass")
    retval = r.table("haproxy").update(extract, durability="soft").run(conn)
    if retval["errors"] != 0:
        print("Errors during update")
        print(repr(retval))
        import sys; sys.exit(1)
    print("Extract pass complete")
    print("SYNC")
    r.table("haproxy").sync().run(conn)
    # Pass 3: drop the bulky intermediate "_match" field.
    print("Beginning delete pass")
    retval = r.table('haproxy').replace(r.row.without('_match'), durability="soft").run(conn)
    if retval["errors"] != 0:
        print("Errors during replace")
        print(repr(retval))
        import sys; sys.exit(1)
    print("Delete pass complete")
    print("SYNC")
    r.table("haproxy").sync().run(conn)
    parse_end = time.time()
    print("Parse completed in {0:.2f} seconds".format(parse_end - parse_start))
#print("start")
#for row in r.table("haproxy").filter(lambda rec: rec["_raw"].match(regex) == None).limit(1).run(conn):
#    print(repr(row))
#print("end")
if index:
    """Build secondary indexes on the parsed fields."""
    print("Creating indexes...")
    index_start = time.time()
    index_fields = ("http_verb", "status_code")
    # Kick off both index builds first, then wait for each one to finish
    # (same order as the original explicit calls).
    for field in index_fields:
        r.table("haproxy").index_create(field).run(conn)
    for field in index_fields:
        r.table("haproxy").index_wait(field).run(conn)
    index_end = time.time()
    print("Index creation completed in {0:.2f} seconds".format(index_end - index_start))
|
import matplotlib.pyplot as plt
import numpy as np
from timemachines.skatertools.utilities.conventions import to_log_space_1d
if __name__=='__main__':
    # Sweep the unit interval and map each point through the 1-d log-space
    # parameter convention, then plot the resulting curve.
    u_grid = np.linspace(0.0, 1.0, num=150)
    mapped = [to_log_space_1d(u, low=-300, high=700) for u in u_grid]
    plt.plot(u_grid, mapped)
    plt.title('Parameter convention (1-dim)')
    plt.grid()
    plt.show()
"""
script developing 2d complex SPN
for synthetic sine data
"""
import numpy as np
import logging
import time
import argparse
import pickle
import matplotlib.pyplot as plt
import sys
sys.path.append('../../2dgaussian/SPFlow/src/')
from spn.algorithms.Inference import log_likelihood
from spn.algorithms.Statistics import get_structure_stats
from spn.structure.Base import Context
from spn.algorithms.LearningWrappers import learn_parametric
# current_time=time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime())
# logging.basicConfig(filename='/media/yu/data/yu/code/gp_whittle/WhittleNetwork/dev/whittle_spn_'+current_time+'.log', filemode='w', level=logging.INFO,
# format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# logger = logging.getLogger(__name__)
def get_save_path(ARGS):
    """Build the output directory path from the run configuration.

    Prints an error and exits the process on an unknown wspn/data type,
    mirroring the original if/elif cascades.
    """
    wspn_keys = {1: 'wspn1d', 2: 'wspn2d', 3: 'wspn_pair'}
    data_dirs = {1: 'sine/', 2: 'mnist/', 3: 'SP/', 4: 'stock/', 5: 'billiards/'}
    key = wspn_keys.get(ARGS.wspn_type)
    if key is None:
        print('input spn type error')
        sys.exit()
    data = data_dirs.get(ARGS.data_type)
    if data is None:
        print('input data type error')
        sys.exit()
    return ('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/dev_aies/'
            + data + key + '_' + str(ARGS.n_min_slice) + '_' + str(ARGS.threshold) + '/')
def get_l_rfft(ARGS):
    """Return the one-sided rFFT length for the configured data set.

    Prints an error and exits on an unknown data type, like the original.
    """
    lengths = {1: 17, 2: 8, 3: 17, 4: 17, 5: 51}
    try:
        return lengths[ARGS.data_type]
    except KeyError:
        print('input l_rfft error')
        sys.exit()
def learn_whittle_spn_1d(train_data, n_RV, n_min_slice=2000, init_scope=None):
    """Learn a 1-d-leaf Whittle SPN on train_data and pickle it to disk.

    Reads the module-level ARGS for the threshold and save path.
    Returns the learned SPN.
    """
    from spn.structure.leaves.parametric.Parametric import Gaussian
    # learn spn
    ds_context = Context(parametric_types=[Gaussian] * n_RV).add_domains(train_data)
    print('learning WSPN')
    # l_rfft=None --> 1d gaussian node, is_pair does not work
    wspn = learn_parametric(train_data, ds_context, min_instances_slice=n_min_slice, threshold=ARGS.threshold,
                            initial_scope=init_scope, cpus=1, l_rfft=None, is_pair=False)
    save_path = get_save_path(ARGS)
    check_path(save_path)
    # Context manager closes the handle even if pickling fails (the original
    # used a bare open()/close() pair that leaked on error).
    with open(save_path + 'wspn_1d.pkl', 'wb') as f:
        pickle.dump(wspn, f)
    return wspn
def load_whittle_spn_1d(ARGS):
    """Load the pickled 1-d Whittle SPN and log its structure statistics."""
    save_path = get_save_path(ARGS)
    # with-block replaces the original leaked open()/close() pair.
    with open(save_path + 'wspn_1d.pkl', 'rb') as f:
        spn = pickle.load(f)
    log_msg = get_structure_stats(spn)
    print(log_msg)
    logger.info(log_msg)
    return spn
def learn_whittle_spn_2d(train_data, n_RV, n_min_slice, init_scope=None):
    """Learn a 2-d-leaf (diagonal-covariance) Whittle SPN and pickle it.

    Reads the module-level ARGS for threshold, l_rfft and save path.
    Returns the learned SPN.
    """
    from spn.structure.leaves.parametric.Parametric import Gaussian
    # learn spn
    ds_context = Context(parametric_types=[Gaussian] * n_RV).add_domains(train_data)
    print('learning WSPN')
    l_rfft = get_l_rfft(ARGS)
    # l_rfft!=None --> 2d/pair gaussian node, is_pair=False --> 2d gaussian, diagonal covariance matrix
    wspn = learn_parametric(train_data, ds_context, min_instances_slice=n_min_slice, threshold=ARGS.threshold,
                            initial_scope=init_scope, cpus=1, l_rfft=l_rfft, is_pair=False)
    save_path = get_save_path(ARGS)
    check_path(save_path)
    # Context manager guarantees the handle is closed (original leaked on error).
    with open(save_path + 'wspn_2d.pkl', 'wb') as f:
        pickle.dump(wspn, f)
    return wspn
def load_whittle_spn_2d(ARGS, log=True):
    """Load the pickled 2-d Whittle SPN; optionally log its structure stats."""
    save_path = get_save_path(ARGS)
    # with-block replaces the original leaked open()/close() pair.
    with open(save_path + 'wspn_2d.pkl', 'rb') as f:
        spn = pickle.load(f)
    log_msg = get_structure_stats(spn)
    print(log_msg)
    if log:
        logger.info(log_msg)
    return spn
def learn_whittle_spn_pair(train_data, n_RV, n_min_slice, init_scope=None):
    """Learn a pairwise (full-covariance) Whittle SPN and pickle it.

    Reads the module-level ARGS for threshold, l_rfft and save path.
    Returns the learned SPN.
    """
    from spn.structure.leaves.parametric.Parametric import MultivariateGaussian
    # learn spn
    ds_context = Context(parametric_types=[MultivariateGaussian] * n_RV).add_domains(train_data)
    print('learning WSPN')
    l_rfft = get_l_rfft(ARGS)
    # l_rfft!=None --> 2d/pair gaussian node, is_pair=True --> pairwise gaussian, full covariance matrix
    wspn = learn_parametric(train_data, ds_context, min_instances_slice=n_min_slice, threshold=ARGS.threshold,
                            initial_scope=init_scope, cpus=1, l_rfft=l_rfft, is_pair=True)
    save_path = get_save_path(ARGS)
    check_path(save_path)
    # Context manager guarantees the handle is closed (original leaked on error).
    with open(save_path + 'wspn_pair.pkl', 'wb') as f:
        pickle.dump(wspn, f)
    return wspn
def load_whittle_spn_pair(ARGS, log=True):
    """Load the pickled pairwise Whittle SPN; optionally log its structure stats."""
    save_path = get_save_path(ARGS)
    # with-block replaces the original leaked open()/close() pair.
    with open(save_path + 'wspn_pair.pkl', 'rb') as f:
        spn = pickle.load(f)
    log_msg = get_structure_stats(spn)
    print(log_msg)
    if log:
        logger.info(log_msg)
    return spn
def load_whittle_spn_res(ARGS):
    """Load the externally trained res-SPN from its hard-coded path.

    NOTE: the path must be updated whenever the model changes, hence the
    reminder message.
    """
    # load res-spn, need to be modified when model changed
    log_msg = 'Have you set the latest model path?'
    print(log_msg)
    logger.info(log_msg)
    rspn_path = 'ventola/em_optimized_fuse_spn_yu_sine'
    # with-block replaces the original leaked open()/close() pair.
    with open(rspn_path, 'rb') as f:
        rspn = pickle.load(f)
    log_msg = get_structure_stats(rspn)
    print(log_msg)
    logger.info(log_msg)
    return rspn
def data_to_2d(data, p, L):
    """Regroup flattened per-channel (real||imag) spectra into (real, imag) pairs.

    data is 2-D with shape (h, p * 2 * (L//2 + 1)); the result has shape
    (h, p * (L//2 + 1), 2) with the last axis holding [real, imag].
    """
    n_rows, _ = data.shape
    half = L // 2 + 1  # one-sided rFFT length
    per_channel = data.reshape(n_rows * p, -1)
    real_part = per_channel[:, 0:half].reshape(n_rows * p, half, 1)
    imag_part = per_channel[:, half:].reshape(n_rows * p, half, 1)
    paired = np.concatenate([real_part, imag_part], 2)
    return paired.reshape(n_rows, -1, 2)
def _billiards_fft_features(pkl_path):
    """Load one billiards pickle and build its rFFT feature matrix.

    Per row the columns are [x1r, x1i, y1r, y1i, x2r, x2i, y2r, y2i,
    x3r, x3i, y3r, y3i]: real parts followed by imaginary parts of the
    one-sided FFT, for each of the 3 balls and 2 coordinates — the same
    layout the original code produced with three verbatim copies of this
    transform.  The pickle handle is closed via a context manager (the
    original leaked it).
    """
    with open(pkl_path, 'rb') as fh:
        data = pickle.load(fh)
    positions = data['y'][..., :2]  # keep only (x, y) coordinates
    data_rfft = np.fft.rfft(positions, axis=1)
    d_r = data_rfft.real
    d_i = data_rfft.imag
    parts = []
    for ball in range(3):
        for coord in range(2):  # 0 = x, 1 = y
            parts.append(np.concatenate([d_r[:, :, ball, coord], d_i[:, :, ball, coord]], axis=1))
    return np.concatenate(parts, axis=1)


def load_data_for_wspn(ARGS):
    """Load the train/positive/negative splits plus layout metadata.

    Returns (data_train, data_pos, data_neg, n_RV, p, L, init_scope):
    n_RV random variables, p channels, time-series length L, and the
    initial scope with two columns per channel block removed.
    Exits the process on an unknown data type.
    """
    if ARGS.data_type==1:
        log_msg = 'loading sine data'
        print(log_msg)
        data_train = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/train_sine.dat',
                                 dtype=np.float64).reshape(-1, 204)
        data_pos = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/test_sine_positive.dat',
                               dtype=np.float64).reshape(-1, 204)
        data_neg = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/test_sine_negative.dat',
                               dtype=np.float64).reshape(-1, 204)
        n_RV = 204  # number of RVs
        p = 6  # dim
        L = 32  # TS length
        scope_list = np.arange(n_RV)
        # Drop columns 17 and 33 of every 34-wide block.
        # NOTE(review): these look like the always-zero imaginary parts of the
        # DC/Nyquist bins — confirm before relying on that interpretation.
        scope_temp = np.delete(scope_list, np.where(scope_list % 34 == 17))
        init_scope = list(np.delete(scope_temp, np.where(scope_temp % 34 == 33)))
    elif ARGS.data_type==2:
        log_msg = 'loading mnist data'
        print(log_msg)
        data_train = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/train_mnist.dat',
                                 dtype=np.float64).reshape(-1, 224)
        data_pos = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/test_mnist_positive.dat',
                               dtype=np.float64).reshape(-1, 224)
        data_neg = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/test_mnist_negative.dat',
                               dtype=np.float64).reshape(-1, 224)
        n_RV = 224  # number of RVs
        p = 14  # dim
        L = 14  # TS length
        scope_list = np.arange(n_RV)
        scope_temp = np.delete(scope_list, np.where(scope_list % 16 == 8))
        init_scope = list(np.delete(scope_temp, np.where(scope_temp % 16 == 15)))
    elif ARGS.data_type==3:
        log_msg = 'loading S&P data'
        print(log_msg)
        # No separate test splits exist for S&P; reuse copies of the training data.
        data_train = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/train_SP.dat',
                                 dtype=np.float64).reshape(-1, 374)
        data_pos = data_train.copy()
        data_neg = data_train.copy()
        n_RV = 374  # number of RVs
        p = 11  # dim
        L = 32  # TS length
        scope_list = np.arange(n_RV)
        scope_temp = np.delete(scope_list, np.where(scope_list % 34 == 17))
        init_scope = list(np.delete(scope_temp, np.where(scope_temp % 34 == 33)))
    elif ARGS.data_type==4:
        log_msg = 'loading Stock data'
        print(log_msg)
        # No separate test splits exist for Stock; reuse copies of the training data.
        data_train = np.fromfile('/media/yu/data/yu/code/gp_whittle/WhittleNetwork/train_stock.dat',
                                 dtype=np.float64).reshape(-1, 578)
        data_pos = data_train.copy()
        data_neg = data_train.copy()
        n_RV = 578  # number of RVs
        p = 17  # dim
        L = 32  # TS length
        scope_list = np.arange(n_RV)
        scope_temp = np.delete(scope_list, np.where(scope_list % 34 == 17))
        init_scope = list(np.delete(scope_temp, np.where(scope_temp % 34 == 33)))
    elif ARGS.data_type==5:
        log_msg = 'loading Billiards data'
        print(log_msg)
        data_path = '/media/yu/data/yu/code/gp_whittle/WhittleNetwork/datasets/billiards_data/'
        # Train / in-distribution test / outlier splits share one transform,
        # previously duplicated inline three times.
        data_train = _billiards_fft_features(data_path + 'billiards_train.pkl')
        data_pos = _billiards_fft_features(data_path + 'billiards_test.pkl')
        data_neg = _billiards_fft_features(data_path + 'billiards_test_2.pkl')
        n_RV = 612  # number of RVs
        p = 6  # dim
        L = 100  # TS length
        scope_list = np.arange(n_RV)
        scope_temp = np.delete(scope_list, np.where(scope_list % 102 == 51))
        init_scope = list(np.delete(scope_temp, np.where(scope_temp % 102 == 101)))
    else:
        print('input data error')
        sys.exit()
    print('data done')
    return data_train, data_pos, data_neg, n_RV, p, L, init_scope
def check_path(path):
    """Create *path* (including parents) if it does not already exist."""
    import os
    # exist_ok avoids the check-then-create race of the original
    # os.path.exists guard and is a no-op when the directory is present.
    os.makedirs(path, exist_ok=True)
def calc_ll(wspn, data_train, data_pos, data_neg):
    """Print and log median and mean log-likelihoods for the three splits.

    Every message goes to stdout and the module logger, exactly as the
    original's twelve print/logger.info pairs did.
    """
    def _log(msg):
        # Single place for the print + logger.info duplication.
        print(msg)
        logger.info(msg)
    _log('Log-likelihood calculating...')
    lls = [('train', log_likelihood(wspn, data_train)),
           ('positive', log_likelihood(wspn, data_pos)),
           ('negative', log_likelihood(wspn, data_neg))]
    _log('---------median-----------')
    for name, ll in lls:
        _log('LL_' + name + '=' + str(np.median(ll)))
    _log('--------- mean -----------')
    for name, ll in lls:
        _log('LL_' + name + '=' + str(np.mean(ll)))
def init_log(ARGS):
    """Create a timestamped, run-specific log file and return the logger."""
    current_time = time.strftime("%Y-%m-%d_%H:%M:%S", time.localtime())
    # Creating log file
    path_base = '/media/yu/data/yu/code/gp_whittle/WhittleNetwork/dev/'
    # The file name encodes the run mode plus the wspn/data configuration.
    prefix_by_mode = {1: 'train_wspn_', 2: 'test_wspn'}
    prefix = prefix_by_mode.get(ARGS.train_type)
    if prefix is None:
        file_base = 'error'
    else:
        file_base = prefix + str(ARGS.wspn_type) + '_on_data' + str(ARGS.data_type) + '_'
    logging.basicConfig(
        filename=path_base + file_base + current_time + '.log',
        filemode='w',
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    return logging.getLogger(__name__)
if __name__ == '__main__':
    # set parameters
    parser = argparse.ArgumentParser()
    # Args go here
    parser.add_argument('--wspn_type', type=int, default=3,
                        help='Type of wspn, 1-1d, 2-2d, 3-pair, 4-res-spn')
    parser.add_argument('--train_type', type=int, default=1,
                        help='Type of train, 1-train, 2-test')
    parser.add_argument('--n_min_slice', type=int, default=2100,
                        help='minimum size of slice.')
    parser.add_argument('--data_type', type=int, default=1,
                        help='Type of data, 1-sine, 2-mnist, 3-S&P, 4-stock, 5-billiards')
    parser.add_argument('--threshold', type=float, default=0.3,
                        help='Threshold of splitting features')
    ARGS, unparsed = parser.parse_known_args()
    # init logger
    logger = init_log(ARGS)
    log_msg = '\n--wspn_type=' + str(ARGS.wspn_type) + \
              '\n--train_type=' + str(ARGS.train_type) + \
              '\n--n_min_slice=' + str(ARGS.n_min_slice) + \
              '\n--data_type=' + str(ARGS.data_type) + \
              '\n--threshold=' + str(ARGS.threshold)
    print(log_msg)
    logger.info(log_msg)
    start_time = time.time()
    # Fixed seed for reproducible structure learning.
    np.random.seed(2019052799)
    # load data and data_info
    data_train, data_pos, data_neg, n_RV, p, L, init_scope = load_data_for_wspn(ARGS)
    if ARGS.wspn_type==1:
        # train/load wspn 1d
        n_min_slice = ARGS.n_min_slice
        if ARGS.train_type==1:
            log_msg = 'Train WSPN 1d'
            logger.info(log_msg)
            wspn = learn_whittle_spn_1d(data_train, n_RV, n_min_slice, init_scope)
        elif ARGS.train_type==2:
            log_msg = 'Test WSPN 1d'
            logger.info(log_msg)
            wspn = load_whittle_spn_1d(ARGS)
            calc_ll(wspn, data_train, data_pos, data_neg)
    elif ARGS.wspn_type==2:
        # train/load wspn 2d
        # NOTE(review): the log labels in this branch say 'pair' while the
        # functions called are the 2d variants (and vice versa in the
        # wspn_type==3 branch) — they look swapped; confirm before trusting
        # the logs.
        n_min_slice = ARGS.n_min_slice
        if ARGS.train_type == 1:
            log_msg = 'Train WSPN pair'
            logger.info(log_msg)
            wspn = learn_whittle_spn_2d(data_train, n_RV, n_min_slice, init_scope)
        elif ARGS.train_type==2:
            log_msg = 'Test WSPN pair'
            logger.info(log_msg)
            wspn = load_whittle_spn_2d(ARGS)
        # calculate LL
        calc_ll(wspn, data_train, data_pos, data_neg)
    elif ARGS.wspn_type==3:
        # train/load wspn pair
        n_min_slice = ARGS.n_min_slice
        if ARGS.train_type == 1:
            log_msg = 'Train WSPN 2d'
            logger.info(log_msg)
            wspn = learn_whittle_spn_pair(data_train, n_RV, n_min_slice, init_scope)
        elif ARGS.train_type==2:
            log_msg = 'Test WSPN 2d'
            logger.info(log_msg)
            wspn = load_whittle_spn_pair(ARGS)
        # calculate LL
        calc_ll(wspn, data_train, data_pos, data_neg)
    elif ARGS.wspn_type==4:
        # train/load W-res-spn (training happens elsewhere; only testing here)
        n_min_slice = ARGS.n_min_slice
        if ARGS.train_type == 1:
            log_msg = 'Cannot train here'
            logger.info(log_msg)
            sys.exit()
        elif ARGS.train_type==2:
            log_msg = 'Test Res-SPN'
            logger.info(log_msg)
            wspn = load_whittle_spn_res(ARGS)
            # modify data for res-spn: restrict all splits to the initial scope
            data_train = data_train[:, init_scope]
            data_pos = data_pos[:, init_scope]
            data_neg = data_neg[:, init_scope]
            # calculate LL
            calc_ll(wspn, data_train, data_pos, data_neg)
    log_msg = 'Running time: ' + str((time.time() - start_time)/60.0) + 'minutes'
    logger.info(log_msg)
|
from project_conf import MODEL_SAVE_PATH, PROCESS_DIR, TRAINING_PREFIX, CACHE_DIR
import os
from django.shortcuts import render
from django.http import HttpResponse
from time import strftime, ctime
from datetime import datetime
from utils_proc import get_running_procs
from models.rest import ALREADY_TRAINING, UNKNOWN_TRAINING
# Navigation tabs shared by every page; each view merges this into its render
# context via dict({...}, **BASE_CONTEXT).
BASE_CONTEXT = {
    'tabs': [
        {'name': 'Overview', 'url': 'overview.html'},
        {'name': 'Training', 'url': 'training.html'},
        {'name': 'Details', 'url': 'details.html'}
    ]
}
def overview(request):
    """Render the model overview page; only GET is allowed (405 otherwise)."""
    if request.method != "GET":
        return HttpResponse(status=405)
    context = dict(
        {'active': 'Overview', 'models': get_models_info()},
        **BASE_CONTEXT
    )
    return render(request, 'models/overview.html', context)
def get_models_info(selected=None):
    """Collect metadata for every model file under MODEL_SAVE_PATH.

    Returns a list of dicts with the file name, last-modified date string,
    whether it matches *selected*, and a human-readable size.
    """
    models = []
    for dirpath, _, filenames in os.walk(MODEL_SAVE_PATH):
        for f in filenames:
            # Bug fix: join with the directory currently being walked.  The
            # original joined MODEL_SAVE_PATH itself, which produced wrong
            # paths (and OSError) for files in nested subdirectories.
            filepath = os.path.join(dirpath, f)
            # NOTE(review): '%Y-%d-%m' looks like a transposed '%Y-%m-%d';
            # confirm the intended display format before changing it.
            last_modified = datetime.fromtimestamp(os.path.getctime(filepath)).strftime('%Y-%d-%m')
            models.append({
                'name': f,
                'modified': last_modified,
                'selected': f == selected,
                'size': readable_file_size(os.path.getsize(filepath))
            })
    return models
def readable_file_size(n_bytes, suffix='B'):
    """Format a byte count with decimal (1000-based) SI prefixes, e.g. '1.5KB'."""
    value = n_bytes
    for prefix in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
        if abs(value) < 1e3:
            return "%3.1f%s%s" % (value, prefix, suffix)
        value /= 1e3
    # Anything past zetta falls through to yotta.
    return "%.1f%s%s" % (value, 'Y', suffix)
def training(request):
    """Render the training page, mapping known error codes to messages.

    Only GET is allowed (405 otherwise).
    """
    if request.method != "GET":
        return HttpResponse(status=405)
    context = dict({'active': 'Training'}, **BASE_CONTEXT)
    if 'error' in request.GET:
        # Known error codes map to specific messages; anything else gets the
        # generic fallback (message text kept byte-identical to the original).
        known_messages = {
            ALREADY_TRAINING: 'Training has not been started because concurrent training processes are not supported.',
            UNKNOWN_TRAINING: 'The provided training method is unknown.',
        }
        context.update({'error_msg': known_messages.get(request.GET['error'], 'Unexpected error occured.')})
    return render(request, 'models/training.html', context)
def details(request):
    """Render details for the running training process, if any (GET only)."""
    if request.method != "GET":
        return HttpResponse(status=405)
    context = dict({'active': 'Details'}, **BASE_CONTEXT)
    procs = get_running_procs(prefix="train")
    if procs:
        # Show the first (and only supported) training process.
        context.update({'pid': procs[0]['id']})
    else:
        context['error_none_running'] = True
    return render(request, 'models/details.html', context)
import math
import os
import json
import pandas as pd
from collections import defaultdict
from bs4 import BeautifulSoup
from Tokenizer import tokenizer
#WEBPAGES_RAW_NAME = "WEBPAGES_RAW"
#JSON_FILE_NAME = os.path.join(".", WEBPAGES_RAW_NAME, "bookkeeping.json")
def query_process(inverted_index, document_length, query):
    """Rank documents for *query* and return (doc_id, score) pairs, best first.

    Single-term queries are ranked by the stored per-document score; multi-term
    queries use cosine-normalized tf-idf scoring.

    Bug fix: the original only assigned the sorted result inside an
    ``if len(...) > 1`` guard, so queries with zero or one matching document
    hit the return statement with the result name unbound (UnboundLocalError).
    The result is now built unconditionally (sorting <=2 items is free).
    The bare ``except:`` clauses are also narrowed to the exceptions the
    lookups can actually raise, preserving the best-effort behavior.
    """
    tokenized_query = tokenizer(query)
    if len(tokenized_query) == 1:
        query_dict = dict()
        try:
            token = list(tokenized_query.keys())[0]
            query_dict = inverted_index[token]
        except KeyError:
            # Unknown term: fall through with an empty result.
            pass
        return sorted(query_dict.items(), key=lambda x: x[1], reverse=True)
    else:
        multi_query_dict = defaultdict(float)
        try:
            query_normalized_tfidf_dict = defaultdict(float)
            query_length_square = 0
            # tf-idf weight per query term.
            for token in tokenized_query.keys():
                tf_weight = 1 + math.log10(tokenized_query[token])
                idf = math.log10(len(document_length)/len(inverted_index[token]))
                tf_idf = tf_weight * idf
                query_normalized_tfidf_dict[token] = tf_idf
                query_length_square += math.pow(tf_idf, 2)
            # Normalize the query vector to unit length.
            query_length = math.sqrt(query_length_square)
            for token in query_normalized_tfidf_dict.keys():
                query_normalized_tfidf_dict[token] = query_normalized_tfidf_dict[token]/query_length
            # Accumulate cosine contributions per document.
            for token in query_normalized_tfidf_dict.keys():
                doc_dict = inverted_index[token]
                for doc in doc_dict.keys():
                    normalized_tf_in_doc = inverted_index[token][doc][0]/document_length[doc]
                    multi_query_dict[doc] += query_normalized_tfidf_dict[token] * normalized_tf_in_doc
        except (KeyError, ZeroDivisionError):
            # Best-effort: a term missing from the index (or a degenerate
            # length) abandons scoring part-way, as the original broad except
            # did; documents scored so far are still returned.
            pass
        return sorted(multi_query_dict.items(), key=lambda x: x[1], reverse=True)
def retrieve_doc(query_result_list):
    """Map ranked (doc_id, score) pairs to (title, url) result tuples.

    Fixes two defects in the original: file handles were opened and never
    closed (resource leak), and ``json.load(..., encoding=...)`` raises
    TypeError on Python 3.9+ (the parameter was removed) -- the encoding now
    belongs on ``open``.

    Args:
        query_result_list: iterable of (doc_id_string, score) pairs, where the
            doc id looks like "<folder>/<file>".

    Returns:
        list of (title, url) tuples; a placeholder title is used when the
        page has no <title> tag.
    """
    with open('WEBPAGES_RAW/bookkeeping.json', encoding="utf-8") as bookkeeping:
        id_url = json.load(bookkeeping)
    final_output = []
    for docid_str, _ in query_result_list:
        id_info = docid_str.split('/')
        folder_id = id_info[0]
        file_id = id_info[1]
        file_name = "{}/{}/{}".format("WEBPAGES_RAW", folder_id, file_id)
        with open(file_name, 'r', encoding='utf-8') as html:
            soup = BeautifulSoup(html, 'lxml')
        title = soup.find("title")
        if title:
            # collapse internal whitespace in the title text
            final_output.append((" ".join(soup.title.string.strip().split()), id_url[docid_str]))
        else:
            final_output.append(("Sorry. No description for the title. ", id_url[docid_str]))
    return final_output
def search_query(query_content):
    """Load the pickled index files and return search results for *query_content*.

    Returns a list of (title, url) tuples for every matching document.
    """
    inverted_index = pd.read_pickle("inverted_index__final_file.pkl")
    document_length = pd.read_pickle("document_length__final_file.pkl")
    ranked = query_process(inverted_index, document_length, query_content)
    return retrieve_doc(ranked)
'''
try:
search_result = retrieve_doc(query_process(inverted_index, document_length, query_content))
print(search_result)
return search_result
except:
return [("Sorry, there is no result available")]
'''
|
class Vehicle:  # a class representing a vehicle (original comment was in Hebrew)
    """Simple vehicle record with shared class-level attributes."""
    # class attributes shared by every instance
    color = 'white'
    banana = 8

    def __init__(self, name, max_speed, mileage, color="White"):
        """Store the per-instance fields.

        The *color* argument is accepted but deliberately not stored, so all
        instances keep reading the shared class attribute.
        """
        self.name = name
        self.max_speed = max_speed
        self.mileage = mileage
        # self.color = color  # intentionally disabled: color stays class-level
# Demonstrate that instances share the class attributes (color, banana)
# while keeping their own name/speed/mileage fields.
bus = Vehicle("Volvo", 80, 10000)
car = Vehicle("Tesla", 150, 2000)
print(bus.color, bus.name, bus.mileage, bus.max_speed, bus.banana)
print(car.color, car.name, car.mileage, car.max_speed)
|
import sys
def compute(x, y):
    """Return the binomial coefficient C(x, y) using exact integer arithmetic.

    Args:
        x: total number of items (non-negative int).
        y: number of items chosen (non-negative int).

    Returns:
        int: x! / (y! * (x - y)!).  If y > x the numerator passes through 0,
        so the result is 0, matching combinatorial convention.
    """
    numerator = 1
    denominator = 1
    for i in range(x, x - y, -1):
        numerator *= i
    for j in range(1, y + 1):
        denominator *= j
    # Integer floor division is exact here (denominator always divides the
    # numerator) and avoids the float rounding/overflow that the original
    # int(numerator / denominator) suffered for large inputs.
    return numerator // denominator
if __name__ == '__main__':
    # Read the target K and the card description "A X B Y" from stdin.
    K = int(sys.stdin.readline().strip())
    two = sys.stdin.readline().strip()
    A, X, B, Y = (int(part) for part in two.split())
    # Collect every (i, j) pair with A*i + B*j == K.
    pairs = []
    for i in range(int(K / A) + 1):
        for j in range(int((K - A * i) / B) + 1):
            if A * i + B * j == K:
                pairs.append((i, j))
    # Count the ways to pick i of the X "A" cards and j of the Y "B" cards.
    total = 0
    for i, j in pairs:
        total += compute(X, i) * compute(Y, j)
    print(total % 1000000007)
|
import sys
import os
# If you find a solution that does not need the two paths, please comment!
sys.path.append('/home/zack/program/prova/tivit')
os.environ['DJANGO_SETTINGS_MODULE'] = 'tivit.settings'
import django
django.setup()
from feiras.models import Feira
from feiras.serializers import FeiraSerializer
# Populate one sample Feira row with placeholder values and persist it.
feira = Feira()
sample_values = {
    'log': 1111,            # models.BigIntegerField()
    'lat': 1111,            # models.BigIntegerField()
    'set_sens': 1111,       # models.BigIntegerField()
    'area_p': 1111,         # models.BigIntegerField()
    'cod_dist': 11,         # models.IntegerField()
    'distrito': 'dista',    # models.CharField(max_length=100)
    'cod_sub_pref': 1111,   # models.IntegerField()
    'sub_pref': 'subprefa', # models.CharField(max_length=100)
    'regiao5': 'r5a',       # models.CharField(max_length=20)
    'regiao8': 'r8a',       # models.CharField(max_length=20)
    'nome_feira': 'feira a',        # models.CharField(max_length=200)
    'registro': 'registro a',       # models.CharField(max_length=20)
    'logradouro': 'aaaaaaaaaaaaaa', # models.CharField(max_length=250)
    'numero': 'nnnnnnnnnnnnn',      # models.CharField(max_length=50, blank=True)
    'bairro': 'bbbbbbbbb',          # models.CharField(max_length=50)
    'referencia': 'rrrrrrrrrrr',    # models.CharField(max_length=200, blank=True)
}
for field_name, value in sample_values.items():
    setattr(feira, field_name, value)
feira.save()
|
# from examples.bert_embeddings import BertEmbeddings, AbstractBertPooller
from bert_embeddings import BertEmbeddings, AbstractBertPooller
import numpy as np
class MeanPooller(AbstractBertPooller):
    """Pool BERT token embeddings into one vector per sentence by averaging,
    excluding the first and last token positions at either end."""

    def __init__(self):
        super().__init__()

    def get_name(self):
        """Identifier used to label this pooling strategy."""
        return 'mean_pooler'

    def pool(self, results):
        """Average each result's token vectors, dropping the first and last
        token positions, and stack the means into a single array."""
        pooled = []
        for result in results:
            vectors = result['embedding']
            token_count = len(result['tokens'])
            # slice off the special tokens at positions 0 and token_count-1
            inner = vectors[1: token_count - 1]
            pooled.append(np.mean(inner, axis=0))
        return np.array(pooled)
# Entry point: build the embeddings pipeline with mean pooling and run it.
if __name__ == "__main__":
    bert_embeddings = BertEmbeddings(MeanPooller)
    bert_embeddings.run()
|
from django.db import models
# Create your models here.
class SaleInfo(models.Model):
    """Sales contact record (name, age, email, employer)."""
    # explicit surrogate primary key (Django would add one implicitly anyway)
    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=32)
    age = models.IntegerField()
    email = models.EmailField()
    company = models.CharField(max_length=64)
    def __str__(self):
        # show the contact's name in the admin and in query listings
        return self.name
|
import docx

# Open an existing document and inspect/modify its first paragraph.
d = docx.Document('c:\\Users\\Manith\\demo.docx')
# Fix: keep the Paragraph object itself.  The original assigned
# `d.paragraphs[0].text` (a plain str), so the following `.runs` accesses
# raised AttributeError.
p = d.paragraphs[0]
p.runs[1].text   # the text of the second run (e.g. the bold part)
p.runs[1].italic # the italic flag of that run -- NOTE(review): this only
                 # reads the flag; assign True to actually change it
p.style = 'Title' # change the paragraph style to Title
d.save('c:\\Users\\Manith\\demo.docx')

# Make new doc
d2 = docx.Document()
d2.add_paragraph('This is a new paragrpah')
p = d2.paragraphs[0]
p.add_run('New run')
d2.save('c:\\Users\\Manith\\demo2.docx')
def gettext(filename):
    """Return the full text of the .docx file *filename*, one paragraph
    per line, joined into a single string."""
    doc = docx.Document(filename)
    return '\n'.join(paragraph.text for paragraph in doc.paragraphs)
import sys
import re
import numpy as np
import pandas as pd
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from sqlalchemy import create_engine
from sklearn.metrics import confusion_matrix
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
def load_data(messages_filepath, categories_filepath):
    """Load the messages and categories CSVs and merge them on 'id'.

    Fixes the original bug where both parameters were ignored and the
    hardcoded filenames 'messages.csv'/'categories.csv' were read instead.

    Args:
        messages_filepath: path to the messages CSV (must have an 'id' column).
        categories_filepath: path to the categories CSV (must have an 'id' column).

    Returns:
        pandas.DataFrame: inner join of the two files on 'id' (rows whose id
        appears in only one file are dropped).
    """
    messages = pd.read_csv(messages_filepath)
    categories = pd.read_csv(categories_filepath)
    df = pd.merge(messages, categories, how='inner', on='id')
    return df
def clean_data(df):
    """Expand the semicolon-delimited 'categories' column into one integer
    column per category, then drop duplicate rows.

    Returns the cleaned DataFrame (also prints the remaining duplicate count).
    """
    split_cats = df['categories'].str.split(pat=';', expand=True)
    # column labels are taken from the entries of the second row
    split_cats.columns = split_cats.iloc[1]
    # each cell like "related-1" becomes the trailing digit as an int
    split_cats = split_cats.applymap(lambda cell: int(cell[-1]))
    # strip the "-<digit>" suffix from the labels and clear the index name
    split_cats.columns = split_cats.columns.map(lambda label: label[:-2]).rename(None)
    df.drop(columns='categories', inplace=True)
    df = pd.concat([df, split_cats], axis=1)
    df.drop_duplicates(inplace=True)
    print ("Duplicates: ", df.duplicated().sum())
    return df
def save_data(df, database_filename):
    """Save *df* into the SQLite database at *database_filename*.

    Writes (or replaces) the table 'InsertTableName' without the index.
    """
    engine = create_engine('sqlite:///' + database_filename)
    df.to_sql('InsertTableName', engine, index=False, if_exists='replace')
def main():
    """CLI entry point: load, clean, and persist the disaster-response data.

    Expects exactly three arguments: messages CSV path, categories CSV path,
    and the target database path.  Prints usage help otherwise.
    """
    if len(sys.argv) != 4:
        print('Please provide the filepaths of the messages and categories '
              'datasets as the first and second argument respectively, as '
              'well as the filepath of the database to save the cleaned data '
              'to as the third argument. \n\nExample: python process_data.py '
              'disaster_messages.csv disaster_categories.csv '
              'DisasterResponse.db')
        return
    messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
    print('Loading data...\n MESSAGES: {}\n CATEGORIES: {}'
          .format(messages_filepath, categories_filepath))
    df = load_data(messages_filepath, categories_filepath)
    print('Cleaning data...')
    df = clean_data(df)
    print('Saving data...\n DATABASE: {}'.format(database_filepath))
    save_data(df, database_filepath)
    print('Cleaned data saved to database!')
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 20 05:30:32 2017
@author: BALASUBRAMANIAM
"""
import matplotlib.pyplot as plt
import numpy as np
plt.hist([1, 2, 1], bins=[0, 1, 2, 3])
plt.show()
#x = np.random.normal(size = 1000)
#plt.hist(x, normed=True, bins=30)
#plt.ylabel('Probability')
#plt.show()
'''
#cumulative histogram
plt.hist(x,
bins=100,
normed=True,
stacked=True,
cumulative=True)
plt.show()
plt.hist(x,
bins=100,
normed=True,
stacked=True,
)
plt.show()
'''
|
# Two equivalent ways to build a tuple: a literal and the tuple() constructor.
x = (1, 2, 3)
y = tuple((1, 2, 3))
print(y)
# Tuples are hashable, so they can be used as dict keys (e.g. coordinates).
# Fix: the original dict literal was missing the comma between its two
# entries, which is a SyntaxError.
locations = {
    (34.4545, 34.43423): "Tokyo",
    (78.4545, 54.43423): "New York",
}
# coding: utf-8
# In[1]:
"""
Created on Sat Jun 9 22:30:54 2018
@author: Wesley Lourenco Barbosa
"""
# Notebook-export boilerplate: clears the IPython workspace.
# NOTE(review): get_ipython() returns None outside IPython, so this script
# can only run inside an IPython/Jupyter session -- confirm intended.
from IPython import get_ipython
get_ipython().magic('reset -sf')
from IPython import get_ipython
# helper to clear the workspace again on demand
def __reset__(): get_ipython().magic('reset -sf')
import matplotlib.pyplot as plt
import fix_yahoo_finance as yf
import numpy as np
import pandas as pd
import pylab
import seaborn as sns
# TASK 01
# Ask the user which stock exchange to compare against the others,
# looping until a valid menu option (1-5) is entered.
valid_opt = False
opt = 0
while not valid_opt:
    opt = 0
    print('\n######### ##########\n',
          'Choose the stock exchange you want to compare agains the others:\n',
          '1 - NYSE [New York Stock Exchange]\n',
          '2 - NASDAQ [National Association of Securities Dealers Automated Quotations]\n',
          '3 - LSE [London Stock Exchange]\n',
          '4 - NSEI [National Stock Exchange of India]\n',
          '5 - BM&F Bovespa [Bolsa de Valores do Estado de Sao Paulo]')
    try:
        opt = int(input())
    except ValueError:
        # Fix: the original messages started with '\E', an invalid escape
        # sequence that printed a literal backslash (and is a SyntaxWarning
        # on Python 3.12+).
        print('Invalid Option')
        print('Enter a number')
    if opt not in [1, 2, 3, 4, 5]:
        print('Invalid Option')
        print('Enter a valid option.')
        input("Press Enter")
    else:
        valid_opt = True
# In[2]:
#TASK 02
#Each stock market will be attached to one index
# The chosen exchange's entry is listed first; the other four follow.
# NOTE(review): the NYSE label is 'NYSE-^NYA' in the opt==1 branch but
# 'NYSE - ^NYA' (with spaces) in every other branch -- confirm whether the
# inconsistency is intentional.
stock_markets_dict = {}
if opt==1:
    stock_markets_dict = {'NYSE-^NYA':'^NYA','NASDAQ-^IXIC':'^IXIC','LSE-LSE.L':'LSE.L','NSEI-^NSEI':'^NSEI','BM&F-^BVSP':'^BVSP'}
elif opt==2:
    stock_markets_dict = {'NASDAQ-^IXIC':'^IXIC','NYSE - ^NYA':'^NYA','LSE-LSE.L':'LSE.L','NSEI-^NSEI':'^NSEI','BM&F-^BVSP':'^BVSP'}
elif opt==3:
    stock_markets_dict = {'LSE-LSE.L':'LSE.L','NYSE - ^NYA':'^NYA','NASDAQ-^IXIC':'^IXIC','NSEI-^NSEI':'^NSEI','BM&F-^BVSP':'^BVSP'}
elif opt==4:
    stock_markets_dict = {'NSEI-^NSEI':'^NSEI','NYSE - ^NYA':'^NYA','NASDAQ-^IXIC':'^IXIC','LSE-LSE.L':'LSE.L','BM&F-^BVSP':'^BVSP'}
else:
    stock_markets_dict = {'BM&F-^BVSP':'^BVSP','NYSE - ^NYA':'^NYA','NASDAQ-^IXIC':'^IXIC','LSE-LSE.L':'LSE.L','NSEI-^NSEI':'^NSEI'}
#TASK 03 - Download the 10 years of data
# Fetch ten years of daily quotes for every index via Yahoo Finance.
stock_data = []
for ticker in stock_markets_dict.values():
    print(ticker)
    start_date = '2008-06-23'
    end_date = '2018-06-22'
    stock_data_tmp = yf.download(ticker,start_date,end_date)
    # treat anything with more than 5 rows as a successful download
    if(len(stock_data_tmp)>5):
        valid_stock = True  # NOTE(review): set but never read afterwards
        print('Stock Succesfully Downloaded')
        stock_data.append(stock_data_tmp)
    else:
        print('Stock Download Failed.')
# In[3]:
# Get the close value of the last day of each month
last_close = []
for stocks in stock_data:
    # group each index's rows by calendar month and keep the row with the
    # latest date in every month
    last_close.append(stocks['Close'].iloc[stocks.reset_index().groupby(stocks.index.to_period('M'))['Date'].idxmax()])
#Array with month and year to build the df with monthly stock returns
# NOTE(review): the while conditions use "< 12" / "< 7", so December (and
# June 2018) are excluded from the label list -- confirm this matches the
# number of monthly samples produced above.
dates = []
for year in range(2008,2019):
    if(year==2008):
        month = '7'
        while(int(month)<12):
            dates.append(month+'-'+str(year))
            month = str(int(month)+1)
    elif year==2018:
        month = '1'
        while(int(month)<7):
            dates.append((month)+'-'+str(year))
            month = str(int(month)+1)
    else:
        month = '1'
        while(int(month)<12):
            dates.append((month)+'-'+str(year))
            month = str(int(month)+1)
#Create the list of indexes for the df
indexes = ['Date']
indexes.extend(stock_markets_dict.values())
#Create the DF
monthly_returns_df = pd.DataFrame([],columns=indexes)
monthly_returns_df['Date'] = pd.Series(dates)
#Calculate the monthly return values and insert them into pandas DF
cont = 0
for close_value in last_close:
    monthly_return = []
    for i in range(1,len(close_value)):
        # NOTE(review): the denominator is the current close (close_value[i]);
        # the conventional return formula divides by the previous close --
        # confirm which definition is intended.
        monthly_return.append(((close_value[i]-close_value[i-1])/close_value[i])*100)
    cont = cont+1
    monthly_returns_df[indexes[cont]] = pd.Series(monthly_return)
# In[4]:
#TASK 04
# Compute the correlation matrix
corr = monthly_returns_df.corr()
corr.style.background_gradient().set_precision(3)
# In[11]:
#TASK 05
#Plots each indices return in a different graph over time
# to observe its behaviour over time and visually check if there is any strong correlation
# Initialize the figure
plt.style.use('seaborn-darkgrid')
# create a color palette
palette = plt.get_cmap('Set1')
plt.figure(figsize=(20,50))
# multiple line plot: one subplot per index, with the other four drawn in
# faint grey behind it for visual comparison
num=0
for column in monthly_returns_df.drop('Date', axis=1):
    num+=1
    # Find the right spot on the plot
    plt.subplot(5,1, num)
    # plot every groups, but discreet
    for v in monthly_returns_df.drop('Date', axis=1):
        plt.plot(monthly_returns_df['Date'], monthly_returns_df[v], marker='', color='grey', linewidth=0.6, alpha=0.3)
    # Plot the lineplot
    plt.plot(monthly_returns_df['Date'], monthly_returns_df[column], marker='', color=palette(num), linewidth=2.0, alpha=0.9, label=column)
    # Same limits for everybody!
    #plt.xlim(0,10)
    plt.ylim(-60,30)
    plt.tick_params(labelleft='on')
    plt.xticks(rotation=90)
    # Add title
    plt.title(column, loc='left', fontsize=12, fontweight=0, color=palette(num) )
plt.suptitle("Single Lines Graph", fontsize=13, fontweight=0, color='black', style='italic', y=0.6)
# Axis title
plt.text(55, -72, 'Dates', fontsize=18, ha='center', va='center')
plt.text(-8, 220, 'Monthly Returns', fontsize=18, ha='center', va='center', rotation='vertical')
plt.savefig('returns_over_time.pdf', bbox_inches='tight', )
# In[6]:
#Plots all indices return in the same graph over time to observe its behaviour over time and visually check if there is any strong correlation
# multiple line plot
# The interest market index is in stronger color
palette = plt.get_cmap('Set1')
plt.figure(figsize=(100,20))
n = 0
x = monthly_returns_df['Date']
for column in monthly_returns_df.drop('Date', axis=1):
    # the first column is the user's chosen index, so it gets a thicker line
    if n==0:
        plt.plot( x, column, data=monthly_returns_df, marker='', color=palette(n), linewidth=6, linestyle='-')
    else:
        plt.plot( x, column, data=monthly_returns_df, marker='', color=palette(n), linewidth=2, linestyle='-')
    n = n+1
# plt.plot( 'Date', '^NYA', data=monthly_returns_df, marker='', markerfacecolor='blue', markersize=12, color=palette(cont), linewidth=2, linestyle='-')
# plt.plot( 'Date', '^IXIC', data=monthly_returns_df, marker='', color=palette(n), linewidth=2, linestyle='-')
# plt.plot( 'Date', 'LSE.L', data=monthly_returns_df, marker='', color=palette(2), linewidth=2, linestyle='-')
# plt.plot( 'Date', '^NSEI', data=monthly_returns_df, marker='', color=palette(3), linewidth=2, linestyle='-')
# plt.plot( 'Date', '^BVSP', data=monthly_returns_df, marker='', color=palette(4), linewidth=2, linestyle='-')
# general title
plt.suptitle("Multiple Lines Graph", fontsize=13, fontweight=0, color='black', style='italic', y=1.02)
# Axis title
plt.text(60, -62, 'Dates', fontsize=18, ha='center', va='center')
plt.text(-7, -20, 'Monthly Returns', fontsize=18, ha='center', va='center', rotation='vertical')
plt.legend()
#plt.savefig('returns_over_time_All_in_One_Graph.pdf', bbox_inches='tight', )
# In[7]:
# with regression
# Plot all of them
sns.pairplot(monthly_returns_df, kind="reg")
plt.show()
#plt.savefig('pairplot_reg.pdf', bbox_inches='tight', )
print('\n\n Basic correlogram')
# Basic correlogram
sns.pairplot(monthly_returns_df)
plt.show()
#plt.savefig('pairplot_corr.pdf', bbox_inches='tight', )
# In[8]:
#Plot only the chosen one agains the others
# column 1 is the user's chosen index; columns 2-5 are the other exchanges
x = monthly_returns_df[monthly_returns_df.columns[0]]
n = 0
columns = monthly_returns_df.columns[2:6]
y_chosenIndex = monthly_returns_df.columns[1]
for column in columns:
    plt.figure(figsize=(30,10)).add_subplot(111)
    plt.xticks(rotation=90)
    y = monthly_returns_df[column]
    plt.scatter(x, monthly_returns_df[y_chosenIndex], color='darkred', label=y_chosenIndex)
    plt.scatter(x, y, color=palette(n+1), label=column)
    plt.legend()
    plt.show()
    n+=1
#plt.savefig('scatterPlot_againsTheChosenOne.pdf', bbox_inches='tight', )
# In[9]:
#Plot only the chosen one agains the others
# all five indices as scatter series on a single figure
x = monthly_returns_df[monthly_returns_df.columns[0]]
plt.figure(figsize=(30,10)).add_subplot(111)
plt.xticks(rotation=90)
n = 0
for column in monthly_returns_df.columns.drop('Date'):
    y = monthly_returns_df[column]
    plt.scatter(x, y, color=palette(n), label=column)
    n+=1
plt.legend()
#plt.savefig('scatterPlot.pdf', bbox_inches='tight', )
plt.show()
# In[10]:
# Correlation heatmap of the monthly returns.
f, ax = plt.subplots(figsize=(10, 8))
corr = monthly_returns_df.corr()
# Fix: np.bool was deprecated in NumPy 1.20 and removed in 1.24; the builtin
# bool is the correct dtype.  The all-False mask hides nothing and is kept
# only to preserve the original call shape.
sns.heatmap(corr, mask=np.zeros_like(corr, dtype=bool), cmap=sns.diverging_palette(220, 10, as_cmap=True),
            square=True, ax=ax)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
-----------------------------------------------------
@post('/scan/<taskid>/start')
@get('/scan/<taskid>/stop')
@get('/scan/<taskid>/kill')
@get('/scan/<taskid>/status')
@get('/scan/<taskid>/data')
-----------------------------------------------------
"""
import time
from urllib.parse import urljoin
import requests
from .logs import get_logger
from .exceptions import TaskStatusError, TaskResultError, TaskLogError
logger = get_logger('sqlmapcli')
class TaskStatus(object):
    """ Task status constant """
    # values mirror the status strings returned by the sqlmapapi server
    READY = 'not running'
    RUNNING = 'running'
    FINISHED = 'terminated'
class Task(object):
    """Client-side handle for one scan task on a remote sqlmapapi server."""

    def __init__(self, id, options, addr):
        """ Create a task object.

        Args:
            id (str): task id from remote sqlmapapi server.
            options (dict): options used to run task, see
                `curl http://<host>:<port>/option/<taskid>/list`.
            addr (str): remote sqlmapapi server address.
        """
        self.id = id
        self.addr = addr
        self.options = options or {}
        # always store url target in task object
        self.url = self.options.get('url', None)

    def __str__(self):
        return '<Task#%s>' % self.id

    def __repr__(self):
        return str(self)

    def _request(self, path, method='GET'):
        """ Used to request remote sqlmapapi server.

        Args:
            path (str): url path for request.
            method (str): GET or POST for different request.

        Returns:
            dict if successful, None otherwise.
        """
        try:
            url, method = urljoin(self.addr, path), method.upper()
            if method == 'GET':
                r = requests.get(url).json()
            elif method == 'POST':
                # POST always carries the task options as the JSON body
                r = requests.post(url, json=self.options).json()
        except requests.RequestException as e:
            logger.error('Fail to %s %s: %s' % (method, path, e))
            return None
        if r.get('success'):
            return r
        else:
            logger.error('Fail to %s %s: %s' % (method, path, r.get('message')))  # noqa
            return None

    def set_option(self, key, value):
        """ Set option for task.

        Options can be set when client create task, or call `set_option`
        after task is created but not start.

        Args:
            key (str): option name.
            value (str): option value.

        Returns:
            Task: for chained call, eg.
                `task.set_option(key, value).set_option(key, value)`.
        """
        self.options[key] = value
        if key == 'url':
            self.url = value
        return self

    def get_option(self, key):
        """ Get task option.

        Args:
            key (str): option name.

        Returns:
            str: option value (None when the option was never set).
        """
        return self.options.get(key)

    def update_options(self, options):
        """ Update some options at same time.

        Args:
            options (dict): options that to update.
        """
        self.options.update(options)
        if 'url' in options:
            self.url = options.get('url')

    def list_options(self):
        """ Get options that manually set.

        Returns:
            dict: options that user set.
        """
        return self.options

    def start(self, url=None, options=None):
        """ Task start to run.

        Args:
            url (str): target url to scan by sqlmap, this is a shorthand
                for set option with key `url`
            options (Optional[dict]): shorthand, set options for task,
                alternative to `set_option` or `update_options` or set
                options when create task.

        Returns:
            str: engineid, maybe useful in future (None when starting failed).
        """
        if options:
            self.update_options(options)
        if url:
            self.url = url
            self.set_option('url', url)
        r = self._request('/scan/%s/start' % self.id, 'POST')
        self.engineid = r.get("engineid") if r else None
        return self.engineid

    def stop(self):
        """ Stop running task.

        Returns:
            bool: True if stop successfully, False otherwise.
        """
        r = self._request('/scan/%s/stop' % self.id)
        return bool(r)

    def kill(self):
        """ Kill running task unconditionally.

        Returns:
            bool: True if Kill successfully, False otherwise.
        """
        r = self._request('/scan/%s/kill' % self.id)
        return bool(r)

    def status(self):
        """ Task currenty status, ready, running or finished.

        Returns:
            dict: include status and retcode.

        Raises:
            TaskStatusError: status exception.
        """
        r = self._request('/scan/%s/status' % self.id)
        if r:
            status, retcode = r.get('status'), r.get('returncode')
            return {'status': status, 'retcode': retcode}
        else:
            raise TaskStatusError("Can't get status")

    @property
    def ready(self):
        """ shorthand for task status.

        Returns:
            bool: True if task is created but not start, False otherwise.
        """
        try:
            r = self.status()
            return r.get('status') == TaskStatus.READY
        except TaskStatusError as e:
            logger.error('Fail to GET task<%s> status: %s', self.id, e)
            return False

    @property
    def running(self):
        """ shorthand for task status.

        Returns:
            bool: True if task start but not finished, False otherwise.
        """
        try:
            r = self.status()
            return r.get('status') == TaskStatus.RUNNING
        except TaskStatusError as e:
            logger.error('Fail to GET task<%s> status: %s', self.id, e)
            return False

    @property
    def finished(self):
        """ shorthand for task status.

        Returns:
            bool: True if task is finished, False otherwise.
        """
        try:
            r = self.status()
            return r.get('status') == TaskStatus.FINISHED
        except TaskStatusError as e:
            logger.error('Fail to GET task<%s> status: %s', self.id, e)
            return False

    def get_result(self):
        """ Get task result.

        Returns:
            dict: task data.

        Raises:
            TaskResultError: task result exception.
        """
        r = self._request('/scan/%s/data' % self.id)
        if r:
            return r.get('data')
        else:
            raise TaskResultError("Can't get result")

    def get_log(self, start=None, end=None):
        """ Get task log.

        Args:
            start (int): start index of log list.
            end (int): end index of log list.

        Returns:
            dict: task log data.

        Raises:
            TaskLogError: task log exception.
        """
        # Fix: compare against None so a 0 start index (falsy) still selects
        # the ranged endpoint; the original `if start and end:` silently
        # ignored start=0.
        if start is not None and end is not None:
            r = self._request('/scan/%s/log/%s/%s' % (self.id, start, end))
        else:
            r = self._request('/scan/%s/log' % self.id)
        if r:
            return r.get('log')
        else:
            raise TaskLogError("Can't get log")

    def run(self, url=None, options=None, interval=5):
        """ Shorthand for call `start`, `status` and `get_result`

        Args:
            url (str): target url to scan by sqlmap, this is a shorthand
                for set option with key `url`
            options (Optional[dict]): shorthand, set options for task,
                alternative to `set_option` or `update_options` or set
                options when create task.
            interval (int): interval time to query task status, seconds default.

        Returns:
            dict if successfully, None otherwise.
        """
        self.start(url, options)
        # NOTE(review): if start() failed (engineid is None) this still polls
        # and tries to fetch a result -- confirm whether an early return is
        # preferable.
        while self.running:
            time.sleep(interval)
        try:
            r = self.get_result()
        except TaskResultError as e:
            logger.error('Fail to GET task<%s> result: %s', self.id, e)
            return None
        return r
|
from devices import SerialDevice, profiles, profile_names, UnknownDevice, DeviceStates
from serial.tools.list_ports import comports
import time
import logging
from qt import *
from .bundles import SigBundle, SlotBundle
from .subscriptions import SubscriptionManager
class DeviceManager(QObject):
    """Owns every connected serial device: periodically scans ports,
    enumerates newly attached devices, and forwards add/remove events
    through Qt signals."""
    # forward these method calls, for backwards compatibility
    subscribe = SubscriptionManager.subscribe
    subscribe_to = SubscriptionManager.subscribe_to

    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.log = logging.getLogger(__name__)
        self.log.info("Initializing DeviceManager...")
        self.signals = SigBundle({'device_added': [SerialDevice], 'device_removed': [str]})
        self.slots = SlotBundle({'add_device': [SerialDevice], 'remove_device': [str]})
        self.slots.link_to(self)
        self.devices = {}        # guid -> fully-enumerated device
        self.new_devices = []    # devices still being enumerated
        self.first_scan = True
        self.ports = []          # ports seen on the previous scan
        self.sub_manager = SubscriptionManager(self)
        # QSettings returns a bare str when only one value was stored
        prev = QSettings().value('starting_devices', [])
        if isinstance(prev, str):
            prev = [prev]
        self.starting_devices = prev
        prev = QSettings().value('ignored_ports', [])
        if isinstance(prev, str):
            prev = [prev]
        self.ignored_ports = prev
        self.scan_timer = QTimer()
        self.scan_timer.timeout.connect(lambda: self.scan())
        self.scan_timer.start(250)
        self.update_timer = QTimer()
        self.update_timer.timeout.connect(lambda: self.update())
        self.update_timer.start(1)
        self.sub_manager.check_for_new_subscribers()
        UnknownDevice.register_autodetect_info(profiles)

    def set_starting_devices(self, devices):
        """Persist the list of devices to open automatically on startup."""
        self.starting_devices = devices
        QSettings().setValue('starting_devices', devices)

    def set_ignored_ports(self, ports):
        """Persist the list of ports the scanner must never touch."""
        self.ignored_ports = ports
        QSettings().setValue('ignored_ports', ports)

    def close(self):
        """Stop both timers and close every open device."""
        self.scan_timer.stop()
        self.update_timer.stop()
        for _, device in self.devices.items():
            device.close()

    def on_add_device(self, device):
        """Register *device* and announce it to subscribers."""
        self.devices[device.guid] = device
        self.signals.device_added.emit(device)

    def on_remove_device(self, guid):
        """Announce removal, then close and forget the device."""
        self.signals.device_removed.emit(guid)
        self.devices[guid].close()
        self.devices.pop(guid)

    def do_first_scan(self, new_ports):
        """Open the persisted starting devices ("profile:port[:baud]" strings)."""
        if self.starting_devices:
            for string in self.starting_devices:
                parts = string.split(':')
                profile = parts[0]
                port = parts[1]
                baud = parts[2] if len(parts) == 3 else None
                if profile in profile_names:
                    if port in new_ports or port == 'DummyPort':
                        if baud:
                            self.on_add_device(profiles[profile](port=port, baud=baud))
                        else:
                            self.on_add_device(profiles[profile](port=port))
                        if port != 'DummyPort':
                            self.ports.append(port)
        self.first_scan = False

    def scan(self):
        """Diff current serial ports against the previous scan: start
        enumeration for new ports and clean up devices whose port vanished."""
        self.sub_manager.check_for_new_subscribers()
        new_ports = [p.device for p in sorted(comports())]
        if self.first_scan:
            self.do_first_scan(new_ports)
        for port in [p for p in new_ports if p not in self.ports]:
            if port not in self.ignored_ports:
                self.log.debug(f"New device connected at ({port}), enumerating...")
                device = UnknownDevice(port=port)
                self.new_devices.append(device)
        for port in [p for p in self.ports if p not in new_ports]:
            self.log.debug(f"Existing device removed from ({port}), cleaning up...")
            for k in self.devices.keys():
                if self.devices[k].port == port:
                    self.on_remove_device(self.devices[k].guid)
                    break
        self.ports = new_ports

    def update(self):
        """Pump every device's I/O and promote or discard devices that have
        finished enumerating."""
        # Fix: iterate over a snapshot.  The original looped over
        # self.new_devices directly while calling remove() on it, which makes
        # Python's list iterator silently skip the element that follows each
        # removal.
        for device in list(self.new_devices):
            device.communicate()
            if device.state == DeviceStates.enumeration_failed:
                self.log.debug(f"Enumeration failed on ({device.port})")
                device.close()
                self.new_devices.remove(device)
            elif device.state == DeviceStates.enumeration_succeeded:
                # NOTE(review): a device whose reported name is not in
                # profile_names is never removed and will be retried forever
                # -- confirm intended.
                if device.name in profile_names:
                    device.close()
                    new_device = profiles[device.name](device=device)
                    self.log.debug(f"Enumeration succeeded on ({new_device.port})")
                    self.devices[new_device.guid] = new_device
                    self.new_devices.remove(device)
                    self.signals.device_added.emit(new_device)
        for guid in list(self.devices.keys()):
            self.devices[guid].communicate()
'''
Read Lorenz96 propagation results and plotting
'''
import numpy as np
import matplotlib.pyplot as plt
import pickle
import matplotlib
#setup plotting
plt.rcParams["figure.figsize"] = [4., 3.]
SMALL_SIZE = 10
matplotlib.rc('axes', titlesize=SMALL_SIZE)
matplotlib.rc('font', size=SMALL_SIZE)
plt.tick_params(labelsize=10)
# which experiment / prediction mode to load
test=2
pred_mode=1
#2D
results = pickle.load(open('./Results/Results/res_propagation_test'+str(test)+'_predmode_'+str(pred_mode)+'.p', 'rb'))
#1D
#results = pickle.load(open('./Results/Results/res_propagation1d_test'+str(test)+'.p', 'rb'))
# unpack the pickled propagation results
K_d=results['K_d']
MEAN_d=results['MEAN_d']
VAR_d=results['VAR_d']
X_hist_d=results['X_hist_d']
W=results['W']
w=results['w']
mean_1=results['mean_1']
var_1=results['var_1']
y=np.array(results['y_test'])[0,:,0]
n_steps=len(MEAN_d)
n_samples=len(K_d['K1'])
#3D
point=results['point']
X=np.array(X_hist_d['hist0'][0])
X_initial=X[0,-1,0]
'''
#1D
point=2
X_initial=X_hist_d['hist0'][0][0,-1]
#X_initial=-3.2657751
#plotting
'''
#plot initial starting point with errorbar
plt.scatter(w,X_initial,color='blue',label='sampled distribution')
std1=var_1**0.5
std1=np.array(std1)
std1=3.5*std1
#plot sampled points
#plot predicted means
#plt.scatter(1,mean_1,color='blue')
#plt.errorbar(1,mean_1,yerr=std1,capsize=0,fmt='',ecolor='lightgrey')
#for i in range(2,n_steps-1):
#    plt.scatter(W['w{0}'.format(i)],MEAN_d['mean{0}'.format(i+1)],color='deepskyblue')
#plot predicted means: one violin of sampled values per propagation step
for i in range(1,n_steps):
    pos=[i]
    violin=plt.violinplot(K_d['K{0}'.format(i)].reshape(len(K_d['K{0}'.format(i)])),pos,showmeans = True)
    for pc in [violin['cbars'],violin['cmins'],violin['cmaxes'],violin['cmeans']]:
        pc.set_edgecolor('#2222ff')
    for pc in violin['bodies']:
        pc.set_facecolor('#2222ff')
plt.grid(False)
x=range(1,n_steps,1)
plt.plot(x,y[point:point+n_steps-1],color='red',marker='o',linestyle='--',label='target',fillstyle='none')
plt.xlabel('#step')
plt.title('Lorenz 96 propagation')
plt.legend(loc=1, prop={'size': 8})
plt.savefig('./Figures/L96_Prop_nsamples'+str(n_samples)+'_nsteps'+str(n_steps-1)+'.pdf')
plt.show()
'''WALK plot'''
# second figure: predicted mean walk with min/max confidence band
x=[]
y1=[X_initial]
y2=[X_initial]
mean1=[]
for i in range(1,n_steps):
    pos=[i]
    y1.append(min(K_d['K{0}'.format(i)]))
    y2.append(max(K_d['K{0}'.format(i)]))
    x.append(i)
    mean1.append(K_d['K{0}'.format(i)].mean())
x1=np.append(0,x)
# NOTE(review): the reshape hardcodes length 11, i.e. it assumes n_steps == 11
# -- confirm, or derive the shape from n_steps.
y1=np.array(y1).reshape(11,)
y2=np.array(y2).reshape(11,)
plt.plot(x,mean1,label='predicted mean',color='blue',marker='o',linestyle='--')
plt.plot(x,y[point:point+n_steps-1],label='target',marker='o',linestyle='--',color='red',fillstyle='none')
plt.fill_between(x1,y1,y2,facecolor='lightgrey',label='confidence bound')
plt.scatter(0,X_initial,color='blue',label='initial point')
plt.xlabel('#step')
plt.title('Lorenz 96: Sampled distributions vs. true values')
plt.legend(loc=1, prop={'size': 8})
plt.savefig('./Figures/L96_uncertainty1.pdf')
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from crawl_pro.items import CrawlProItem
class FirstSpider(CrawlSpider):
    """Crawl the gallery listing pages and scrape each picture's alt text."""
    name = 'first'
    # allowed_domains = ['www.aaa.com']
    start_urls = ['http://www.521609.com/daxuemeinv/']
    # follow every pagination link of the form "list8<digits>.html"
    rules = (
        Rule(LinkExtractor(allow=r'list8\d+\.html'), callback='parse_item', follow=True),
    )

    def parse_item(self, response):
        """Request the detail page behind every list entry on this page."""
        for entry in response.xpath('/html/body/div[4]/div[2]/div[2]/ul/li'):
            detail_url = 'http://www.521609.com/' + entry.xpath('./a/@href').extract_first()
            yield scrapy.Request(url=detail_url, callback=self.parse_href)

    def parse_href(self, response):
        """Extract the big image's alt text from a detail page into an item."""
        item = CrawlProItem()
        item['name'] = response.xpath('//*[@id="bigimg"]/@alt').extract_first()
        yield item
|
# The original hand-copied the pure-Python "combinations" recipe from the
# itertools documentation; the stdlib C implementation is equivalent and
# faster, so delegate to it.
def combinations(iterable, r):
    """Yield all r-length tuples of elements from *iterable*, in order.

    combinations('ABCD', 2) --> AB AC AD BC BD CD
    combinations(range(4), 3) --> 012 013 023 123
    """
    from itertools import combinations as _combinations
    yield from _combinations(iterable, r)
# This function takes a target number and 6 card numbers and finds all the solutions
def findSolutions(target, numberCards):
    """Kick off the recursive search and return its results as a list.

    NOTE(review): recurseSolve only returns a chain in its base case, so this
    currently wraps a single (possibly None) result -- verify intent.
    """
    return [recurseSolve(target, list(numberCards), "")]
def recurseSolve(target, numberCards, solutionChain):
    """Depth-first Countdown-style search: repeatedly combine two cards with
    +, * , / (whole-number results only) or - (positive results only) until
    *target* appears among the remaining cards.

    BUGFIXES vs. the original:
    * `if (pair[0] or pair[1] != 1)` was a precedence bug and always passed;
      the intended "skip multiplying/dividing by 1" check is restored.
    * `isinstance(a / b, float)` is always True under Python 3's true
      division, so the division branches never ran; modulo + floor division
      are used instead.
    * recursive results were discarded, so solutions deeper than one step
      were never reported; the first found chain is now propagated up.

    Args:
        target        -- the value to reach
        numberCards   -- list of remaining card values (expected positive)
        solutionChain -- ';'-separated description of the steps taken so far

    Returns:
        The solution chain string (ending in '; = <target>') for the first
        solution found, or None when this branch cannot reach the target.
    """
    if target in numberCards:
        return solutionChain + "; = " + str(target)
    if not all(x > 0 for x in numberCards):
        return None
    for a, b in combinations(numberCards, 2):
        # Remaining cards once this pair has been consumed.
        nextCards = numberCards[:]
        nextCards.remove(a)
        nextCards.remove(b)
        prefix = solutionChain + ";" + str(numberCards) + "->"
        # Addition (order-independent).
        found = recurseSolve(target, nextCards + [a + b],
                             prefix + "(" + str(a) + "+" + str(b) + ")")
        if found:
            return found
        # Multiplying or dividing by 1 never produces a new value; skip.
        if a != 1 and b != 1:
            found = recurseSolve(target, nextCards + [a * b],
                                 prefix + "(" + str(a) + "*" + str(b) + ")")
            if found:
                return found
            # Division, both orders, only when it stays a whole number
            # (all cards are positive here, so no zero-division risk).
            if a % b == 0:
                found = recurseSolve(target, nextCards + [a // b],
                                     prefix + "(" + str(a) + "/" + str(b) + ")")
                if found:
                    return found
            if b % a == 0:
                found = recurseSolve(target, nextCards + [b // a],
                                     prefix + "(" + str(b) + "/" + str(a) + ")")
                if found:
                    return found
        if a != b:
            # Subtraction, both orders, keeping only positive results.
            if a - b > 0:
                found = recurseSolve(target, nextCards + [a - b],
                                     prefix + "(" + str(a) + "-" + str(b) + ")")
                if found:
                    return found
            if b - a > 0:
                found = recurseSolve(target, nextCards + [b - a],
                                     prefix + "(" + str(b) + "-" + str(a) + ")")
                if found:
                    return found
    return None
|
import sqlalchemy as sa
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from users import lets_connect
from users import request_data as ask
from users import User
import datetime
# Connection string for the demo SQLite database.
# NOTE(review): DB_PATH is defined but not used in this module --
# lets_connect() from `users` presumably opens the connection; confirm.
DB_PATH = "sqlite:///sochi_athletes.sqlite3"
Base=declarative_base()
class Athelete(Base):
    """ORM model for the 'athelete' table of sochi_athletes.sqlite3.

    NOTE(review): the class/table name keeps the original (misspelled)
    'athelete' to match the existing database schema.
    """
    __tablename__ = 'athelete'
    id = sa.Column(sa.Integer, primary_key=True)
    age = sa.Column(sa.Integer)
    # Stored as text (e.g. '1984-05-17'); parsed with date_convert().
    birthdate = sa.Column(sa.Text)
    gender = sa.Column(sa.Text)
    height = sa.Column(sa.Float)
    weight = sa.Column(sa.Integer)
    name = sa.Column(sa.Text)
    gold_medals = sa.Column(sa.Integer)
    silver_medals = sa.Column(sa.Integer)
    bronze_medals = sa.Column(sa.Integer)
    total_medals = sa.Column(sa.Integer)
    sport = sa.Column(sa.Text)
    country = sa.Column(sa.Text)
def date_convert(date):
    """Parse a 'YYYY-MM-DD' or 'YYYY.MM.DD' text date into a datetime.date."""
    # Accept both separators because user input and DB rows differ.
    separator = '-' if '-' in date else '.'
    parts = [int(piece) for piece in date.split(separator)]
    return datetime.date(*parts)
#1 march 2000 28 february 26 february
def near_brn(u_bd, all_bd):
    """Return the id of the athlete whose birthdate is closest to *u_bd*.

    all_bd maps athlete id -> datetime.date.  The initial best distance is
    measured against 2077-12-10 (an in-joke sentinel kept from the original
    code), so an empty mapping yields None.
    """
    best_gap = abs(u_bd - datetime.date(2077, 12, 10))
    best_id = None
    for athlete_id, birth in all_bd.items():
        gap = abs(u_bd - birth)
        if gap < best_gap:
            best_gap = gap
            best_id = athlete_id
    return best_id
def near_height(u_h, all_h):
    """Return (id, gap) for the athlete whose height is closest to *u_h*.

    all_h maps athlete id -> height; None heights are skipped.  The gap is
    rounded to 2 decimals.  With no usable heights the result is
    (None, 1100000) -- the original sentinel starting distance.
    """
    best_gap = 1100000
    best_id = None
    for athlete_id, height in all_h.items():
        if height is None:
            continue
        gap = abs(float(u_h) - float(height))
        if gap < best_gap:
            best_gap = gap
            best_id = athlete_id
    return best_id, round(best_gap, 2)
def find_user(session):
    """Prompt for a participant id and look that user up in the database.

    Args:
        session -- an open SQLAlchemy session.
    Returns:
        (found, first_name, id, height, birthdate): found is False (and the
        other fields None) when no user with that id exists; birthdate is
        converted to a datetime.date via date_convert().
    """
    id_ = input("Позвольте узнать идентификатор искомого участника \n")
    # Fetch the single matching row once; the original re-ran query.all()
    # in four separate list comprehensions for the same user.
    user = session.query(User).filter(User.id == id_).first()
    if user is None:
        return False, None, None, None, None
    print("Ваш выбор пал на {}".format(user.first_name))
    return True, user.first_name, id_, user.height, date_convert(user.birthdate)
def main():
    """Find the athletes closest in height and in birthdate to a chosen user.

    Flow: keep prompting for a user id until a valid one is entered, then
    scan every Athelete row for the nearest height and the nearest
    birthdate, printing the result of each search.
    """
    session=lets_connect()
    find_true,name,in_id,height,bd=find_user(session)
    # Re-prompt until an existing participant id is entered.
    while not find_true:
        print('Участникик с этим идентификатором не найден. Перепроверьте данные и попробуйте ещё раз')
        find_true,name, in_id,height,bd =find_user(session)
    query_all=session.query(Athelete)
    user_heights={}
    user_bds={}
    # users_cnt = query.count()
    # print(users_cnt)
    # Build an id -> height mapping over all athletes, then drop the chosen
    # user so they cannot match themselves.
    # NOTE(review): assumes the entered user id also exists in the athelete
    # table -- `del` raises KeyError otherwise; confirm.
    user_ids=[user.id for user in query_all.all()]
    user_heights_list=[user.height for user in query_all.all()]
    for a,b in zip(user_ids, user_heights_list):
        user_heights[a]=b
    del user_heights[int(in_id)]
    n_ih,n_hd = near_height(height, user_heights)
    query = session.query(Athelete).filter(Athelete.id == n_ih)
    name_height=[user.name for user in query.all()][0]
    print ("Рост участника #{id} {name_height} отличатеся от {name_u} всего на {dif} метра".format(id=n_ih, name_height=name_height, name_u=name, dif=n_hd))
    # Same pattern for birthdates (text column converted to datetime.date).
    user_bds_list=[user.birthdate for user in query_all.all()]
    for a,b in zip(user_ids, user_bds_list):
        user_bds[a]=date_convert(b)
    del user_bds[int(in_id)]
    n_ib=near_brn(bd,user_bds)
    query = session.query(Athelete).filter(Athelete.id == n_ib)
    name_birth=[user.name for user in query.all()][0]
    print ("#{id} {b_name} и {u_name} почти ровестники!".format(id=n_ib, b_name=name_birth,u_name=name) )
if __name__=='__main__':
    # Run the interactive search only when executed as a script.
    main()
|
# -*- coding: utf-8 -*-
# pylint: disable=C0103
# pylint: disable=C0111
import ustruct
from ubinascii import hexlify, unhexlify
from micropython import const
"""
SMP PDUs
References can be found here:
* https://www.bluetooth.org/en-us/specification/adopted-specifications - Core specification 4.1
** [vol 3] Part H (Section 3.3) - Command Format
"""
# SMP command codes (Core spec 4.1, Vol 3, Part H, Section 3.3).
PAIRING_REQUEST = const(0x01)
PAIRING_RESPONSE = const(0x02)
PAIRING_CONFIRM = const(0x03)
PAIRING_RANDOM = const(0x04)
PAIRING_FAILED = const(0x05)
ENCRYPTION_INFORMATION = const(0x06)
MASTER_IDENTIFICATION = const(0x07)
IDENTITY_INFORMATION = const(0x08)
IDENTITY_ADDRESS_INFORMATION = const(0x09)
SIGNING_INFORMATION = const(0x0a)
SECURITY_REQUEST = const(0x0b)
# Human-readable name for each SMP command code.
SMP_PDUS = {
    PAIRING_REQUEST: "Pairing_Request",
    PAIRING_RESPONSE: "Pairing_Response",
    PAIRING_CONFIRM: "Pairing_Confirm",
    PAIRING_RANDOM: "Pairing_Random",
    PAIRING_FAILED: "Pairing_Failed",
    ENCRYPTION_INFORMATION: "Encryption_Information",
    MASTER_IDENTIFICATION: "Master_Identification",
    IDENTITY_INFORMATION: "Identity_Information",
    IDENTITY_ADDRESS_INFORMATION: "Identity_Address Information",
    SIGNING_INFORMATION: "Signing_Information",
    SECURITY_REQUEST: "Security_Request"
}
class SMP(object):
    """A Security Manager Protocol PDU: one command-code octet plus payload.

    References:
    * https://www.bluetooth.org/en-us/specification/adopted-specifications
      Core specification 4.1, [vol 3] Part H (Section 3.3) - Command Format
    """

    # A PDU header is a single unsigned byte holding the command code.
    struct_format = "<B"
    struct_size = ustruct.calcsize(struct_format)

    def __init__(self, code, data=b''):
        """Build a PDU from *code* (a key of SMP_PDUS) and payload bytes.

        Raises KeyError when *code* is not a known SMP command code.
        """
        self._code = code
        self._code_name = SMP_PDUS[code]
        self._data = data

    def __getattr__(self, name):
        # Read-only views of the PDU fields.
        if name == "code":
            return self._code
        elif name == "code_name":
            return self._code_name
        elif name == "length":
            return len(self._data)
        elif name == "data":
            return self._data
        # BUGFIX: the original fell through and implicitly returned None for
        # every other attribute; __getattr__ must raise AttributeError so
        # hasattr() and typo'd attribute access behave correctly.
        raise AttributeError(name)

    def __str__(self):
        desc_str = (
            "<{:s} "
            "code={:s}(0x{:02x}) length={:d} data={:s}>"
        )
        return desc_str.format(
            self.__class__.__name__,
            self.code_name,
            self.code,
            self.length,
            hexlify(self.data)
        )

    @staticmethod
    def from_buffer(data):
        """
        Parse a PDU from raw bytes; the SMP code is the first octet.

         0 1 2 3 4 5 6 7
        -----------------
        |      code     |
        -----------------

        References can be found here:
        * https://www.bluetooth.org/en-us/specification/adopted-specifications
        ** Core specification 4.1
        ** [vol 3] Part H (Section 3.3) - Command Format
        """
        code = ustruct.unpack(SMP.struct_format, data[:SMP.struct_size])[0]
        data = data[SMP.struct_size:]
        return SMP(code, data)

    def to_buffer(self):
        """Serialize the PDU back to bytes (code octet followed by payload)."""
        return ustruct.pack(self.struct_format, self.code) + self.data
|
'''
Reduce --> Módulo functools
--> 99% das vezes, é mais legível utilizar o loop for ao invés do reduce() -- Dito pelo prórpio criador
do python
'''
print('\n')
'''
Imagine que você tem uma coleção de dados:
dados = [a1, a2, a3, ..., an]
E tem também uma função:
def função(x, y):
return x*y
Assim como map() e filter(), o reduce() recebe dois parâmetros --> (função, iterável qualquer)
reduce(função, dados)
Como funciona o reduce():
Passo 1: res1 = f(a1, a2) --> Aplica a função nos dois primeiros elementos da coleção e guarda o resultado
Passo 2: res2 = f(res1, a3) --> Aplica a função passando o resultado do res1 e o terceiro elemento
da coleção e guarda o resultado
Passo 3: res3 = (res2, a4)
.
.
.
Passo n: resn = (resn-1, an)
Basicamente, o reduce é como: função(função(função((a1, a2), a3), a4), ...), an)
'''
# In practice:
from functools import reduce
# The collection to fold over.
dados = [1, 3, 5, 11]
# reduce() needs a function that takes exactly two parameters.
multi = lambda x, y: x*y
res = reduce(multi, dados)
print(res)
# What happened: 1*3 = 3 // 3*5 = 15 // 15*11 = 165
# Another example:
outros_dados = [1, 2, 4, 6, 8]
multi_novamente = lambda x1, y1: x1*y1
res1 = reduce(multi_novamente, outros_dados)
print(res1); print('\n') # 1*2 = 2 // 2*4 = 8 // 8*6 = 48 // 48*8 = 384
# ---> It works like a recursive fold over the sequence
# Using a for loop: ----> easier to see the accumulation <----
res_for = 1
for n in dados:
    res_for = res_for*n
print(res_for)
res_for2 = 1
for n in outros_dados:
    res_for2 = res_for2*n
print(res_for2)
# Env setup
import sqlite3
import xml.etree.ElementTree as ET
from os import path
# Connect to db and path to the saved db
connect = sqlite3.connect(path.abspath(path.join('..', '..', 'sample_files', 'db', 'week3_assignment_library_xml.sqlite')))
cursor = connect.cursor()
# Create db structure
# Drop any leftover tables first so the script can be re-run from scratch.
cursor.executescript('''
DROP TABLE IF EXISTS Artist;
DROP TABLE IF EXISTS Genre;
DROP TABLE IF EXISTS Album;
DROP TABLE IF EXISTS Track
''')
cursor.executescript('''
CREATE TABLE Artist (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
name TEXT UNIQUE
);
CREATE TABLE Genre (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
name TEXT UNIQUE
);
CREATE TABLE Album (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
artist_id INTEGER,
title TEXT UNIQUE
);
CREATE TABLE Track (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
title TEXT UNIQUE,
album_id INTEGER,
genre_id INTEGER,
len INTEGER, rating INTEGER, count INTEGER
)
''')
# User's input
file_name = input('Enter the name of the XML file: ')
file_path = path.abspath(path.join('..', '..', 'sample_files', file_name))
file_read = open(file_path)
# Parse XMl file
# NOTE(review): assumes the iTunes plist layout where each track entry
# lives at dict/dict/dict -- confirm against the input file.
xml_tree = ET.parse(file_read)
xml_tags = xml_tree.findall('dict/dict/dict')
print('There are %d records in the XMl file.' % len(xml_tags))
def lookup(value, key):
    """Return the text of the element immediately following <key>key</key>
    inside *value* (the plist 'key then value' sibling layout), or None."""
    grab_next = False
    for child in value:
        if grab_next:
            return child.text
        grab_next = child.tag == 'key' and child.text == key
    return None
# Walk every track entry, skip malformed ones, and load the rest into the DB.
for item in xml_tags:
    if (lookup(item, 'Track ID') is None):
        continue
    name = lookup(item, 'Name')
    artist = lookup(item, 'Artist')
    album = lookup(item, 'Album')
    genre = lookup(item, 'Genre')
    # Renamed from 'len' so the builtin is no longer shadowed.
    length = lookup(item, 'Total Time')
    rating = lookup(item, 'Rating')
    count = lookup(item, 'Play Count')
    # The core fields are mandatory; length/rating/count may stay None.
    if name is None or artist is None or album is None or genre is None:
        continue
    # Print the parsed results
    print('Name:', name, 'Artist:', artist, 'Album:', album, 'Genre:', genre, 'Total Time:', length, 'Rating:', rating, 'Play Count:', count)
    # Load items to db
    # artist: insert if new, then fetch its id for the foreign keys below
    cursor.execute('INSERT OR IGNORE INTO Artist (name) VALUES (?)', (artist, ))
    cursor.execute('SELECT id FROM Artist WHERE name = ?', (artist, ))
    artist_id = cursor.fetchone()[0]
    # genre
    cursor.execute('INSERT OR IGNORE INTO Genre (name) VALUES (?)', (genre, ))
    cursor.execute('SELECT id FROM Genre WHERE name = ?', (genre, ))
    genre_id = cursor.fetchone()[0]
    # album
    cursor.execute('INSERT OR IGNORE INTO Album (artist_id, title) VALUES (?,?)', (artist_id, album))
    cursor.execute('SELECT id FROM Album WHERE title = ?', (album, ))
    album_id = cursor.fetchone()[0]
    # track (the DB column keeps its original name 'len')
    cursor.execute('INSERT OR REPLACE INTO Track (title, album_id, genre_id, len, rating, count) VALUES (?,?,?,?,?,?)', (name, album_id, genre_id, length, rating, count))
# Commit items into db
connect.commit()
# Close db connection
cursor.close()
connect.close()
#!/usr/bin/env python3
import rospy
from geometry_msgs.msg import PoseStamped
from geometry_msgs.msg import Quaternion
from move_base_msgs.msg import MoveBaseActionResult
from tf.transformations import quaternion_from_euler
from time import sleep
def set_goal():
    """Build the five PoseStamped waypoints (map frame) for the tour.

    Each waypoint is (x, y, yaw); yaw is converted to a quaternion."""
    waypoints = [
        (3.0, -3.0, 1.57),
        (3.0, 0.0, 3.14),
        (-3.0, 0.0, 1.57),
        (-3.0, 3.0, 0.0),
        (3.0, 3.0, 3.14),
    ]
    goals = []
    for px, py, yaw in waypoints:
        goal = PoseStamped()
        goal.header.frame_id = 'map'
        goal.pose.position.x = px
        goal.pose.position.y = py
        qx, qy, qz, qw = quaternion_from_euler(0, 0, yaw)
        goal.pose.orientation.x = qx
        goal.pose.orientation.y = qy
        goal.pose.orientation.z = qz
        goal.pose.orientation.w = qw
        goals.append(goal)
    return goals
def listener_callback(data):
    """Record the latest move_base result status in the module-level
    `status` (the main loop treats 3 as success)."""
    global status
    status = data.status.status
def talker_main(goal):
    """Publish *goal* twice on /move_base_simple/goal with a short pause
    between the sends (presumably so the message is not lost before
    subscribers connect -- TODO confirm)."""
    pub = rospy.Publisher('/move_base_simple/goal', PoseStamped, queue_size=10)
    rate = rospy.Rate(4)
    # Guard clause instead of wrapping the whole body in the condition.
    if rospy.is_shutdown():
        return
    rospy.loginfo(goal)
    pub.publish(goal)
    rate.sleep()
    rospy.loginfo(goal)
    pub.publish(goal)
if __name__ == '__main__':
    status = 0
    poses = set_goal()
    rospy.init_node('pose_goal')
    # BUGFIX: subscribe once up front -- the original created a brand-new
    # rospy.Subscriber on every iteration of the wait loop, registering an
    # ever-growing number of subscriptions.
    rospy.Subscriber('/move_base/result', MoveBaseActionResult, listener_callback)
    for i in range(5):
        # Forget the previous goal's result before waiting on the next one;
        # otherwise the stale status==3 would break out immediately.
        status = 0
        talker_main(poses[i])
        sleep(2)
        while not rospy.is_shutdown():
            if status == 3:  # move_base reported the goal as reached
                break
            sleep(0.1)  # avoid a tight busy-spin while waiting
from django.db import models
# (stored value, human-readable label) pairs for Legend.name.
LEGEND_CHOICES = (
    ('Bangalore', 'Bangalore'),
    ('Bloodhound', 'Bloodhound'),
    ('Caustic', 'Caustic'),
    ('Crypto', 'Crypto'),
    ('Fuse', 'Fuse'),
    ('Gibraltar', 'Gibraltar'),
    ('Horizon', 'Horizon'),
    ('Lifeline', 'Lifeline'),
    ('Loba', 'Loba'),
    ('Mirage', 'Mirage'),
    ('Octane', 'Octane'),
    ('Pathfinder', 'Pathfinder'),
    ('Rampart', 'Rampart'),
    ('Revenant', 'Revenant'),
    ('Valkyrie', 'Valkyrie'),
    ('Wattson', 'Wattson'),
    ('Wraith', 'Wraith'),
)
class Person(models.Model):
    """A tracked player, identified by a unique display name."""
    name = models.CharField(max_length=60, default="", blank=True, null=False, verbose_name='Name', unique=True)
    objects = models.Manager()
    def __str__(self):
        # Admin/shell display uses the player's name.
        return self.name
class Legend(models.Model):
    """An Apex Legends character; `name` is restricted to LEGEND_CHOICES."""
    name = models.CharField(max_length=60, choices=LEGEND_CHOICES, verbose_name='Legend', unique=True)
    objects = models.Manager()
    def __str__(self):
        # Admin/shell display uses the legend's name.
        return self.name
class Entry(models.Model):
    """Aggregate match statistics linked to players and legends."""
    # NOTE(review): ManyToMany on both fields lets one Entry span several
    # players/legends -- confirm that is intended rather than ForeignKey.
    person = models.ManyToManyField(Person)
    legend = models.ManyToManyField(Legend)
    gp = models.PositiveIntegerField(default=0, verbose_name='Games Played')
    kills = models.PositiveIntegerField(default=0, verbose_name='Kills')
    tpt3 = models.PositiveIntegerField(default=0, verbose_name='Times Placed Top 3')
    objects = models.Manager()
|
"""
Notification helper test
========================
This module provides complete testing for all notification functions.
"""
import pytz
import pickle
from celery.result import AsyncResult
from freezegun import freeze_time
from unittest import mock
from datetime import datetime, date, timedelta, time
from django.test import TestCase
from utils.notificationhelper import (get_seconds_until_midnight,
get_prepare_task_time,
set_notifications_tasks,
get_notifications_tasks,
get_route_id_by_name,
get_preparing_time,
DEFAULT_PREPARING_TIME)
class MockTime:
    """datetime.time-like stub (00:30:50) used by test_get_preparing_time."""
    hour = 0      # hours component
    minute = 30   # minutes component
    second = 50   # seconds component
class NotificationTestCase(TestCase):
    """TestCase for providing functions of notification testing"""
    def setUp(self):
        """Provide preparation NotificationHelper testing."""
        kiev_tz = pytz.timezone('Europe/Kiev')
        today = date.today()
        self.notification_time = time(10, 0, 0)
        self.preparing_time = 60 * 10  # ten minutes, in seconds
        # NOTE(review): attaching a pytz zone via tzinfo= gives the zone's
        # historical LMT offset; pytz recommends localize() -- confirm this
        # matches the production code path.
        notification_datetime = datetime.combine(today, self.notification_time, tzinfo=kiev_tz)
        # The task should fire `preparing_time` seconds before notification.
        self.task_time = notification_datetime - timedelta(seconds=self.preparing_time)
    @freeze_time('2019-01-01-10-30-00')
    def test_get_seconds_until_midnight(self):
        """Provide tests for `get_seconds_until_midnight` method."""
        tomorrow = datetime.now() + timedelta(days=1)
        midnight = tomorrow.replace(hour=0, minute=0, second=0)
        expected_time = (midnight - datetime.now()).seconds
        gotten_time = get_seconds_until_midnight()
        self.assertEqual(expected_time, gotten_time)
    @mock.patch('utils.redishelper.REDIS_HELPER.get')
    def test_get_notifications_tasks_if_nonexistent(self, redis_get):
        """Provide tests for `get_notifications_tasks` method
        in case if notification task not exist."""
        redis_get.return_value = None
        gotten_result = get_notifications_tasks()
        self.assertDictEqual(gotten_result, {})
    @mock.patch('utils.redishelper.REDIS_HELPER.get')
    def test_get_notifications_tasks(self, redis_get):
        """Provide tests for `get_notifications_tasks` method."""
        notification_id = 100
        notifications_tasks = {notification_id: AsyncResult(notification_id)}
        # The helper stores the mapping pickled in Redis.
        redis_get.return_value = pickle.dumps(notifications_tasks)
        gotten_result = get_notifications_tasks()
        self.assertDictEqual(gotten_result, notifications_tasks)
    @mock.patch('utils.notificationhelper.get_seconds_until_midnight')
    @mock.patch('utils.redishelper.REDIS_HELPER.set')
    def test_set_notifications_tasks(self, redis_set, get_seconds_until_midnight):
        """Provide tests for `set_notifications_tasks` method."""
        # Success path: Redis accepts the write.
        redis_set.return_value = True
        expected_result = set_notifications_tasks({'test_key': 'test_value'})
        self.assertTrue(get_seconds_until_midnight.called)
        self.assertTrue(redis_set.called)
        self.assertTrue(expected_result)
        # Failure path: Redis rejects the write.
        redis_set.return_value = False
        expected_result = set_notifications_tasks({'test_key': 'test_value'})
        self.assertTrue(get_seconds_until_midnight.called)
        self.assertTrue(redis_set.called)
        self.assertFalse(expected_result)
    def test_get_prepare_task_time(self):
        """Provide tests for `get_prepare_task_time` method."""
        time_to_stop = time(10, 15, 30)
        # Expected: the base task time shifted back by the stop offset.
        task_time = self.task_time - timedelta(
            hours=time_to_stop.hour,
            minutes=time_to_stop.minute,
            seconds=time_to_stop.second
        )
        expected_task_time = task_time
        gotten_task_time = get_prepare_task_time(
            self.notification_time,
            time_to_stop,
            self.preparing_time
        )
        self.assertEqual(expected_task_time, gotten_task_time)
    def test_get_prepare_task_time_if_time_to_stop_none(self):
        """Provide tests for `get_prepare_task_time` method in the case when
        `time_to_stop` parameter not exist."""
        time_to_stop = None
        expected_task_time = self.task_time
        gotten_task_time = get_prepare_task_time(
            self.notification_time,
            time_to_stop,
            self.preparing_time
        )
        self.assertEqual(expected_task_time, gotten_task_time)
    def test_get_route_id_by_name_success(self):
        """Provide tests for `get_route_id_by_name` method."""
        routes_data = {'test_route_id': 'test_short_name'}
        route_name = 'test_short_name'
        expected_result = 'test_route_id'
        gotten_result = get_route_id_by_name(routes_data, route_name)
        self.assertEqual(expected_result, gotten_result)
    def test_get_route_id_by_name_fail(self):
        """Provide tests for `get_route_id_by_name` method in the case when
        `route_name` parameter not exist."""
        routes_data = {'test_route_id': 'test_short_name'}
        route_name = 'nonexistent_name'
        gotten_result = get_route_id_by_name(routes_data, route_name)
        self.assertIsNone(gotten_result)
    def test_get_preparing_time(self):
        """Provide tests for `get_preparing_time` method."""
        # MockTime is 00:30:50 -> 30*60 + 50 seconds plus the default margin.
        expected_result = 30 * 60 + 50 + DEFAULT_PREPARING_TIME
        result = get_preparing_time(MockTime())
        self.assertEqual(result, expected_result)
|
# -*- coding: utf-8 -*-
# @Author: xiaodong
# @Date: 2017-05-17 23:12:12
# @Last Modified by: xiaodong
# @Last Modified time: 2017-05-17 23:12:19
#coding:utf-8
'''
env:
python 2.7
pybrain 0.3.3
sklearn 0.18.1
'''
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, TanhLayer, FullConnection
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.datasets import SupervisedDataSet
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.tools.customxml.networkreader import NetworkReader
from sklearn.datasets import load_boston
from sklearn.preprocessing import MinMaxScaler
from sklearn.externals import joblib
# Load the demo data from sklearn's Boston housing dataset
boston = load_boston()
x = boston.data
y = boston.target.reshape(-1,1)
# Split train/test 7:3 without shuffling
per = int(len(x) * 0.7)
# Normalise the data (generally mandatory with Sigmoid/Tanh activations)
sx = MinMaxScaler()
sy = MinMaxScaler()
xTrain = x[:per]
xTrain = sx.fit_transform(xTrain)
yTrain = y[:per]
yTrain = sy.fit_transform(yTrain)
xTest = x[per:]
xTest = sx.transform(xTest)
yTest = y[per:]
yTest = sy.transform(yTest)
# Initialise the feed-forward network
fnn = FeedForwardNetwork()
# Build the input, hidden and output layers (keep hidden layers small)
inLayer = LinearLayer(x.shape[1], 'inLayer')
hiddenLayer = TanhLayer(3, 'hiddenLayer')
outLayer = LinearLayer(1, 'outLayer')
# Register the layers with the network
fnn.addInputModule(inLayer)
fnn.addModule(hiddenLayer)
fnn.addOutputModule(outLayer)
# Fully connect consecutive layers
in_to_hidden = FullConnection(inLayer, hiddenLayer)
hidden_to_out = FullConnection(hiddenLayer, outLayer)
# Attach the connections to the network
fnn.addConnection(in_to_hidden)
fnn.addConnection(hidden_to_out)
fnn.sortModules()
# Initialise the supervised dataset
DS = SupervisedDataSet(x.shape[1],1)
# Add the training samples and labels to DS
for i in range(len(xTrain)):
    DS.addSample(xTrain[i],yTrain[i])
# Train with backprop until convergence, at most 1000 epochs
trainer = BackpropTrainer(fnn, DS, learningrate=0.01, verbose=True)
trainer.trainUntilConvergence(maxEpochs=1000)
# Validate on the held-out test set
values = []
for x in xTest:
    values.append(sy.inverse_transform(fnn.activate(x))[0])
# Error metric
# NOTE(review): despite the original "RMSE" comment this averages
# sqrt((x-y)^2) = |x-y|, i.e. the mean absolute error -- and the result is
# discarded (not assigned or printed); assign/print it if the metric is
# actually wanted.
sum(map(lambda x: x ** 0.5,map(lambda x,y: pow(x-y,2), boston.target[per:], values))) / float(len(xTest))
# Save the trained network and both scalers
NetworkWriter.writeToFile(fnn, 'pathName.xml')
joblib.dump(sx, 'sx.pkl', compress=3)
joblib.dump(sy, 'sy.pkl', compress=3)
# Reload the saved artefacts
fnn = NetworkReader.readFrom('pathName.xml')
sx = joblib.load('sx.pkl')
sy = joblib.load('sy.pkl')
#!/usr/bin/python
"""
Copyright (c) 2014, Intel Corporation
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@file TimedEvent.py
@author Mic Bowman
@date 2014-03-31
This package defines modules for the mobdat simulation environment
"""
import os, sys
import logging
# we need to import python modules from the $SUMO_HOME/tools directory
sys.path.append(os.path.join(os.environ.get("OPENSIM","/share/opensim"),"lib","python"))
sys.path.append(os.path.realpath(os.path.join(os.path.dirname(__file__), "..")))
sys.path.append(os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "..")))
sys.path.append(os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "lib")))
from mobdat.common.Utilities import GenName
from mobdat.common.TimeVariable import *
from mobdat.common.Constraint import *
from mobdat.common.TravelTimeEstimator import TravelTimeEstimator
logger = logging.getLogger(__name__)
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
class TripEvent :
    """A concrete trip: leave the source place for the destination place at
    StartTime.  Endpoint names are taken from each place's Details field."""

    def __init__(self, stime, splace, dplace) :
        self.StartTime = stime
        self.SrcName = splace.Details
        self.DstName = dplace.Details

    def __str__(self) :
        return 'travel from {0} to {1} starting at {2}'.format(
            self.SrcName, self.DstName, self.StartTime)
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
class PlaceEvent :
    """An interval spent at one place, linked into a trip chain through the
    Arrival/Departure TravelEvents."""

    # -----------------------------------------------------------------
    def __init__(self, details, stimevar, etimevar, duration = 0.01, id = None) :
        """Args: details -- opaque place description; stimevar/etimevar --
        start/end time variables; duration -- minimum stay (floored at
        0.01); id -- event identifier, generated when omitted."""
        self.Details = details
        self.STime = stimevar
        self.ETime = etimevar
        # Never allow a zero-length stay.
        self.Duration = max(duration, 0.01)
        self.EventID = id if id else GenName('PLACE')
        self.Arrival = None
        self.Departure = None

    # -----------------------------------------------------------------
    def NextPlace(self) :
        """The place travelled to next, or None at the end of the chain."""
        if self.Departure :
            return self.Departure.DstPlace
        return None

    # -----------------------------------------------------------------
    def PrevPlace(self) :
        """The place arrived from, or None at the start of the chain."""
        if self.Arrival :
            return self.Arrival.SrcPlace
        return None

    # -----------------------------------------------------------------
    def AddConstraints(self, cstore) :
        """Append this event's ordering constraint to cstore, then recurse
        down the rest of the chain."""
        cstore.append(OrderConstraint(self.STime.ID, self.ETime.ID, self.Duration))
        if self.Departure :
            self.Departure.AddConstraints(cstore)

    # -----------------------------------------------------------------
    def DumpToLog(self) :
        """Log this event and everything after it in the chain."""
        logger.warn("[{0:8s}]: {1:8s} from {2} to {3}".format(self.EventID, self.Details, str(self.STime), str(self.ETime)))
        if self.Departure :
            self.Departure.DumpToLog()
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
class TravelEvent :
    """The trip between two PlaceEvents.  The duration comes from the
    estimator when one is supplied, else a fixed default."""

    DefaultDuration = 0.5

    # -----------------------------------------------------------------
    def __init__(self, srcplace, dstplace, estimator = None, id = None) :
        self.SrcPlace = srcplace
        self.DstPlace = dstplace
        if estimator :
            self.Duration = estimator.ComputeTravelTime(srcplace.Details, dstplace.Details)
        else :
            self.Duration = self.DefaultDuration
        self.EventID = id if id else GenName('TRAVEL')

    # -----------------------------------------------------------------
    def AddConstraints(self, cstore) :
        """Recurse down the chain, then constrain the destination's start to
        follow the source's end by at least the travel duration."""
        if self.DstPlace :
            self.DstPlace.AddConstraints(cstore)
        cstore.append(OrderConstraint(self.SrcPlace.ETime.ID, self.DstPlace.STime.ID, self.Duration))

    # -----------------------------------------------------------------
    def DumpToLog(self) :
        """Log this leg and everything after it in the chain."""
        logger.warn("[{0:8s}]: travel from {1} to {2}".format(self.EventID, self.SrcPlace.Details, self.DstPlace.Details))
        if self.DstPlace :
            self.DstPlace.DumpToLog()
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
class TimeVariableStore(dict) :
    """Store of TimeVariables keyed by variable ID.

    NOTE: iteration uses dict.itervalues(), so this module targets Python 2.
    """
    # -----------------------------------------------------------------
    def __init__(self, *args, **kwargs) :
        dict.__init__(self, *args, **kwargs)
    # -----------------------------------------------------------------
    def Copy(self) :
        """Return a new store holding a Copy() of every contained variable."""
        newlist = TimeVariableStore()
        for tvar in self.itervalues() :
            newlist[tvar.ID] = tvar.Copy()
        return newlist
    # -----------------------------------------------------------------
    def StoreIsValid(self) :
        """ Determine if the store is in a consistent state
        Returns:
            True if all variables are still valid
        """
        for var in self.itervalues() :
            if not var.IsValid() :
                # Log the first inconsistency so the conflict is traceable.
                logger.warn('variable {0} is inconsistent; {1}'.format(var.ID, str(var)))
                return False
        return True
    # -----------------------------------------------------------------
    def StoreIsFixed(self) :
        """ Determine if all variables in the store have fixed their values
        Returns:
            True if all variables are fixed
        """
        for var in self.itervalues() :
            if not var.IsFixed() : return False
        return True
    # -----------------------------------------------------------------
    def FindFreeVariables(self) :
        """ Find the time variables with values that have not been
        set. Ignore invalid variables.
        Returns:
            A possibly empty list of variable identifiers
        """
        # NOTE(review): only IsFixed() is checked; invalid variables are not
        # actually filtered out despite the docstring -- confirm intent.
        variables = []
        for var in self.itervalues() :
            if not var.IsFixed() : variables.append(var)
        # Highest-priority variables first.
        return sorted(variables, key= lambda var : var.Priority, reverse=True)
    # -----------------------------------------------------------------
    def DumpToLog(self) :
        # Dump variables ordered by their start time.
        for tvar in sorted(self.values(), key= lambda tvar : tvar.STime) :
            logger.warn("{0:5s} {1}".format(tvar.ID, str(tvar)))
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
class ConstraintStore(list) :
    """A list of ordering constraints with simple fixed-point solving."""
    # -----------------------------------------------------------------
    def __init__(self, *args) :
        # Positional arguments become the initial constraint list.
        list.__init__(self, args)
    # -----------------------------------------------------------------
    def DumpToLog(self, varstore) :
        for constraint in self :
            constraint.DumpToLog(varstore)
    # -----------------------------------------------------------------
    def ApplyConstraints(self, varstore) :
        """ Apply the list of constraints repeatedly until the variable
        space stabilizes. With float ranges there is some danger of this
        never stopping though that is unlikely.
        Returns:
            True if all constraints applied, False if there was a conflict
        """
        changed = True
        while changed :
            if not varstore.StoreIsValid() :
                return False
            changed = False
            # Keep propagating until a full pass changes nothing.
            for constraint in self :
                changed = constraint.Apply(varstore) or changed
        return varstore.StoreIsValid()
    # -----------------------------------------------------------------
    def SolveConstraints(self, varstore) :
        """ Apply constraints repeatedly until all variables have been given a value
        Args:
            varstore -- store of TimeVariables over which constraints will be applied
        Returns:
            True if the variable store is valid after all variables have been given a value
        """
        if not self.ApplyConstraints(varstore) :
            return False
        # Give every remaining free variable a concrete value, re-propagating
        # the constraints after each pick to keep the store consistent.
        variables = varstore.FindFreeVariables()
        for var in variables :
            var.PickValue()
            if not self.ApplyConstraints(varstore) :
                return False
        return True
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
# XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
class TimedEventList :
    # -----------------------------------------------------------------
    def __init__(self, details, lifespan, estimator = None) :
        """Create the list with a single base event at *details* spanning
        [0, lifespan]; *estimator* predicts travel times (a default
        TravelTimeEstimator is built when omitted)."""
        self.Events = {}
        self.TimeVariableStore = TimeVariableStore()
        self.TravelTimeEstimator = estimator or TravelTimeEstimator()
        baseid = self.AddPlaceEvent(details, MinimumTimeVariable(0.0), MaximumTimeVariable(lifespan))
        self.BaseEvent = self.Events[baseid]
# -----------------------------------------------------------------
@property
def LastEvent(self) :
event = self.BaseEvent
while event.Departure :
event = event.Departure.DstPlace
return event
    # -----------------------------------------------------------------
    def PrevPlaceID(self, eventid) :
        """Return the EventID of the place visited before *eventid*, or None."""
        ev = self.Events[eventid].PrevPlace()
        return ev.EventID if ev else None
    # -----------------------------------------------------------------
    def NextPlaceID(self, eventid) :
        """Return the EventID of the place visited after *eventid*, or None."""
        ev = self.Events[eventid].NextPlace()
        return ev.EventID if ev else None
    # -----------------------------------------------------------------
    def MoreTripEvents(self) :
        """Truthy while the base event still has a following place."""
        return self.BaseEvent.NextPlace()
# -----------------------------------------------------------------
def PopTripEvent(self) :
stime = self.BaseEvent.ETime
splace = self.BaseEvent
dplace = self.BaseEvent.NextPlace()
if not dplace :
return None
self.BaseEvent = dplace
return TripEvent(stime, splace, dplace)
    # -----------------------------------------------------------------
    def AddPlaceEvent(self, details, svar, evar, duration = 0.01, id = None) :
        """ Create a PlaceEvent object from the parameters and save it in the list of events
        Args:
            svar -- the start of the event, an initialized TimeVariable object
            evar -- the end of the event, an initialized TimeVariable object
            duration -- the minimum duration for the event
            id -- the id of the newly created event, generated if not provided
        Returns:
            The identifier of the newly created event
        """
        # Register both time variables so the constraint solver can see them.
        self.TimeVariableStore[svar.ID] = svar
        self.TimeVariableStore[evar.ID] = evar
        event = PlaceEvent(details, svar, evar, duration, id)
        self.Events[event.EventID] = event
        return event.EventID
# -----------------------------------------------------------------
def InsertAfterPlaceEvent(self, id1, id2) :
"""Insert PlaceEvent id2 after the event id1. Create a travel event to move
from the current location to the new one.
Args:
id1 -- string event identifier
id2 -- string event identifier
"""
ev1 = self.Events[id1]
ev2 = self.Events[id2]
if ev1.Departure :
ev2.Departure = TravelEvent(ev2, ev1.Departure.DstPlace, estimator = self.TravelTimeEstimator)
ev1.Departure = TravelEvent(ev1, ev2, estimator = self.TravelTimeEstimator)
ev2.Arrival = ev1.Departure
t1 = max(ev1.STime.STime, ev2.STime.STime)
t2 = max(ev1.STime.ETime, ev2.STime.ETime)
ev1.ETime = MaximumTimeVariable(t1, t2, ev1.ETime.ID)
self.TimeVariableStore[ev1.ETime.ID] = ev1.ETime
return (id1, id2)
# -----------------------------------------------------------------
def InsertWithinPlaceEvent(self, id1, id2) :
"""Insert split event id1 and insert id2 into the middle. Create travel events to
move from the current location to the new location and then back to the current location.
The assumption is that self.STime.STime < place.STime.STime and
place.ETime.ETime < self.ETime.ETime
Args:
id1 -- string event identifier
id2 -- string event identifier
"""
ev1 = self.Events[id1]
ev2 = self.Events[id2]
# this is really wrong, there should be a constraint across the two intervals
# that ensures that the duration is consistent...
oldduration = ev1.Duration
if oldduration > 0.01 :
ev1.Duration = oldduration / 2.0
idc = self.AddPlaceEvent(ev1.Details, ev1.STime.Copy(GenName('TV')), ev1.ETime.Copy(GenName('TV')), ev1.Duration)
clone = self.Events[idc]
if ev1.Departure :
clone.Departure = TravelEvent(clone, ev1.Departure.DstPlace, estimator = self.TravelTimeEstimator)
ev2.Departure = TravelEvent(ev2, clone, estimator = self.TravelTimeEstimator)
clone.Arrival = ev2.Departure
clone.STime = MinimumTimeVariable(ev2.ETime.STime, ev1.ETime.ETime, clone.STime.ID)
self.TimeVariableStore[clone.STime.ID] = clone.STime
ev1.Departure = TravelEvent(ev1, ev2, estimator = self.TravelTimeEstimator)
ev2.Arrival = ev1.Departure
ev1.ETime = MaximumTimeVariable(ev1.STime.STime, ev2.STime.ETime, ev1.ETime.ID)
self.TimeVariableStore[ev1.ETime.ID] = ev1.ETime
return (id1, id2, clone.EventID)
# -----------------------------------------------------------------
def SolveConstraints(self) :
cstore = ConstraintStore()
self.BaseEvent.AddConstraints(cstore)
return cstore.SolveConstraints(self.TimeVariableStore)
# -----------------------------------------------------------------
def DumpToLogTimeVariables(self) :
self.TimeVariableStore.DumpToLog()
# -----------------------------------------------------------------
def DumpToLog(self) :
self.BaseEvent.DumpToLog()
## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
if __name__ == '__main__' :
    # NOTE(review): this demo uses Python 2 print statements; it also relies on
    # 'random' and 'sys' being imported earlier in the module -- confirm.
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    def AddWorkEvent(evlist, event, days) :
        """Split the given event and insert a nine hour work day starting
        between 7am and 9am and ending between 4pm and 6pm."""
        swork = GaussianTimeVariable(days * 24.0 + 7.0, days * 24.0 + 9.0)
        ework = GaussianTimeVariable(days * 24.0 + 16.0, days * 24.0 + 18.0)
        idw = evlist.AddPlaceEvent('work', swork, ework, 9.0)
        evlist.InsertWithinPlaceEvent(event, idw)
        return idw
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    def AddLunchToWorkEvent(evlist, workevent, days) :
        """Insert a 45 minute lunch break into the middle of the work event."""
        slunch = GaussianTimeVariable(days * 24.0 + 11.5, days * 24.0 + 13.0)
        elunch = GaussianTimeVariable(days * 24.0 + 12.5, days * 24.0 + 14.0)
        idl = evlist.AddPlaceEvent('lunch', slunch, elunch, 0.75)
        evlist.InsertWithinPlaceEvent(workevent, idl)
        return idl
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    def AddCoffeeBeforeWorkEvent(evlist, workevent, days) :
        """Add a PlaceEvent for coffee before a work event. This moves the
        coffee event as close as possible to the work event.
        """
        scoffee = MaximumTimeVariable(days * 24.0 + 0.0, days * 24.0 + 24.0)
        ecoffee = MaximumTimeVariable(days * 24.0 + 0.0, days * 24.0 + 24.0)
        idc = evlist.AddPlaceEvent('coffee', scoffee, ecoffee, 0.2)
        # insert between the event preceding work and the work event itself
        evlist.InsertAfterPlaceEvent(evlist.PrevPlaceID(workevent), idc)
        return idc
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    def AddRestaurantAfterWorkEvent(evlist, workevent, days) :
        """Add a 90 minute dinner event immediately after the work event."""
        sdinner = MinimumTimeVariable(days * 24.0 + 0.0, days * 24.0 + 24.0)
        edinner = MinimumTimeVariable(days * 24.0 + 0.0, days * 24.0 + 24.0)
        idr = evlist.AddPlaceEvent('dinner', sdinner, edinner, 1.5)
        evlist.InsertAfterPlaceEvent(workevent, idr)
        return idr
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    def AddShoppingTrip(evlist, days) :
        """Insert a shopping stop into the last event of the day, then chain
        a random number of additional stops after it."""
        # happens between 7am and 10pm
        svar = GaussianTimeVariable(days * 24.0 + 7.0, days * 24.0 + 22.0)
        evar = GaussianTimeVariable(days * 24.0 + 7.0, days * 24.0 + 22.0)
        ids = evlist.AddPlaceEvent('shopping', svar, evar, 0.75)
        evlist.InsertWithinPlaceEvent(evlist.LastEvent.EventID, ids)
        # triangular distribution: mode 1 extra stop, at most 3
        stops = int(random.triangular(0, 4, 1))
        while stops > 0 :
            stops = stops - 1
            svar = MinimumTimeVariable(days * 24.0 + 7.0, days * 24.0 + 22.0)
            evar = MinimumTimeVariable(days * 24.0 + 7.0, days * 24.0 + 22.0)
            idnew = evlist.AddPlaceEvent('shopping', svar, evar, 0.5)
            evlist.InsertAfterPlaceEvent(ids, idnew)
            ids = idnew
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    ## XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
    def BuildOneDay(evlist) :
        """Randomly populate one day with work, coffee, dinner, lunch and
        shopping events, then solve the resulting constraints.
        NOTE(review): reads the module-level loop variable 'day' instead of
        taking it as a parameter.
        """
        lastev = evlist.LastEvent.EventID
        workev = AddWorkEvent(evlist, lastev, day)
        if random.uniform(0.0, 1.0) > 0.6 :
            AddCoffeeBeforeWorkEvent(evlist, workev, day)
        if random.uniform(0.0, 1.0) > 0.8 :
            AddRestaurantAfterWorkEvent(evlist, workev, day)
        if random.uniform(0.0, 1.0) > 0.5 :
            AddLunchToWorkEvent(evlist, workev, day)
        if random.uniform(0.0, 1.0) > 0.8 :
            AddShoppingTrip(evlist, day)
        if not evlist.SolveConstraints() :
            print 'resolution failed'
            sys.exit(1)
    # -----------------------------------------------------------------
    # simulate 1000 days, printing every trip event as it is popped
    evlist = TimedEventList('home', 1000 * 24.0)
    for day in range(0, 1000) :
        BuildOneDay(evlist)
        print 'day = {0}'.format(day)
        while evlist.MoreTripEvents() :
            trip = evlist.PopTripEvent()
            print str(trip)
    # evlist.DumpToLog()
|
# Question Page import
# import csv lib
import csv
import ast
# import management command
from django.core.management.base import BaseCommand, CommandError
# import needed models
from extensions.models import Extensions, ExtensionLexisLink
from categories.models import ExtensionCommandType
from lexis.models import Lexis
from events.models import CategoryEventCollection
# create command class
class Command(BaseCommand):
    """Import Extensions rows (and their lexis links) from a CSV file.

    Usage:
        python manage.py import_extensionsModel --file=".data/aswl_e1_extensionsB.csv"
    """
    help = "Import key question data"

    def add_arguments(self, parser):
        # path to the CSV file to import
        parser.add_argument("--file", action="store", type=str)

    def handle(self, *args, **options):
        """Read each CSV row, resolve its foreign keys and create an
        Extensions object, then build the lexis-link orderables for it."""

        def createOrderable(orderList, id):
            """Build an ExtensionLexisLink orderable row for every lexis term
            in orderList, attached to the extension page with the given id.
            NOTE(review): lex.save() is commented out, so the links are built
            but never persisted -- confirm whether that is still intentional.
            """
            if orderList:
                for i in orderList:
                    # look up the lexis term to obtain its primary key;
                    # raises Lexis.DoesNotExist if the CSV names an unknown term
                    term = Lexis.objects.get(term=i)
                    print('TERM: ', term)
                    print('TERMID: ', term.id)
                    if term:
                        lex = ExtensionLexisLink(term_link_id=term.id, page_id=id)
                        print('LEX: ', lex)
                        # lex.save()
            return

        with open(options["file"]) as import_file:
            extensions = csv.DictReader(import_file)
            for ex in extensions:
                # resolve the optional event-collection foreign key;
                # collection names are stored upper-cased in the database
                if ex["event_collection"]:
                    category_event = CategoryEventCollection.objects.get(
                        collection_event=ex["event_collection"].upper())
                    print('CATEGORY EVENT: ', category_event)
                else:
                    category_event = None
                # resolve the assignment command type foreign key
                print('COMMAND: ', ex["assignment_command_type"])
                assignment_link = ExtensionCommandType.objects.get(command_name=ex["assignment_command_type"])
                # fix: previously printed category_event under the ASS_LINK label
                print('ASS_LINK: ', assignment_link)
                # create the extension row itself
                import_ext = Extensions.objects.create(
                    assignment_command_types=assignment_link,
                    event_collection=category_event,
                    action=ex["action"],
                    explanation=ex["explanation"],
                )
                print('IMPORT_EXT: ', import_ext)
                print('IMPORT_EXT_ID: ', import_ext.id)
                # lexis_links holds a Python-literal list in the CSV,
                # e.g. "['term a', 'term b']" -- parse it safely
                lex_string = ex["lexis_links"]
                if lex_string:
                    orderList = ast.literal_eval(lex_string)
                    print(orderList)
                    createOrderable(orderList, import_ext.id)
        self.stdout.write("All done!")
#
# invoke the file in the command line
# folder structure
# create app content_mmigration
# create folder structure management / commands / commandfile.py
# put data in .data folder in root
# import_questions = import_questions.py file
# --file="PATH TO CSV FILE"
# $ python manage.py import_extensionsModel --file=".data/aswl_e1_extensionsB.csv"
# NOW DO YOU IMPORT AN ORDERABLE M2M relationship import
# HOW DO YOU HANDLE A STUCT BLOCK STREAM IMPORT |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Choose_FaceLM.ui'
#
# Created by: PyQt5 UI code generator 5.8.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Designer-generated UI for the face-landmark picker window.
    NOTE: generated by pyuic5 from 'Choose_FaceLM.ui' -- edit the .ui file,
    not this class, or the changes will be lost on regeneration.
    """
    def setupUi(self, MainWindow):
        """Build the widget tree and attach it to MainWindow."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(1156, 896)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # label showing the running count of selected landmarks
        self.label_landmarks_count = QtWidgets.QLabel(self.centralwidget)
        self.label_landmarks_count.setGeometry(QtCore.QRect(700, 20, 301, 41))
        font = QtGui.QFont()
        font.setFamily("Agency FB")
        font.setPointSize(14)
        self.label_landmarks_count.setFont(font)
        self.label_landmarks_count.setObjectName("label_landmarks_count")
        # toolbar buttons: load picture / load landmarks
        self.btn_load_pic = QtWidgets.QPushButton(self.centralwidget)
        self.btn_load_pic.setGeometry(QtCore.QRect(20, 20, 131, 51))
        self.btn_load_pic.setObjectName("btn_load_pic")
        self.btn_load_landmarks = QtWidgets.QPushButton(self.centralwidget)
        self.btn_load_landmarks.setGeometry(QtCore.QRect(170, 20, 211, 51))
        self.btn_load_landmarks.setObjectName("btn_load_landmarks")
        # horizontal strip holding the two picture views side by side
        self.horizontalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(0, 90, 1221, 601))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.layout_pic = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.layout_pic.setSizeConstraint(QtWidgets.QLayout.SetNoConstraint)
        self.layout_pic.setContentsMargins(0, 0, 0, 0)
        self.layout_pic.setObjectName("layout_pic")
        self.pic_view_1 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.pic_view_1.setText("")
        self.pic_view_1.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
        self.pic_view_1.setObjectName("pic_view_1")
        self.layout_pic.addWidget(self.pic_view_1)
        self.pic_view_2 = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.pic_view_2.setText("")
        self.pic_view_2.setObjectName("pic_view_2")
        self.layout_pic.addWidget(self.pic_view_2)
        # buttons: delete last point / export landmarks
        self.btn_delete_last_point = QtWidgets.QPushButton(self.centralwidget)
        self.btn_delete_last_point.setGeometry(QtCore.QRect(530, 20, 131, 51))
        self.btn_delete_last_point.setObjectName("btn_delete_last_point")
        self.btn_export_landmarks = QtWidgets.QPushButton(self.centralwidget)
        self.btn_export_landmarks.setGeometry(QtCore.QRect(390, 20, 131, 51))
        self.btn_export_landmarks.setObjectName("btn_export_landmarks")
        MainWindow.setCentralWidget(self.centralwidget)
        # menu bar and status bar
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 1156, 25))
        self.menubar.setObjectName("menubar")
        self.menu = QtWidgets.QMenu(self.menubar)
        self.menu.setObjectName("menu")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        # menu actions for loading an image and loading landmarks
        self.load_image = QtWidgets.QAction(MainWindow)
        self.load_image.setObjectName("load_image")
        self.load_landmarks = QtWidgets.QAction(MainWindow)
        self.load_landmarks.setObjectName("load_landmarks")
        self.menubar.addAction(self.menu.menuAction())
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Install the user-visible (Chinese) strings on all widgets."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.label_landmarks_count.setText(_translate("MainWindow", "目前已選取特徵點總數 : "))
        self.btn_load_pic.setText(_translate("MainWindow", "讀取圖片"))
        self.btn_load_landmarks.setText(_translate("MainWindow", "讀取人臉特徵點(*.txt)"))
        self.btn_delete_last_point.setText(_translate("MainWindow", "刪除上一個點"))
        self.btn_export_landmarks.setText(_translate("MainWindow", "輸出人臉特徵點(*.txt)"))
        self.menu.setTitle(_translate("MainWindow", "檔案"))
        self.load_image.setText(_translate("MainWindow", "讀取圖片"))
        self.load_landmarks.setText(_translate("MainWindow", "讀取Landmarks"))
if __name__ == "__main__":
    # Launch the designer-generated window as a standalone application.
    import sys
    application = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    form = Ui_MainWindow()
    form.setupUi(window)
    window.show()
    # hand control to the Qt event loop; propagate its exit code
    sys.exit(application.exec_())
|
from rest_framework import serializers, validators
from .models import *
from scenes.models import Scene
class LogSerializer(serializers.ModelSerializer):
    """Serializer for the Logs model exposing every model field."""
    # Hide the log_id field from input/output; HiddenField still requires a default.
    # NOTE(review): default=serializers.IntegerField passes the field CLASS as
    # the default; DRF invokes callables, so the 'value' would be an IntegerField
    # instance -- almost certainly not intended. Probably a concrete value or
    # callable (e.g. default=None) was meant; confirm against how log_id is used.
    log_id = serializers.HiddenField(
        default=serializers.IntegerField
    )
    class Meta:
        model = Logs
        fields = "__all__"
class LogSceneSerializer(serializers.ModelSerializer):
    """Narrow serializer for Logs that exposes only the scene_id field."""

    class Meta:
        # limit output to the scene reference only
        model = Logs
        fields = ('scene_id',)
class LogModuleSerializer(serializers.ModelSerializer):
    """Narrow serializer for Logs that exposes only the log_module field."""

    class Meta:
        # limit output to the module name only
        model = Logs
        fields = ('log_module',)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.