index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
# Build a word list, splice in "feel", then drop the now-redundant "look".
words = "Sometimes I look like Jesse James".split()
words[2:2] = ["feel"]  # same effect as words.insert(2, "feel")
print(words)
words.pop(3)  # removes "look"
print(words)
# Join the array with space as delimiter
sentence = " ".join(words)
print(sentence)
17,501 | 79ae8ac2e5fd70c18bc77f040ad96ec7c8983525 | from utils import read_data
import matplotlib.pyplot as plt
# Plot, for the last four data groups, d[5]-d[4] against d[4].
data = read_data()
# BUG FIX: in Python 3 dict.items() returns a view, which is not sliceable;
# materialize it first. Relies on insertion-ordered dicts (Python 3.7+)
# for "last four" to be meaningful.
for key, val in list(data.items())[-4:]:
    plt.plot(
        [d[4] for d in val],
        [d[5] - d[4] for d in val],
        '.',
        label=str(key),
        alpha=0.5,
    )
plt.legend()
plt.show()
|
# Dawid has four bags of candies. The i-th of them contains a_i candies.
# Also, Dawid has two friends. He wants to give each bag to one of his two friends.
# Is it possible to distribute the bags in such a way that each friend receives
# the same amount of candies in total?


def can_split(bags):
    """Return True if the bags can be divided between two friends with equal sums.

    BUG FIX: the original only checked the three 2+2 pairings, so valid 1+3
    splits (e.g. [1, 1, 1, 3] -> {3} vs {1, 1, 1}) were reported as "no".
    With four bags, one friend receives a subset of size 0-2 (size 3 and 4
    are complements), so checking singles and pairs against half the total
    covers every distribution.
    """
    total = sum(bags)
    if total % 2:
        return False  # an odd total can never split evenly
    half = total // 2
    # A single bag equal to half (also covers the all-zero case).
    if any(b == half for b in bags):
        return True
    # Any pair summing to half leaves the complementary pair with the rest.
    n = len(bags)
    return any(
        bags[i] + bags[j] == half for i in range(n) for j in range(i + 1, n)
    )


def main():
    """Read four bag sizes (one per line) and print 'yes' or 'no'."""
    bags = [int(input(" ")) for _ in range(4)]
    print("yes" if can_split(bags) else "no")


if __name__ == "__main__":
    main()
|
17,503 | 93c4bd5c03dc8ad41a178c7beba4c13eede5dae1 | from OpenGL.GL import *
from OpenGL.GLUT import *
def display():
    """Clear the window to grey and draw a red square outline."""
    glClearColor(0.5, 0.5, 0.5, 0.0)
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glColor3d(1.0, 0.0, 0.0)
    glBegin(GL_LINE_LOOP)
    glVertex2d(-0.9, -0.9)
    glVertex2d(0.9, -0.9)
    glVertex2d(0.9, 0.9)
    glVertex2d(-0.9, 0.9)
    glEnd()
    glFlush()
    glutSwapBuffers()  # window was created with GLUT_DOUBLE


def init():
    """Create the GLUT window and register the display callback."""
    # BUG FIX: the original used sys.argv without ever importing sys,
    # which raised NameError at startup.
    import sys

    glutInit(sys.argv)
    glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH)
    glutInitWindowSize(320, 240)
    glutCreateWindow("TEXTAREA")
    glutDisplayFunc(display)


init()
glutMainLoop()
|
17,504 | c02ec4409753d1f8118e4ce4bd91f1215735396a | #!/usr/bin/env python3
import gym
def main():
    """Run random CartPole-v0 rollouts, rendering and printing each step.

    Plays 20 episodes of at most 100 timesteps each, sampling random
    actions from the action space.
    """
    env = gym.make("CartPole-v0")
    n_episodes = 20
    n_time_steps = 100
    try:
        for _ in range(n_episodes):
            observation = env.reset()
            for t in range(n_time_steps):
                env.render()
                print(observation)
                action = env.action_space.sample()
                observation, reward, done, info = env.step(action)
                if done:
                    print("Episode finished after {} timesteps".format(t + 1))
                    break
    finally:
        # BUG FIX: the original never closed the env, leaking the render window.
        env.close()


if __name__ == "__main__":
    main()
|
# ITP Week 2 Day 3 Exercise
# Write a basic calculator using the input function to complete the following
# tasks. Be sure to call your functions at the end, using the correct arguments.

# Easy:
# - A function that subtracts one integer from another
# - A function that multiplies three integers
# - A function that adds four integers


def sub(num1, num2):
    """Return num1 minus num2."""
    return num1 - num2


def mult_three_nums(num1, num2, num3):
    """Return the product of the three arguments."""
    return num1 * num2 * num3


def add_four_nums(num1, num2, num3, num4):
    """Return the sum of the four arguments."""
    return num1 + num2 + num3 + num4


# Medium:
# - Create a calculator function using THREE input parameters (two float, one
#   string [hint: it will be a math symbol]) to allow the user to add,
#   subtract, multiply and divide.
def calculator(first_number=None, type_of_operation=None, second_number=None):
    """Apply a basic arithmetic operation to two floats and return the result.

    Any argument left as None is read interactively, so the original
    no-argument call ``calculator()`` keeps working.

    BUG FIX: the original had an unbalanced parenthesis on the first input
    line (a SyntaxError) and returned the builtin ``sum`` instead of
    computing anything.

    Raises:
        ValueError: if the operation symbol is not one of + - * /.
        ZeroDivisionError: on division by zero.
    """
    if first_number is None:
        first_number = float(input("first number:"))
    if type_of_operation is None:
        type_of_operation = input("please choose: +, -, *, /: ")
    if second_number is None:
        second_number = float(input("second number: "))

    if type_of_operation == "+":
        return first_number + second_number
    if type_of_operation == "-":
        return first_number - second_number
    if type_of_operation == "*":
        return first_number * second_number
    if type_of_operation == "/":
        return first_number / second_number
    raise ValueError("unknown operation: %s" % type_of_operation)


# Hard:
# - You're at a restaurant with some friends and the server didn't split up the check. Create a function that takes a bill amount, multiplies it by a global variable called tax_rate, adds the tax, and then divides the total bill by the number of people input by the user. BONUS: Include an option for adding tip through either a percentage amount assigned to a global varible, or through a specific amount input by the user. You may use the math module from the Python standard library.
|
17,506 | 9c9db9deea358f5cc49dcc9481a85155d2d1eeb2 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Form implementation generated from reading ui file 'two.ui'
#
# Created by: PyQt5 UI code generator 5.11.2
#
# WARNING! All changes made in this file will be lost!
# MainWindow.setCentralWidget(self.centralwidget)
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from module.turles1 import *
import random
from PyQt5.QtCore import Qt, QEvent, QRegExp
from PyQt5.QtGui import QKeyEvent, QKeySequence, QRegExpValidator
from module.zc2 import Ui_ZC2
# Background image for the registration window.
path2 = "./zc/test-1.png"
# Module-level list holding the gender the user selected ("男"/"女").
L = []
# Module-level list holding the avatar image path(s) the user picked.
T = []
# Module-level list holding the collected registration form text.
W = []
class Ui_Two(QtWidgets.QMainWindow):
    """Registration window: collects nickname, gender, contact info and an
    avatar, validates a captcha and then opens the Ui_ZC2 follow-up window.

    Originally generated from 'two.ui' by pyuic5 and hand-edited; widget
    creation order, geometries and all user-facing strings are preserved
    exactly. The only structural change is factoring the 15x-duplicated
    bold-font setup into the _bold_font() helper.
    """

    def __init__(self, s):
        super().__init__()
        self.setupUi(self)
        self.s = s  # opaque state object forwarded to Ui_ZC2 on success

    @staticmethod
    def _bold_font(family=None):
        """Return the 13pt bold (weight 75) QFont used throughout the form."""
        font = QtGui.QFont()
        if family is not None:
            font.setFamily(family)
        font.setPointSize(13)
        font.setBold(True)
        font.setWeight(75)
        return font

    def setupUi(self, MainWindow):
        print("two>setupUI")
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(562, 695)
        self.setWindowIcon(QtGui.QIcon("bomb.png"))
        MainWindow.setStyleSheet("background-image:url(%s)" % path2)
        self.MainWindow = MainWindow
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Captcha input field and its label.
        self.yzmText = QtWidgets.QLineEdit(self.centralwidget)
        self.yzmText.setGeometry(QtCore.QRect(190, 430, 91, 20))
        self.yzmText.setFont(self._bold_font())
        self.yzmText.setObjectName("yzmText")
        self.label_2 = QtWidgets.QLabel(self.centralwidget)
        self.label_2.setGeometry(QtCore.QRect(60, 430, 111, 16))
        self.label_2.setFont(self._bold_font())
        self.label_2.setObjectName("label_2")
        # Confirm ("确认") button.
        self.qrButton = QtWidgets.QCommandLinkButton(self.centralwidget)
        self.qrButton.setGeometry(QtCore.QRect(200, 470, 185, 41))
        self.qrButton.setFont(self._bold_font("Segoe UI"))
        self.qrButton.setObjectName("qrButton")
        # Static labels for the form rows.
        self.label_7 = QtWidgets.QLabel(self.centralwidget)
        self.label_7.setGeometry(QtCore.QRect(60, 100, 121, 21))
        self.label_7.setFont(self._bold_font())
        self.label_7.setObjectName("label_7")
        self.label_4 = QtWidgets.QLabel(self.centralwidget)
        self.label_4.setGeometry(QtCore.QRect(60, 197, 121, 16))
        self.label_4.setFont(self._bold_font())
        self.label_4.setObjectName("label_4")
        self.label_3 = QtWidgets.QLabel(self.centralwidget)
        self.label_3.setGeometry(QtCore.QRect(60, 157, 121, 20))
        self.label_3.setFont(self._bold_font())
        self.label_3.setObjectName("label_3")
        # Nickname and signature inputs.
        self.ncText = QtWidgets.QLineEdit(self.centralwidget)
        self.ncText.setGeometry(QtCore.QRect(190, 157, 191, 20))
        self.ncText.setFont(self._bold_font())
        self.ncText.setObjectName("ncText")
        self.gxText = QtWidgets.QLineEdit(self.centralwidget)
        self.gxText.setGeometry(QtCore.QRect(190, 197, 191, 20))
        self.gxText.setFont(self._bold_font())
        self.gxText.setObjectName("gxText")
        self.label_6 = QtWidgets.QLabel(self.centralwidget)
        self.label_6.setGeometry(QtCore.QRect(60, 230, 121, 21))
        self.label_6.setFont(self._bold_font())
        self.label_6.setObjectName("label_6")
        self.label_5 = QtWidgets.QLabel(self.centralwidget)
        self.label_5.setGeometry(QtCore.QRect(60, 390, 121, 21))
        self.label_5.setFont(self._bold_font())
        self.label_5.setObjectName("label_5")
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(60, 350, 121, 21))
        self.label.setFont(self._bold_font())
        self.label.setObjectName("label")
        # Gender radio buttons. NOTE(review): the style sheets below are not
        # valid CSS and have no visual effect; kept verbatim to preserve
        # behavior -- confirm before cleaning up.
        self.sex_2Button = QtWidgets.QRadioButton(self.centralwidget)
        self.sex_2Button.setGeometry(QtCore.QRect(310, 230, 89, 16))
        self.sex_2Button.setStyleSheet("name=\"sex\";")
        self.sex_2Button.setObjectName("sex_2Button")
        # Email input.
        self.emText = QtWidgets.QLineEdit(self.centralwidget)
        self.emText.setGeometry(QtCore.QRect(190, 390, 191, 20))
        self.emText.setFont(self._bold_font())
        self.emText.setObjectName("emText")
        self.emText.setPlaceholderText(":xxx@root.com")
        # Phone input, restricted to 11 digits starting with 1.
        self.phText = QtWidgets.QLineEdit(self.centralwidget)
        self.phText.setGeometry(QtCore.QRect(190, 350, 191, 20))
        self.phText.setFont(self._bold_font())
        self.phText.setObjectName("phText")
        self.phText.setPlaceholderText("请输入11位数字")
        regx = QRegExp("^1[0-9]{10}$")
        validator = QRegExpValidator(regx, self.phText)
        self.phText.setValidator(validator)
        self.sex_1Button = QtWidgets.QRadioButton(self.centralwidget)
        self.sex_1Button.setGeometry(QtCore.QRect(200, 230, 89, 16))
        self.sex_1Button.setStyleSheet("name=\"sex\" checked;")
        self.sex_1Button.setObjectName("sex_1Button")
        # Home address input.
        self.label_8 = QtWidgets.QLabel(self.centralwidget)
        self.label_8.setGeometry(QtCore.QRect(60, 310, 121, 16))
        self.label_8.setFont(self._bold_font())
        self.label_8.setObjectName("label_8")
        self.fmText = QtWidgets.QLineEdit(self.centralwidget)
        self.fmText.setGeometry(QtCore.QRect(190, 310, 191, 20))
        self.fmText.setFont(self._bold_font())
        self.fmText.setObjectName("fmText")
        # Birth date input.
        self.label_9 = QtWidgets.QLabel(self.centralwidget)
        self.label_9.setGeometry(QtCore.QRect(60, 270, 121, 16))
        self.label_9.setFont(self._bold_font())
        self.label_9.setObjectName("label_9")
        self.gxText_2 = QtWidgets.QLineEdit(self.centralwidget)
        self.gxText_2.setGeometry(QtCore.QRect(190, 270, 191, 20))
        self.gxText_2.setFont(self._bold_font())
        self.gxText_2.setObjectName("gxText_2")
        # Avatar button (shows the current avatar as its background image).
        self.txbutton = QtWidgets.QPushButton(self.centralwidget)
        self.txbutton.setGeometry(QtCore.QRect(190, 70, 81, 71))
        self.txbutton.setText("")
        self.txbutton.setObjectName("txbutton")
        path = './avatar0.jpg'
        self.txbutton.setStyleSheet("border-image:url(%s)" % path)
        self.label_10 = QtWidgets.QLabel(self.centralwidget)
        self.label_10.setGeometry(QtCore.QRect(400, 430, 131, 20))
        self.label_10.setText("")
        self.label_10.setObjectName("label_10")
        # Captcha display button (shows the generated captcha image).
        self.yzbtun = QtWidgets.QPushButton(self.centralwidget)
        self.yzbtun.setGeometry(QtCore.QRect(300, 420, 75, 41))
        self.yzbtun.setText("")
        self.yzbtun.setObjectName("yzbtun")
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 562, 23))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        # Signal/slot wiring.
        self.txbutton.clicked.connect(self.test1)
        self.yzbtun.clicked.connect(self.test2)
        self.qrButton.clicked.connect(self.show_message)
        self.sex_1Button.toggled.connect(self.changeTitle)
        self.sex_2Button.toggled.connect(self.changeTitle1)

    def changeTitle(self, value):
        """Radio-button slot: record "male" as the selected gender."""
        L.clear()
        L.append("男")

    def changeTitle1(self, value):
        """Radio-button slot: record "female" as the selected gender."""
        L.clear()
        L.append("女")

    # Choose an avatar image.
    def test1(self):
        """Let the user pick an avatar file; remember and display it."""
        print("点击了图片")
        print(L)
        openfile_name = QFileDialog.getOpenFileName(self)
        path = openfile_name[0]
        print(path)
        T.append(path)
        self.txbutton.setStyleSheet("border-image:url(%s)" % path)

    def test2(self):
        """Generate a new captcha and display it on the captcha button."""
        global ceishi
        # yanzhengma() comes from module.turles1; presumably it also writes
        # ./yanzhengma.png, which the style sheet below relies on -- confirm.
        ceishi = yanzhengma()
        print("ceishi", type(ceishi))
        path1 = './yanzhengma.png'
        self.yzbtun.setStyleSheet("border-image:url(%s)" % path1)

    def show_message(self):
        """Validate the captcha; on success open the Ui_ZC2 window."""
        # Collect all the text fields into the module-level W list.
        # NOTE(review): entries accumulate across repeated clicks -- confirm
        # that Ui_ZC2 only reads the entries it needs.
        nickname = self.ncText.text()
        W.append(nickname)
        style = self.gxText.text()
        W.append(style)
        birthday = self.gxText_2.text()
        W.append(birthday)
        tel = self.phText.text()
        W.append(tel)
        email = self.emText.text()
        W.append(email)
        address = self.fmText.text()
        W.append(address)
        # Build a random 9-character account number starting with "1".
        l = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
        tet = random.sample(l, 8)
        text1 = "1"
        for i in tet:
            text1 += str(i)
        print(text1)
        yzm = ""
        text = self.yzmText.text()
        # ``ceishi`` is set by test2() when a captcha is generated; clicking
        # confirm before requesting one raises NameError (pre-existing).
        for i in ceishi:
            yzm += str(i)
        if text == yzm:
            QtWidgets.QMessageBox.information(self, "提示", "恭喜您,注册成功",
                                              QtWidgets.QMessageBox.Yes)
            self.hide()
            self.zc2 = Ui_ZC2(L, T, W, text1, self.s)
            self.zc2.show()
            self.zc2.zhanghao(text1)
        else:
            QtWidgets.QMessageBox.information(self, "提示", "验证码输入错误!",
                                              QtWidgets.QMessageBox.Yes)
            self.yzmText.clear()

    def retranslateUi(self, MainWindow):
        """Set all translatable texts and lock the window size."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setFixedSize(MainWindow.width(), MainWindow.height())
        MainWindow.setWindowTitle(_translate("MainWindow", "Root"))
        MainWindow.setWindowIcon(QtGui.QIcon("icon1.png"))
        self.label_2.setText(_translate("MainWindow", "输入验证码:"))
        self.qrButton.setText(_translate("MainWindow", "确认"))
        self.label_7.setText(_translate("MainWindow", "上传头像:"))
        self.label_4.setText(_translate("MainWindow", "个性签名:"))
        self.label_3.setText(_translate("MainWindow", "昵称:"))
        self.label_6.setText(_translate("MainWindow", "选择性别:"))
        self.label_5.setText(_translate("MainWindow", "输入邮箱号码:"))
        self.label.setText(_translate("MainWindow", "输入手机号码:"))
        self.sex_2Button.setText(_translate("MainWindow", "女"))
        self.sex_1Button.setText(_translate("MainWindow", "男"))
        self.label_8.setText(_translate("MainWindow", "家庭地址:"))
        self.label_9.setText(_translate("MainWindow", "出生日期:"))
        self.label_10.setText(_translate("MainWindow", "点击获取验证码"))
|
17,507 | e416f6165f896789484782a344f31b8ded94ec35 | from . import utility
from . import v0
class Pomme(v0.Pomme):
    """This environment is the same as v0.Pomme, except it collapses the board at certain intervals."""

    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second': utility.RENDER_FPS
    }

    def __init__(self, *args, **kwargs):
        # BUG FIX: the original called ``super(*args, **kwargs)``, which
        # builds a (misconfigured) super object and never runs the base
        # class __init__ -- so _max_steps/_board_size were never set.
        super().__init__(*args, **kwargs)
        # NOTE(review): assumes 'first_collapse' is always passed as a
        # kwarg; kwargs.get returns None otherwise and range() would fail.
        first_collapse = kwargs.get('first_collapse')
        # Collapse every (max_steps - first_collapse) / board_size steps.
        self.collapses = list(range(
            first_collapse,
            self._max_steps,
            int((self._max_steps - first_collapse) / self._board_size),
        ))

    def _collapse_board(self, ring):
        """Collapses the board at a certain ring radius.

        For example, if the board is 13x13 and ring is 0, then the ring of the
        first row, last row, first column, and last column is all going to be
        turned into rigid walls. All agents in that ring die and all bombs are
        removed without detonating. For further rings, the values get closer
        to the center.

        Args:
            ring: Integer value of which cells to collapse (0 = outer border).

        Returns:
            A copy of the board with the ring replaced by rigid walls.
        """
        board = self._board.copy()

        def collapse(r, c):
            # Handle one cell: kill agent / remove bomb / delete item,
            # then turn the cell into a rigid wall.
            if utility.position_is_agent(board, (r, c)):
                num_agent = board[r][c] - utility.Item.Agent0.value
                agent = self._agents[num_agent]
                agent.die()
            elif utility.position_is_bomb(board, (r, c)):
                self._bombs = [b for b in self._bombs if b.position != (r, c)]
            elif (r, c) in self._items:
                del self._items[(r, c)]
            board[r][c] = utility.Item.Rigid.value

        for cell in range(ring, self._board_size - ring):
            collapse(ring, cell)  # top edge of the ring
            if ring != cell:
                collapse(cell, ring)  # left edge
            end = self._board_size - ring - 1
            collapse(end, cell)  # bottom edge
            if end != cell:
                collapse(cell, end)  # right edge
        return board
|
17,508 | 95ee197422672edcf09e9c8a765243e56bd7629f | # -*- coding: utf-8 -*-
# Public package API: re-export the factory so callers can write
# ``from <package> import DefaultElasticSearchFactory``.
__all__ = ['DefaultElasticSearchFactory']
from .DefaultElasticSearchFactory import DefaultElasticSearchFactory
17,509 | 266814c161632501ef4d49748b4072eb3a7bc723 | import os
import sys
from robotframework_ls.constants import NULL
from robocorp_ls_core.robotframework_log import get_logger
import threading
from typing import Optional, Dict, Set, Iterator, Union, Any
from robocorp_ls_core.protocols import Sentinel, IEndPoint
from robotframework_ls.impl.protocols import (
ILibraryDoc,
ILibraryDocOrError,
ICompletionContext,
)
import itertools
from robocorp_ls_core.watchdog_wrapper import IFSObserver
from robotframework_ls.impl.robot_lsp_constants import (
OPTION_ROBOT_LIBRARIES_LIBDOC_NEEDS_ARGS,
)
from robotframework_ls.impl.libspec_warmup import (
_norm_filename,
_normfile,
LibspecWarmup,
)
from pathlib import Path
from contextlib import contextmanager
import typing
from robotframework_ls.impl.text_utilities import get_digest_from_string
from robocorp_ls_core.basic import normalize_filename
log = get_logger(__name__)
def _get_libspec_mutex_name(libspec_filename):
    """Build the system-wide mutex name guarding one .libspec file."""
    from robocorp_ls_core.system_mutex import generate_mutex_name

    normalized = _norm_filename(libspec_filename)
    stem = os.path.splitext(os.path.basename(normalized))[0]
    return generate_mutex_name(normalized, prefix="%s_" % (stem,))
def _get_additional_info_filename(spec_filename):
additional_info_filename = os.path.join(spec_filename + ".m")
return additional_info_filename
@contextmanager
def _timed_acquire_mutex_for_spec_filename(spec_filename):
    """Context manager holding the inter-process mutex for *spec_filename*.

    Raises:
        RuntimeError: if the mutex cannot be acquired within 30 seconds.
    """
    from robocorp_ls_core.system_mutex import timed_acquire_mutex

    try:
        ctx = timed_acquire_mutex(_get_libspec_mutex_name(spec_filename), timeout=30)
        ctx.__enter__()
    except Exception as e:  # was a bare except; also chain the original cause
        raise RuntimeError(
            f"Unable to get mutex for: {spec_filename} after 30 seconds."
        ) from e
    try:
        yield ctx
    finally:
        # NOTE(review): the system_mutex context manager appears to accept a
        # no-argument __exit__ -- confirm against robocorp_ls_core.
        ctx.__exit__()
def _load_library_doc_and_mtime(libspec_manager, spec_filename: str, obtain_mutex=True):
    """
    Load a library doc (and the spec file's mtime) from *spec_filename*.

    Returns a (libdoc, mtime) tuple, or None if loading failed.

    :param obtain_mutex:
        Should be False if this is part of a bigger operation that already
        has the spec_filename mutex.

    NOTE(review): indentation reconstructed from a whitespace-mangled dump.
    """
    from robotframework_ls.impl import robot_specbuilder
    from robotframework_ls.impl.libspec_markdown_conversion import (
        load_markdown_json_version,
    )

    ctx: Any
    if obtain_mutex:
        ctx = _timed_acquire_mutex_for_spec_filename(spec_filename)
    else:
        # NULL is a no-op context manager: caller already holds the mutex.
        ctx = NULL
    with ctx:
        # We must load it with a mutex to avoid conflicts between generating/reading.
        try:
            mtime = os.path.getmtime(spec_filename)
            if not libspec_manager.is_copy:
                # Prefer the pre-converted markdown version when available.
                libdoc = load_markdown_json_version(
                    libspec_manager, spec_filename, mtime
                )
                if libdoc is None:
                    builder = robot_specbuilder.SpecDocBuilder()
                    libdoc = builder.build(spec_filename)
                    if libdoc.doc_format != "markdown":
                        # Convert in the background for the next load.
                        libspec_manager.schedule_conversion_to_markdown(spec_filename)
                return libdoc, mtime
            else:
                # For a copy we don't use markdown by default, rather
                # we always use the raw format and convert as needed.
                builder = robot_specbuilder.SpecDocBuilder()
                libdoc = builder.build(spec_filename)
                return libdoc, mtime
        except Exception:
            log.exception("Error when loading spec info from: %s", spec_filename)
            return None
def _load_lib_info(libspec_manager, canonical_spec_filename: str, can_regenerate: bool):
    """Load a spec file and wrap it into a _LibInfo (None when loading fails)."""
    loaded = _load_library_doc_and_mtime(libspec_manager, canonical_spec_filename)
    if loaded is None:
        return None
    libdoc, mtime = loaded
    return _LibInfo(libdoc, mtime, canonical_spec_filename, can_regenerate)
# Keys of the additional-info ('.m') JSON sidecar file:
_IS_BUILTIN = "is_builtin"  # bool: spec belongs to the builtins dir (no mtime checks)
_SOURCE_TO_MTIME = "source_to_mtime"  # dict: source path -> mtime at generation time
_UNABLE_TO_LOAD = "unable_to_load"  # bool: spec existed but could not be parsed
def _create_updated_source_to_mtime(library_doc):
sources = set()
source = library_doc.source
if source is not None:
sources.add(source)
for keyword in library_doc.keywords:
source = keyword.source
if source is not None:
sources.add(source)
source_to_mtime = {}
for source in sources:
try:
# i.e.: get it before normalizing (but leave the cache key normalized).
# This is because even on windows the file-system may end up being
# case-dependent on some cases.
mtime = os.path.getmtime(source)
source = _normfile(source)
source_to_mtime[source] = mtime
except Exception:
log.exception("Unable to load source for file: %s", source)
return source_to_mtime
def _create_additional_info(
    libspec_manager, spec_filename, is_builtin, obtain_mutex=True
):
    """Build the dict persisted to the '.m' sidecar for *spec_filename*.

    Returns {} on unexpected errors (best-effort: the sidecar is only an
    optimization for cache invalidation).
    """
    try:
        additional_info = {_IS_BUILTIN: is_builtin}
        if is_builtin:
            # For builtins we don't have to check the mtime
            # (on a new version we update the folder).
            return additional_info

        library_doc_and_mtime = _load_library_doc_and_mtime(
            libspec_manager, spec_filename, obtain_mutex=obtain_mutex
        )
        if library_doc_and_mtime is None:
            additional_info[_UNABLE_TO_LOAD] = True
            return additional_info

        library_doc = library_doc_and_mtime[0]
        additional_info[_SOURCE_TO_MTIME] = _create_updated_source_to_mtime(library_doc)
        return additional_info
    except Exception:  # was a bare except: don't swallow SystemExit/KeyboardInterrupt
        log.exception(
            "Error creating additional info for spec filename: %s", spec_filename
        )
        return {}
def _load_spec_filename_additional_info(spec_filename):
    """
    Loads additional information given a spec filename ({} on any failure).
    """
    import json

    # BUG FIX: computed outside the try block so the except clause can never
    # reference an unbound name.
    additional_info_filename = _get_additional_info_filename(spec_filename)
    try:
        with open(additional_info_filename, "r") as stream:
            return json.load(stream)
    except Exception:  # was a bare except
        log.exception("Unable to load source mtimes from: %s", additional_info_filename)
        return {}
def _dump_spec_filename_additional_info(
    libspec_manager, spec_filename, is_builtin, obtain_mutex=True
):
    """
    Creates a filename with additional information not directly available in the
    spec.
    """
    import json

    try:
        if not libspec_manager.is_copy:
            # Copies never schedule markdown conversion.
            libspec_manager.schedule_conversion_to_markdown(spec_filename)
    except Exception:  # was a bare except; conversion is best-effort
        log.exception("Error converting %s to markdown.", spec_filename)

    source_to_mtime = _create_additional_info(
        libspec_manager, spec_filename, is_builtin, obtain_mutex=obtain_mutex
    )
    additional_info_filename = _get_additional_info_filename(spec_filename)
    with open(additional_info_filename, "w") as stream:
        json.dump(source_to_mtime, stream, indent=2, sort_keys=True)
class _LibInfo(object):
    """In-memory record for one loaded .libspec file (doc + cache-validation data).

    NOTE(review): indentation reconstructed from a whitespace-mangled dump.
    """

    __slots__ = [
        "library_doc",
        "mtime",
        "_canonical_spec_filename",
        "_additional_info",
        "_invalid",
        "_can_regenerate",
    ]

    def __init__(self, library_doc: ILibraryDoc, mtime, spec_filename, can_regenerate):
        """
        :param library_doc:
        :param mtime:
        :param spec_filename:
        :param bool can_regenerate:
            False means that the information from this file can't really be
            regenerated (i.e.: this is a spec file from a library or created
            by the user).
        """
        assert library_doc
        assert mtime
        assert spec_filename
        self.library_doc = library_doc
        self.mtime = mtime
        self._can_regenerate = can_regenerate
        self._canonical_spec_filename = spec_filename
        # Lazily loaded from the '.m' sidecar in verify_sources_sync.
        self._additional_info = None
        self._invalid = False

    def __str__(self):
        return f"_LibInfo({self.library_doc}, {self.mtime})"

    def verify_sources_sync(self):
        """
        :return bool:
            True if everything is ok and this library info can be used. Otherwise,
            the spec file and the _LibInfo must be recreated.
        """
        if not self._can_regenerate:
            # This means that this info was generated by a library or the user
            # himself, thus, we can't regenerate it.
            return True

        if self._invalid:  # Once invalid, always invalid.
            return False

        additional_info = self._additional_info
        if additional_info is None:
            additional_info = _load_spec_filename_additional_info(
                self._canonical_spec_filename
            )
        if additional_info.get(_IS_BUILTIN, False):
            # Builtins are versioned by folder; no mtime checks needed.
            return True

        source_to_mtime = additional_info.get(_SOURCE_TO_MTIME)
        if source_to_mtime is None:
            # Nothing to validate...
            return True

        updated_source_to_mtime = _create_updated_source_to_mtime(self.library_doc)
        if source_to_mtime != updated_source_to_mtime:
            log.info(
                "Library %s is invalid. Current source to mtime:\n%s\nChanged from:\n%s"
                % (self.library_doc.name, source_to_mtime, updated_source_to_mtime)
            )
            self._invalid = True
            return False

        return True
class _FolderInfo(object):
    """Tracks the .libspec files found in one folder (optionally recursive).

    NOTE(review): indentation reconstructed from a whitespace-mangled dump.
    """

    def __init__(self, folder_path, recursive):
        self.folder_path = folder_path
        self.recursive = recursive
        # Maps normalized .libspec path -> _LibInfo (None = not loaded yet).
        self.libspec_canonical_filename_to_info = {}
        # Filesystem watch handle; the NULL object until start_watch succeeds.
        self._watch = NULL
        self._lock = threading.Lock()

    def start_watch(self, observer, notifier):
        """Begin watching folder_path for .py/.libspec changes (idempotent)."""
        with self._lock:
            if self._watch is NULL:
                if not os.path.isdir(self.folder_path):
                    if not os.path.exists(self.folder_path):
                        log.info(
                            "Trying to track changes in path which does not exist: %s",
                            self.folder_path,
                        )
                    else:
                        log.info(
                            "Trying to track changes in path which is not a folder: %s",
                            self.folder_path,
                        )
                    return

                log.debug("Tracking folder for changes: %s", self.folder_path)
                from robocorp_ls_core.watchdog_wrapper import PathInfo

                folder_path = self.folder_path
                self._watch = observer.notify_on_any_change(
                    [PathInfo(folder_path, recursive=self.recursive)],
                    notifier.on_change,
                    (self._on_change_spec,),
                    extensions=(".py", ".libspec"),
                )

    def _on_change_spec(self, spec_file):
        """Watcher callback: add or drop the single changed spec entry."""
        with self._lock:
            spec_file_key = _norm_filename(spec_file)
            # Just add/remove that specific spec file from the tracked list.
            # Copy-on-write so concurrent readers see a consistent dict.
            libspec_canonical_filename_to_info = (
                self.libspec_canonical_filename_to_info.copy()
            )
            if os.path.exists(spec_file):
                # None means: info must be (re)loaded on demand.
                libspec_canonical_filename_to_info[spec_file_key] = None
            else:
                libspec_canonical_filename_to_info.pop(spec_file_key, None)
            self.libspec_canonical_filename_to_info = libspec_canonical_filename_to_info

    def synchronize(self):
        """Rescan the folder and refresh the tracked spec-file map."""
        with self._lock:
            try:
                self.libspec_canonical_filename_to_info = self._collect_libspec_info(
                    [self.folder_path],
                    self.libspec_canonical_filename_to_info,
                    recursive=self.recursive,
                )
            except Exception:
                log.exception("Error when synchronizing: %s", self.folder_path)

    def dispose(self):
        """Stop watching and drop all tracked info."""
        with self._lock:
            watch = self._watch
            self._watch = NULL
            watch.stop_tracking()
            self.libspec_canonical_filename_to_info = {}

    def _collect_libspec_info(self, folders, old_libspec_filename_to_info, recursive):
        """Scan *folders* for .libspec files, reusing entries whose mtime is unchanged."""
        seen_libspec_files = set()
        if recursive:
            for folder in folders:
                if os.path.isdir(folder):
                    for root, _dirs, files in os.walk(folder):
                        for filename in files:
                            if filename.lower().endswith(".libspec"):
                                seen_libspec_files.add(os.path.join(root, filename))
        else:
            for folder in folders:
                if os.path.isdir(folder):
                    for filename in os.listdir(folder):
                        if filename.lower().endswith(".libspec"):
                            seen_libspec_files.add(os.path.join(folder, filename))

        new_libspec_filename_to_info: Dict[str, _LibInfo] = {}
        for filename in seen_libspec_files:
            filename = _norm_filename(filename)
            info = old_libspec_filename_to_info.get(filename)
            if info is not None:
                try:
                    curr_mtime = os.path.getmtime(filename)
                except:
                    # it was deleted in the meanwhile...
                    continue
                else:
                    if info.mtime != curr_mtime:
                        # The spec filename mtime changed, so, set to None
                        # to reload it.
                        info = None
            new_libspec_filename_to_info[filename] = info
        return new_libspec_filename_to_info
class LibspecManager(object):
"""
Used to manage the libspec files.
.libspec files are searched in the following directories:
- PYTHONPATH folders (not recursive)
- Workspace folders (recursive -- notifications from the LSP)
- ${user}.robotframework-ls/specs/${python_hash} (not recursive)
It searches for .libspec files in the folders tracked and provides the
keywords that are available from those (properly caching data as needed).
"""
    @classmethod
    def get_robot_version(cls) -> str:
        """Return the Robot Framework version string."""
        from robotframework_ls.impl import robot_version

        return robot_version.get_robot_version()
    @classmethod
    def get_robot_major_version(cls) -> int:
        """Return the Robot Framework major version as an int."""
        from robotframework_ls.impl import robot_version

        return robot_version.get_robot_major_version()
    @classmethod
    def get_internal_libspec_dir(cls) -> str:
        """Return the per-interpreter/per-robot-version dir for generated specs.

        The path embeds a digest of sys.executable plus the Robot Framework
        version, so different interpreters or robot installs never share
        cached libspec files.
        """
        from robotframework_ls import robot_config

        home = robot_config.get_robotframework_ls_home()
        pyexe: Union[bytes, str] = sys.executable
        if isinstance(pyexe, bytes):
            pyexe = pyexe.decode("utf-8", "replace")
        digest: str = get_digest_from_string(pyexe)
        v = cls.get_robot_version()
        # Note: _v1: information on the mtime of the libspec sources now available.
        return os.path.join(home, "specs", cls.INTERNAL_VERSION, "%s_%s" % (digest, v))
    @classmethod
    def get_internal_builtins_libspec_dir(cls, internal_libspec_dir=None):
        """Return the 'builtins' subdir of the internal libspec dir."""
        return os.path.join(
            internal_libspec_dir or cls.get_internal_libspec_dir(), "builtins"
        )
# On v2 we disambiguate by using a hash for the filenames if we're generating
# a libspec for a target filename.
INTERNAL_VERSION = "v2"
    def create_copy(self):
        """Return a LibspecManager sharing this one's directories.

        The copy has no observer/endpoint and does not pre-generate specs;
        it is flagged via is_copy so markdown conversion scheduling is
        skipped (see _load_library_doc_and_mtime).
        """
        return LibspecManager(
            builtin_libspec_dir=self._builtins_libspec_dir,
            user_libspec_dir=self._user_libspec_dir,
            dir_cache_dir=self._dir_cache_dir,
            observer=None,
            endpoint=None,
            pre_generate_libspecs=False,
            cache_libspec_dir=self._cache_libspec_dir,
            is_copy=True,
        )
    def __init__(
        self,
        builtin_libspec_dir: Optional[str] = None,
        user_libspec_dir: Optional[str] = None,
        dir_cache_dir: Optional[str] = None,
        observer: Optional[IFSObserver] = None,
        endpoint: Optional[IEndPoint] = None,
        pre_generate_libspecs: bool = False,
        cache_libspec_dir: Optional[str] = None,
        *,
        is_copy: bool = False,
    ):
        """
        :param builtin_libspec_dir:
            Only to be used in tests (to regenerate the builtins)!
            (the original docstring referred to a non-existent
            ``__internal_libspec_dir__`` parameter)
        :param is_copy:
            True when built through create_copy(); copies skip libspec
            pre-generation and markdown conversion scheduling.

        NOTE(review): indentation reconstructed from a whitespace-mangled dump.
        """
        from robocorp_ls_core import watchdog_wrapper
        from robocorp_ls_core.cache import DirCache
        from robotframework_ls import robot_config

        self._is_copy = is_copy
        self._dir_cache_dir = dir_cache_dir or os.path.join(
            robot_config.get_robotframework_ls_home(), ".cache"
        )
        dir_cache = DirCache(self._dir_cache_dir)
        self._libspec_dir = self.get_internal_libspec_dir()
        self._user_libspec_dir = user_libspec_dir or os.path.join(
            self._libspec_dir, "user"
        )
        self._cache_libspec_dir = cache_libspec_dir or os.path.join(
            self._libspec_dir, "cache"
        )
        self._builtins_libspec_dir = (
            builtin_libspec_dir
            or self.get_internal_builtins_libspec_dir(self._libspec_dir)
        )
        log.info("User libspec dir: %s", self._user_libspec_dir)
        log.info("Builtins libspec dir: %s", self._builtins_libspec_dir)
        log.info("Cache libspec dir: %s", self._cache_libspec_dir)

        self._deprecated_library_name_to_replacement: Dict[str, str] = {}

        # Create the output dirs up-front.
        try:
            os.makedirs(self._user_libspec_dir)
        except:
            # Ignore exception if it's already created.
            pass
        try:
            os.makedirs(self._builtins_libspec_dir)
        except:
            # Ignore exception if it's already created.
            pass
        try:
            os.makedirs(self._cache_libspec_dir)
        except:
            # Ignore exception if it's already created.
            pass

        self.pre_generate_libspecs = pre_generate_libspecs
        if pre_generate_libspecs:
            from robotframework_ls.impl.libspec_markdown_conversion import (
                LibspecMarkdownConversion,
            )

            self.libspec_markdown_conversion: Optional[
                LibspecMarkdownConversion
            ] = LibspecMarkdownConversion(self)
        else:
            self.libspec_markdown_conversion = None

        self._libspec_warmup = LibspecWarmup(endpoint, dir_cache)
        self._libspec_failures_cache: Dict[
            tuple, str
        ] = {}  # key -> error creating libspec
        # Some operations may only run on the creating thread
        # (see _check_in_main_thread).
        self._main_thread = threading.current_thread()

        if observer is None:
            from robocorp_ls_core.watchdog_wrapper import create_observer

            log.info("No observer passed to LibspecManager (creating dummy observer).")
            observer = create_observer("dummy", ())
        self._fs_observer = observer

        self._file_changes_notifier = watchdog_wrapper.create_notifier(
            self._on_file_changed, timeout=0.5, extensions=(".libspec", ".py")
        )

        # Spec info found in the workspace
        self._workspace_folder_uri_to_folder_info: Dict[str, _FolderInfo] = {}
        self._additional_pythonpath_folder_to_folder_info: Dict[str, _FolderInfo] = {}

        # Spec info found in the pythonpath
        pythonpath_folder_to_folder_info: Dict[str, _FolderInfo] = {}
        # Deduplicate sys.path entries by their resolved path.
        found = set()
        for path in sys.path:
            if path:
                try:
                    resolved = Path(path).resolve()  # also solves links.
                except:
                    log.exception(
                        f"Unable to Path.resolve({path!r}) (resolving PYTHONPATH entries)."
                    )
                    continue
                if resolved in found:
                    continue
                found.add(resolved)
                if os.path.isdir(path):
                    pythonpath_folder_to_folder_info[path] = _FolderInfo(
                        path, recursive=False
                    )
        self._pythonpath_folder_to_folder_info: Dict[
            str, _FolderInfo
        ] = pythonpath_folder_to_folder_info

        # Spec info found in internal dirs (autogenerated)
        self._internal_folder_to_folder_info: Dict[str, _FolderInfo] = {
            self._user_libspec_dir: _FolderInfo(
                self._user_libspec_dir, recursive=False
            ),
            self._builtins_libspec_dir: _FolderInfo(
                self._builtins_libspec_dir, recursive=False
            ),
        }

        # Must be set from the outside world when needed.
        self.config = None

        if self.pre_generate_libspecs:
            log.debug("Generating builtin libraries libspec.")
            self._libspec_warmup.gen_builtin_libraries(self)

        log.debug("Synchronizing internal caches.")
        self._synchronize()
        log.debug("Finished initializing LibspecManager.")
    @property
    def is_copy(self) -> bool:
        # True when this manager was built through create_copy().
        return self._is_copy
    def schedule_conversion_to_markdown(self, spec_filename: str):
        """Queue *spec_filename* for background markdown conversion.

        No-op unless pre_generate_libspecs created the conversion helper.
        """
        if self.libspec_markdown_conversion is not None:
            self.libspec_markdown_conversion.schedule_conversion_to_markdown(
                spec_filename
            )
@property
def fs_observer(self) -> IFSObserver:
    """The filesystem observer handed to tracked folders' ``start_watch``."""
    return self._fs_observer
def _check_in_main_thread(self):
curr_thread = threading.current_thread()
if self._main_thread is not curr_thread:
raise AssertionError(
f"This may only be called at the thread: {self._main_thread}. Current thread: {curr_thread}"
)
@property
def config(self):
    """The currently-set configuration (``None`` until set from the outside)."""
    return self._config
@config.setter
def config(self, config):
    """Set the configuration and reconcile the tracked PYTHONPATH folders.

    Adds/removes additional PYTHONPATH folders so they match
    OPTION_ROBOT_PYTHONPATH, optionally pre-generates libspecs and refreshes
    the deprecated-library replacement map. Must be called on the main thread.
    """
    from robotframework_ls.impl.robot_lsp_constants import (
        OPTION_ROBOT_LIBRARIES_LIBDOC_PRE_GENERATE,
    )
    from robocorp_ls_core.basic import make_unique
    from robotframework_ls import robot_config

    self._check_in_main_thread()
    from robotframework_ls.impl.robot_lsp_constants import OPTION_ROBOT_PYTHONPATH

    self._config = config
    existing_entries = set(self._additional_pythonpath_folder_to_folder_info.keys())
    if config is not None:
        # Fix: the original computed this exact set twice back-to-back;
        # the duplicate assignment was removed.
        pythonpath_entries = set(
            config.get_setting(OPTION_ROBOT_PYTHONPATH, list, [])
        )
        for new_pythonpath_entry in pythonpath_entries:
            new_pythonpath_entry = os.path.abspath(new_pythonpath_entry)
            if new_pythonpath_entry not in existing_entries:
                self.add_additional_pythonpath_folder(new_pythonpath_entry)
        # NOTE(review): entries are abspath-normalized when added but compared
        # raw on removal — verify relative entries are removed as intended.
        for old_entry in existing_entries:
            if old_entry not in pythonpath_entries:
                self.remove_additional_pythonpath_folder(old_entry)

        pre_generate = []
        pre_generate.extend(
            config.get_setting(OPTION_ROBOT_LIBRARIES_LIBDOC_PRE_GENERATE, list, [])
        )
        if self.pre_generate_libspecs:
            log.debug("Generating user/pythonpath libraries libspec.")
            self._libspec_warmup.gen_user_libraries(self, make_unique(pre_generate))

    self._deprecated_library_name_to_replacement = (
        robot_config.get_robot_libraries_deprecated_name_to_replacement(config)
    )
@property
def user_libspec_dir(self) -> str:
    """Internal (autogenerated) folder holding libspecs for user libraries."""
    return self._user_libspec_dir
@property
def cache_libspec_dir(self) -> str:
    """Folder used for cached libspecs (reads ``_cache_libspec_dir``)."""
    return self._cache_libspec_dir
def _on_file_changed(self, spec_file, folder_info_on_change_spec):
    """Handle a watched-file change.

    Drops memoized libspec-generation failures whose library name appears in
    the changed path and forwards .libspec changes to the folder info.
    """
    log.debug("File change detected: %s", spec_file)

    # The library related to a cached failure may now be importable again.
    stale = [key for key in self._libspec_failures_cache if key[0] in spec_file]
    if stale:
        # Always set as a whole (to avoid racing conditions).
        self._libspec_failures_cache = {
            key: value
            for key, value in self._libspec_failures_cache.items()
            if key[0] not in spec_file
        }

    lowername = spec_file.lower()
    if lowername.endswith(".libspec"):
        folder_info_on_change_spec(spec_file)
    elif lowername.endswith(".py"):
        # Note: right now we don't act on .py info.
        # This means that the caches for libraries will actually be invalid
        # until someone calls 'libspec_manager.get_library_doc_or_error'
        # for that library again (at which point it verifies the timestamp
        # of the library).
        pass
def add_workspace_folder(self, folder_uri: str):
    """Start tracking a workspace folder recursively (no-op if already tracked)."""
    self._check_in_main_thread()
    from robocorp_ls_core import uris

    if folder_uri in self._workspace_folder_uri_to_folder_info:
        log.debug("Workspace folder already added: %s", folder_uri)
        return

    log.debug("Added workspace folder: %s", folder_uri)
    updated = self._workspace_folder_uri_to_folder_info.copy()
    tracked = updated[folder_uri] = _FolderInfo(
        uris.to_fs_path(folder_uri), recursive=True
    )
    # Replace the mapping as a whole before watching/scanning.
    self._workspace_folder_uri_to_folder_info = updated
    tracked.start_watch(self._fs_observer, self._file_changes_notifier)
    tracked.synchronize()
def remove_workspace_folder(self, folder_uri: str):
    """Stop tracking a workspace folder (no-op if not tracked)."""
    self._check_in_main_thread()
    if folder_uri not in self._workspace_folder_uri_to_folder_info:
        log.debug("Workspace folder already removed: %s", folder_uri)
        return

    log.debug("Removed workspace folder: %s", folder_uri)
    updated = self._workspace_folder_uri_to_folder_info.copy()
    updated.pop(folder_uri, NULL).dispose()
    self._workspace_folder_uri_to_folder_info = updated
def add_additional_pythonpath_folder(self, folder_path):
    """Start tracking a configured extra PYTHONPATH folder recursively."""
    self._check_in_main_thread()
    if folder_path in self._additional_pythonpath_folder_to_folder_info:
        log.debug("Additional pythonpath folder already added: %s", folder_path)
        return

    log.debug("Added additional pythonpath folder: %s", folder_path)
    updated = self._additional_pythonpath_folder_to_folder_info.copy()
    tracked = updated[folder_path] = _FolderInfo(folder_path, recursive=True)
    self._additional_pythonpath_folder_to_folder_info = updated
    tracked.start_watch(self._fs_observer, self._file_changes_notifier)
    tracked.synchronize()
def remove_additional_pythonpath_folder(self, folder_path):
    """Stop tracking a configured extra PYTHONPATH folder (no-op if unknown)."""
    self._check_in_main_thread()
    if folder_path not in self._additional_pythonpath_folder_to_folder_info:
        log.debug("Additional pythonpath folder already removed: %s", folder_path)
        return

    log.debug("Removed additional pythonpath folder: %s", folder_path)
    updated = self._additional_pythonpath_folder_to_folder_info.copy()
    updated.pop(folder_path, NULL).dispose()
    self._additional_pythonpath_folder_to_folder_info = updated
def synchronize_workspace_folders(self):
    """Re-watch and re-scan every tracked workspace folder."""
    for tracked in self._workspace_folder_uri_to_folder_info.values():
        tracked.start_watch(self._fs_observer, self._file_changes_notifier)
        tracked.synchronize()
def synchronize_pythonpath_folders(self):
    """Re-watch and re-scan every sys.path-derived folder."""
    for tracked in self._pythonpath_folder_to_folder_info.values():
        tracked.start_watch(self._fs_observer, self._file_changes_notifier)
        tracked.synchronize()
def synchronize_additional_pythonpath_folders(self):
    """Re-watch and re-scan every configured extra PYTHONPATH folder."""
    for tracked in self._additional_pythonpath_folder_to_folder_info.values():
        tracked.start_watch(self._fs_observer, self._file_changes_notifier)
        tracked.synchronize()
def synchronize_internal_libspec_folders(self):
    """Re-watch and re-scan the internal (autogenerated libspec) folders."""
    for tracked in self._internal_folder_to_folder_info.values():
        tracked.start_watch(self._fs_observer, self._file_changes_notifier)
        tracked.synchronize()
def _synchronize(self):
"""
Updates the internal caches related to the tracked .libspec files found.
This can be a slow call as it may traverse the whole workspace folders
hierarchy, so, it should be used only during startup to fill the initial
info.
"""
self.synchronize_workspace_folders()
self.synchronize_pythonpath_folders()
self.synchronize_additional_pythonpath_folders()
self.synchronize_internal_libspec_folders()
def collect_all_tracked_folders(self) -> Iterator[str]:
    """Yield the filesystem path of every folder this manager tracks."""
    from robocorp_ls_core import uris

    for folder_uri in self._workspace_folder_uri_to_folder_info:
        yield uris.to_fs_path(folder_uri)
    yield from self._pythonpath_folder_to_folder_info
    yield from self._additional_pythonpath_folder_to_folder_info
def iter_lib_info(self, builtin=False):
    """Iterate over known library infos, applying the libraries blacklist and
    patching the docs of deprecated libraries with their replacement notice.
    """
    from robotframework_ls.impl.text_utilities import has_deprecated_text

    blacklist = ()
    if self.config is not None:
        from robotframework_ls.impl.robot_generated_lsp_constants import (
            OPTION_ROBOT_LIBRARIES_BLACKLIST,
        )

        blacklist = self.config.get_setting(
            OPTION_ROBOT_LIBRARIES_BLACKLIST, list, ()
        )
        if not blacklist:
            blacklist = ()
        else:
            blacklist = set(blacklist)

    deprecated_library_name_to_replacement = (
        self._deprecated_library_name_to_replacement
    )
    for libinfo in self._iter_lib_info(builtin):
        if libinfo.library_doc.name not in blacklist:
            deprecated = deprecated_library_name_to_replacement.get(
                libinfo.library_doc.name
            )
            if deprecated is not None:
                if not libinfo.library_doc.doc:
                    # Fix: assign the replacement notice to the doc TEXT; the
                    # original assigned it to library_doc itself, replacing the
                    # LibraryDoc object with a plain string (the sibling
                    # branches below all manipulate library_doc.doc).
                    libinfo.library_doc.doc = deprecated
                elif not has_deprecated_text(libinfo.library_doc.doc):
                    # Keep the pristine doc so it can be restored if the
                    # library stops being marked deprecated.
                    libinfo.library_doc.__original_doc__ = libinfo.library_doc.doc
                    libinfo.library_doc.doc = deprecated + libinfo.library_doc.doc
            else:
                if libinfo.library_doc.doc and hasattr(
                    libinfo.library_doc, "__original_doc__"
                ):
                    libinfo.library_doc.doc = libinfo.library_doc.__original_doc__
                    delattr(libinfo.library_doc, "__original_doc__")
            yield libinfo
def _iter_lib_info(self, builtin=False):
    """
    Yield the known _LibInfo entries, lazily loading them from disk.

    :param builtin: when True only the builtins libspec dir is considered
        (in addition to workspace/pythonpath folders).

    :rtype: generator(_LibInfo)
    """
    # Note: the iteration order is important (first ones are visited earlier
    # and have higher priority): workspace folders, then sys.path folders,
    # then configured extra pythonpath folders, then internal dirs.
    iter_in = []
    for (_uri, info) in self._workspace_folder_uri_to_folder_info.items():
        if info.libspec_canonical_filename_to_info:
            iter_in.append((info.libspec_canonical_filename_to_info, False))

    for (_uri, info) in self._pythonpath_folder_to_folder_info.items():
        if info.libspec_canonical_filename_to_info:
            iter_in.append((info.libspec_canonical_filename_to_info, False))

    for (_uri, info) in self._additional_pythonpath_folder_to_folder_info.items():
        if info.libspec_canonical_filename_to_info:
            iter_in.append((info.libspec_canonical_filename_to_info, False))

    if builtin:
        info = self._internal_folder_to_folder_info[self._builtins_libspec_dir]
        if info.libspec_canonical_filename_to_info:
            iter_in.append((info.libspec_canonical_filename_to_info, True))
    else:
        for (_uri, info) in self._internal_folder_to_folder_info.items():
            if info.libspec_canonical_filename_to_info:
                iter_in.append((info.libspec_canonical_filename_to_info, True))

    for canonical_filename_to_info, can_regenerate in iter_in:
        # list(...) snapshot: the dict may be updated while iterating.
        for canonical_spec_filename, info in list(
            canonical_filename_to_info.items()
        ):
            if info is None:
                # Lazily load and memoize the parsed libspec.
                info = canonical_filename_to_info[
                    canonical_spec_filename
                ] = _load_lib_info(self, canonical_spec_filename, can_regenerate)

            # Note: we could end up yielding a library with the same name
            # multiple times due to its scope. It's up to the caller to
            # validate that.
            if info is not None and info.library_doc is not None:
                yield info
def get_library_names(self):
    """Return the sorted, de-duplicated names of all known libraries."""
    names = {info.library_doc.name for info in self.iter_lib_info()}
    return sorted(names)
def _get_cached_error(
self,
libname,
*,
is_builtin=False,
target_file: Optional[str] = None,
args: Optional[str] = None,
) -> Optional[str]:
cache_key = (libname, is_builtin, target_file, args)
return self._libspec_failures_cache.get(cache_key)
def _create_libspec(
    self,
    libname,
    *,
    is_builtin=False,
    target_file: Optional[str] = None,
    args: Optional[str] = None,
) -> Optional[str]:
    """Generate the libspec for libname, memoizing failures.

    :param target_file:
        If given this is the library file (i.e.: c:/foo/bar.py) which is the
        actual library we're creating the spec for.

    :return: the error message, or None when the libspec was generated.
    """
    key = (libname, is_builtin, target_file, args)
    cached = self._libspec_failures_cache.get(key, Sentinel.SENTINEL)
    if cached is not Sentinel.SENTINEL:
        # A previous attempt with these exact parameters already failed.
        return typing.cast(Optional[str], cached)

    error_creating = self._cached_create_libspec(
        libname, is_builtin, target_file, args
    )
    if error_creating is not None:
        # Always set as a whole (to avoid racing conditions).
        updated = self._libspec_failures_cache.copy()
        updated[key] = error_creating
        self._libspec_failures_cache = updated
    return error_creating
def _subprocess_check_output(self, *args, **kwargs):
    """Thin indirection over ``subprocess.check_output``."""
    # Only done for mocking.
    from robocorp_ls_core.subprocess_wrapper import subprocess

    return subprocess.check_output(*args, **kwargs)
def _cached_create_libspec(
    self,
    libname: str,
    is_builtin: bool,
    target_file: Optional[str],
    args: Optional[str],
    *,
    _internal_force_text=False,  # Should only be set from within this function.
) -> Optional[str]:
    """
    Runs ``python -m robot.libdoc`` in a subprocess to generate the .libspec
    for the given library, guarded by a per-spec-filename mutex.

    Returns an error message if it wasn't able to generate it or None if
    it did generate it.
    """
    from robotframework_ls.impl import robot_constants

    if not is_builtin:
        if not target_file:
            is_builtin = libname in robot_constants.STDLIBS

    import time
    from robocorp_ls_core.subprocess_wrapper import subprocess
    from robocorp_ls_core.robotframework_log import get_log_level

    acquire_mutex = _timed_acquire_mutex_for_spec_filename
    if _internal_force_text:
        # In this case this is a recursive call and we already have the lock.
        acquire_mutex = NULL

    log_exception = log.exception
    if is_builtin and libname == "Dialogs" and get_log_level() < 1:
        # Dialogs may have dependencies that are not available, so, don't show
        # it unless verbose mode is enabled.
        log_exception = log.debug

    # Refuse names that are only made of separators/dots (nothing to generate).
    if not libname.replace(".", "").replace("/", "").replace("\\", "").strip():
        return f"Unable to generate libspec for: {libname}"

    additional_path = None
    additional_path_exists = False

    log_time = True
    cwd = None

    if target_file is not None:
        # When pointing at a real file, libdoc is given the containing folder
        # via -P and the libname is reduced to the module's base name.
        additional_path = os.path.dirname(target_file)
        if os.path.splitext(os.path.basename(target_file))[0] == "__init__":
            additional_path = os.path.dirname(additional_path)
        additional_path_exists = os.path.exists(additional_path)
        if additional_path and additional_path_exists:
            cwd = additional_path
        if libname.endswith(("/", "\\")):
            libname = libname[:-1]
        libname = os.path.basename(libname)
        if libname.lower().endswith((".py", ".class", ".java")):
            libname = os.path.splitext(libname)[0]

    curtime = time.time()

    try:
        try:
            call = [sys.executable]
            major_version = self.get_robot_major_version()
            if major_version < 4:
                call.extend("-m robot.libdoc --format XML".split())
            else:
                # RF 4+ supports keeping the raw doc format in the spec.
                call.extend(
                    "-m robot.libdoc --format XML --specdocformat RAW".split()
                )

            if additional_path and additional_path_exists:
                call.extend(["-P", os.path.normpath(additional_path)])

            if _internal_force_text:
                # Retry path: force plain-text docs (see CalledProcessError below).
                call.append("--docformat")
                call.append("text")

            # Note: always set as a whole, so, iterate in generator is thread-safe.
            for entry in self._additional_pythonpath_folder_to_folder_info:
                if os.path.exists(entry):
                    call.extend(["-P", os.path.normpath(entry)])

            if not args:
                call.append(libname)
            else:
                # libdoc accepts 'Name::arg1:arg2' to instantiate with args.
                call.append("::".join([libname, args]))

            libspec_filename = self._compute_libspec_filename(
                libname, is_builtin, target_file, args
            )

            log.debug(f"Obtaining mutex to generate libspec: {libspec_filename}.")
            with acquire_mutex(libspec_filename):  # Could fail.
                log.debug(
                    f"Obtained mutex to generate libspec: {libspec_filename}."
                )
                call.append(libspec_filename)

                # Remember the previous mtime to detect whether the subprocess
                # actually (re)wrote the spec even if it reported an OSError.
                mtime: float = -1
                try:
                    mtime = os.path.getmtime(libspec_filename)
                except:
                    pass

                log.debug(
                    "Generating libspec for: %s.\nCwd:%s\nCommand line:\n%s",
                    libname,
                    cwd,
                    " ".join(call),
                )
                try:
                    try:
                        # Note: stdout is always subprocess.PIPE in this call.
                        # Note: the env is always inherited (the process which has
                        # the LibspecManager must be the target env already).
                        self._subprocess_check_output(
                            call,
                            stderr=subprocess.STDOUT,
                            stdin=subprocess.PIPE,
                            cwd=cwd,
                        )
                    except OSError as e:
                        log.exception("Error calling: %s", call)
                        # We may have something as: Ignore OSError: [WinError 6] The handle is invalid,
                        # give the result based on whether the file changed on disk.
                        try:
                            if mtime != os.path.getmtime(libspec_filename):
                                _dump_spec_filename_additional_info(
                                    self,
                                    libspec_filename,
                                    is_builtin=is_builtin,
                                    obtain_mutex=False,
                                )
                                return None
                        except:
                            pass

                        log.debug("Not retrying after OSError failure.")
                        return str(e)

                except subprocess.CalledProcessError as e:
                    if not _internal_force_text:
                        # Libraries documented in reST need docutils; retry
                        # once forcing plain-text docs instead of failing.
                        if (
                            b"reST format requires 'docutils' module to be installed"
                            in e.output
                        ):
                            return self._cached_create_libspec(
                                libname,
                                is_builtin,
                                target_file,
                                args,
                                _internal_force_text=True,
                            )

                    log_exception(
                        "Error creating libspec: %s.\nReturn code: %s\nOutput:\n%s",
                        libname,
                        e.returncode,
                        e.output,
                    )
                    bytes_output = e.output
                    output = bytes_output.decode("utf-8", "replace")
                    # Remove things we don't want to show.
                    for s in ("Try --help", "--help", "Traceback"):
                        index = output.find(s)
                        if index >= 0:
                            output = output[:index].strip()

                    if output:
                        return output
                    return f"Error creating libspec: {output}"

                _dump_spec_filename_additional_info(
                    self,
                    libspec_filename,
                    is_builtin=is_builtin,
                    obtain_mutex=False,
                )
                return None
        except Exception as e:
            log_exception("Error creating libspec: %s", libname)
            return str(e)
    finally:
        if log_time:
            delta = time.time() - curtime
            log.debug("Took: %.2fs to generate info for: %s" % (delta, libname))
def dispose(self):
    """Release the file-change notifier and the markdown conversion helper."""
    self._file_changes_notifier.dispose()
    conversion = self.libspec_markdown_conversion
    if conversion is not None:
        conversion.dispose()
def _compute_libspec_filename(
    self,
    libname: str,
    is_builtin: bool = False,
    target_file: Optional[str] = None,
    args: Optional[str] = None,
):
    """Map a library (plus optional source file and args) to its .libspec path.

    Standard libraries go to the builtins dir; everything else to the user
    libspec dir. target_file/args are folded into the name as digests so
    different sources or instantiation arguments never share a spec file.
    """
    from robotframework_ls.impl import robot_constants

    target_dir = (
        self._builtins_libspec_dir
        if libname in robot_constants.STDLIBS
        else self._user_libspec_dir
    )

    if target_file:
        digest = get_digest_from_string(target_file)
        if args:
            digest += "_" + get_digest_from_string(args)
        basename = digest + ".libspec"
    elif args:
        basename = libname + get_digest_from_string(args) + ".libspec"
    else:
        basename = libname + ".libspec"
    return os.path.join(target_dir, basename)
def _do_create_libspec_on_get(
self, libname, target_file: Optional[str], args: Optional[str], is_builtin: bool
) -> Optional[str]:
error_creating = self._create_libspec(
libname, target_file=target_file, args=args, is_builtin=is_builtin
)
if error_creating is None:
self.synchronize_internal_libspec_folders()
return error_creating
def _get_library_target_filename(
    self, libname: str, current_doc_uri: Optional[str] = None
) -> Optional[str]:
    """Resolve a library name/path to an actual file on disk, if possible.

    Absolute paths are returned as-is; relative names are tried against the
    current document's folder (also as a package dir or with '.py' appended)
    and, for explicit file references, against all tracked PYTHONPATH folders.
    Returns None when nothing matches.
    """
    from robocorp_ls_core import uris

    if os.path.isabs(libname):
        return libname

    target_file: Optional[str] = None

    # Check if it maps to a file relative to the current document's folder.
    if current_doc_uri is not None:
        doc_dir = os.path.dirname(uris.to_fs_path(current_doc_uri))
        if doc_dir and os.path.isdir(doc_dir):
            candidate = os.path.join(doc_dir, libname)
            if os.path.isdir(candidate):
                candidate = os.path.join(candidate, "__init__.py")
            if os.path.exists(candidate):
                target_file = candidate
            elif not libname.lower().endswith(".py"):
                candidate += ".py"
                if os.path.exists(candidate):
                    target_file = candidate

    if target_file is None and libname.endswith((".py", ".class", ".java")):
        # https://github.com/robocorp/robotframework-lsp/issues/266
        # If the user specifies a file, we don't just search the current
        # relative folder, we also need to search for relative entries
        # in the whole PYTHONPATH.
        for folder in itertools.chain(
            self._additional_pythonpath_folder_to_folder_info.keys(),
            self._pythonpath_folder_to_folder_info.keys(),
        ):
            candidate = os.path.join(folder, libname)
            if os.path.exists(candidate):
                target_file = candidate
                break

    return target_file
def get_library_doc_or_error(
    self,
    libname: str,
    create: bool,
    completion_context: ICompletionContext,
    builtin: bool = False,
    args: Optional[str] = None,
) -> ILibraryDocOrError:
    """
    Find (or, when ``create`` is True, generate) the library documentation.

    :param libname:
        It may be a library name, a relative path to a .py file or an
        absolute path to a .py file.
    :param create:
        When True and no up-to-date spec is found, the libspec is generated
        and this method recurses once with create=False.
    :param args:
        Library instantiation args; only honored for libraries listed in
        OPTION_ROBOT_LIBRARIES_LIBDOC_NEEDS_ARGS (or '*').
    """
    from robotframework_ls.impl import robot_constants
    from robotframework_ls.impl import ast_utils

    libname_lower = libname.lower()
    target_file: str = ""
    normalized_target_file: str = ""
    pre_error_msg: str = ""

    config = self.config
    libraries_libdoc_needs_args_lower: Set[str]
    if config is not None:
        libraries_libdoc_needs_args_lower = set(
            str(x).lower()
            for x in config.get_setting(
                OPTION_ROBOT_LIBRARIES_LIBDOC_NEEDS_ARGS,
                list,
                ["remote", "fakerlib"],
            )
        )
    else:
        libraries_libdoc_needs_args_lower = {"remote", "fakerlib"}

    # Note that experimentally, using '*' may pass args to all libraries.
    if (
        libname_lower not in libraries_libdoc_needs_args_lower
        and "*" not in libraries_libdoc_needs_args_lower
    ):
        args = None

    if args:
        if "{" in args:
            # We need to resolve the arguments if there are variables in it.
            from robotframework_ls.impl.variable_resolve import (
                ResolveVariablesContext,
            )

            assert completion_context.config is config
            args, unresolved = ResolveVariablesContext(
                completion_context
            ).token_value_and_unresolved_resolving_variables(
                ast_utils.create_token(args)
            )
            # Keep unresolved-variable info to prefix any follow-up error.
            pre_error_msg = (
                "It was not possible to statically resolve the following variables:\n%s\nFollow-up error:\n"
                % (", ".join(str(x[0]) for x in unresolved),)
            )
        args = args.replace("\\\\", "\\")

    if not builtin:
        # Try to pin the library to a concrete file; if that fails, it may
        # still be a standard library referenced by name.
        found_target_filename = self._get_library_target_filename(
            libname, completion_context.doc.uri
        )
        if found_target_filename:
            target_file = found_target_filename
            normalized_target_file = normalize_filename(target_file)
        else:
            builtin = libname in robot_constants.STDLIBS

    if libname_lower.endswith((".py", ".class", ".java")):
        libname_lower = os.path.splitext(libname_lower)[0]

    if "/" in libname_lower or "\\" in libname_lower:
        libname_lower = os.path.basename(libname_lower)

    lib_info: _LibInfo
    for lib_info in self.iter_lib_info(builtin=builtin):
        library_doc = lib_info.library_doc
        # If it maps to a file in the filesystem, that's what we need to match,
        # otherwise, match just by its name.
        # Note: this is only valid for the cases where we can regenerate the info
        # for cases where this information is builtin, only match by the name.
        if target_file and lib_info._can_regenerate:
            if args:
                # With args, the spec filename embeds both digests.
                digest = (
                    get_digest_from_string(target_file)
                    + "_"
                    + get_digest_from_string(args)
                )
                found = library_doc.filename.endswith(digest + ".libspec")
            else:
                found = bool(
                    library_doc.source
                    and normalize_filename(library_doc.source)
                    == normalized_target_file
                )
                if not found:
                    try:
                        found = bool(
                            library_doc.source
                            and os.path.samefile(library_doc.source, target_file)
                        )
                    except:
                        # os.path.samefile touches the filesystem, so, it can
                        # raise an exception.
                        found = False
        else:
            if not args:
                found = bool(
                    library_doc.name and library_doc.name.lower() == libname_lower
                )
            else:
                digest = get_digest_from_string(args)
                found = library_doc.filename.endswith(
                    os.path.normcase(libname + digest + ".libspec")
                )

        if found:
            if not lib_info.verify_sources_sync():
                if create:
                    # Found but it's not in sync. Try to regenerate (don't proceed
                    # because we don't want to match a lower priority item, so,
                    # regenerate and get from the cache without creating).
                    self._do_create_libspec_on_get(
                        libname, target_file, args, is_builtin=builtin
                    )

                    # Note: get even if it if was not created (we may match
                    # a lower priority library).
                    return self.get_library_doc_or_error(
                        libname,
                        create=False,
                        completion_context=completion_context,
                        builtin=builtin,
                        args=args,
                    )
                else:
                    # Not in sync and it should not be created, just skip it.
                    continue
            else:
                return _LibraryDocOrError(library_doc, None)

    if create:
        # Nothing matched: try to generate it and retry without creating.
        error_msg = self._do_create_libspec_on_get(
            libname, target_file, args, is_builtin=builtin
        )
        if error_msg is None:
            return self.get_library_doc_or_error(
                libname,
                create=False,
                completion_context=completion_context,
                builtin=builtin,
                args=args,
            )
        return _LibraryDocOrError(None, pre_error_msg + error_msg)

    error_msg = self._get_cached_error(
        libname, is_builtin=builtin, target_file=target_file, args=args
    )
    if error_msg:
        log.debug("Unable to get library named: %s. Reason: %s", libname, error_msg)
        return _LibraryDocOrError(None, pre_error_msg + error_msg)

    msg = f"Unable to find library named: {libname}"
    log.debug(msg)
    return _LibraryDocOrError(None, pre_error_msg + msg)
class _LibraryDocOrError:
    """Result holder: a loaded library doc, or the error message explaining
    why it could not be obtained."""

    def __init__(self, library_doc: Optional[ILibraryDoc], error: Optional[str]):
        self.library_doc = library_doc
        self.error = error

    def __typecheckself__(self) -> None:
        # Static verification that this class implements ILibraryDocOrError.
        from robocorp_ls_core.protocols import check_implements

        _: ILibraryDocOrError = check_implements(self)
|
17,510 | aa04c0ec7d99476296c0d240a109af8d4f695aae | #
# $Header: /projects/wlv/iphy/sos/repository/iphy_A.rep/TEMP/case_1285613542/#alidation#iphy-gui#gui#lib#devices#rto.py,v 1.1 2011-09-02 11:43:53-07 case Exp $
# Old Header: /inphi/inlab/pylab/devices/rto.py,v 1.3 2008/07/23 02:09:06 rbemra Exp
# $Log: #alidation#iphy-gui#gui#lib#devices#rto.py,v $
# Revision 1.1 2011-09-02 11:43:53-07 case
# ...No comments entered during checkin...
#
# Revision 1.1 2011-05-04 18:10:35-07 rbemra
# Initial python+pyvisa for instrumentation (from ExacTik/882)
#
# Revision 1.3 2008/07/23 02:09:06 rbemra
# inlab -> inlab/pylab, just pylab w.r.t. python pkg
#
# Revision 1.2 2008/07/21 19:34:51 rbemra
# Removed CR chars added during cvs import
#
# Revision 1.1.1.1 2008/07/21 18:20:28 rbemra
# Initial Version
#
#
# Real-time Oscilloscope remote interface
# Packaged under inlab 7/21/2008: RSB
# First Rev.: Sept., 2007: RSB
#
#
import time
from labdev import *
#------------------------------------------------------------------------------
class RTO:
    """Real-time oscilloscope constants and id <-> SCPI-token translations.

    Each ``*_name`` method maps one of the numeric ids defined on this class
    to the token used in the remote-command strings (unknown ids map to ""
    unless the original used direct list indexing). The reverse lookups
    (``time_ref``, ``oper``, ``trig``, ``trigswp``) go through
    ``labdev.get_index`` on the corresponding list.
    """

    # common commands
    CMD_CLS = 0
    CMD_AUTO = 1
    CMD_RST = 2
    CMD_TRG = 3
    CMD_RCL = 4
    CMD_STOP = 5
    CMD_RUN = 6
    CMD_SAV = 7
    CMD_CDIS = 8

    _cmd_names = {
        0: "*CLS",
        1: ":AUT",
        2: "*RST",
        3: "*TRG",
        4: "*RCL",
        5: ":STOP",
        6: ":RUN",
        7: "*SAV",
        8: ":CDIS",
    }

    def cmd_name(self, val):
        """SCPI token for a CMD_* id ("" for unknown ids)."""
        return self._cmd_names.get(val, "")

    # time reference
    TIME_REF_LEFT = 0
    TIME_REF_CENT = 1
    TIME_REF_RIGHT = 2
    time_ref_L = ["LEFT", "CENTER", "RIGHT"]

    def time_ref_name(self, val):
        """Time-reference token for a TIME_REF_* id."""
        return self.time_ref_L[val]

    def time_ref(self, name):
        """Reverse lookup: token -> TIME_REF_* id."""
        return get_index(self.time_ref_L, name, False)

    # threshold type
    THR_STAN = 0
    THR_VOLT = 1
    THR_PERC = 2
    _thr_names = {0: "STAN", 1: "VOLT", 2: "PERC"}

    def thr_name(self, val):
        return self._thr_names.get(val, "")

    # TopBase type
    TB_STAN = 0
    TB_MINMAX = 1
    TB_HIST = 2
    TB_CUSTOM = 3
    # TB_CUSTOM (and any unknown value) deliberately maps to "".
    _tb_names = {0: "STAN", 1: "MINMAX", 2: "HISTONLY"}

    def tb_name(self, val):
        return self._tb_names.get(val, "")

    # edge direction
    DIR_RISE = 0
    DIR_FALL = 1
    DIR_EITHER = 2
    _dir_names = {0: "RIS", 1: "FALL", 2: "EITH"}

    def dir_name(self, val):
        return self._dir_names.get(val, "")

    # trigger slope
    SLP_POS = 0
    SLP_NEG = 1
    DIR_EITH = 2
    _slp_names = {0: "POS", 1: "NEG"}

    def slp_name(self, val):
        return self._slp_names.get(val, "")

    # position on edge
    POS_UPP = 0
    POS_MID = 1
    POS_LOW = 2
    _pos_names = {0: "UPP", 1: "MIDD", 2: "LOW"}

    def pos_name(self, val):
        return self._pos_names.get(val, "")

    # measurement source
    SRC_CHAN = 0
    SRC_FUNC = 1
    SRC_WMEM = 2
    _src_names = {0: "CHAN", 1: "FUNC", 2: "WMEM"}

    def src_name(self, val):
        return self._src_names.get(val, "")

    # Math operators
    OP_INV = 0
    OP_AVER = 1
    OP_ADD = 2
    OP_SUBT = 3
    OP_COMM = 4
    OP_DIFF = 5
    OP_DIV = 6
    OP_FFTM = 7
    OP_FFTP = 8
    OP_MULT = 9
    op_L = ["INVERT", "AVERAGE", "ADD", "SUBTRACT", "COMMONMODE",
            "DIFF", "DIVIDE", "FFTM", "FFTP", "MULTIPLY"]

    def op_name(self, val):
        """Operator token for an OP_* id (raises IndexError for unknown ids)."""
        return self.op_L[val]

    def oper(self, name):
        """Reverse lookup: operator token -> OP_* id."""
        return get_index(self.op_L, name, False)

    # File types
    FTYP_WFM = 0
    FTYP_CSV = 1
    FTYP_TSV = 2
    FTYP_TXT = 3
    FTYP_SET = 4
    _ftyp_names = {0: "wfm", 1: "csv", 2: "tsv", 3: "txt", 4: "set"}

    def ftyp_name(self, val):
        return self._ftyp_names.get(val, "")

    # Trigger modes
    TRIG_EDGE = 0
    TRIG_GLITCH = 1
    TRIG_ADV_COMM = 2
    TRIG_ADV_DELAY = 3
    TRIG_ADV_PATT = 4
    TRIG_ADV_STATE = 5
    TRIG_ADV_VIOL = 6
    trig_L = ["EDGE", "GLITCH", "COMM", "DELAY", "PATT",
              "STATE", "VIOL"]

    def trig_name(self, val):
        return self.trig_L[val]

    def trig(self, name):
        return get_index(self.trig_L, name, False)

    # Trigger sweep modes
    TRIGSWP_AUTO = 0
    TRIGSWP_TRIG = 1
    TRIGSWP_1SHOT = 2
    trigswp_L = ["AUTO", "TRIG", "SINGLE"]

    def trigswp_name(self, val):
        return self.trigswp_L[val]

    def trigswp(self, name):
        return get_index(self.trigswp_L, name, False)

    # constants
    V_OVFLOW = 100.0  # it's +/-5V for Agilent, clipping gives 9.9999e+37
    T_OVFLOW = 1.0  # some big time value
#---------------------------------------------------------------------------------
RTO1 = RTO() # global single instance: shared by the module-level functions below
#
# COMMON/ROOT COMMANDS
#---------------------------------------------------------------------------------
def send_cmd(scope, cmd, intArg=None, cmdStrg=None):
    """Send a common/root command to the scope.

    :param cmd: one of the RTO.CMD_* ids (used when cmdStrg is None).
    :param intArg: optional integer argument appended after the command name.
    :param cmdStrg: literal command string overriding the RTO.CMD_* lookup.
    """
    # Fix: use identity checks ('is not None') instead of '!= None' and fold
    # the duplicated write logic into a single branch on intArg.
    name = cmdStrg if cmdStrg is not None else RTO1.cmd_name(cmd)
    if intArg is not None:
        scope.viDev.write("%s %d" % (name, intArg))
    else:
        scope.viDev.write(name)
#---------------------------------------------------------------------------------
#
# CHANNEL/FUNCTION COMMANDS
#---------------------------------------------------------------------------------
def set_display(scope, chnl, OnOff=0, src=RTO.SRC_CHAN):
    """Turn the display of a channel/function/memory trace on (1) or off (0)."""
    scope.viDev.write(":%s%d:DISP %d" % (RTO1.src_name(src), chnl, OnOff))
#---------------------------------------------------------------------------------
def define_func(scope, num, operator, oper1, oper2, src1=RTO.SRC_CHAN, src2=RTO.SRC_CHAN):
    """Define math function ``num`` as ``operator`` applied to one or two sources.

    The second operand is omitted for the unary operators (invert, average,
    FFT magnitude/phase).
    """
    cmd = ":FUNC%d:%s %s%d" % (num, RTO1.op_name(operator), RTO1.src_name(src1), oper1)
    if operator not in (RTO.OP_INV, RTO.OP_AVER, RTO.OP_FFTM, RTO.OP_FFTP):
        cmd += ",%s%d" % (RTO1.src_name(src2), oper2)
    scope.viDev.write(cmd)
#---------------------------------------------------------------------------------
def set_vrange(scope, chnl, vRng=None, vOffset=None, src=RTO.SRC_CHAN):
    """Set the vertical range and/or offset of a channel or function trace.

    Parameters left as None are not written; unsupported src values are a
    silent no-op (matching the original behavior).
    """
    # Fix: replaced '!= None' comparisons with 'is not None' identity checks.
    if src == RTO.SRC_CHAN:
        if vRng is not None:
            scope.viDev.write(":CHAN%d:RANG %e" % (chnl, vRng))
        if vOffset is not None:
            # NOTE(review): no leading ':' in this command in the original —
            # verify against the instrument's command set.
            scope.viDev.write("CHAN%d:OFFSET %e" % (chnl, vOffset))
    elif src == RTO.SRC_FUNC:
        if vRng is not None:
            scope.viDev.write(":FUNC%d:VERT:RANG %e" % (chnl, vRng))
        if vOffset is not None:
            scope.viDev.write("FUNC%d:VERT:OFFSET %e" % (chnl, vOffset))
#---------------------------------------------------------------------------------
def get_vrange(scope, chnl, src=RTO.SRC_CHAN):
    """Return [vertical range, offset] for the trace; [] when src is unsupported."""
    if src == RTO.SRC_CHAN:
        queries = (":CHAN%d:RANG?" % chnl, ":CHAN%d:OFFS?" % chnl)
    elif src == RTO.SRC_FUNC:
        queries = (":FUNC%d:VERT:RANG?" % chnl, ":FUNC%d:VERT:OFFS?" % chnl)
    else:
        return []
    return [float(scope.viDev.ask(q)) for q in queries]
#---------------------------------------------------------------------------------
#
# Adjust to whole mV range, so that /div scale is whole mV
#
def set_vrange_min_max(scope, chnl, vMin, vMax, src=RTO.SRC_CHAN):
    """Set the vertical window to cover [vMin, vMax].

    The span is rounded UP to a whole number of millivolts divisible by 8
    (the number of divisions) so the per-division scale is a whole mV.
    """
    mvRng = int(1000 * (vMax - vMin))
    # Fix: round up to the next multiple of 8. The original 'mvRng + mvRng%8'
    # only lands on a multiple of 8 when the remainder is 0 or 4.
    mvRng += (-mvRng) % 8
    set_vrange(scope, chnl, 0.001 * mvRng, 0.5 * (vMin + vMax), src)
#---------------------------------------------------------------------------------
def get_vrange_min_max(scope, chnl, src=RTO.SRC_CHAN):
    """Return [vMin, vMax] derived from the trace's current range and offset."""
    vRng, vOff = get_vrange(scope, chnl, src)
    half = 0.5 * vRng
    return [vOff - half, vOff + half]
#---------------------------------------------------------------------------------
# Vertical Auto-scale
# Get [range, offset]
# get vMax, if MAXV, keep inc.ing offset by volts/div until vMax!=MAXV
# get vMin, if MAXV, keep dec.ing offset by volts/div until vMax!=MAXV
# Then use set_vrange_min_max()
#
OFFSET_MAX = 5.0  # max. allowed +/-offset we will expect
MAX_OFF_ITER = 20  # max offset-walk iterations before falling back to rescaling
SCALE_FACTOR = 2.0  # range multiplier used when expanding the vertical range
MAX_SCALE_ITER = 5  # 1/2^5 = 1/32
def auto_vrange(scope, chnl, src=RTO.SRC_CHAN):
    """Auto-scale the vertical range of a channel/function trace.

    Walks the offset one division at a time (and, failing that, doubles the
    range) until VMAX and VMIN stop clipping, then applies the final window
    with a margin via set_vrange_min_max. Returns True on success, False when
    VMAX/VMIN could not be found.
    """
    set_display(scope, chnl, 1, src)
    [vRange, vOff] = get_vrange(scope, chnl, src)
    vOff1 = vOff
    offDelta = vRange/8.0
    nIter = 0
    stat = False
    restore = True
    # Phase 1: find a non-clipped VMAX by shifting the offset.
    while True:
        [vMax] = meas_single(scope, "VMAX", chnl, stat=False, nMeas=1, src=src)
        if vMax < RTO.V_OVFLOW:
            stat = True
            break  # done
        if nIter > MAX_OFF_ITER:
            break
        vOff1 += offDelta
        if vOff1 > OFFSET_MAX:  # maybe we're pushing offset in opp. direction
            set_vrange(scope, chnl, vRange, vOff, src)
            nIter = 0  # start again
            offDelta = -offDelta
            continue
        elif vOff1 < -OFFSET_MAX:
            break
        set_vrange(scope, chnl, vRange, vOff1, src)
        nIter += 1
    if not stat and nIter > MAX_OFF_ITER:
        # Offset walking failed: progressively expand the range instead.
        nIter = 0
        vRange1 = vRange
        while True:
            vRange1 = SCALE_FACTOR*vRange1
            # Fix: pass src as a keyword; positionally the src id (0/1/2)
            # would land in set_vrange's vOffset parameter and be written to
            # the instrument as a voltage offset.
            set_vrange(scope, chnl, vRange1, src=src)
            [vMax] = meas_single(scope, "VMAX", chnl, stat=False, nMeas=1, src=src)
            if vMax < RTO.V_OVFLOW:
                stat = True
                restore = False
                break  # done
            if nIter > MAX_SCALE_ITER:
                break
            nIter += 1
    if not stat:
        print("* * Error * *: could not find VMAX")
        return stat
    if restore:
        set_vrange(scope, chnl, vRange, vOff, src)  # restore
    # Repeat for vMin
    vOff1 = vOff
    offDelta = vRange/8.0
    nIter = 0
    stat = False
    while True:
        [vMin] = meas_single(scope, "VMIN", chnl, stat=False, nMeas=1, src=src)
        if vMin < RTO.V_OVFLOW:
            stat = True
            break  # done
        if nIter > MAX_OFF_ITER:
            break
        vOff1 -= offDelta
        if vOff1 < -OFFSET_MAX:  # maybe we're pushing offset in opp. direction
            set_vrange(scope, chnl, vRange, vOff, src)
            offDelta = -offDelta
            continue
        elif vOff1 > OFFSET_MAX:
            stat = False
            break
        set_vrange(scope, chnl, vRange, vOff1, src)
        nIter += 1
    if not stat and nIter > MAX_OFF_ITER:
        # (Removed dead local 'sFactor' from the original.)
        nIter = 0
        vRange1 = vRange
        while True:
            vRange1 = SCALE_FACTOR*vRange1
            set_vrange(scope, chnl, vRange1, src=src)  # fix: src as keyword (see above)
            [vMin] = meas_single(scope, "VMIN", chnl, stat=False, nMeas=1, src=src)
            if vMin < RTO.V_OVFLOW:
                stat = True
                break  # done
            if nIter > MAX_SCALE_ITER:
                break
            nIter += 1
    if not stat:
        print("* * Error * *: could not find VMIN")
        return stat
    vDel = (vMax-vMin)*0.05  # 5% margin each side (~10% total added to range)
    set_vrange_min_max(scope, chnl, vMin-vDel, vMax+vDel, src)  # this v-autoscales chnl
    return True
#---------------------------------------------------------------------------------
#
# ACQUISITION/DISPLAY COMMANDS
#---------------------------------------------------------------------------------
def set_aver(scope, OnOff=0, Count=1):
    """Enable/disable acquisition averaging; set the average count when enabling."""
    dev = scope.viDev
    dev.write(":ACQ:AVER %d" % OnOff)
    if not OnOff:
        return
    dev.write("ACQ:COUN %d" % Count)  # note: this command string has no leading ':' in the original
#---------------------------------------------------------------------------------
def set_srate(scope, doAuto=True, sRate=None):
    """Configure the acquisition sample rate.

    sRate   -- explicit sample rate in Sa/s; takes precedence when given.
    doAuto  -- when no explicit rate: True selects automatic rate,
               False selects the scope's maximum rate.
    """
    # Idiom fix: compare against None with 'is not', not '!='.
    if sRate is not None:
        scope.viDev.write(":ACQ:SRATE %e" % sRate)
    elif not doAuto:
        scope.viDev.write(":ACQ:SRATE MAX")
    else:
        scope.viDev.write(":ACQ:SRATE:AUTO 1")
#---------------------------------------------------------------------------------
def set_points(scope, doAuto=True, nPts=None):
    """Configure the number of acquisition points.

    nPts   -- explicit memory depth; takes precedence when given.
    doAuto -- when no explicit depth: toggle the scope's automatic choice.
    """
    # Idiom fix: 'is not None' comparison; conditional expression for the flag.
    if nPts is not None:
        scope.viDev.write(":ACQ:POIN %d" % nPts)
    else:
        scope.viDev.write(":ACQ:POIN:AUTO %d" % (1 if doAuto else 0))
#---------------------------------------------------------------------------------
#
# TIMEBASE COMMANDS
#---------------------------------------------------------------------------------
def set_timebase_range(scope, tRng, refPos=RTO.TIME_REF_CENT):
    """Set the full-screen timebase range (seconds) and the time reference position."""
    dev = scope.viDev
    dev.write(":TIM:RANG %e" % tRng)
    dev.write(":TIM:REF %s" % RTO1.time_ref_name(refPos))
#---------------------------------------------------------------------------------
def set_timebase_pos(scope, tPos):
    """Set the horizontal (timebase) position/delay, in seconds."""
    scope.viDev.write(":TIM:POS %e"%tPos)
#---------------------------------------------------------------------------------
def get_trange(scope):
    """Query and return [timebase_range_seconds, time_reference_enum]."""
    rng = float(scope.viDev.ask(":TIM:RANG?"))
    ref = RTO1.time_ref(scope.viDev.ask(":TIM:REF?"))
    return [rng, ref]
#---------------------------------------------------------------------------------
#
def meas_period(scope, chnl, stat=False, nMeas=1,
                src=RTO.SRC_CHAN, dir=RTO.DIR_RISE, doAppend=False):
    """Measure the signal period on src+chnl.

    With stat=True, enables measurement statistics and polls :MEAS:RES?
    until at least nMeas acquisitions have accumulated, returning
    [Curr, Min, Max, Mean, sDev, nMeas]; otherwise returns the single
    current value [Curr].  doAppend=True keeps previously configured
    measurements instead of clearing them first.
    NOTE: parameter 'dir' shadows the builtin; kept for interface stability.
    """
    if (stat and (not doAppend)): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:CLE") # clear out measmts
    nList = [] # numeric returned list
    srcName = RTO1.src_name(src)
    dirName = RTO1.dir_name(dir)
    set_display(scope, chnl, 1, src)
    scope.viDev.write(":MEAS:SOUR %s%d"%(srcName, chnl))
    if (stat): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:STAT ON")
        scope.viDev.write(":MEAS:PER %s%d, %s"%(srcName, chnl, dirName))
        time.sleep(1.0)  # pause a bit
        # wait till >= nMeas acquisitions complete
        mCount = 0
        while (mCount < nMeas):
            time.sleep(1.0)  # pause a bit
            vResult = scope.viDev.ask_for_values(":MEAS:RES?") # 1.0,Curr,Min,Max,Mean,sDev,nMeas
            # sResult = scope.viDev.ask(":MEAS:RES?") # Name,Curr,Min,Max,Mean,sDev,nMeas
            # sList = sResult.split(',')
            # Current value at/over T_OVFLOW means no valid period was found:
            # mark every statistic as overflow and report zero measurements.
            if vResult[1] >= RTO.T_OVFLOW: # make all measVals=OVFLOW, nMeas=0
                for k in range(1, 6):
                    vResult[k] = RTO.T_OVFLOW
                vResult[6] = 0.
                break
            mCount = vResult[6]
        for k in range(1, 7):
            nList.append(vResult[k])
    else: # current query
        nList.append(float(scope.viDev.ask(":MEAS:PER? %s%d, %s"%(srcName, chnl, dirName))))
    return nList # Numeric list: [Curr, Min, Max, Mean, sDev, nMeas]
#---------------------------------------------------------------------------------
#
# MEASURE COMMANDS
#---------------------------------------------------------------------------------
def meas_vrange_min_max(scope, chnl, src=RTO.SRC_CHAN):
    """Measure and return [VMIN, VMAX] of the given source/channel."""
    srcName = RTO1.src_name(src)
    vMin = float(scope.viDev.ask(":MEAS:VMIN? %s%d" % (srcName, chnl)))
    vMax = float(scope.viDev.ask(":MEAS:VMAX? %s%d" % (srcName, chnl)))
    return [vMin, vMax]
#---------------------------------------------------------------------------------
# Use this with measCmd =
# "VMAX", "VMIN", "VTOP", "VBAS", "VLOW", "VUPP", "VPP", "VRMS", "VAMP"
# returns numeric list: [Curr] or [Curr, Min, Max, Mean, sDev, nMeas]
#
def meas_single(scope, measCmd, chnl, stat=False, nMeas=1,
                src=RTO.SRC_CHAN, doAppend=False):
    """Run one scalar voltage measurement on src+chnl.

    measCmd is one of "VMAX", "VMIN", "VTOP", "VBAS", "VLOW", "VUPP",
    "VPP", "VRMS", "VAMP".  Returns [Curr], or with stat=True
    [Curr, Min, Max, Mean, sDev, nMeas] once at least nMeas acquisitions
    have accumulated.  doAppend=True keeps previously configured
    measurements instead of clearing them first.
    """
    if (stat and (not doAppend)): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:CLE") # clear out measmts
    nList = [] # numeric returned list
    srcName = RTO1.src_name(src)
    scope.viDev.write(":MEAS:SOUR %s%d"%(srcName, chnl))
    if (stat): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:STAT ON")
        scope.viDev.write(":MEAS:%s %s%d"%(measCmd, srcName, chnl))
        # wait till >= nMeas acquisitions complete
        mCount = 0
        while (mCount < nMeas):
            time.sleep(1.0)  # pause a bit
            sResult = scope.viDev.ask(":MEAS:RES?")
            sList = sResult.split(',')  # Name,Curr,Min,Max,Mean,sDev,nMeas
            mCount = float(sList[6])
        for k in range(1, 7):
            nList.append(float(sList[k]))
    else: # current query
        nList.append(float(scope.viDev.ask(":MEAS:%s? %s%d"%(measCmd, srcName, chnl))))
    return nList # Numeric list: [Curr, Min, Max, Mean, sDev, nMeas]
#---------------------------------------------------------------------------------
# returns numeric list: [Curr] or [Curr, Min, Max, Mean, sDev, nMeas]
#
def meas_vavg(scope, chnl, stat=False, nMeas=1,
              src=RTO.SRC_CHAN, doAppend=False):
    """Measure the average voltage (VAVerage over the display) on src+chnl.

    Returns [Curr], or with stat=True [Curr, Min, Max, Mean, sDev, nMeas]
    once at least nMeas acquisitions have accumulated.  doAppend=True keeps
    previously configured measurements instead of clearing them first.
    """
    if (stat and (not doAppend)): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:CLE") # clear out measmts
    nList = [] # numeric returned list
    srcName = RTO1.src_name(src)
    scope.viDev.write(":MEAS:SOUR %s%d"%(srcName, chnl))
    if (stat): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:STAT ON")
        scope.viDev.write(":MEAS:VAV DISP, %s%d"%(srcName, chnl))
        # wait till >= nMeas acquisitions complete
        mCount = 0
        while (mCount < nMeas):
            time.sleep(1.0)  # pause a bit
            sResult = scope.viDev.ask(":MEAS:RES?")
            sList = sResult.split(',')  # Name,Curr,Min,Max,Mean,sDev,nMeas
            mCount = float(sList[6])
        for k in range(1, 7):
            nList.append(float(sList[k]))
    else: # current query
        nList.append(float(scope.viDev.ask(":MEAS:VAV? DISP, %s%d"%(srcName, chnl))))
    return nList # Numeric list: [Curr, Min, Max, Mean, sDev, nMeas]
#---------------------------------------------------------------------------------
#
def meas_vamp(scope, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN, doAppend=False):
    """Measure VAMP (amplitude) on src+chnl; see meas_single for the return format.

    Bug fix: doAppend is now forwarded to meas_single -- previously it was
    dropped, so doAppend=True still cleared accumulated measurements.
    """
    return meas_single(scope, "VAMP", chnl, stat, nMeas, src, doAppend)
#---------------------------------------------------------------------------------
#
def meas_vtop(scope, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN, doAppend=False):
    """Measure VTOP (top level) on src+chnl; see meas_single for the return format.

    Bug fix: doAppend is now forwarded to meas_single -- previously it was
    dropped, so doAppend=True still cleared accumulated measurements.
    """
    return meas_single(scope, "VTOP", chnl, stat, nMeas, src, doAppend)
#---------------------------------------------------------------------------------
#
def meas_vbase(scope, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN, doAppend=False):
    """Measure VBASe (base level) on src+chnl; see meas_single for the return format.

    Bug fix: doAppend is now forwarded to meas_single -- previously it was
    dropped, so doAppend=True still cleared accumulated measurements.
    """
    return meas_single(scope, "VBAS", chnl, stat, nMeas, src, doAppend)
#---------------------------------------------------------------------------------
# Measure voltage in chnl at time tVal
# returns numeric list: [Curr] or [Curr, Min, Max, Mean, sDev, nMeas]
#
def meas_vtime(scope, tVal, chnl, stat=False, nMeas=1,
               src=RTO.SRC_CHAN, doAppend=False):
    """Measure the voltage of src+chnl at time tVal (seconds).

    Returns [Curr], or with stat=True [Curr, Min, Max, Mean, sDev, nMeas]
    once at least nMeas acquisitions have accumulated.  doAppend=True keeps
    previously configured measurements instead of clearing them first.
    """
    if (stat and (not doAppend)): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:CLE") # clear out measmts
    nList = [] # numeric returned list
    srcName = RTO1.src_name(src)
    scope.viDev.write(":MEAS:SOUR %s%d"%(srcName, chnl))
    if (stat): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:STAT ON")
        scope.viDev.write(":MEAS:VTIM %e, %s%d"%(tVal, srcName, chnl))
        # wait till >= nMeas acquisitions complete
        mCount = 0
        while (mCount < nMeas):
            time.sleep(1.0)  # pause a bit
            sResult = scope.viDev.ask(":MEAS:RES?")
            sList = sResult.split(',')  # Name,Curr,Min,Max,Mean,sDev,nMeas
            mCount = float(sList[6])
        for k in range(1, 7):
            nList.append(float(sList[k]))
    else: # current query
        nList.append(float(scope.viDev.ask(":MEAS:VTIM? %e, %s%d"%(tVal, srcName, chnl))))
    return nList # Numeric list: [Curr, Min, Max, Mean, sDev, nMeas]
#---------------------------------------------------------------------------------
# Return list of displayed measmt.s, each entry being a list of 6 values:
# [Curr,Min,Max,Mean,sDev,nMeas]
#
def meas_results(scope):
    """Return the list of currently displayed measurement results.

    The :MEAS:RES? reply is comma-separated with 7 fields per measurement
    (Name,Curr,Min,Max,Mean,sDev,nMeas); the name is dropped, so each
    returned entry is [Curr, Min, Max, Mean, sDev, nMeas] as floats.

    Cleanup: removed the unused local 'nMeas' from the original.
    """
    sResult = scope.viDev.ask(":MEAS:RES?")
    sList = sResult.split(',')  # Name,Curr,Min,Max,Mean,sDev,nMeas per measurement
    mList = []
    for kMeas in range(0, len(sList)-1, 7):
        mList.append([float(sList[kMeas+k]) for k in range(1, 7)])
    return mList
#---------------------------------------------------------------------------------
def set_trigger(scope, chnl, level=None, edge=RTO.SLP_POS):
    """Configure an edge trigger on chnl; optionally set the trigger level (volts)."""
    sEdge = RTO1.slp_name(edge)
    if level is None:
        scope.viDev.write(":TRIG:EDGE:SOUR CHAN%d;SLOP %s"%(chnl, sEdge))
    else:
        scope.viDev.write(":TRIG:LEV CHAN%d, %e;EDGE:SOUR CHAN%d;SLOP %s"%
                          (chnl, level, chnl, sEdge))
#---------------------------------------------------------------------------------
def set_trigswp(scope, mode=RTO.TRIGSWP_AUTO):
    """Set the trigger sweep mode."""
    scope.viDev.write(":TRIG:SWE %s" % RTO1.trigswp_name(mode))
#---------------------------------------------------------------------------------
def set_viol_width_trigger(scope, chnl, width, posPol=True, dirWide=True):
    """Configure a pulse-width violation trigger on chnl.

    posPol  -- True = positive polarity, False = negative.
    dirWide -- True = trigger on pulses wider than 'width' (GTH),
               False = narrower (LTH).
    """
    sTrig = RTO1.trig_name(RTO.TRIG_ADV_VIOL)  # kept from original (result unused)
    sPole = "POS" if posPol else "NEG"
    sDir = "GTH" if dirWide else "LTH"
    base = ":TRIG:ADV:VIOL:PWID:"
    scope.viDev.write(base + "SOUR CHAN%d" % chnl)
    scope.viDev.write(base + "POL " + sPole)
    scope.viDev.write(base + "DIR " + sDir)
    scope.viDev.write(base + "WIDT %e" % width)
#---------------------------------------------------------------------------------
# Define measmt. threshold triple for given or all channel(s)
# thrList = [upVal, midVal, loVal]
#
def define_thr(scope, thrType, thrList=None, chnl=None, src=RTO.SRC_CHAN):
    """Define measurement thresholds for one channel (or all when chnl is None).

    thrList = [upper, middle, lower]; ignored for THR_STAN.
    """
    if chnl is None:
        target = "ALL"
    else:
        target = "%s%d" % (RTO1.src_name(src), chnl)
    thrName = RTO1.thr_name(thrType)
    if thrType == RTO.THR_STAN: # STANdard
        tail = "%s,%s" % (thrName, target)
    else:
        tail = "%s,%e,%e,%e, %s" % (thrName, thrList[0], thrList[1], thrList[2], target)
    scope.viDev.write(":MEAS:DEF THR, " + tail)
#---------------------------------------------------------------------------------
# Define measmt. topbase pair for given or all channel(s)
# tbList = [topVolts, baseVolts]
#
def define_topbase(scope, tbType, tbList=None, chnl=None, src=RTO.SRC_CHAN):
    """Define measurement top/base levels for one channel (or all when chnl is None).

    tbList = [topVolts, baseVolts]; used only for TB_CUSTOM.
    """
    if chnl is None:
        target = "ALL"
    else:
        target = "%s%d" % (RTO1.src_name(src), chnl)
    tbName = RTO1.tb_name(tbType)
    if tbType == RTO.TB_CUSTOM:
        tail = "%e,%e,%s" % (tbList[0], tbList[1], target)
    else:
        tail = "%s,%s" % (tbName, target)
    scope.viDev.write(":MEAS:DEF TOPB, " + tail)
#---------------------------------------------------------------------------------
#
# List = [DIR_RISE|DIR_FALL|DIR_EITHER, edgeNum, POS_UPP|POS_MID|POS_LOW]
#
def define_deltime(scope, begList, endList):
    """Define the start/stop edges of the delta-time measurement.

    Each list: [DIR_RISE|DIR_FALL|DIR_EITHER, edgeNumber, POS_UPP|POS_MID|POS_LOW].
    """
    parts = [":MEAS:DEF DELT"]
    for spec in (begList, endList):
        parts.append(", %s, %d, %s" % (RTO1.dir_name(spec[0]), spec[1], RTO1.pos_name(spec[2])))
    # print("DeltDef: %s"%"".join(parts))
    scope.viDev.write("".join(parts))
#---------------------------------------------------------------------------------
# Delta-time measmt. from ch1 to ch2
# returns numeric list: [Curr] or [Curr, Min, Max, Mean, sDev, nMeas]
def meas_deltime(scope, ch1, ch2=None, stat=False, nMeas=1,
                 src1=RTO.SRC_CHAN, src2=RTO.SRC_CHAN, doAppend=False):
    """Delta-time measurement from ch1 to ch2 (single-source when ch2 is None).

    Uses the edges previously configured with define_deltime().  Returns
    [Curr], or with stat=True [Curr, Min, Max, Mean, sDev, nMeas] once at
    least nMeas acquisitions have accumulated.  doAppend=True keeps
    previously configured measurements instead of clearing them first.
    """
    if (stat and (not doAppend)): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:CLE") # clear out measmts
    nList = [] # numeric returned list
    src1Name = RTO1.src_name(src1)
    src2Name = RTO1.src_name(src2)
    mCmd = ":MEAS:SOUR %s%d"%(src1Name, ch1)
    if ch2!=None:
        mCmd += ", %s%d"%(src2Name, ch2)
    scope.viDev.write(mCmd)
    mCmd = ":MEAS:DELT"
    if (stat): # statistics on, use MEAS:RES? to get mean, min, max, rms
        # scope.viDev.write(":MEAS:CLE") # clear out measmts
        scope.viDev.write(":MEAS:STAT ON")
        mCmd += " %s%d"%(src1Name, ch1)
        if (ch2!=None):
            mCmd += ", %s%d"%(src2Name, ch2)
        scope.viDev.write(mCmd)
        # wait till >= nMeas acquisitions complete
        mCount = 0
        while (mCount < nMeas):
            time.sleep(2.0)  # pause a bit, was 1.0
            sResult = scope.viDev.ask(":MEAS:RES?")
            sList = sResult.split(',')  # Name,Curr,Min,Max,Mean,sDev,nMeas
            mCount = float(sList[6])
        for k in range(1, 7):
            nList.append(float(sList[k]))
    else: # current query
        mCmd += "? %s%d"%(src1Name, ch1)
        if (ch2!=None):
            mCmd += ", %s%d"%(src2Name, ch2)
        nList.append(float(scope.viDev.ask(mCmd)))
    return nList # Numeric list: [Curr, Min, Max, Mean, sDev, nMeas]
#---------------------------------------------------------------------------------
def get_vrng(lo, hi): # return range bracketing 100s of mV, plus 120mV
    """Return a voltage range (volts) bracketing [lo, hi] with 120 mV of margin
    on each side, rounded up so the range in mV is a multiple of 8."""
    lo_mV = floor(lo * 1000) - 120
    hi_mV = ceil(hi * 1000) + 120
    span_mV = hi_mV - lo_mV
    return ceil(float(span_mV) / 8) * 8 * 0.001
#---------------------------------------------------------------------------------
# Get the time of src+chnl signal crossing val, edgDir from display left
#
def meas_tcross(scope, val, edgDir, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN):
    """Measure the time (from display left) at which src+chnl crosses voltage
    'val' on the edge selected by edgDir (sign selects rise/fall, magnitude the
    edge number -- passed straight to :MEAS:TVOL).

    Returns [Curr], or with stat=True [Curr, Min, Max, Mean, sDev, nMeas]
    once at least nMeas acquisitions have accumulated.
    """
    if (stat): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:CLE") # clear out measmts
    nList = [] # numeric returned list
    srcName = RTO1.src_name(src)
    scope.viDev.write(":MEAS:SOUR %s%d"%(srcName, chnl))
    if (stat): # statistics on, use MEAS:RES? to get mean, min, max, rms
        scope.viDev.write(":MEAS:STAT ON")
        scope.viDev.write(":MEAS:TVOL %e, %d, %s%d"%(val, edgDir, srcName, chnl))
        # wait till >= nMeas acquisitions complete
        mCount = 0
        while (mCount < nMeas):
            time.sleep(1.0)  # pause a bit
            sResult = scope.viDev.ask(":MEAS:RES?")
            sList = sResult.split(',')  # Name,Curr,Min,Max,Mean,sDev,nMeas
            mCount = float(sList[6])
        for k in range(1, 7):
            nList.append(float(sList[k]))
    else: # current query
        nList.append(float(scope.viDev.ask(":MEAS:TVOL? %e, %d, %s%d"%(val, edgDir, srcName, chnl))))
    return nList # Numeric list: [Curr, Min, Max, Mean, sDev, nMeas]
#---------------------------------------------------------------------------------
#
# obtain pair of v-crossings of 2 channels, fNum=FUNC # for difference (chp-chn)
#
def vcross(scope, fNum, chp, chn, jpgPrefix=None):
    """Obtain the pair of crossing voltages of two channels.

    Defines FUNCtion fNum as the difference (chp - chn), auto-scales all three
    traces, then measures the voltage of chp at the mean rising and falling
    zero-crossing times of the difference (100-acquisition statistics each).
    When jpgPrefix is given, a screenshot is saved for each edge.
    Returns [vLower, vHigher] (the two crossing voltages, sorted ascending).
    """
    rList = []
    define_func(scope, fNum, RTO.OP_SUBT, chp, chn)
    auto_vrange(scope, chp)
    auto_vrange(scope, chn)
    auto_vrange(scope, fNum, RTO.SRC_FUNC)
    # set_display(scope, fNum, 1, RTO.SRC_FUNC)
    # pRng = get_vrange(scope, chp)
    # nRng = get_vrange(scope, chn)
    # set_vrange(scope, fNum, pRng[0]+nRng[0], pRng[1]-nRng[1], RTO.SRC_FUNC)
    # rising zero-xing(p-n): mean crossing time, then chp's voltage at that time
    xList = meas_tcross(scope, 0., +1, fNum, True, 100, RTO.SRC_FUNC)
    xList = meas_vtime(scope, xList[3], chp, True, 100)
    if (jpgPrefix != None):
        jpgFile = jpgPrefix + '_Rise'
        save_screen(scope, jpgFile, doAsk=False, doPause=True)
    vxRise = xList[3]
    # falling zero-xing(p-n)
    # NOTE(review): edgDir here is -2 while the rising case used +1 -- presumably
    # intentional (second falling edge?); confirm against the :MEAS:TVOL docs.
    xList = meas_tcross(scope, 0., -2, fNum, True, 100, RTO.SRC_FUNC)
    xList = meas_vtime(scope, xList[3], chp, True, 100)
    if jpgPrefix:
        jpgFile = jpgPrefix + '_Fall'
        save_screen(scope, jpgFile, doAsk=False, doPause=True)
    vxFall = xList[3]
    # return the two crossing voltages in ascending order
    if (vxRise > vxFall):
        rList.append(vxFall)
        rList.append(vxRise)
    else:
        rList.append(vxRise)
        rList.append(vxFall)
    return rList
#---------------------------------------------------------------------------------
def jitter_stat(scope, OnOff=True):
    """Enable or disable jitter statistics on the scope."""
    scope.viDev.write(":MEAS:JITT:STAT %d" % (1 if OnOff else 0))
#---------------------------------------------------------------------------------
#
# Return [jitPerMin, jitPerMax, avePer]
#
def meas_jit_per(scope, chnl, nMeas=1, src=RTO.SRC_CHAN):
    """Return [jitPerMin, jitPerMax, avePer]: period jitter extremes relative
    to the mean period, plus the mean period itself."""
    stats = meas_period(scope, chnl, True, nMeas, src)
    avePer = stats[3]
    return [stats[1] - avePer, stats[2] - avePer, avePer]
#---------------------------------------------------------------------------------
#
# Return [jitHperMin, jitHperMax]
#
def meas_jit_hper(scope, chnl, nMeas=1, src=RTO.SRC_CHAN):
    """Return [jitHperMin, jitHperMax]: half-period jitter extremes relative
    to the average half-period (half the mean full period).

    Configures three simultaneous measurements (period, +width, -width) and
    polls until each has accumulated at least nMeas acquisitions.
    """
    srcName = RTO1.src_name(src)
    scope.viDev.write(":MEAS:CLE") # clear out measmts
    scope.viDev.write(":MEAS:SOUR %s%d"%(srcName, chnl))
    # statistics on, use MEAS:RES? to get mean, min, max, rms
    scope.viDev.write(":MEAS:STAT ON")
    # Measure period, +W, -W, then min(+W,-W), max(+W, -W) for 1/2 period min/max
    scope.viDev.write(":MEAS:PER %s%d, RIS"%(srcName, chnl, ))
    scope.viDev.write(":MEAS:PWID %s%d"%(srcName, chnl))
    scope.viDev.write(":MEAS:NWID %s%d"%(srcName, chnl))
    # wait till >= nMeas acquisitions complete
    mCount = 0
    while (mCount < nMeas):
        time.sleep(1.0) # pause a bit
        nResult = scope.viDev.ask_for_values(":MEAS:RES?")
        # nResult has 7-valued triplet of -W, +W, PER in order:
        # Name,Curr,Min,Max,Mean,sDev,numMeas
        # Ensure min(numMeas) > nMeas
        mCount = min(nResult[6], nResult[7+6], nResult[2*7+6])
    # offsets: [2]=Min, [3]=Max within each 7-field group; group 0 and 1 are
    # the two widths, group 2 is the period.
    minHper = min(nResult[2], nResult[7+2]) # min() of min()
    maxHper = max(nResult[3], nResult[7+3]) # max() of max()
    aveHper = 0.5*nResult[4+2*7] # ave. half-period
    jitHperMin = minHper - aveHper
    jitHperMax = maxHper - aveHper
    nList = [jitHperMin, jitHperMax]
    return nList # Numeric list: [Min, Max]
#---------------------------------------------------------------------------------
#
# Return [jccMin, jccMax]
#
def meas_jit_cc(scope, chnl, nMeas=1, src=RTO.SRC_CHAN):
    """Return [jccMin, jccMax]: cycle-to-cycle jitter extremes on src+chnl.

    Uses the :MEAS:CTCJ (cycle-to-cycle jitter) measurement with statistics
    and polls until at least nMeas acquisitions have accumulated.
    NOTE(review): if called with nMeas <= 0 the loop never runs and sList is
    unbound -- callers are expected to pass nMeas >= 1.
    """
    srcName = RTO1.src_name(src)
    scope.viDev.write(":MEAS:CLE") # clear out measmts
    scope.viDev.write(":MEAS:SOUR %s%d"%(srcName, chnl))
    # statistics on, use MEAS:RES? to get mean, min, max, rms
    scope.viDev.write(":MEAS:STAT ON")
    scope.viDev.write(":MEAS:CTCJ %s%d, RIS"%(srcName, chnl))
    # wait till >= nMeas acquisitions complete
    mCount = 0
    while (mCount < nMeas):
        time.sleep(1.0) # pause a bit
        sResult = scope.viDev.ask(":MEAS:RES?")
        sList = sResult.split(',') # Name,Curr,Min,Max,Mean,sDev,nMeas
        mCount = float(sList[6])
    jitMin = float(sList[2])
    jitMax = float(sList[3])
    nList = [jitMin, jitMax]
    return nList # Numeric list: [Min, Max]
#---------------------------------------------------------------------------------
#
# DISK COMMANDS
#---------------------------------------------------------------------------------
def load_setup(scope, setupFile, fType=RTO.FTYP_SET, dest=1):
    """Load a setup file (or, for FTYP_WFM, a waveform into memory 'dest') from disk."""
    if fType == RTO.FTYP_WFM:
        cmd = ":DISK:LOAD \"%s\",WMEM%d" % (setupFile, dest)
    else:
        cmd = ":DISK:LOAD \"%s\"" % setupFile
    scope.viDev.write(cmd)
#---------------------------------------------------------------------------------
def save_screen(scope, jpgName, doAsk=True, doPause=True):
    """Save a screenshot to <jpgName>.jpg on the scope disk.

    doPause -- stop acquisition during the save, resume afterwards.
    doAsk   -- prompt interactively; only 'y' or '1' proceeds.
    """
    if doPause:
        send_cmd(scope, RTO.CMD_STOP)
    doSave = True
    if doAsk:
        answer = raw_input("\nScreenshot %s.jpg? (n):"%jpgName)
        doSave = answer == 'y' or answer == '1'
    if doSave:
        scope.viDev.write(":DISK:SIM \"%s\", JPEG, SCR, ON"%(jpgName))
    if doPause:
        send_cmd(scope, RTO.CMD_RUN)
#---------------------------------------------------------------------------------
def save_csv(scope, chnl, fileName, src=RTO.SRC_CHAN,
             doAsk=True, doPause=True):
    """Save a channel waveform as CSV (wrapper over save_waveform with tabDelim off)."""
    save_waveform(scope, chnl, fileName, src=src, doAsk=doAsk, doPause=doPause)
#---------------------------------------------------------------------------------
def save_waveform(scope, chnl, fileName, src=RTO.SRC_CHAN,
                  doAsk=True, doPause=True, tabDelim=False):
    """Save waveform data to a CSV/TSV text file on the scope disk.

    chnl <= 0 saves all channels/functions/memories via :DISK:MST; otherwise
    the single src+chnl trace via :DISK:STORE.  doPause stops acquisition
    during the save; doAsk prompts interactively ('y' or '1' proceeds).
    Blocks until the scope answers *IDN? again (i.e. the save finished).
    """
    if doPause:
        send_cmd(scope, RTO.CMD_STOP)
    if tabDelim: vType = "TSV"
    else:        vType = "CSV"
    if doAsk:
        yesNo = raw_input("\nWaveform %s.%s? (n):"%(fileName, vType))
        if yesNo=='y' or yesNo=='1':
            yesNo = True
        else:
            yesNo = False
    else:
        yesNo = True
    if yesNo:
        if chnl <= 0: # do all channels, functions, waveform mems
            cmd = ":DISK:MST \"%s\", %s, OFF"%(fileName, vType)
        else:
            srcName = RTO1.src_name(src)
            cmd = ":DISK:STORE %s%d, \"%s\", TEXT, %s"%(srcName, chnl, fileName, vType)
        scope.viDev.write(cmd)
        # kludge to wait till file_save is done: the scope won't answer
        # queries while writing, so poll *IDN? until it responds again
        while True:
            time.sleep(5.0)
            id = scope.viDev.ask("*IDN?")
            if len(id) > 0:
                break
            print("Saving %s, please wait....\n"%fileName)
    if doPause:
        send_cmd(scope, RTO.CMD_RUN)
#---------------------------------------------------------------------------------
# Supported real-time scope model tags (arbitrary enum values).
RTOSC_2p5G = 0
RTOSC_13G = 1
RTOSC_TEK = 2
RTOSC_12G = 3
# Map model-number substrings (as reported by the instrument) to the tags above.
RtScopeDict = {
    '5485':RTOSC_2p5G,
    '81304':RTOSC_13G,
    '7404':RTOSC_TEK,
    '81204':RTOSC_12G
    }
class RtScope(LabDev):
    """Object wrapper for a real-time oscilloscope.

    Every method delegates to the same-named module-level helper, passing
    self as the 'scope' argument (LabDev supplies the .viDev VISA handle).

    Bug fixes vs. the original:
    * Removed a duplicate, broken 'set_display' definition that called a
      bare 'write(...)' (NameError); it was dead anyway, shadowed by the
      second definition.
    * 'get_vrng' no longer passes self to the module-level get_vrng(lo, hi),
      which takes only two arguments (the old call raised TypeError).
    """
    def __init__(self, name):
        LabDev.__init__(self, name)
        LabDev.set_model(self, RtScopeDict, DEV_SCOPE)
    # TODO: merge all scope_utils code here
    def send_cmd(self, cmd, intArg=None, cmdStrg=None):
        send_cmd(self, cmd, intArg, cmdStrg)
    def set_display(self, chnl, OnOff=0, src=RTO.SRC_CHAN):
        set_display(self, chnl, OnOff, src)
    def define_func(self, num, operator, oper1, oper2, src1=RTO.SRC_CHAN, src2=RTO.SRC_CHAN):
        define_func(self, num, operator, oper1, oper2, src1, src2)
    def set_vrange(self, chnl, vRng=None, vOffset=None, src=RTO.SRC_CHAN):
        set_vrange(self, chnl, vRng, vOffset, src)
    def get_vrange(self, chnl, src=RTO.SRC_CHAN):
        return get_vrange(self, chnl, src)
    def set_vrange_min_max(self, chnl, vMin, vMax, src=RTO.SRC_CHAN):
        set_vrange_min_max(self, chnl, vMin, vMax, src)
    def get_vrange_min_max(self, chnl, src=RTO.SRC_CHAN):
        return get_vrange_min_max(self, chnl, src)
    def auto_vrange(self, chnl, src=RTO.SRC_CHAN):
        return auto_vrange(self, chnl, src)
    def set_aver(self, OnOff=0, Count=1):
        set_aver(self, OnOff, Count)
    def set_srate(self, doAuto=True, sRate=None):
        set_srate(self, doAuto, sRate)
    def set_points(self, doAuto=True, nPts=None):
        set_points(self, doAuto, nPts)
    def set_timebase_range(self, tRng, refPos=RTO.TIME_REF_CENT):
        set_timebase_range(self, tRng, refPos)
    def set_timebase_pos(self, tPos):
        set_timebase_pos(self, tPos)
    def get_trange(self):
        return get_trange(self)
    def meas_period(self, chnl, stat=False, nMeas=1,
                    src=RTO.SRC_CHAN, dir=RTO.DIR_RISE, doAppend=False):
        return meas_period(self, chnl, stat, nMeas, src, dir, doAppend)
    def meas_vrange_min_max(self, chnl, src=RTO.SRC_CHAN):
        return meas_vrange_min_max(self, chnl, src)
    def meas_single(self, measCmd, chnl, stat=False, nMeas=1,
                    src=RTO.SRC_CHAN, doAppend=False):
        return meas_single(self, measCmd, chnl, stat, nMeas, src, doAppend)
    def meas_vavg(self, chnl, stat=False, nMeas=1,
                  src=RTO.SRC_CHAN, doAppend=False):
        return meas_vavg(self, chnl, stat, nMeas, src, doAppend)
    def meas_vamp(self, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN, doAppend=False):
        return meas_vamp(self, chnl, stat, nMeas, src, doAppend)
    def meas_vtop(self, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN, doAppend=False):
        return meas_vtop(self, chnl, stat, nMeas, src, doAppend)
    def meas_vbase(self, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN, doAppend=False):
        return meas_vbase(self, chnl, stat, nMeas, src, doAppend)
    def meas_vtime(self, tVal, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN, doAppend=False):
        return meas_vtime(self, tVal, chnl, stat, nMeas, src, doAppend)
    def meas_results(self):
        return meas_results(self)
    def set_trigger(self, chnl, level=None, edge=RTO.SLP_POS):
        set_trigger(self, chnl, level, edge)
    def set_trigswp(self, mode=RTO.TRIGSWP_AUTO):
        set_trigswp(self, mode)
    def set_viol_width_trigger(self, chnl, width, posPol=True, dirWide=True):
        set_viol_width_trigger(self, chnl, width, posPol, dirWide)
    def define_thr(self, thrType, thrList=None, chnl=None, src=RTO.SRC_CHAN):
        define_thr(self, thrType, thrList, chnl, src)
    def define_topbase(self, tbType, tbList=None, chnl=None, src=RTO.SRC_CHAN):
        define_topbase(self, tbType, tbList, chnl, src)
    def define_deltime(self, begList, endList):
        define_deltime(self, begList, endList)
    def meas_deltime(self, ch1, ch2=None, stat=False, nMeas=1,
                     src1=RTO.SRC_CHAN, src2=RTO.SRC_CHAN, doAppend=False):
        return meas_deltime(self, ch1, ch2, stat, nMeas, src1, src2, doAppend)
    def get_vrng(self, lo, hi): # return range bracketing 100s of mV, plus 120mV
        # Fixed: module-level get_vrng takes (lo, hi) only; do not pass self.
        return get_vrng(lo, hi)
    def meas_tcross(self, val, edgDir, chnl, stat=False, nMeas=1, src=RTO.SRC_CHAN):
        return meas_tcross(self, val, edgDir, chnl, stat, nMeas, src)
    def vcross(self, fNum, chp, chn, jpgPrefix=None):
        return vcross(self, fNum, chp, chn, jpgPrefix)
    def jitter_stat(self, OnOff=True):
        jitter_stat(self, OnOff)
    def meas_jit_per(self, chnl, nMeas=1, src=RTO.SRC_CHAN):
        return meas_jit_per(self, chnl, nMeas, src)
    def meas_jit_hper(self, chnl, nMeas=1, src=RTO.SRC_CHAN):
        return meas_jit_hper(self, chnl, nMeas, src)
    def meas_jit_cc(self, chnl, nMeas=1, src=RTO.SRC_CHAN):
        return meas_jit_cc(self, chnl, nMeas, src)
    def load_setup(self, setupFile, fType=RTO.FTYP_SET, dest=1):
        load_setup(self, setupFile, fType, dest)
    def save_screen(self, jpgName, doAsk=True, doPause=True):
        save_screen(self, jpgName, doAsk, doPause)
    def save_csv(self, chnl, fileName, src=RTO.SRC_CHAN,
                 doAsk=True, doPause=True):
        save_csv(self, chnl, fileName, src, doAsk, doPause)
    def save_waveform(self, chnl, fileName, src=RTO.SRC_CHAN, doAsk=True, doPause=True, tabDelim=False):
        save_waveform(self, chnl, fileName, src, doAsk, doPause, tabDelim)
#---------------------------------------------------------------------------------
def scope_utils_run():
    """Exercise the RtScope wrapper against a live DSO81204 instrument.

    Interactive/hardware-bound smoke test: measures period and cycle-to-cycle
    jitter, dumps vertical ranges of all four channels, then runs a series of
    delta-time measurements with different triggers and thresholds.
    """
    import time
    # from pylab.utils.labutils import get_index
    from labutils import get_index
    il = get_instruments_list()
    dso = RtScope(il[get_index(il, "DSO81204")]) # '4f0705c"
    dso.dev_print()
    # period jitter over 10000 acquisitions, converted to picoseconds
    jper = dso.meas_jit_per(1, 10000)
    jper = [1e12 * x for x in jper] # ps
    print("CHAN1: jitMin=%e, jitMax=%e, avePer=%e ps"%
          (jper[0], jper[1], jper[2]))
    # cycle-to-cycle jitter, also in ps
    jcc = dso.meas_jit_cc(1, 10000)
    jcc = [1e12 * x for x in jcc] # ps
    print("CHAN1: jccMin=%e, jccMax=%e ps"%(jcc[0], jcc[1]))
    if (True):
        # dump configured and measured vertical ranges for all 4 channels
        for kCh in range(1, 5):
            rList = dso.get_vrange(kCh)
            print("CHAN%d: RANGE=%e, OFFSET=%e"%(kCh, rList[0], rList[1]))
            rList = dso.get_vrange_min_max(kCh)
            print ("CHAN%d: Min=%e, Max=%e"%(kCh, rList[0], rList[1]))
            rList = dso.meas_vrange_min_max(kCh)
            print("CHAN%d: VMIN=%e, VMAX=%e"%(kCh, rList[0], rList[1]))
        dso.set_trigger(2, 0.75, RTO.SLP_NEG) # trigger on ch2 falling edge
        # per-channel measurement thresholds (volts, except ch4 in percent)
        dso.define_thr(RTO.THR_VOLT, [0.2, 0., -0.2], 1)
        dso.define_thr(RTO.THR_VOLT, [0.75*2*0.8, 0.75, 0.75*2*0.2], 2)
        dso.define_thr(RTO.THR_VOLT, [0.1, 0., -0.1], 3)
        dso.define_thr(RTO.THR_PERC, [80, 50, 20], 4)
        dso.define_deltime([RTO.DIR_FALL, 1, RTO.POS_MID],
                           [RTO.DIR_RISE, 4, RTO.POS_MID])
        rList = dso.meas_deltime(2, 1)
        print("Current Delay(CHAN2, CHAN1)=%e"%rList[0])
        rList = dso.meas_deltime(2, 1, True, 100)
        print("Ave. Delay(CHAN2, CHAN1)=%e"%rList[3])
        dso.define_deltime([RTO.DIR_FALL, 1, RTO.POS_MID],
                           [RTO.DIR_FALL, 3, RTO.POS_MID])
        rList = dso.meas_deltime(2, 3, True, 100)
        print("Ave. Delay(CHAN2, CHAN3)=%e"%rList[3])
        dso.set_trigger(4, 0.75, RTO.SLP_POS) # trigger on ch4 rising edge
        dso.define_deltime([RTO.DIR_RISE, 1, RTO.POS_MID],
                           [RTO.DIR_RISE, 3, RTO.POS_MID])
        rList = dso.meas_deltime(4, 1, True, 100)
        print("Ave. Delay(CHAN4, CHAN1)=%e"%rList[3])
|
17,511 | 39cb6c92986e95eb7a095e77635a3462fd8f9f2a | # Ejercicio 023_Intro_BlockData_2
# Donde se muestra como cambiar primero el color de wool
# ejercicio previo a mostrar cambio de orientacion
# Usamos de forma mas eficiente la variable blockType
# Connect to Minecraft
from mcpi.minecraft import Minecraft
mc = Minecraft.create()
# 35 es White Wool
blockType = 35
# Coordenadas iniciales
x = 10
y = 4
z = 5
# En x +2
x = x + 2
# Se crean los bloques con data
mc.setBlock(x, y, z+2, blockType,0)
mc.setBlock(x, y, z+4, blockType,1)
mc.setBlock(x, y, z+6, blockType,2)
mc.setBlock(x, y, z+8, blockType,3)
mc.setBlock(x, y, z+10, blockType,4)
mc.setBlock(x, y, z+12, blockType,5)
mc.setBlock(x, y, z+14, blockType,6)
mc.setBlock(x, y, z+16, blockType,7)
mc.setBlock(x, y, z+18, blockType,8)
mc.setBlock(x, y, z+20, blockType,9)
mc.setBlock(x, y, z+22, blockType,10)
mc.setBlock(x, y, z+24, blockType,11)
mc.setBlock(x, y, z+26, blockType,12)
mc.setBlock(x, y, z+28, blockType,13)
mc.setBlock(x, y, z+30, blockType,14)
mc.setBlock(x, y, z+30, blockType,15)
|
17,512 | 5551253d258ca52784c9b99716eb43cb5d683ae9 | from flask import Flask, render_template, request
import json
import datetime
from pyfunc import loginFunc, insertFunc, displayFunc, dashboardFunc
import configparser
import os
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html")
@app.route("/getUserLogin", methods=["GET", "POST"])
def getCurrentState():
data = request.get_json()
user_email = data["email"]
user_pass = data["password"]
loginTrue, is_admin = loginFunc.login(user_email, user_pass)
return json.dumps({"login": loginTrue, "isAdmin": is_admin})
@app.route("/insertSleep", methods=["GET", "POST"])
def insertSleep():
data = request.get_json()
raw_data = insertFunc.insertSleep(data)
print(raw_data)
return json.dumps(raw_data)
@app.route("/getSleep", methods=["GET", "POST"])
def getSleep():
data = request.get_json()
raw_data = displayFunc.getSleep_all(data)
return json.dumps(raw_data)
@app.route("/insertBloodp", methods=["GET", "POST"])
def insertBloodp():
data = request.get_json()
raw_data = insertFunc.insertBloodp(data)
print(raw_data)
return json.dumps(raw_data)
@app.route("/getBloodp", methods=["GET", "POST"])
def getBloodp():
data = request.get_json()
raw_data = displayFunc.getBloodp_all(data)
return json.dumps(raw_data)
@app.route("/insertExercise", methods=["GET", "POST"])
def insertExercise():
data = request.get_json()
raw_data = insertFunc.insertExercise(data)
print(raw_data)
return json.dumps(raw_data)
@app.route("/getExercise", methods=["GET", "POST"])
def getExercise():
data = request.get_json()
raw_data = displayFunc.getExercise_all(data)
return json.dumps(raw_data)
@app.route("/getDashboardSleepHours", methods=["GET", "POST"])
def getDashboardSleepHours():
data = request.get_json()
raw_data = dashboardFunc.getDashboard_sleep_hours(data)
return json.dumps(raw_data)
@app.route("/getDashboardExerciseHours", methods=["GET", "POST"])
def getDashboardExerciseHours():
data = request.get_json()
raw_data = dashboardFunc.getDashboard_exercise_hours(data)
return json.dumps(raw_data)
@app.route("/getDashboardOfSysBP", methods=["GET", "POST"])
def getDashboardOfSysBP():
data = request.get_json()
raw_data = dashboardFunc.getDashboard_sys_bp(data)
return json.dumps(raw_data)
@app.route("/getDashboardOfDiaBP", methods=["GET", "POST"])
def getDashboardOfDiaBP():
data = request.get_json()
raw_data = dashboardFunc.getDashboard_dia_bp(data)
return json.dumps(raw_data)
if __name__ == "__main__":
app.run(host="0.0.0.0")
|
17,513 | e0738a651e68439f826dad2efed8be47b981f1e2 | from django.core.management.base import BaseCommand, CommandError
from mlpipe.models import Pipe
import mlpipe.mlpipe_utils as mlpipeutils
from mlpipe.job_utils import JobRunner
import os
class Command(BaseCommand):
help = 'get the md5 for a string'
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('inputstring')
def handle(self, *args, **options):
if "inputstring" in options:
print(mlpipeutils.get_md5(options["inputstring"]))
|
17,514 | d103d3a961cf51b6abf8b4f98c923b61f3497662 | # volunpack.py
# version 0.75 (more than half finished now!)
#
# this file is part of voltools
#
# unpacker for vol files
#
# pre-production software
# may be licensed differently after completion
#
# written by and copyright © 2020 Erica Garcia [ericathesnark] <me@athenas.space>
# licensed under the MIT license [https://license.athenas.space/mit] | SPDX-License-Identifier: MIT
#
# this code says: trans rights
#
# don't like that? suck it up, or write your own code ^-^
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@=*+++++::::+*===@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@@@@@@@@=*****:--........-....-:*=@@@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@@@@@@@*---.......................-*@@@@@@@@@@@@@@@@@@@@@@@@@@
# @@@@@@:+=@@@@@@@@@@@*:--------...........-----.......-+@@@@@@@@@@@@@@@@@@@==**@
# @@@@@+::---:=@@@@@=:--------:---........----------------:*@@@@@@@@@@@@=+:::::+@
# @@@@@:::::-----:*+------------.-.......-------------------:=@@@@@@@*+::::::::*@
# @@@@=::+++:+::----::::::--::------....----:::::------------:=@@@@:+****+++:::=@
# @@@@@=::+****+::::::::::::-:+:--------.--:+++=#==+:---------:*+::**+=#==*+++=@@
# @@@@@=:--++*****:::::::::::-:+::------....-:::+=##=+:--------:*==*+=#==*+::=@@@
# @@@@@@=::::+****++::::::::-----------.-.-.---:::+=#=:::----:*===*******+++*@@@@
# @@@@@@@=:::++**:++++:::::----------------...---------:---:+*=********+++++=@@@@
# @@@@@@@@=:::++:::++++::::-----.------------..--------::::+***********++++*@@@@@
# @@@@@@@@@@=+:::::+++:::::-------------------..-----:-:::+***=*++++++++++*@@@@@@
# @@@@@@@@@@@@=+::++++++::::-----------------------------:*+*++:::::++++=@@@@@@@@
# @@@@@@@@@@@@@=+:++++++::::::--------------------------::*+++++++++++=@@@@@@@@@@
# @@@@@@@@@@@@@@*+:++++++::::::::-------::------------:::++++++++++:*@@@@@@@@@@@@
# @@@@@@@@@@@@@@=*++++++++++++:::::::-----::-------:::::**+++++++::*@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@=++++++++++++::::::::::--:::----:::::::::++++:::+*@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@=+++++++++++++++:::::::-:::::--:::::::::::::::*=@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@=+++++++++++++::::::::::::----:::----::::::+=@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@@=+::+++++:::::::::::::::::--::::-------::+=@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@@@=::::::::::::::::::::::::::::::::--------:*@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@@=+::::::::::::::::::::::::::::::::::------:+=@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@=*:::::::-::::::::::::::---:-::::-:::------:=@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@@*:::::--------------------::::::-----------+@@@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@@=+:----------------::::::::::-:::::---------.-+@@@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@@=+:-------------:::::::::-----::::----------....-+@@@@@@@@@@@@@@@@@
# @@@@@@@@@@@=+:------------------------------------------.......:=@@@@@@@@@@@@@@
# @@@@@@@@@@@+:--------------------------::::-------------........-+@@@@@@@@@@@@@
# @@@@@@@@@@*:-----------------------:-:-:----------------.........-:=@@@@@@@@@@@
# @@@@@@@@@=:-------------------::------------------------..........-+@@@@@@@@@@@
# @@@@@@@@=:----------------------------------------------..........-:=@@@@@@@@@@
# @@@@@@@=:-----------------------------------------------...........-+@@@@@@@@@@
import os, sys
def unpack(filename):
    """Dispatch *filename* to the matching VOL-archive dumper based on its magic.

    Recognised 4-byte magics: "PVOL", " VOL" (VOL2) and "VOLN".
    Prints a message and exits with status 1 on an unknown header.
    """
    # open inside a context manager so the handle is closed even if a
    # dumper raises (the original code leaked the file object)
    with open(filename, "rb") as f:
        # the first four bytes identify the archive flavour
        hdr = f.read(4)
        if hdr.decode() == "PVOL":
            dumpPVOL(f)  # PVOL dumping method
        elif hdr.decode() == " VOL":
            dumpVOL2(f)  # VOL2 dumping method
        elif hdr.decode() == "VOLN":
            dumpVOLN(f)  # VOLN dumping method
        else:  # what did you give me?!
            print("Invalid input file!")
            sys.exit(1)
def __parseDetailDirectory(detailDirEntries, fileDirContents, f):
    """Parse 17-byte detail-directory entries and read each file's data.

    detailDirEntries -- list of raw 17-byte entries from the detail directory
    fileDirContents  -- raw bytes of the filename directory (null-terminated names)
    f                -- open binary file positioned anywhere (seeked per entry)

    Returns a dict mapping filename (str) -> file contents (bytes, possibly
    still compressed -- see __decompressFile).
    """
    # just declaring some constants for readability
    COMPRESSION_NONE = 0
    COMPRESSION_LZH = 3
    # declare a dict to keep the files
    files = {}
    # parse detail directory entries
    for entry in detailDirEntries:
        # make sure it's a valid entry (4-null header)
        nulls = entry[:4]
        assert nulls == b'\x00\x00\x00\x00'
        # get filename offset from the entry
        fnOffset = int.from_bytes(entry[4:8], "little")
        # use a list to build filename
        fn = []
        # add characters to fn list until we find a null
        for i in fileDirContents[fnOffset::]:
            if i == 0: # if the character is null
                fn = "".join(fn) # turn list into string
                break
            fn.append(chr(i)) # add current character to fn list
        # get the offset at which file data is stored
        dataOffset = int.from_bytes(entry[8:12], "little")
        # get file length from details directory
        # NOTE(review): only 3 of the 4 available bytes are read here
        # (entry[15] is skipped before the flag at entry[16]) -- verify
        # against the on-disk format specification
        fLenFromDir = int.from_bytes(entry[12:15], "little")
        # check whether file is compressed or not
        compressionFlag = entry[16]
        # seek to the file data entry
        f.seek(dataOffset)
        # check file header
        fHdr = f.read(4)
        assert fHdr.decode() == "VBLK"
        # some vols are weird and have mismatched filesize in the directory and header, so storing both
        fLenFromHdr = int.from_bytes(f.read(3), "little")
        # seek past the unknown data
        f.seek(1, 1)
        # provisionally set length to what the file directory claims
        fLen = fLenFromDir
        # make a decision if filesizes are discrepant
        if fLenFromDir != fLenFromHdr:
            if compressionFlag == COMPRESSION_LZH: # if the file is LZH compressed
                fLen = fLenFromHdr if fLenFromHdr < fLenFromDir else fLenFromDir # go with the filesize specified in header if it's smaller, otherwise use directory's
        # (if the file is not LZH compressed, filesize still defaults to the directory's)
        # get file data and decompress if necessary
        fileData = __decompressFile(f.read(fLen), compressionFlag)
        # build file entry and add to dict
        files.update({fn: fileData})
    return files
def __decompressFile(fileData, compressionFlag):
    """Return the (possibly decompressed) contents of one file entry.

    Flag 0 marks uncompressed data.  Flag 3 marks the archive's LZH
    variant, which is not understood yet, so the raw bytes are passed
    through unchanged as a placeholder.  Any other flag yields None,
    matching the original fall-through behaviour.
    """
    if compressionFlag in (0, 3):
        # flag 3: placeholder until the butchered LZH variant is decoded
        return fileData
    return None
def __dumpFiles(fileDict, fileName):
    """Write every extracted file under "<fileName>-ext/".

    fileDict -- maps archive-relative filename (str) to contents (bytes)
    fileName -- path of the source archive; output goes to fileName + "-ext"
    """
    # dump files
    for fn, d in fileDict.items():
        path = os.path.join(fileName+"-ext", fn) # construct the target path
        os.makedirs(os.path.dirname(path), exist_ok=True) # make sure we actually have somewhere to put the file
        # context manager guarantees each output file is flushed and closed
        # (the original left every handle open)
        with open(path, "wb") as nf:
            nf.write(d) # dump the entire value of the entry
def dumpPVOL(f):
    """Extract a PVOL-flavoured archive.

    f -- open binary file positioned just past the 4-byte "PVOL" magic.
    Layout: 4-byte offset -> "vols" filename directory -> "voli" detail
    directory of 17-byte entries, then file data blocks ("VBLK").
    """
    # get 4-byte file directory offset
    fDirOffset = f.read(4)
    # seek to the file directory
    fDir = f.seek(int.from_bytes(fDirOffset, "little"))
    # read 4 bytes to get file directory header and check it
    fDirHdr = f.read(4)
    assert fDirHdr.decode() == "vols"
    # read 4 bytes to get the length of the file directory, and make it an int
    fDirLen = int.from_bytes(f.read(4), "little")
    # load the file directory's contents into memory
    fDirContent = f.read(fDirLen)
    # read 4 bytes to get detail directory header and check it
    dDirHdr = f.read(4).decode()
    assert dDirHdr in ["voli", '\x00vol']
    # sometimes PVOL has some weird padding between file and details directories, so skip a byte if there's a null
    if dDirHdr == '\x00vol':
        f.seek(1,1)
    # read 4 bytes to get detail directory length, and make it an int
    dDirLen = int.from_bytes(f.read(4), "little")
    # load the info directory's content into memory
    dDirContent = f.read(dDirLen)
    # make a list of info entries (17 bytes long) by dividing the content of the directory
    dDirEntries = [dDirContent[x:x+17] for x in range(0,len(dDirContent), 17)]
    # parse detail directory entries
    files = __parseDetailDirectory(dDirEntries,fDirContent,f)
    # dump files
    __dumpFiles(files, f.name)
def dumpVOL2(f):
    """Extract a VOL2-flavoured (" VOL" magic) archive.

    f -- open binary file positioned just past the 4-byte magic.
    Same layout as PVOL except for an extra 16-byte empty directory
    before the "vols" filename directory.
    """
    # get 4-byte empty directory offset
    eDirOffset = f.read(4)
    # seek to the empty directory
    eDir = f.seek(int.from_bytes(eDirOffset, "little"))
    # seek past the 16 bytes of empty directory
    f.seek(16, 1)
    # read file directory header and check
    assert f.read(4).decode() == "vols"
    # read file directory length and make it into an int
    fDirLen = int.from_bytes(f.read(4), "little")
    # read the contents of the file directory
    fDirContent = f.read(fDirLen)
    # read details directory header and check
    dDirHdr = f.read(4).decode()
    assert dDirHdr in ["voli", '\x00vol']
    # sometimes VOL2 has some weird padding between file and details directories, so skip a byte if there's a null
    if dDirHdr == '\x00vol':
        f.seek(1,1)
    # read details directory length and make it into an int
    dDirLen = int.from_bytes(f.read(4), "little")
    # read details directory contents
    dDirContent = f.read(dDirLen)
    # split the details directory into 17 byte long entries
    dDirEntries = [dDirContent[x:x+17] for x in range(0,len(dDirContent), 17)]
    # parse detail directory entries
    files = __parseDetailDirectory(dDirEntries,fDirContent,f)
    # dump files
    __dumpFiles(files, f.name)
def dumpVOLN(f):
    """Extract a VOLN-flavoured archive (format not reverse-engineered yet)."""
    raise NotImplementedError
# only act as a CLI tool when executed directly; importing this module
# previously ran (and could sys.exit) the unpacker immediately
if __name__ == "__main__":
    unpack(sys.argv[1])
|
17,515 | 066e8cba86c9f44d8b0e7b7c5c7fa9612c262f7c | '''
Created on 8 Oct 2017
@author: matt
'''
class Line(object):
    """A straight segment between two 3-D points.

    Each endpoint object must expose ``x``, ``y`` and ``z`` attributes.
    """

    def __init__(self, dot_1, dot_2):
        """Store the two endpoints of the segment."""
        self.dot_1 = dot_1
        self.dot_2 = dot_2

    def __str__(self):
        return "Line: \n\t %s \n\t %s" % (self.dot_1, self.dot_2)

    def length(self):
        """Return the Euclidean length of the segment."""
        return (((self.dot_1.x - self.dot_2.x) ** 2)
                + ((self.dot_1.y - self.dot_2.y) ** 2)
                + ((self.dot_1.z - self.dot_2.z) ** 2)) ** 0.5

    # backward-compatible alias: the original API exposed this misspelled
    # name, so existing callers keep working
    lenght = length
17,516 | 4702eff73a412a9a8c61d9eba646c3974e4ea72b | __author__ = 'nmarchenko'
"""
http://www.checkio.org/mission/task/info/determine-the-order/python-27/
The Robots have found an encrypted message. We cannot decrypt it right now, but we can take the first steps.
Given a set of "words," (for simplicity we will use lowercase latin letters as symbols) each word contains symbols at
the "alphabetical" order (It is not in the latin alphabetical order, but a different order). We need to determine the
order of all the symbols from each word and create one word with all the symbols at once from given words in the
"alphabetical" order. For some cases, if we can not determine the order for several symbols -- use latin alphabetical
order. For example: Given words "acb", "bd", "zwa". As we can see "z" and "w" must be before "a" and "d" after "b".
So the result is "zwacbd".
Precondition: In each test, there will be only one solution.
Input: A list of strings.
Output: A string.
Example:
checkio(["acb", "bd", "zwa"]) == "zwacbd"
checkio(["klm", "kadl", "lsm"]) == "kadlsm"
checkio(["a", "b", "c"]) == "abc"
checkio(["aazzss"]) == "azs"
checkio(["dfg", "frt", "tyg"]) == "dfrtyg"
"""
def checkio(data):
    """Merge the words in *data* into one word honouring their symbol order.

    Repeatedly bubbles the symbols of the working list into an order that
    is consistent with every input word until a full pass makes no swap.
    A symbol constrained by nothing (appears alone in its only word) falls
    back to latin alphabetical order, per the mission statement.
    """
    # start from the set of all distinct symbols (arbitrary initial order)
    result = list(set("".join(data)))
    done = False
    while not done:
        done = True
        for i in result:
            # every word that constrains the position of symbol i
            chunks = [list(x) for x in data if i in x]
            if len(chunks) == 1 and len(chunks[0]) == 1:
                import string
                # unconstrained symbol: order it against the latin alphabet
                chunks[0] = string.ascii_lowercase
            for chank in chunks:
                for c in chank:
                    if c in result:
                        i_c, i_i = result.index(c), result.index(i)
                        # c must come after i in this word but currently
                        # precedes it in the result: swap and keep iterating
                        if chank.index(c) > chank.index(i) and not i_c > i_i:
                            result[i_c], result[i_i] = result[i_i], result[i_c]
                            done = False
    return "".join(result)
import unittest
class _test(unittest.TestCase):
    """Regression tests for checkio().

    Uses assertEqual instead of assertTrue(a == b) so failures report the
    actual and expected strings instead of a bare "False is not true".
    """

    def test_00(self):
        self.assertEqual(checkio(["acb", "bd", "zwa"]), "zwacbd")

    def test_01(self):
        self.assertEqual(checkio(["klm", "kadl", "lsm"]), "kadlsm")

    def test_02(self):
        self.assertEqual(checkio(["a", "b", "c"]), "abc")

    def test_03(self):
        self.assertEqual(checkio(["aazzss"]), "azs")

    def test_04(self):
        self.assertEqual(checkio(["dfg", "frt", "tyg"]), "dfrtyg")
17,517 | 7305c4211c054133bd792ebe4533f7109238d1bc | # Public Domain SOCKS proxy protocol implementation
# Adapted from https://gist.github.com/bluec0re/cafd3764412967417fd3
# References:
# SOCKS4 protocol http://www.openssh.com/txt/socks4.protocol
# SOCKS4A protocol http://www.openssh.com/txt/socks4a.protocol
# SOCKS5 protocol https://tools.ietf.org/html/rfc1928
# SOCKS5 username/password authentication https://tools.ietf.org/html/rfc1929
import collections
import socket
import struct
from .compat import compat_ord
__author__ = 'Timo Schmid <coding@timoschmid.de>'
SOCKS4_VERSION = 4
SOCKS4_REPLY_VERSION = 0x00
# Excerpt from SOCKS4A protocol:
# if the client cannot resolve the destination host's domain name to find its
# IP address, it should set the first three bytes of DSTIP to NULL and the last
# byte to a non-zero value.
SOCKS4_DEFAULT_DSTIP = struct.pack('!BBBB', 0, 0, 0, 0xFF)
SOCKS5_VERSION = 5
SOCKS5_USER_AUTH_VERSION = 0x01
SOCKS5_USER_AUTH_SUCCESS = 0x00
class Socks4Command:
    """SOCKS4 request command codes (the CD field of the request)."""
    CMD_CONNECT = 0x01
    CMD_BIND = 0x02
class Socks5Command(Socks4Command):
    """SOCKS5 command codes: SOCKS4's plus UDP ASSOCIATE (RFC 1928 section 4)."""
    CMD_UDP_ASSOCIATE = 0x03
class Socks5Auth:
    """SOCKS5 authentication method identifiers (RFC 1928 section 3)."""
    AUTH_NONE = 0x00
    AUTH_GSSAPI = 0x01
    AUTH_USER_PASS = 0x02
    AUTH_NO_ACCEPTABLE = 0xFF  # For server response
class Socks5AddressType:
    """SOCKS5 destination address types (the ATYP field, RFC 1928 section 5)."""
    ATYP_IPV4 = 0x01
    ATYP_DOMAINNAME = 0x03
    ATYP_IPV6 = 0x04
class ProxyError(socket.error):
    """Base error for SOCKS handshake failures.

    Subclasses supply a CODES mapping from protocol reply codes to
    human-readable messages; when *msg* is omitted it is looked up there.
    NOTE(review): ProxyError itself defines no CODES, so constructing it
    directly with only a code relies on a subclass attribute -- verify
    callers never do that.
    """
    ERR_SUCCESS = 0x00

    def __init__(self, code=None, msg=None):
        if code is not None and msg is None:
            msg = self.CODES.get(code) or 'unknown error'
        super().__init__(code, msg)
class InvalidVersionError(ProxyError):
    """Raised when a server reply carries an unexpected protocol version."""

    def __init__(self, expected_version, got_version):
        # build the identical message with an f-string instead of .format()
        detail = (
            f'Invalid response version from server. '
            f'Expected {expected_version:02x} got {got_version:02x}'
        )
        super().__init__(0, detail)
class Socks4Error(ProxyError):
    """SOCKS4 reply codes; 90 (0x5a) means "request granted"."""
    ERR_SUCCESS = 90

    CODES = {
        91: 'request rejected or failed',
        92: 'request rejected because SOCKS server cannot connect to identd on the client',
        93: 'request rejected because the client program and identd report different user-ids'
    }
class Socks5Error(ProxyError):
    """SOCKS5 reply codes (RFC 1928 section 6) plus two local auth-failure codes."""
    ERR_GENERAL_FAILURE = 0x01

    CODES = {
        0x01: 'general SOCKS server failure',
        0x02: 'connection not allowed by ruleset',
        0x03: 'Network unreachable',
        0x04: 'Host unreachable',
        0x05: 'Connection refused',
        0x06: 'TTL expired',
        0x07: 'Command not supported',
        0x08: 'Address type not supported',
        0xFE: 'unknown username or invalid password',
        0xFF: 'all offered authentication methods were rejected'
    }
class ProxyType:
    """Proxy protocol selector passed to sockssocket.setproxy()."""
    SOCKS4 = 0
    SOCKS4A = 1
    SOCKS5 = 2
# Immutable record describing the configured upstream SOCKS proxy.
Proxy = collections.namedtuple('Proxy', (
    'type', 'host', 'port', 'username', 'password', 'remote_dns'))
class sockssocket(socket.socket):
    """socket.socket subclass that tunnels connect() through a SOCKS proxy.

    Until setproxy() is called it behaves like a plain socket; afterwards
    connect()/connect_ex() first connect to the proxy and then perform the
    SOCKS4 / SOCKS4a / SOCKS5 handshake towards the real destination.
    """

    def __init__(self, *args, **kwargs):
        # no proxy configured yet -> plain socket behaviour
        self._proxy = None
        super().__init__(*args, **kwargs)

    def setproxy(self, proxytype, addr, port, rdns=True, username=None, password=None):
        """Configure the upstream proxy used by subsequent connect() calls."""
        assert proxytype in (ProxyType.SOCKS4, ProxyType.SOCKS4A, ProxyType.SOCKS5)
        self._proxy = Proxy(proxytype, addr, port, username, password, rdns)

    def recvall(self, cnt):
        """Receive exactly *cnt* bytes; raise EOFError if the peer closes early."""
        data = b''
        while len(data) < cnt:
            cur = self.recv(cnt - len(data))
            if not cur:
                raise EOFError(f'{cnt - len(data)} bytes missing')
            data += cur
        return data

    def _recv_bytes(self, cnt):
        """Receive *cnt* bytes and unpack them as a tuple of unsigned bytes."""
        data = self.recvall(cnt)
        return struct.unpack(f'!{cnt}B', data)

    @staticmethod
    def _len_and_data(data):
        """Prefix *data* with its one-byte length (SOCKS5 string encoding)."""
        return struct.pack('!B', len(data)) + data

    def _check_response_version(self, expected_version, got_version):
        """Close the socket and raise if the server used the wrong version."""
        if got_version != expected_version:
            self.close()
            raise InvalidVersionError(expected_version, got_version)

    def _resolve_address(self, destaddr, default, use_remote_dns):
        """Return the packed IPv4 address for *destaddr*.

        Returns *default* when *destaddr* is not a literal IP and name
        resolution is delegated to the proxy (remote DNS); otherwise
        resolves locally via gethostbyname.
        """
        try:
            return socket.inet_aton(destaddr)
        except OSError:
            if use_remote_dns and self._proxy.remote_dns:
                return default
            else:
                return socket.inet_aton(socket.gethostbyname(destaddr))

    def _setup_socks4(self, address, is_4a=False):
        """Perform the SOCKS4 (or SOCKS4a when *is_4a*) handshake.

        Returns the (dsthost, dstport) pair echoed by the server.
        """
        destaddr, port = address

        ipaddr = self._resolve_address(destaddr, SOCKS4_DEFAULT_DSTIP, use_remote_dns=is_4a)

        packet = struct.pack('!BBH', SOCKS4_VERSION, Socks4Command.CMD_CONNECT, port) + ipaddr

        # the SOCKS4 userid field, null-terminated
        username = (self._proxy.username or '').encode()
        packet += username + b'\x00'

        # SOCKS4a: append the hostname so the proxy resolves it remotely
        if is_4a and self._proxy.remote_dns:
            packet += destaddr.encode() + b'\x00'

        self.sendall(packet)

        version, resp_code, dstport, dsthost = struct.unpack('!BBHI', self.recvall(8))

        self._check_response_version(SOCKS4_REPLY_VERSION, version)

        if resp_code != Socks4Error.ERR_SUCCESS:
            self.close()
            raise Socks4Error(resp_code)

        return (dsthost, dstport)

    def _setup_socks4a(self, address):
        """SOCKS4a handshake: SOCKS4 with remote hostname resolution."""
        self._setup_socks4(address, is_4a=True)

    def _socks5_auth(self):
        """Negotiate the SOCKS5 auth method; run user/pass sub-negotiation if chosen."""
        packet = struct.pack('!B', SOCKS5_VERSION)

        auth_methods = [Socks5Auth.AUTH_NONE]
        if self._proxy.username and self._proxy.password:
            auth_methods.append(Socks5Auth.AUTH_USER_PASS)

        packet += struct.pack('!B', len(auth_methods))
        packet += struct.pack(f'!{len(auth_methods)}B', *auth_methods)

        self.sendall(packet)

        version, method = self._recv_bytes(2)

        self._check_response_version(SOCKS5_VERSION, version)

        # reject if the server picked nothing acceptable, or picked
        # user/pass while we have no credentials to offer
        if method == Socks5Auth.AUTH_NO_ACCEPTABLE or (
                method == Socks5Auth.AUTH_USER_PASS and (not self._proxy.username or not self._proxy.password)):
            self.close()
            raise Socks5Error(Socks5Auth.AUTH_NO_ACCEPTABLE)

        if method == Socks5Auth.AUTH_USER_PASS:
            # RFC 1929 username/password sub-negotiation
            username = self._proxy.username.encode()
            password = self._proxy.password.encode()
            packet = struct.pack('!B', SOCKS5_USER_AUTH_VERSION)
            packet += self._len_and_data(username) + self._len_and_data(password)
            self.sendall(packet)

            version, status = self._recv_bytes(2)

            self._check_response_version(SOCKS5_USER_AUTH_VERSION, version)

            if status != SOCKS5_USER_AUTH_SUCCESS:
                self.close()
                raise Socks5Error(Socks5Error.ERR_GENERAL_FAILURE)

    def _setup_socks5(self, address):
        """Perform the SOCKS5 handshake; returns the server-bound (addr, port)."""
        destaddr, port = address

        # ipaddr is None when the proxy should resolve the hostname itself
        ipaddr = self._resolve_address(destaddr, None, use_remote_dns=True)

        self._socks5_auth()

        reserved = 0
        packet = struct.pack('!BBB', SOCKS5_VERSION, Socks5Command.CMD_CONNECT, reserved)
        if ipaddr is None:
            destaddr = destaddr.encode()
            packet += struct.pack('!B', Socks5AddressType.ATYP_DOMAINNAME)
            packet += self._len_and_data(destaddr)
        else:
            packet += struct.pack('!B', Socks5AddressType.ATYP_IPV4) + ipaddr
        packet += struct.pack('!H', port)

        self.sendall(packet)

        version, status, reserved, atype = self._recv_bytes(4)

        self._check_response_version(SOCKS5_VERSION, version)

        if status != Socks5Error.ERR_SUCCESS:
            self.close()
            raise Socks5Error(status)

        # the reply's BND.ADDR length depends on its address type
        if atype == Socks5AddressType.ATYP_IPV4:
            destaddr = self.recvall(4)
        elif atype == Socks5AddressType.ATYP_DOMAINNAME:
            alen = compat_ord(self.recv(1))
            destaddr = self.recvall(alen)
        elif atype == Socks5AddressType.ATYP_IPV6:
            destaddr = self.recvall(16)

        destport = struct.unpack('!H', self.recvall(2))[0]

        return (destaddr, destport)

    def _make_proxy(self, connect_func, address):
        """Connect via *connect_func*, then run the configured SOCKS handshake.

        When no proxy is configured, behaves exactly like the underlying
        connect_func.  A non-zero/non-None result from connect_func (the
        connect_ex error-code convention) aborts the handshake.
        """
        if not self._proxy:
            return connect_func(self, address)

        result = connect_func(self, (self._proxy.host, self._proxy.port))
        if result != 0 and result is not None:
            return result
        setup_funcs = {
            ProxyType.SOCKS4: self._setup_socks4,
            ProxyType.SOCKS4A: self._setup_socks4a,
            ProxyType.SOCKS5: self._setup_socks5,
        }
        setup_funcs[self._proxy.type](address)
        return result

    def connect(self, address):
        """connect() drop-in that transparently goes through the proxy."""
        self._make_proxy(socket.socket.connect, address)

    def connect_ex(self, address):
        """connect_ex() drop-in; returns the underlying error code."""
        return self._make_proxy(socket.socket.connect_ex, address)
|
17,518 | 673553aba566955c25034cea036b9c15583ea655 |
import os, sys, math, pickle, numpy, matplotlib, glob
numpy.set_printoptions(threshold=numpy.nan)
matplotlib.use('Agg')
from matplotlib import pyplot
pyplot.rcParams['text.usetex'] = True
def do_total_resolution_plot(all_truth_data, all_reco_data, all_weights, all_truth_energy,
evals, xlabel, ylabel, title, savedir, savename):
for_stack_truth_data = []
for_stack_reco_data = []
for_stack_weights = []
for_stack_labels = []
for_stack_colours = []
for_stack_totals = []
for_stack_truth_energies = []
for_stack_cases = []
cases = ['nue_cc','numu_cc','nutau_cc','nuall_nc']
labels = [r'$\nu_e$ CC',r'$\nu_{\mu}$ CC',r'$\nu_{\tau}$ CC',r'$\nu$ NC']
colours = ['r', 'b', 'g', 'magenta']
AllMedianResVals = {}
for case, label, colour in zip(cases,labels,colours):
for_stack_truth_data.append(all_truth_data[case])
for_stack_reco_data.append(all_reco_data[case])
for_stack_weights.append(all_weights[case]*1e6)
for_stack_labels.append(label)
for_stack_colours.append(colour)
for_stack_totals.append(sum(numpy.array(all_weights[case]))*1e6)
for_stack_truth_energies.append(all_truth_energy[case])
for_stack_cases.append(case)
AllMedianResVals[case] = []
for_stack_totals, for_stack_truth_data, for_stack_reco_data, for_stack_weights, for_stack_labels, for_stack_colours, for_stack_cases, for_stack_truth_energies = (list(t) for t in zip(*sorted(zip(for_stack_totals, for_stack_truth_data, for_stack_reco_data, for_stack_weights, for_stack_labels, for_stack_colours, for_stack_cases, for_stack_truth_energies))))
for i in range(0,len(evals)-1):
print " Events in energy bin %.2f GeV - %.2f GeV"%(evals[i],evals[i+1])
for reco_data, truth_data, weights, case, label, colour, truth_energy in zip(for_stack_reco_data,
for_stack_truth_data,
for_stack_weights,
for_stack_cases,
for_stack_labels,
for_stack_colours,
for_stack_truth_energies):
InBin = [x < evals[i+1] and x >= evals[i] and x != 0.0 for x in truth_energy]
InBin = numpy.array(InBin)
if 'energy' in savename:
ResVals = (reco_data[InBin] - truth_data[InBin])/(truth_data[InBin])
elif 'coszen' in savename:
ResVals = reco_data[InBin] - truth_data[InBin]
if len(ResVals) == 0:
AllMedianResVals[case].append(0.0)
else:
AllMedianResVals[case].append(numpy.median(numpy.absolute(ResVals)))
histx, bins = numpy.histogram(ResVals,
weights = weights[InBin]*1e6,
bins = numpy.linspace(-2,2,21))
ymax = 0.0
if numpy.sum(histx) != 0.0:
pyplot.hist(bins[:-1],
weights = histx,
bins = bins,
color = colour,
label = label,
linewidth = 2,
histtype = 'step')
ymax = max(max(histx),ymax)
if ymax != 0.0:
pyplot.grid()
pyplot.xlabel(xlabel)
pyplot.ylabel("Events per 0.2 (%.2f GeV - %.2f GeV)"%(evals[i],evals[i+1]))
pyplot.ylim(0.0,1.1*ymax)
pyplot.subplots_adjust(bottom=0.12,top=0.8)
pyplot.title(title,size='x-large',x=0.5,y=1.20)
pyplot.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
ncol=3, mode="expand", borderaxespad=0.,fontsize='x-small')
pyplot.savefig("LocalPlots/%s/%s_InBin_%.2f_%.2f.pdf"%(savedir,savename,evals[i],evals[i+1]))
pyplot.close()
ymax = 0.0
for case, colour, label in zip(for_stack_cases,for_stack_colours,for_stack_labels):
pyplot.hist(evals[:-1],
weights = AllMedianResVals[case],
bins = evals,
color = colour,
label = label,
linewidth = 2,
histtype = 'step')
ymax = max(max(AllMedianResVals[case]),ymax)
pyplot.grid()
pyplot.xscale("log")
pyplot.xlabel("Truth Energy (GeV)")
pyplot.ylabel(ylabel)
pyplot.ylim(0.0,1.1*ymax)
pyplot.subplots_adjust(bottom=0.12,top=0.8)
pyplot.title(title,size='x-large',x=0.5,y=1.20)
pyplot.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,
ncol=3, mode="expand", borderaxespad=0.,fontsize='x-small')
print " Making median resolution plot over all energy"
pyplot.savefig("LocalPlots/%s/%s_Median.pdf"%(savedir,savename))
pyplot.close()
if __name__ == '__main__':
results = pickle.load(open("samplecomparisons.pckl"))
eventids = results['eventids']
unosc_weight = results['unosc_weight']
osc_weight = results['osc_weight']
energy = results['energy']
coszen = results['coszen']
isnu = results['isnu']
reco_energy = results['reco_energy']
reco_coszen = results['reco_coszen']
selections = ['prd', 'msu', 'nbi']
for selection in selections:
if selection == 'nbi':
selname = 'GRECO'
elif selection == 'msu':
selname = 'DRAGON'
elif selection == 'prd':
selname = 'LEESARD'
cases = ['nue_cc','numu_cc','nutau_cc','nuall_nc']
dirnames = ['NuECC','NuMuCC','NuTauCC','NuNC']
evals = numpy.logspace(0,3,21)
print 'Doing %s total energy resolution plot'%(selection)
do_total_resolution_plot(all_truth_data = energy[selection],
all_reco_data = reco_energy[selection],
all_weights = osc_weight[selection],
all_truth_energy = energy[selection],
evals = evals,
xlabel = r'All $E_{\nu}$ (Reco-Truth)/Truth',
ylabel = r'All $E_{\nu}$ $|$(Reco-Truth)/Truth$|$ Median',
title = '%s 1X600 Energy Resolution'%(selname),
savedir = 'Total',
savename = '%s_all_energy_resolution_'%(selection))
print 'Doing %s total coszen resolution plot'%(selection)
do_total_resolution_plot(all_truth_data = coszen[selection],
all_reco_data = reco_coszen[selection],
all_weights = osc_weight[selection],
all_truth_energy = energy[selection],
evals = evals,
xlabel = r'All $\cos\theta_Z$ Reco-Truth',
ylabel = r'All $\cos\theta_Z$ $|$Reco-Truth$|$ Median',
title = '%s 1X600 Zenith Resolution'%(selname),
savedir = 'Total',
savename = '%s_all_coszen_resolution_'%(selection))
|
17,519 | c8667c1d7ca3498414c9c49330c60108c6ca0a9d | from BtsShell import connections
import re, os
import time
# try:
# from network_config import *
# except Exception, e:
# print 'import network_config failed: %s' % e
# pass
#from TcpIp import TcpIps
from Pinger import BackgroundPinger
BgPing = BackgroundPinger()
def Bg_ping_start(host, options):
    """This keyword starts a background ping.
    | Input Parameters  | Man. | Description |
    | host              | Yes  | Host IP address |
    | options           | No   | Options for ping command such as "-n 10 -l 1024" |
    Example
    | switch host connection | ${TM500 CONTROL PC CONNECTION} | |
    | Bg_ping_start | 10.68.152.34 | -n 10 |
    | Do other things | arg1 | arg2 |
    | Bg_ping_stop | |
    | ${result} | Bg_get_ping_result |
    """
    # delegate to the module-level BackgroundPinger singleton
    BgPing.start_traffic(host, options)
def Bg_ping_stop():
    """This keyword is used with "Bg ping start"; it just stops the ping process.
    See Bg_ping_start for an example.
    """
    # stop the module-level BackgroundPinger; results stay available
    # until Bg_get_ping_result is called
    BgPing.stop_traffic()
def Bg_get_ping_result():
    """This keyword is used with "Bg ping start" and "Bg ping stop"; it fetches the ping result.
    See Bg_ping_start for an example.
    | return value |
    | ([sends, recvs, losts], [max_delay, min_delay, avg_delay]) |
    """
    return BgPing.analyse_result()
def ping_remote_system(host, options="", ignore=""):
"""This keyword test the reachability of remote system.
| Input Parameters | Man. | Description |
| host | Yes | Host IP address |
| options | No | Options for ping command such as "-n 10 -l 1024" |
| ping delay statistics | No | default or sample as ":1" or "5:" or "5:10" |
| Return Value 1 | a list contains three values which indicate sent/received/lost packagets |
| ${package_summary[0]} | send package count |
| ${package_summary[1]} | receive package count |
| ${package_summary[2]} | lost package count |
| Return Value 2 | a list contains three values which indicate max/min/average ping delay |
| ${ping_delay[0]} | max ping delay |
| ${ping_delay[1]} | min ping delay |
| ${ping_delay[2]} | average ping delay |
Example
| ${package_summary} | ${ping_delay} | Ping Remote System | 192.168.255.1 |
| should be true | ${package_summary[2]}<3 | #lost package should be less than 3 |
| ${package_summary} | ${ping_delay} | Ping Remote System | 192.168.255.1 | -n 10 | :1 |
| should be true | ${ping_delay[2]}<100 | #first ping delay should be less than 100ms |
| ${package_summary} | ${ping_delay} | Ping Remote System | 192.168.255.1 | -n 10 | 5: |
| should be true | ${ping_delay[2]}<100 | #ping delay except the top 5 should be less than 100ms |
"""
connection_type = connections.get_current_connection_type()
## option_list = ''
## for option in options:
## option_list = option_list + option + ' '
ret = connections.execute_shell_command_without_check('ping %s %s' % (options, host))
time_delay = []
delay_max_min_avg = []
statistic_delay = []
if connection_type == 'Windows':
lines = ret.split(os.linesep)
delay_pattern1 = re.compile(r'(?i).*Reply from.*?time=(\d+)ms.*')
delay_pattern2 = re.compile(r'(?i).*Reply from.*?time<(\d+)ms.*')
for line in lines:
if re.match(r'(?i).*Reply from.*?time.*(\d+)ms.*', line):
time = delay_pattern1.match(line)
if time:
tmp = time.groups()[0]
time_delay.append(int(tmp))
else:
time = delay_pattern2.match(line)
if time:
tmp = time.groups()[0]
time_delay.append(int(tmp))
else:
time_delay.append(-1)
else:
result = re.search('.*Sent\s*=\s*(\d*).*Received\s*=\s*(\d*).*Lost\s*=\s*(\d*)', line)
if result:
summary_result = result.groups()
print "Total ping delay is:",time_delay
if ""==ignore:
statistic_delay = time_delay
else:
(start,end) = ignore.split(":")
if ""==start:
start = 0
else:
start = int(start)
if ""==end:
end = len(time_delay)
else:
end = int(end)
for i in range(start,end):
statistic_delay.append(time_delay[i])
print "Statistic ping delay is:",statistic_delay
invalid = statistic_delay.count(-1)
for i in range(invalid):
statistic_delay.remove(-1)
print "Valid ping delay is:",statistic_delay
if 0 < len(statistic_delay):
list_max = max(statistic_delay)
list_min = min(statistic_delay)
list_avg = float(sum(statistic_delay))/float(len(statistic_delay))
delay_max_min_avg.append(list_max)
delay_max_min_avg.append(list_min)
delay_max_min_avg.append(list_avg)
else:
delay_max_min_avg = [0,0,0]
if connection_type == 'Linux':
if line.find('packets') > 0:
'4 packets transmitted, 0 received, 100% packet loss, time 3000ms'
result = re.search('(\d+)\s*packets transmitted.*(\d+)\s*received', line)
return (summary_result, delay_max_min_avg)
def ping_delay(host):
    """This keyword measures ping delay via the Windows ping summary line.
    | Input Parameters  | Man. | Description |
    | host              | Yes  | Host IP address (extra ping options may be appended) |
    | Return Value | a tuple of three strings: Minimum/Maximum/Average time in ms |
    NOTE(review): returns None when no "Minimum = ..." line is present
    (e.g. total packet loss or non-Windows output) -- callers should check.
    Example
    | ${ping_result} | Ping Delay | 10.68.149.182 -n 50 |
    | Should Be True | ${ping_result[-1]} < 13 |
    """
    ret = connections.execute_shell_command_without_check('ping %s' % host)
    lines = ret.splitlines()
    for line in lines:
        # Windows summary line: "Minimum = Xms, Maximum = Yms, Average = Zms"
        if line.find('Minimum') > 0:
            result = re.search('^.*Minimum = (\d*)ms, Maximum = (\d*)ms, Average = (\d*)ms', line)
            return result.groups()
def stop_firewall():
    """This keyword stops the PC firewall service (Windows "TmPfw" service).
    Example
    | Stop Firewall |
    """
    # sc is the Windows service-control utility; raises on command failure
    connections.execute_shell_command('sc stop TmPfw')
def wait_until_units_startup(timeout = '30', *units):
    """This keyword waits until every given unit answers ping, or raises.
    | Input Parameters  | Man. | Description |
    | timeout           | No   | Overall budget in seconds for the ping attempts, default 30 |
    | *units            | No   | IP addresses of the units to poll |
    NOTE(review): only time spent pinging is deducted from the timeout;
    the 10 s sleep between rounds is not, so the wall-clock wait can be
    noticeably longer than *timeout* -- confirm this is intended.
    Example
    | Wait Until Units Startup | 600 | 192.168.255.1 | 192.168.255.129 |
    """
    not_ready_units = list(units)
    ready_units = []
    ping_time_interval = 10 # ping all units every 10 seconds
    time_is_up = int(timeout)
    while len(not_ready_units) != 0 and time_is_up > 0:
        start_ping_time = time.time() # get start ping time
        for unit in not_ready_units:
            ping_result = ping_remote_system(unit)
            if ping_result[0][2] == '0': # ping OK which means unit is ready
                ready_units.append(unit)
        # remove units which is already ping successfully
        for ready_unit in ready_units:
            try:
                not_ready_units.remove(ready_unit)
            except ValueError:
                pass
        ready_units = [] # empty the ready_units
        end_ping_time = time.time() # get end ping time
        consume_ping_time = int(end_ping_time - start_ping_time)
        time_is_up = time_is_up - consume_ping_time
        time.sleep(ping_time_interval)
    if len(not_ready_units) != 0: # still some units are not ready
        raise Exception, 'there are still some units (%s) not in working state' % not_ready_units
def get_pppoe_connection_ip_address():
    """Return the IP address of the first PPPoE adapter shown by ipconfig.

    Scans the Windows ``ipconfig`` output for a "PPP" adapter section and
    returns the value of its first "Address" line; raises when none exists.
    """
    ret = connections.execute_shell_command_without_check('ipconfig')
    pppoe_connection_start = False
    lines = ret.splitlines()
    for line in lines:
        # entering a PPP adapter section
        if line.find('PPP') >= 0:
            pppoe_connection_start = True
        if line.find('Address') >= 0 and pppoe_connection_start:
            pppoe_connection_ip_address = line.split(':')[-1].strip()
            break
    # the variable is only bound when an address line was found inside
    # a PPP section; NameError here means no PPPoE connection exists
    try:
        return pppoe_connection_ip_address
    except NameError:
        raise Exception, 'no any PPPoE connection found'
def get_multi_pppoe_connection_ip_address():
    """Return a list with the IP address of every PPPoE adapter in ipconfig.

    Like get_pppoe_connection_ip_address but collects all matches; raises
    when no PPP adapter reports an address.
    """
    pppoe_connection_ip_address = []
    ret = connections.execute_shell_command_without_check('ipconfig')
    pppoe_connection_start = False
    lines = ret.splitlines()
    for line in lines:
        # entering a PPP adapter section
        if line.find('PPP') >= 0:
            pppoe_connection_start = True
        if line.find('Address') >= 0 and pppoe_connection_start:
            tmp = line.split(':')[-1].strip()
            print tmp
            # one address per PPP section: reset the flag after capturing
            pppoe_connection_start = False
            pppoe_connection_ip_address.append(tmp)
    if 0 == len(pppoe_connection_ip_address):
        raise Exception, 'no any PPPoE connection found'
    # NOTE(review): this try/except is dead code -- the list is always
    # bound at this point, so NameError cannot occur
    try:
        return pppoe_connection_ip_address
    except NameError:
        raise Exception, 'no any PPPoE connection found'
if __name__ == '__main__':
connections.connect_to_host('10.69.71.114', '23', 'tdlte-tester', 'btstest')
Bg_ping_start("10.69.71.115", "-n 10")
time.sleep(5)
Bg_ping_stop()
print Bg_get_ping_result()
|
17,520 | 507cd83cbf015f34aa42b8f8786abd9aa0525bc8 | from map import TileMap
from drawings import *
|
17,521 | bfb693cfcf132ef0825b11801b95f913fe61a28a | import setuptools
# Needs twine, setuptools, wheel, tqdm pip installed first
# python setup.py bdist_wheel
# python -m twine upload dist/*
# grab readme and use as documentation dynamically
with open("README.md", "r") as fh:
    long_description = fh.read()

# grab requirements dynamically
with open('requirements.txt') as reqtext:
    requirements = reqtext.read().splitlines()

# package metadata; version is maintained by hand here
setuptools.setup(
    name="splunk-toolbox",
    version="1.2.0",
    scripts=['splunktoolbox'],
    author="Patrick Hastings",
    author_email="phastings@openmobo.com",
    description="A wrapper around the Splunk REST API endpoint",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/gnubyte/splunk-toolbox",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent"
    ],
    install_requires=requirements
)
17,522 | 6cf27509fa3e1eb13bba2c69d931c413cbd9f31a | # -*- coding: utf-8 -*-
'''
Views OA模块视图方法包
@summary: OA模块的view方法
'''
from django.contrib import auth
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from models import *
import time, datetime
from PillarsCGSystem import common
from TimeSheetSys.models import Confirm
def leave(request):
    """Render the leave-request page (OASys/leave.html) with an empty context."""
    return render_to_response('OASys/leave.html', {},
                              context_instance=RequestContext(request))
def leave_archive(request):
    """Render the leave-archive page (OASys/leaveArchive.html) with an empty context."""
    return render_to_response('OASys/leaveArchive.html', {},
                              context_instance=RequestContext(request))
17,523 | 59e265fe911360cd9b8e5569c602ef5cb32d0760 | # FIXME: This is naive and has horrible polynomial complexity - optimize
# before you proceed.
import operator
from dataclasses import dataclass
from typing import Set
ASTEROID = "#"
@dataclass
class Space:
    """The asteroid field: a set of Asteroid objects parsed from a text grid."""
    asteroids: Set["Asteroid"]

    def __init__(self):
        # NOTE(review): this explicit __init__ suppresses the dataclass-
        # generated one, so @dataclass effectively contributes __repr__/__eq__
        self.asteroids = set([])

    @classmethod
    def parse(cls, filename):
        """Build a Space from a grid file where '#' marks an asteroid."""
        space = cls()
        for y, line in enumerate(open(filename).readlines()):
            for x, point in enumerate(line.strip()):
                if point == ASTEROID:
                    space.add_asteroid(Asteroid(x, y, space))
        return space

    def add_asteroid(self, asteroid):
        self.asteroids.add(asteroid)

    def get_asteroids_for_asteroid(self, asteroid):
        """All asteroids except *asteroid* itself."""
        return self.asteroids - {asteroid}

    def get_asteroids_for_line(self, line):
        """All asteroids except the two endpoints of *line*."""
        return self.asteroids - {line.src, line.dst}

    def __iter__(self):
        for asteroid in self.asteroids:
            yield asteroid
@dataclass
class LineOfSight:
    """A sight line between two asteroids, used to test for obstruction."""
    src: "Asteroid"
    dst: "Asteroid"

    @property
    def slope(self):
        """Gradient of the line; inf for a vertical line."""
        try:
            return (self.src.y - self.dst.y) / (self.src.x - self.dst.x)
        except ZeroDivisionError:
            return float("inf")

    def obstructed_by(self, asteroid):
        """True when *asteroid* lies on this segment (endpoints inclusive).

        NOTE(review): the general case compares a float product with ==,
        which is fragile for slopes that are not exactly representable;
        an integer cross-product test would be exact -- confirm before
        relying on this for arbitrary grids.
        """
        if self.slope == float("inf"):
            # vertical line: same x and y between the endpoints
            obstructs = (
                (self.src.y <= asteroid.y and asteroid.y <= self.dst.y)
                or (self.src.y >= asteroid.y and asteroid.y >= self.dst.y)
            ) and asteroid.x == self.src.x
        elif self.slope == 0:
            # horizontal line: same y and x between the endpoints
            obstructs = (
                (self.src.x <= asteroid.x and asteroid.x <= self.dst.x)
                or (self.src.x >= asteroid.x and asteroid.x >= self.dst.x)
            ) and asteroid.y == self.src.y
        else:
            # general case: inside the bounding box AND on the line equation
            obstructs = (
                (
                    (self.src.x <= asteroid.x and asteroid.x <= self.dst.x)
                    or (self.src.x >= asteroid.x and asteroid.x >= self.dst.x)
                )
                and (
                    (self.src.y <= asteroid.y and asteroid.y <= self.dst.y)
                    or (self.src.y >= asteroid.y and asteroid.y >= self.dst.y)
                )
                and (
                    (asteroid.y - self.dst.y)
                    == (self.slope * (asteroid.x - self.dst.x))
                )
            )
        if obstructs:
            print(f"{self} obstructed by {asteroid}")
        return obstructs

    def __str__(self):
        return f"{self.src} -> {self.dst} m={self.slope}"
@dataclass
class Asteroid:
    """An asteroid at grid position (x, y) belonging to a Space."""
    x: int
    y: int
    space: Space

    def __gt__(self, other):
        # ordering by visibility count lets max() pick the best station
        return len(self) > len(other)

    def __len__(self):
        """Number of other asteroids with an unobstructed line of sight."""
        return len(
            [
                line
                for line in (
                    LineOfSight(self, asteroid)
                    for asteroid in self.space.get_asteroids_for_asteroid(self)
                )
                if not any(
                    line.obstructed_by(asteroid)
                    for asteroid in self.space.get_asteroids_for_line(line)
                )
            ]
        )

    def __hash__(self):
        # position alone identifies an asteroid (space is deliberately excluded)
        return hash((self.x, self.y))

    def __str__(self):
        return f"{self.x}, {self.y}"
if __name__ == "__main__":
print(len(max(Space.parse("input.txt"))))
|
17,524 | 4e9433343b30ecde6aff14b2bf3de411f68acaeb | # Generated by Django 3.0.7 on 2020-07-22 16:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: re-declare register.email as a
    plain EmailField with the default max_length of 254."""
    dependencies = [
        ('login', '0011_auto_20200722_2209'),
    ]
    operations = [
        migrations.AlterField(
            model_name='register',
            name='email',
            field=models.EmailField(max_length=254),
        ),
    ]
|
17,525 | 8fec64047693bfa13f29b4bf0507c0322ff4a101 | from GameCards_OriHasin.DeckOfCards import DeckOfCards
from GameCards_OriHasin.Card import Card
class Player:
    """A player in the card game: a name, a money amount and a hand of cards."""
    # Name, Amount, list1 = the player's list of cards; a DeckOfCards is
    # supplied later (setHand) to deal the player's personal hand.
    def __init__(self,Name,Amount,NumOfCards=5):  # constructor
        self.Name=Name
        if self.Name == '':  # reject an empty player name
            raise ValueError("Name can't be empty , Please enter a name of player ")
        self.Amount=Amount
        self.NumOfCards=NumOfCards
        if self.NumOfCards<=0:
            raise ValueError("Player can't be with '0' or negative number of cards")
        self.list1=[]
    def __repr__(self):  # human-readable player details
        return (f'Player Details: {self.Name} , {self.Amount} , {self.list1}')
    def setHand(self,Deck1):  # deal the player's hand from a deck
        if type(Deck1) != DeckOfCards:  # accept only a DeckOfCards object
            raise ValueError("Send only DeckOfCards.")
        for i in range(self.NumOfCards):
            self.list1.append(Deck1.dealOne())
    def getCard(self):  # draw (remove and return) the player's top card
        if len(self.list1)==0:  # cannot draw from an empty hand
            raise ValueError("You can't get card , the deck is empty")
        return self.list1.pop()
    def addCard(self,card):  # add a card to the player's hand
        if type(card)!=Card:  # accept only a Card object
            raise ValueError("Send only a Card")
        self.list1.append(card)
    def reduceAmount(self,amount):  # subtract money from the player
        if type(amount)!=int:  # amount must be an int
            raise ValueError("Send a int number")
        if amount<=0:  # amount must be positive
            raise ValueError("Send only a positive number")
        if self.Amount-amount<0:  # never go negative; clamp at zero
            self.Amount=0
            print("The amount of player is '0'")
            return
        self.Amount-=amount
    def addAmount(self,amount):  # add money to the player
        if type(amount)!=int:  # amount must be an int
            raise ValueError("Send a int number")
        if amount<=0:  # amount must be positive
            raise ValueError("Send only a positive number")
        self.Amount+=amount |
17,526 | be1516cb719ffda35b4d234a22922a25eaf22714 | try:
import ossaudiodev
except:
print "ossaudiodev not installed"
ossaudiodev = None
try:
import FFT
except:
print "FFT not installed"
ossaudiodev = None
try:
import Numeric
except:
print "Numeric not installed"
ossaudiodev = None
import struct, math, time, threading, copy
def add(s1, s2):
    """Mix two 8-bit samples element-wise, clamping to the legal range."""
    return minmax([a + b for (a, b) in zip(s1, s2)])

def minmax(vector):
    """Clamp every value of *vector* into the unsigned-byte range 0..255."""
    clamped = []
    for value in vector:
        clamped.append(min(max(value, 0), 255))
    return clamped

def scale(sample, value):
    """Scale a sample's amplitude about the 128 midpoint by factor *value*."""
    return minmax([(point - 128) * value + 128 for point in sample])

def sine(freqs, seconds, volume = 1.0, sample_rate = 8000.0):
    """Synthesize *seconds* of one or more summed sine tones as an 8-bit sample."""
    count = int(sample_rate * seconds)
    sample = [128] * count
    if type(freqs) == type(0):
        # A single bare frequency is accepted as well as a list of them.
        freqs = [freqs]
    for freq in freqs:
        for n in range(count):
            sample[n] += int(127 * math.sin(n * 2 * math.pi * freq/sample_rate) * volume)
    return minmax(sample)
class SoundThread(threading.Thread):
    """Background thread that drains parent.buffer to the parent's audio device."""
    def __init__(self, parent, name = "sound thread"):
        threading.Thread.__init__(self, name = name)
        self.parent = parent
        # The event doubles as the stop flag (set by join) and as the
        # polling timer inside run().
        self.event = threading.Event()
        self.start()
    def run(self):
        # Poll until join() sets the event: snapshot the pending buffer
        # under the parent's lock, then write it out without holding it.
        while not self.event.isSet():
            self.parent.lock.acquire()
            buffer = copy.copy(self.parent.buffer)
            self.parent.buffer = None
            self.parent.lock.release()
            if buffer != None:
                self.parent.dev.write("".join(map(chr,buffer)))
                self.parent.dev.flush()
            self.event.wait(.001)
    def join(self, timeout=None):
        """Signal the thread to stop, then wait for it to terminate."""
        self.event.set()
        threading.Thread.join(self, timeout)
class SoundDevice:
def __init__(self, device, async = 0, cache = 1):
self.device = device
self.async = async
self.cache = cache
self.cacheDict = {}
self.status = "closed"
self.number_of_channels= 1
self.sample_rate= 8000
self.sample_width= 1
self.minFreq = 20
self.maxFreq = 3500
self.debug = 0
self.buffer = None
if ossaudiodev != None:
self.format = ossaudiodev.AFMT_U8
if self.debug:
self.setFile("770.txt")
if self.async:
self.lock = threading.Lock()
self.thread = SoundThread(self)
def initialize(self, mode):
if ossaudiodev == None: return
self.dev = ossaudiodev.open("/dev/dsp", mode)
self.dev.setparameters(self.format,
self.number_of_channels,
self.sample_rate)
self.status = mode
def play(self, sample):
"""
"""
if ossaudiodev == None: return
if self.status != "w":
self.initialize("w")
if self.async:
self.lock.acquire()
self.buffer = sample
self.lock.release()
else:
self.dev.write("".join(map(chr,sample)))
self.dev.flush()
def playTone(self, freqs, seconds, volume = 1.0):
"""
freq example: playTone([550,400], .1, volume=.5) # middle C for .1 seconds, half volume
"""
if ossaudiodev == None: return
if type(freqs) == type(0):
freqs = [freqs]
if self.status != "w":
self.initialize("w")
sample = [128] * int(self.sample_rate * seconds)
for freq in freqs:
if self.cache and (freq,seconds) in self.cacheDict:
sample = self.cacheDict[(freq,seconds)]
else:
for n in range(len(sample)):
sample[n] = min(max(sample[n] + int(127 * math.sin(n * 2 * math.pi * freq/self.sample_rate) * volume), 0),255)
self.cacheDict[(freq,seconds)] = sample
if self.async:
self.lock.acquire()
self.buffer = sample
self.lock.release()
else:
self.dev.write("".join(map(chr,sample)))
self.dev.flush()
def read(self, seconds):
if ossaudiodev == None: return
if self.status != "r":
self.initialize("r")
buffer = self.dev.read(int(self.sample_rate * seconds))
size = len(buffer)
return struct.unpack(str(size) + "B", buffer)
def setFile(self, filename):
if ossaudiodev == None: return
self.filename = filename
self.fp = open(self.filename, "r")
def readFile(self, seconds):
if ossaudiodev == None: return
data = None
try:
data = eval(self.fp.readline())
except:
self.fp = open(self.filename, "r")
try:
data = eval(self.fp.readline())
except:
print "Failed reading file '%s'" % self.filename
time.sleep(seconds)
return data[:int(seconds * self.sample_rate)]
def getFreq(self, seconds):
# change to read from the buffer, rather than block
if ossaudiodev == None: return
if self.debug:
data = self.readFile(1)
else:
data = self.read(seconds)
transform = FFT.real_fft(data).real
minFreqPos = self.minFreq
maxFreqPos = self.maxFreq
freq = Numeric.argmax(transform[1+minFreqPos:maxFreqPos])
value = transform[1+minFreqPos:maxFreqPos][freq]
domFreq = (freq + self.minFreq) / seconds
if self.debug and abs(value) > 8000 and self.minFreq < domFreq < self.maxFreq:
print "Frequence:", domFreq, "Value:", value, "Volume:", transform[0]
return (domFreq, value, transform[0])
def close(self):
if ossaudiodev == None: return
if self.status != "closed":
self.dev.close()
self.status = "closed"
if __name__ == "__main__":
sd = SoundDevice("/dev/dsp", async = 1)
sd.playTone(500, 1)
## DTMF Tones
## 1209 Hz 1336 Hz 1477 Hz 1633 Hz
## ABC DEF
## 697 Hz 1 2 3 A
## GHI JKL MNO
## 770 Hz 4 5 6 B
## PRS TUV WXY
## 852 Hz 7 8 9 C
## oper
## 941 Hz * 0 # D
|
17,527 | f9b71e15d27453572714be4cbcd1f9caeeb80f0d | from PyQt5 import QtWidgets, uic
import sys
from selenium import webdriver
from google.cloud import translate_v2
import keyboard
from PyQt5.QtCore import Qt, QObject, pyqtSignal
from PyQt5.QtGui import QCursor
from PyQt5.QtWidgets import QAction, QApplication, QMainWindow, QMenu
import pyautogui
import clipboard
import pinyin
import os
import json
import html
f = open('polyphones.json', "r" ,encoding = "Utf-8")
ch_dat = json.load(f)
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = r"C:\Users\jesse\My Documents\LiClipse Workspace\words_parsing\GoogleCloudKey.json"
translate_client = translate_v2.Client()
class KeyBoardManager(QObject):
    """Emits PSignal whenever the system-wide '9' hotkey is pressed."""
    PSignal = pyqtSignal()
    def start(self):
        # suppress=True swallows the keystroke so '9' is not typed into the
        # focused application.
        keyboard.add_hotkey("9", self.PSignal.emit, suppress=True) # put the keyboard shortcuts here
class Replacing(QtWidgets.QMainWindow):
    """Main window: on hotkey, copies the selection, annotates it with
    pinyin/polyphone data, machine-translates zh->en and appends to HTML."""
    def __init__(self):
        super(Replacing, self).__init__()
        uic.loadUi('parsing_utility.ui',self)
        # NOTE(review): KeyBoardManager defines no __init__, so `self` here
        # is presumably consumed by QObject as the Qt parent — confirm.
        manager = KeyBoardManager(self)
        manager.PSignal.connect(self.onClickpb)
        manager.start()
    def onClickpb(self):
        """Hotkey handler: process whatever text is currently selected."""
        # The output file name comes from the UI line edit; append mode.
        html_var = self.lineEdit.text()
        f = open(html_var + ".html","a+",encoding="utf-8")
        pols = ''
        # Simulate Ctrl+C so the user's current selection lands on the clipboard.
        pyautogui.hotkey('ctrl', 'c')
        var_1 = ''
        # Re-join the copied text into one string with all whitespace removed.
        for item in clipboard.paste().split():
            var_1 = var_1 + item
        self.textEdit.setText(var_1)
        # Collect polyphone readings for each character present in the table.
        for item in var_1:
            if str(ch_dat.get(item)) == "None":
                pass
            else:
                pols = pols + item + "," + str(ch_dat.get(item))
        self.textEdit_2.setText(pinyin.get(var_1,delimiter="") + " " + pols)
        # Machine-translate Chinese -> English via Google Cloud Translate.
        output = translate_client.translate(var_1, target_language="EN",source_language="Zh")
        self.textEdit_3.setText(html.unescape(output.get("translatedText")))
        f.write(pinyin.get(var_1,delimiter="") + " " + pols+ "</br>")
        f.write(html.unescape(output.get("translatedText"))+ "</br>")
        f.write(clipboard.paste() + "</br>")
        f.close()
app = QtWidgets.QApplication([])
win = Replacing()
win.show()
sys.exit(app.exec()) |
17,528 | 1a3853ebea55b7d63fd3eb4d437e72e33f8cf4c4 | # -*- coding: utf-8 -*-
"""
Created on Tue May 10 13:22:34 2016
@author: muss
"""
import os
def prpy(basedir):
    """Walk *basedir* and print the full path of every .py file found.

    For any file named ``testwalk.py`` the file's contents are also
    printed, each line prefixed with '--> '.
    """
    for root, dirs, files in os.walk(basedir):
        for fname in files:
            if fname.endswith('.py'):
                path = os.path.join(root, fname)
                print(path)
                if fname == 'testwalk.py':
                    # Bug fix: open via the full path; the original opened
                    # the bare file name, which only worked when the current
                    # working directory happened to contain the file.
                    with open(path) as tw:
                        for line in tw:
                            print('--> {}'.format(line))
def main():
    """Entry point: walk a hard-coded demo directory tree."""
    prpy('c:/devl/projects/python')
if __name__ == '__main__':
    main()
|
17,529 | dfdd0fd43eaf9f0d50425b9eb63823d43900fd7a | """
Determine characteristics of text to speech data,
and determine if optimal.
"""
# Imports
import numpy as np
import soundfile as sf
# Definitions
char_per_sec_optimal = 20 # optimal speed
silence_threshold = 0.01 # threshold to be considered silence
sound_filename = input("Enter the (.wav) file of sound: ") # get sound
sound, samp_rate = sf.read(sound_filename) # open sound
len_sound = len(sound) # get length of sound
time_sound = len_sound / samp_rate # get duration of sound
print("The sound is {0} seconds long.".format(time_sound))
text_filename = input("Enter the (.txt) file of text: ") # get text
with open(text_filename, "r") as f:
text = f.read() # read file
len_text = len(text) # get text length
print("There are {0} characters in the text file.".format(len_text))
# Characters optimal
char_per_sec = len_text / time_sound # get character speed
print("The characters per second speed is {0} char/s.".format(char_per_sec))
print("The optimal speed is {0} char/s.".format(char_per_sec_optimal))
pad_char = (char_per_sec_optimal - char_per_sec) * time_sound # get number of characters to add or remove
if pad_char > 0: # if add characters
print("You should add {0} characters to be optimal.".format(pad_char))
elif pad_char < 0: # if remove characters
print("You should remove {0} characters to be optimal.".format(-pad_char))
# Silence ratio
try: # get number of streams
streams = len(sound[0])
silence_data = np.amax(np.abs(sound), axis=-1) # get max value in streams
except Exception:
silence_data = np.abs(sound) # only one stream.
len_silence_data = len(silence_data) # get length
noise_count = 0
silence_count = 0
for i_sound in range(len_silence_data): # cycle through sound
if silence_data[i_sound] < silence_threshold: # if silence
silence_count += 1 # increment silence count
else: # if noise
noise_count += 1 # increment noise count
percent_noise = noise_count / len_silence_data
percent_silence = silence_count / len_silence_data
print("Sound file contains {0}% of noise above {1} and {2}% silence.".format(percent_noise,
silence_threshold,
percent_silence))
# Silence ratio (Text)
whitespace_count = 0
character_count = 0
for c in text: # cycle through characters
if c.isspace():
whitespace_count += 1 # increment whitespace count
else:
character_count += 1 # increment character count
percent_whitespace = whitespace_count / len_text # get percent whitespaces
percent_character = character_count / len_text # get percent characters
print("Text file contains {0}% characters and {1}% whitespaces.".format(percent_character, percent_whitespace))
optimal_len_text = len_text + pad_char
optimal_characters = percent_noise * optimal_len_text # get optimal characters
optimal_whitespaces = percent_silence * optimal_len_text # get optimal white spaces
diff_characters = optimal_characters - character_count # get difference
diff_whitespaces = optimal_whitespaces - whitespace_count # get difference
# Recommend padding
if diff_characters > 0:
print("You should add {0} alpha-numeric characters.".format(diff_characters))
elif diff_characters < 0:
print("You should remove {0} alpha-numeric characters.".format(abs(diff_characters)))
else:
print("You should not change the number of alpha-numeric characters.")
if diff_whitespaces > 0:
print("You should add {0} whitespaces.".format(diff_whitespaces))
elif diff_whitespaces < 0:
print("You should remove {0} whitespaces.".format(abs(diff_whitespaces)))
else:
print("You should not change the number of whitespaces.")
|
17,530 | 6731b01708e6ea8e5df37e9443f0e004f9f1c517 | import datetime
from odyssey import db, app
from odyssey.v1.common.constants import VENDOR_MASTER, COUNTRIES_MASTER, CITIES_MASTER
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import URLSafeSerializer
login_serializer = URLSafeSerializer(app.secret_key)
class VendorMaster(db.Model):
__tablename__ = VENDOR_MASTER
__bind_key__ = 'base_db'
id = db.Column(db.String, primary_key=True)
company_name = db.Column(db.String)
person_name = db.Column(db.String)
company_country_code = db.Column(db.String)
person_country_code = db.Column(db.String)
designation = db.Column(db.String)
person_contact = db.Column(db.String)
country = db.Column(db.String)
city = db.Column(db.String)
address_1 = db.Column(db.String)
address_2 = db.Column(db.String)
weekday_hrs = db.Column(db.String)
weekend_hrs = db.Column(db.String)
twitter_url = db.Column(db.String)
facebook_url = db.Column(db.String)
instagram_url = db.Column(db.String)
linkedin_url = db.Column(db.String)
gplus_url = db.Column(db.String)
mobile = db.Column(db.String)
auth_token = db.Column(db.String)
logo_url = db.Column(db.String)
website_url = db.Column(db.String)
description = db.Column(db.String)
email = db.Column(db.String)
is_email_verified = db.Column(db.Boolean,default=False)
is_deleted = db.Column(db.Boolean,default=False)
password = db.Column(db.String)
created_on = db.Column(db.DateTime, default=datetime.datetime.utcnow())
activated_on = db.Column(db.DateTime)
def __init__(self, *args, **kwargs):
self.id = kwargs.get('id')
self.company_name = kwargs.get('name')
self.country = kwargs.get('country')
self.city = kwargs.get('city')
self.mobile = kwargs.get('mobile')
self.website_url = kwargs.get('website_url')
self.company_country_code = kwargs.get('country_code')
self.email = kwargs.get('email')
self.hash_password(kwargs.get('password'))
@property
def contract_serialize(self):
return {
"v_id":self.id,
"v_name":self.company_name,
"logo_url":self.logo_url
}
@property
def dashboard_serialize(self):
return {
"id":self.id,
"about":self.description,
"logo_url":self.logo_url,
"mobile":self.mobile,
"country_code":self.company_country_code,
"email":self.email,
"weekend_operating_hrs":self.weekend_hrs,
"weekday_operating_hrs":self.weekday_hrs,
"website_ur;":self.website_url,
"address_line_1":self.address_1,
"address_line_2":self.address_2,
"facebook_url":self.facebook_url,
"twitter_url":self.twitter_url,
"person_name":self.person_name,
"person_mobile":self.person_contact,
"person_country_code":self.person_country_code,
"gplus_url":self.gplus_url,
"linkedin_url":self.linkedin_url,
"insta_url":self.instagram_url,
"company_name":self.company_name,
"country":self.country,
"city":self.city,
"designation":self.designation,
}
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def get_auth_token(self):
data = [self.password, self.email]
return login_serializer.dumps(data)
def hash_password(self, password):
self.password = pwd_context.encrypt(password)
def get_default_password(self):
return str(self.email).strip().lower().split('@')[0]
def verify_password(self, password):
try:
return pwd_context.verify(password, self.password)
except TypeError:
return False
except ValueError:
return False |
17,531 | a64f6eaa26ff0cad7f5197ab987185ee2bd1e5ed | import unittest
from urllib.parse import quote
from DateTime import DateTime
from ZTUtils.Zope import complex_marshal
from ZTUtils.Zope import make_hidden_input
from ZTUtils.Zope import make_query
from ZTUtils.Zope import simple_marshal
class QueryTests(unittest.TestCase):
def testMarshalString(self):
self.assertEqual(simple_marshal('string'), '')
def testMarshalBool(self):
self.assertEqual(simple_marshal(True), ':boolean')
def testMarshalInt(self):
self.assertEqual(simple_marshal(42), ":int")
def testMarshalFloat(self):
self.assertEqual(simple_marshal(3.1415), ":float")
def testMarshalDate(self):
self.assertEqual(simple_marshal(DateTime()), ":date")
def testMarshalUnicode(self):
arg_type = ''
self.assertEqual(simple_marshal('unic\xF3de'), arg_type)
def testMarshallLists(self):
'''Test marshalling lists'''
test_date = DateTime()
list_ = [1, test_date, 'str', 'unic\xF3de']
result = complex_marshal([('list', list_), ])
arg4_type = ':list'
self.assertEqual(result,
[('list', ':int:list', 1),
('list', ':date:list', test_date),
('list', ':list', 'str'),
('list', arg4_type, 'unic\xF3de')])
def testMarshallRecords(self):
'''Test marshalling records'''
test_date = DateTime()
record = {
'arg1': 1, 'arg2': test_date,
'arg3': 'str', 'arg4': 'unic\xF3de',
}
result = complex_marshal([('record', record), ])
arg4_type = ':record'
self.assertEqual(
set(result),
{('record.arg1', ':int:record', 1),
('record.arg2', ':date:record', test_date),
('record.arg3', ':record', 'str'),
('record.arg4', arg4_type, 'unic\xF3de')})
def testMarshallListsInRecords(self):
'''Test marshalling lists inside of records'''
test_date = DateTime()
record = {'arg1': [1, test_date, 'str', 'unic\xF3de'], 'arg2': 1}
result = complex_marshal([('record', record), ])
arg1_type = ':list:record'
self.assertEqual(
set(result),
{('record.arg1', ':int:list:record', 1),
('record.arg1', ':date:list:record', test_date),
('record.arg1', ':list:record', 'str'),
('record.arg1', arg1_type, 'unic\xF3de'),
('record.arg2', ':int:record', 1)})
def testMakeComplexQuery(self):
'''Test that make_query returns sane results'''
test_date = DateTime()
quote_date = quote(str(test_date))
record = {'arg1': [1, test_date, 'str'], 'arg2': 1}
list_ = [1, test_date, 'str']
int_ = 1
str_ = 'str'
query = make_query(date=test_date, integer=int_, listing=list_,
record=record, string=str_)
self.assertEqual(
set(query.split('&')),
{
'date:date=%s' % quote_date,
'integer:int=1',
'listing:int:list=1',
'listing:date:list=%s' % quote_date,
'listing:list=str',
'string=str',
'record.arg1:int:list:record=1',
'record.arg1:date:list:record=%s' % quote_date,
'record.arg1:list:record=str',
'record.arg2:int:record=1',
})
def testMakeQueryUnicode(self):
''' Test makequery against Github issue 15
https://github.com/zopefoundation/Zope/issues/15
'''
query = make_query(search_text='unic\xF3de')
arg_type = 'search_text='
self.assertEqual(arg_type + 'unic%C3%B3de', query)
def testMakeHiddenInput(self):
tag = make_hidden_input(foo='bar')
self.assertEqual(tag, '<input type="hidden" name="foo" value="bar">')
tag = make_hidden_input(foo=1)
self.assertEqual(tag, '<input type="hidden" name="foo:int" value="1">')
# Escaping
tag = make_hidden_input(foo='bar & baz')
self.assertEqual(
tag, '<input type="hidden" name="foo" value="bar & baz">')
tag = make_hidden_input(foo='<bar>')
self.assertEqual(
tag, '<input type="hidden" name="foo" value="<bar>">')
tag = make_hidden_input(foo='"bar"')
self.assertEqual(
tag, '<input type="hidden" name="foo" value=""bar"">')
|
17,532 | 72e967133e51c5807356c8a8589767fb54e791c9 | import os
import Migration as db
from dotenv import load_dotenv
# Load environment
load_dotenv()
ENV = os.getenv("ENV")
DATA_DIR = os.getenv("DATA_DIR")
db_host = os.getenv("DB_HOST")
db_user = os.getenv("DB_USER")
db_pass = os.getenv("DB_PASS")
db_name = os.getenv("DB_NAME")
csv_dir = DATA_DIR+'/'+ENV
csv_files = os.listdir(csv_dir)
if len(csv_files) > 0:
# Instantiate Database for Migration
migration = db.Migration(db_host, db_user, db_pass, db_name)
# Iterate csv files for migration
[migration.execute(table_name, csv_dir) for table_name in csv_files]
# Close connection after insertion
migration.db_close()
|
17,533 | b9c19f9a966eb1b5a45ff167a07859630e5101d7 | from django.shortcuts import render
from .models import Codes, Question, Answer
from django.contrib import messages
from datetime import datetime
# Create your views here.
def index(request):
    """Advent-style reveal view: unlock characters of a hidden code by
    answering questions.

    Before the reveal date a placeholder page is served.  After it, each
    correct POSTed answer marks the question solved and increments
    Codes.sub, exposing one more character of the code.
    """
    show = datetime(2019, 12, 25)  # hard-coded reveal date
    today = datetime.today()
    if show < today:
        c = Codes.objects.get(pk=1)  # assumes a single Codes row with pk=1
        code = c.code[:c.sub]  # only the first `sub` characters are revealed
        allAns = len(Answer.objects.all())
        corAns = len(Answer.objects.filter(correct=True))
        # The next unanswered question, if any remain.
        try:
            ans = Answer.objects.filter(correct=False)[:1].get()
        except Answer.DoesNotExist:
            ans = None
        if ans:
            if request.method == "POST":
                i = request.POST.get("answer")
                if ans.title == i:  # exact, case-sensitive comparison
                    ans.correct = True
                    ans.save()
                    # Reveal one more character of the code.
                    sub = c.sub
                    c.sub = sub+1
                    c.save()
                    messages.success(request, 'That answer was correct! Good job :)')
                else:
                    messages.warning(request, 'Oops! Nice try, but not quite')
        else:
            messages.success(request, "Awesome work! You got all the questions right!")
        context = {'code': code, 'ans': ans, 'allAns': allAns, 'corAns': corAns}
        return render(request, 'reveal/index.html', context)
    else:
        return render(request, 'reveal/notready.html', {})
|
17,534 | 15f567360d119ba16415f2552254276833fba5db | print 4 + 4 |
17,535 | 2f8f395fbbcee0ab31b97a79d59cf75e22e1e07a | from enum import Enum
class PieceSide(Enum):
WHITE = 0
BLACK = 1 |
17,536 | f532270c4dd615eb9c6841b5d2ad0bacad859975 | #
# Copyright 2019 Altran. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
from nameko.events import EventDispatcher, event_handler, EventHandler
from nameko.rpc import rpc
import json
from nameko.standalone.rpc import ClusterRpcProxy
import sys, os
sys.path.insert(0, os.getcwd() + "../util/")
from util.elastic_search_client import get_request, post_request
from util.gphmodels import GphDB, Component, WorkLoad, Policy, Cluster, Alarm
class K8sEventHandlerService:
    """ Service to handle K8s events and trigger DB updates. """
    name = "K8sEventHandlerService"
    CONFIG = {'AMQP_URI' : "amqp://guest:guest@localhost:5672"}

    def __init__(self):
        print("K8sEventHandlerService object created")

    #@rpc
    @event_handler("NatsEventSubscriberService", "Event_K8SEvent")
    def processEvent(self, payload):
        """Persist each received Kubernetes event into the 'events' index."""
        sys.stdout.flush()
        events = payload
        for event in events:
            # Store the raw event document in Elasticsearch.
            res = post_request('events', json.dumps(event))
            # Bug fix: the original called an undefined name ``logger`` here,
            # which raised NameError on the first event received.
            print("Recd. SM-Item --> id[%s],[resource:%s],[name:%s]"
                  % (event["id"], event["resource"], event["name"]))

    #@rpc
    @event_handler("NatsEventSubscriberService", "Event_K8SAlarm")
    def processAlarm(self, payload):
        """Update the graph DB and the 'alarms' index for each received alarm."""
        try:
            alarms = payload
            if(len(alarms) < 1):
                print("Alarm list empty")
            for alarm in alarms:
                someObjName = alarm.get("objectName")
                alarmClass = alarm.get("criticality")
                alarmText = alarm.get("alarmDescription")
                alarmState = "ACTIVE"
                GphDB.updateAlarmForObject(someObjName, alarmClass, alarmText, alarmState)
                post_request('alarms', json.dumps(alarm))
                print("Alarm : %s :: %s" %(someObjName, alarmText))
        except Exception as e:
            # Report the failure instead of swallowing it silently.
            print("processAlarm failed: %s" % e)
            sys.stdout.flush()

    def __objectCreated(self, payload):
        'Do Something'

    def __objectDeleted(self, payload):
        'Do Something'

    def __objectUpdated(self, payload):
        'Do Something'

    def __raiseAlarm(self, payload):
        'Do Something'

    def __cancelAlarm(self, payload):
        'Do Something'
from nameko.containers import ServiceContainer
container = ServiceContainer(K8sEventHandlerService, config=K8sEventHandlerService.CONFIG)
service_extensions = list(container.extensions)
container.start()
|
17,537 | e7840aa70fa320583328df81c0b43e358cf73e88 | import smtplib
SERVER = ('smtp.gmail.com')
FROM = "sender@gmail.com"
password="senderpassword"
TO = ["receiveer@gmail.com"] # must be a list
SUBJECT = "Suspicious IP Address Detected "
# Taking ip From result.text file
f=open("/root/result.text", "r")
ip=f.read()
TEXT = (" The Suspicious IP Address is : {}".format(ip))
# Prepare actual message
message = """\
From: %s
To: %s
Subject: %s
%s
""" % (FROM, ", ".join(TO), SUBJECT, TEXT)
# Send the mail
server = smtplib.SMTP(SERVER)
server.starttls()
server.login(FROM, password)
server.sendmail(FROM, TO, message)
server.quit() |
17,538 | e3c698197b162fd7b28a00c3af5b701a22ffeac8 | def change(arr, num1, num2):
ar = []
for i in arr:
ar += [i]
ar[num1], ar[num2] = ar[num2], ar[num1]
arr = ''.join(ar)
return arr
def dp(depth, number):
    """Maximum value reachable from digit-string *number* using exactly
    K pairwise digit swaps.

    Depth-first search over all index pairs, memoized per depth.  Relies
    on the module globals set up by the driver loop below: K (number of
    swaps), cl (all index pairs), cache (one memo dict per depth).
    Returns the string itself at depth K, an int otherwise.
    """
    if depth == K:  # all swaps used: this arrangement is a candidate
        return number
    if cache[depth].get(number):
        # Memo hit for this number at this depth.
        # NOTE(review): a cached result of 0 is falsy and would be
        # recomputed by this check.
        # cache[depth][number]
        return cache[depth][number]
    res = 0
    for [x, y] in cl:  # try swapping every pair of digit positions
        num = change(number, x, y)
        res = max(res, int(dp(depth+1, num)))
    cache[depth][number] = res
    return res
T = int(input())
for t in range(T):
n, K = input().split()
K = int(K)
cache = [{} for i in range(K+1)]
cl = []
for i in range(len(n)):
for j in range(i+1, len(n)):
cl.append([i, j])
print(f'#{t+1} {dp(0,n)}') |
17,539 | 5264b162e7a3d551194aa282f8cd02b409da1f55 | from rest_framework.response import Response
from rest_framework.views import status
def validate_create_data(fn):
    """Decorator for DRF view methods: reject task creation with HTTP 400
    when the request payload carries no 'title'."""
    def decorated(*args, **kwargs):
        # args[0] is the view instance; its request holds the parsed payload.
        title = args[0].request.data.get("title", "")
        if not title :
            return Response(
                data={
                    "message": "'title' are required to add a task"
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        return fn(*args, **kwargs)
    return decorated
def validate_update_data(fn):
    """Decorator for DRF view methods: a task update must carry 'title'
    and/or 'completed', and 'completed' must be a True/False string."""
    def decorated(*args, **kwargs):
        # args[0] is the view instance; its request holds the parsed payload.
        title = args[0].request.data.get("title", "")
        completed = args[0].request.data.get("completed", None)
        if not title and completed is None:
            # Neither field supplied: nothing to update.
            return Response(
                data={
                    "message": "'title' or 'completed' are required to add a task"
                },
                status=status.HTTP_400_BAD_REQUEST
            )
        elif not completed is None:
            # 'completed' arrives as a string form value, not a bool.
            if not completed in ['True', 'False', 'true', 'false']:
                return Response(
                    data={
                        "message": "'completed' only accept 'True' or 'False'"
                    },
                    status=status.HTTP_400_BAD_REQUEST
                )
        return fn(*args, **kwargs)
    return decorated |
17,540 | b68679bb00d32dbb1a565a73975bfb0e41775b20 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
#
import unittest
from bes.match.matcher_re import matcher_re
class Testmatcher_re(unittest.TestCase):
  """Tests for matcher_re's case-insensitive matching."""

  # Bug fix: the original defined this method TWICE with the same name;
  # the second, identical definition silently shadowed the first, so only
  # one copy was ever collected.  The duplicate has been removed.
  def test_re_matcher_case(self):
    m = matcher_re('^.*something.*$', ignore_case = True)
    self.assertTrue( m.match('SOMETHING') )
    self.assertTrue( m.match('fooSOMETHING') )
    self.assertTrue( m.match('SOMETHINGbar') )
    self.assertFalse( m.match('SOMETHIN') )
if __name__ == "__main__":
unittest.main()
|
17,541 | 5f1dca36dd4077ac9bca129b511ec18da559497c | import unittest
from hanabi_ai.model.game_info import GameInfo
class GameInfoTests(unittest.TestCase):
    """Unit tests for GameInfo's discard/disclose/safety/turn helpers."""
    def setUp(self):
        # a simulated game state 7 turns in
        self.info_player_0 = GameInfo()
        self.info_player_0.score = 2
        self.info_player_0.deck_size = 36
        self.info_player_0.discarded = ['G1', 'Y1']
        self.info_player_0.disclosures = 5
        self.info_player_0.mistakes_left = 5
        self.info_player_0.num_players = 2
        # Bug fix: the original assignment ended with a stray trailing
        # comma, which silently made `hands` a 1-tuple wrapping the list
        # instead of the list itself.
        self.info_player_0.hands = [
            ['??', '??', '??', '??', '??'],
            ['R3', 'R2', 'W3', 'B5', 'G4']
        ]
        self.info_player_0.known_info = [
            ['??', '??', '??', '??', '?1'],
            ['??', '??', '??', '??', '??']
        ]
        self.info_player_0.scored_cards = {
            'R': 0,
            'B': 0,
            'G': 0,
            'Y': 1,
            'W': 1
        }
        self.info_player_0.history = []
    def test_can_discard(self):
        self.assertTrue(self.info_player_0.can_discard())
    def test_cannot_discard(self):
        self.info_player_0.disclosures = 8
        self.assertFalse(self.info_player_0.can_discard())
    def test_can_disclose(self):
        self.assertTrue(self.info_player_0.can_disclose())
    def test_cannot_disclose(self):
        self.info_player_0.disclosures = 0
        self.assertFalse(self.info_player_0.can_disclose())
    def test_is_safe(self):
        self.assertTrue(self.info_player_0.is_safe('W2'))
    def test_is_not_safe(self):
        self.assertFalse(self.info_player_0.is_safe('W3'))
    def test_next_player(self):
        self.assertEqual(1, self.info_player_0.next_player(0))
        self.assertEqual(0, self.info_player_0.next_player(1))
    def test_next_player_more_players(self):
        self.info_player_0.num_players = 4
        self.assertEqual(1, self.info_player_0.next_player(0))
        self.assertEqual(2, self.info_player_0.next_player(1))
        self.assertEqual(3, self.info_player_0.next_player(2))
        self.assertEqual(0, self.info_player_0.next_player(3))
17,542 | 4f0cbca47b2fdf9e2b7db405f68b708c3dac9c0f | # Find the Union and Intersection of the two sorted arrays.
a = [1, 2, 3, 4]
b = [2, 3, 4, 5]
# answer: size of the union, then of the intersection.
# Bug fix: the file's header comment asks for both the union AND the
# intersection, but the original only printed the union size.
union = set(a) | set(b)
intersection = set(a) & set(b)
print(len(union))
print(len(intersection))
|
17,543 | bfd6d2d6bf776d3547da79c457e8e5bbc76a0c13 | from base.instance import Instance
"""
Parse Input/Output in DIMACS format.
"""
def __encode_literal(x):
    """Map a DIMACS literal to a 0-based index: v -> 2(v-1), -v -> 2(v-1)+1."""
    if x > 0:
        return (x - 1) * 2
    return (-x - 1) * 2 + 1
def __parse_clause(line):
    """
    Converting a clause to an array of literals.

    A DIMACS clause is a whitespace-separated list of ints terminated by 0;
    the terminator is validated and stripped, and each remaining literal
    is encoded with __encode_literal.
    """
    literals = [int(x) for x in line.split()]
    if literals[-1] != 0:
        raise Exception("Parsing error: All clauses must end with 0.")
    literals = [__encode_literal(x) for x in literals[:-1]]
    return literals
def parse_program(program):
    """
    Parse a program (of type string) and return an Instance.

    Skips leading comment lines, reads the problem header line
    ("p cnf <var_count> <clause_count>"), then parses exactly
    clause_count clause lines that follow it.
    """
    # TODO: Check number of vars and clauses to see if they match
    lines = program.split("\n")
    # Ignore comments at the beginning of the file:
    start_i = 0
    while lines[start_i][0] != "p":
        start_i += 1
    lines = lines[start_i:]
    # Header tokens 2 and 3 are the variable and clause counts.
    var_count, clause_count = lines[0].split()[2:4]
    var_count, clause_count = int(var_count), int(clause_count)
    variables = list(range(var_count))
    clauses = []
    for i in range(1, clause_count + 1):
        clauses.append(__parse_clause(lines[i]))
    return Instance(variables, clauses)
def decode_assignment(assignment):
    """Render a boolean assignment as DIMACS literals: True -> v, False -> -v."""
    literals = []
    for index, value in enumerate(assignment, start=1):
        literals.append(str(index if value else -index))
    return " ".join(literals)
|
17,544 | 94e24cbd0ca3a5c24f03a2dff26d27f4a47883cc | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'models_test.views.home', name='home'),
# url(r'^models_test/', include('models_test.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^$', 'models_app.views.index', name='index'),
url(r'^login/$','models_app.views.auth_login', name='auth_login'),
url(r'^logout/$', 'models_app.views.auth_logout', name='auth_logout'),
url(r'^admin/', include(admin.site.urls)),
url(r'^logup/$', 'models_app.views.logup', name='logup'),
url(r'^success/(?P<message>[\w]+)/$', 'models_app.views.success', name='success'),
)
|
17,545 | 7c3f250a47d3e8812303b70764388f13934807e3 | # coding:UTF-8
from scapy.all import *
from scapy.layers.inet import *
from scapy.layers.inet6 import *
from scapy.layers.l2 import Ether
class PcapDecode:
    """Decode scapy packets into flat summary dicts.

    Every decode method returns a dict with the keys:
    time / Source / Destination / Procotol / len / info.
    (NB: "Procotol" is a misspelling of "Protocol", but it is a runtime
    dict key consumed by callers, so it is kept for compatibility.)

    Number->name lookup tables are loaded from text files under
    ./protocol, one "number:name" pair per line.

    Refactor: the original repeated the same file-loading loop five times
    in __init__ and assembled the same summary dict in eight places; both
    are factored into helpers with identical behavior.
    """

    @staticmethod
    def _load_table(path):
        """Load one "number:name" mapping file into a dict (int -> str)."""
        table = dict()
        with open(path, 'r', encoding='UTF-8') as f:
            for line in f.readlines():
                line = line.strip().strip('\n').strip('\r').strip('\r\n')
                table[int(line.split(':')[0])] = line.split(':')[1]
        return table

    def __init__(self):
        # One lookup table per layer (same files and order as before).
        self.ETHER_DICT = self._load_table('./protocol/ETHER')  # ethertype -> name
        self.IP_DICT = self._load_table('./protocol/IP')        # IP proto number -> name
        self.PORT_DICT = self._load_table('./protocol/PORT')    # well-known app-layer ports
        self.TCP_DICT = self._load_table('./protocol/TCP')      # TCP ports
        self.UDP_DICT = self._load_table('./protocol/UDP')      # UDP ports

    @staticmethod
    def _base_info(p, source, destination, protocol):
        """Build the common summary dict for packet *p* (key order preserved)."""
        data = dict()
        data['time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(p.time))
        data['Source'] = source
        data['Destination'] = destination
        data['Procotol'] = protocol  # misspelled key kept for compatibility
        data['len'] = len(corrupt_bytes(p))
        data['info'] = p.summary()
        return data

    # 解析以太网层协议 — decode the Ethernet layer
    def ether_decode(self, p):
        """Decode from the Ethernet layer down; non-Ethernet frames get 'Unknow' placeholders."""
        if p.haslayer(Ether):
            return self.ip_decode(p)
        return self._base_info(p, 'Unknow', 'Unknow', 'Unknow')

    # 解析IP层协议 — decode the network layer
    def ip_decode(self, p):
        """Decode IPv4/IPv6 and dispatch to the TCP/UDP decoders."""
        if p.haslayer(IP):  # 2048: Internet IP (IPv4)
            ip = p.getlayer(IP)
            if p.haslayer(TCP):  # 6: TCP
                return self.tcp_decode(p, ip)
            if p.haslayer(UDP):  # 17: UDP
                return self.udp_decode(p, ip)
            proto = self.IP_DICT[ip.proto] if ip.proto in self.IP_DICT else 'IPv4'
            return self._base_info(p, ip.src, ip.dst, proto)
        if p.haslayer(IPv6):  # 34525: IPv6
            ipv6 = p.getlayer(IPv6)
            if p.haslayer(TCP):  # 6: TCP
                return self.tcp_decode(p, ipv6)
            if p.haslayer(UDP):  # 17: UDP
                return self.udp_decode(p, ipv6)
            proto = self.IP_DICT[ipv6.nh] if ipv6.nh in self.IP_DICT else 'IPv6'
            return self._base_info(p, ipv6.src, ipv6.dst, proto)
        # Not an IP packet: report by ethertype, hex-encoded when unknown.
        proto = self.ETHER_DICT[p.type] if p.type in self.ETHER_DICT else hex(p.type)
        return self._base_info(p, p.src, p.dst, proto)

    def _port_protocol(self, sport, dport, layer_dict, fallback):
        """Resolve a transport protocol name from ports.

        Lookup order matches the original: destination port first in the
        application-port table, then source port, then the layer-specific
        table, finally the plain layer name.
        """
        if dport in self.PORT_DICT:
            return self.PORT_DICT[dport]
        if sport in self.PORT_DICT:
            return self.PORT_DICT[sport]
        if dport in layer_dict:
            return layer_dict[dport]
        if sport in layer_dict:
            return layer_dict[sport]
        return fallback

    # 解析TCP层协议 — decode the TCP layer
    def tcp_decode(self, p, ip):
        """Decode a TCP segment carried by *ip* (an IPv4 or IPv6 layer)."""
        tcp = p.getlayer(TCP)
        data = dict()
        data['time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(p.time))
        data['Source'] = ip.src + ":" + str(ip.sport)
        data['Destination'] = ip.dst + ":" + str(ip.dport)
        data['len'] = len(corrupt_bytes(p))
        data['info'] = p.summary()
        # 'Procotol' inserted last here, matching the original key order.
        data['Procotol'] = self._port_protocol(tcp.sport, tcp.dport, self.TCP_DICT, "TCP")
        return data

    # 解析UDP层协议 — decode the UDP layer
    def udp_decode(self, p, ip):
        """Decode a UDP datagram carried by *ip* (an IPv4 or IPv6 layer)."""
        udp = p.getlayer(UDP)
        data = dict()
        data['time'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(p.time))
        data['Source'] = ip.src + ":" + str(ip.sport)
        data['Destination'] = ip.dst + ":" + str(ip.dport)
        data['len'] = len(corrupt_bytes(p))
        data['info'] = p.summary()
        data['Procotol'] = self._port_protocol(udp.sport, udp.dport, self.UDP_DICT, "UDP")
        return data
|
17,546 | 67001c753877239108d82eab4ad7aa8d0d9577b0 | #! /usr/bin/env python3
"""Solves problem 005 from the Project Euler website"""
from common.prime import prime_factors
def solve():
    """Solve Project Euler problem 5.

    Returns the smallest positive number evenly divisible by every integer
    from 1 to 20 (232792560).

    Fixes over the original: the accumulator dict shadowed the builtin
    ``map``, and the loops stopped at 19 even though the problem asks for
    1..20 (the answer was coincidentally unaffected). The hand-rolled
    prime-factor bookkeeping is replaced by a standard-library LCM fold.
    """
    import math
    from functools import reduce
    # lcm(a, b) == a * b // gcd(a, b); fold it across the whole range.
    return reduce(lambda acc, n: acc * n // math.gcd(acc, n), range(1, 21), 1)


if __name__ == '__main__':
    print(solve())
|
17,547 | 3604c4392f25f076aaadcc3d471e53c1827bf1cc |
def partition(arr: list[int], low: int, high: int):
    """Lomuto partition of arr[low..high] around the last element.

    Rearranges the slice in place so every element <= the pivot ends up
    left of the returned index and every greater element right of it.
    """
    pivot = arr[high]
    boundary = low  # next slot for an element that is <= pivot
    for scan in range(low, high):
        if arr[scan] <= pivot:
            arr[boundary], arr[scan] = arr[scan], arr[boundary]
            boundary += 1
    # Drop the pivot into its final sorted position.
    arr[boundary], arr[high] = arr[high], arr[boundary]
    return boundary
def quick_sort(arr, low, high):
    """Sort arr[low..high] (inclusive bounds) in place via quicksort."""
    if low >= high:
        return  # zero or one element: nothing to do
    split = partition(arr, low, high)
    quick_sort(arr, low, split - 1)   # left side (<= pivot)
    quick_sort(arr, split + 1, high)  # right side (> pivot)
# Quick demo: sort a sample list in place and print it before and after.
test_quick_sort = [5, 3, 2, 10, 7, 1, 4, 8]
print(f"Array {test_quick_sort} before quick sort")
quick_sort(test_quick_sort, low=0, high=len(test_quick_sort)-1)
print(f"Array {test_quick_sort} after quick sort")
|
17,548 | a73b472c87a097d69bdc06095ac6c68b80520433 | # now here we have created a package named "math" with 2 - files
# which will run in any program and go passed via __init.py__ file
#__init__.py file is always mecessary while creating a package in python
__all__ = ['simple','complex'] |
17,549 | 94f86012087d946df063b6438fd723bba6642e32 | from django import forms
from django.contrib.auth.forms import (
AuthenticationForm, UserCreationForm
)
from django.contrib.auth import get_user_model
from .models import User, Student, Society
# NOTE(review): these three rebindings are no-ops — the names were just
# imported above. Confirm nothing depends on them, then remove.
User = User
Student = Student
Society = Society
class LoginForm(AuthenticationForm):
    """Login form.

    Adds Bootstrap's ``form-control`` class and a label-based placeholder
    to every field widget.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs['class'] = 'form-control'
            field.widget.attrs['placeholder'] = field.label  # use the field label as placeholder text
class UserCreateForm(UserCreationForm):
    """User registration form (Bootstrap-styled, email-only)."""
    class Meta:
        model = User
        fields = ('email',)
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs['class'] = 'form-control'
    def clean_email(self):
        # Purge stale never-activated accounts so the address can be reused.
        email = self.cleaned_data['email']
        User.objects.filter(email=email, is_active=False).delete()
        return email
class StudentCreateForm(UserCreationForm):
    """Student registration form (Bootstrap-styled, email-only)."""
    class Meta:
        model = Student
        fields = ('email',)
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs['class'] = 'form-control'
    def clean_email(self):
        email = self.cleaned_data['email']
        # NOTE(review): this filters User, not Student — confirm this is
        # intentional (copy-paste suspect; Student may extend User).
        User.objects.filter(email=email, is_active=False).delete()
        return email
class SocietyCreateForm(UserCreationForm):
    """Society (club) registration form (Bootstrap-styled, email-only)."""
    class Meta:
        model = Society
        #model = Student
        fields = ('email',)
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs['class'] = 'form-control'
    def clean_email(self):
        # Purge stale never-activated societies so the address can be reused.
        email = self.cleaned_data['email']
        Society.objects.filter(email=email, is_active=False).delete()
        #Student.objects.filter(email=email, is_active=False).delete()
        return email
17,550 | d40e21fadc56aada242349e91c6e26eaca88d44f | from django.shortcuts import render,get_object_or_404,redirect
from django.contrib.auth.decorators import login_required
from django.contrib.auth import authenticate,login,logout
from .models import Country, District, Division, SubDistrict, ServiceCategory, Package, NoticeBoard, AuthLogs
from django.contrib import messages
from django.core.paginator import Paginator
from.models import SubDistrict,Surveyor,Division,District,CollectData,AssignDataCollector
from django.contrib.auth.models import User
from django.contrib.auth.hashers import make_password
from django.core.mail import send_mail
# Create your views here.
def userlogin(request):
    """Authenticate a user; already-logged-in visitors go straight to the dashboard.

    On a failed POST the login page is re-rendered with an error message.
    """
    if request.user.is_authenticated:
        return redirect('admin_home')
    else:
        if request.method == "POST":
            user = request.POST.get('user', )
            password = request.POST.get('pass', )
            auth = authenticate(request, username=user, password=password)
            if auth is not None:
                login(request, auth)
                return redirect('admin_home')
            else:
                messages.add_message(request, messages.ERROR, 'Username or password mismatch!')
        return render(request, "login.html")
def user_logout(request):
    """End the current session and return to the login page."""
    logout(request)
    return redirect('login')
def admin_home(request):
    """Dashboard: assignment list (newest first) plus headline counts and the latest notice."""
    if request.user.is_authenticated:
        assign_collector_obj = AssignDataCollector.objects.all()[::-1]  # newest first
        total_data_collector = Surveyor.objects.all().count()
        total_data_collect = CollectData.objects.all().count()
        total_assign_data_collector = AssignDataCollector.objects.all().count()
        last_notice = NoticeBoard.objects.last()
        context={
            "isact_home":"active",
            "total_data_collector":total_data_collector,
            "total_data_collect":total_data_collect,
            "total_assign_data_collector":total_assign_data_collector,
            "data": assign_collector_obj,
            "title": "Data Collection Dashboard",
            "last_notice": last_notice
        }
        return render(request, "admin_home.html", context)
    else:
        return redirect('login')
def assign_data_collector(request):
    """Show the "assign data collector" form and handle its submission.

    GET renders the form with all lookup tables; POST creates an
    AssignDataCollector row and redirects to the dashboard. Anonymous
    users are redirected to the login page.
    """
    if not request.user.is_authenticated:
        # BUG FIX: the original ended with `return ('login')`, which returns a
        # bare string instead of an HTTP response. Redirect like sibling views.
        return redirect('login')
    surveyors = Surveyor.objects.all()[::-1]  # newest first
    country_obj = Country.objects.all()
    division_obj = Division.objects.all()
    district_obj = District.objects.all()
    subdistrict_obj = SubDistrict.objects.all()
    service_categories = ServiceCategory.objects.all()
    context = {
        "isact_assigndatacollector": "active",
        "surveyors": surveyors,
        "country": country_obj,
        "division": division_obj,
        "district": district_obj,
        "subdistrict": subdistrict_obj,
        "service_categories": service_categories,
        "title": "Assign Data Collector"
    }
    if request.method == "POST":
        company_name = request.POST.get("company_name")
        # Resolve the submitted display values back to model rows.
        service_category_obj = ServiceCategory.objects.get(name=request.POST.get("service_category"))
        data_collector_obj = Surveyor.objects.get(id=request.POST.get("data_collector"))
        area = request.POST.get("area")
        country = Country.objects.get(country_name=request.POST.get("country"))
        division = Division.objects.get(division_name=request.POST.get("division"))
        district = District.objects.get(district_name=request.POST.get("district"))
        sub_district = SubDistrict.objects.get(sub_district_name=request.POST.get("sub_district"))
        collect_obj = AssignDataCollector(
            company_name=company_name, service_category=service_category_obj,
            data_collector=data_collector_obj, assign_by=request.user, area=area,
            country=country, division=division, district=district, sub_district=sub_district)
        collect_obj.save()
        messages.success(request, "Data Collector Assign Successfully")
        return redirect(admin_home)
    return render(request, "assign_data_collector/assign_data_collect_form.html", context)
def view_form(request, id):
if request.user.is_authenticated:
obj =get_object_or_404(AssignDataCollector, id=id)
context={
"obj":obj,
"title": "Assignment Details"
}
return render(request, "assign_data_collector/view_form.html", context)
return redirect('login')
def form_delete(request, id):
if request.user.is_authenticated:
obj = get_object_or_404(AssignDataCollector, id=id)
obj.delete()
messages.success(request, "Data Deleted Successfully")
return redirect('admin_home')
else:
return redirect('login')
def surveyor_list(request, filter):
if request.user.is_authenticated:
user_obj = None
if filter == 'None':
user_obj = Surveyor.objects.all()[::-1]
elif filter == 'active':
user_obj = Surveyor.objects.all().filter(status=1)[::-1]
elif filter == 'inactive':
user_obj = Surveyor.objects.all().filter(status=2)[::-1]
elif filter == 'rejected':
user_obj = Surveyor.objects.all().filter(status=3)[::-1]
context ={
"isact_surveyorlist": "active",
"user": user_obj,
"title": "Data Collector List"
}
return render(request, "surveyor/surveyor_list.html", context)
else:
return redirect('login')
def view_surveyor(request, id):
if request.user.is_authenticated:
user_obj = Surveyor.objects.get(id=id)
user_obj_another = user_obj.user
data_obj = CollectData.objects.all().filter(data_collector=user_obj_another)
total_collect_data = CollectData.objects.all().filter(data_collector=user_obj_another).count()
context= {
"user": user_obj,
"data":data_obj,
"total_collect_data":total_collect_data,
"isact_surveyorlist": "active",
"title": "Data Collector Details"
}
return render(request, "surveyor/view_surveyor.html", context)
else:
return redirect('login')
def update_surveyor(request, id):
if request.user.is_authenticated:
user_obj = get_object_or_404(Surveyor, id=id)
if request.method == "POST":
user_obj.address = request.POST.get("address")
user_obj.profile_picture = request.POST.get("profile_picture")
user_obj.country = request.POST.get("country")
user_obj.division = request.POST.get("division")
user_obj.district = request.POST.get("district")
user_obj.sub_district = request.POST.get("sub_district")
user_obj.email = request.POST.get("email")
user_obj.graduation_subject = request.POST.get("graduation_subject")
user_obj.university = request.POST.get("university")
user_obj.Skills = request.POST.get("Skills")
user_obj.area = request.POST.get("area")
user_obj.phone = request.POST.get("phone")
user_obj.description = request.POST.get("description")
user_obj.designation = request.POST.get("designation")
user_obj.experience = request.POST.get("experience")
user_obj.role = request.POST.get("role")
user_obj.status = request.POST.get("status")
user_obj.save()
messages.success(request, "User Update Successfully !!")
return redirect('update_surveyor', id=id)
context ={
"user": user_obj,
"isact_surveyorlist": "active",
"title": "Update Data Collector"
}
return render(request, "surveyor/surveyor_update.html", context)
else:
return redirect('login')
def remove_surveyor(request, id):
    """Delete a Surveyor and return to the full list.

    Security/consistency fix: every other mutating view in this module
    checks authentication first; this destructive one did not.
    """
    if not request.user.is_authenticated:
        return redirect('login')
    obj = get_object_or_404(Surveyor, id=id)
    obj.delete()
    messages.success(request, "Requested User Delete Successfully !!")
    return redirect('surveyor_list', 'None')
def register_surveyor(request):
if request.user.is_authenticated:
if request.method == "POST":
fname = request.POST.get('fname', )
lname = request.POST.get('lname', )
uname = request.POST.get('uname', )
password = request.POST.get('password', )
address = request.POST.get("address")
profile_picture = request.FILES.get("profile_picture")
country = request.POST.get("country")
division = request.POST.get("division")
district = request.POST.get("district")
sub_district = request.POST.get("sub_district")
email = request.POST.get("email")
area = request.POST.get("area")
phone = request.POST.get("phone")
designation = request.POST.get("designation")
experience = request.POST.get("experience")
description = request.POST.get("description")
graduation_subject = request.POST.get("graduation_subject")
university = request.POST.get("university")
user = User.objects.all().filter(username=uname)
if user :
messages.success(request, "User Already Exits")
return redirect('register_surveyor')
else :
auth_info={
'first_name': fname,
'last_name': lname,
'username': uname,
'password': make_password(password),
}
user = User(**auth_info)
user.save()
user_obj = Surveyor(experience=experience,university=university,description=description,graduation_subject=graduation_subject,user=user,address=address,profile_picture=profile_picture,country=country,division=division,
district=district,sub_district=sub_district,email=email,area=area,
phone=phone,designation=designation)
user_obj.save()
messages.success(request, "Data Collector Create Successfully !!")
context = {
"isact_registersurveyor": "active",
"title": "Register Data Collector"
}
return render(request, "surveyor/register_surveyor.html", context)
else:
return redirect('login')
def country(request):
if request.user.is_authenticated:
if request.method == "POST":
country_name = request.POST.get("country")
user = Country(country_name=country_name)
user.save()
messages.success(request, "Country Added Successfully")
get_country = Country.objects.all()[::-1]
context = {
"get_country": get_country,
'isact_location': 'active',
"title": "Add Country"
}
return render(request, "add/add_country.html", context)
else:
return redirect('login')
def country_remove(request, id):
obj = get_object_or_404(Country, id=id)
obj.delete()
messages.success(request, "Country Remove Successfully")
return redirect('country')
def add_division(request):
if request.user.is_authenticated:
if request.method == "POST":
division_name = request.POST.get("division")
user = Division(division_name=division_name)
user.save()
messages.success(request, "Division Added Successfully")
div_obj = Division.objects.all()[::-1]
paginator = Paginator(div_obj, 10)
page = request.GET.get('page')
get_page = paginator.get_page(page)
context = {
"div_obj": get_page,
'isact_location': 'active',
"title": "Add Division"
}
return render(request, "add/add_division.html", context)
else:
return redirect('login')
def add_district(request):
if request.user.is_authenticated:
if request.method == "POST":
district_name = request.POST.get("district")
user = District(district_name=district_name)
user.save()
messages.success(request, "District Added Successfully")
get_district = District.objects.all()[::-1]
paginator = Paginator(get_district, 10)
page = request.GET.get('page')
get_page = paginator.get_page(page)
context = {
"get_district": get_page,
'isact_location': 'active',
"title": "Add District"
}
return render(request, "add/add_district.html", context)
else:
return redirect('login')
def update_district(request, id):
if request.user.is_authenticated:
obj = get_object_or_404(District, id=id)
context={
"district":obj,
'isact_location': 'active',
"title": "Update District"
}
if request.method == "POST":
obj.district_name = request.POST.get("district")
obj.save()
messages.success(request, "District Update Successfully")
return redirect(add_district)
return render(request, "update/update_district.html", context)
else:
return redirect('login')
def remove_district(reuquest, id):
obj = get_object_or_404(District, id=id)
obj.delete()
messages.success(reuquest, "District Removed Successfully ")
return redirect(add_district)
def add_sub_district(request):
if request.user.is_authenticated:
if request.method == "POST":
sub_district_name = request.POST.get("sub_district")
user = SubDistrict(sub_district_name=sub_district_name)
user.save()
messages.success(request, "Sub District Added Successfully")
get_subdistrct = SubDistrict.objects.all()[::-1]
paginator = Paginator(get_subdistrct, 10)
page = request.GET.get('page')
get_page = paginator.get_page(page)
context = {
"get_subdistrct": get_page,
'isact_location': 'active',
"title": "Add Sub District"
}
return render(request, "add/add_subdistrict.html", context)
else:
return redirect('login')
def update_sub_district(request, id):
if request.user.is_authenticated:
obj = get_object_or_404(SubDistrict, id=id)
context={
"obj":obj,
'isact_location': 'active',
"title": "Update Sub District"
}
if request.method == "POST":
obj.sub_district_name = request.POST.get("sub_district")
obj.save()
messages.success(request, "Sub District Update Successfully")
return redirect(add_sub_district)
return render(request, "update/sub_district_update.html", context)
else:
return redirect('login')
def remove_subdistrict(request, id):
obj = get_object_or_404(SubDistrict, id=id)
obj.delete()
messages.success(request, "Sub District Remove Successfully")
return redirect(add_sub_district)
def notifications(request):
    """Send a notification e-mail to a registered data collector.

    The recipient must match an existing Surveyor e-mail address.
    """
    if not request.user.is_authenticated:
        return redirect('login')
    user_obj = Surveyor.objects.all()[::-1]
    context = {
        "user": user_obj,
        'isact_notification': 'active',
        "title": "Email Notifications"
    }
    if request.method == "POST":
        # NOTE(review): the POST key below looks like a pasted address rather
        # than a form field name — confirm against the template. Kept as-is.
        send_to = request.POST.get('oxdoraitech@gmail.com')
        subject = request.POST.get('subject')
        message = request.POST.get('message')
        recipient = request.POST.get('recipient')
        check_obj = Surveyor.objects.filter(email=recipient)
        if check_obj.exists():
            # BUG FIX: the original tested `if send_mail:` — the function
            # object itself, which is always truthy. Use the return value
            # (number of messages successfully delivered) instead.
            sent = send_mail("Mail Subject : " + subject, message, send_to, [recipient], fail_silently=False)
            if sent:
                messages.success(request, "Your Email Successfully Send !!!")
            else:
                messages.success(request, "Send Fail ")
        else:
            messages.success(request, "Mail Does not Exists")
    return render(request, "notification/create_notification.html", context)
def collecting_data_list(request):
if request.user.is_authenticated:
data = CollectData.objects.all()[::-1]
context={
"data": data,
"isact_datacollectlist":"active",
'title': "All Collected Data"
}
return render(request, "data_collect/data_collection_list.html", context)
else:
return redirect('login')
def create_data_form(request):
if request.user.is_authenticated:
service_category_list = ServiceCategory.objects.all()
package_list = Package.objects.all()
context={
"isact_createsurvey":"active",
'package_list': package_list,
'service_category_list': service_category_list,
"title": "Submit Data"
}
if request.method == "POST":
visited_company_name = request.POST.get("visited_company_name")
contact_person_name = request.POST.get("contact_person_name")
designation_of_contact_person = request.POST.get("designation_of_contact_person")
service_category_id = request.POST.get("service_category_id")
package_id = request.POST.get("package_id")
contact_no = request.POST.get("contact_no")
email = request.POST.get("email")
address = request.POST.get("address")
picture_visited_person = request.FILES.get("picture")
description = request.POST.get("description")
collector_obj = CollectData(data_collector=request.user, visited_company_name=visited_company_name,
contact_person_name=contact_person_name,
designation_of_contact_person=designation_of_contact_person,
service_category_id=service_category_id,
package_id=package_id, contact_no=contact_no,
email=email, address=address, picture_visited_person=picture_visited_person,
description=description)
collector_obj.save()
messages.success(request, "Collect Data Store Successfully")
return redirect(create_data_form)
return render(request, "data_collect/create_data_form.html", context)
else:
return redirect('login')
def collect_data_view(request, id):
if request.user.is_authenticated:
data = get_object_or_404(CollectData, id=id)
context ={
"data":data,
"isact_datacollectlist": "active",
"title": "Data Details"
}
return render(request, "data_collect/collect_data_view.html", context)
else:
return redirect('login')
def collect_data_delete(request, id):
    """Delete one CollectData row and return to the list.

    Security/consistency fix: like its sibling views, require login before
    the destructive action (the original performed no auth check).
    """
    if not request.user.is_authenticated:
        return redirect('login')
    obj = get_object_or_404(CollectData, id=id)
    obj.delete()
    messages.success(request, "Data Deleted Successfully !!")
    return redirect("collecting_data_list")
def update_country(request, id):
if request.user.is_authenticated:
country_obj = get_object_or_404(Country, id=id)
context = {
"country":country_obj,
'isact_location': 'active',
"title": "Update Country"
}
if request.method == "POST":
country_obj.country_name = request.POST.get("country")
country_obj.save()
messages.success(request, "Country Name Update Successfully")
return redirect(country)
return render(request, "update/update_country.html", context)
else:
return redirect('login')
def update_division(request, id):
if request.user.is_authenticated:
devision_obj = get_object_or_404(Division, id=id)
context = {
"division": devision_obj,
'isact_location': 'active',
"title": "Update Division"
}
if request.method == "POST":
devision_obj.division_name = request.POST.get("division")
devision_obj.save()
messages.success(request, "Division Name Update Successfully")
return redirect(add_division)
return render(request, "update/update_division.html", context)
else:
return redirect('login')
def remove_division(request, id):
obj = get_object_or_404(Division, id=id)
obj.delete()
messages.success(request, "Division Removed Successfully")
return redirect(add_division)
def add_service_category(request):
if request.user.is_authenticated:
if request.method == "POST":
service_category = request.POST.get("service_category")
service_category_obj = ServiceCategory(name=service_category)
service_category_obj.save()
messages.success(request, "Service Category Added Successfully")
service_category_list = ServiceCategory.objects.all()[::-1]
context = {
'isact_service_category': 'active',
'service_category_list': service_category_list,
"title": "Add Service Category"
}
return render(request, "add/add_service_category.html", context)
else:
return redirect('login')
def update_service_category(request, id):
if request.user.is_authenticated:
service_category_obj = get_object_or_404(ServiceCategory, id=id)
context = {
"service_category": service_category_obj,
'isact_service_category': 'active',
"title": "Update Service Category"
}
if request.method == "POST":
service_category_obj.name = request.POST.get("service_category")
service_category_obj.save()
messages.success(request, "Service Category Updated Successfully")
return render(request, "update/update_service_category.html", context)
else:
return redirect('login')
def delete_service_category(request, id):
service_category_obj = get_object_or_404(ServiceCategory, id=id)
service_category_obj.delete()
messages.success(request, "Service Category Removed Successfully")
return redirect(add_service_category)
def add_package(request):
if request.user.is_authenticated:
if request.method == "POST":
package = request.POST.get("package")
service_category_id = request.POST.get("service_category_id")
package_obj = Package(service_category_id=service_category_id, name=package)
package_obj.save()
messages.success(request, "Package Added Successfully")
service_category_list = ServiceCategory.objects.all()
package_list = Package.objects.all()
context = {
'isact_package': 'active',
'package_list': package_list,
'service_category_list': service_category_list,
"title": "Add Package"
}
return render(request, "add/add_package.html", context)
else:
return redirect('login')
def update_package(request, id):
if request.user.is_authenticated:
service_category_list = ServiceCategory.objects.all()
package_obj = get_object_or_404(Package, id=id)
context = {
'service_category_list': service_category_list,
'package': package_obj,
'isact_package': 'active',
"title": "Update Package"
}
if request.method == "POST":
package_obj.service_category_id = request.POST.get("service_category_id")
package_obj.name = request.POST.get("package")
package_obj.save()
messages.success(request, "Package Updated Successfully")
return render(request, "update/update_package.html", context)
else:
return redirect('login')
def delete_package(request, id):
package_obj = get_object_or_404(Package, id=id)
package_obj.delete()
messages.success(request, "Package Removed Successfully")
return redirect(add_package)
@login_required(login_url='login')
def create_notice(request):
if request.method == "POST":
title = request.POST.get("title")
notice_desc = request.POST.get("notice_desc")
notice_image = request.FILES.get("notice_image")
notice_obj = NoticeBoard(title=title, notice_desc=notice_desc, notice_image=notice_image)
notice_obj.save()
messages.success(request, "Notice Added Successfully")
context = {
'isact_notice': 'active',
"title": "Add Notice"
}
return render(request, "add/add_notice.html", context)
@login_required(login_url='login')
def auth_log(request):
auth_logs = AuthLogs.objects.all()[::-1]
context = {
"auth_logs": auth_logs,
'isact_auth_log': 'active',
"title": "All Auth Log",
}
return render(request, "auth-audit/auth-log-list.html", context)
|
17,551 | 3a330ae09de05bfa44ed13ddda32ac4daf760438 | '''
Faça um programa que leia dois números a e b (positivos menores que 10000) e:
- Crie um vetor onde cada posição é um algarismo do número. A primeira posição é o algarismo menos significativo
- Crie um vetor que seja a soma de a e b, mas faça-o usando apenas os vetores construídos anteriormente
Dica: some as posições correspondentes. Se a soma ultrapassar 10, subtraia 10 do resultado e some 1 à próxima
posição.
'''
def quebra_numero(numero, tamanho=5):
    """Break *numero* into a fixed-width digit vector, most significant first.

    Pads with leading zeros up to *tamanho* digits, matching the original
    zero-insertion loops.
    """
    digitos = [int(algarismo) for algarismo in str(numero)]
    return [0] * (tamanho - len(digitos)) + digitos


def soma_vetores(vetor_a, vetor_b):
    """Add two equal-length digit vectors with manual carry propagation.

    BUG FIX: the original used strict comparisons (< 10 and > 10), so a
    column summing to exactly 10 was silently dropped and its carry lost.
    The >= 10 branch now handles that case.
    """
    resultado = []
    fator = 0  # carry into the current column
    for i in range(len(vetor_a) - 1, -1, -1):
        soma = vetor_a[i] + vetor_b[i] + fator
        if soma >= 10:
            soma -= 10
            fator = 1
        else:
            fator = 0
        resultado.append(soma)
    resultado.reverse()  # back to most-significant-first order
    return resultado


def main():
    """Read two numbers (positive, < 10000), add them digit-by-digit and print."""
    num_a = int(input('Digite o número a: '))
    num_b = int(input('Digite o número b: '))
    vetor_soma = soma_vetores(quebra_numero(num_a), quebra_numero(num_b))
    print(f'A soma de {num_a} com {num_b} vale {vetor_soma}.')


if __name__ == '__main__':
    main()
|
17,552 | b72bc30d3b2564563a4fb571066921e823136bbe | from Menu import Menu, MenuItem
from Coffee_Maker import CoffeeMaker
from Money_Machine import MoneyMachine
class CoffeeMachine:
    """Top-level controller wiring Menu, CoffeeMaker and MoneyMachine.

    Responsibilities:
    1. print report
    2. check resources sufficient
    3. process coins
    4. check transaction successful
    5. make coffee

    NOTE: the constructor immediately enters the interactive loop
    (``self.process()``), so instantiating this class blocks on input().
    """
    def __init__(self):
        self.coffee_maker = CoffeeMaker()
        self.menu = Menu()
        self.money_machine = MoneyMachine()
        self.is_on = True
        # Start the REPL right away; runs until the user types "off".
        self.process()
    def process(self):
        """Prompt for drink choices until switched off.

        Special commands: "off" stops the loop, "report" prints resource and
        money status; anything else is looked up on the menu and served if
        ingredients suffice and payment succeeds.
        """
        while self.is_on:
            options = self.menu.get_items()
            choice = input(f"What would you like? ({options}): ")
            choice = choice.lower()
            if choice == "off":
                self.is_on = False
            elif choice == "report":
                self.coffee_maker.report()
                self.money_machine.report()
            else:
                drink = self.menu.find_drink(choice)
                is_enough_ingredients = self.coffee_maker.is_resource_sufficient(drink)
                is_payment_successful = self.money_machine.make_payment(drink.cost)
                if is_enough_ingredients and is_payment_successful:
                    self.coffee_maker.make_coffee(drink)
coffee_machine = CoffeeMachine() |
17,553 | b1fa8628a5df3d4e9ad1c1822526de65fcbe59c5 | class Song:
def __init__(self, id, artist, title, key=''):
self.id = str(id.encode('utf-8'))
self.artist = str(artist.encode('utf-8'))
self.title = str(title.encode('utf-8'))
self.key = str(key.encode('utf-8'))
self.url = None
def __str__(self):
return "%s - %s" % (self.artist, self.title)
def add_url(self, url):
self.url = url
|
17,554 | 8738dafa350e41fe1c5764bc2b559f0ad6a89fa2 | import time
import numpy as np
import math
import cv2
import mediapipe as mp
class handDetector():
    """Wrapper around MediaPipe Hands: detection, pixel-space landmark
    extraction, finger-up flags and fingertip distances.

    Call order matters: findHands() must run before findPosition(), and
    findPosition() before fingersUp()/findDistance(), because results and
    lm_list are cached on self.
    """
    def __init__(self, mode=False, max_hands=2, detection_conf=0.5, track_conf=0.5):
        self.mode = mode
        self.max_hands = max_hands
        self.detection_conf = detection_conf
        self.track_conf = track_conf
        self.mpHands = mp.solutions.hands
        # NOTE(review): arguments are passed positionally; confirm they match
        # the installed mediapipe Hands() signature (static_image_mode,
        # max_num_hands, ...).
        self.hands = self.mpHands.Hands(self.mode, self.max_hands,
                                        self.detection_conf, self.track_conf)
        self.mpDraw = mp.solutions.drawing_utils
        self.tip_ids = [4, 8, 12, 16, 20]  # tip finger id (thumb..pinky)

    # Find hand landmarks
    def findHands(self, img, draw=True):
        """Detect hands in a BGR frame, optionally drawing the skeleton.
        Caches the raw results on self for later calls; returns the frame."""
        imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # MediaPipe wants RGB
        self.results = self.hands.process(imgRGB)
        #print(results.multi_hand_landmarks)
        if self.results.multi_hand_landmarks:
            for hand_lms in self.results.multi_hand_landmarks:
                if draw:
                    self.mpDraw.draw_landmarks(img, hand_lms, self.mpHands.HAND_CONNECTIONS)
        return img

    # Find finger landmarks
    def findPosition(self, img, hand_number=0, draw=True):
        """Return ([[id, x_px, y_px], ...], (x_min, y_min, x_max, y_max))
        for one detected hand; empty list/bbox when no hand was found."""
        x_list = []
        y_list = []
        bbox = []
        self.lm_list = []
        if self.results.multi_hand_landmarks:
            my_hand = self.results.multi_hand_landmarks[hand_number]
            for id, lm in enumerate(my_hand.landmark):
                #print(id, lm)
                height, width, c = img.shape
                # Landmarks come back normalized to [0, 1]; scale to pixels.
                cx, cy = int(lm.x * width), int(lm.y * height)
                x_list.append(cx)
                y_list.append(cy)
                #print(id, cx, cy)
                self.lm_list.append([id, cx, cy])
                if draw:
                    cv2.circle(img, (cx, cy), 6, (255, 0, 0), cv2.FILLED)
            x_min, x_max = min(x_list, default=0), max(x_list, default=0)
            y_min, y_max = min(y_list, default=0), max(y_list, default=0)
            bbox = x_min, y_min, x_max, y_max
            if draw:
                # 20-pixel margin around the hand.
                cv2.rectangle(img, (x_min - 20, y_min - 20), (x_max + 20, y_max + 20),
                              (0, 255, 0), 2)
        return self.lm_list, bbox

    # Find finger if its up
    def fingersUp(self):
        """Return a 5-element 0/1 list (thumb..pinky), 1 = finger extended.
        Requires a prior findPosition() call with a detected hand."""
        fingers = []
        # Thumb: compared on the x axis -- presumably assumes one hand
        # orientation (TODO confirm for left vs right hands).
        if self.lm_list[self.tip_ids[0]][1] > self.lm_list[self.tip_ids[0] - 1][1]:
            fingers.append(1)
        else:
            fingers.append(0)
        # Fingers: tip above (smaller y than) the pip joint means extended.
        for id in range(1, 5):
            if self.lm_list[self.tip_ids[id]][2] < self.lm_list[self.tip_ids[id] - 2][2]:
                fingers.append(1)
            else:
                fingers.append(0)
        # totalFingers = fingers.count(1)
        return fingers

    # Find distance and average of 2 fingers
    def findDistance(self, p1, p2, img, draw=True, r=7, t=3):
        """Return (pixel distance between landmarks p1 and p2, frame,
        [x1, y1, x2, y2, cx, cy]) where (cx, cy) is the midpoint."""
        x1, y1 = self.lm_list[p1][1:]
        x2, y2 = self.lm_list[p2][1:]
        cx, cy = (x1 + x2) // 2, (y1 + y2) // 2
        if draw:
            cv2.line(img, (x1, y1), (x2, y2), (255, 0, 255), t)
            cv2.circle(img, (x1, y1), r, (255, 0, 255), cv2.FILLED)
            cv2.circle(img, (x2, y2), r, (255, 0, 255), cv2.FILLED)
            cv2.circle(img, (cx, cy), r, (0, 0, 255), cv2.FILLED)
        length = math.hypot(x2 - x1, y2 - y1)
        return length, img, [x1, y1, x2, y2, cx, cy]
def main():
    """Webcam demo: overlay hand landmarks and an FPS counter; 'q' quits."""
    previous_time = 0.0
    capture = cv2.VideoCapture(0)
    tracker = handDetector()
    while True:
        _, frame = capture.read()
        frame = tracker.findHands(frame)
        landmarks, box = tracker.findPosition(frame)
        if landmarks:
            print(landmarks[4])  # thumb-tip entry [id, x, y]
        now = time.time()
        fps = 1 / (now - previous_time)
        previous_time = now
        cv2.putText(frame, str(int(fps)), (10, 70), cv2.FONT_HERSHEY_PLAIN, 3,
                    (250, 0, 0), 3)
        cv2.imshow('image', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):  # press Q to quit
            break
if __name__ == "__main__":
main() |
17,555 | 9d188fc798ac4e434ac0c1b8fa02b1815b93529f | # 每日温度
# 根据每日 气温 列表,请重新生成一个列表,对应位置的输入是你需要再等待多久温度才会升高的天数。如果之后都不会升高,请输入 0 来代替。
# 例如,给定一个列表 temperatures = [73, 74, 75, 71, 69, 72, 76, 73],你的输出应该是 [1, 1, 4, 2, 1, 1, 0, 0]。
# 提示:气温 列表长度的范围是 [1, 30000]。每个气温的值的都是 [30, 100] 范围内的整数。
class Solution(object):
    """LeetCode 739 "Daily Temperatures": for each day, report how many days
    until a strictly warmer temperature (0 if none ever follows)."""

    # Approach 1: scan from the last day backwards, hopping over stretches
    # whose answer is already known.
    def dailyTemperatures1(self, T):
        """
        :type T: List[int]
        :rtype: List[int]
        """
        n = len(T)
        res = [0] * n
        for i in range(n - 2, -1, -1):
            j = i + 1
            while j < n:
                if T[i] < T[j]:
                    res[i] = j - i      # first warmer day found
                    break
                elif res[j] == 0:
                    res[i] = 0          # nothing warmer ever follows day j
                    break
                j += res[j]             # jump straight past day j's answer
        return res

    # Approach 2: monotonically decreasing stack of unresolved day indices.
    # Every index is pushed and popped at most once, so the scan is O(n).
    def dailyTemperatures2(self, T):
        """
        :type T: List[int]
        :rtype: List[int]
        """
        n = len(T)
        res = [0] * n
        stack = []                      # indices still waiting for a warmer day
        for k, v in enumerate(T):
            # Today's temperature resolves every colder pending day.
            # (The redundant "if stack:" wrapper was removed -- the while
            # condition already checks it.)
            while stack and T[stack[-1]] < v:
                res[stack[-1]] = k - stack[-1]
                stack.pop()
            stack.append(k)
        return res

    def dailyTemperatures(self, T):
        """Public entry point; delegates to the O(n) stack solution."""
        return self.dailyTemperatures2(T)
if __name__ == '__main__':
    # Quick manual check of the stack-based solution.
    solver = Solution()
    print(solver.dailyTemperatures([55,38,53,81,61,93,97,32,43,78]))
17,556 | 41894c669355cb7f6db7428930520df95a39707c | import json
import logging
from os import path
from json.decoder import JSONDecodeError
from functools import partial
from sys import argv
from datetime import timedelta, datetime
from wifi_scanner.algorithms import smooth_scan, full_scan, selective_scan
from wifi_scanner.schedulers import random_trigger, interval_trigger, \
traffic_trigger, repeat_scan
log = logging.getLogger(__name__)
def write_output(
    file_name: str, start_time: datetime, end_time: datetime, access_points: list
):
    """Append one scan record (timestamps + AP snapshots) to the JSON log
    at *file_name*, creating a fresh list when the file is missing/corrupt."""
    try:
        with open(file_name, "r") as handle:
            records = json.load(handle)
    except (JSONDecodeError, FileNotFoundError) as err:
        # First run (or damaged file): start over with an empty log.
        log.exception(err)
        log.warning("The file was empty; initializing with empty list!")
        records = []

    ap_dicts = [
        {
            "bss": ap.bss,
            "ssid": ap.ssid,
            "frequency": ap.frequency,
            "signal_strength": ap.signal_strength,
        }
        for ap in access_points
    ]
    records.append(
        {
            "start_time": start_time.timestamp(),
            "end_time": end_time.timestamp(),
            "aps": ap_dicts,
        }
    )
    with open(file_name, "w") as handle:
        json.dump(records, handle)
# Scan strategies selectable from the CLI; partial() pre-binds each variant's
# channel list or smoothing interval.
algorithms = {
    "full": full_scan,
    "selective": partial(selective_scan, channels=[1, 6, 11]),
    "selective_1ch": partial(selective_scan, channels=[1]),
    "selective_odd": partial(selective_scan, channels=[1, 3, 5, 7, 9, 11]),
    "smooth_300": partial(smooth_scan, interval=timedelta(milliseconds=300)),
    "smooth_600": partial(smooth_scan, interval=timedelta(milliseconds=600)),
    "smooth_1200": partial(smooth_scan, interval=timedelta(milliseconds=1200)),
}

# Scan triggers: when/how often the chosen algorithm is run.
schedulers = {
    "single": partial(repeat_scan, repetitions=1),
    "5min": partial(interval_trigger, interval=timedelta(minutes=5)),
    "2min": partial(interval_trigger, interval=timedelta(minutes=2)),
    "traffic": traffic_trigger
}
if __name__ == "__main__":
    logging.basicConfig(
        level=logging.DEBUG,
        format="[%(asctime)s][%(name)s][%(levelname)s]: %(message)s"
    )
    # This could use Argparse, but that's really overcomplicating this simple
    # usecase.
    # argv[1] selects the scanning algorithm, argv[2] the scheduler.
    if len(argv) < 3:
        print(f"Not enough arguments! "
              f"Usage: {argv[0]} <scanning algorithm> <scheduler>")
        exit(1)
    elif argv[1] not in algorithms.keys():
        print(f"Invalid scanning algorithm '{argv[1]}'. "
              f"Choose from: {', '.join(algorithms.keys())}")
        exit(1)
    elif argv[2] not in schedulers.keys():
        print(f"Invalid scheduler '{argv[2]}'. "
              f"Choose from: {', '.join(schedulers.keys())}")
        exit(1)
    algorithm = algorithms[argv[1]]
    scheduler = schedulers[argv[2]]
    log.info(f"===== Starting {argv[2]} with {argv[1]} scan =====")
    # Each scheduler iteration yields one completed scan window; persist it
    # to the JSON log as it arrives.
    for start_time, end_time, scan in scheduler(algorithm):
        write_output(
            path.expanduser("~pi/scanner_measurements.json"),
            start_time,
            end_time,
            list(scan)
        )
|
17,557 | 5b6fb19759ab80e63b7c9e44db1f4d091aac6c8b | from mainapp.models import Books
class CartItem():
    """One cart line: a book, the number of copies, and the unit price paid."""

    def __init__(self, book, amount, perprice=None):
        self.book = book
        self.amount = amount
        self.per_price = perprice
        self.status = 1  # 1 = active line
class Cart():
    """Shopping cart: a list of CartItem lines plus running totals.

    ``total_price`` is the sum of discounted (payable) prices and
    ``save_price`` is the saving versus list price, both quantity-aware.
    """

    def __init__(self):
        self.save_price = 0
        self.total = 0          # kept for backward compatibility with callers
        self.total_price = 0
        self.cartitem = []

    def sums(self):
        """Recompute total_price / save_price over all cart lines.

        Fix: each line now contributes price * amount -- the old code counted
        every line once regardless of how many copies it held.
        """
        self.total_price = 0
        self.save_price = 0
        for item in self.cartitem:
            count = int(item.amount)
            self.total_price += int(item.book.book_dprice) * count
            self.save_price += (int(item.book.book_price) - int(item.book.book_dprice)) * count

    def add_book_toCart(self, bookid):
        """Add one copy of the given book, merging with an existing line."""
        for item in self.cartitem:
            if int(item.book.book_id) == int(bookid):
                item.amount += 1
                self.sums()
                return None
        book = Books.objects.filter(book_id=bookid)[0]
        self.cartitem.append(CartItem(book=book, amount=1, perprice=book.book_dprice))
        self.sums()

    def modify_cart(self, bookid, amount):
        """Set the quantity of an existing cart line.

        Fix: compare on ``book_id`` (as every other method does) rather than
        ``id``, and compare as integers so str/int ids still match.
        """
        for item in self.cartitem:
            if int(item.book.book_id) == int(bookid):
                item.amount = amount
        self.sums()

    def delecte_book(self, bookid):
        """Remove the cart line for the given book id (name kept for callers)."""
        for item in self.cartitem:
            if int(item.book.book_id) == int(bookid):
                self.cartitem.remove(item)
                break  # fix: don't keep iterating a list we just mutated
        self.sums()

    def detail_addbook(self, num, bookid):
        """Add ``num`` copies from the detail page as a new cart line."""
        book = Books.objects.filter(book_id=bookid)[0]
        per_money = int(num) * int(book.book_dprice)
        self.cartitem.append(CartItem(book=book, amount=num, perprice=per_money))
        # Fix: recompute over the whole cart instead of overwriting the
        # totals with just this one line's amounts.
        self.sums()
17,558 | 3a02e081fa977cd051602b06da4891e8ab168068 | import random
import string
from ui import *
import sqlite3
class Common:
    """Shared persistence helpers (SQLite writers) plus small validation and
    id-generation utilities.

    NOTE(review): every write_* method ignores its ``file_name`` argument and
    opens the hard-coded 'database.db' -- confirm whether that parameter is
    dead or the hard-coding is a bug.
    """

    @classmethod
    def write_staff_to_file(cls, file_name, obj_list): # for staff
        """
        Writes staff objects into the SQLite database, replacing all rows of
        every status present in obj_list.
        Args:
            file_name (str): name of file to write to (currently unused)
            obj_list: list of staff objects to persist
        Returns:
            None
        """
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        # Delete old rows per status.  This runs once per object, so the same
        # status may be deleted repeatedly -- redundant but harmless.
        for obj in obj_list:
            if obj.status == "mentor":
                query = "DELETE FROM `staff` WHERE `status` = 'mentor';"
                c.execute(query)
            elif obj.status == "employee":
                query = "DELETE FROM `staff` WHERE `status` = 'employee';"
                c.execute(query)
            elif obj.status == "manager":
                query = "DELETE FROM `staff` WHERE `status` = 'manager';"
                c.execute(query)
        for index, obj in enumerate(obj_list):
            params = [obj.name, obj.surname, obj.email, obj.password, obj.status, obj.id]
            c.execute("INSERT INTO staff (name, surname, email, password, status, staff_id) VALUES (?, ?, ?, ?, ?, ?)", params)
        conn.commit()
        conn.close()

    @classmethod
    def write_student_to_db(cls, file_name, obj_list):
        """
        Replaces the whole `student` table with the given student objects.
        Args:
            obj_list: list of student objects
            file_name (str): name of file to write to (currently unused)
        Returns:
            None
        """
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        query = "DELETE FROM `student`;"
        c.execute(query)
        for obj in obj_list:
            params = [obj.name, obj.surname, obj.email, obj.password, obj.status, obj.card, obj.team, obj.id]
            c.execute("INSERT INTO student (name, surname, email, password, status, card, team, student_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", params)
        conn.commit()
        conn.close()

    @classmethod
    def write_assignment_to_db(cls, file_name, obj_list):
        """
        Replaces the whole `assignements` table with the given assignments.
        Args:
            obj_list: list of assignment objects
            file_name (str): name of file to write to (currently unused)
        Returns:
            None
        """
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        query = "DELETE FROM `assignements`;"
        c.execute(query)
        for obj in obj_list:
            params = [obj.start_date, obj.end_date, obj.assignment_name]
            c.execute("INSERT INTO assignements (start_date, end_date, name) VALUES (?, ?, ?)", params)
        conn.commit()
        conn.close()

    @classmethod
    def write_attendance_to_db(cls, file_name, obj_list):
        """
        Replaces the whole `attendance` table with the given records.
        Args:
            obj_list: list of attendance objects (data/status/id attributes)
            file_name (str): name of file to write to (currently unused)
        Returns:
            None
        """
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        query = "DELETE FROM `attendance`;"
        c.execute(query)
        for obj in obj_list:
            params = [obj.data, obj.status, obj.id]
            c.execute("INSERT INTO attendance (date, status, student_id) VALUES (?, ?, ?)", params)
        conn.commit()
        conn.close()

    @classmethod
    def write_submission_to_db(cls, file_name, obj_list):
        """
        Replaces the whole `submission` table with the given submissions.
        Args:
            obj_list: list of submission objects
            file_name (str): name of file to write to (currently unused)
        Returns:
            None
        """
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        query = "DELETE FROM `submission`;"
        c.execute(query)
        for obj in obj_list:
            # NOTE(review): the params order (name before grade) does not
            # visually match the column list (grade before name) -- confirm
            # against the schema.
            params = [obj.send_date, obj.name, obj.grade, obj.github_link, obj.student_id]
            c.execute("INSERT INTO submission (send_date, grade, name, github_link, student_id) VALUES (?, ?, ?, ?, ?)", params)
        conn.commit()
        conn.close()

    @classmethod
    def write_team_to_db(cls, file_name, teams_list):
        """
        Replaces the whole `teams_list` table with the given team names.
        Args:
            teams_list: list of team-name strings
            file_name (str): name of file to write to (currently unused)
        Returns:
            None
        """
        conn = sqlite3.connect('database.db')
        c = conn.cursor()
        query = "DELETE FROM `teams_list`;"
        c.execute(query)
        for team in teams_list:
            c.execute("INSERT INTO teams_list (name) VALUES (?)", [team])
        conn.commit()
        conn.close()

    @staticmethod
    def error_integer_handling(chosen_option, value_of_possible_options):
        """
        :param chosen_option: user's input.
        :param value_of_possible_options: how many options users could take? Don't count 0 - exit
        :return: True or False. Will be useful to control while loop in other part of program. If True, continue program.
        """
        try:
            # Conversion is for validation only -- raises ValueError on
            # non-numeric input before the range check below.
            int(chosen_option)
            if int(chosen_option) < 0 or int(chosen_option) > value_of_possible_options:
                raise ValueError
        except TypeError:
            print("Wrong input.")
            m = Ui.get_inputs([""], "")  # pause until the user presses enter
            return False
        except ValueError:
            print("It must be integer between 1 and " + str(value_of_possible_options) + " or 0. Press enter to try again.")
            m = Ui.get_inputs([""], "")
            return False
        return True

    @staticmethod
    def check_date(max_day, user_day):
        """
        :param max_day: int, highest valid day of the month (e.g. 30)
        :param user_day: str or int, the day entered by the user
        :return: True when user_day <= max_day; False when it exceeds max_day
            or cannot be parsed as an integer.
        """
        try:
            if int(user_day) > max_day:
                raise ValueError
            return True
        except ValueError:
            return False

    @staticmethod
    def generate_random_id(table):
        """
        Generates random and unique string. Used for id/key generation.
        Draws two rounds of (symbol, digit, uppercase, lowercase) -- an
        8-character id -- and retries until it is absent from *table*.
        """
        characters = [['!', '@', '#', '$', '%', '^', '&', '*'], [1, 2, 3, 4, 5, 6, 7, 8, 9, 0]]
        characters.append(list(string.ascii_uppercase))
        characters.append(list(string.ascii_lowercase))
        generated = ''
        is_unique = False
        id_table = []
        for element in table:
            id_table.append(element)
        while not is_unique:
            is_unique = True
            for i in range(2):
                generated += str(characters[0][random.randint(0, len(characters[0])-1)])
                generated += str(characters[1][random.randint(0, len(characters[1])-1)])
                generated += str(characters[2][random.randint(0, len(characters[2])-1)])
                generated += str(characters[3][random.randint(0, len(characters[3])-1)])
            if generated in id_table:
                is_unique = False
        return generated
|
17,559 | 22fe017cc5a5ba98300bbc8774859b71a21ea48d | import numpy as np
from sklearn.naive_bayes import GaussianNB
# Two 2-D clusters: negative points labelled 1, positive points labelled 2.
X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
Y = np.array([1, 1, 1, 2, 2, 2])
# Batch training on the full dataset.
classifier1 = GaussianNB()
classifier1.fit(X, Y)
NewX = [[1, 0], [0, 1], [-1, 0], [0, -1]]
print(classifier1.predict(NewX))
# Incremental training: the first partial_fit call must be given the full
# set of class labels up front (np.unique(Y)).
classifier2 = GaussianNB()
classifier2.partial_fit(X, Y, np.unique(Y))
print("predict1", classifier2.predict(NewX))
# A later partial_fit updates the fitted model in place with new samples.
classifier2.partial_fit([[0.5, 0]], [1])
print("predict2", classifier2.predict(NewX))
17,560 | 02ca15358033fd03e33da4bb75bc1260ab5965e8 | class Date:
"""
Created with enough functionality to determine the number
of days in a month of a certain year.
"""
year = 0
month = 0
day = 0
def __init__(self, year, month, day):
self.year = year
self.month = month
self.day = day
def is_leap_year(self):
if self.year % 4 == 0:
if self.year % 100 == 0:
if self.year % 400 == 0:
return True
else:
return False
else:
return True
else:
return False
def max_days_in_month(self):
days_per_month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
if self.is_leap_year():
days_per_month[1] = 29
else:
days_per_month[1] = 28
return days_per_month[self.month - 1]
class Calculator:
    """
    Day 10 Project: Calculator

    Holds a left operand, a right operand and an operator string; calculate()
    applies the operator, stores the result, and returns it so the caller can
    chain the result back into left_number.
    """
    result = 0
    left_number = 0
    right_number = 0
    operator = ""

    def clear(self):
        """Reset the running result to zero."""
        self.result = 0

    def calculate(self):
        """Apply self.operator to the operands; store and return the result.

        Division by zero is deliberately defined as 0 rather than an error.
        An unrecognized operator leaves self.result untouched.
        """
        left, right = self.left_number, self.right_number
        if self.operator == "+":
            self.result = left + right
        elif self.operator == "-":
            self.result = left - right
        elif self.operator == "*":
            self.result = left * right
        elif self.operator == "/":
            self.result = 0 if right == 0 else left / right
        return self.result
def main():
    """Menu loop: 1 = days-per-month lookup, 2 = interactive calculator,
    0 = quit."""
    while True:
        selection = int(input("[1] Days per month\n[2] Calculator\n[0] Quit\n: "))
        if selection == 0:
            break
        if selection == 1:
            # Days per month: the day-of-month is irrelevant, so use 1.
            month = int(input("Enter the month: "))
            year = int(input("Enter the year: "))
            print(Date(year, month, 1).max_days_in_month())
        elif selection == 2:
            calc = Calculator()
            calc.left_number = float(input("Enter the first number: "))
            # Keep feeding the previous result back in as the left operand
            # so the user can chain calculations; "0" exits the calculator.
            while True:
                calc.operator = input("Enter the operator [-, +, *, /, 0 to quit]: ")
                if calc.operator == "0":
                    break
                calc.right_number = float(input("Enter the second number: "))
                print(calc.calculate())
                calc.left_number = calc.result


# Entry point.
if __name__ == "__main__":
    main()
|
17,561 | f0ad10f982b54122458e9699434195bbd32320b0 | #!/usr/bin/env python3
# Copyright 2004-present Facebook. All Rights Reserved.
import os
import runpy
import shutil
import subprocess
import tempfile
from glob import glob
from pathlib import Path
from typing import Optional
from zipfile import ZipFile
import labgraph as lg
from ..runners.launch import _get_pex_path, _in_pex, launch
from ..util.logger import get_logger
from ..util.resource import get_resource_tempfile
SOURCE_PATH = "labgraph"
logger = get_logger(__name__)
def test_typecheck() -> None:
    """
    Typechecks LabGraph using mypy. Assumes that the test is running from a PEX (if
    not, the test skips).

    Source selection: prefers the bundled typecheck_src.zip archive; when the
    archive is absent, falls back to the installed labgraph package location.
    Raises RuntimeError (with mypy's output) when typechecking fails.
    """
    mypy_ini_path = get_resource_tempfile(__name__, "mypy.ini")
    mypy_args = ["--config-file", mypy_ini_path]
    zip_path: Optional[str] = None
    try:
        # If available, get the path to the typecheck_src.zip source archive
        zip_path = get_resource_tempfile(__name__, "typecheck_src.zip")
    except FileNotFoundError:
        pass  # Just let zip_path be None and handle this case below
    temp_dir: Optional[tempfile.TemporaryDirectory] = None
    if zip_path is None:
        # If the source archive is not available, typecheck the installed location
        # for LabGraph
        src_path = str(Path(lg.__file__).parent)
        mypy_args += glob(f"{src_path}/**/*.py", recursive=True)
    else:
        # If available, typecheck the typecheck_src.zip source archive
        temp_dir = tempfile.TemporaryDirectory()  # noqa: P201
        src_path = temp_dir.name
        # Extract the source files from the zip file; only .py files under
        # the labgraph source prefix are extracted and typechecked.
        src_file = ZipFile(zip_path)
        for file_path in src_file.namelist():
            if file_path.startswith(SOURCE_PATH) and file_path.endswith(".py"):
                src_file.extract(file_path, src_path)
                mypy_args.append(file_path)
    # Typecheck in a subprocess.  Drain stdout before wait() so a large
    # report cannot fill the pipe buffer and deadlock the child.
    mypy_proc = launch("mypy", mypy_args, cwd=src_path, stdout=subprocess.PIPE)
    mypy_output: Optional[str] = None
    if mypy_proc.stdout is not None:
        mypy_output = mypy_proc.stdout.read().decode("utf-8")
    mypy_proc.wait()
    if temp_dir is not None:
        temp_dir.cleanup()
    if mypy_proc.returncode != 0:
        error_message = f"Typechecking failed (exit code {mypy_proc.returncode})"
        if mypy_output is not None:
            logger.error(mypy_output)
            error_message += f":\n\n{mypy_output}"
        raise RuntimeError(error_message)
|
17,562 | fb4a9045a402df5867799c3d08e4aa458e47000c | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2008 Doug Hellmann All rights reserved.
#
"""
"""
#end_pymotw_header
import xmlrpclib

# Python 2 client example: connect to the companion demo XML-RPC server on
# localhost:9000 and invoke its ping() method over HTTP.
server = xmlrpclib.ServerProxy('http://localhost:9000')
print 'Ping:', server.ping()
|
17,563 | f5cd04e0ea8b45d9e8d34b48474830ecbc8ca473 | #!/usr/bin/python
#-*- coding:utf-8 -*-
import sys
import struct
import numpy as np
import tensorflow as tf
def tile_f32():
    """Generate one random float32 test case for tf.tile and dump it to
    "tile_data_f32.bin".

    Binary layout (native byte order): int32 header
    [total_size, dim_count, input_shape..., reps_shape...] followed by the
    flattened float32 input tensor and the flattened float32 tiled output.
    Returns 0 on success.
    """
    para = []
    # Random rank in [1, 4], per-axis sizes in [16, 31], per-axis repetition
    # counts in [1, 4].
    input_dim_count = int(np.random.randint(1, high=5, size=1))
    input_shape = []
    reps_shape = []
    in_size_all = 1
    reps_size_all = 1
    for i in range(0, input_dim_count):
        input_shape.append(int(np.random.randint(16, high=32, size=1)))
        reps_shape.append(int(np.random.randint(1, high=5, size=1)))
        in_size_all *= input_shape[i]
        reps_size_all *= reps_shape[i]

    # Normally-distributed input with a random mean and std.
    zero_point = int(np.random.randint(-60, high=60, size=1))
    std = int(np.random.randint(1, high=20, size=1))
    src_in = np.random.normal(zero_point, std, input_shape)

    # Compute the reference output with TensorFlow (TF1 session API).
    out_calcu = tf.tile(src_in, reps_shape)
    sess = tf.Session()
    src_out = sess.run(out_calcu)

    src_in_1 = src_in.reshape(in_size_all)
    src_out_1 = src_out.reshape(in_size_all * reps_size_all)

    # Header: payload element count plus dim_count and both shape vectors.
    total_size = (len(src_in_1) + len(src_out_1)) + 1 + input_dim_count + input_dim_count
    para.append(total_size)
    para.append(input_dim_count)
    for i in range(0, input_dim_count):
        para.append(input_shape[i])
    for i in range(0, input_dim_count):
        para.append(reps_shape[i])

    with open("tile_data_f32.bin", "wb") as fp:
        fp.write(struct.pack(('%di' % len(para)), *para))
        fp.write(struct.pack(('%df' % len(src_in_1)), *src_in_1))
        fp.write(struct.pack(('%df' % len(src_out_1)), *src_out_1))
        # Fix: removed the redundant fp.close() that used to sit inside the
        # "with" block -- the context manager already closes the file.

    print(para)
    return 0
# Script entry point: generate one random tile test case, then report done.
if __name__ == '__main__':
    tile_f32()
    print("end")
|
17,564 | 95410c0edcb0c9049a6455ba4f58c4901018f89b | import sys
def dep():
    """Deposit: credit int(line[2]) to account line[1].

    Reads the globals `line` and `d` set by the main loop; a previously
    unseen account is created with the deposited amount.
    """
    account, amount = line[1], int(line[2])
    d[account] = d.get(account, 0) + amount
def wit():
    """Withdrawal: debit int(line[2]) from account line[1].

    Reads the globals `line` and `d` set by the main loop; a previously
    unseen account starts out at the negated amount.
    """
    account, amount = line[1], int(line[2])
    d[account] = d.get(account, 0) - amount
d = dict()  # account name -> integer balance
for line in sys.stdin:
    line = line.split()
    if (line[0] == 'BALANCE'):
        # Print the balance, or ERROR for an account never seen before.
        if line[1] in d:
            print(d[line[1]])
        else:
            print('ERROR')
    elif (line[0] == 'DEPOSIT'):
        dep()
    elif (line[0] == 'WITHDRAW'):
        wit()
    elif (line[0] == 'INCOME'):
        # Credit line[1] percent interest (floor division) to positive
        # balances only.
        for el in d:
            if d[el] > 0:
                d[el] += int(line[1]) * d[el]//100
    elif (line[0] == 'TRANSFER'):
        # TRANSFER <from> <to> <amount>.  dep()/wit() read the global `line`,
        # so rewrite it in place: first a withdrawal from the sender, then a
        # deposit to the recipient (recipient name saved in l_save).
        l_save = line.copy()
        line[2] = line[3]
        wit()
        line[1] = l_save[2]
        dep()
|
17,565 | 3b2271f316a0c90fa2a6c24592b905ca5b240101 | from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from tempfile import mkdtemp
from werkzeug.exceptions import default_exceptions
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Ensure responses aren't cached
@app.after_request
def after_request(response):
    """Stamp no-cache headers on every response so browsers always refetch."""
    for header, value in (
        ("Cache-Control", "no-cache, no-store, must-revalidate"),
        ("Expires", 0),
        ("Pragma", "no-cache"),
    ):
        response.headers[header] = value
    return response
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.route("/", methods = ["GET"])
@login_required
def index():
    """Portfolio overview: per-stock holdings with current prices, per-line
    totals, the user's cash balance, and a grand total."""
    user = session.get("user_id")
    # Net position per stock; HAVING filters out fully-sold holdings.
    rows = db.execute("Select Stock, sum(Num) as Number from portfolio where User = :User group by Stock having sum(Num) > 0", User = session.get("user_id"))
    stocks = rows
    currentprices = []
    # get current price for each group (ie AAPL) with help from lookup function (which remember, returns a dict)
    for stock in stocks:
        symbol = str(stock["Stock"])
        currentprices.append(usd(round((lookup(symbol)['price']),2)))
    totals = []   # formatted (usd) per-line totals for display
    totals1 = []  # numeric per-line totals used for the grand total
    # get current price for each group (ie AAPL) with help from lookup function (which remember, returns a dict)
    for stock in stocks:
        symbol = str(stock["Stock"])
        p = round(float(lookup(symbol)['price']),2)
        n = round(float(stock["Number"]),2)
        t = round(p*n,2)
        totals.append(usd(t))
        totals1.append(t)
    #get cash
    rows = db.execute("Select cash from users where id = :User", User = session.get("user_id"))
    cash = round(float(rows[0]["cash"]),2)
    # Grand total = market value of all holdings + cash on hand.
    gotal = round(sum(totals1)+cash,2)
    return render_template("index.html", stocks = stocks, prices = currentprices, totals = totals, cash = usd(cash), gotal = usd(gotal))
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
    """Buy shares of stock: validate the order, check the user's cash, then
    record the purchase and debit the balance."""
    if request.method == "POST":
        # Validate the form: both fields present and the symbol resolves.
        if not request.form.get("symbol"):
            return apology("must provide valid order info", 400)
        elif not request.form.get("shares"):
            return apology("must provide valid order info", 400)
        elif lookup(request.form.get("symbol")) == None:
            return apology("invalid stock", 400)

        # Shares must parse as an integer...
        try:
            shares = int(request.form.get("shares"))
        except ValueError:
            return apology("shares must be a positive integer", 400)
        # ...and be strictly positive.  Fix: this check was commented out,
        # which allowed zero/negative "purchases" that credited cash.
        if shares < 1:
            return apology("shares must be a positive integer", 400)

        quote = lookup(request.form.get("symbol"))
        stock = quote['name']
        price = quote['price']
        amount = shares * float(price)

        # Check the user's cash balance.
        rows = db.execute("SELECT * FROM users WHERE id = :id", id = session.get("user_id"))
        cash = float(rows[0]["cash"])

        # Record the purchase and debit the cash if affordable.
        if amount <= cash:
            result = db.execute("INSERT INTO portfolio (User, Stock, Price, Num) VALUES(:User, :Stock, :Price, :Num)", User = session.get("user_id"), Stock = stock, Price = usd(price), Num = shares)
            if not result:
                return apology("TX did not record", 400)
            result = db.execute("UPDATE users set cash = cash - :amount where id = :User ", User = session.get("user_id"), amount = amount)
            if not result:
                return apology("Cash did not update", 400)
            # Redirect user to home page
            return redirect("/")
        else:
            return apology("Not enough Cash", 403)
    else:
        return render_template("buy.html")
@app.route("/history")
@login_required
def history():
    """Show history of transactions"""
    # Negative quantities are sells, positive are buys; ordered oldest first.
    transactions = db.execute(
        "Select TransDate as Date, Stock, Price, case when Num < 0 then 'Sell' else 'Buy' end as Type, Num as Quantity from portfolio where User = :User order by Date asc",
        User=session.get("user_id"),
    )
    return render_template("hist.html", rows=transactions)
@app.route("/login", methods=["GET", "POST"])
def login():
    """Log user in: verify credentials against the users table and store the
    user id in the session."""

    # Forget any user_id
    session.clear()

    # User reached route via POST (as by submitting a form via POST)
    if request.method == "POST":

        # Ensure username was submitted
        if not request.form.get("username"):
            return apology("must provide username", 403)

        # Ensure password was submitted
        elif not request.form.get("password"):
            return apology("must provide password", 403)

        # Query database for username
        rows = db.execute("SELECT * FROM users WHERE username = :username",
                          username=request.form.get("username"))

        # Ensure username exists and password is correct (hash comparison,
        # never the plaintext password)
        if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
            return apology("invalid username and/or password", 403)

        # Remember which user has logged in
        session["user_id"] = rows[0]["id"]

        # Redirect user to home page
        return redirect("/")

    # User reached route via GET (as by clicking a link or via redirect)
    else:
        return render_template("login.html")
@app.route("/logout")
def logout():
    """Log user out"""
    # Drop the entire session, then send the visitor back to the login form.
    session.clear()
    return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
    """Get stock quote."""
    if request.method != "POST":
        # Plain GET: show the lookup form.
        return render_template("quote.html")
    # Reject an empty or unresolvable ticker symbol.
    if not request.form.get("symbol"):
        return apology("must provide quote", 400)
    if lookup(request.form.get("symbol")) == None:
        return apology("invalid stock", 400)
    # Display company name, ticker and formatted price.
    quoted_name = lookup(request.form.get("symbol"))['name']
    quoted_price = usd(lookup(request.form.get("symbol"))['price'])
    quoted_symbol = lookup(request.form.get("symbol"))['symbol']
    return render_template("quote1.html", name1=quoted_name, name2=quoted_symbol, name3=quoted_price)
@app.route("/register", methods=["GET", "POST"])
def register():
    """Register user: validate the form, store a hashed password, then log
    the new user in immediately."""
    # Forget any user id
    session.clear()
    if request.method == "POST":
        # Ensure username was submitted
        if not request.form.get("username"):
            return apology("must provide username", 400)
        # Ensure password was submitted
        elif not request.form.get("password"):
            return apology("must provide password", 400)
        # Make sure password and confirmation match
        elif request.form.get("password") != request.form.get("confirmation"):
            return apology("password does not match", 400)
        # add username and pw to the DB (only the hash is stored)
        hash = generate_password_hash(request.form.get("password"))
        result = db.execute("INSERT INTO users (username, hash) VALUES(:username, :hash)", username=request.form.get("username"), hash=hash)
        # A falsy result means the INSERT failed (duplicate username).
        if not result:
            return apology("username already exists", 400)
        # Log user in
        # Query database for username
        rows = db.execute("SELECT * FROM users WHERE username = :username", username=request.form.get("username"))
        session["user_id"] = rows[0]["id"]
        # Redirect user to home page
        return redirect("/")
    # User reached route via GET (as by clicking a link or via redirect)
    else:
        return render_template("register.html")
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
    """Sell shares of stock: validate the request, confirm the user owns
    enough shares, then record a negative-quantity transaction and credit
    the proceeds."""
    if request.method == "POST":
        symbol = request.form.get("symbol")
        # Fix: validate the form BEFORE touching the database -- the old
        # order indexed rows[0] and called int() on the raw form value first,
        # crashing with IndexError/ValueError on missing or bad input.
        if not symbol:
            return apology("must provide symbol", 403)
        elif not request.form.get("number"):
            return apology("must provide number", 403)
        try:
            requested = int(request.form.get("number"))
        except ValueError:
            return apology("must provide number", 403)

        rows = db.execute("Select Stock, sum(Num) as Number from portfolio where User = :User and Stock = :symbol group by Stock", User = session.get("user_id"), symbol = symbol)
        # Fix: guard the no-holdings case (empty result) as well as an
        # oversized request.
        if not rows or requested > rows[0]["Number"]:
            return apology("not enough stock", 403)

        # Log the sale as a negative quantity of shares at the current price.
        price = float(lookup(symbol)['price'])
        num = -requested
        db.execute("INSERT INTO portfolio (User, Stock, Price, Num) VALUES(:User, :Stock, :Price, :Num)", User = session.get("user_id"), Stock = symbol, Price = price, Num = num)
        # Update the user's cash: subtracting a negative amount credits it.
        amount = round(num * price, 2)
        db.execute("UPDATE users set cash = cash - :amount where id = :User ", User = session.get("user_id"), amount = amount)
        # Redirect user to home page
        return redirect("/")
    # User reached route via GET (as by clicking a link or via redirect)
    else:
        # Build the list of owned symbols for the dropdown.
        rows = db.execute("Select Stock, sum(Num) as Number from portfolio where User = :User group by Stock", User = session.get("user_id"))
        symbols = [str(stock["Stock"]) for stock in rows]
        return render_template("sell.html", x=symbols)
# get current price for each group (ie AAPL) with help from lookup function (which remember, returns a dict)
def errorhandler(e):
    """Handle error: render the standard apology page with the HTTP error's
    name and status code."""
    return apology(e.name, e.code)


# listen for errors: register errorhandler for every default HTTP exception
# code (400, 404, 500, ...) known to werkzeug.
for code in default_exceptions:
    app.errorhandler(code)(errorhandler)
|
17,566 | c9b38ea30e2784f4a0d4e018f1bbc91e6cfbef5d | import boto3
class ApiGateway:
    """Thin boto3 wrapper for AWS API Gateway, plus a convenience routine
    that wires the "dwight" REST API endpoints to a single Lambda.

    (The previous docstring, "docstring for LexRunTimeApi", was a copy-paste
    leftover from another client class.)
    """
    def __init__(self):
        self.client = boto3.client('apigateway')

    def create_rest_api(self, name):
        """Create a new REST API and return its description (incl. 'id')."""
        return self.client.create_rest_api(name=name)

    def get_resources(self, restApiId):
        """List the API's resources; item 0 is the root ('/') resource."""
        return self.client.get_resources(restApiId=restApiId)

    def create_resource(self, restApiId, parentId, res):
        """Create a child resource with path part *res* under *parentId*."""
        return self.client.create_resource(restApiId=restApiId, parentId=parentId, pathPart=res)

    def put_method(self, restApiId, resourceId, method):
        """Attach an unauthenticated HTTP *method* to a resource."""
        return self.client.put_method(restApiId=restApiId, resourceId=resourceId,httpMethod=method, authorizationType='NONE')

    def put_integration(self, restApiId, resourceId, httpMethod, uri):
        """Proxy-integrate the method with a Lambda.  Lambda invocations
        always use POST, hence integrationHttpMethod='POST'."""
        return self.client.put_integration(restApiId=restApiId, resourceId=resourceId,httpMethod=httpMethod, type="AWS_PROXY",uri=uri, integrationHttpMethod='POST')

    def get_integration(self, restApiId, resourceId):
        """Fetch the integration attached to the resource's GET method."""
        return self.client.get_integration(restApiId=restApiId, resourceId=resourceId, httpMethod='GET')

    def create_deployment(self, restApiId, stageName):
        """Deploy the current API snapshot to the given stage."""
        return self.client.create_deployment(restApiId=restApiId,stageName=stageName)

    def set_up_dwight_gateway(self, api_name, aws_region, aws_acct_id, lambda_function):
        """Create the API, add one GET endpoint per dwight feature, point
        each at *lambda_function* via AWS_PROXY, deploy to 'prod', and
        return the new REST API id."""
        new_api = self.create_rest_api(api_name)
        restApiId = new_api["id"]
        resources = self.get_resources(restApiId)
        root_id = resources["items"][0]["id"]
        dwight_resources = ["spotify" ,"connect-spotify" ,"gmail" ,"connect-gmail" ,"uber" ,"connect-uber"]
        for resource_name in dwight_resources:
            new_resource = self.create_resource(restApiId, root_id, resource_name)
            self.put_method(restApiId, new_resource["id"], "GET")
            # Standard Lambda invocation ARN for the region/account/function.
            uri = "arn:aws:apigateway:{0}:lambda:path/2015-03-31/functions/arn:aws:lambda:{0}:{1}:function:{2}/invocations".format(aws_region, aws_acct_id, lambda_function)
            self.put_integration(restApiId, new_resource["id"], "GET", uri)
        self.create_deployment(restApiId, "prod")
        return restApiId
17,567 | d7be50b682c8f8c58c2f7dd81cc6d4cfcca8bb6e | from mpi4py import MPI
import argparse
import numpy
from arcsilib.arcsiutils import ARCSIEnum
import sys
# Define MPI message tags
mpiTags = ARCSIEnum('READY', 'DONE', 'EXIT', 'START')
arcsiStages = ARCSIEnum('ARCSIPART1', 'ARCSIPART2', 'ARCSIPART3', 'ARCSIPART4')

# Initializations and preliminaries
mpiComm = MPI.COMM_WORLD   # get MPI communicator object
mpiSize = mpiComm.size     # total number of processes
mpiRank = mpiComm.rank     # rank of this process
mpiStatus = MPI.Status()   # get MPI status object

print("Rank: " + str(mpiRank))

if (__name__ == '__main__') and (mpiRank == 0):
    # ------------------------------------------------------------------ master
    def runStageOnWorkers(stage, paramsLst):
        """Farm each element of paramsLst out to the worker ranks for the
        given processing stage and collect the results.

        Returns the list of results (order follows completion, not input,
        exactly as the original duplicated loops behaved).
        """
        paramsLstTmp = []
        nTasks = len(paramsLst)
        taskIdx = 0
        completedTasks = 0
        closedWorkers = 0  # fix: was incremented without ever being initialised
        while completedTasks < nTasks:
            print("completedTasks = ", completedTasks)
            print("nTasks = ", nTasks)
            rtnParamsObj = mpiComm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=mpiStatus)
            source = mpiStatus.Get_source()
            tag = mpiStatus.Get_tag()
            print("Source: ", source)
            if tag == mpiTags.READY:
                # Worker is ready, so send it a task (if any remain).
                if taskIdx < nTasks:
                    mpiComm.send([stage, paramsLst[taskIdx]], dest=source, tag=mpiTags.START)
                    print("Sending task %d to worker %d" % (taskIdx, source))
                    taskIdx += 1
            elif tag == mpiTags.DONE:
                print("Got data from worker %d" % source)
                paramsLstTmp.append(rtnParamsObj)
                completedTasks += 1
            elif tag == mpiTags.EXIT:  # fix: was `tags.EXIT`, a NameError
                # A worker exiting while tasks remain indicates a bug elsewhere;
                # keep counting so the condition is at least observable.
                print("Worker %d exited." % source)
                closedWorkers += 1
        return paramsLstTmp

    paramsLst = numpy.arange(100)

    # Stage 1 across the workers, then feed its results into stage 4.
    paramsLst = runStageOnWorkers(arcsiStages.ARCSIPART1, paramsLst)
    print(paramsLst)
    paramsLst = runStageOnWorkers(arcsiStages.ARCSIPART4, paramsLst)

    # All work done: tell every worker rank to shut down.
    for workerID in range(mpiSize):
        if workerID > 0:
            mpiComm.send(None, dest=workerID, tag=mpiTags.EXIT)
else:
    print("ELSE not main: ", mpiRank)
    # ------------------------------------------------------------------ worker
    # Workers loop: announce READY, receive a task, process it, send DONE;
    # an EXIT tag from the master breaks the loop.
    while True:
        mpiComm.send(None, dest=0, tag=mpiTags.READY)
        tskData = mpiComm.recv(source=0, tag=MPI.ANY_TAG, status=mpiStatus)
        tag = mpiStatus.Get_tag()
        paramsObj = None
        print(tskData)
        print(tag)
        if tag == mpiTags.START:
            # Do work! tskData is [stage, payload]; dispatch on the stage.
            if tskData[0] == arcsiStages.ARCSIPART1:
                print('PART #1')
                paramsObj = tskData[1] * 10
            elif tskData[0] == arcsiStages.ARCSIPART2:
                print('PART #2')
                paramsObj = tskData[1] * 20
            elif tskData[0] == arcsiStages.ARCSIPART3:
                print('PART #3')
                paramsObj = tskData[1] * 30
            elif tskData[0] == arcsiStages.ARCSIPART4:
                print('PART #4')
                paramsObj = tskData[1] * 40
            else:
                # fix: ARCSIException was raised but never imported (NameError).
                raise ValueError("Don't recognise processing stage")
            mpiComm.send(paramsObj, dest=0, tag=mpiTags.DONE)
        elif tag == mpiTags.EXIT:
            break
    mpiComm.send(None, dest=0, tag=mpiTags.EXIT)
|
17,568 | 2394687ccd3299b37237d12d24629dbe408e25ec | from django.db import models
from django.contrib.auth.models import User
class UserInfo(models.Model):
    """Basic account details for a user."""
    full_name = models.CharField(max_length=100)
    email = models.EmailField()
    username = models.CharField(max_length=100)

    def __str__(self):
        # Users are displayed by their username.
        return str(self.username)
class Keyword(models.Model):
    """A search keyword recorded on a given date, optionally tied to a user."""
    keyword = models.CharField(max_length=100)
    date = models.CharField(max_length=100)
    user = models.ForeignKey(User,on_delete=models.CASCADE,blank=True,null=True)

    def __str__(self):
        # Keywords are displayed by their text.
        return str(self.keyword)
class SearchedDate(models.Model):
    """A start/end date range used for a search (stored as plain strings)."""
    startDate = models.CharField(max_length=100)
    endDate = models.CharField(max_length=100)

    def __str__(self):
        # Ranges are displayed by their start date.
        return str(self.startDate)
17,569 | 44e1da6bb116ff0d93eb75f5cadee8662dbef982 | default_app_config = 'froide.letter.apps.LetterConfig'
|
17,570 | 4d213ea6448772372de237ae7423aec766b7d602 | from SimpleXMLRPCServer import SimpleXMLRPCServer
from playground import *
import logging
# Set up logging
logging.basicConfig(level=logging.DEBUG)

# Bind the XML-RPC server to a fixed LAN address; per-request access logging
# is disabled (logRequests=False) in favour of the logging configured above.
server = SimpleXMLRPCServer(('172.16.89.213', 80), logRequests=False)
# Expose a function
def ping(message):
    """Health-check endpoint: answer 'pong' to 'ping', empty string otherwise."""
    if message == "ping":
        return 'pong'
    return ''
def start_game(xmlStruct):
    # XML-RPC entry point: unpack the request struct and start a new Rack-O game.
    return start_racko_game(xmlStruct['game_id'],xmlStruct['player_id'],xmlStruct['initial_discard'],xmlStruct['other_player_id'])
def get_move(xmlStruct):
    # XML-RPC entry point: ask the game engine for this player's next move.
    response = get_racko_move(xmlStruct['game_id'],xmlStruct['rack'],xmlStruct['discard'],xmlStruct['remaining_microseconds'],xmlStruct['other_player_moves'])
    return response
def get_deck_exchange(xmlStruct):
    # XML-RPC entry point: decide whether/how to exchange the drawn card with the rack.
    response = get_racko_deck_exchange(xmlStruct['game_id'],xmlStruct['remaining_microseconds'],xmlStruct['rack'],xmlStruct['card'])
    return response
def move_result(xmlStruct):
    # XML-RPC entry point: report a move's outcome back to the engine
    # (the whole struct is forwarded as the third argument).
    response = move_racko_result(xmlStruct['game_id'],xmlStruct['move'],xmlStruct)
    return response
def game_result(xmlStruct):
    # XML-RPC entry point: record the final scores and the reason the game ended.
    return racko_game_result(xmlStruct['game_id'],xmlStruct['your_score'],xmlStruct['other_score'],xmlStruct['reason'])
# Expose each XML-RPC entry point to remote clients under its function name.
server.register_function(ping)
server.register_function(start_game)
server.register_function(get_move)
server.register_function(get_deck_exchange)
server.register_function(move_result)
server.register_function(game_result)
# Serve until interrupted.  NOTE: bare `print` statements -- this module
# targets Python 2 (see the SimpleXMLRPCServer import at the top).
try:
    print 'Use Control-C to exit'
    server.serve_forever()
except KeyboardInterrupt:
    print 'Exiting'
|
17,571 | 92c7d7b5ed44c5599e01cc299efa80ca9f1ddb27 | """
This example trains a model on the MNIST data set using keras-tqdm progress bars.
"""
from mnist_model import mnist_model
from keras_tqdm import TQDMCallback
if __name__ == "__main__":
    # Silence Keras' built-in logging (verbose=0) and let TQDMCallback render
    # the training progress bars instead.
    mnist_model(verbose=0, callbacks=[TQDMCallback()])
|
def find_integer_with_most_divisors(input_list):
    """Return the element of input_list with the most positive divisors.

    Ties are broken in favour of the earliest element, matching the original
    implementation.  Non-positive elements are treated as having 0 divisors
    (the original's ``range(1, i + 1)`` is empty for them).

    Raises:
        ValueError: if input_list is empty (same exception type as the
            original's ``max`` over an empty sequence).
    """
    def _divisor_count(n):
        # Count divisors in O(sqrt(n)) by pairing each divisor d with n // d;
        # a perfect-square root only counts once.
        if n < 1:
            return 0
        count = 0
        d = 1
        while d * d <= n:
            if n % d == 0:
                count += 1 if d * d == n else 2
            d += 1
        return count

    if not input_list:
        raise ValueError("input_list must not be empty")
    counts = [_divisor_count(n) for n in input_list]
    # list.index returns the first maximum, preserving the original tie-break.
    return input_list[counts.index(max(counts))]
|
17,573 | fd791a96e1abe51ad22dd1074ed729d531ac9bc0 |
class Config:
    """Hyper-parameter container for a sequence-to-sequence model with attention.

    Settings are grouped into sub-dictionaries under the keys 'training',
    'word', 'encoder', 'decoder' and 'attention'.

    Access patterns:
        config['training']                  -> the whole training sub-dict
        config['training', 'batch_size']    -> a single setting (2-element key)
    """

    def __init__(self,
                 is_training, num_words,
                 learning_rate=0.01, minimum_learning_rate=1e-5,
                 batch_size=128, decay_steps=1e4, decay_factor=0.3,
                 word_embedding_dim=100, cell_type='LSTM',
                 rnn_state_size=100, encoder_bidirection=True,
                 beam_width=5, max_iteration=10,
                 attention=True, attention_type='Bahdanau',
                 attention_num_units=100, attention_depth=100):
        self.config = dict()

        # Validate up front with explicit exceptions: `assert` (used by the
        # original) is stripped when Python runs with -O.
        if cell_type not in ('LSTM', 'GRU'):
            raise ValueError("cell_type must be 'LSTM' or 'GRU', got {!r}".format(cell_type))
        if attention_type not in ('Bahdanau', 'Luong'):
            raise ValueError("attention_type must be 'Bahdanau' or 'Luong', got {!r}".format(attention_type))

        # training config
        self.config['training'] = training_config = dict()
        training_config['is_training'] = is_training
        training_config['learning_rate'] = learning_rate
        training_config['minimum_learning_rate'] = minimum_learning_rate
        training_config['batch_size'] = batch_size
        training_config['decay_steps'] = decay_steps
        training_config['decay_factor'] = decay_factor

        # word embedding config
        self.config['word'] = word_config = dict()
        word_config['num_word'] = num_words
        word_config['embedding_dim'] = word_embedding_dim

        # encoder config
        self.config['encoder'] = encoder_config = dict()
        encoder_config['cell_type'] = cell_type
        encoder_config['state_size'] = rnn_state_size
        encoder_config['bidirection'] = encoder_bidirection

        # decoder config
        self.config['decoder'] = decoder_config = dict()
        decoder_config['cell_type'] = cell_type
        decoder_config['state_size'] = rnn_state_size
        decoder_config['beam_width'] = beam_width
        decoder_config['max_iteration'] = max_iteration

        # attention config
        self.config['attention'] = attention_config = dict()
        attention_config['attention'] = attention
        attention_config['attention_type'] = attention_type
        attention_config['attention_num_units'] = attention_num_units
        attention_config['attention_depth'] = attention_depth

    def __getitem__(self, keys):
        """Look up a config group (str key) or one setting (2-element key).

        Raises:
            KeyError: for an unknown group/setting, or a wrongly-sized key.
            TypeError: for a key that is neither str nor list/tuple (the
                original silently returned None in that case).
        """
        if isinstance(keys, str):
            try:
                return self.config[keys]
            except KeyError as e:
                raise KeyError('Wrong key {} for config'.format(keys)) from e
        elif isinstance(keys, (list, tuple)):
            if len(keys) != 2:
                raise KeyError('Wrong key {} for config'.format(keys))
            try:
                return self.config[keys[0]][keys[1]]
            except KeyError as e:
                raise KeyError('Wrong key {} for config'.format(keys)) from e
        else:
            raise TypeError('Config keys must be str or a 2-element list/tuple, '
                            'got {!r}'.format(keys))
|
17,574 | 2773e2061cbdf95b5c77fa78d1f445367bbdc104 | from __future__ import absolute_import, unicode_literals
import os
import shutil
from tempfile import mkdtemp
import pytest
from instance.cli.application import UI
from instance.utils import BytesIO
here = os.path.abspath(os.path.dirname(__file__))
SITE_UID = 'instance-test'
@pytest.fixture(scope="session")
def env():
    """Configure instance + Django settings once for the whole test session.

    Asserts neither settings object has been configured yet, then points the
    data root at this tests directory and installs the test site UID.
    """
    from instance.conf import settings, DATA_ROOT_VARIABLE, SITE_UID_VARIABLE
    from django.conf import settings as django_settings
    assert not settings.configured
    assert not django_settings.configured
    settings.configure(overrides={
        DATA_ROOT_VARIABLE: here,
        SITE_UID_VARIABLE: SITE_UID
    })
    return settings
@pytest.fixture
def ui(env):
    """A CLI UI whose stdout/stderr are captured in in-memory buffers."""
    return UI(stdout=BytesIO(), stderr=BytesIO())
@pytest.fixture
def File():
    """Return an opener that resolves paths relative to the test data root."""
    def FileOpener(relpath, mode="rb"):
        # FIX: the original referenced an undefined name `data_root`, raising
        # NameError on first use.  The env fixture maps DATA_ROOT_VARIABLE to
        # the module-level `here`, so resolve against `here` instead.
        return open(os.path.join(here, relpath.lstrip('/')), mode)
    return FileOpener
@pytest.fixture
def rf(env):
    """Django RequestFactory (imported lazily, after `env` configures settings)."""
    from django.test.client import RequestFactory
    return RequestFactory()
@pytest.fixture
def client(env):
    """Django test Client (imported lazily, after `env` configures settings)."""
    from django.test.client import Client
    return Client()
|
17,575 | 6f8dd9626a309135aa6329a861ffdf95a812e60e | from __future__ import annotations
import numpy as np
import pandas as pd
from Time_Processing.format_convert_Func import datetime64_ndarray_to_datetime_tuple, np_datetime64_to_datetime
from File_Management.load_save_Func import *
from BivariateAnalysis_Class import BivariateOutlier, Bivariate, MethodOfBins
from File_Management.path_and_file_management_Func import try_to_find_folder_path_otherwise_make_one, try_to_find_file
from UnivariateAnalysis_Class import CategoryUnivariate, UnivariatePDFOrCDFLike, UnivariateGaussianMixtureModel, \
DeterministicUnivariateProbabilisticModel, OneDimensionBinnedData
from typing import Union, Tuple, List, Iterable, Sequence
from BivariateAnalysis_Class import Bivariate, MethodOfBins
from Ploting.fast_plot_Func import *
from PowerCurve_Class import *
from Filtering.simple_filtering_Func import linear_series_outlier, out_of_range_outlier, \
change_point_outlier_by_sliding_window_and_interquartile_range
from Data_Preprocessing.TruncatedOrCircularToLinear_Class import TruncatedToLinear, CircularToLinear
import copy
from Data_Preprocessing.float_precision_control_Func import float_eps
from Correlation_Modeling.Copula_Class import VineGMCMCopula, GMCM, \
FOUR_DIM_CVINE_CONSTRUCTION, THREE_DIM_CVINE_CONSTRUCTION
from TimeSeries_Class import SynchronousTimeSeriesData
import datetime
from Time_Processing.Season_Enum import SeasonTemplate1
from enum import Enum
from PhysicalInstance_Class import PhysicalInstanceDataFrame, PhysicalInstanceSeries, PhysicalInstance
from pathlib import Path
from project_utils import project_path_, WS_POUT_SCATTER_ALPHA, WS_POUT_2D_PLOT_KWARGS, WS_POUT_SCATTER_SIZE
from collections import ChainMap
import warnings
import re
from Filtering.OutlierAnalyser_Class import DataCategoryNameMapper, DataCategoryData
from Filtering.sklearn_novelty_and_outlier_detection_Func import *
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from ConvenientDataType import UncertaintyDataFrame, StrOneDimensionNdarray
from tqdm import tqdm
from parse import parse
import matplotlib.pyplot as plt
import matplotlib as mpl
from mpl_toolkits.axes_grid1 import make_axes_locatable
class WTandWFBase(PhysicalInstanceDataFrame):
    """Common base for wind-turbine (WT) and wind-farm recording frames.

    A DataFrame subclass (via PhysicalInstanceDataFrame) carrying the
    operating envelope (cut-in/cut-out wind speed, rated active power output)
    alongside the measurement columns such as 'wind speed' and
    'active power output'.
    """

    # Root folder for all persisted analysis results.
    results_path = project_path_ / 'Data/Results/'  # type: Path
    __slots__ = ("cut_in_wind_speed", "cut_out_wind_speed", "rated_active_power_output")

    @property
    def _constructor(self):
        # Keep pandas returning this subclass from DataFrame operations.
        return super()._constructor

    @property
    def _constructor_expanddim(self):
        return super()._constructor_expanddim

    @property
    def _constructor_sliced(self):
        return super()._constructor_sliced

    def __init__(self, *args, cut_in_wind_speed=4, rated_active_power_output, cut_out_wind_speed=25, **kwargs):
        """rated_active_power_output is mandatory; cut-in/out default to 4/25
        (presumably m/s -- TODO confirm unit against data source)."""
        super().__init__(*args, **kwargs)
        self.cut_in_wind_speed = cut_in_wind_speed
        self.cut_out_wind_speed = cut_out_wind_speed
        self.rated_active_power_output = rated_active_power_output
        if 'active power output' in self.columns:
            # Snap slightly negative outputs (-0.02 p.u. ~ 0 p.u.) to exactly 0
            self.loc[self['active power output'].between(*self.rated_active_power_output * np.array([-0.02, 0])),
                     'active power output'] = 0
            # Snap slight over-production (1.0 p.u. ~ 1.02 p.u.) to exactly rated power
            self.loc[self['active power output'].between(*self.rated_active_power_output * np.array([1, 1.02])),
                     'active power output'] = self.rated_active_power_output

    def plot(self, *,
             ax=None,
             plot_mfr: Iterable[PowerCurveByMfr] = None,
             mfr_kwargs: Sequence[dict] = None,
             mfr_mode: str = 'continuous',
             plot_scatter_pc: bool = False,
             save_to_buffer: bool = False,
             **kwargs):
        """Scatter-plot wind speed vs. per-unit power output; optionally
        overlay manufacturer power curves and a method-of-bins scatter PC."""
        ax = scatter(self['wind speed'].values,
                     self['active power output'].values / self.rated_active_power_output,
                     ax=ax,
                     **dict(ChainMap(kwargs,
                                     WS_POUT_2D_PLOT_KWARGS,
                                     {'alpha': WS_POUT_SCATTER_ALPHA,
                                      's': WS_POUT_SCATTER_SIZE,
                                      'color': 'royalblue'}
                                     ))
                     )
        if plot_mfr:
            # One overlay per manufacturer power curve; per-curve kwargs optional.
            for i, this_mfr_pc in enumerate(plot_mfr):
                if mfr_kwargs is not None:
                    this_mfr_kwargs = mfr_kwargs[i]
                else:
                    this_mfr_kwargs = {}
                ax = this_mfr_pc.plot(ax=ax, mode=mfr_mode, **this_mfr_kwargs)
        if plot_scatter_pc:
            ax = PowerCurveByMethodOfBins(self['wind speed'].values,
                                          self['active power output'].values / self.rated_active_power_output).plot(
                ws=np.arange(0, 50, 0.5),
                ax=ax,
                plot_recording=False,
                save_to_buffer=save_to_buffer,
                **dict(ChainMap(kwargs, WS_POUT_2D_PLOT_KWARGS))
            )
        return ax

    def twin_time_series_plot(self, *, time_window_mask: Sequence[bool] = slice(None),
                              x_axis_format: str = '%H',
                              x_label: str = 'Time of a Day [Hour]',
                              wind_speed_y_lim=(-0.05, 27.55),
                              power_output_y_lim=WS_POUT_2D_PLOT_KWARGS['y_lim']):
        """Plot wind speed and power output over time on twin y-axes.

        Returns the (wind-speed axis, power-output axis) pair.
        """
        time_x = self.index[time_window_mask]
        ax = series(x=time_x, y=self.loc[time_window_mask, 'wind speed'].values, figure_size=(5, 3.3 * 0.618),
                    x_axis_format=x_axis_format, x_label=x_label,
                    marker='*', markersize=6, color='royalblue', linestyle='-',
                    y_lim=wind_speed_y_lim, y_label='Wind Speed [m/s]', label='Wind speed')
        ax2 = ax.twinx()  # instantiate a second axes that shares the same x-axis
        ax2.set_ylabel('Active Power Output [p.u.]', fontdict={'size': 10})  # we already handled the x-label with ax1
        series(x=time_x, y=self.loc[time_window_mask, 'active power output'].values, ax=ax2,
               x_axis_format=x_axis_format, marker='o', markersize=3, color='green', linestyle='--',
               y_lim=power_output_y_lim, label='Power output')
        plt.grid(False)
        # Merge both axes' legends into a single legend on ax2.
        lines, labels = ax.get_legend_handles_labels()
        lines2, labels2 = ax2.get_legend_handles_labels()
        ax.get_legend().remove()
        ax2.legend(lines + lines2, labels + labels2, loc=0, prop={'size': 10})
        return ax, ax2

    def update_air_density_to_last_column(self):
        """Return the 'air density' column, computing and appending it from
        temperature/humidity/pressure if not already present.

        Unit conversions: relative humidity /100 (percent -> fraction) and
        barometric pressure *100 (presumably hPa -> Pa -- TODO confirm).
        """
        if 'air density' in self.columns:
            return self['air density'].values
        else:
            from Wind_Class import cal_air_density, celsius_to_kelvin
            air_density = cal_air_density(celsius_to_kelvin(self['environmental temperature'].values),
                                          self['relative humidity'].values / 100,
                                          self['barometric pressure'].values * 100)
            self.insert(self.shape[1], column='air density', value=air_density)
            return air_density

    def clip_active_power_output(self):
        """Clip power output into the open interval (0, rated) in place,
        using float_eps so downstream log/probability transforms stay finite."""
        low_mask = self.loc[:, 'active power output'].values <= 0
        self.loc[low_mask, 'active power output'] = float_eps
        high_mask = self.loc[:, 'active power output'].values >= self.rated_active_power_output
        self.loc[high_mask, 'active power output'] = (1. - float_eps) * self.rated_active_power_output
class WT(WTandWFBase):
    def __init__(self, *args, rated_active_power_output=3000, **kwargs):
        # Single wind turbine; rated power defaults to 3000
        # (presumably kW -- TODO confirm unit against the data source).
        super().__init__(*args, rated_active_power_output=rated_active_power_output, **kwargs)
    @property
    def default_results_saving_path(self):
        """Default pickle locations for this turbine's outlier-filtering and
        power-curve results, keyed 'outlier' / 'power curve'.  Parent folders
        are created on access."""
        criteria = "Criteria_1p5_sigma"
        # NOTE(review): the following bare strings are no-op expression
        # statements -- apparently the other selectable `criteria` values
        # left in place for quick manual switching.
        "Criteria_95pct"
        "Criteria_3_sigma"
        "Criteria_2_sigma"
        "Criteria_1p5_sigma"
        saving_path = {
            "outlier": self.results_path / f"Filtering/{criteria}/{self.__str__()}/results.pkl",
            "power curve": self.results_path / f"PowerCurve/{criteria}/{self.__str__()}/results.pkl"
        }
        for x in saving_path.values():
            try_to_find_folder_path_otherwise_make_one(x.parent)
        return saving_path
    @property
    def outlier_name_mapper(self) -> DataCategoryNameMapper:
        """Mapping between long names, CAT-x abbreviations, integer codes and
        descriptions for the outlier categories used by outlier_detector."""
        meta = [["missing data", "missing", -1, "N/A"],
                ["Normal data", "normal", 0, "the recordings that can be captured by the simulation"],
                ["Low maximum Pout", "CAT-I", 1, "curtailment"],
                ["Linear Pout-WS", "CAT-II", 2, "e.g., constant WS-variable Pout"],
                ["Low Pout-high WS", "CAT-III", 3, "Low Pout-high WS caused by the other sources"],
                ["Scattered", "CAT-IV", 4, "the recordings rejected by the simulation"]]
        mapper = DataCategoryNameMapper.init_from_template(rows=len(meta))
        mapper[:] = meta
        return mapper
    def outlier_detector(self, how_to_detect_scattered: str = 'sim', *,
                         save_file_path: Path = None,
                         prior_sim_knowledge_path: Path = None) -> DataCategoryData:
        """Classify every recording into the categories of outlier_name_mapper.

        Pipeline (in order): CAT-I (linear Pout at constant level, i.e.
        curtailment), CAT-II (linear/constant wind speed), a manufacturer
        power-curve band that marks recordings as normal, a simulation-based
        check against cached prior knowledge, CAT-III (zero Pout between
        cut-in and cut-out), and finally CAT-IV for everything left over.

        Results are cached at save_file_path; if the file already exists the
        cached DataCategoryData is returned unchanged.  The acceptance
        criteria (e.g. 1.5 sigma) is parsed back out of save_file_path.
        """
        assert (how_to_detect_scattered in ('isolation forest', 'hist', 'sim')), "Check 'how_to_detect_scattered'"
        save_file_path = save_file_path or self.default_results_saving_path["outlier"]
        if try_to_find_file(save_file_path):
            warnings.warn(f"{self.__str__()} has results in {save_file_path}, so return to the existing file")
            return load_pkl_file(save_file_path)['DataCategoryData obj']
        assert (prior_sim_knowledge_path is not None)
        # Recover the criteria name from the Windows-style saving path.
        criteria = re.search(rf"(Criteria_\w+)\\", save_file_path.__str__()).group(1)
        print(f"Use criteria = {criteria}")
        # %% Initialise const, hard coding
        ws_std_bin_step = 0.1
        ws_bin_step = 0.1
        # Start from the base-class categorisation (flags missing data etc.).
        outlier = super().outlier_detector()  # type: DataCategoryData
        # %% CAT-I: Pout flat over 30-minute windows within 10%-90% of rated power
        cat_i_outlier_mask = self.data_category_is_linearity(
            '30T',
            constant_error={'active power output': self.rated_active_power_output * 0.0005}
        )
        cat_i_outlier_mask = np.bitwise_and(
            cat_i_outlier_mask,
            self.data_category_inside_boundary(
                {'active power output': (self.rated_active_power_output * 0.1, self.rated_active_power_output * 0.9)}
            )
        )
        outlier.abbreviation[cat_i_outlier_mask] = "CAT-I"  # ok
        del cat_i_outlier_mask
        # %% CAT-II: wind speed effectively constant over 60-minute windows
        cat_ii_outlier_mask = self.data_category_is_linearity('60T', constant_error={'wind speed': 0.01})
        # cat_ii_outlier_mask = self.data_category_is_linearity(
        #     '60T',
        #     general_linearity_error={'wind speed': 0.001,
        #                              'active power output': self.rated_active_power_output * 0.0005}
        # )
        outlier.abbreviation[np.bitwise_and(cat_ii_outlier_mask, ~outlier(["CAT-I", "missing"]))] = "CAT-II"  # ok
        del cat_ii_outlier_mask
        # %% Others(1) and range check
        others_1_mask = ~outlier(["CAT-I", "CAT-II", "missing"])
        # The region inside mfr PC range must be normal, which is due to air density variation!
        low_mfr_pc = PowerCurveByMfr.init_all_instances_in_docs()[0]
        high_mfr_pc = PowerCurveByMfr.init_all_instances_in_docs()[-1]
        normal_mask = np.all(
            (others_1_mask,
             self['active power output'] >= low_mfr_pc(self['wind speed']) * self.rated_active_power_output,
             self['active power output'] <= high_mfr_pc(self['wind speed']) * self.rated_active_power_output),
            axis=0)
        outlier.abbreviation[normal_mask] = "normal"
        others_2_mask = np.bitwise_and(~outlier(["CAT-I", "CAT-II", "missing"]), ~normal_mask)
        del others_1_mask, low_mfr_pc, high_mfr_pc, normal_mask
        # %% Others(2) and sim
        from Wind_Class import Wind
        # Get a Callable that can calculate the 10s-to-10s wind speed variation sigma
        simulation_resolution = 3
        sigma_func = Wind.learn_transition_by_looking_at_actual_high_resol(simulation_resolution)  # type: Callable
        # The following implements the proposed simulation; simulation results
        # are memoised per (WS bin, WS-std bin, air density) in a pickled
        # DataFrame at prior_sim_knowledge_path so re-runs are cheap.
        try_to_find_folder_path_otherwise_make_one(prior_sim_knowledge_path.parent)
        prior_sim_knowledge = load_pkl_file(prior_sim_knowledge_path)  # type: Union[pd.DataFrame, None]
        level_0_name = "wind speed"
        level_1_name = "wind speed std."
        level_2_name = "air density"
        # Basic information needed for prior_sim_knowledge and the simulation
        ws_binned_data_obj = OneDimensionBinnedData(self[level_0_name][others_2_mask].values,
                                                    bin_step=ws_bin_step,
                                                    first_bin_left_boundary=-ws_bin_step / 2)
        ws_std_binned_data_obj = OneDimensionBinnedData(self[level_1_name][others_2_mask].values,
                                                        bin_step=ws_std_bin_step,
                                                        first_bin_left_boundary=-ws_std_bin_step / 2)
        mfr_pc_densities = PowerCurveByMfr.air_density_in_docs()
        uncertainty_data_frame_template_obj = UncertaintyDataFrame.init_from_template(1)
        prior_sim_knowledge_columns = uncertainty_data_frame_template_obj.index
        # prior_sim_knowledge should be initialised if not existing
        if prior_sim_knowledge is None:
            prior_sim_knowledge = pd.DataFrame(
                columns=prior_sim_knowledge_columns,
                index=pd.MultiIndex.from_tuples(
                    ((str(ws_binned_data_obj.bin[0]), str(ws_std_binned_data_obj.bin[0]), mfr_pc_densities[0]),),
                    names=(level_0_name, level_1_name, level_2_name)
                ),
                dtype=float
            )
        # Iterate over the recordings that are to be checked (others_2_mask)
        mfr_pc_obj = PowerCurveByMfr(mfr_pc_densities[0])
        any_update_flag = False
        self.update_air_density_to_last_column()
        original_resolution = 660
        print(f"Total rows to check = {self[others_2_mask].shape[0]}")
        for i, this_recording in tqdm(enumerate(self[others_2_mask].iterrows())):
            this_recording_index = this_recording[0]
            this_recording_ws = this_recording[1]['wind speed']
            this_recording_ws_std = this_recording[1]['wind speed std.']
            this_recording_air_density = this_recording[1]['air density']
            this_recording_pout = this_recording[1]['active power output'] / self.rated_active_power_output
            # %% DEBUG hook (kept from the original):
            # if not ((this_recording_ws > 25) and (this_recording_pout > 0.2)):
            #     continue
            this_recording_ws_bin = ws_binned_data_obj(this_recording_ws)
            this_recording_ws_std_bin = ws_std_binned_data_obj(this_recording_ws_std)
            this_multi_index_obj = pd.MultiIndex.from_tuples(
                ((str(this_recording_ws_bin), str(this_recording_ws_std_bin), mfr_pc_densities[0]),),
                names=(level_0_name, level_1_name, level_2_name)
            )
            not_existing_flag = np.sum(this_multi_index_obj.isin(prior_sim_knowledge.index)) == 0
            existing_but_nan_flag = False
            if np.sum(this_multi_index_obj.isin(prior_sim_knowledge.index)) != 0:
                existing_but_nan_flag = np.any(
                    np.isnan(prior_sim_knowledge.loc[this_multi_index_obj[0]].values)
                )
            # If the cache has no (valid) entry for the current WS and WS std. bin
            if not_existing_flag or existing_but_nan_flag:
                any_update_flag = True
                # Firstly, create current_sim_knowledge based on current key
                current_sim_knowledge = pd.DataFrame(
                    columns=prior_sim_knowledge_columns,
                    index=this_multi_index_obj,
                    dtype=float
                )
                # Secondly, simulate (using mfr_pc_densities[0]):
                # prepare high resolution wind, using medium point of WS bin and WS std. bin
                wind = Wind(this_recording_ws_bin[1], this_recording_ws_std_bin[1],
                            original_resolution=original_resolution)
                high_resol_wind = wind.simulate_transient_wind_speed_time_series(
                    resolution=simulation_resolution,
                    traces_number_for_each_recording=100_000,
                    sigma_func=sigma_func
                )
                # push the traces through the mfr PC with hysteresis control
                this_pout_uncertainty = mfr_pc_obj.cal_with_hysteresis_control_using_high_resol_wind(
                    high_resol_wind,
                    return_percentiles=uncertainty_data_frame_template_obj,
                    discard_prev_seconds=original_resolution - 600
                )
                # Finally, update the cached value
                current_sim_knowledge.iloc[0] = this_pout_uncertainty.values.flatten()
                if not_existing_flag:
                    prior_sim_knowledge = pd.concat((prior_sim_knowledge, current_sim_knowledge))
                else:
                    prior_sim_knowledge.loc[this_multi_index_obj[0]] = this_pout_uncertainty.values.flatten()
                # For every 25 updates, also save and update prior_sim_knowledge in the disk
                if i % 25 == 0:
                    prior_sim_knowledge = prior_sim_knowledge.sort_index()
                    save_pkl_file(prior_sim_knowledge_path, prior_sim_knowledge)
            # Check the table again, which now should have the value for current WS and WS std.
            this_recording_sim = prior_sim_knowledge.loc[this_multi_index_obj]
            this_recording_sim = UncertaintyDataFrame(this_recording_sim.values.T,
                                                      columns=(0,),
                                                      index=this_recording_sim.columns)
            # Acceptance band according to the chosen criteria.
            if criteria == 'Criteria_1p5_sigma':
                based_pout_sim_low, based_pout_sim_high = this_recording_sim(by_sigma=1.5).values.flatten()
            elif criteria == 'Criteria_2_sigma':
                based_pout_sim_low, based_pout_sim_high = this_recording_sim(by_sigma=2).values.flatten()
            elif criteria == 'Criteria_3_sigma':
                based_pout_sim_low, based_pout_sim_high = this_recording_sim(by_sigma=3).values.flatten()
            elif criteria == 'Criteria_95pct':
                based_pout_sim_low, based_pout_sim_high = this_recording_sim(
                    preserved_data_percentage=95).values.flatten()
            else:
                raise NotImplementedError
            # Map the band (simulated at the reference air density) to this
            # recording's actual air density using the Mfr_PC_rho_x relationship.
            new_power_output = PowerCurveByMfr.map_given_power_output_to_another_air_density(
                old_air_density=np.array([mfr_pc_densities[0]] * 2),
                new_air_density=np.array([this_recording_air_density] * 2),
                old_power_output=np.array([based_pout_sim_low, based_pout_sim_high]),
                wind_speed=np.array([this_recording_ws] * 2),
            )
            based_pout_sim_low, based_pout_sim_high = new_power_output
            if (this_recording_pout >= based_pout_sim_low) and (this_recording_pout <= based_pout_sim_high):
                outlier.abbreviation[outlier.index == this_recording_index] = 'normal'
        # After the iteration, if any updates happened, save and update prior_sim_knowledge in the disk
        if any_update_flag:
            def prior_sim_knowledge_index_sort_key(this_index):
                # Sort by the numeric left boundary parsed out of the stringified bins.
                parse_obj_level_0 = parse(r"[{} {} {}]", this_index[0])
                parse_obj_level_1 = parse(r"[{} {} {}]", this_index[1])
                level_0_val = float(parse_obj_level_0[0])
                level_1_val = float(parse_obj_level_1[0])
                return level_0_val, level_1_val

            sorted_index = sorted(prior_sim_knowledge.index, key=prior_sim_knowledge_index_sort_key)
            prior_sim_knowledge = prior_sim_knowledge.reindex(sorted_index)
            save_pkl_file(prior_sim_knowledge_path, prior_sim_knowledge)
        # %% Others(3) and boundary rule
        # CAT-III: zero output despite wind between cut-in and cut-out
        boundary_mask = self.data_category_inside_boundary(
            {'wind speed': (self.cut_in_wind_speed, self.cut_out_wind_speed),
             'active power output': (-np.inf, float_eps)}
        )
        boundary_mask = np.bitwise_and(outlier("others"), boundary_mask)
        outlier.abbreviation[boundary_mask] = "CAT-III"
        # CAT-IV: whatever is still unexplained
        outlier.abbreviation[outlier("others")] = "CAT-IV"
        # %% Save final results
        save_pkl_file(save_file_path,
                      {'raw_ndarray_data': np.array(outlier.abbreviation),
                       'raw_index': outlier.index,
                       'DataCategoryData obj': outlier})
        return outlier
    def outlier_plot(self, outlier: DataCategoryData = None, ax=None, *, plot_individual: bool = False, **kwargs):
        """Scatter-plot the recordings coloured by outlier category.

        Power output is temporarily converted to per-unit for plotting and
        restored afterwards (in-place divide then multiply).  With
        plot_individual=True each category is drawn on its own fresh axes.
        """
        outlier = outlier or load_pkl_file(self.default_results_saving_path["outlier"])['DataCategoryData obj']
        self.loc[:, "active power output"] /= self.rated_active_power_output
        # if sum(outlier("CAT-I.a")) > 0:
        #     ax = scatter(*self[outlier("CAT-I.a")][["wind speed", "active power output"]].values.T, label="CAT-I.a",
        #                  ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
        #                  color="darkorange", marker="1", s=24, zorder=8, **WS_POUT_2D_PLOT_KWARGS)
        if sum(outlier("CAT-I")) > 0:
            ax = scatter(*self[outlier("CAT-I")][["wind speed", "active power output"]].values.T, label="CAT-I",
                         ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
                         color="black", marker="x", s=16, **WS_POUT_2D_PLOT_KWARGS)
        if sum(outlier("CAT-II")) > 0:
            ax = scatter(*self[outlier("CAT-II")][["wind speed", "active power output"]].values.T, label="CAT-II",
                         ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
                         color="red", marker="|", s=28, zorder=11, **WS_POUT_2D_PLOT_KWARGS)
        if sum(outlier("CAT-III")) > 0:
            ax = scatter(*self[outlier("CAT-III")][["wind speed", "active power output"]].values.T, label="CAT-III",
                         ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
                         color="darkorange", marker="v", s=14, **WS_POUT_2D_PLOT_KWARGS)
        # if sum(outlier("CAT-IV.a")) > 0:
        #     ax = scatter(*self[outlier("CAT-IV.a")][["wind speed", "active power output"]].values.T, label="CAT-IV.a",
        #                  ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
        #                  color="green", marker="3", s=24, zorder=9, **WS_POUT_2D_PLOT_KWARGS)
        if sum(outlier("CAT-IV")) > 0:
            ax = scatter(*self[outlier("CAT-IV")][["wind speed", "active power output"]].values.T, label="CAT-IV",
                         ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
                         color="green", marker="3", s=24, zorder=-9, **WS_POUT_2D_PLOT_KWARGS)
        # ax = scatter(*self[outlier("others")][["wind speed", "active power output"]].values.T, label="Others",
        #              ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
        #              color="royalblue", zorder=10, **WS_POUT_2D_PLOT_KWARGS, **kwargs)
        ax = scatter(*self[outlier("normal")][["wind speed", "active power output"]].values.T, label="Normal",
                     ax=ax if not plot_individual else None, alpha=WS_POUT_SCATTER_ALPHA,
                     color="royalblue", zorder=10, **WS_POUT_2D_PLOT_KWARGS, **kwargs)
        # Restore the absolute power output values modified above.
        self.loc[:, "active power output"] *= self.rated_active_power_output
        return ax
    def outlier_report(self, outlier: DataCategoryData = None, *, save_to_buffer=False):
        """Write (or buffer) a CSV report of the outlier category counts,
        loading the cached categorisation if none is supplied."""
        outlier = outlier or load_pkl_file(self.default_results_saving_path["outlier"])['DataCategoryData obj']
        return outlier.report(self.default_results_saving_path["outlier"].parent / "report.csv",
                              save_to_buffer=save_to_buffer)
    def select_data_and_get_power_curve_model(self, task: str, **kwargs) -> PowerCurveFittedBy8PLF:
        """Fit (or load) an 8-parameter-logistic power curve on the 'normal' recordings.

        :param task: 'fit' runs/continues the GA-based fitting (warm-starting from any saved
                     intermediate results); 'load' only restores the best saved parameters.
        :param kwargs: forwarded to the PowerCurveFittedBy8PLF constructor.
        :return: the (fitted or loaded) PowerCurveFittedBy8PLF instance.
        """
        assert (task in ('load', 'fit')), "'Task' is not in ('load', 'fit')"
        pc_file_path = self.default_results_saving_path["power curve"]
        outlier = load_pkl_file(self.default_results_saving_path["outlier"])['DataCategoryData obj']
        # selected_data_mask = outlier(("others", "CAT-I.a", "CAT-IV.a"))
        # Only recordings classified 'normal' by the outlier detector are used for fitting
        selected_data_mask = outlier("normal")
        # Pout is normalised by the rated value so the curve is fitted in per-unit terms
        pc_obj = PowerCurveFittedBy8PLF(
            wind_speed_recording=self['wind speed'].values[selected_data_mask],
            active_power_output_recording=self['active power output'].values[
                                              selected_data_mask] / self.rated_active_power_output,
            **kwargs
        )
        current_results = load_pkl_file(pc_file_path)
        if task == 'fit':
            if current_results is not None:
                # Warm start: resume from the best parameters found so far
                current_best = current_results[-1]['variable']
                pc_obj.update_params(*current_best[:pc_obj.params.__len__()])  # The last the best
                params_init_scheme = 'self'
            else:
                params_init_scheme = 'average'
            pc_obj.fit(ga_algorithm_param={'max_num_iteration': 2500,
                                           'max_iteration_without_improv': 1000,
                                           'population_size': 100},
                       params_init_scheme=params_init_scheme,
                       run_n_times=100,
                       save_to_file_path=pc_file_path,
                       focal_error=0.001,
                       wind_speed=np.arange(0, 28.5, 0.1),
                       function_timeout=6000)
        else:
            # 'load': no fitting, just restore the best saved parameters
            current_best = current_results[-1]['variable']
            pc_obj.update_params(*current_best[:pc_obj.params.__len__()])
        return pc_obj
def get_current_season(self, season_template: Enum = SeasonTemplate1) -> tuple:
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
synchronous = SynchronousTimeSeriesData(self.measurements,
self.outlier_category,
self.outlier_category_detailed)
return synchronous.get_current_season(season_template=season_template)
def do_truncate(self, start_time: datetime.datetime = None, end_time: datetime.datetime = None):
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
synchronous = SynchronousTimeSeriesData(self.measurements,
self.outlier_category,
self.outlier_category_detailed)
self.measurements, self.outlier_category, self.outlier_category_detailed = synchronous.do_truncate(
start_time=start_time,
end_time=end_time
)
def do_truncate_by_season(self, season_to_be_queried: str, season_template: Enum = SeasonTemplate1):
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
synchronous = SynchronousTimeSeriesData(self.measurements,
self.outlier_category,
self.outlier_category_detailed)
self.measurements, self.outlier_category, self.outlier_category_detailed = synchronous.do_truncate_by_season(
season_to_be_queried=season_to_be_queried,
season_template=season_template
)
@staticmethod
def __transform_active_power_output_from_linear_to_original(active_power_output_linear: ndarray,
this_path_) -> ndarray:
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
data_preprocessing_params = load_pkl_file(this_path_ + 'data_preprocessing_params.pkl')
# 对于区域内的有功功率,进行truncated→linear转换;
active_power_output_linear = TruncatedToLinear(
data_preprocessing_params['min_active_power_output'],
data_preprocessing_params['max_active_power_output']).inverse_transform(active_power_output_linear)
return active_power_output_linear
    @staticmethod
    def __transform_data_to_linear_for_copula_model(data_to_be_transformed: ndarray, path_, dims: int) -> dict:
        """Linearise raw recordings for the two regional copula models.

        :param data_to_be_transformed: columns are (Pout, WS[, WD[, temperature]]) depending on *dims*
        :param path_: folder holding 'model_boundary.npy' and per-region preprocessing params
        :param dims: number of dimensions to transform (2, 3 or 4)
        :return: dict with keys 'a', 'b' (transformed arrays), 'model_boundary', 'a_mask', 'b_mask'
        """
        # TODO Deprecated
        warnings.warn("Deprecated", DeprecationWarning)
        transformed_data = {}.fromkeys(('a', 'b', 'model_boundary', 'a_mask', 'b_mask'))
        # Load the model boundary data
        model_boundary = load_npy_file(path_ + 'model_boundary.npy')
        transformed_data['model_boundary'] = model_boundary
        # Determine the masks of the two regional models (column 1 is wind speed)
        _, model_a_global_mask, _, model_b_global_mask, _, _, _ = PowerCurve.cal_region_boundary_mask(
            model_boundary, data_to_be_transformed[:, 1])
        transformed_data['a_mask'] = model_a_global_mask
        transformed_data['b_mask'] = model_b_global_mask
        for i, model_this_global_mask in enumerate((model_a_global_mask, model_b_global_mask)):
            # Skip this region if it has no data at all
            if sum(model_this_global_mask) < 1:
                continue
            # Determine the preprocessing (linearisation) parameters; in theory they may only be
            # modified when the model is fitted
            this_region = 'a' if i == 0 else 'b'
            this_path_ = path_ + this_region + '/'
            this_transformed_data = np.full((sum(model_this_global_mask), dims), np.nan)
            @load_exist_pkl_file_otherwise_run_and_save(this_path_ + 'data_preprocessing_params.pkl')
            def cal_data_preprocessing_params():
                # Epsilon padding widens the bounds so boundary samples stay strictly inside them
                min_active_power_output = np.nanmin(data_to_be_transformed[model_this_global_mask, 0])
                max_active_power_output = np.nanmax(data_to_be_transformed[model_this_global_mask, 0])
                min_wind_speed = np.nanmin(data_to_be_transformed[model_this_global_mask, 1])
                max_wind_speed = np.nanmax(data_to_be_transformed[model_this_global_mask, 1])
                min_absolute_wind_direction_in_truncated = -np.sqrt(2) - 10e8 * float_eps
                max_absolute_wind_direction_in_truncated = np.sqrt(2) + 10e8 * float_eps
                return {'min_active_power_output': min_active_power_output - 10e8 * float_eps,
                        'max_active_power_output': max_active_power_output + 10e8 * float_eps,
                        'min_wind_speed': min_wind_speed - 10e8 * float_eps,
                        'max_wind_speed': max_wind_speed + 10e8 * float_eps,
                        'min_absolute_wind_direction_in_truncated': min_absolute_wind_direction_in_truncated,
                        'max_absolute_wind_direction_in_truncated': max_absolute_wind_direction_in_truncated}
            # NOTE(review): the decorated function is referenced, not called -- presumably the
            # 'load_exist_pkl_file_otherwise_run_and_save' decorator already runs it (or loads the pkl)
            # at decoration time and returns the resulting dict; confirm against the decorator's code
            # (elsewhere in this file the same decorator's result IS called, e.g. load_or_make()).
            data_preprocessing_params = cal_data_preprocessing_params
            # For the region's active power output, apply the truncated->linear transform
            this_transformed_data[:, 0] = TruncatedToLinear(
                data_preprocessing_params['min_active_power_output'],
                data_preprocessing_params['max_active_power_output']).transform(
                data_to_be_transformed[model_this_global_mask, 0])
            # For the region's wind speed, apply the truncated->linear transform
            if dims >= 2:
                this_transformed_data[:, 1] = TruncatedToLinear(
                    data_preprocessing_params['min_wind_speed'],
                    data_preprocessing_params['max_wind_speed']).transform(
                    data_to_be_transformed[model_this_global_mask, 1])
            # For the region's wind direction, apply circular->truncated->linear transform
            if dims >= 3:
                this_transformed_data[:, 2] = CircularToLinear(
                    data_preprocessing_params['min_absolute_wind_direction_in_truncated'],
                    data_preprocessing_params['max_absolute_wind_direction_in_truncated'], 360).transform(
                    data_to_be_transformed[model_this_global_mask, 2])
            # Temperature is left untransformed
            if dims >= 4:
                this_transformed_data[:, 3] = copy.deepcopy(data_to_be_transformed[model_this_global_mask, 3])
            # Write this region's result into the final output
            transformed_data[this_region] = this_transformed_data
        return transformed_data
def __transform_data_to_linear_for_2d_gmcm_model(self, data_to_be_transformed: ndarray, path_) -> dict:
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
return self.__transform_data_to_linear_for_copula_model(data_to_be_transformed, path_, 2)
def __transform_data_to_linear_for_3d_vine_gmcm_model(self, data_to_be_transformed: ndarray, path_) -> dict:
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
return self.__transform_data_to_linear_for_copula_model(data_to_be_transformed, path_, 3)
def __transform_data_to_linear_for_4d_vine_gmcm_model(self, data_to_be_transformed: ndarray, path_) -> dict:
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
return self.__transform_data_to_linear_for_copula_model(data_to_be_transformed, path_, 4)
    def __prepare_fitting_data_for_vine_gmcm_model(self, path_, dims) -> dict:
        """
        Prepare the fitting data for vine_gmcm.
        model_a_global_mask and model_b_global_mask represent the two regions, i.e., two completely
        different models.
        :param path_: folder where 'model_boundary.npy' is cached
        :param dims: 3 or 4 (number of variables to linearise)
        """
        # TODO Deprecated
        warnings.warn("Deprecated", DeprecationWarning)
        # Determine the masks of model a and model b, and cache the computed boundary values.
        # NOTE(review): the decorated function is only defined, never called by name -- presumably the
        # decorator runs it (or loads the cached .npy) at decoration time so that 'model_boundary.npy'
        # exists before the transform below reads it; confirm against the decorator's definition.
        @load_exist_npy_file_otherwise_run_and_save(path_ + 'model_boundary.npy')
        def identify_model_boundary():
            pc = PowerCurveByMethodOfBins(self.measurements['wind speed'].values[self.outlier_category == 0],
                                          self.measurements['active power output'].values[self.outlier_category == 0])
            return np.array(pc.cal_region_boundary())
        # Set all unwanted recordings (anything not category 0 in its own dimension) to np.nan
        fitting_data = np.stack((self.measurements['active power output'].values,
                                 self.measurements['wind speed'].values,
                                 self.measurements['absolute wind direction'].values,
                                 self.measurements['environmental temperature'].values),
                                axis=1)
        considered_data_mask = np.stack((self.outlier_category_detailed['active power output'].values == 0,
                                         self.outlier_category_detailed['wind speed'].values == 0,
                                         self.outlier_category_detailed['absolute wind direction'].values == 0,
                                         self.outlier_category_detailed['environmental temperature'].values == 0),
                                        axis=1)
        fitting_data[~considered_data_mask] = np.nan
        # Linearise the data (4-D keeps all columns; 3-D drops temperature)
        if dims == 4:
            return self.__transform_data_to_linear_for_4d_vine_gmcm_model(fitting_data, path_)
        elif dims == 3:
            return self.__transform_data_to_linear_for_3d_vine_gmcm_model(fitting_data[:, :3], path_)
def __prepare_fitting_data_for_4d_vine_gmcm_model(self, path_):
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
return self.__prepare_fitting_data_for_vine_gmcm_model(path_, 4)
def __prepare_fitting_data_for_3d_vine_gmcm_model(self, path_):
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
return self.__prepare_fitting_data_for_vine_gmcm_model(path_, 3)
def fit_4d_cvine_gmcm_model(self):
"""
对于4维的vine模型,只考虑每个pair copula对应的两个变量对应的outlier_category_detailed是0的情况。
维度的名字依次是:'active power output', 'wind speed', 'absolute wind direction', 'environmental temperature'。
因为有两个区域,所以其实本质上是两个独立的4d_vine_gmcm_model
"""
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
path_ = self.results_path / '4d_cvine_gmcm_model/' / self.__str__() / '/'
try_to_find_folder_path_otherwise_make_one((path_, path_ + 'a/', path_ + 'b/'))
fitting_data = self.__prepare_fitting_data_for_4d_vine_gmcm_model(path_)
for this_region, this_fitting_data in fitting_data.items():
if (this_region != 'a') and (this_region != 'b'):
continue
vine_gmcm_copula = VineGMCMCopula(this_fitting_data,
construction=FOUR_DIM_CVINE_CONSTRUCTION,
gmcm_model_folder_for_construction_path_=path_ + this_region + '/',
marginal_distribution_file_=path_ + this_region + '/marginal.pkl')
vine_gmcm_copula.fit()
    def fit_3d_cvine_gmcm_model(self, use_ws_ahead: int = 0):
        """
        Fit the 3-D C-vine GMCM model.
        A pair copula only uses recordings whose outlier_category_detailed is 0 for both of its two
        variables. The dimensions are, in order: 'active power output', 'wind speed',
        'absolute wind direction'.
        Since there are two regions, this is essentially two independent models.
        :param use_ws_ahead: if nonzero, shift the wind direction column one step ahead
                             (results go to a separate folder)
        """
        # TODO Deprecated
        warnings.warn("Deprecated", DeprecationWarning)
        if use_ws_ahead == 0:
            path_ = self.results_path + '3d_cvine_gmcm_model/' + self.__str__() + '/'
        else:
            path_ = self.results_path + '3d_cvine_gmcm_model_use_ws_ahead_{}/'.format(use_ws_ahead) + \
                    self.__str__() + '/'
        try_to_find_folder_path_otherwise_make_one((path_, path_ + 'a/', path_ + 'b/'))
        fitting_data = self.__prepare_fitting_data_for_3d_vine_gmcm_model(path_)
        for this_region, this_fitting_data in fitting_data.items():
            # DEBUG: For the IET paper, only consider region_a
            if this_region != 'a':
                continue
            if use_ws_ahead != 0:
                # Shift WD one step ahead; the last row has no successor and becomes NaN
                this_fitting_data[:, 2] = np.roll(this_fitting_data[:, 2], -1)
                this_fitting_data[-1, 2] = np.nan
            vine_gmcm_copula = VineGMCMCopula(this_fitting_data,
                                              construction=THREE_DIM_CVINE_CONSTRUCTION,
                                              gmcm_model_folder_for_construction_path_=path_ + this_region + '/',
                                              marginal_distribution_file_=path_ + this_region + '/marginal.pkl')
            vine_gmcm_copula.fit()
            # DEBUG: For the IET paper
            # flag = np.bitwise_and(self.outlier_category_detailed['wind speed'].values == 0,
            #                       self.outlier_category_detailed['absolute wind direction'].values == 0)
            # ws = self.measurements['wind speed'].values[flag]
            # wd = self.measurements['absolute wind direction'].values[flag]
            # GMCM(gmcm_model_file_=path_ + this_region + '/GMCM_(2, 3).mat',
            #      ndarray_data=np.stack((ws, wd), axis=1),
            #      marginal_distribution_file_=path_ + this_region + '/marginal_for_GMCM_(2, 3).pkl',
            #      gmcm_fitting_k=8,
            #      gmcm_max_fitting_iteration=2500,
            #      gmcm_fitting_attempt=1,
            #      )
def fit_2d_conditional_probability_model_by_gmm(self, *, bin_step: float, gmm_args: dict = None, **kwargs):
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
gmm_args = gmm_args or {}
_path = kwargs.get('_path') or (self.results_path + '2d_conditional_probability_by_gmm/' + self.__str__() + \
f' bin_step={bin_step}/')
try_to_find_folder_path_otherwise_make_one(_path)
mask = np.bitwise_or(self.outlier_category == 0,
self.outlier_category == 5)
bivariate = Bivariate(self.measurements['wind speed'].values[mask],
self.measurements['active power output'].values[mask],
bin_step=bin_step)
@load_exist_pkl_file_otherwise_run_and_save(_path + (kwargs.get('model_name') or 'model.pkl'))
def load_or_make():
return bivariate.fit_mob_using_gaussian_mixture_model(**gmm_args)
load_or_make()
    def estimate_active_power_output_by_2d_conditional_probability_model_by_gmm(self,
                                                                                wind_speed_ndarray: ndarray,
                                                                                *, bin_step: float,
                                                                                if_no_available_mode: Union[int, str] =
                                                                                'nearest_not_none_bin_keys') -> \
            Tuple[UnivariateGaussianMixtureModel, ...]:
        """Select, per wind-speed sample, the fitted conditional GMM of active power output.

        :param wind_speed_ndarray: wind speed values to look up
        :param bin_step: must match the bin_step used when fitting
        :param if_no_available_mode: fall-back strategy when a bin has no model
        :return: one UnivariateGaussianMixtureModel per input sample
        """
        # TODO Deprecated
        warnings.warn("Deprecated", DeprecationWarning)
        path_ = self.results_path + '2d_conditional_probability_by_gmm/' + self.__str__() + \
                ' bin_step={}/'.format(bin_step)
        model = load_pkl_file(path_ + 'model.pkl')
        # Compute (actually: select) the conditional probability model of the output
        power_output_model = []
        for this_predictor_var in wind_speed_ndarray:
            this_model_idx = MethodOfBins.find_mob_key_according_to_mob_or_mob_fitting_like_dict(this_predictor_var,
                                                                                                 model)
            if if_no_available_mode == 'nearest_not_none_bin_keys':
                power_output_model.append(
                    UnivariateGaussianMixtureModel(model[this_model_idx['nearest_not_none_bin_keys']]
                                                   ['this_bin_probability_model']))
            else:
                assert isinstance(if_no_available_mode, int)
                temp = []
                # NOTE(review): 'temp' is overwritten on every iteration and appended only once after
                # the loop, so only the model at index if_no_available_mode-1 survives -- it looks like
                # the append (or an accumulation into a list) was meant to happen inside the loop;
                # confirm the intended semantics before relying on this branch.
                for i in range(if_no_available_mode):
                    temp = UnivariateGaussianMixtureModel(model[this_model_idx['not_none_bin_keys'][i]]
                                                          ['this_bin_probability_model'])
                power_output_model.append(temp)
        return tuple(power_output_model)
@staticmethod
def estimate_active_power_output_by_mfr_power_curve(wind_speed_ndarray: ndarray):
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
return PowerCurveByMfr()(wind_speed_ndarray)
def __add_active_power_output_dim_for_copula_based_estimating_method(self, input_ndarray: ndarray,
linspace_number: int) -> ndarray:
"""
因为estimate的时候其实是条件概率,它们的输入少了第一维度(即:active power output),所以在将数据放入联合概率模型
之前要补充一个维度
"""
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
active_power_output_dim = np.linspace(-30, self.rated_active_power_output + 30, linspace_number)
active_power_output_dim = np.tile(active_power_output_dim, input_ndarray.shape[0])
input_ndarray = np.repeat(input_ndarray, linspace_number, 0)
return np.concatenate((active_power_output_dim.reshape(-1, 1), input_ndarray), axis=1)
@staticmethod
def __transform_estimating_method_results_to_normalised_pdf_like(
unnormalised_pdf_like: ndarray, linspace_number: int) -> Tuple[UnivariatePDFOrCDFLike, ...]:
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
normalised_pdf_like = []
for i in range(0, unnormalised_pdf_like.shape[0], linspace_number):
this_normalised_pdf_like = UnivariatePDFOrCDFLike(
pdf_like_ndarray=unnormalised_pdf_like[i:i + linspace_number, :])
normalised_pdf_like.append(this_normalised_pdf_like)
return tuple(normalised_pdf_like)
    def __estimate_active_power_output_by_copula_model(self, input_ndarray: ndarray, path_, dims) -> tuple:
        """Estimate the Pout conditional distribution per sample via the regional copula models.

        :param input_ndarray: predictor columns (WS first); a 1-D input is treated as one column
        :param path_: model folder for the requested dimensionality
        :param dims: 2, 3 or 4
        :return: tuple with one probabilistic-model object per input sample
        """
        # TODO Deprecated
        warnings.warn("Deprecated", DeprecationWarning)
        if input_ndarray.ndim == 1:
            input_ndarray = np.expand_dims(input_ndarray, 1)
        estimated_active_power_output_pdf_like = np.array([None for _ in range(input_ndarray.shape[0])])
        # Set the model resolution, expand dims so the joint probability can be evaluated,
        # and compute the region masks
        linspace_number = 500
        input_ndarray_modify = self.__add_active_power_output_dim_for_copula_based_estimating_method(
            input_ndarray, linspace_number)
        if dims == 4:
            prepared_data = self.__transform_data_to_linear_for_4d_vine_gmcm_model(input_ndarray_modify, path_)
            this_construction = FOUR_DIM_CVINE_CONSTRUCTION
        elif dims == 3:
            prepared_data = self.__transform_data_to_linear_for_3d_vine_gmcm_model(input_ndarray_modify, path_)
            this_construction = THREE_DIM_CVINE_CONSTRUCTION
        elif dims == 2:
            prepared_data = self.__transform_data_to_linear_for_2d_gmcm_model(input_ndarray_modify, path_)
            this_construction = None
        else:
            raise Exception("Unsupported dims")
        (region_1_mask, region_a_mask_in_input_data, region_rated_mask,
         region_b_mask_in_input_data, region_5_mask, hard_rated_mask, hard_cut_off_mask) = \
            PowerCurve.cal_region_boundary_mask(prepared_data['model_boundary'], input_ndarray[:, 0])
        # For region_a and region_b, use the advanced uncertainty model to estimate
        for this_region, this_prepared_data in prepared_data.items():
            """
            DEBUG for IET
            """
            # DEBUG for IET: region 'b' is intentionally skipped here
            if this_region == 'b':
                continue
            if ((this_region != 'a') and (this_region != 'b')) or (this_prepared_data is None):
                continue
            # Only models with 3 or more dims use VineGMCMCopula; the 2-D case uses plain GMCM
            if dims >= 3:
                vine_gmcm_copula = VineGMCMCopula(construction=this_construction,
                                                  gmcm_model_folder_for_construction_path_=path_ + this_region + '/',
                                                  marginal_distribution_file_=path_ + this_region + '/marginal.pkl')
                pdf_ = vine_gmcm_copula.cal_joint_pdf(ndarray_data_like=this_prepared_data)
            else:
                gmcm_copula = GMCM(gmcm_model_file_=path_ + this_region + '/GMCM_(1, 2).mat',
                                   marginal_distribution_file_=path_ + this_region + '/marginal.pkl')
                pdf_ = gmcm_copula.cal_joint_pdf(ndarray_data_like=this_prepared_data)
            # Pair each joint-pdf value with its Pout grid point, then normalise per sample
            pdf_ = np.stack((pdf_, input_ndarray_modify[prepared_data[this_region + '_mask'], 0]), axis=1)
            pdf_ = self.__transform_estimating_method_results_to_normalised_pdf_like(pdf_, linspace_number)
            if this_region == 'a':
                estimated_active_power_output_pdf_like[region_a_mask_in_input_data] = pdf_
            else:
                estimated_active_power_output_pdf_like[region_b_mask_in_input_data] = pdf_
        """
        DEBUG for IET
        """
        # DEBUG for IET: deterministic outputs for the trivial regions (below cut-in, rated, above cut-off)
        estimated_active_power_output_pdf_like[region_1_mask] = DeterministicUnivariateProbabilisticModel(0.)
        estimated_active_power_output_pdf_like[hard_rated_mask] = DeterministicUnivariateProbabilisticModel(
            1. * self.rated_active_power_output)
        estimated_active_power_output_pdf_like[hard_cut_off_mask] = DeterministicUnivariateProbabilisticModel(0.)
        return tuple(estimated_active_power_output_pdf_like)
def estimate_active_power_output_by_2d_gmcm_model(self, input_ndarray: ndarray) -> tuple:
"""
运用2维的GMCM模型去估计有功输出。
:param input_ndarray 2维。维度的名称依次是:'wind speed'
:return: UnivariatePDFLike组成的tuple
"""
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
path_ = self.results_path + '2d_gmcm_model/' + self.__str__() + '/'
return self.__estimate_active_power_output_by_copula_model(input_ndarray, path_, 2)
    def estimate_active_power_output_by_2d_gmcm_model_with_uncertain_inputs(self, input_ndarray: ndarray) -> tuple:
        """Estimate Pout from interval-valued (lower, upper) wind speed inputs.

        NOTE(review): this method appears unfinished -- it never returns, and the final
        np.stack of two dicts followed by string-indexing the result would fail at runtime;
        the original author's comment below explains why work stopped. Do not call.
        """
        # TODO Deprecated
        warnings.warn("Deprecated", DeprecationWarning)
        path_ = self.results_path + '2d_gmcm_model/' + self.__str__() + '/'
        estimated_active_power_output_pdf_like = np.array([None for _ in range(input_ndarray.shape[0])])
        linspace_number = 500
        # Prepare the lower bound (column 0 of the input)
        input_ndarray_modify_with_lower = self.__add_active_power_output_dim_for_copula_based_estimating_method(
            input_ndarray[:, [0]], linspace_number)
        prepared_data_with_lower = self.__transform_data_to_linear_for_2d_gmcm_model(input_ndarray_modify_with_lower,
                                                                                    path_)
        # Prepare the upper bound (column 1 of the input)
        input_ndarray_modify_with_upper = self.__add_active_power_output_dim_for_copula_based_estimating_method(
            input_ndarray[:, [1]], linspace_number)
        prepared_data_with_higher = self.__transform_data_to_linear_for_2d_gmcm_model(input_ndarray_modify_with_upper,
                                                                                     path_)
        prepared_data = np.stack((prepared_data_with_lower, prepared_data_with_higher))
        # Start the computation
        """
        Stopped!!! Because it is unknown which boundary to use!!! This is the downside of the
        model being discontinuous.
        """
        (region_1_mask, region_a_mask_in_input_data, region_rated_mask,
         region_b_mask_in_input_data, region_5_mask, hard_rated_mask, hard_cut_off_mask) = \
            PowerCurve.cal_region_boundary_mask(prepared_data['model_boundary'], input_ndarray[:, 0])
def estimate_active_power_output_by_3d_cvine_gmcm_model(self, input_ndarray: ndarray, use_ws_ahead=0) -> tuple:
"""
运用3维的vine模型去估计有功输出。
:param input_ndarray 3维。维度的名称依次是:'wind speed', 'absolute wind direction'
:param use_ws_ahead 主要服务于PMAPS 2020 paper
:return: UnivariatePDFLike组成的tuple
"""
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
if use_ws_ahead == 0:
path_ = self.results_path + '3d_cvine_gmcm_model/' + self.__str__() + '/'
elif use_ws_ahead == 1:
path_ = self.results_path + '3d_cvine_gmcm_model_use_ws_ahead_1/' + self.__str__() + '/'
else:
raise
return self.__estimate_active_power_output_by_copula_model(input_ndarray, path_, 3)
def estimate_active_power_output_by_4d_cvine_gmcm_model(self, input_ndarray: ndarray) -> tuple:
"""
运用4维的vine模型去估计有功输出。运用这个函数前必须让fit_4d_vine_gmcm_model完整地跑一遍。
:param input_ndarray 3维。维度的名称依次是:'wind speed', 'absolute wind direction', 'environmental temperature'
:return: UnivariatePDFLike组成的tuple
"""
# TODO Deprecated
warnings.warn("Deprecated", DeprecationWarning)
path_ = self.results_path + '4d_cvine_gmcm_model/' + self.__str__() + '/'
return self.__estimate_active_power_output_by_copula_model(input_ndarray, path_, 4)
    def identify_outlier(self):
        """Classify every recording, per dimension and overall, caching results to disk.

        Populates self.outlier_category_detailed (per-dimension codes) and
        self.outlier_category (overall code driven by active power output).
        """
        # TODO Deprecated
        warnings.warn("Deprecated", DeprecationWarning)
        try_to_find_folder_path_otherwise_make_one(self.results_path + 'Filtering/' + self.__str__() + '/')
        # First, analyse outliers separately for each dimension
        @load_exist_pkl_file_otherwise_run_and_save(
            self.results_path + 'Filtering/' + self.__str__() + '/outlier_category_detailed.pkl')
        def load_or_make_outlier_category_detailed():
            self.outlier_category_detailed = pd.DataFrame(np.full(self.measurements.shape, 0, dtype=int),
                                                          columns=self.measurements.columns)
            # wind speed outlier
            self.outlier_category_detailed.loc[
                self.__identify_missing_data_outlier('wind speed'), 'wind speed'] = -1
            self.outlier_category_detailed.loc[
                self.__identify_out_of_range_outlier('wind speed', 0, 50), 'wind speed'] = 1
            self.outlier_category_detailed.loc[
                self.__identify_linear_series_outlier('wind speed'), 'wind speed'] = 2
            # active power output outlier
            self.outlier_category_detailed.loc[
                self.__identify_missing_data_outlier('active power output'), 'active power output'] = -1
            self.outlier_category_detailed.loc[self.__identify_shut_down_outlier(), 'active power output'] = 1
            self.outlier_category_detailed.loc[self.__identify_change_point_outlier('active power output'),
                                               'active power output'] = 2
            self.outlier_category_detailed.loc[self.__identify_curtailment_outlier(), 'active power output'] = 3
            self.outlier_category_detailed.loc[self.__identify_interquartile_outlier(), 'active power output'] = 5
            # absolute wind direction outlier
            self.outlier_category_detailed.loc[
                self.__identify_missing_data_outlier('absolute wind direction'), 'absolute wind direction'] = -1
            self.outlier_category_detailed.loc[
                self.__identify_out_of_range_outlier('absolute wind direction', 0, 360), 'absolute wind direction'] = 1
            self.outlier_category_detailed.loc[
                self.__identify_linear_series_outlier('absolute wind direction'), 'absolute wind direction'] = 2
            # environmental temperature outlier
            self.outlier_category_detailed.loc[
                self.__identify_missing_data_outlier('environmental temperature'), 'environmental temperature'] = -1
            return self.outlier_category_detailed
        # Because active power output is the central variable in this project, additionally analyse
        # outliers of the 2-D series it forms together with wind speed
        @load_exist_npy_file_otherwise_run_and_save(
            self.results_path + 'Filtering/' + self.__str__() + '/outlier_category.npy')
        def load_or_make():
            self.outlier_category = self.outlier_category_detailed['active power output'].values
            # A linear-series wind speed (code 2) overrides the Pout code with overall category 4
            self.outlier_category[self.outlier_category_detailed['wind speed'].values == 2] = 4
            return self.outlier_category
        self.outlier_category_detailed = load_or_make_outlier_category_detailed()
        self.outlier_category = load_or_make()
class WF(WTandWFBase):
__slots__ = ("cut_in_wind_speed", "cut_out_wind_speed", "rated_active_power_output", "number_of_wind_turbine")
def __init__(self, *args, rated_active_power_output: Union[int, float],
number_of_wind_turbine: int = None, **kwargs):
super().__init__(*args, rated_active_power_output=rated_active_power_output, **kwargs)
self.number_of_wind_turbine = number_of_wind_turbine
@classmethod
def init_from_wind_turbine_instances(
cls, wind_turbine_instances: Sequence[WT], *,
obj_name: str,
wind_turbine_instances_data_category: Sequence[DataCategoryData] = None
) -> Tuple[WF, pd.DataFrame]:
"""
To initialise a WF instance from a group of WT instances.
Can only work on averaging WS and Pout.
Specifically, if 'wind_turbine_instances_data_category' is provided, then only initialise using 'shutdown',
'curtailed', 'operating' WT recordings, and will also return valid total_curtailment_amount
:return:
"""
wind_farm_df = pd.DataFrame()
rated_active_power_output = []
total_curtailment_amount = pd.DataFrame()
for i, this_wind_turbine in enumerate(wind_turbine_instances):
this_wind_turbine = copy.deepcopy(this_wind_turbine)
# If WT data category information is available
if wind_turbine_instances_data_category is not None:
# Only consider 'shutdown', 'curtailed', 'operating', the rest (i.e., 'nan') are NaN
this_wind_turbine[~wind_turbine_instances_data_category[i](
('shutdown', 'curtailed', 'operating')
)] = np.nan
# Curtailment amount is important information, if available
total_curtailment_amount = pd.merge(
total_curtailment_amount,
this_wind_turbine[wind_turbine_instances_data_category[i]('curtailed')][['active power output']],
how='outer', left_index=True, right_index=True, suffixes=(f'_WT{i}', f'_WT{i + 1}')
)
# 'non-missing' means that both the 'wind speed' and 'active power output' must be simultaneously not NaN,
# To achieve this, the data will be modified intentionally: i.e., to discard more data
any_nan_mask = this_wind_turbine[['wind speed', 'active power output']].isna().any(1).values
this_wind_turbine.loc[any_nan_mask, ['wind speed', 'active power output']] = np.nan
rated_active_power_output.append(this_wind_turbine.rated_active_power_output)
wind_farm_df = pd.merge(wind_farm_df, this_wind_turbine.pd_view()[['wind speed', 'active power output']],
how='outer', left_index=True, right_index=True,
suffixes=(f'_WT{i}', f'_WT{i + 1}'))
# Adjust to multi index, so the indexing will be easy
new_columns = pd.MultiIndex.from_arrays([[re.findall(r'.*(?=_)', x)[0] for x in wind_farm_df.columns],
[re.findall(r'(?<=_).*', x)[0] for x in wind_farm_df.columns]],
names=('Physical Quantity', 'WT No.'))
wind_farm_df.columns = new_columns
# Averaging
# Note that the treatment for WS and Pout are different
# For Pout, equivalent, the non-missing values are summed up
# For WS, equivalent, the non-missing values are averaged directly
# Note 'non-missing' means that both the 'wind speed' and 'active power output' must be simultaneously not NaN
wind_farm_df = pd.DataFrame(
{'wind speed': wind_farm_df['wind speed'].mean(1, skipna=True).values,
'active power output': wind_farm_df['active power output'].sum(1, skipna=True).values},
index=wind_farm_df.index
)
wind_farm_instance = cls(wind_farm_df,
rated_active_power_output=sum(rated_active_power_output),
number_of_wind_turbine=wind_turbine_instances.__len__(),
obj_name=obj_name,
predictor_names=('wind speed',),
dependant_names=('active power output',))
# Curtailment amount is important information, if available
if wind_turbine_instances_data_category is not None:
total_curtailment_amount = total_curtailment_amount.reindex(wind_farm_df.index).fillna(0).sum(axis=1)
return wind_farm_instance, total_curtailment_amount
@property
def default_results_saving_path(self):
saving_path = {
"outlier": self.results_path / f"Filtering/{self.__str__()}/results.pkl",
"operating regime": self.results_path / f"OperatingRegime/{self.__str__()}/report.csv",
"operating regime single senor": self.results_path / f"OperatingRegime/{self.__str__()}/"
f"single_sensor_classification.pkl",
"fully operating regime power curve": self.results_path / f"PowerCurve/{self.__str__()}/fully_OPR_8PL.pkl",
"fully operating regime power curve single senor": self.results_path / f"PowerCurve/{self.__str__()}/"
f"fully_OPR_8PL_single_senor.pkl",
"resample_and_also_resample_operating_regime": self.results_path / f"resample/{self.__str__()}/results.pkl"
}
for x in saving_path.values():
try_to_find_folder_path_otherwise_make_one(x.parent)
return saving_path
    @staticmethod
    def infer_operating_regime_from_wind_turbine_instances_data_category(
            wind_turbine_instances_data_category: Sequence[DataCategoryData],
    ) -> DataCategoryData:
        """Derive the WF-level operating regime from per-WT data categories.

        Each timestamp is labelled by the tuple (num operating, num curtailed, num shutdown-or-nan)
        across all WTs; each distinct tuple becomes a regime "S1", "S2", ... (rare tuples,
        < 0.01 % of recordings, are pooled into 'others').

        :param wind_turbine_instances_data_category: one DataCategoryData per WT
        :return: a DataCategoryData describing the WF-level operating regime
        """
        # %% Must be note that, in the classification of operating regime, WT-level 'shutdown' and 'nan' are treated
        # as the same group. Because they both have no contribution to the WF-level total power output calculation.
        # However, interestingly, note that WF-level wind speed calculate will be different!
        # Because 'shutdown' can provide OK wind speed, but 'nan' can not, especially due to elementwise deletion.
        wind_turbine_instances_data_category = copy.deepcopy(wind_turbine_instances_data_category)
        for obj in wind_turbine_instances_data_category:
            obj.abbreviation[np.isin(obj.abbreviation, ('shutdown', 'nan'))] = 'shutdown_or_nan'
        states_unique = ['operating', 'curtailed', 'shutdown_or_nan']
        # Parses a "(o, c, s)" combination string back into its named counts
        def parse_by_states_unique_func(x) -> dict:
            return parse('(' + ', '.join(map(lambda y: '{' + y + '}', states_unique)) + ')', x).named
        # %% Obtain a pd.DataFrame obj that stores all WT-level information for convenience
        wind_turbine_instances_data_category_df = pd.DataFrame(dtype=str)
        for i, this_wind_turbine_instances_data_category in enumerate(wind_turbine_instances_data_category):
            wind_turbine_instances_data_category_df = pd.merge(
                wind_turbine_instances_data_category_df,
                this_wind_turbine_instances_data_category.pd_view,
                how='outer', left_index=True, right_index=True,
                suffixes=(f'_WT{i}', f'_WT{i + 1}')
            )
        wind_turbine_instances_data_category_df.fillna('shutdown_or_nan', inplace=True)
        wind_turbine_instances_data_category_df = wind_turbine_instances_data_category_df.astype(str)
        # %% Obtain a pd.DataFrame obj from the WF-level point of view
        operating_regime_df = pd.DataFrame(
            columns=states_unique + ['combination'],
            index=wind_turbine_instances_data_category_df.index,
        )
        # Count, per timestamp, how many WTs are in each state
        for this_state in states_unique:
            operating_regime_df[this_state] = np.sum(
                wind_turbine_instances_data_category_df.values == this_state, 1
            ).astype(int)
        assert (int(np.unique(np.sum(operating_regime_df[states_unique].values, 1))) == len(
            wind_turbine_instances_data_category)), "'wind_turbine_instances_data_category' sum on axis_1 is wrong"
        operating_regime_df['combination'] = list(zip(*operating_regime_df[states_unique].values.T))
        operating_regime_df['combination'] = operating_regime_df['combination'].astype(str)
        # %% Infer a DataCategoryNameMapper obj
        combination_unique = np.unique(operating_regime_df['combination'])
        # Sort so that regimes with more operating WTs come first
        combination_unique = sorted(combination_unique,
                                    key=lambda x: (parse_by_states_unique_func(x)['operating'],
                                                   parse_by_states_unique_func(x)['curtailed'],
                                                   parse_by_states_unique_func(x)['shutdown_or_nan']),
                                    reverse=True)
        operating_regime_name_mapper = DataCategoryNameMapper.init_from_template()
        abbreviation_i = 0
        for i, this_combination in enumerate(combination_unique):
            # The rules for not considering:
            # WF-level outliers: if there are any WT recordings unexplainable
            if np.sum(this_combination == operating_regime_df['combination']) / len(operating_regime_df) < 0.0001:
                # if int(parse_by_states_unique_func(this_combination)['nan']) > 0:
                abbreviation_set = 'others'
            else:
                abbreviation_i += 1
                abbreviation_set = f"S{abbreviation_i}"
            operating_regime_name_mapper.loc[i] = [this_combination,
                                                   abbreviation_set,
                                                   -1,
                                                   parse_by_states_unique_func(this_combination).__str__()]
        # %% Obtain a DataCategoryData obj
        operating_regime = DataCategoryData(
            abbreviation=operating_regime_name_mapper.convert_sequence_data_key(
                'long name',
                'abbreviation',
                sequence_data=operating_regime_df['combination']
            ),
            index=operating_regime_df.index,
            name_mapper=operating_regime_name_mapper
        )
        # operating_regime.report(sorted_kwargs={'key': lambda x: "0" + x[1:] if x[1:].__len__() < 2 else x[1:]})
        return operating_regime
@property
def outlier_name_mapper(self) -> DataCategoryNameMapper:
meta = [["missing data", "missing", -1, "N/A"],
["others", "others", 0, "N/A"],
["Linear Pout-WS", "CAT-III", 4, "e.g., constant WS-variable Pout"]]
mapper = DataCategoryNameMapper.init_from_template(rows=len(meta))
mapper[:] = meta
return mapper
def outlier_detector(self, *data_category_is_linearity_args,
                     save_file_path: Path = None,
                     extra_boundary_rules: Iterable[dict] = None,
                     **data_category_is_linearity_kwargs) -> DataCategoryData:
    """Detect outliers for the wind farm and label linear Pout-WS recordings as CAT-III.

    Results are cached: if ``save_file_path`` already exists, the stored
    DataCategoryData obj is returned immediately (with a warning, so the caller
    knows nothing new was computed).

    :param data_category_is_linearity_args: positional args forwarded to
        ``self.data_category_is_linearity`` (defaults to ('60T',) when empty)
    :param save_file_path: pkl cache path; defaults to
        ``self.default_results_saving_path["outlier"]``
    :param extra_boundary_rules: optional boundary dicts; recordings inside any
        such boundary are also flagged CAT-III
    :param data_category_is_linearity_kwargs: keyword args forwarded to
        ``self.data_category_is_linearity`` (defaults to
        {"constant_error": {'wind speed': 0.01}} when empty)
    :return: DataCategoryData obj with per-recording outlier abbreviations
    """
    save_file_path = save_file_path or self.default_results_saving_path["outlier"]
    # Fast path: reuse cached results if present.
    if try_to_find_file(save_file_path):
        warnings.warn(f"{self.__str__()} has results in {save_file_path}")
        return load_pkl_file(save_file_path)['DataCategoryData obj']
    # Base categories come from the parent class detector.
    outlier = super().outlier_detector()  # type: DataCategoryData
    # %% CAT-III
    if data_category_is_linearity_args == ():
        data_category_is_linearity_args = ('60T',)
    if data_category_is_linearity_kwargs == {}:
        data_category_is_linearity_kwargs = {"constant_error": {'wind speed': 0.01}}
    cat_iii_outlier_mask = self.data_category_is_linearity(*data_category_is_linearity_args,
                                                           **data_category_is_linearity_kwargs)
    # Recordings inside any extra user-supplied boundary are also treated as CAT-III.
    if extra_boundary_rules is not None:
        for this_boundary in extra_boundary_rules:
            cat_iii_outlier_mask = np.bitwise_or(cat_iii_outlier_mask,
                                                 self.data_category_inside_boundary(this_boundary))
    outlier.abbreviation[cat_iii_outlier_mask] = "CAT-III"
    # Persist both the raw arrays and the full obj for later reuse.
    save_pkl_file(save_file_path,
                  {'raw_ndarray_data': np.array(outlier.abbreviation),
                   'raw_index': outlier.index,
                   'DataCategoryData obj': outlier})
    return outlier
def outlier_detector_for_extra_feature(self) -> ndarray:
    """Return a boolean outlier mask for the extra (non power-curve) features.

    A recording is an outlier when any considered extra feature is outside its
    normal range, or is (near-)constant over a 30-minute window.
    :return: boolean ndarray mask, True where the recording is an outlier
    """
    # Extra features = every column except the power-curve pair, restricted to
    # those that have a known normal range.
    extra_features = (set(self.columns) - {'wind speed',
                                           'active power output'}) & set(FEATURE_NORMAL_RANGE)
    # Outside-boundary outlier.
    boundary = {feature: FEATURE_NORMAL_RANGE[feature] for feature in extra_features}
    outside_range_mask = ~self.data_category_inside_boundary(boundary)
    # Linear / stuck-value outlier.
    constant_mask = self.data_category_is_linearity(
        '30T', constant_error={feature: 0.00001 for feature in extra_features})
    return np.bitwise_or(outside_range_mask, constant_mask)
def operating_regime_detector(self, task: str = 'load', ts_freq_minutes: int = None, *,
                              load_with_new_params: dict = None):
    """Fit or load the single-sensor wind-farm operating-regime model.

    :param task: 'fit' runs the GA fit of an EquivalentWindFarmPowerCurve and
        returns None; 'load' evaluates the MLE regime, saves it, and returns
        (wf_pc_obj, regime); 'load raw' evaluates and returns the raw MLE tuple
        without saving.
    :param ts_freq_minutes: time-series frequency in minutes (required for 'fit')
    :param load_with_new_params: optional param overrides applied before evaluation
    :return: see ``task`` above
    """
    assert (task in ('load', 'fit', 'load raw')), "'Task' is not in ('load', 'fit', 'load raw')"
    pc_file_path = self.default_results_saving_path["fully operating regime power curve single senor"]
    operating_regime_file_path = self.default_results_saving_path["operating regime single senor"]
    # %% Prepare run the GA for a EquivalentWindFarmPowerCurve obj
    if task == 'fit':
        # Fit only on recordings where both wind speed and power are present.
        num_mask = np.bitwise_and(~np.isnan(self['wind speed'].values),
                                  ~np.isnan(self['active power output'].values))
        assert ts_freq_minutes is not None
    else:
        num_mask = np.full_like(self['wind speed'].values, fill_value=True).astype(bool)
    wf_pc_obj = EquivalentWindFarmPowerCurve(
        total_wind_turbine_number=self.number_of_wind_turbine,
        wind_speed_recording=self['wind speed'].values[num_mask],
        active_power_output_recording=self['active power output'].values[num_mask] / self.rated_active_power_output,
        index=self.index[num_mask],
        wind_farm_ts_freq_minutes=ts_freq_minutes
    )
    # If there are any fitting results in the saving path, then they can be used as initials
    if try_to_find_file(pc_file_path):
        current_best = load_pkl_file(pc_file_path)[-1]['variable']
        wf_pc_obj.update_params(*current_best[:wf_pc_obj.params.__len__()])  # The last is the best
        params_init_scheme = 'self'
    else:
        # Use prior params as the starting point.
        wf_pc_obj.update_params(*np.array([1., 0., -8.302, 12.188,
                                           0.326, 25.1, 24.5, 1.8]))
        params_init_scheme = 'self'
    if task == 'fit':
        # Long-running GA fit; intermediate results are checkpointed to pc_file_path.
        wf_pc_obj.fit(
            ga_algorithm_param={'max_num_iteration': 10,
                                'max_iteration_without_improv': 1000000,
                                'population_size': 500},
            params_init_scheme=params_init_scheme,
            run_n_times=10000000,
            save_to_file_path=pc_file_path,
            focal_error=0.001,
            function_timeout=6000,
        )
    else:
        print(f"best found = {wf_pc_obj}")
        if load_with_new_params is not None:
            # Override selected params before evaluating the regime.
            old_params = wf_pc_obj.params_ordered_dict
            old_params.update(load_with_new_params)
            wf_pc_obj.update_params(**old_params)
        operating_regime = wf_pc_obj.maximum_likelihood_estimation_for_wind_farm_operation_regime(
            task='evaluate',
            return_fancy=True
        )
        if task == 'load':
            # Persist the fancy regime object (last element of the MLE result).
            save_pkl_file(operating_regime_file_path, operating_regime[-1])
            return wf_pc_obj, operating_regime[-1]
        else:
            return operating_regime
def resample_and_also_resample_operating_regime(self,
                                                resample_args: tuple = ('10T',),
                                                resample_kwargs: dict = None, *,
                                                operating_regime_file_path: Path = None,
                                                additional_outlier_mask: ndarray):
    """
    Resample the wind farm, and most importantly, resample the operating regime.
    The method to resample the operating regime is to select the most frequent value
    in the new sampling window (majority vote).

    :param resample_args: positional args for ``self.resample`` (e.g. the new frequency)
    :param resample_kwargs: keyword args for ``self.resample``; defaults to a
        mean-aggregating resampler
    :param operating_regime_file_path: pkl with a previously detected
        DataCategoryData regime; defaults to the single-sensor saving path
    :param additional_outlier_mask: extra recordings to blank out (set to NaN)
        on top of the stored outliers
    :return: (resampled WF obj, resampled DataCategoryData regime); the pair is
        cached via load_exist_pkl_file_otherwise_run_and_save
    """

    @load_exist_pkl_file_otherwise_run_and_save(
        self.default_results_saving_path['resample_and_also_resample_operating_regime'])
    def func():
        nonlocal operating_regime_file_path
        nonlocal resample_kwargs
        self_copy = copy.deepcopy(self)
        # For single sensor WF, the operating regime must be detected using as high resolution data
        # as possible. Therefore, 'operating_regime_detector' must have been called (so there will
        # be results in self.default_results_saving_path["operating regime single senor"], or as specified)
        if operating_regime_file_path is None:
            operating_regime_file_path = self.default_results_saving_path["operating regime single senor"]
        operating_regime = load_pkl_file(operating_regime_file_path)  # type: DataCategoryData
        assert operating_regime is not None
        resample_kwargs = resample_kwargs or {
            'resampler_obj_func_source_code': "agg(lambda x: np.mean(x.values))"
        }
        # Blank out stored outliers plus any additionally requested recordings.
        existing_outlier = load_pkl_file(self_copy.default_results_saving_path["outlier"])['DataCategoryData obj']
        self_copy.loc[np.bitwise_or(~existing_outlier('others'),
                                    additional_outlier_mask), :] = np.nan
        # Resample self
        resampled_self = self_copy.resample(*resample_args, **resample_kwargs)
        resampled_self.obj_name = self_copy.obj_name + f" resampled"

        # Resample operating regime
        def rolling_func(x):
            # Majority vote inside the rolling window.
            (values, counts) = np.unique(x, return_counts=True)
            index = np.argmax(counts)
            return int(values[index])

        # 'S3' -> 3, roll, vote, then align to the resampled index and map back to 'S3'.
        rolling_obj = operating_regime.pd_view.applymap(lambda x: int(x[1:])).rolling(*resample_args)
        resampled_operating_regime = rolling_obj.apply(rolling_func, raw=True)
        resampled_operating_regime = resampled_operating_regime.reindex(resampled_self.index, method='nearest')
        resampled_operating_regime = resampled_operating_regime.astype(int)
        resampled_operating_regime = DataCategoryData(
            abbreviation=resampled_operating_regime.applymap(lambda x: f"S{int(x)}").values.flatten(),
            index=resampled_operating_regime.index
        )
        return resampled_self, resampled_operating_regime

    return func()
def init_assuming_all_fully_operating(self, ts_freq_minutes) -> WF:
    """Return a deep copy of self whose power output is scaled up as if every
    turbine were fully operating, using the raw detected operating regime.

    :param ts_freq_minutes: time-series frequency forwarded to the regime detector
    :return: new WF obj named '<obj_name>_assuming_all_fully_operating'
    """
    regime_raw = self.operating_regime_detector('load raw', ts_freq_minutes)
    scaled = copy.deepcopy(self)
    scaled.obj_name = self.obj_name + '_assuming_all_fully_operating'
    # Scale recorded Pout by total/normally-operating turbine count, then add
    # back the regime offset term.
    recorded_pout = scaled['active power output'].values
    full_pout = (regime_raw[1].values +
                 recorded_pout * self.number_of_wind_turbine / regime_raw[0]['normally_operating_number'])
    scaled.loc[:, 'active power output'] = full_pout
    return scaled
def plot(self, *,
         ax=None,
         plot_mfr: Iterable[PowerCurveByMfr] = None,
         operating_regime: DataCategoryData = None,
         plot_individual: bool = False,
         not_show_color_bar=False,
         **kwargs):
    """Scatter-plot the WF power curve, optionally coloured by operating regime.

    :param ax: existing matplotlib axes to draw on (None -> new figure)
    :param plot_mfr: manufacturer power curves to overlay
    :param operating_regime: per-recording regime labels; when given, each regime
        gets its own colour and a horizontal colour-bar legend on top
    :param plot_individual: if True, each regime is drawn on fresh axes
    :param not_show_color_bar: suppress the colour-bar legend
    :return: the matplotlib axes drawn on
    """
    if operating_regime is None:
        ax = super(WF, self).plot(ax=ax, plot_mfr=plot_mfr, **kwargs)
    else:
        # Sort regimes S1, S2, ... numerically, with 'others' last.
        unique_abbreviation = np.unique(operating_regime.abbreviation)
        unique_abbreviation = unique_abbreviation[unique_abbreviation != 'others']
        unique_abbreviation_sort = sorted(unique_abbreviation, key=lambda x: int(parse(r"S{}", x)[0]))
        unique_abbreviation_sort = np.append(unique_abbreviation_sort, 'others')
        # Prepare one colour per regime from the chosen colormap.
        cmap_name = 'jet'  # alternatives: 'copper', 'cool'
        custom_cm = plt.cm.get_cmap(cmap_name, unique_abbreviation_sort.__len__())
        color_list = custom_cm(range(unique_abbreviation_sort.__len__()))[np.newaxis, :, :3]
        for i, this_operating_regime in enumerate(unique_abbreviation_sort):
            # NOTE(review): for both 'S1' and 'others' the S1 subset is first drawn as a
            # background layer via the parent plot — confirm this re-draw is intended.
            if this_operating_regime in ('S1', 'others'):
                ax = super(WF, self[operating_regime('S1')]).plot(ax=ax if not plot_individual else None,
                                                                  plot_mfr=plot_mfr, zorder=-1,
                                                                  color=tuple(color_list[:, 0, :].squeeze()),
                                                                  **kwargs)
            this_operating_regime_mask = operating_regime(this_operating_regime)
            ax = scatter(self[this_operating_regime_mask]['wind speed'],
                         self[this_operating_regime_mask]['active power output'] / self.rated_active_power_output,
                         ax=ax if not plot_individual else None,
                         color=tuple(color_list[:, i, :].squeeze()),
                         **kwargs)
        # Colour-bar legend: one rotated text label per regime, above the axes.
        if not not_show_color_bar:
            norm = mpl.colors.Normalize(vmin=0, vmax=1)
            sm = plt.cm.ScalarMappable(cmap=plt.get_cmap(cmap_name, unique_abbreviation_sort.__len__()), norm=norm)
            sm.set_array([])
            divider = make_axes_locatable(ax)
            cax = divider.append_axes("top", size="5%", pad=0.05)
            cbar = plt.colorbar(sm, cax=cax, ax=ax, ticks=(), orientation='horizontal')
            for j, lab in enumerate(unique_abbreviation_sort):
                if lab == 'others':
                    lab = 'Others'
                else:
                    lab = operating_regime.name_mapper.infer_from_abbreviation(lab)['long name'].values[0]
                cbar.ax.text((2 * j + 1) / (unique_abbreviation_sort.__len__() * 2), 3, lab, ha='center',
                             va='center',
                             fontsize=10, rotation=45)
    # Handy subplot-adjust values kept by the original author (no-op string).
    """
    top=0.89,
    bottom=0.125,
    left=0.11,
    right=0.995,
    hspace=0.2,
    wspace=0.2
    """
    return ax
if __name__ == '__main__':
    # Smoke test: build a small WF instance (8 rows x 15 columns) and index into it.
    tt = WF(
        np.arange(120).reshape((8, 15)),
        obj_name='tt_name',
        predictor_names=('tt_predictor_names',),
        dependant_names=('tt_dependant_names',),
        rated_active_power_output=3000
    )
    # print(tt)
    cc = tt[0]
|
17,576 | b9d70a8bfe92fb27780b94a11590f80230dbbc7b | # bai 1
import pandas as pd
import matplotlib.pylab as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from dmba import regressionSummary, exhaustive_search
from dmba import backward_elimination, forward_selection, stepwise_selection
from dmba import adjusted_r2_score, AIC_score, BIC_score
# %% Load the Boston housing data and split into 60/40 train/test.
df = pd.read_csv("../dataset/BostonHousing.csv")
predictors = ['crim', 'zn', 'indus', 'chas', 'nox', 'rm', 'age', 'dis', 'rad', 'tax', 'ptratio', 'lstat']
outcome = 'medv'
X = df[predictors]
Y = df[outcome]
train_X, test_X, train_y, test_y = train_test_split(X, Y, test_size=0.4, random_state=1)
# FIX: 'ptradio' -> 'ptratio' so this list matches the dataframe columns/predictors.
variables = ['crim', 'zn', 'indus', 'chas', 'nox', 'rm', 'age', 'dis', 'rad', 'tax', 'ptratio', 'lstat']
# Small hand-picked reference model.
predictor_a = ['crim', 'chas', 'rm']
lm = LinearRegression()
lm.fit(train_X[predictor_a], train_y)
print('intercept:', lm.intercept_)
print(predictor_a, "\n", lm.coef_)
def train_model1(variables):
    """Fit a LinearRegression on the given training variables; None if empty."""
    if not variables:
        return None
    model = LinearRegression()
    model.fit(train_X[variables], train_y)
    return model
def score_model1(model, variables):
    """AIC of *model* on the training data; null-model AIC when *variables* is empty."""
    if not variables:
        # Null model: predict the training mean everywhere (df=1 for the intercept).
        return AIC_score(train_y, [train_y.mean()] * len(train_y), model, df=1)
    return AIC_score(train_y, model.predict(train_X[variables]), model)
#backward
def train_model2(variables):
    """Fit a LinearRegression on *variables* (never empty in backward elimination)."""
    # .fit() returns the estimator itself, so this is equivalent to fit-then-return.
    return LinearRegression().fit(train_X[variables], train_y)
def score_model2(model, variables):
    """AIC of the fitted *model* on the training data."""
    predictions = model.predict(train_X[variables])
    return AIC_score(train_y, predictions, model)
# ii: correlation matrix of all columns
# df.corr().to_csv("../dataset/corr.csv") correlation matrix
print(df.corr())
# iii: compare forward / backward / stepwise variable selection by AIC,
# then report test-set accuracy and plot the residual histogram for each.
print("-------------------------------FORWARD-----------------------------")
best_model1, best_variables1 = forward_selection(train_X.columns, train_model1, score_model1, verbose=True)
print(best_variables1, len(best_variables1))
regressionSummary(test_y, best_model1.predict(test_X[best_variables1]))
predictValue1 = best_model1.predict(test_X[best_variables1])
residual1 = test_y - predictValue1
plt.hist(residual1, bins=25)  # hist of residual
plt.show()
print("-----------------------------BACKWARD-------------------------------")
best_model2, best_variables2 = backward_elimination(train_X.columns, train_model2, score_model2, verbose=True)
print(best_variables2, len(best_variables2))
regressionSummary(test_y, best_model2.predict(test_X[best_variables2]))
predictValue2 = best_model2.predict(test_X[best_variables2])
residual2 = test_y - predictValue2
plt.hist(residual2, bins=25)  # hist of residual
plt.show()
print("------------------------------STEPWISE------------------------------")
best_model3, best_variables3 = stepwise_selection(train_X.columns, train_model1, score_model1, verbose=True)
print(best_variables3, len(best_variables3))
regressionSummary(test_y, best_model3.predict(test_X[best_variables3]))
predictValue3 = best_model3.predict(test_X[best_variables3])
residual3 = test_y - predictValue3
plt.hist(residual3, bins=25)  # hist of residual
plt.show()
#
#
|
17,577 | 1d3e2fa051a40ef99fabe28fd9a937dde7ed3519 | # Colour
# Basic RGB colour constants ((R, G, B) tuples, 0-255 per channel).
COLOR_RED = (255, 0, 0)
COLOR_GREEN = (0, 255, 0)
COLOR_BLUE = (0, 0, 255)
COLOR_GRAY = (100, 100, 100)
COLOR_LIGHT_GRAY = (200, 200, 200)
COLOR_DARK_GRAY = (50, 50, 50)
COLOR_BLACK = (0, 0, 0)
COLOR_WHITE = (255, 255, 255)
COLOR_YELLOW = (255, 255, 0)
COLOR_PINK = (255, 0, 255)
COLOR_PURPLE = (110, 0, 255)
# Role aliases for agent states in the simulation.
color_infected = COLOR_RED
color_recovered = COLOR_GREEN
color_normal = COLOR_WHITE
color_protected = COLOR_PURPLE
color_ded = COLOR_DARK_GRAY
|
17,578 | ccb4135556c30538d7ba6d81c73a738833f3985e | #Write a Python program to find the repeated items of a tuple.
def repeat(tuple, num):
    """Return how many times *num* occurs in *tuple*.

    NOTE: the first parameter name shadows the builtin ``tuple``; kept
    unchanged for backward compatibility with keyword callers.
    """
    return sum(1 for item in tuple if item == num)
# Demo driver: read a comma-separated sequence of ints and the element to count.
# (Shadows the builtin name 'tuple' — kept as-is.)
tuple = tuple(map(int, input("Input the integer seprated by comma : ").split(",")))
num = int(input("Input the element : "))
print("Repeated items of a tuple :", repeat(tuple, num))
|
17,579 | 33f47d959cdefd1edb187c47a2b85762cd37d795 |
from inputs.classes import CsvInput
from components.digraphs import TemporalDiGraph
from plots.plotter import Plotter
from plots.circle import Circle
from plots.slice import Slice
from algorithms.foremost import calculate_foremost_tree

# Load the temporal tube network from CSV and print a summary.
tube = TemporalDiGraph('TubeNetwork', data=CsvInput('./tube.csv'))
tube.details()
plotter = Plotter()
# Alternative plots kept for reference:
# plotter.single(Circle, tube.get_temporal_subgraph((840, 860)), ordered=True, save=True)
# plotter.single(Circle, calculate_foremost_tree(tube.get_temporal_subgraph((840, 860)), 'Blackhorse Road'), ordered=True, save=True)
# plotter.single(Slice, calculate_foremost_tree(tube.get_temporal_subgraph((840, 860)), 'Blackhorse Road'), slider=True, save=True)
# Foremost (earliest-arrival) tree from Warren Street within the 850-920 window.
plotter.single(Circle, calculate_foremost_tree(tube.get_temporal_subgraph((850, 920)), 'Warren Street'), ordered=True, save=True)
# plotter.single(Circle, tube, save=True)
# plotter.single(Slice, tube, save=False)
input("Press enter key to exit...")
17,580 | 79a36ebc8698628bab61ff7fdf1b30c1242d235b | from ase.build import bulk
from ase.visualize import view
def main():
    """Build a conventional (cubic) fcc Mg cell with a=4.05 Å and open the ASE viewer."""
    # NOTE(review): Mg is hcp in nature; fcc here is presumably a deliberate model
    # choice (a=4.05 Å matches Al's lattice constant) — confirm intent.
    atoms = bulk("Mg", "fcc", a=4.05, cubic=True)
    view(atoms)


if __name__ == "__main__":
    main()
|
17,581 | 32d1650653dfa435b0e2200672a6fe07b5d468c4 | #Tennesse Tax calculator
#Request amount of items from user
#set up while loop to accomodate this amount
#Use variable named total in while loop to keep a running total of items
#use variable named subtotal to keep running subtotal
def is_number(s):
    """Return True if *s* parses as a float, else False.

    Only ValueError is caught, so non-string/non-numeric types still raise.
    """
    try:
        float(s)
    except ValueError:
        return False
    return True
def convert_to_currency(s):
    """Truncate *s* to two decimal places (int() truncates toward zero)."""
    cents = int(s * 100)
    return cents / 100.00
# Tennessee sales tax as a multiplier (9% tax).
TN_TAX = 1.09
counter = 1
total = 0     # running total including tax
subtotal = 0  # running total before tax
# Re-prompt until the item count is numeric.
num_items = raw_input('How many items do you plan to check out?')
while not is_number(num_items):
    num_items = raw_input('How many items do you plan to check out?')
if is_number(num_items):
    num_items = float(num_items)
while counter <= num_items:
    # Re-prompt until the price is numeric.
    item_price = raw_input('What is the price of this item?')
    while not is_number(item_price):
        item_price = raw_input('ERROR INPUT \n What is the price of this item?')
    if is_number(item_price):
        item_price = float(item_price)
        counter += 1
        # Keep both running totals truncated to cents.
        subtotal = convert_to_currency(subtotal + item_price)
        total = convert_to_currency(total + (item_price * TN_TAX))
        print 'Your current subtotal is \n :: ' + '$' + str(subtotal)
        print 'Your current total is \n :: ' + '$' + str(total)
        if counter < num_items:
            print ' \n Next item please.'
print 'Thanks you for using my tax calculator'
#FUTURE WISHLIST
#Allow user to select a state to reflect tax rates
#Different tax rates for different classificatiosn of items
|
17,582 | f5885a5054001bcfb409de834d3f908e84b65133 | from copy import deepcopy
grid = []  # 2-D list of '#'/'.' cells, filled from day18.in below
ON = '#'
OFF = '.'
NUM_ITERATIONS = 100
STUCK_LIGHTS = True  # part-2 mode: the four corner lights are forced on each step
def get_active_neighbour_count(i, j):
    """Count ON cells among the (up to 8) neighbours of grid[i][j]."""
    active = 0
    for row in range(i - 1, i + 2):
        if not 0 <= row < len(grid):
            continue  # off the top/bottom edge
        for col in range(j - 1, j + 2):
            # Skip off-grid columns and the centre cell itself.
            if not 0 <= col < len(grid[row]) or (row == i and col == j):
                continue
            if grid[row][col] == ON:
                active += 1
    return active
# Read the initial light grid: one row of '#'/'.' characters per line.
with open('day18.in') as f:
    for line in f:
        line = line.rstrip('\n')
        grid.append(list(line))
def stick_lights():
    """Force the four corner lights ON (the part-2 'stuck lights' rule)."""
    for edge_row in (grid[0], grid[-1]):
        edge_row[0] = ON
        edge_row[-1] = ON
# Run the cellular automaton for NUM_ITERATIONS steps on a snapshot (new_grid),
# so each step reads only the previous generation.
# NOTE(review): with STUCK_LIGHTS the corners are only forced on *after* each
# step, not before the first one — confirm against the intended puzzle rules.
for _ in range(NUM_ITERATIONS):
    new_grid = deepcopy(grid)
    for i in range(len(grid)):
        for j in range(len(grid[i])):
            # ON stays on with 2 or 3 active neighbours; OFF turns on with exactly 3.
            if grid[i][j] == ON and get_active_neighbour_count(i, j) not in (2, 3):
                new_grid[i][j] = OFF
            elif grid[i][j] == OFF and get_active_neighbour_count(i, j) == 3:
                new_grid[i][j] = ON
    grid = new_grid
    if STUCK_LIGHTS:
        stick_lights()

# Total number of lights ON after the final step.
print(sum(map(lambda a: a.count(ON), (x for x in grid))))
|
17,583 | 118cd5fb77bd0fcb214f858976ce23fb884b9b0e | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import cPickle
import re

# Remove words consisting only of symbols or digits from the MeCab
# morphological-analysis output (Python 2 script).
after_mecab_file = "../../ResearchData/Experiment4/after_mecab/after_mecab"
f_docs_without_symbols = "../../ResearchData/Experiment4/after_mecab/docs_without_symbols"
# A normal word must contain at least one kana/kanji/latin letter.
pattern = re.compile(u"[.]*[a-zA-Zぁ-んァ-ン一-龥]+[.]*")
# A tag must contain at least one kana/kanji character (digits/latin/hyphen allowed around it).
patternForTag = re.compile(u"[ぁ-んァ-ンーa-zA-Z0-9一-龠0-9\-\r]*[ぁ-んァ-ン一-龥]+[ぁ-んァ-ンーa-zA-Z0-9一-龠0-9\-\r]*")
video_count = 0
add_file_name_list = ["1.pkl", "2.pkl"]
for name in add_file_name_list:
    with open(after_mecab_file + name, 'rb') as f:
        print "open: " + after_mecab_file + name
        docs = cPickle.load(f)
    print "convert str to unicode (decode)"
    docs_decoded = [[w.decode('utf-8') for w in doc] for doc in docs]
    print "len(docs_decoded):" + str(len(docs_decoded))
    docs = None  # free memory before the filtering pass
    docs_without_symbols = []
    append = docs_without_symbols.append  # local binding for speed in the hot loop
    for doc in docs_decoded:
        doc_without_symbols = []
        for w in doc:
            if not w.find('___'):
                # The word is a tag (starts with '___'): keep it only if it is longer
                # than 1 char and (longer than 2 chars or matches the tag pattern).
                if len(w) > 1 and (len(w) > 2 or patternForTag.match(w)):
                    doc_without_symbols.append(w)
            else:
                # Not a tag: keep it only if it matches the general word pattern.
                if pattern.match(w):
                    doc_without_symbols.append(w)
        append(doc_without_symbols)
        video_count += 1
        if (video_count + 1) % 10000 == 0:
            print "end videos: " + str(video_count + 1)
    docs_decoded = None
    print 'convert unicode to str (encode)'
    docs_encoded = [[w.encode('utf-8') for w in doc] for doc in docs_without_symbols]
    docs_without_symbols = None
    print "len(docs_encoded):" + str(len(docs_encoded))
    with open(f_docs_without_symbols + name, 'wb') as f_save:
        print "open for saving: " + f_docs_without_symbols + name
        cPickle.dump(docs_encoded, f_save, cPickle.HIGHEST_PROTOCOL)
    print "end save"
print "end videos: " + str(video_count)
17,584 | 952ad6eb13857535a14e43571baaf788af02742c | #!/usr/bin/env python
#temp= `i2cget -y 2 0x48`
# NOTE(review): this file is shell, but its shebang (line above this block)
# says python — it should be a shell shebang; confirm and fix.
# Read raw temperatures from two I2C sensors on bus 2 (addresses 0x48 and 0x4a).
temp1=`i2cget -y 2 0x48`
temp2=`i2cget -y 2 0x4a`
echo -n "Temp Sensor 1: "
# Integer C -> F conversion (C*18/10+32), then echo the raw reading.
echo $((temp1*18/10+32))
echo $(($temp1))
echo -n "Temp Sensor 2: "
echo $((temp2*18/10+32))
echo $(($temp2))
# Write values 22 to register 0x02 and 27 to register 0x03 (presumably
# T_hyst / T_os setpoints on an LM75-class sensor — confirm datasheet).
i2cset -y -r 2 0x48 0x02 22
i2cset -y -r 2 0x4a 0x02 22
i2cset -y -r 2 0x4a 0x03 27
# NOTE(review): the next line duplicates the previous one, and 0x48 never
# gets its 0x03 register written — possibly this should target 0x48; confirm.
i2cset -y -r 2 0x4a 0x03 27
17,585 | ed31d6516d6887c1c79b82fa1e740ad4a8747310 | # Generated by Django 3.1 on 2020-08-27 09:52
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the ``suggest`` URL field to the blog ``Post`` model."""

    dependencies = [
        ('blog', '0008_post_category'),
    ]

    operations = [
        migrations.AddField(
            model_name='post',
            name='suggest',
            # NOTE(review): default is the *string* 'None', not Python None — confirm intended.
            field=models.URLField(default='None'),
        ),
    ]
|
17,586 | b9a1af55655aebdba6a4b388c26c73f61184d425 | from weapon import *
class Robot:
    """A simple battler with hit points and a default laser weapon."""

    def __init__(self, name):
        self.name = name
        self.health = 10                    # starting hit points
        self.weapon = Weapon("Laser", 5)    # default weapon / initial attack power

    def assign_name(self):
        # NOTE(review): this resets the name to an empty tuple — looks
        # unfinished; behavior preserved as-is.
        self.name = ()

    def attack(self, dinosaur):
        """Reduce the target dinosaur's health by this robot's weapon power."""
        dinosaur.health -= self.weapon.attack_power

    def health_status(self):
        """Return current hit points."""
        return self.health
17,587 | 86b2deb8681060346fd463b73ca44234b19a6ee5 | #! /usr/bin/env python3
# -*- coding:Utf8 -*-
"""Create a diagonal movement using 2-D vectors."""
"APPRESS BEGINNING GAME DEVELOPMENT - CHAPTER 5"
#
# Imports of functions and modules: ####
#
import sys
sys.path.append('/home/pampi/Documents/Git/Cours-Python/Livre/BeginningGameDevelopmentWithPygame/Stock')
import pygame
from pygame.locals import *
from sys import exit
from vector2d import Vector2D
#
# Event handling: definition of functions/classes: ####
#
#
# Main program: ####
#
pygame.init()
background_image_filename = 'Stock/sushiplate.jpg'
sprite_image_filename = 'Stock/fugu.png'
screen = pygame.display.set_mode((640, 480), 0, 32)
background = pygame.image.load(background_image_filename).convert()
sprite = pygame.image.load(sprite_image_filename).convert_alpha()
clock = pygame.time.Clock()
position = Vector2D(100.0, 100.0)
speed = 250.  # pixels per second
heading = Vector2D()  # unit vector pointing toward the last click target
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            exit()
        if event.type == MOUSEBUTTONDOWN:
            # <*> before a function argument unpacks it
            # (*event.pos == event.pos[0], event.pos[1]); the commented
            # alternative was: Vector2D(*event.pos) - Vector2D(*sprite.get_size()) / 2.
            # NOTE(review): the live code passes the tuples un-unpacked — this
            # only works if Vector2D accepts a tuple; confirm against vector2d.
            destination = Vector2D(event.pos) - Vector2D(sprite.get_size()) / 2.
            heading = Vector2D.fromPoints(position, destination)
            heading.normalize()
    screen.blit(background, (0, 0))
    screen.blit(sprite, position)
    # Frame-rate-independent movement: distance = elapsed seconds * speed.
    timePassed = clock.tick()
    timepassedSeconds = timePassed / 1000.0
    distanceMoved = timepassedSeconds * speed
    position += heading * distanceMoved
    pygame.display.update()
17,588 | fd3e17910ab9561a35838af4928bf6fa608196c2 | #! /usr/bin/env python
import os
import mock
import unittest
import requests
import requests_mock
from datetime import date
from stocks.db import dal, Exchange, Stock, HistoricalQuote, CompleteHistoricalData
from stocks.load_data import parseExchangeData, parseStockData
from stocks.quotes import (fetch_historical_data, insert_historical_data,
get_latest_year, get_current_stock, get_next_stock)
class QuotesTest(unittest.TestCase):
    """Tests for stocks.quotes against an in-memory SQLite DB loaded from CSV fixtures."""

    @classmethod
    def setUpClass(cls):
        # One shared in-memory DB for the whole class; exchanges/stocks loaded once.
        fixtures = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'fixtures')
        dal.connect('sqlite:///:memory:')
        dal.session = dal.Session()
        # Load exchange and stock data
        parseExchangeData(os.path.join(fixtures, 'exchanges.csv'))
        parseStockData(os.path.join(fixtures, 'amex.csv'), 'AMEX')
        parseStockData(os.path.join(fixtures, 'nasdaq.csv'), 'NASDAQ')
        parseStockData(os.path.join(fixtures, 'nyse.csv'), 'NYSE')

    def setUp(self):
        # Absolute path to the fixtures directory, recomputed per test.
        self.fixtures = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'fixtures')

    def tearDown(self):
        self.fixtures = None

    @requests_mock.Mocker()
    def test_insert_historical_data(self, m):
        """fetch + insert stores one HistoricalQuote row per downloaded quote."""
        stock_symbol = 'AAPL'
        # Every HTTP request is answered with the canned AAPL JSON fixture.
        response_data = open(os.path.join(self.fixtures, 'aapl_2010-01-01_2010-01-31.json')).read()
        m.register_uri(requests_mock.ANY, requests_mock.ANY, text=response_data)
        quotes = fetch_historical_data(stock_symbol, date(2010, 1, 1), date(2010, 1, 31))
        insert_historical_data(quotes)
        session = dal.session
        stock = session.query(Stock).filter(Stock.symbol == stock_symbol).one()
        count = session.query(HistoricalQuote).filter(HistoricalQuote.stock == stock).count()
        self.assertEqual(len(quotes), count, "Saved all records downloaded.")

    def test_get_latest_year(self):
        """With no quotes stored, the 'latest year' defaults to next calendar year."""
        session = dal.Session()
        stock = session.query(Stock).filter(Stock.symbol == 'AAPL').one()
        self.assertEqual(date.today().year + 1, get_latest_year(stock))

    def test_get_current_stock(self):
        """The current stock starts at the lowest-id stock."""
        session = dal.Session()
        first_stock = session.query(Stock).order_by(Stock.id.asc()).first()
        queried_stock = get_current_stock()
        self.assertEqual(first_stock.symbol, queried_stock.symbol)
        self.assertEqual(first_stock.exchange.symbol, queried_stock.exchange.symbol)

    def test_get_next_stock(self):
        """get_next_stock walks stocks by id and skips historically complete ones."""
        session = dal.Session()
        first_stock = session.query(Stock).order_by(Stock.id.asc()).first()
        queried_stock = get_next_stock()
        self.assertEqual(first_stock.symbol, queried_stock.symbol)
        self.assertEqual(first_stock.exchange.symbol, queried_stock.exchange.symbol)
        second_stock = session.query(Stock).filter(Stock.id > first_stock.id).order_by(Stock.id.asc()).first()
        queried_stock = get_next_stock()
        self.assertEqual(second_stock.id, queried_stock.id)
        # Now we test the case where there's a historically complete stock for the third stock.
        # We expect that the id returned from get_next_stock to be the fourth stock's id.
        third_stock = session.query(Stock).filter(Stock.id > second_stock.id).order_by(Stock.id.asc()).first()
        fourth_stock = session.query(Stock).filter(Stock.id > third_stock.id).order_by(Stock.id.asc()).first()
        c = CompleteHistoricalData(stock_id = third_stock.id)
        session.add(c)
        session.commit()
        queried_stock = get_next_stock()
        self.assertEqual(fourth_stock.id, queried_stock.id)
|
def read_data():
    """Return the hard-coded ingredient density table (grams per cup)."""
    return {"Water": 240, "Butter": 230}
def table(data):
    """Render *data* (ingredient name -> grams per cup) as a LaTeX table.

    Grams per tablespoon is derived as grams-per-cup / 16.
    """
    rows = []
    for ingredient, grams_per_cup in data.items():
        rows.append("{} & {:.0f} & {:.1f} \\\\".format(ingredient, grams_per_cup,
                                                       grams_per_cup / 16))
    # %-formatting (not str.format) so the braces in the LaTeX need no escaping.
    template = r"""\begin{table}[ht]
\centering
\label{tab:density}
\caption{Densities of common ingredients}
\begin{tabular}{c|c|c}
Ingredient & \fr{g}{cup} & \fr{g}{tablespoon} \\ \hline
%s
\end{tabular}
\end{table}"""
    return template % '\n'.join(rows)
def macro(data):
    """Build a LaTeX DeclareDocumentCommand macro body for an ingredient.

    NOTE(review): this function never uses ``data`` and never returns the
    template it builds — it looks unfinished. The intended substitution is
    presumably ``template`` filled per ingredient name; confirm before use.
    """
    template = r"""\DeclareDocumentCommand{\%s}{ m m g }{%
\IfNoValueTF{#3}{#2}{\fr{{#2}}{{#3}}} % if two arguments, interpret as #2/#3
cup\IfNoValueTF{#3}{\ifthenelse{\equal{#2}{1}}{}{s}}{s} % if only one argument and that argument is exactly "1" then omit -s, else include it
\FPupn\grams{\IfNoValueTF{#3}{#2}{#3 #2 /} 150 * 0 round}%
(\FPprint{\grams} grams) %
#1 flour}"""
|
17,590 | 71f68edd138bfbd7d5b143d01d4a1255d787c865 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.shortcuts import render
from rest_framework import generics
from .serializers import BucketlistSerializer
from .models import Bucketlist
from rest_framework import permissions
from .permissions import IsOwner
class CreateView(generics.ListCreateAPIView):
    """Handle http GET (list) and POST (create) for bucketlists; owner-restricted."""
    queryset = Bucketlist.objects.all()
    serializer_class = BucketlistSerializer
    permission_classes = (permissions.IsAuthenticated, IsOwner)

    def perform_create(self, serializer):
        """Attach the requesting user as the bucketlist owner on save."""
        serializer.save(owner=self.request.user)
class DetailsView(generics.RetrieveUpdateDestroyAPIView):
    """This class handles the http GET, PUT and DELETE requests."""
    queryset = Bucketlist.objects.all()
    serializer_class = BucketlistSerializer
    permission_classes = (permissions.IsAuthenticated, IsOwner)
|
17,591 | 3cfefa87566ab9c155b2ded9ca51c39e7b260242 |
def available_platforms(verbose=True):
    """Available platforms to run OpenMM integrators.

    List the platforms available to run an OpenMM MD integrator.

    Parameters
    ----------
    verbose : bool, default=True.
        If True, the method prints out a message with a line corresponding to each
        available platform and returns nothing. If False, the method returns the
        list of platform names.

    Returns
    -------
    list of str or None
        The platform names when ``verbose`` is False; otherwise None.

    Examples
    --------
    >>> from uibcdf_tools import available_platforms
    >>> available_platforms()
    Platform Reference with speed 1.0
    Platform CPU with speed 10.0
    Platform CUDA with speed 100.0
    Platform OpenCL with speed 50.0

    Notes
    -----
    This method invokes the simtk.openmm ``Platform`` class. See the section
    Platforms in the OpenMM User Guide
    <http://docs.openmm.org/7.1.0/userguide/application.html#platforms> and the
    OpenMM Python API documentation
    <http://docs.openmm.org/latest/api-python/generated/simtk.openmm.openmm.Platform.html#>.
    """
    # Imported lazily so the module loads even without OpenMM installed.
    import simtk.openmm as mm

    platforms_available = []
    for index in range(mm.Platform.getNumPlatforms()):
        # Fetch each platform once (the original re-looked it up by name).
        platform = mm.Platform.getPlatform(index)
        platform_name = platform.getName()
        platforms_available.append(platform_name)
        if verbose:
            print('Platform {} with speed {}'.format(platform_name, platform.getSpeed()))

    if not verbose:
        return platforms_available
def loading_failures():
    """Print the platform plugin-loading failures recorded by OpenMM.

    OpenMM collects any errors raised while loading platform plugins at import
    time (e.g. missing CUDA shared libraries); this helper prints that list.

    Examples
    --------
    >>> from uibcdf_tools import loading_failures
    >>> loading_failures()
    ('Error loading library .../libOpenMMCUDA.so: libcufft.so.9.2: cannot open shared object file...', ...)

    Notes
    -----
    Uses the simtk.openmm ``Platform`` class; see the Platforms section of the
    OpenMM User Guide
    <http://docs.openmm.org/7.1.0/userguide/application.html#platforms> and the
    OpenMM Python API documentation
    <http://docs.openmm.org/latest/api-python/generated/simtk.openmm.openmm.Platform.html#>.
    """
    import simtk.openmm as mm
    failures = mm.Platform.getPluginLoadFailures()
    print(failures)
|
17,592 | 973359d3002a2e7c8865d8631ac92be9d27fc7fe | #!/usr/bin/env python
import asyncio
import logging
from aiowsio.client import WSIOClient
# Verbose websocket logging to stderr, useful while debugging the connection.
logger = logging.getLogger("websockets")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
# NOTE(review): 127.0.0.5 is an unusual loopback address — confirm it is not
# meant to be 127.0.0.1.
client = WSIOClient("ws://127.0.0.5:8001")
@client.on("chat message")
async def on_chat_message(data):
    """Print the payload of every incoming 'chat message' event."""
    print("message:", data)
@client.on("connect")
async def on_connect(data):
    """On connect: greet the room, then ask the server to sum a list.

    ``data`` is the payload delivered with the 'connect' event (unused here).
    """
    await client.emit("chat message", "Hi, all!")
    numbers = [1, 5, 2]
    # Renamed from 'sum' so the builtin sum() is not shadowed.
    total = await client.emit("sum", numbers)
    print("the sum of", numbers, "is", total)
try:
    # Run the client until it disconnects or the user hits Ctrl-C.
    asyncio.get_event_loop().run_until_complete(client)
except KeyboardInterrupt:
    pass
finally:
    try:
        # Best-effort shutdown: close the connection, then the loop.
        # A second Ctrl-C during teardown is tolerated silently.
        asyncio.get_event_loop().run_until_complete(client.close())
        asyncio.get_event_loop().close()
    except KeyboardInterrupt:
        pass
|
17,593 | efd15633b687eb323a342306af63b1f52cc8025f | print "How old are you?",
age = raw_input()
print "One third of your age is %.2f" %(int(age)/3.0) |
17,594 | 44261c0c1261d01bfdcc71fa0aaa380490bb6776 | from .pages.main_page import MainPage
from .pages.login_page import LoginPage
def test_guest_can_go_to_login_page(browser):
    """A guest can navigate from the main page to a valid login page."""
    url = "http://selenium1py.pythonanywhere.com/"
    main_page = MainPage(browser, url)
    main_page.open()
    main_page.go_to_login_page()
    # Build the LoginPage from wherever the navigation actually landed.
    login_page = LoginPage(browser, browser.current_url)
    login_page.should_be_login_page()
def test_guest_should_see_login_link(browser):
    """The main page shows a login link to an unauthenticated guest."""
    main_page = MainPage(browser, "http://selenium1py.pythonanywhere.com/")
    main_page.open()
    main_page.should_be_login_link()
|
# Read a comma-separated sequence of numbers and print their sum.
numbers = []
txt = input("Enter a comma seperated sequence of numbers")
tokens = txt.split(",")
print(tokens)
for token in tokens:
    # Skip blank entries (empty input or stray trailing commas) so the
    # script does not crash on int('').
    if token.strip():
        numbers.append(int(token))
print(f"The sum of the numbers are {sum(numbers)}")
|
17,596 | a637e3ed6eac81b8b5060e3a125db8bfba1b4509 | from distutils.core import setup, Extension
import os.path
import interpreters
# Absolute path of the directory containing this setup script.
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
#################################################
# Define the package metadata.
NAME = 'interpreters'
VERSION = interpreters.__version__
AUTHOR = 'Eric Snow'
EMAIL = 'ericsnowcurrently@gmail.com'
URL = 'https://github.com/ericsnowcurrently/interpreters'
LICENSE = 'New BSD License'
SUMMARY = "Python-level access to CPython's C-level subinterpreters API."
# DESCRIPTION is dynamically built below.
KEYWORDS = ''
PLATFORMS = []
# Trove classifiers; the commented-out entries are kept for easy toggling.
CLASSIFIERS = [
    #'Development Status :: 1 - Planning',
    #'Development Status :: 2 - Pre-Alpha',
    'Development Status :: 3 - Alpha',
    #'Development Status :: 4 - Beta',
    #'Development Status :: 5 - Production/Stable',
    #'Development Status :: 6 - Mature',
    #'Development Status :: 7 - Inactive',
    'Intended Audience :: Developers',
    #'License :: OSI Approved :: BSD License',
    #'Operating System :: OS Independent',
    #'Programming Language :: Python :: 2',
    #'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    #'Programming Language :: Python :: 3.2',
    #'Programming Language :: Python :: 3.3',
    #'Programming Language :: Python :: 3.4',
    #'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.7',
    'Programming Language :: Python :: 3.8',
    'Topic :: Software Development',
    'Topic :: Software Development :: Libraries',
]
# The long description is the README contents.
with open(os.path.join(PROJECT_ROOT, 'README.rst')) as readme_file:
    DESCRIPTION = readme_file.read()
#################################################
# Set up files.
PACKAGES = []
PACKAGE_DATA = {}
MODULES = ['interpreters.py']
# Note: no trailing comma after the closing bracket — a stray comma here
# previously turned EXTENSIONS into a 1-tuple containing a list, which is
# not a valid value for distutils' ext_modules (it expects a list of
# Extension objects).
EXTENSIONS = [
    Extension('_interpreters',
              ['src/_interpretersmodule.c'],
              include_dirs=['include'],
              #define_macros=[('NDEBUG', '1')],
              #undef_macros=['HAVE_FOO'],
              ),
]

#################################################
# Set up the rest of the package info.
REQUIRES = []
#################################################
# Pull it all together.
# Collect every setup() argument, then drop the empty ones so distutils
# only sees meaningful values.
kwargs = dict(
    name=NAME,
    version=VERSION,
    author=AUTHOR,
    author_email=EMAIL,
    #maintainer=MAINTAINER,
    #maintainer_email=MAINTAINER_EMAIL,
    url=URL,
    #download_url=DOWNLOAD,
    license=LICENSE,
    description=SUMMARY,
    long_description=DESCRIPTION,
    keywords=KEYWORDS,
    platforms=PLATFORMS,
    classifiers=CLASSIFIERS,
    requires=REQUIRES,
    packages=PACKAGES,
    package_data=PACKAGE_DATA,
    py_modules=MODULES,
    ext_modules=EXTENSIONS,
)
kwargs = {key: value for key, value in kwargs.items() if value}
if __name__ == '__main__':
    # The generated C sources must be present before building.
    missing = {'src', 'include'} - set(os.listdir(PROJECT_ROOT))
    if missing:
        raise Exception('missing extension module files; '
                        'please run python3 -m update')
    setup(**kwargs)
|
17,597 | d2aee21a550a1d343ce2a44f73a037d90636dddc | import sys, webbrowser
import requests
from bs4 import BeautifulSoup
def 구글검색(키워드, 탭수=5):
    """Open the top Google results for 키워드 in new browser tabs.

    Fetches the Google search page for 키워드 and opens up to 탭수 result
    links in the default web browser. On a non-200 response, prints the
    HTTP status code instead.
    """
    response = requests.get('http://google.com/search?q={0}'.format(키워드))
    if response.status_code != 200:
        print('HTTP 응답 코드: {}'.format(response.status_code))
    else:
        soup = BeautifulSoup(response.text, 'lxml')
        for result_link in soup.select('.r a')[:탭수]:
            webbrowser.open('http://google.com' + result_link.get('href'))
if __name__ == '__main__':
    # Usage banner shown when no search phrase is given; {0} is filled
    # with the script name below.
    사용법 = """사용법
$ python {0} 검색문구
"""
    if len(sys.argv) < 2:
        sys.exit(사용법.format(sys.argv[0]))
    검색문구 = sys.argv[1]
    구글검색(검색문구)
|
17,598 | 4ffb656e8a6294863247483683aa70303d057e70 | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
from .views import PostViewSet, UserViewSet, api_root
from . import views
# DRF router: auto-generates list/detail routes for the registered viewsets.
router = DefaultRouter()
router.register('posts', views.PostViewSet)
router.register('users', views.UserViewSet)
# Auto-generated schema view for the API.
schema_view = get_schema_view(title='Pastebin API')
# URL namespace for this app (reverse with 'blog:<name>').
app_name='blog'
urlpatterns = [
    path('', views.IndexView.as_view(), name='index'),
    path('Categories', views.CategoriesView.as_view(), name='categories'),
    path('Categories/<int:pk>/', views.CategoryView.as_view(), name='category'),
    path('Posts/<int:pk>/', views.PostView.as_view(), name='detail'),
    path('Search/', views.SearchView.as_view(), name='search'),
    # Built-in auth views (login, logout, password reset, ...).
    path('accounts/', include('django.contrib.auth.urls')),
    path('account', views.ProfileView.as_view(), name='profile_view'),
    path('new_post', views.CreatePostView.as_view(), name='new_post'),
    path('Posts/<int:pk>/edit', views.EditPostView.as_view(), name='edit'),
    path('Posts/<int:pk>/delete', views.DeletePostView.as_view(), name='delete'),
    # REST API: browsable-API auth, router-generated endpoints, schema.
    path('api-auth', include('rest_framework.urls')),
    path('api', include(router.urls)),
    path('schema/', schema_view),
    path('api-root', views.api_root, name='apiroot'),
]
|
17,599 | 1b36c48383f3c7fe085af4a78f913af1c744ec6e | from keras.layers import Input, Flatten, Bidirectional, Dropout, concatenate, GRU, Activation, Conv1D, MaxPooling1D, \
BatchNormalization
from keras.layers import LSTM, Dense
from keras.models import Model
import numpy as np
def get_model(config, data):
    """Build a two-input Keras model: a BiGRU+CNN branch over sequences and
    a dense branch over a flat feature vector, concatenated before the
    output layer.

    Parameters
    ----------
    config : dict
        Hyperparameters: 'input_dropout', 'lstm_units', 'dense_units',
        'dense_units2', 'cnn_kernel_size' (def 5), 'pool_size' (def 2),
        'cnn_filters' (def 256), and 'binary' (True -> single sigmoid
        output instead of a softmax over num_classes).
    data : object
        Provides max_full_sequence_length, input_dims and num_classes.

    Returns
    -------
    keras.Model
        Model taking [sequence_input, feature_input].
    """
    max_sequence_length = data.max_full_sequence_length
    input_dims = data.input_dims
    num_classes = data.num_classes
    input_dropout = config['input_dropout']
    lstm_units = config['lstm_units']
    dense_units = config['dense_units']
    dense_units2 = config['dense_units2']
    cnn_kernel_size = int(config['cnn_kernel_size']) #def 5
    pool_size = int(config['pool_size']) #def 2
    filters = int(config['cnn_filters']) #def 256
    # Branch 1: sequence input -> dropout -> BiGRU -> Conv1D -> pool -> flatten.
    sequence_input = Input(shape=(max_sequence_length, input_dims[0]))
    x = sequence_input
    # x = BatchNormalization()(x)
    x = Dropout(input_dropout, input_shape=(max_sequence_length,))(x)
    x = Bidirectional(GRU(lstm_units, return_sequences=True, stateful=False))(x)
    x = Conv1D(filters=filters, kernel_size=cnn_kernel_size, padding='valid', activation='relu')(x)
    x = MaxPooling1D(pool_size=pool_size)(x)
    x = Flatten()(x)
    # Branch 2: flat feature vector, optionally passed through a dense layer.
    # NOTE(review): the * 2 * 3 factor in the input width is assumed to match
    # how get_second_input_vector_sequence builds the vector — confirm.
    input2 = Input(shape=(input_dims[1] * 2 * 3,))
    x2 = input2
    # x2 = BatchNormalization()(x2)
    if dense_units2:
        x2 = Dropout(0.1)(x2)
        x2 = Dense(dense_units2)(x2)
        x2 = Activation('relu')(x2)
    x = Dropout(0.2)(x)
    if dense_units:
        x = Dense(dense_units)(x)
        x = Activation('relu')(x)
    # x_arr always has two entries here, so the concatenate branch is taken;
    # the else is kept for easy experimentation with a single branch.
    x_arr = [x, x2]
    if len(x_arr) > 1:
        x = concatenate(x_arr)
    else:
        x = x_arr[0]
    activation = 'softmax'
    output_units = num_classes
    if config['binary']:
        activation = 'sigmoid'
        output_units = 1
    preds = Dense(output_units, activation=activation)(x)
    return Model([sequence_input, input2], preds)
def get_x(x):
    """Return *x* unchanged (identity hook for the input pipeline)."""
    return x
def get_x_train(training_data):
    """Assemble training inputs: main sequences plus the first extra layer."""
    extra_inputs = np.array(training_data['additional_layers']['x_train'][0])
    return [training_data['x_train'], extra_inputs]
def get_x_val(training_data):
    """Assemble validation inputs: main sequences plus the first extra layer."""
    extra_inputs = np.array(training_data['additional_layers']['x_val'][0])
    return [training_data['x_val'], extra_inputs]
def get_x_val_test(test_data):
    """Assemble held-out test inputs: main sequences plus the first extra layer."""
    extra_inputs = np.array(test_data['additional_layers']['x_val_test'][0])
    return [test_data['x_val_test'], extra_inputs]
def get_cv_x_test(fold_data):
    """Assemble a CV fold's test inputs: main sequences plus the first extra layer."""
    extra_inputs = np.array(fold_data['additional_layers']['x_test'][0])
    return [fold_data['x_test'], extra_inputs]
def get_second_input_vector_sequence(seq, data):
    """Build the second-branch input vector for *seq* via the data object."""
    candidate_vector = data.get_candidate_vector(seq)
    return candidate_vector
def get_additional_layers_conf():
    """List the builder functions that produce additional model inputs."""
    builders = [get_second_input_vector_sequence]
    return builders
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.