code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'pesquisa_produtos.ui'
#
# Created by: PyQt5 View code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Frame(object):
    """Qt Designer generated UI class for the product-search frame.

    Generated from 'pesquisa_produtos.ui' (see file header); manual edits are
    normally lost on regeneration. All widget positions are absolute
    (QRect geometry, no layout managers), sized for a 1048x361 frame.
    """
    def setupUi(self, Frame):
        """Instantiate, position and style every widget on *Frame*."""
        Frame.setObjectName("Frame")
        Frame.resize(1048, 361)
        Frame.setAutoFillBackground(False)
        Frame.setStyleSheet("background: #FFF;")
        # --- title bar: "PRODUTOS" label + "new product" button ---
        self.fr_titulo_servicos = QtWidgets.QFrame(Frame)
        self.fr_titulo_servicos.setGeometry(QtCore.QRect(0, 0, 1051, 60))
        self.fr_titulo_servicos.setStyleSheet("")
        self.fr_titulo_servicos.setObjectName("fr_titulo_servicos")
        self.lb_tituloClientes_2 = QtWidgets.QLabel(self.fr_titulo_servicos)
        self.lb_tituloClientes_2.setGeometry(QtCore.QRect(10, 15, 200, 30))
        font = QtGui.QFont()
        font.setFamily("DejaVu Sans")
        font.setPointSize(18)
        font.setBold(True)
        font.setWeight(75)
        self.lb_tituloClientes_2.setFont(font)
        self.lb_tituloClientes_2.setStyleSheet("color: rgb(0, 0, 0)")
        self.lb_tituloClientes_2.setObjectName("lb_tituloClientes_2")
        self.bt_inserir = QtWidgets.QPushButton(self.fr_titulo_servicos)
        self.bt_inserir.setGeometry(QtCore.QRect(910, 9, 131, 41))
        font = QtGui.QFont()
        font.setFamily("Tahoma")
        font.setPointSize(10)
        font.setBold(True)
        font.setWeight(75)
        self.bt_inserir.setFont(font)
        self.bt_inserir.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        self.bt_inserir.setFocusPolicy(QtCore.Qt.NoFocus)
        self.bt_inserir.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
        self.bt_inserir.setStyleSheet("QPushButton {\n"
" background-color: rgb(78, 154, 6);\n"
"color: #FFF\n"
" }\n"
"QPushButton:hover{\n"
" background-color: #40a286\n"
"}")
        self.bt_inserir.setIconSize(QtCore.QSize(75, 35))
        self.bt_inserir.setObjectName("bt_inserir")
        # --- product table: 8 columns, 1 initial row, read-only ---
        self.tb_produtos = QtWidgets.QTableWidget(Frame)
        self.tb_produtos.setGeometry(QtCore.QRect(0, 100, 1041, 211))
        self.tb_produtos.viewport().setProperty("cursor", QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        self.tb_produtos.setFocusPolicy(QtCore.Qt.WheelFocus)
        self.tb_produtos.setStyleSheet("QTableView{\n"
"color: #797979;\n"
"font-weight: bold;\n"
"font-size: 13px;\n"
"background: #FFF;\n"
"padding: 0 0 0 5px;\n"
"}\n"
"QHeaderView:section{\n"
"background: #FFF;\n"
"padding: 5px 0 ;\n"
"font-size: 12px;\n"
"font-family: \"Arial\";\n"
"font-weight: bold;\n"
"color: #797979;\n"
"border: none;\n"
"border-bottom: 2px solid #CCC;\n"
"text-transform: uppercase\n"
"}\n"
"QTableView::item {\n"
"border-bottom: 2px solid #CCC;\n"
"padding: 2px;\n"
"}\n"
"\n"
"")
        self.tb_produtos.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.tb_produtos.setFrameShadow(QtWidgets.QFrame.Plain)
        self.tb_produtos.setAutoScrollMargin(20)
        self.tb_produtos.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.tb_produtos.setSelectionMode(QtWidgets.QAbstractItemView.NoSelection)
        self.tb_produtos.setSelectionBehavior(QtWidgets.QAbstractItemView.SelectRows)
        self.tb_produtos.setShowGrid(False)
        self.tb_produtos.setGridStyle(QtCore.Qt.NoPen)
        self.tb_produtos.setWordWrap(False)
        self.tb_produtos.setRowCount(1)
        self.tb_produtos.setObjectName("tb_produtos")
        self.tb_produtos.setColumnCount(8)
        # Header item placeholders; captions are assigned in retranslateUi.
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setVerticalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(2, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(3, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(4, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(5, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(6, item)
        item = QtWidgets.QTableWidgetItem()
        self.tb_produtos.setHorizontalHeaderItem(7, item)
        self.tb_produtos.horizontalHeader().setDefaultSectionSize(120)
        self.tb_produtos.horizontalHeader().setHighlightSections(False)
        self.tb_produtos.horizontalHeader().setStretchLastSection(True)
        self.tb_produtos.verticalHeader().setVisible(False)
        self.tb_produtos.verticalHeader().setDefaultSectionSize(50)
        self.tb_produtos.verticalHeader().setMinimumSectionSize(20)
        # --- bottom bar with the SELECT button ---
        self.fr_botoes = QtWidgets.QFrame(Frame)
        self.fr_botoes.setGeometry(QtCore.QRect(0, 330, 1051, 30))
        self.fr_botoes.setStyleSheet("background:#E1DFE0;\n"
"border: none;")
        self.fr_botoes.setObjectName("fr_botoes")
        self.bt_selecionar = QtWidgets.QPushButton(self.fr_botoes)
        self.bt_selecionar.setGeometry(QtCore.QRect(930, 0, 120, 30))
        font = QtGui.QFont()
        font.setPointSize(10)
        font.setBold(True)
        font.setWeight(75)
        self.bt_selecionar.setFont(font)
        self.bt_selecionar.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        self.bt_selecionar.setFocusPolicy(QtCore.Qt.NoFocus)
        self.bt_selecionar.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
        self.bt_selecionar.setStyleSheet("QPushButton {\n"
"background-color: #1E87F0;\n"
"color: #FFF\n"
" }\n"
"QPushButton:hover{\n"
"background-color: #40a286\n"
"}")
        self.bt_selecionar.setIconSize(QtCore.QSize(75, 35))
        self.bt_selecionar.setObjectName("bt_selecionar")
        # --- search row: refresh button, text field, category combo, search button ---
        self.bt_refresh = QtWidgets.QPushButton(Frame)
        self.bt_refresh.setGeometry(QtCore.QRect(1010, 60, 30, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        self.bt_refresh.setFont(font)
        self.bt_refresh.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        self.bt_refresh.setFocusPolicy(QtCore.Qt.NoFocus)
        self.bt_refresh.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
        self.bt_refresh.setText("")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("Imagens/refresh.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.bt_refresh.setIcon(icon)
        self.bt_refresh.setObjectName("bt_refresh")
        self.tx_busca = QtWidgets.QLineEdit(Frame)
        self.tx_busca.setGeometry(QtCore.QRect(190, 60, 791, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        self.tx_busca.setFont(font)
        self.tx_busca.setFocusPolicy(QtCore.Qt.ClickFocus)
        self.tx_busca.setStyleSheet("QLineEdit {\n"
"color: #000\n"
"}\n"
"")
        self.tx_busca.setObjectName("tx_busca")
        self.cb_produtos = QtWidgets.QComboBox(Frame)
        self.cb_produtos.setGeometry(QtCore.QRect(10, 60, 171, 31))
        self.cb_produtos.setFocusPolicy(QtCore.Qt.StrongFocus)
        self.cb_produtos.setStyleSheet("QComboBox{\n"
"background: #fff;\n"
"color: #000;\n"
"font: 13px \"Arial\" ;\n"
"text-transform: uppercase\n"
"}\n"
"QComboBox:Focus {\n"
"border: 1px solid red;\n"
"}\n"
" QComboBox::drop-down {\n"
" subcontrol-origin: padding;\n"
" subcontrol-position: top right;\n"
" width: 25px;\n"
" border-left-width: 1px;\n"
" border-left-color: darkgray;\n"
" border-left-style: solid; /* just a single line */\n"
" border-top-right-radius: 3px; /* same radius as the QComboBox */\n"
" border-bottom-right-radius: 3px;\n"
" }\n"
"QComboBox::down-arrow {\n"
" image: url(\"Imagens/down.png\");\n"
" }\n"
"")
        self.cb_produtos.setObjectName("cb_produtos")
        self.cb_produtos.addItem("")
        self.bt_busca = QtWidgets.QPushButton(Frame)
        self.bt_busca.setGeometry(QtCore.QRect(980, 60, 30, 31))
        font = QtGui.QFont()
        font.setFamily("Arial")
        self.bt_busca.setFont(font)
        self.bt_busca.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        self.bt_busca.setFocusPolicy(QtCore.Qt.NoFocus)
        self.bt_busca.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
        self.bt_busca.setText("")
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap("Imagens/search.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.bt_busca.setIcon(icon1)
        self.bt_busca.setObjectName("bt_busca")
        self.retranslateUi(Frame)
        QtCore.QMetaObject.connectSlotsByName(Frame)
    def retranslateUi(self, Frame):
        """Apply the (Portuguese) user-visible captions to all widgets."""
        _translate = QtCore.QCoreApplication.translate
        Frame.setWindowTitle(_translate("Frame", "Lista de Produtos"))
        self.lb_tituloClientes_2.setText(_translate("Frame", "PRODUTOS"))
        self.bt_inserir.setText(_translate("Frame", "NOVO PRODUTO"))
        item = self.tb_produtos.verticalHeaderItem(0)
        item.setText(_translate("Frame", "1"))
        item = self.tb_produtos.horizontalHeaderItem(0)
        item.setText(_translate("Frame", "ID"))
        item = self.tb_produtos.horizontalHeaderItem(1)
        item.setText(_translate("Frame", "CODIGO DE BARRAS"))
        item = self.tb_produtos.horizontalHeaderItem(2)
        item.setText(_translate("Frame", "ESTOQUE"))
        item = self.tb_produtos.horizontalHeaderItem(3)
        item.setText(_translate("Frame", "DESCRIÇÃO"))
        item = self.tb_produtos.horizontalHeaderItem(4)
        item.setText(_translate("Frame", "MARCA"))
        item = self.tb_produtos.horizontalHeaderItem(5)
        item.setText(_translate("Frame", "PREÇO"))
        item = self.tb_produtos.horizontalHeaderItem(6)
        item.setText(_translate("Frame", "FORNECEDOR"))
        item = self.tb_produtos.horizontalHeaderItem(7)
        item.setText(_translate("Frame", "CATEGORIA"))
        self.bt_selecionar.setText(_translate("Frame", "SELECIONAR"))
        self.bt_refresh.setToolTip(_translate("Frame", "ATUALIZAR TABELA"))
        self.tx_busca.setPlaceholderText(_translate("Frame", "PROCURAR POR..."))
        self.cb_produtos.setItemText(0, _translate("Frame", "SELECIONE"))
        self.bt_busca.setToolTip(_translate("Frame", "BUSCAR"))
| [
"PyQt5.QtWidgets.QTableWidget",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtGui.QIcon",
"PyQt5.QtGui.QFont",
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QFrame",
"PyQt5.QtGui.QCursor",
"PyQt5.QtCore.QRect",
"PyQt5.QtGui.QPixmap",
"PyQt5.QtWidgets.QLabel... | [((503, 526), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['Frame'], {}), '(Frame)\n', (519, 526), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((754, 795), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.fr_titulo_servicos'], {}), '(self.fr_titulo_servicos)\n', (770, 795), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((887, 900), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (898, 900), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1236, 1282), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.fr_titulo_servicos'], {}), '(self.fr_titulo_servicos)\n', (1257, 1282), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1365, 1378), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1376, 1378), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2067, 2096), 'PyQt5.QtWidgets.QTableWidget', 'QtWidgets.QTableWidget', (['Frame'], {}), '(Frame)\n', (2089, 2096), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3570, 3598), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3596, 3598), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3670, 3698), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3696, 3698), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3772, 3800), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3798, 3800), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3874, 3902), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (3900, 3902), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3976, 4004), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (4002, 4004), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4078, 4106), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (4104, 4106), False, 
'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4180, 4208), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (4206, 4208), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4282, 4310), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (4308, 4310), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4384, 4412), 'PyQt5.QtWidgets.QTableWidgetItem', 'QtWidgets.QTableWidgetItem', ([], {}), '()\n', (4410, 4412), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4907, 4930), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['Frame'], {}), '(Frame)\n', (4923, 4930), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5155, 5192), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.fr_botoes'], {}), '(self.fr_botoes)\n', (5176, 5192), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5278, 5291), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5289, 5291), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5954, 5982), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['Frame'], {}), '(Frame)\n', (5975, 5982), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6066, 6079), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6077, 6079), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6408, 6421), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (6419, 6421), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6634, 6660), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['Frame'], {}), '(Frame)\n', (6653, 6660), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6742, 6755), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (6753, 6755), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7036, 7062), 'PyQt5.QtWidgets.QComboBox', 'QtWidgets.QComboBox', (['Frame'], {}), '(Frame)\n', (7055, 7062), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7984, 8012), 
'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['Frame'], {}), '(Frame)\n', (8005, 8012), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8093, 8106), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (8104, 8106), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8426, 8439), 'PyQt5.QtGui.QIcon', 'QtGui.QIcon', ([], {}), '()\n', (8437, 8439), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8666, 8710), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['Frame'], {}), '(Frame)\n', (8703, 8710), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((571, 599), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(1051)', '(60)'], {}), '(0, 0, 1051, 60)\n', (583, 599), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((841, 870), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(15)', '(200)', '(30)'], {}), '(10, 15, 200, 30)\n', (853, 870), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1319, 1348), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(910)', '(9)', '(131)', '(41)'], {}), '(910, 9, 131, 41)\n', (1331, 1348), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1568, 1611), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.PointingHandCursor'], {}), '(QtCore.Qt.PointingHandCursor)\n', (1581, 1611), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1966, 1986), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(75)', '(35)'], {}), '(75, 35)\n', (1978, 1986), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2134, 2165), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(100)', '(1041)', '(211)'], {}), '(0, 100, 1041, 211)\n', (2146, 2165), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2225, 2268), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.PointingHandCursor'], {}), '(QtCore.Qt.PointingHandCursor)\n', (2238, 2268), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4966, 4996), 'PyQt5.QtCore.QRect', 'QtCore.QRect', 
(['(0)', '(330)', '(1051)', '(30)'], {}), '(0, 330, 1051, 30)\n', (4978, 4996), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5232, 5261), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(930)', '(0)', '(120)', '(30)'], {}), '(930, 0, 120, 30)\n', (5244, 5261), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5454, 5497), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.PointingHandCursor'], {}), '(QtCore.Qt.PointingHandCursor)\n', (5467, 5497), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5848, 5868), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(75)', '(35)'], {}), '(75, 35)\n', (5860, 5868), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6019, 6049), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(1010)', '(60)', '(30)', '(31)'], {}), '(1010, 60, 30, 31)\n', (6031, 6049), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6184, 6227), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.PointingHandCursor'], {}), '(QtCore.Qt.PointingHandCursor)\n', (6197, 6227), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6445, 6481), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""Imagens/refresh.png"""'], {}), "('Imagens/refresh.png')\n", (6458, 6481), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6695, 6725), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(190)', '(60)', '(791)', '(31)'], {}), '(190, 60, 791, 31)\n', (6707, 6725), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7100, 7129), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(60)', '(171)', '(31)'], {}), '(10, 60, 171, 31)\n', (7112, 7129), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8047, 8076), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(980)', '(60)', '(30)', '(31)'], {}), '(980, 60, 30, 31)\n', (8059, 8076), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8207, 8250), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.PointingHandCursor'], {}), '(QtCore.Qt.PointingHandCursor)\n', (8220, 8250), False, 'from PyQt5 import 
QtCore, QtGui, QtWidgets\n'), ((8464, 8499), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['"""Imagens/search.png"""'], {}), "('Imagens/search.png')\n", (8477, 8499), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
from data import all_emoji
from aiogram.types import InlineKeyboardMarkup, InlineKeyboardButton
from aiogram.utils.callback_data import CallbackData
from data import all_emoji
from utils.googlesheets import send_to_google
from utils.set_minus_and_plus_currences import set_minus_and_plus
from utils.get_minuses_sum_FGH import get_minus_FGH
from utils.get_values_FGH_MNO import get_plus_FGH
# Callback-data factory for the "what to do with the sum" keyboard buttons.
cb_what_sum = CallbackData('cb_ws', 'type_btn')
def create_kb_what_sum():
    """Build the inline keyboard offering actions for a computed sum:
    correct it, confirm it, return to the request, or go back to the
    main menu.
    """
    kb = InlineKeyboardMarkup()
    # One button per action; captions are user-facing (Russian).
    actions = (
        ('скорректировать', 'correct_sum'),
        ('подтвердить', 'confirm_sum'),
        ('вернуться к заявке', 'back_to_chosen_request'),
    )
    for caption, action in actions:
        kb.add(
            InlineKeyboardButton(
                text=caption,
                callback_data=cb_what_sum.new(type_btn=action)
            )
        )
    back__main_menu = all_emoji['back__main_menu']
    kb.add(
        InlineKeyboardButton(
            text=f'назад {back__main_menu} главное меню',
            callback_data=cb_what_sum.new(type_btn='back_main_menu')
        )
    )
    return kb
# Callback-data factory for currency-choice buttons: currency code + button type.
cb_choose_currency = CallbackData('anprix', 'curr', 'type_btn')
def fmt_amount(raw, symbol, exchange):
    """Format one currency field of a request for a button caption.

    raw      -- stored amount, normally a numeric string; '0' means absent
    symbol   -- currency sign to append ('₽', '$' or '€')
    exchange -- True when the operation is an exchange: positive amounts
                then get an explicit leading '+' and minus signs are kept;
                for other operations a leading '-' is stripped instead.

    Returns '' when raw == '0', otherwise e.g. '+100 ₽', '100 $', '-5 €'.
    """
    if raw == '0':
        return ''
    text = str(raw)
    if exchange:
        # Exchange keeps the sign and makes "plus" explicit.
        if not text.startswith('-'):
            text = '+' + text
    elif text.startswith('-'):
        # Other operations show the magnitude only.
        text = text[1:]
    return text + ' ' + symbol


def create_kb_choose_currency_processing(request):
    """Build the currency-selection keyboard for a request in processing.

    request[3] holds the operation name; request[5..7] hold the RUB/USD/EUR
    amounts ('0' when a currency is not involved — TODO confirm they are
    always strings). One button is added per non-zero currency, plus a
    "back to main menu" button.
    """
    emo_snail = all_emoji['back__main_menu']
    # Exchange operations display signed amounts (see fmt_amount).
    exchange = request[3] == 'обмен'
    keyboard = InlineKeyboardMarkup()
    amounts = (
        (request[5], '₽', 'rub'),
        (request[6], '$', 'usd'),
        (request[7], '€', 'eur'),
    )
    for raw, symbol, curr_code in amounts:
        if raw == '0':
            continue
        keyboard.add(
            InlineKeyboardButton(
                text=fmt_amount(raw, symbol, exchange),
                callback_data=cb_choose_currency.new(curr=curr_code, type_btn='change_curr')
            )
        )
    keyboard.add(
        InlineKeyboardButton(
            text=f'назад {emo_snail} главное меню',
            callback_data=cb_choose_currency.new(
                curr='-',
                type_btn='back_main_menu'
            )
        )
    )
    return keyboard
# Callback-data factory for the sum-correction keyboard (minus sums).
cb_what_sum_correct = CallbackData('cbwsc', 'curr', 'type_btn')
def create_kb_what_sum_correct(request):
    """Keyboard listing the negative (minus) currency sums of *request*.

    One button per non-empty caption from get_minus_FGH, plus a
    "back to main menu" button.
    """
    kb = InlineKeyboardMarkup()
    rub, usd, eur = get_minus_FGH(request)
    for caption, curr_code in ((rub, 'rub'), (usd, 'usd'), (eur, 'eur')):
        if caption == '':
            continue
        kb.add(
            InlineKeyboardButton(
                text=caption,
                callback_data=cb_what_sum_correct.new(
                    curr=curr_code,
                    type_btn='change_curr'
                )
            )
        )
    emo_snail = all_emoji['back__main_menu']
    kb.add(
        InlineKeyboardButton(
            text=f'назад {emo_snail} главное меню',
            callback_data=cb_what_sum_correct.new(
                curr='-',
                type_btn='back_main_menu'
            )
        )
    )
    return kb
# Callback-data factory for the sum-correction chunk keyboard (plus sums).
cb_sum_correct_chunk = CallbackData('cbscc', 'curr', 'type_btn')
def create_kb_sum_correct_chunk(request):
    """Keyboard listing the positive (plus) currency sums of *request*.

    One button per non-empty caption from get_plus_FGH, plus a
    "back to main menu" button.
    """
    kb = InlineKeyboardMarkup()
    rub, usd, eur = get_plus_FGH(request)
    for caption, curr_code in ((rub, 'rub'), (usd, 'usd'), (eur, 'eur')):
        if caption == '':
            continue
        kb.add(
            InlineKeyboardButton(
                text=caption,
                callback_data=cb_sum_correct_chunk.new(
                    curr=curr_code,
                    type_btn='change_curr'
                )
            )
        )
    emo_snail = all_emoji['back__main_menu']
    kb.add(
        InlineKeyboardButton(
            text=f'назад {emo_snail} главное меню',
            callback_data=cb_sum_correct_chunk.new(
                curr='-',
                type_btn='back_main_menu'
            )
        )
    )
    return kb
import rospy
# Seconds to wait between successive motor-command cycles.
MOVE_CYCLE_PERIOD = 0.01
def move_towards(target, current, step=1):
    """Step *current* toward *target* by at most *step*.

    Returns (new_value, reached): once the remaining distance is smaller
    than one step the target itself is returned with reached=True.
    """
    remaining = target - current
    if abs(remaining) < step:
        return target, True
    delta = step if remaining > 0 else -step
    return current + delta, False
def move_leg(leg, coxa=None, femur=None, tibia=None, step=1.3):
    """Advance the requested joints of *leg* one increment toward their targets.

    leg   -- object with mutable ``coxa``/``femur``/``tibia`` angle attributes
    coxa, femur, tibia -- target angles; ``None`` leaves that joint untouched
    step  -- maximum change applied to each joint per call

    Mutates *leg* in place and returns True once every requested joint has
    reached its target (unrequested joints count as done).
    """
    coxa_done = True
    femur_done = True
    tibia_done = True
    # Compare with None explicitly: a target angle of 0 is legitimate and
    # would be silently skipped by plain truthiness testing.
    if coxa is not None:
        leg.coxa, coxa_done = move_towards(coxa, leg.coxa, step)
    if femur is not None:
        leg.femur, femur_done = move_towards(femur, leg.femur, step)
    if tibia is not None:
        leg.tibia, tibia_done = move_towards(tibia, leg.tibia, step)
    return coxa_done and femur_done and tibia_done
def is_leg_close(leg, coxa=None, femur=None, tibia=None, tolerance=20):
    """Return True when every requested joint of *leg* is strictly within
    *tolerance* of its target angle.

    Joints passed as ``None`` are ignored (treated as close).
    """
    coxa_close = True
    femur_close = True
    tibia_close = True
    # Explicit None checks: a target of 0 degrees is valid and must be
    # tested, not skipped as falsy.
    if coxa is not None:
        coxa_close = leg.coxa + tolerance > coxa > leg.coxa - tolerance
    if femur is not None:
        femur_close = leg.femur + tolerance > femur > leg.femur - tolerance
    if tibia is not None:
        tibia_close = leg.tibia + tolerance > tibia > leg.tibia - tolerance
    return coxa_close and femur_close and tibia_close
class FoldingManager(object):
def __init__(self, body_controller):
super(FoldingManager, self).__init__()
self.body_controller = body_controller
self.last_motor_position = None
def position_femur_tibia(self):
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, None, 60, 240)
lm = move_leg(self.last_motor_position.left_middle, None, 60, 240)
lr = move_leg(self.last_motor_position.left_rear, None, 60, 240)
rf = move_leg(self.last_motor_position.right_front, None, 240, 60)
rm = move_leg(self.last_motor_position.right_middle, None, 240, 60)
rr = move_leg(self.last_motor_position.right_rear, None, 240, 60)
self.body_controller.set_motors(self.last_motor_position)
if lf and lm and lr and rf and rm and rr:
break
rospy.sleep(0.05)
def check_if_folded(self):
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
lf = is_leg_close(self.last_motor_position.left_front, 240)
lm = is_leg_close(self.last_motor_position.left_middle, 240) or is_leg_close(self.last_motor_position.left_middle, 60)
lr = is_leg_close(self.last_motor_position.left_rear, 60)
rf = is_leg_close(self.last_motor_position.right_front, 60)
rm = is_leg_close(self.last_motor_position.right_middle, 60) or is_leg_close(self.last_motor_position.right_middle, 240)
rr = is_leg_close(self.last_motor_position.right_rear, 240)
return lf and lm and lr and rf and rm and rr
    def unfold(self):
        """Unfold all legs from the stowed pose (robot assumed supported,
        not standing on the legs).

        Phase 1: swing all coxas to 150, gating front/rear legs on the
        middle legs' coxa angle so legs cannot collide while swinging.
        Phase 2: extend tibias to the unfolded pose, then release torque.
        """
        self.position_femur_tibia()
        current_position = self.body_controller.read_hexapod_motor_positions()
        self.last_motor_position = current_position
        # Phase 1: coxa swing with collision gating.
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lf = False
            lr = False
            rf = False
            rr = False
            # Front/rear legs only start moving once the middle leg's coxa
            # has cleared the corresponding threshold.
            if self.last_motor_position.left_middle.coxa > 120:
                lf = move_leg(self.last_motor_position.left_front, 150)
            lm = move_leg(self.last_motor_position.left_middle, 150)
            if self.last_motor_position.left_middle.coxa < 180:
                lr = move_leg(self.last_motor_position.left_rear, 150)
            if self.last_motor_position.right_middle.coxa < 180:
                rf = move_leg(self.last_motor_position.right_front, 150)
            rm = move_leg(self.last_motor_position.right_middle, 150)
            if self.last_motor_position.right_middle.coxa > 120:
                rr = move_leg(self.last_motor_position.right_rear, 150)
            self.body_controller.set_motors(self.last_motor_position)
            if lf and lm and lr and rf and rm and rr:
                break
        # Phase 2: extend tibias (left 210 / right 90 mirror poses).
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lf = move_leg(self.last_motor_position.left_front, tibia=210)
            lm = move_leg(self.last_motor_position.left_middle, tibia=210)
            lr = move_leg(self.last_motor_position.left_rear, tibia=210)
            rf = move_leg(self.last_motor_position.right_front, tibia=90)
            rm = move_leg(self.last_motor_position.right_middle, tibia=90)
            rr = move_leg(self.last_motor_position.right_rear, tibia=90)
            self.body_controller.set_motors(self.last_motor_position)
            if lf and lm and lr and rf and rm and rr:
                break
        rospy.sleep(0.2)
        self.body_controller.set_torque(False)
    def fold(self):
        """Fold all legs into the stowed pose (robot assumed supported).

        Skips the motion entirely if check_if_folded() already reports the
        folded pose. Sequence: middle legs out to 150 first (clearance),
        then front/rear legs to their folded coxa angles, then middle legs
        tucked in last. Finishes by releasing motor torque.
        """
        self.position_femur_tibia()
        current_position = self.body_controller.read_hexapod_motor_positions()
        self.last_motor_position = current_position
        if not self.check_if_folded():
            # Step 1: middle legs to neutral 150 for clearance.
            while True:
                rospy.sleep(MOVE_CYCLE_PERIOD)
                lm = move_leg(self.last_motor_position.left_middle, 150)
                rm = move_leg(self.last_motor_position.right_middle, 150)
                self.body_controller.set_motors(self.last_motor_position)
                if lm and rm:
                    break
            # Step 2: fold front and rear coxas.
            while True:
                rospy.sleep(MOVE_CYCLE_PERIOD)
                lf = move_leg(self.last_motor_position.left_front, 240)
                lr = move_leg(self.last_motor_position.left_rear, 60)
                rf = move_leg(self.last_motor_position.right_front, 60)
                rr = move_leg(self.last_motor_position.right_rear, 240)
                self.body_controller.set_motors(self.last_motor_position)
                if lf and lr and rf and rr:
                    break
            # Step 3: tuck the middle legs in last.
            while True:
                rospy.sleep(MOVE_CYCLE_PERIOD)
                lm = move_leg(self.last_motor_position.left_middle, 240)
                rm = move_leg(self.last_motor_position.right_middle, 60)
                self.body_controller.set_motors(self.last_motor_position)
                if lm and rm:
                    break
        rospy.sleep(0.2)
        self.body_controller.set_torque(False)
    def unfold_on_ground(self):
        """Unfold the legs while the robot rests on the ground.

        Unlike unfold(), this works one side at a time: the middle legs
        are used to tilt/support the body so that each side's front and
        rear legs can be swung out and lifted without scraping the ground.
        Ends by releasing motor torque.
        """
        self.position_femur_tibia()
        current_position = self.body_controller.read_hexapod_motor_positions()
        self.last_motor_position = current_position
        # lift middle legs
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lm = move_leg(self.last_motor_position.left_middle, tibia=200)
            rm = move_leg(self.last_motor_position.right_middle, tibia=100)
            self.body_controller.set_motors(self.last_motor_position)
            if lm and rm:
                break
        # fold out middle legs
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lm = move_leg(self.last_motor_position.left_middle, coxa=150)
            rm = move_leg(self.last_motor_position.right_middle, coxa=150)
            self.body_controller.set_motors(self.last_motor_position)
            if lm and rm:
                break
        # lower right leg
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            rm = move_leg(self.last_motor_position.right_middle, femur=170, tibia=100)
            self.body_controller.set_motors(self.last_motor_position)
            if rm:
                break
        # unfold right legs
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            rf = move_leg(self.last_motor_position.right_front, coxa=150)
            rr = move_leg(self.last_motor_position.right_rear, coxa=150)
            self.body_controller.set_motors(self.last_motor_position)
            if rf and rr:
                break
        # lift right legs
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            rf = move_leg(self.last_motor_position.right_front, tibia=90)
            rr = move_leg(self.last_motor_position.right_rear, tibia=90)
            self.body_controller.set_motors(self.last_motor_position)
            if rf and rr:
                break
        # switch lifted side
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lm = move_leg(self.last_motor_position.left_middle, femur=130, tibia=200)
            rm = move_leg(self.last_motor_position.right_middle, femur=240, tibia=90)
            self.body_controller.set_motors(self.last_motor_position)
            if rm and lm:
                break
        # unfold left legs
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lf = move_leg(self.last_motor_position.left_front, coxa=150)
            lr = move_leg(self.last_motor_position.left_rear, coxa=150)
            self.body_controller.set_motors(self.last_motor_position)
            if lf and lr:
                break
        # lift left legs
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lf = move_leg(self.last_motor_position.left_front, tibia=210)
            lr = move_leg(self.last_motor_position.left_rear, tibia=210)
            self.body_controller.set_motors(self.last_motor_position)
            if lf and lr:
                break
        # lift middle left
        while True:
            rospy.sleep(MOVE_CYCLE_PERIOD)
            lm = move_leg(self.last_motor_position.left_middle, femur=60, tibia=210)
            self.body_controller.set_motors(self.last_motor_position)
            if lm:
                break
        rospy.sleep(0.2)
        self.body_controller.set_torque(False)
def fold_on_ground(self):
current_position = self.body_controller.read_hexapod_motor_positions()
self.last_motor_position = current_position
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, 150, femur=60, tibia=210)
lm = move_leg(self.last_motor_position.left_middle, 150, femur=60, tibia=210)
lr = move_leg(self.last_motor_position.left_rear, 150, femur=60, tibia=210)
rf = move_leg(self.last_motor_position.right_front, 150, femur=240, tibia=90)
rm = move_leg(self.last_motor_position.right_middle, 150, femur=240, tibia=90)
rr = move_leg(self.last_motor_position.right_rear, 150, femur=240, tibia=90)
self.body_controller.set_motors(self.last_motor_position)
if lf and lm and lr and rf and rm and rr:
break
# lower right leg
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rm = move_leg(self.last_motor_position.right_middle, femur=170, tibia=100)
self.body_controller.set_motors(self.last_motor_position)
if rm:
break
# compress right legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rf = move_leg(self.last_motor_position.right_front, None, 240, 60)
rr = move_leg(self.last_motor_position.right_rear, None, 240, 60)
self.body_controller.set_motors(self.last_motor_position)
if rf and rr:
break
# fold right legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
rf = move_leg(self.last_motor_position.right_front, 60)
rr = move_leg(self.last_motor_position.right_rear, 240)
self.body_controller.set_motors(self.last_motor_position)
if rf and rr:
break
# switch lifted side
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, femur=130, tibia=200)
rm = move_leg(self.last_motor_position.right_middle, femur=240, tibia=90)
self.body_controller.set_motors(self.last_motor_position)
if rm and lm:
break
# compress left legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, None, 60, 240)
lr = move_leg(self.last_motor_position.left_rear, None, 60, 240)
self.body_controller.set_motors(self.last_motor_position)
if lf and lr:
break
# fold left legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lf = move_leg(self.last_motor_position.left_front, 240)
lr = move_leg(self.last_motor_position.left_rear, 60)
self.body_controller.set_motors(self.last_motor_position)
if lf and lr:
break
# lift left middle leg
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, femur=60, tibia=210)
self.body_controller.set_motors(self.last_motor_position)
if lm:
break
# fold middle legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, 230)
rm = move_leg(self.last_motor_position.right_middle, 70)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
# compress middle legs
while True:
rospy.sleep(MOVE_CYCLE_PERIOD)
lm = move_leg(self.last_motor_position.left_middle, None, 60, 240)
rm = move_leg(self.last_motor_position.right_middle, None, 240, 60)
self.body_controller.set_motors(self.last_motor_position)
if lm and rm:
break
rospy.sleep(0.2)
self.body_controller.set_torque(False)
| [
"rospy.sleep"
] | [((2232, 2249), 'rospy.sleep', 'rospy.sleep', (['(0.05)'], {}), '(0.05)\n', (2243, 2249), False, 'import rospy\n'), ((4831, 4847), 'rospy.sleep', 'rospy.sleep', (['(0.2)'], {}), '(0.2)\n', (4842, 4847), False, 'import rospy\n'), ((6262, 6278), 'rospy.sleep', 'rospy.sleep', (['(0.2)'], {}), '(0.2)\n', (6273, 6278), False, 'import rospy\n'), ((9625, 9641), 'rospy.sleep', 'rospy.sleep', (['(0.2)'], {}), '(0.2)\n', (9636, 9641), False, 'import rospy\n'), ((13697, 13713), 'rospy.sleep', 'rospy.sleep', (['(0.2)'], {}), '(0.2)\n', (13708, 13713), False, 'import rospy\n'), ((1576, 1606), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (1587, 1606), False, 'import rospy\n'), ((3216, 3246), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (3227, 3246), False, 'import rospy\n'), ((4202, 4232), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (4213, 4232), False, 'import rospy\n'), ((5502, 5532), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (5513, 5532), False, 'import rospy\n'), ((5967, 5997), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (5978, 5997), False, 'import rospy\n'), ((6585, 6615), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (6596, 6615), False, 'import rospy\n'), ((6948, 6978), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (6959, 6978), False, 'import rospy\n'), ((7304, 7334), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (7315, 7334), False, 'import rospy\n'), ((7593, 7623), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (7604, 7623), False, 'import rospy\n'), ((7947, 7977), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (7958, 7977), False, 'import rospy\n'), ((8304, 8334), 'rospy.sleep', 
'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (8315, 8334), False, 'import rospy\n'), ((8684, 8714), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (8695, 8714), False, 'import rospy\n'), ((9035, 9065), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (9046, 9065), False, 'import rospy\n'), ((9390, 9420), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (9401, 9420), False, 'import rospy\n'), ((9883, 9913), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (9894, 9913), False, 'import rospy\n'), ((10655, 10685), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (10666, 10685), False, 'import rospy\n'), ((10946, 10976), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (10957, 10976), False, 'import rospy\n'), ((11310, 11340), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (11321, 11340), False, 'import rospy\n'), ((11656, 11686), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (11667, 11686), False, 'import rospy\n'), ((12038, 12068), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (12049, 12068), False, 'import rospy\n'), ((12399, 12429), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (12410, 12429), False, 'import rospy\n'), ((12745, 12775), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (12756, 12775), False, 'import rospy\n'), ((13031, 13061), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (13042, 13061), False, 'import rospy\n'), ((13381, 13411), 'rospy.sleep', 'rospy.sleep', (['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (13392, 13411), False, 'import rospy\n'), ((5162, 5192), 'rospy.sleep', 'rospy.sleep', 
(['MOVE_CYCLE_PERIOD'], {}), '(MOVE_CYCLE_PERIOD)\n', (5173, 5192), False, 'import rospy\n')] |
from pathlib import Path
import os
from PIL import Image, ImageFont, ImageDraw
import numpy as np
import pandas as pd
from math import *

# Sprite-sheet layout: COLUMNS icons per row, each icon cropped to ICON x ICON px.
ICON = 32
COLUMNS = 64

p = Path("resources/graphics/Pokemon/Icons")
df = pd.read_csv(Path("resources/PBS/compressed/pokemon.csv"), index_col=0)

rows = ceil(len(df) / COLUMNS)
# BUG FIX: the canvas must be sized in *pixels* (COLUMNS * ICON wide), not in
# icons.  The previous (64, ceil(n/64)) canvas was far smaller than any tile
# position, so every alpha_composite raised an out-of-bounds ValueError that
# the except-clause swallowed, and the saved sheet came out empty.
canvas = Image.new("RGBA", (COLUMNS * ICON, rows * ICON), "#00000000")

for i, row in df.iterrows():
    try:
        # Each source icon holds two 64x64 frames; downscale to 64x32 and
        # keep the left 32x32 frame.
        img = (
            Image.open(p / f"{row.internalname}.png")
            .convert("RGBA")
            .resize((64, 32), resample=Image.NEAREST)
            .crop((0, 0, 32, 32))
        )
        canvas.alpha_composite(img, ((i % COLUMNS) * ICON, (i // COLUMNS) * ICON))
    except Exception:
        # Missing or unreadable icon files are skipped on purpose.
        continue

canvas.save(Path("resources/graphics/generated/battler_ldtk_list.png"))
| [
"PIL.Image.new",
"PIL.ImageDraw.Draw",
"PIL.Image.open",
"pathlib.Path"
] | [((142, 182), 'pathlib.Path', 'Path', (['"""resources/graphics/Pokemon/Icons"""'], {}), "('resources/graphics/Pokemon/Icons')\n", (146, 182), False, 'from pathlib import Path\n'), ((310, 357), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', '(width, height)', '"""#00000000"""'], {}), "('RGBA', (width, height), '#00000000')\n", (319, 357), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((365, 387), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['canvas'], {}), '(canvas)\n', (379, 387), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((201, 245), 'pathlib.Path', 'Path', (['"""resources/PBS/compressed/pokemon.csv"""'], {}), "('resources/PBS/compressed/pokemon.csv')\n", (205, 245), False, 'from pathlib import Path\n'), ((750, 808), 'pathlib.Path', 'Path', (['"""resources/graphics/generated/battler_ldtk_list.png"""'], {}), "('resources/graphics/generated/battler_ldtk_list.png')\n", (754, 808), False, 'from pathlib import Path\n'), ((455, 496), 'PIL.Image.open', 'Image.open', (["(p / f'{row.internalname}.png')"], {}), "(p / f'{row.internalname}.png')\n", (465, 496), False, 'from PIL import Image, ImageFont, ImageDraw\n')] |
from django.contrib import admin
from django.http import HttpResponse
from django.urls import path
from django.shortcuts import render, HttpResponse, redirect
from django import forms
import os
import csv
from io import TextIOWrapper, StringIO
from .models import Player, Team, Usage, XgLookup
class CsvImportForm(forms.Form):
    # Single-field upload form rendered by UploadCsvMixin's import-csv view.
    csv_file = forms.FileField()
class NoLoggingMixin:
    """Admin mixin that silences the admin's change-history bookkeeping.

    Overrides the three ModelAdmin logging hooks with no-ops so that bulk
    CSV imports/exports do not write a log entry per object.
    """

    def log_addition(self, *args):
        """Skip recording object additions."""
        return None

    def log_change(self, *args):
        """Skip recording object changes."""
        return None

    def log_deletion(self, *args):
        """Skip recording object deletions."""
        return None
class ExportCsvMixin:
    """Admin mixin providing CSV-export admin actions.

    Both actions stream every model field of the selected queryset rows as
    an ``attachment`` CSV response; the "and delete" variant additionally
    deletes each row right after it is written, so the export doubles as an
    archive-and-purge.
    """

    def _export_queryset(self, queryset, delete=False):
        """Build the CSV HttpResponse for *queryset*.

        :param queryset: rows selected in the admin changelist
        :param delete: when True, delete each object immediately after
                       writing its row
        :return: HttpResponse with ``text/csv`` content
        """
        meta = self.model._meta
        field_names = [field.name for field in meta.fields]

        response = HttpResponse(content_type='text/csv')
        # filename is the model's "app.model" label
        response['Content-Disposition'] = 'attachment; filename={}.csv'.format(meta)
        writer = csv.writer(response)
        writer.writerow(field_names)
        for obj in queryset:
            writer.writerow([getattr(obj, field) for field in field_names])
            if delete:
                obj.delete()
        return response

    def export_as_csv(self, request, queryset):
        """Admin action: download the selected rows as CSV."""
        return self._export_queryset(queryset)

    def export_delete_as_csv(self, request, queryset):
        """Admin action: download the selected rows as CSV, then delete them."""
        return self._export_queryset(queryset, delete=True)

    export_as_csv.short_description = "Export Selected"
    export_delete_as_csv.short_description = "Export and Delete Selected"
class UploadCsvMixin:
    """Admin mixin adding an ``import-csv/`` view to the model's admin.

    The uploaded file's header row is taken as model field names; each data
    row is stored as a new model instance via ``setattr``.
    """

    def get_urls(self):
        """Prepend the CSV-import route to the default admin URLs."""
        urls = super().get_urls()
        my_urls = [
            path('import-csv/', self.import_csv)
        ]
        return my_urls + urls

    def import_csv(self, request):
        """Render the upload form (GET) or ingest the posted CSV (POST)."""
        if request.method == 'POST':
            csv_file = TextIOWrapper(request.FILES['csv_file'].file, encoding=request.encoding)
            extension = os.path.splitext(request.FILES['csv_file'].name)[1]
            if extension == '.csv':
                reader = csv.reader(csv_file)
                headers = next(reader)
                # NOTE(review): headers are trusted as-is; rows whose columns
                # do not match model fields will fail on save.
                input_data = [dict(zip(headers, row)) for row in reader]
                for item in input_data:
                    instance = self.model()
                    for field, value in item.items():
                        setattr(instance, field, value)
                    instance.save()
            else:
                self.message_user(request, 'Incorrect file type', level='ERROR')
                return redirect('..')
            self.message_user(request, "Your csv file has been imported")
            return redirect("..")
        form = CsvImportForm()
        payload = {"form": form}
        return render(
            request, "custom_admin/csv_form.html", payload
        )
@admin.register(Player)
class PlayerAdmin(NoLoggingMixin, ExportCsvMixin, admin.ModelAdmin):
    """Player admin: no admin-log writes; CSV export action enabled."""
    readonly_fields = ('updated',)
    actions = ['export_as_csv']
@admin.register(Team)
class TeamAdmin(NoLoggingMixin, ExportCsvMixin, admin.ModelAdmin):
    """Team admin: no admin-log writes; CSV export action enabled."""
    readonly_fields = ('updated',)
    actions = ['export_as_csv']
@admin.register(Usage)
class UsageAdmin(NoLoggingMixin, ExportCsvMixin, admin.ModelAdmin):
    """Usage admin: CSV export plus the export-and-delete (purge) action."""
    readonly_fields = ('updated',)
    actions = ['export_as_csv', 'export_delete_as_csv']
@admin.register(XgLookup)
class XgLookupAdmin(NoLoggingMixin, UploadCsvMixin, ExportCsvMixin, admin.ModelAdmin):
    """XgLookup admin: CSV import via custom changelist button, plus export."""
    # Template presumably adds the button linking to UploadCsvMixin's
    # import-csv view — confirm against custom_admin/models_changelist.html.
    change_list_template = 'custom_admin/models_changelist.html'
    readonly_fields = ('updated',)
    actions = ['export_as_csv']
"django.shortcuts.render",
"django.shortcuts.HttpResponse",
"csv.writer",
"os.path.splitext",
"django.contrib.admin.register",
"io.TextIOWrapper",
"django.shortcuts.redirect",
"django.forms.FileField",
"django.urls.path",
"csv.reader"
] | [((3493, 3515), 'django.contrib.admin.register', 'admin.register', (['Player'], {}), '(Player)\n', (3507, 3515), False, 'from django.contrib import admin\n'), ((3654, 3674), 'django.contrib.admin.register', 'admin.register', (['Team'], {}), '(Team)\n', (3668, 3674), False, 'from django.contrib import admin\n'), ((3811, 3832), 'django.contrib.admin.register', 'admin.register', (['Usage'], {}), '(Usage)\n', (3825, 3832), False, 'from django.contrib import admin\n'), ((3994, 4018), 'django.contrib.admin.register', 'admin.register', (['XgLookup'], {}), '(XgLookup)\n', (4008, 4018), False, 'from django.contrib import admin\n'), ((345, 362), 'django.forms.FileField', 'forms.FileField', ([], {}), '()\n', (360, 362), False, 'from django import forms\n'), ((723, 760), 'django.shortcuts.HttpResponse', 'HttpResponse', ([], {'content_type': '"""text/csv"""'}), "(content_type='text/csv')\n", (735, 760), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((863, 883), 'csv.writer', 'csv.writer', (['response'], {}), '(response)\n', (873, 883), False, 'import csv\n'), ((1226, 1263), 'django.shortcuts.HttpResponse', 'HttpResponse', ([], {'content_type': '"""text/csv"""'}), "(content_type='text/csv')\n", (1238, 1263), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((1366, 1386), 'csv.writer', 'csv.writer', (['response'], {}), '(response)\n', (1376, 1386), False, 'import csv\n'), ((3412, 3466), 'django.shortcuts.render', 'render', (['request', '"""custom_admin/csv_form.html"""', 'payload'], {}), "(request, 'custom_admin/csv_form.html', payload)\n", (3418, 3466), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((1831, 1867), 'django.urls.path', 'path', (['"""import-csv/"""', 'self.import_csv'], {}), "('import-csv/', self.import_csv)\n", (1835, 1867), False, 'from django.urls import path\n'), ((2006, 2078), 'io.TextIOWrapper', 'TextIOWrapper', (["request.FILES['csv_file'].file"], {'encoding': 
'request.encoding'}), "(request.FILES['csv_file'].file, encoding=request.encoding)\n", (2019, 2078), False, 'from io import TextIOWrapper, StringIO\n'), ((3309, 3323), 'django.shortcuts.redirect', 'redirect', (['""".."""'], {}), "('..')\n", (3317, 3323), False, 'from django.shortcuts import render, HttpResponse, redirect\n'), ((2103, 2151), 'os.path.splitext', 'os.path.splitext', (["request.FILES['csv_file'].name"], {}), "(request.FILES['csv_file'].name)\n", (2119, 2151), False, 'import os\n'), ((2217, 2237), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (2227, 2237), False, 'import csv\n'), ((3200, 3214), 'django.shortcuts.redirect', 'redirect', (['""".."""'], {}), "('..')\n", (3208, 3214), False, 'from django.shortcuts import render, HttpResponse, redirect\n')] |
# -*- coding: utf-8 -*-
# =====================================================================================================================
# Copyright (©) 2015-2021 LCS - Laboratoire Catalyse et Spectrochimie, Caen, France. =
# CeCILL-B FREE SOFTWARE LICENSE AGREEMENT - See full LICENSE agreement in the root directory =
# =====================================================================================================================
#
# ======================================================================================================================
# Copyright (©) 2015-2021 LCS - Laboratoire Catalyse et Spectrochimie, Caen, France. =
# CeCILL-B FREE SOFTWARE LICENSE AGREEMENT - See full LICENSE agreement in the root directory =
# ======================================================================================================================
from pathlib import Path
from os import environ
from os.path import join
import pytest
from spectrochempy.core import preferences as prefs
from spectrochempy import NO_DISPLAY
from spectrochempy.utils import get_filename
def test_get_filename():
    """Exercise ``get_filename`` lookup modes: default dialog result,
    explicit ``directory`` keyword, sub-path in the filename, working-dir
    fallback, directory listing (``listdir``), and error handling for
    unknown locations."""
    # should read in the default prefs.datadir (and for testing we fix the name to environ['TEST_FILE']
    f = get_filename(filetypes=["OMNIC files (*.spg *.spa *.srs)",
                                "SpectroChemPy files (*.scp)"])
    assert isinstance(f, dict)

    # dictionary=False returns a flat list of Path objects instead of a
    # {extension: [paths]} mapping
    f = get_filename(filetypes=["OMNIC files (*.spg *.spa *.srs)",
                                "SpectroChemPy files (*.scp)"],
                     dictionary=False)
    assert isinstance(f, list)
    assert isinstance(f[0], Path)
    if NO_DISPLAY:
        # headless runs bypass the dialog and resolve to environ['TEST_FILE']
        assert str(f[0]) == join(prefs.datadir, environ['TEST_FILE'])

    # directory specified by a keyword as well as the filename
    f = get_filename("nh4y-activation.spg", directory="irdata")
    assert f == {
        '.spg': [Path(prefs.datadir) / 'irdata' / 'nh4y-activation.spg']
    }

    # directory specified in the filename as a subpath of the data directory
    f = get_filename("irdata/nh4y-activation.spg")
    assert f == {
        '.spg': [Path(prefs.datadir) / 'irdata' / 'nh4y-activation.spg']
    }

    # no directory specified (filename must be in the working or the default data directory
    f = get_filename("wodger.spg")

    # if it is not found an error is generated
    # NOTE(review): without a directory hint this file is presumably absent
    # from the working directory, hence the IOError — confirm fixture layout.
    with pytest.raises(IOError):
        f = get_filename("nh4y-activation.spg")

    # directory is implicit (we get every files inside, with an allowed extension)
    # WARNING: Must end with a backslash
    f = get_filename("irdata/",
                     filetypes=['OMNIC files (*.spa, *.spg)', 'OMNIC series (*.srs)', 'all files (*.*)'],
                     listdir=True)
    if '.scp' in f.keys():
        del f['.scp']
    assert len(f.keys()) == 2

    # should raise an error
    with pytest.raises(IOError):
        get_filename("~/xxxx",
                     filetypes=["OMNIC files (*.sp*)",
                                "SpectroChemPy files (*.scp)",
                                "all files (*)"])
# EOF
| [
"os.path.join",
"pytest.raises",
"pathlib.Path",
"spectrochempy.utils.get_filename"
] | [((1359, 1453), 'spectrochempy.utils.get_filename', 'get_filename', ([], {'filetypes': "['OMNIC files (*.spg *.spa *.srs)', 'SpectroChemPy files (*.scp)']"}), "(filetypes=['OMNIC files (*.spg *.spa *.srs)',\n 'SpectroChemPy files (*.scp)'])\n", (1371, 1453), False, 'from spectrochempy.utils import get_filename\n'), ((1522, 1634), 'spectrochempy.utils.get_filename', 'get_filename', ([], {'filetypes': "['OMNIC files (*.spg *.spa *.srs)', 'SpectroChemPy files (*.scp)']", 'dictionary': '(False)'}), "(filetypes=['OMNIC files (*.spg *.spa *.srs)',\n 'SpectroChemPy files (*.scp)'], dictionary=False)\n", (1534, 1634), False, 'from spectrochempy.utils import get_filename\n'), ((1910, 1965), 'spectrochempy.utils.get_filename', 'get_filename', (['"""nh4y-activation.spg"""'], {'directory': '"""irdata"""'}), "('nh4y-activation.spg', directory='irdata')\n", (1922, 1965), False, 'from spectrochempy.utils import get_filename\n'), ((2161, 2203), 'spectrochempy.utils.get_filename', 'get_filename', (['"""irdata/nh4y-activation.spg"""'], {}), "('irdata/nh4y-activation.spg')\n", (2173, 2203), False, 'from spectrochempy.utils import get_filename\n'), ((2415, 2441), 'spectrochempy.utils.get_filename', 'get_filename', (['"""wodger.spg"""'], {}), "('wodger.spg')\n", (2427, 2441), False, 'from spectrochempy.utils import get_filename\n'), ((2705, 2831), 'spectrochempy.utils.get_filename', 'get_filename', (['"""irdata/"""'], {'filetypes': "['OMNIC files (*.spa, *.spg)', 'OMNIC series (*.srs)', 'all files (*.*)']", 'listdir': '(True)'}), "('irdata/', filetypes=['OMNIC files (*.spa, *.spg)',\n 'OMNIC series (*.srs)', 'all files (*.*)'], listdir=True)\n", (2717, 2831), False, 'from spectrochempy.utils import get_filename\n'), ((2499, 2521), 'pytest.raises', 'pytest.raises', (['IOError'], {}), '(IOError)\n', (2512, 2521), False, 'import pytest\n'), ((2535, 2570), 'spectrochempy.utils.get_filename', 'get_filename', (['"""nh4y-activation.spg"""'], {}), "('nh4y-activation.spg')\n", (2547, 2570), 
False, 'from spectrochempy.utils import get_filename\n'), ((2987, 3009), 'pytest.raises', 'pytest.raises', (['IOError'], {}), '(IOError)\n', (3000, 3009), False, 'import pytest\n'), ((3019, 3128), 'spectrochempy.utils.get_filename', 'get_filename', (['"""~/xxxx"""'], {'filetypes': "['OMNIC files (*.sp*)', 'SpectroChemPy files (*.scp)', 'all files (*)']"}), "('~/xxxx', filetypes=['OMNIC files (*.sp*)',\n 'SpectroChemPy files (*.scp)', 'all files (*)'])\n", (3031, 3128), False, 'from spectrochempy.utils import get_filename\n'), ((1796, 1837), 'os.path.join', 'join', (['prefs.datadir', "environ['TEST_FILE']"], {}), "(prefs.datadir, environ['TEST_FILE'])\n", (1800, 1837), False, 'from os.path import join\n'), ((2005, 2024), 'pathlib.Path', 'Path', (['prefs.datadir'], {}), '(prefs.datadir)\n', (2009, 2024), False, 'from pathlib import Path\n'), ((2243, 2262), 'pathlib.Path', 'Path', (['prefs.datadir'], {}), '(prefs.datadir)\n', (2247, 2262), False, 'from pathlib import Path\n')] |
from toontown.safezone.DistributedETreasureAI import DistributedETreasureAI
from toontown.safezone.RegenTreasurePlannerAI import RegenTreasurePlannerAI
class ETreasurePlannerAI(RegenTreasurePlannerAI):
    """Regenerating treasure planner for the 'E' safezone.

    Treasures appear at a fixed list of map coordinates and are only
    collectible by toons that are alive but not at full laff.
    """

    def __init__(self, zoneId):
        # healAmount: hp each treasure restores (consumed by
        # DistributedETreasureAI — presumably; confirm there).
        self.healAmount = 2
        self.spawnPoints = []
        RegenTreasurePlannerAI.__init__(self, zoneId, DistributedETreasureAI, 'ETreasurePlanner', 15, 3)

    def initSpawnPoints(self):
        """Populate and return the fixed (x, y, z) spawn coordinates."""
        points = [
            (19, -171, 0.0), (-3, -100, 3.66), (-4, -25, 7.0),
            (1.15, 64.89, 4.858), (-89, 43.4, 0.0), (-114, -5, 1.8),
            (-106, -98, 0.0), (-1, -61, 1.0), (130, 30, 0.0),
            (-21, -7, 7.0), (-27, 91, 0.0), (-57, 0, 2.7),
            (12, -128, -9.97), (-1.8, 103.4, -8.0), (-27.5, 6, -9.2),
            (-29.6, -34.4, -5.4), (-163.7, 13.8, 0.9), (1.3, -107, 7.9),
            (-87, -49, 0.05), (45, 2.6, 8.0),
        ]
        self.spawnPoints = points
        return self.spawnPoints

    def validAvatar(self, av):
        """A toon may grab a treasure only while alive and below max laff."""
        return av.hp > 0 and av.hp < av.maxHp
| [
"toontown.safezone.RegenTreasurePlannerAI.RegenTreasurePlannerAI.__init__"
] | [((303, 403), 'toontown.safezone.RegenTreasurePlannerAI.RegenTreasurePlannerAI.__init__', 'RegenTreasurePlannerAI.__init__', (['self', 'zoneId', 'DistributedETreasureAI', '"""ETreasurePlanner"""', '(15)', '(3)'], {}), "(self, zoneId, DistributedETreasureAI,\n 'ETreasurePlanner', 15, 3)\n", (334, 403), False, 'from toontown.safezone.RegenTreasurePlannerAI import RegenTreasurePlannerAI\n')] |
from sdk.color_print import c_print
from user_roles import role_translate_id
from tqdm import tqdm
def add_roles(session, old_session, roles, logger):
    """Copy Prisma Cloud user roles into the destination tenant.

    Account-group and resource-list IDs embedded in each role are first
    translated from their source-tenant IDs to the matching destination
    tenant IDs, then the role is POSTed to the destination.

    :param session: destination tenant session
    :param old_session: source tenant session
    :param roles: list of role payloads exported from the source tenant
    :param logger: logger for progress/debug output
    :return: number of roles successfully created (HTTP 200/201)
    """
    added = 0
    tenant_name = session.tenant

    if not roles:
        logger.info(f'No User Roles to add for tenant: \'{tenant_name}\'')
        return added

    logger.info(f'Adding User Roles to tenant: \'{tenant_name}\'')

    # Fetch the ID-translation lookup tables from both tenants once, up front.
    logger.debug('API - Getting source Account Groups')
    src_acc_grps = old_session.request('GET', '/cloud/group').json()
    logger.debug('API - Getting destination Account Groups')
    dest_acc_grps = session.request('GET', '/cloud/group').json()

    logger.debug('API - Getting source Resource Lists')
    src_rsc_lists = old_session.request('GET', '/v1/resource_list').json()
    logger.debug('API - Getting destination Resource Lists')
    dest_rsc_lists = session.request('GET', '/v1/resource_list').json()

    for role in tqdm(roles, desc='Adding User Roles', leave=False):
        # Translate Account Group IDs
        if 'accountGroupIds' in role:
            role.update(accountGroupIds=[
                role_translate_id.translate_acc_grp_ids(old_id, dest_acc_grps, src_acc_grps)
                for old_id in role['accountGroupIds']])
        # Translate Resource List IDs
        if 'resourceListIds' in role:
            role.update(resourceListIds=[
                role_translate_id.translate_rsc_list_ids(old_id, dest_rsc_lists, src_rsc_lists)
                for old_id in role['resourceListIds']])

        name = role['name']
        logger.debug(f'API - Adding role: {name}')
        res = session.request('POST', '/user/role', json=role)
        if res.status_code in (200, 201):
            added += 1

    return added
"user_roles.role_translate_id.translate_rsc_list_ids",
"user_roles.role_translate_id.translate_acc_grp_ids",
"tqdm.tqdm"
] | [((924, 974), 'tqdm.tqdm', 'tqdm', (['roles'], {'desc': '"""Adding User Roles"""', 'leave': '(False)'}), "(roles, desc='Adding User Roles', leave=False)\n", (928, 974), False, 'from tqdm import tqdm\n'), ((1237, 1313), 'user_roles.role_translate_id.translate_acc_grp_ids', 'role_translate_id.translate_acc_grp_ids', (['old_id', 'dest_acc_grps', 'src_acc_grps'], {}), '(old_id, dest_acc_grps, src_acc_grps)\n', (1276, 1313), False, 'from user_roles import role_translate_id\n'), ((1678, 1757), 'user_roles.role_translate_id.translate_rsc_list_ids', 'role_translate_id.translate_rsc_list_ids', (['old_id', 'dest_rsc_lists', 'src_rsc_lists'], {}), '(old_id, dest_rsc_lists, src_rsc_lists)\n', (1718, 1757), False, 'from user_roles import role_translate_id\n')] |
# File: ds_base_service.py
#
# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
#
import json
import time
import base64
from functools import wraps
from ..config import ds_api_host, ds_api_base
from .ds_abstract_service import DSAbstractService
class DSBaseService(DSAbstractService):
    """
    Base Service that implements common operations for all DS services.

    Builds the HTTP Basic ``Authorization`` credential from the API key
    pair and prefixes every request path with the configured host/base.
    """

    def __init__(self, ds_api_key, ds_api_secret_key, proxy=None):
        super(DSBaseService, self).__init__(proxy=proxy)
        # HTTP Basic credential: base64("<key>:<secret>"), stored as ASCII text.
        data_string = str(ds_api_key) + ":" + str(ds_api_secret_key)
        data_bytes = data_string.encode("ascii")
        data_bytes = base64.b64encode(data_bytes)
        self._hash = data_bytes.decode("ascii")
        self._url_base = '{}{}'.format(ds_api_host, ds_api_base)

    def _headers(self, with_content_type=True):
        # Common request headers; the JSON content type is omitted for
        # requests that carry no body (e.g. the credential check).
        headers = {
            'Authorization': 'Basic {}'.format(self._hash),
        }
        if with_content_type:
            headers['Content-Type'] = 'application/json'
        return headers

    def _request(self, path, method='GET', body=None, headers=None):
        """
        Send a request to the Digital Shadows API.
        :param path: API endpoint path, does not require host. eg. /api/session-user
        :param method: HTTP method, defaults to GET
        :param body: request payload (serialized below)
        :param headers: optional header dict; defaults to self._headers()
        :return: parsed JSON response body (dict/list)
        :raises RuntimeError: on any non-200 HTTP status
        """
        url = '{}{}'.format(self._url_base, path)
        headers = self._headers() if headers is None else headers
        # NOTE(review): the body is serialized via repr + quote swap rather
        # than json.dumps — this breaks on values containing apostrophes;
        # presumably callers only pass simple dicts. Confirm upstream.
        response, content = super(DSBaseService, self)._request(url,
                                                                method=method,
                                                                body=str(body).replace("'", '"'),
                                                                headers=headers)
        if int(response['status']) == 200:
            return json.loads(content)
        else:
            raise RuntimeError('{} responded with status code {}'.format(url, response['status']))

    def _request_post(self, path, method='POST', body=None, headers=None):
        """
        Send a request to the Digital Shadows API.
        :param path: API endpoint path, does not require host. eg. /api/session-user
        :param method: HTTP method, defaults to POST
        :param body: request payload (serialized below)
        :param headers: optional header dict; defaults to self._headers()
        :return: dict with 'status', 'message' and 'content' keys
        :raises RuntimeError: on any status other than 200/204
        """
        url = '{}{}'.format(self._url_base, path)
        headers = self._headers() if headers is None else headers
        # Same repr-based body serialization as _request; see note there.
        response, content = super(DSBaseService, self)._request(url,
                                                                method=method,
                                                                body=str(body).replace("'", '"'),
                                                                headers=headers)
        if int(response['status']) in (200, 204):
            # 204 (and some POSTs) return an empty body — keep it as "".
            if content != "":
                res_text = json.loads(content)
            else:
                res_text = ""
            post_response = {
                'status': response['status'],
                'message': 'SUCCESS',
                'content': []
            }
            post_response['content'].append(res_text)
            return post_response
        else:
            raise RuntimeError('{} responded with status code {}'.format(url, response['status']))

    def _scrolling_request(self, path, method='GET', body=None, headers=None):
        """
        Scrolls through a paginated response from the Digital Shadows API.
        :param path: API endpoint path, does not require host. eg. /api/session-user
        :param method: HTTP method, defaults to GET
        :param body: View object - requires pagination field, see DSBaseService.paginated decorator
        :return: generator yielding one parsed JSON page per iteration
        """
        assert 'pagination' in body
        paginated_view = body
        url = '{}{}'.format(self._url_base, path)
        headers = self._headers() if headers is None else headers
        scrolling = True
        while scrolling:
            response, content = super(DSBaseService, self)._request(url,
                                                                    method,
                                                                    body=str(paginated_view).replace("'", '"'),
                                                                    headers=headers)
            if int(response['status']) == 200:
                data = json.loads(content)
                offset = data['currentPage']['offset']
                size = data['currentPage']['size']
                total = data['total']
                if offset + size < total:
                    # advance the window for the next request
                    paginated_view['pagination']['offset'] = offset + size
                else:
                    # last page reached — yield it, then stop
                    scrolling = False
                yield data
            elif int(response['status']) == 429:
                # rate limited, wait before resuming scroll requests
                time.sleep(1)
            else:
                # any other status silently ends the scroll without raising
                scrolling = False

    def valid_credentials(self):
        """
        Checks if the provided Digital Shadows credentials are valid.
        :return: bool
        """
        path = '/api/session-user'
        url = '{}{}'.format(self._url_base, path)
        response, content = super(DSBaseService, self)._request(url,
                                                                headers=self._headers(with_content_type=False))
        return int(response['status']) == 200

    @staticmethod
    def paginated(offset=0, size=500):
        """Decorator factory: merge a 'pagination' field into a view dict.

        The decorated function must return a dict; the pagination window is
        prepended so _scrolling_request can advance it page by page.
        """
        def paginated_decorator(view_function):
            @wraps(view_function)
            def view_wrapper(*args, **kwargs):
                pagination = {
                    'pagination': {
                        'offset': offset,
                        'size': size
                    }
                }
                view = view_function(*args, **kwargs)
                pagination.update(view)
                return pagination
            return view_wrapper
        return paginated_decorator

    @staticmethod
    def sorted(sort_property, reverse=False):
        """Decorator factory: merge a 'sort' field into a view dict.

        reverse=False yields DESCENDING order (API default direction here).
        """
        def sorted_decorator(view_function):
            @wraps(view_function)
            def view_wrapper(*args, **kwargs):
                sort = {
                    'sort': {
                        'property': sort_property,
                        'direction': "ASCENDING" if reverse else "DESCENDING"
                    }
                }
                view = view_function(*args, **kwargs)
                sort.update(view)
                return sort
            return view_wrapper
        return sorted_decorator
| [
"base64.b64encode",
"time.sleep",
"json.loads",
"functools.wraps"
] | [((670, 698), 'base64.b64encode', 'base64.b64encode', (['data_bytes'], {}), '(data_bytes)\n', (686, 698), False, 'import base64\n'), ((1908, 1927), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (1918, 1927), False, 'import json\n'), ((5597, 5617), 'functools.wraps', 'wraps', (['view_function'], {}), '(view_function)\n', (5602, 5617), False, 'from functools import wraps\n'), ((6169, 6189), 'functools.wraps', 'wraps', (['view_function'], {}), '(view_function)\n', (6174, 6189), False, 'from functools import wraps\n'), ((2947, 2966), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (2957, 2966), False, 'import json\n'), ((4447, 4466), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (4457, 4466), False, 'import json\n'), ((4949, 4962), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4959, 4962), False, 'import time\n')] |
# Imports para o Carla
import glob
import os
import sys
try:
sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
sys.version_info.major,
sys.version_info.minor,
'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
pass
import carla
try:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + '/carla')
except IndexError:
pass
from agents.navigation.unb_agent import Agent
"""
Esse script consiste na implementação de alguns módulos de veículos autônomos:
- Controladores PID para controle longitudinal e lateral
- Alteração de rota dinamicamente mediante tratamento de sinal de
um sensor de obstáculo posicionado na frente do véiculo.
Com isso, o veículo sai de um ponto inicial, desvia de dois obstáculos
mudando de faixa e detectando um semáforo vermelho, para antes do cruzamento
"""
def main():
    """Run the autonomous-driving demo.

    The ego vehicle drives from a spawn point toward a destination,
    changes lanes around two parked obstacles (re-routing on the signal
    of a front-mounted obstacle sensor) and stops before the
    intersection when a red traffic light is detected. Longitudinal and
    lateral control are handled by the Agent's PID controllers.
    """
    actor_list = []
    # Pre-bind the names used in the cleanup block so an early failure
    # (e.g. the client not connecting) does not raise NameError inside
    # `finally` and mask the original exception.
    client = None
    world = None
    agent = None
    try:
        # Connect the client to the simulation
        client = carla.Client('localhost', 2000)
        client.set_timeout(10.0)
        # Configure the simulation through the client
        world = client.get_world()
        _map = world.get_map()
        settings = world.get_settings()
        # In synchronous mode the server waits for a "tick" from the
        # client -- a "ready to proceed" message -- before advancing to
        # the next simulation step. In practice this means the server
        # waits for the client-side computations before moving on.
        settings.synchronous_mode = True
        # Fixed interval between simulation steps: 0.022 gives roughly
        # 45 frames per simulated second.
        settings.fixed_delta_seconds = 0.022
        world.apply_settings(settings)
        # Spawn the ego vehicle and pick the destination point
        blueprint_library = world.get_blueprint_library()
        vehicle_bp = blueprint_library.filter('bmw')[0]
        spawn_point = _map.get_spawn_points()[64]
        destination_point = _map.get_spawn_points()[31]
        vehicle = world.spawn_actor(vehicle_bp, spawn_point)
        actor_list.append(vehicle)
        world.tick()
        # Spawn the first obstacle
        obstacle_bp = blueprint_library.filter('vehicle.audi.a2')[0]
        obstacle_spawn_point = _map.get_spawn_points()[62]
        obstacle = world.spawn_actor(obstacle_bp, obstacle_spawn_point)
        actor_list.append(obstacle)
        # Spawn the second obstacle
        obstacle_spawn_point = carla.Transform(carla.Location(x=-88.056326, y=-48.930733, z=0.930733), carla.Rotation(pitch=0.000000, yaw=89.787674, roll=0.000000))
        obstacle2 = world.spawn_actor(obstacle_bp, obstacle_spawn_point)
        actor_list.append(obstacle2)
        world.tick()
        # Create the agent and attach it to the ego vehicle
        agent = Agent(vehicle, ignore_traffic_light=False)
        actor_list.append(agent._camera)
        actor_list.append(agent.obstacle_sensor)
        # Generate the route
        agent.set_route(spawn_point.location, destination_point.location)
        # Game loop
        while not agent.arrived():
            world.tick()
            world.get_spectator().set_transform(agent._camera.get_transform())
            # Produce the control command for the vehicle
            control = agent.run_step(speed=(vehicle.get_speed_limit())) or agent.emergency_stop()
            vehicle.apply_control(control)
            # Route visualization
            agent.show_path(distance=int(agent.get_speed(vehicle)/2))
    finally:
        print("Destino alcançado!")
        print('Destruindo Atores')
        # Stop active sensors explicitly: they do not stop automatically
        # when the script ends.
        if agent is not None:
            agent.obstacle_sensor.stop()
        if client is not None:
            client.apply_batch([carla.command.DestroyActor(x) for x in actor_list])
        print('Done.')
        if world is not None:
            world.tick()
            # Disable synchronous mode again so the spectator view can
            # be moved freely after the script exits.
            settings = world.get_settings()
            settings.synchronous_mode = False
            world.apply_settings(settings)


if __name__ == '__main__':
    main()
"carla.command.DestroyActor",
"carla.Location",
"agents.navigation.unb_agent.Agent",
"carla.Client",
"os.path.abspath",
"carla.Rotation",
"glob.glob"
] | [((1002, 1033), 'carla.Client', 'carla.Client', (['"""localhost"""', '(2000)'], {}), "('localhost', 2000)\n", (1014, 1033), False, 'import carla\n'), ((3007, 3049), 'agents.navigation.unb_agent.Agent', 'Agent', (['vehicle'], {'ignore_traffic_light': '(False)'}), '(vehicle, ignore_traffic_light=False)\n', (3012, 3049), False, 'from agents.navigation.unb_agent import Agent\n'), ((82, 240), 'glob.glob', 'glob.glob', (["('../carla/dist/carla-*%d.%d-%s.egg' % (sys.version_info.major, sys.\n version_info.minor, 'win-amd64' if os.name == 'nt' else 'linux-x86_64'))"], {}), "('../carla/dist/carla-*%d.%d-%s.egg' % (sys.version_info.major,\n sys.version_info.minor, 'win-amd64' if os.name == 'nt' else 'linux-x86_64')\n )\n", (91, 240), False, 'import glob\n'), ((2690, 2744), 'carla.Location', 'carla.Location', ([], {'x': '(-88.056326)', 'y': '(-48.930733)', 'z': '(0.930733)'}), '(x=-88.056326, y=-48.930733, z=0.930733)\n', (2704, 2744), False, 'import carla\n'), ((2746, 2796), 'carla.Rotation', 'carla.Rotation', ([], {'pitch': '(0.0)', 'yaw': '(89.787674)', 'roll': '(0.0)'}), '(pitch=0.0, yaw=89.787674, roll=0.0)\n', (2760, 2796), False, 'import carla\n'), ((3928, 3957), 'carla.command.DestroyActor', 'carla.command.DestroyActor', (['x'], {}), '(x)\n', (3954, 3957), False, 'import carla\n'), ((360, 385), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (375, 385), False, 'import os\n')] |
import json
import requests
post_url = "http://127.0.0.1:5000/api/"
# ---------- general web interfacing ----------------------
def post(endpoint, payload, uri="http://127.0.0.1:5000/api/"):
    """Send a POST request carrying a JSON payload to the flask web server.

    Args:
        endpoint: The endpoint of the API.
        payload: Payload according to what the web server requires.
        uri: Web server uri.

    Returns:
        object: Response from web server.
    """
    target = uri + endpoint
    return requests.post(target, json=payload)
def get(endpoint, uri="http://127.0.0.1:5000/api/"):
    """Send a GET request to the flask web server.

    Args:
        endpoint: The endpoint of the API.
        uri: Web server uri.

    Returns:
        object: Response from web server.
    """
    target = uri + endpoint
    return requests.get(target)
# ---------- API ----------------------
def get_all_patients():
    """Obtain a list of all patients in the database. (For testing)

    Returns:
        dict: All patients currently in database referenced by ID.
    """
    return byte_2_json(get("all_patients"))
def add_new_patient(patient_id: str, attending_email: str, user_age: int):
    """Add a new patient to the database.

    Args:
        patient_id: ID of the patient.
        attending_email: Email of the user.
        user_age: Age of the user.

    Returns:
        dict: Patient that was added.
    """
    new_patient = {
        "patient_id": patient_id,
        "attending_email": attending_email,
        "user_age": user_age,
    }
    return byte_2_json(post("new_patient", new_patient))
def get_interval_average(patient_id: str, timestamp: str):
    """Get the average heart rate recorded before a timestamp.

    Args:
        patient_id: ID of the patient.
        timestamp: timestamp in form YYYY-MM-DD HH:MM:SS.#######

    Returns:
        float: Average heart rate from before the timestamp.
    """
    request_body = {
        "patient_id": patient_id,
        "heart_rate_average_since": timestamp,
    }
    return byte_2_json(post("heart_rate/interval_average", request_body))
def post_heart_rate(patient_id: str, heart_rate: int):
    """Post a heart rate to a patient. Timestamp automatically generated.

    Args:
        patient_id: ID of the patient.
        heart_rate: Heart rate to post.

    Returns:
        dict: Updated patient information.
    """
    measurement = {
        "patient_id": patient_id,
        "heart_rate": heart_rate,
    }
    return byte_2_json(post("heart_rate", measurement))
def get_patient_status(patient_id: str):
    """Obtain patient status. Sends email if tachychardic.

    Args:
        patient_id: ID of the patient.

    Returns:
        tuple: first is if tachychardic, second is timestamp.
    """
    endpoint = "status/{}".format(patient_id)
    return byte_2_json(get(endpoint))
def get_heart_rate(patient_id: str):
    """Obtain all heart rates recorded for a patient.

    Args:
        patient_id: ID of the patient.

    Returns:
        list: List of all heart rates from the patient.
    """
    endpoint = "heart_rate/{}".format(patient_id)
    return byte_2_json(get(endpoint))
def get_heart_rate_average(patient_id: str):
    """Obtain the average heart rate of a patient.

    Args:
        patient_id: ID of the patient.

    Returns:
        float: Average heart rate of the patient.
    """
    endpoint = "heart_rate/average/{}".format(patient_id)
    return byte_2_json(get(endpoint))
def byte_2_json(resp):
    """Convert a raw response body to JSON, raising any server-reported error.

    Args:
        resp (bytes): Response from request.

    Returns:
        dict: Json object of interest.
    """
    decoded = resp.content.decode('utf-8')
    return error_catcher(json.loads(decoded))
def error_catcher(json_resp: dict):
    """Raise the appropriate exception reported by the web server, if any.

    Args:
        json_resp: Information from the server.

    Returns:
        dict: The original dictionary if not error.
    """
    # Only plain dicts carrying an "error_type" key describe server errors;
    # anything else passes straight through.
    if type(json_resp) == dict and "error_type" in json_resp.keys():
        reported = json_resp["error_type"]
        # Precedence: TypeError, then AttributeError, then ValueError.
        for label, exc_class in (("TypeError", TypeError),
                                 ("AttributeError", AttributeError),
                                 ("ValueError", ValueError)):
            if label in reported:
                raise exc_class(json_resp["msg"])
    return json_resp
if __name__ == "__main__":
    # Manual smoke test: exercises every endpoint, in order, against a
    # locally running server. Expected values are noted in print labels.
    from random import choice
    from string import ascii_uppercase
    # Random 10-letter patient id so repeated runs do not collide.
    p_id = ''.join(choice(ascii_uppercase) for _ in range(10))
    print(p_id)
    r = add_new_patient(p_id, "<EMAIL>", 21)
    print(r)
    r = post_heart_rate(p_id, 80)
    print("Posted: ", r)
    hr = get_heart_rate(p_id)
    print("All Heartrates:", hr)
    r = post_heart_rate(p_id, 90)
    print("Posted: ", r)
    av = get_heart_rate_average(p_id)
    print("Average: ", av)
    hr = get_heart_rate(p_id)
    print("All Heartrates:", hr)
    # With rates (80, 90) posted, the status is expected non-tachycardic.
    curr_status, timestamp = get_patient_status(p_id)
    print("Current Status 1 (False/Not Tach): ", curr_status, "Timestamp: ", timestamp)
    # Interval average up to the timestamp captured above.
    int_avg = get_interval_average(p_id, timestamp)
    print("Interval Average (should be 85):", int_avg)
    r = post_heart_rate(p_id, 100)
    print("Posted: ", r)
    hr = get_heart_rate(p_id)
    print("All Heartrates:", hr)
    r = post_heart_rate(p_id, 110)
    # After posting 110, status is expected tachycardic (server sends email).
    curr_status, _ = get_patient_status(p_id)
    print("Current Status 2 (True/Tach + sends email): ", curr_status, "Timestamp: ", timestamp)
    av = get_heart_rate_average(p_id)
    print("Average (95): ", av)
    # Same timestamp as before, so the interval average is unchanged.
    int_avg = get_interval_average(p_id, timestamp)
    print("Interval Average (should be 85):", int_avg)
| [
"random.choice",
"requests.post",
"requests.get"
] | [((463, 506), 'requests.post', 'requests.post', (['(uri + endpoint)'], {'json': 'payload'}), '(uri + endpoint, json=payload)\n', (476, 506), False, 'import requests\n'), ((762, 790), 'requests.get', 'requests.get', (['(uri + endpoint)'], {}), '(uri + endpoint)\n', (774, 790), False, 'import requests\n'), ((4542, 4565), 'random.choice', 'choice', (['ascii_uppercase'], {}), '(ascii_uppercase)\n', (4548, 4565), False, 'from random import choice\n')] |
"""Loads the config.json file and store key value pairs into variables"""
import json
with open('config.json', 'r', encoding='utf-8') as f:
config = json.load(f)
config_location_type = config['location_type']
config_location = config['location']
country = config['country']
config_covid_terms = config['covid_terms']
newsAPI_key = config['newsAPI_key']
news_outlet_websites = config['news_outlet_websites']
webpage_url = config["local_host_url"]
| [
"json.load"
] | [((158, 170), 'json.load', 'json.load', (['f'], {}), '(f)\n', (167, 170), False, 'import json\n')] |
import glob, os
import numpy as np
import tensorflow as tf
import tensorflow.contrib.graph_editor as ge
class Flownet2:
def __init__(self, bilinear_warping_module):
self.weights = dict()
for key, shape in self.all_variables():
self.weights[key] = tf.get_variable(key, shape=shape)
self.bilinear_warping_module = bilinear_warping_module
def leaky_relu(self, x, s):
assert s > 0 and s < 1, "Wrong s"
return tf.maximum(x, s*x)
def warp(self, x, flow):
return self.bilinear_warping_module.bilinear_warping(x, tf.stack([flow[:,:,:,1], flow[:,:,:,0]], axis=3))
# flip true -> [:,:,:,0] y axis downwards
# [:,:,:,1] x axis
# as in matrix indexing
#
# false returns 0->x, 1->y
def __call__(self, im0, im1, flip=True):
f = self.get_blobs(im0, im1)['predict_flow_final']
if flip:
f = tf.stack([f[:,:,:,1], f[:,:,:,0]], axis=3)
return f
def get_optimizer(self, flow, target, learning_rate=1e-4):
#flow = self.__call__(im0, im1)
loss = tf.reduce_sum(flow * target) # target holding the gradients!
opt = tf.train.AdamOptimizer(learning_rate=learning_rate, beta1=0.95, beta2=0.99, epsilon=1e-8)
opt = opt.minimize(loss, var_list=
# [v for k,v in self.weights.iteritems() if (k.startswith('net3_') or k.startswith('netsd_') or k.startswith('fuse_'))])
[v for k,v in self.weights.iteritems() if ((k.startswith('net3_') or k.startswith('netsd_') or k.startswith('fuse_')) and not ('upsample' in k or 'deconv' in k))])
return opt, loss
# If I run the network with large images (1024x2048) it crashes due to memory
# constraints on a 12Gb titan X.
# See https://github.com/tensorflow/tensorflow/issues/5816#issuecomment-268710077
# for a possible explanation. I fix it by adding run_after in the section with
# the correlation layer so that 441 large tensors are not allocated at the same time
    def run_after(self, a_tensor, b_tensor):
        """Force a to run after b.

        Adds a control dependency from b_tensor's op to a_tensor's op so
        the executor cannot schedule a_tensor's op before b_tensor's op
        has run; used in get_blobs to keep the 441 correlation tensors
        from all being allocated at the same time (see comment above).
        """
        ge.reroute.add_control_inputs(a_tensor.op, [b_tensor.op])
# without epsilon I get nan-errors when I backpropagate
def l2_norm(self, x):
return tf.sqrt(tf.maximum(1e-5, tf.reduce_sum(x**2, axis=3, keep_dims=True)))
def get_blobs(self, im0, im1):
blobs = dict()
batch_size = tf.to_int32(tf.shape(im0)[0])
width = tf.to_int32(tf.shape(im0)[2])
height = tf.to_int32(tf.shape(im0)[1])
TARGET_WIDTH = width
TARGET_HEIGHT = height
divisor = 64.
ADAPTED_WIDTH = tf.to_int32(tf.ceil(tf.to_float(width)/divisor) * divisor)
ADAPTED_HEIGHT = tf.to_int32(tf.ceil(tf.to_float(height)/divisor) * divisor)
SCALE_WIDTH = tf.to_float(width) / tf.to_float(ADAPTED_WIDTH);
SCALE_HEIGHT = tf.to_float(height) / tf.to_float(ADAPTED_HEIGHT);
blobs['img0'] = im0
blobs['img1'] = im1
blobs['img0s'] = blobs['img0']*0.00392156862745098
blobs['img1s'] = blobs['img1']*0.00392156862745098
#mean = np.array([0.411451, 0.432060, 0.450141])
mean = np.array([0.37655231, 0.39534855, 0.40119368])
blobs['img0_nomean'] = blobs['img0s'] - mean
blobs['img1_nomean'] = blobs['img1s'] - mean
blobs['img0_nomean_resize'] = tf.image.resize_bilinear(blobs['img0_nomean'], size=[ADAPTED_HEIGHT, ADAPTED_WIDTH], align_corners=True)
blobs['img1_nomean_resize'] = tf.image.resize_bilinear(blobs['img1_nomean'], size=[ADAPTED_HEIGHT, ADAPTED_WIDTH], align_corners=True)
blobs['conv1a'] = tf.pad(blobs['img0_nomean_resize'], [[0,0], [3,3], [3,3], [0,0]])
blobs['conv1a'] = tf.nn.conv2d(blobs['conv1a'], self.weights['conv1_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv1_b']
blobs['conv1a'] = self.leaky_relu(blobs['conv1a'], 0.1)
blobs['conv1b'] = tf.pad(blobs['img1_nomean_resize'], [[0,0], [3,3], [3,3], [0,0]])
blobs['conv1b'] = tf.nn.conv2d(blobs['conv1b'], self.weights['conv1_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv1_b']
blobs['conv1b'] = self.leaky_relu(blobs['conv1b'], 0.1)
blobs['conv2a'] = tf.pad(blobs['conv1a'], [[0,0], [2,2], [2,2], [0,0]])
blobs['conv2a'] = tf.nn.conv2d(blobs['conv2a'], self.weights['conv2_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv2_b']
blobs['conv2a'] = self.leaky_relu(blobs['conv2a'], 0.1)
blobs['conv2b'] = tf.pad(blobs['conv1b'], [[0,0], [2,2], [2,2], [0,0]])
blobs['conv2b'] = tf.nn.conv2d(blobs['conv2b'], self.weights['conv2_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv2_b']
blobs['conv2b'] = self.leaky_relu(blobs['conv2b'], 0.1)
blobs['conv3a'] = tf.pad(blobs['conv2a'], [[0,0], [2,2], [2,2], [0,0]])
blobs['conv3a'] = tf.nn.conv2d(blobs['conv3a'], self.weights['conv3_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv3_b']
blobs['conv3a'] = self.leaky_relu(blobs['conv3a'], 0.1)
blobs['conv3b'] = tf.pad(blobs['conv2b'], [[0,0], [2,2], [2,2], [0,0]])
blobs['conv3b'] = tf.nn.conv2d(blobs['conv3b'], self.weights['conv3_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv3_b']
blobs['conv3b'] = self.leaky_relu(blobs['conv3b'], 0.1)
# this might be considered a bit hacky
tmp = []
x1_l = []
x2_l = []
for di in range(-20, 21, 2):
for dj in range(-20, 21, 2):
x1 = tf.pad(blobs['conv3a'], [[0,0], [20,20], [20,20], [0,0]])
x2 = tf.pad(blobs['conv3b'], [[0,0], [20-di,20+di], [20-dj,20+dj], [0,0]])
x1_l.append(x1)
x2_l.append(x2)
c = tf.nn.conv2d(x1*x2, tf.ones([1, 1, 256, 1])/256., strides=[1,1,1,1], padding='VALID')
tmp.append(c[:,20:-20,20:-20,:])
for i in range(len(tmp)-1):
#self.run_after(tmp[i], tmp[i+1])
self.run_after(x1_l[i], tmp[i+1])
self.run_after(x2_l[i], tmp[i+1])
blobs['corr'] = tf.concat(tmp, axis=3)
blobs['corr'] = self.leaky_relu(blobs['corr'], 0.1)
blobs['conv_redir'] = tf.nn.conv2d(blobs['conv3a'], self.weights['conv_redir_w'], strides=[1,1,1,1], padding="VALID") + self.weights['conv_redir_b']
blobs['conv_redir'] = self.leaky_relu(blobs['conv_redir'], 0.1)
blobs['blob16'] = tf.concat([blobs['conv_redir'], blobs['corr']], axis=3)
blobs['conv3_1'] = tf.nn.conv2d(blobs['blob16'], self.weights['conv3_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['conv3_1_b']
blobs['conv3_1'] = self.leaky_relu(blobs['conv3_1'], 0.1)
blobs['conv4'] = tf.pad(blobs['conv3_1'], [[0,0], [1,1], [1,1], [0,0]])
blobs['conv4'] = tf.nn.conv2d(blobs['conv4'], self.weights['conv4_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv4_b']
blobs['conv4'] = self.leaky_relu(blobs['conv4'], 0.1)
blobs['conv4_1'] = tf.nn.conv2d(blobs['conv4'], self.weights['conv4_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['conv4_1_b']
blobs['conv4_1'] = self.leaky_relu(blobs['conv4_1'], 0.1)
blobs['conv5'] = tf.pad(blobs['conv4_1'], [[0,0], [1,1], [1,1], [0,0]])
blobs['conv5'] = tf.nn.conv2d(blobs['conv5'], self.weights['conv5_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv5_b']
blobs['conv5'] = self.leaky_relu(blobs['conv5'], 0.1)
blobs['conv5_1'] = tf.nn.conv2d(blobs['conv5'], self.weights['conv5_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['conv5_1_b']
blobs['conv5_1'] = self.leaky_relu(blobs['conv5_1'], 0.1)
blobs['conv6'] = tf.pad(blobs['conv5_1'], [[0,0], [1,1], [1,1], [0,0]])
blobs['conv6'] = tf.nn.conv2d(blobs['conv6'], self.weights['conv6_w'], strides=[1,2,2,1], padding="VALID") + self.weights['conv6_b']
blobs['conv6'] = self.leaky_relu(blobs['conv6'], 0.1)
blobs['conv6_1'] = tf.nn.conv2d(blobs['conv6'], self.weights['conv6_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['conv6_1_b']
blobs['conv6_1'] = self.leaky_relu(blobs['conv6_1'], 0.1)
blobs['predict_flow6'] = tf.nn.conv2d(blobs['conv6_1'], self.weights['Convolution1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['Convolution1_b']
blobs['deconv5'] = tf.nn.conv2d_transpose(blobs['conv6_1'], self.weights['deconv5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 512], strides=[1,2,2,1]) + self.weights['deconv5_b']
blobs['deconv5'] = self.leaky_relu(blobs['deconv5'], 0.1)
blobs['upsampled_flow6_to_5'] = tf.nn.conv2d_transpose(blobs['predict_flow6'], self.weights['upsample_flow6to5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 2], strides=[1,2,2,1]) + self.weights['upsample_flow6to5_b']
blobs['concat5'] = tf.concat([blobs['conv5_1'], blobs['deconv5'], blobs['upsampled_flow6_to_5']], axis=3)
blobs['predict_flow5'] = tf.pad(blobs['concat5'], [[0,0], [1,1], [1,1], [0,0]])
blobs['predict_flow5'] = tf.nn.conv2d(blobs['predict_flow5'], self.weights['Convolution2_w'], strides=[1,1,1,1], padding="VALID") + self.weights['Convolution2_b']
blobs['deconv4'] = tf.nn.conv2d_transpose(blobs['concat5'], self.weights['deconv4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 256], strides=[1,2,2,1]) + self.weights['deconv4_b']
blobs['deconv4'] = self.leaky_relu(blobs['deconv4'], 0.1)
blobs['upsampled_flow5_to_4'] = tf.nn.conv2d_transpose(blobs['predict_flow5'], self.weights['upsample_flow5to4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 2], strides=[1,2,2,1]) + self.weights['upsample_flow5to4_b']
blobs['concat4'] = tf.concat([blobs['conv4_1'], blobs['deconv4'], blobs['upsampled_flow5_to_4']], axis=3)
blobs['predict_flow4'] = tf.nn.conv2d(blobs['concat4'], self.weights['Convolution3_w'], strides=[1,1,1,1], padding="SAME") + self.weights['Convolution3_b']
blobs['deconv3'] = tf.nn.conv2d_transpose(blobs['concat4'], self.weights['deconv3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 128], strides=[1,2,2,1]) + self.weights['deconv3_b']
blobs['deconv3'] = self.leaky_relu(blobs['deconv3'], 0.1)
blobs['upsampled_flow4_to_3'] = tf.nn.conv2d_transpose(blobs['predict_flow4'], self.weights['upsample_flow4to3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 2], strides=[1,2,2,1]) + self.weights['upsample_flow4to3_b']
blobs['concat3'] = tf.concat([blobs['conv3_1'], blobs['deconv3'], blobs['upsampled_flow4_to_3']], axis=3)
blobs['predict_flow3'] = tf.nn.conv2d(blobs['concat3'], self.weights['Convolution4_w'], strides=[1,1,1,1], padding="SAME") + self.weights['Convolution4_b']
blobs['deconv2'] = tf.nn.conv2d_transpose(blobs['concat3'], self.weights['deconv2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 64], strides=[1,2,2,1]) + self.weights['deconv2_b']
blobs['deconv2'] = self.leaky_relu(blobs['deconv2'], 0.1)
blobs['upsampled_flow3_to_2'] = tf.nn.conv2d_transpose(blobs['predict_flow3'], self.weights['upsample_flow3to2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 2], strides=[1,2,2,1]) + self.weights['upsample_flow3to2_b']
blobs['concat2'] = tf.concat([blobs['conv2a'], blobs['deconv2'], blobs['upsampled_flow3_to_2']], axis=3)
blobs['predict_flow2'] = tf.nn.conv2d(blobs['concat2'], self.weights['Convolution5_w'], strides=[1,1,1,1], padding="SAME") + self.weights['Convolution5_b']
blobs['blob41'] = blobs['predict_flow2'] * 20.
blobs['blob42'] = tf.image.resize_bilinear(blobs['blob41'], size=[ADAPTED_HEIGHT, ADAPTED_WIDTH], align_corners=True)
blobs['blob43'] = self.warp(blobs['img1_nomean_resize'], blobs['blob42'])
blobs['blob44'] = blobs['img0_nomean_resize'] - blobs['blob43']
#blobs['blob45'] = tf.sqrt(1e-8+tf.reduce_sum(blobs['blob44']**2, axis=3, keep_dims=True))
blobs['blob45'] = self.l2_norm(blobs['blob44'])
blobs['blob46'] = 0.05*blobs['blob42']
blobs['blob47'] = tf.concat([blobs['img0_nomean_resize'], blobs['img1_nomean_resize'], blobs['blob43'], blobs['blob46'], blobs['blob45']], axis=3)
####################################################################################
####################################################################################
####################################################################################
###################### END OF THE FIRST BRANCH #####################################
####################################################################################
####################################################################################
####################################################################################
blobs['blob48'] = tf.pad(blobs['blob47'], [[0,0], [3,3], [3,3], [0,0]])
blobs['blob48'] = tf.nn.conv2d(blobs['blob48'], self.weights['net2_conv1_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net2_conv1_b']
blobs['blob48'] = self.leaky_relu(blobs['blob48'], 0.1)
blobs['blob49'] = tf.pad(blobs['blob48'], [[0,0], [2,2], [2, 2], [0,0]])
blobs['blob49'] = tf.nn.conv2d(blobs['blob49'], self.weights['net2_conv2_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net2_conv2_b']
blobs['blob49'] = self.leaky_relu(blobs['blob49'], 0.1)
blobs['blob50'] = tf.pad(blobs['blob49'], [[0,0], [2,2], [2,2], [0,0]])
blobs['blob50'] = tf.nn.conv2d(blobs['blob50'], self.weights['net2_conv3_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net2_conv3_b']
blobs['blob50'] = self.leaky_relu(blobs['blob50'], 0.1)
blobs['blob51'] = tf.nn.conv2d(blobs['blob50'], self.weights['net2_conv3_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_conv3_1_b']
blobs['blob51'] = self.leaky_relu(blobs['blob51'], 0.1)
blobs['blob52'] = tf.pad(blobs['blob51'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob52'] = tf.nn.conv2d(blobs['blob52'], self.weights['net2_conv4_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net2_conv4_b']
blobs['blob52'] = self.leaky_relu(blobs['blob52'], 0.1)
blobs['blob53'] = tf.nn.conv2d(blobs['blob52'], self.weights['net2_conv4_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_conv4_1_b']
blobs['blob53'] = self.leaky_relu(blobs['blob53'], 0.1)
blobs['blob54'] = tf.pad(blobs['blob53'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob54'] = tf.nn.conv2d(blobs['blob54'], self.weights['net2_conv5_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net2_conv5_b']
blobs['blob54'] = self.leaky_relu(blobs['blob54'], 0.1)
blobs['blob55'] = tf.nn.conv2d(blobs['blob54'], self.weights['net2_conv5_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_conv5_1_b']
blobs['blob55'] = self.leaky_relu(blobs['blob55'], 0.1)
blobs['blob56'] = tf.pad(blobs['blob55'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob56'] = tf.nn.conv2d(blobs['blob56'], self.weights['net2_conv6_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net2_conv6_b']
blobs['blob56'] = self.leaky_relu(blobs['blob56'], 0.1)
blobs['blob57'] = tf.nn.conv2d(blobs['blob56'], self.weights['net2_conv6_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_conv6_1_b']
blobs['blob57'] = self.leaky_relu(blobs['blob57'], 0.1)
blobs['blob58'] = tf.nn.conv2d(blobs['blob57'], self.weights['net2_predict_conv6_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_predict_conv6_b']
blobs['blob59'] = tf.nn.conv2d_transpose(blobs['blob57'], self.weights['net2_deconv5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 512], strides=[1,2,2,1]) + self.weights['net2_deconv5_b']
blobs['blob59'] = self.leaky_relu(blobs['blob59'], 0.1)
blobs['blob60'] = tf.nn.conv2d_transpose(blobs['predict_flow6'], self.weights['net2_net2_upsample_flow6to5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 2], strides=[1,2,2,1]) + self.weights['net2_net2_upsample_flow6to5_b']
blobs['blob61'] = tf.concat([blobs['blob55'], blobs['blob59'], blobs['blob60']], axis=3)
blobs['blob62'] = tf.nn.conv2d(blobs['blob61'], self.weights['net2_predict_conv5_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_predict_conv5_b']
blobs['blob63'] = tf.nn.conv2d_transpose(blobs['blob61'], self.weights['net2_deconv4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 256], strides=[1,2,2,1]) + self.weights['net2_deconv4_b']
blobs['blob63'] = self.leaky_relu(blobs['blob63'], 0.1)
blobs['blob64'] = tf.nn.conv2d_transpose(blobs['blob62'], self.weights['net2_net2_upsample_flow5to4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 2], strides=[1,2,2,1]) + self.weights['net2_net2_upsample_flow5to4_b']
blobs['blob65'] = tf.concat([blobs['blob53'], blobs['blob63'], blobs['blob64']], axis=3)
blobs['blob66'] = tf.nn.conv2d(blobs['blob65'], self.weights['net2_predict_conv4_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_predict_conv4_b']
blobs['blob67'] = tf.nn.conv2d_transpose(blobs['blob65'], self.weights['net2_deconv3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 128], strides=[1,2,2,1]) + self.weights['net2_deconv3_b']
blobs['blob67'] = self.leaky_relu(blobs['blob67'], 0.1)
blobs['blob68'] = tf.nn.conv2d_transpose(blobs['blob66'], self.weights['net2_net2_upsample_flow4to3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 2], strides=[1,2,2,1]) + self.weights['net2_net2_upsample_flow4to3_b']
blobs['blob69'] = tf.concat([blobs['blob51'], blobs['blob67'], blobs['blob68']], axis=3)
blobs['blob70'] = tf.nn.conv2d(blobs['blob69'], self.weights['net2_predict_conv3_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_predict_conv3_b']
blobs['blob71'] = tf.nn.conv2d_transpose(blobs['blob69'], self.weights['net2_deconv2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 64], strides=[1,2,2,1]) + self.weights['net2_deconv2_b']
blobs['blob71'] = self.leaky_relu(blobs['blob71'], 0.1)
blobs['blob72'] = tf.nn.conv2d_transpose(blobs['blob70'], self.weights['net2_net2_upsample_flow3to2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 2], strides=[1,2,2,1]) + self.weights['net2_net2_upsample_flow3to2_b']
blobs['blob73'] = tf.concat([blobs['blob49'], blobs['blob71'], blobs['blob72']], axis=3)
blobs['blob74'] = tf.nn.conv2d(blobs['blob73'], self.weights['net2_predict_conv2_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net2_predict_conv2_b']
blobs['blob75'] = blobs['blob74'] * 20.
blobs['blob76'] = tf.image.resize_bilinear(blobs['blob75'], size=[ADAPTED_HEIGHT, ADAPTED_WIDTH], align_corners=True)
blobs['blob77'] = self.warp(blobs['img1_nomean_resize'], blobs['blob76'])
blobs['blob78'] = blobs['img0_nomean_resize'] - blobs['blob77']
#blobs['blob79'] = tf.sqrt(1e-8+tf.reduce_sum(blobs['blob78']**2, axis=3, keep_dims=True))
blobs['blob79'] = self.l2_norm(blobs['blob78'])
blobs['blob80'] = 0.05*blobs['blob76']
blobs['blob81'] = tf.concat([blobs['img0_nomean_resize'], blobs['img1_nomean_resize'], blobs['blob77'], blobs['blob80'], blobs['blob79']], axis=3)
####################################################################################
####################################################################################
####################################################################################
###################### END OF THE SECOND BRANCH ####################################
####################################################################################
####################################################################################
####################################################################################
blobs['blob82'] = tf.pad(blobs['blob81'], [[0,0], [3,3], [3,3], [0,0]])
blobs['blob82'] = tf.nn.conv2d(blobs['blob82'], self.weights['net3_conv1_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net3_conv1_b']
blobs['blob82'] = self.leaky_relu(blobs['blob82'], 0.1)
blobs['blob83'] = tf.pad(blobs['blob82'], [[0,0], [2,2], [2, 2], [0,0]])
blobs['blob83'] = tf.nn.conv2d(blobs['blob83'], self.weights['net3_conv2_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net3_conv2_b']
blobs['blob83'] = self.leaky_relu(blobs['blob83'], 0.1)
blobs['blob84'] = tf.pad(blobs['blob83'], [[0,0], [2,2], [2,2], [0,0]])
blobs['blob84'] = tf.nn.conv2d(blobs['blob84'], self.weights['net3_conv3_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net3_conv3_b']
blobs['blob84'] = self.leaky_relu(blobs['blob84'], 0.1)
blobs['blob85'] = tf.nn.conv2d(blobs['blob84'], self.weights['net3_conv3_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_conv3_1_b']
blobs['blob85'] = self.leaky_relu(blobs['blob85'], 0.1)
blobs['blob86'] = tf.pad(blobs['blob85'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob86'] = tf.nn.conv2d(blobs['blob86'], self.weights['net3_conv4_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net3_conv4_b']
blobs['blob86'] = self.leaky_relu(blobs['blob86'], 0.1)
blobs['blob87'] = tf.nn.conv2d(blobs['blob86'], self.weights['net3_conv4_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_conv4_1_b']
blobs['blob87'] = self.leaky_relu(blobs['blob87'], 0.1)
blobs['blob88'] = tf.pad(blobs['blob87'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob88'] = tf.nn.conv2d(blobs['blob88'], self.weights['net3_conv5_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net3_conv5_b']
blobs['blob88'] = self.leaky_relu(blobs['blob88'], 0.1)
blobs['blob89'] = tf.nn.conv2d(blobs['blob88'], self.weights['net3_conv5_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_conv5_1_b']
blobs['blob89'] = self.leaky_relu(blobs['blob89'], 0.1)
blobs['blob90'] = tf.pad(blobs['blob89'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob90'] = tf.nn.conv2d(blobs['blob90'], self.weights['net3_conv6_w'], strides=[1,2,2,1], padding="VALID") + self.weights['net3_conv6_b']
blobs['blob90'] = self.leaky_relu(blobs['blob90'], 0.1)
blobs['blob91'] = tf.nn.conv2d(blobs['blob90'], self.weights['net3_conv6_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_conv6_1_b']
blobs['blob91'] = self.leaky_relu(blobs['blob91'], 0.1)
blobs['blob92'] = tf.nn.conv2d(blobs['blob91'], self.weights['net3_predict_conv6_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_predict_conv6_b']
blobs['blob93'] = tf.nn.conv2d_transpose(blobs['blob91'], self.weights['net3_deconv5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 512], strides=[1,2,2,1]) + self.weights['net3_deconv5_b']
blobs['blob93'] = self.leaky_relu(blobs['blob93'], 0.1)
blobs['blob94'] = tf.nn.conv2d_transpose(blobs['blob92'], self.weights['net3_net3_upsample_flow6to5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 2], strides=[1,2,2,1]) + self.weights['net3_net3_upsample_flow6to5_b']
blobs['blob95'] = tf.concat([blobs['blob89'], blobs['blob93'], blobs['blob94']], axis=3)
blobs['blob96'] = tf.nn.conv2d(blobs['blob95'], self.weights['net3_predict_conv5_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_predict_conv5_b']
blobs['blob97'] = tf.nn.conv2d_transpose(blobs['blob95'], self.weights['net3_deconv4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 256], strides=[1,2,2,1]) + self.weights['net3_deconv4_b']
blobs['blob97'] = self.leaky_relu(blobs['blob97'], 0.1)
blobs['blob98'] = tf.nn.conv2d_transpose(blobs['blob96'], self.weights['net3_net3_upsample_flow5to4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 2], strides=[1,2,2,1]) + self.weights['net3_net3_upsample_flow5to4_b']
blobs['blob99'] = tf.concat([blobs['blob87'], blobs['blob97'], blobs['blob98']], axis=3)
blobs['blob100'] = tf.nn.conv2d(blobs['blob99'], self.weights['net3_predict_conv4_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_predict_conv4_b']
blobs['blob101'] = tf.nn.conv2d_transpose(blobs['blob99'], self.weights['net3_deconv3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 128], strides=[1,2,2,1]) + self.weights['net3_deconv3_b']
blobs['blob101'] = self.leaky_relu(blobs['blob101'], 0.1)
blobs['blob102'] = tf.nn.conv2d_transpose(blobs['blob100'], self.weights['net3_net3_upsample_flow4to3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 2], strides=[1,2,2,1]) + self.weights['net3_net3_upsample_flow4to3_b']
blobs['blob103'] = tf.concat([blobs['blob85'], blobs['blob101'], blobs['blob102']], axis=3)
blobs['blob104'] = tf.nn.conv2d(blobs['blob103'], self.weights['net3_predict_conv3_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_predict_conv3_b']
blobs['blob105'] = tf.nn.conv2d_transpose(blobs['blob103'], self.weights['net3_deconv2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 64], strides=[1,2,2,1]) + self.weights['net3_deconv2_b']
blobs['blob105'] = self.leaky_relu(blobs['blob105'], 0.1)
blobs['blob106'] = tf.nn.conv2d_transpose(blobs['blob104'], self.weights['net3_net3_upsample_flow3to2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 2], strides=[1,2,2,1]) + self.weights['net3_net3_upsample_flow3to2_b']
blobs['blob107'] = tf.concat([blobs['blob83'], blobs['blob105'], blobs['blob106']], axis=3)
blobs['blob108'] = tf.nn.conv2d(blobs['blob107'], self.weights['net3_predict_conv2_w'], strides=[1,1,1,1], padding="SAME") + self.weights['net3_predict_conv2_b']
blobs['blob109'] = blobs['blob108'] * 20.
####################################################################################
####################################################################################
####################################################################################
###################### END OF THE THIRD BRANCH ####################################
####################################################################################
####################################################################################
####################################################################################
# ---- 'netsd_' branch (small-displacement network) ----
# Input: both mean-subtracted frames stacked along channels (3+3 = 6,
# matching netsd_conv0_w's 6 input channels).
blobs['blob110'] = tf.concat([blobs['img0_nomean_resize'], blobs['img1_nomean_resize']], axis=3)
#self.run_after(blobs['blob110'], blobs['blob109'])
# Encoder: conv0 at full resolution, then pairs (convN stride-2 downsample,
# convN_1 stride-1 refine) down to 1/64 resolution. Stride-2 convs use an
# explicit symmetric 1-pixel pad + VALID padding; every conv is followed by
# leaky ReLU (slope 0.1).
blobs['blob111'] = tf.nn.conv2d(blobs['blob110'], self.weights['netsd_conv0_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_conv0_b']
blobs['blob111'] = self.leaky_relu(blobs['blob111'], 0.1)
blobs['blob112'] = tf.pad(blobs['blob111'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob112'] = tf.nn.conv2d(blobs['blob112'], self.weights['netsd_conv1_w'], strides=[1,2,2,1], padding="VALID") + self.weights['netsd_conv1_b']
blobs['blob112'] = self.leaky_relu(blobs['blob112'], 0.1)
blobs['blob113'] = tf.nn.conv2d(blobs['blob112'], self.weights['netsd_conv1_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_conv1_1_b']
blobs['blob113'] = self.leaky_relu(blobs['blob113'], 0.1)
blobs['blob114'] = tf.pad(blobs['blob113'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob114'] = tf.nn.conv2d(blobs['blob114'], self.weights['netsd_conv2_w'], strides=[1,2,2,1], padding="VALID") + self.weights['netsd_conv2_b']
blobs['blob114'] = self.leaky_relu(blobs['blob114'], 0.1)
blobs['blob115'] = tf.nn.conv2d(blobs['blob114'], self.weights['netsd_conv2_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_conv2_1_b']
blobs['blob115'] = self.leaky_relu(blobs['blob115'], 0.1)
blobs['blob116'] = tf.pad(blobs['blob115'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob116'] = tf.nn.conv2d(blobs['blob116'], self.weights['netsd_conv3_w'], strides=[1,2,2,1], padding="VALID") + self.weights['netsd_conv3_b']
blobs['blob116'] = self.leaky_relu(blobs['blob116'], 0.1)
blobs['blob117'] = tf.nn.conv2d(blobs['blob116'], self.weights['netsd_conv3_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_conv3_1_b']
blobs['blob117'] = self.leaky_relu(blobs['blob117'], 0.1)
blobs['blob118'] = tf.pad(blobs['blob117'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob118'] = tf.nn.conv2d(blobs['blob118'], self.weights['netsd_conv4_w'], strides=[1,2,2,1], padding="VALID") + self.weights['netsd_conv4_b']
blobs['blob118'] = self.leaky_relu(blobs['blob118'], 0.1)
blobs['blob119'] = tf.nn.conv2d(blobs['blob118'], self.weights['netsd_conv4_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_conv4_1_b']
blobs['blob119'] = self.leaky_relu(blobs['blob119'], 0.1)
blobs['blob120'] = tf.pad(blobs['blob119'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob120'] = tf.nn.conv2d(blobs['blob120'], self.weights['netsd_conv5_w'], strides=[1,2,2,1], padding="VALID") + self.weights['netsd_conv5_b']
blobs['blob120'] = self.leaky_relu(blobs['blob120'], 0.1)
blobs['blob121'] = tf.nn.conv2d(blobs['blob120'], self.weights['netsd_conv5_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_conv5_1_b']
blobs['blob121'] = self.leaky_relu(blobs['blob121'], 0.1)
blobs['blob122'] = tf.pad(blobs['blob121'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob122'] = tf.nn.conv2d(blobs['blob122'], self.weights['netsd_conv6_w'], strides=[1,2,2,1], padding="VALID") + self.weights['netsd_conv6_b']
blobs['blob122'] = self.leaky_relu(blobs['blob122'], 0.1)
blobs['blob123'] = tf.nn.conv2d(blobs['blob122'], self.weights['netsd_conv6_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_conv6_1_b']
blobs['blob123'] = self.leaky_relu(blobs['blob123'], 0.1)
# Coarsest flow prediction of this branch (2 channels).
blobs['blob124'] = tf.nn.conv2d(blobs['blob123'], self.weights['netsd_Convolution1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_Convolution1_b']
# Decoder: at each scale, deconv the features and upsample the flow, concat
# with the encoder skip, run an 'interconv', and predict flow again.
blobs['blob125'] = tf.nn.conv2d_transpose(blobs['blob123'], self.weights['netsd_deconv5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 512], strides=[1,2,2,1]) + self.weights['netsd_deconv5_b']
blobs['blob125'] = self.leaky_relu(blobs['blob125'], 0.1)
blobs['blob126'] = tf.nn.conv2d_transpose(blobs['blob124'], self.weights['netsd_upsample_flow6to5_w'], output_shape=[batch_size, ADAPTED_HEIGHT/32, ADAPTED_WIDTH/32, 2], strides=[1,2,2,1]) + self.weights['netsd_upsample_flow6to5_b']
blobs['blob127'] = tf.concat([blobs['blob121'], blobs['blob125'], blobs['blob126']], axis=3)
blobs['blob128'] = tf.nn.conv2d(blobs['blob127'], self.weights['netsd_interconv5_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_interconv5_b']
blobs['blob129'] = tf.nn.conv2d(blobs['blob128'], self.weights['netsd_Convolution2_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_Convolution2_b']
blobs['blob130'] = tf.nn.conv2d_transpose(blobs['blob127'], self.weights['netsd_deconv4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 256], strides=[1,2,2,1]) + self.weights['netsd_deconv4_b']
blobs['blob130'] = self.leaky_relu(blobs['blob130'], 0.1)
blobs['blob131'] = tf.nn.conv2d_transpose(blobs['blob129'], self.weights['netsd_upsample_flow5to4_w'], output_shape=[batch_size, ADAPTED_HEIGHT/16, ADAPTED_WIDTH/16, 2], strides=[1,2,2,1]) + self.weights['netsd_upsample_flow5to4_b']
blobs['blob132'] = tf.concat([blobs['blob119'], blobs['blob130'], blobs['blob131']], axis=3)
blobs['blob133'] = tf.nn.conv2d(blobs['blob132'], self.weights['netsd_interconv4_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_interconv4_b']
blobs['blob134'] = tf.nn.conv2d(blobs['blob133'], self.weights['netsd_Convolution3_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_Convolution3_b']
blobs['blob135'] = tf.nn.conv2d_transpose(blobs['blob132'], self.weights['netsd_deconv3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 128], strides=[1,2,2,1]) + self.weights['netsd_deconv3_b']
blobs['blob135'] = self.leaky_relu(blobs['blob135'], 0.1)
blobs['blob136'] = tf.nn.conv2d_transpose(blobs['blob134'], self.weights['netsd_upsample_flow4to3_w'], output_shape=[batch_size, ADAPTED_HEIGHT/8, ADAPTED_WIDTH/8, 2], strides=[1,2,2,1]) + self.weights['netsd_upsample_flow4to3_b']
blobs['blob137'] = tf.concat([blobs['blob117'], blobs['blob135'], blobs['blob136']], axis=3)
blobs['blob138'] = tf.nn.conv2d(blobs['blob137'], self.weights['netsd_interconv3_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_interconv3_b']
blobs['blob139'] = tf.nn.conv2d(blobs['blob138'], self.weights['netsd_Convolution4_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_Convolution4_b']
blobs['blob140'] = tf.nn.conv2d_transpose(blobs['blob137'], self.weights['netsd_deconv2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 64], strides=[1,2,2,1]) + self.weights['netsd_deconv2_b']
blobs['blob140'] = self.leaky_relu(blobs['blob140'], 0.1)
blobs['blob141'] = tf.nn.conv2d_transpose(blobs['blob139'], self.weights['netsd_upsample_flow3to2_w'], output_shape=[batch_size, ADAPTED_HEIGHT/4, ADAPTED_WIDTH/4, 2], strides=[1,2,2,1]) + self.weights['netsd_upsample_flow3to2_b']
blobs['blob142'] = tf.concat([blobs['blob115'], blobs['blob140'], blobs['blob141']], axis=3)
blobs['blob143'] = tf.nn.conv2d(blobs['blob142'], self.weights['netsd_interconv2_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_interconv2_b']
blobs['blob144'] = tf.nn.conv2d(blobs['blob143'], self.weights['netsd_Convolution5_w'], strides=[1,1,1,1], padding="SAME") + self.weights['netsd_Convolution5_b']
# Scale this branch's flow by 0.05 (presumably a branch-specific flow
# normalization constant -- confirm against the training setup).
blobs['blob145'] = 0.05*blobs['blob144']
# ---- Build the fusion network's input features ----
# Bring both candidate flows (netsd branch: blob145; net3 branch: blob109)
# up to the adapted input resolution.
blobs['blob146'] = tf.image.resize_nearest_neighbor(blobs['blob145'], size=[ADAPTED_HEIGHT, ADAPTED_WIDTH], align_corners=False)
blobs['blob147'] = tf.image.resize_nearest_neighbor(blobs['blob109'], size=[ADAPTED_HEIGHT, ADAPTED_WIDTH], align_corners=False)
#blobs['blob148'] = tf.sqrt(1e-8+tf.reduce_sum(blobs['blob146']**2, axis=3, keep_dims=True))
# Per-pixel magnitude of each candidate flow (see commented-out formula
# above for what self.l2_norm computes).
blobs['blob148'] = self.l2_norm(blobs['blob146'])
#blobs['blob149'] = tf.sqrt(1e-8+tf.reduce_sum(blobs['blob147']**2, axis=3, keep_dims=True))
blobs['blob149'] = self.l2_norm(blobs['blob147'])
# Brightness error of the netsd flow: warp frame 1 backward by the flow and
# take the per-pixel magnitude of the difference with frame 0.
blobs['blob150'] = self.warp(blobs['img1_nomean_resize'], blobs['blob146'])
blobs['blob151'] = blobs['img0_nomean_resize'] - blobs['blob150']
#blobs['blob152'] = tf.sqrt(1e-8+tf.reduce_sum(blobs['blob151']**2, axis=3, keep_dims=True))
blobs['blob152'] = self.l2_norm(blobs['blob151'])
# Same brightness error for the net3 flow.
blobs['blob153'] = self.warp(blobs['img1_nomean_resize'], blobs['blob147'])
blobs['blob154'] = blobs['img0_nomean_resize'] - blobs['blob153']
#blobs['blob155'] = tf.sqrt(1e-8+tf.reduce_sum(blobs['blob154']**2, axis=3, keep_dims=True))
blobs['blob155'] = self.l2_norm(blobs['blob154'])
# ---- 'fuse_' network: merge the two candidate flows into the final flow ----
# Input channels: image (3) + two flows (2+2) + their magnitudes (1+1) +
# their brightness errors (1+1) = 11, matching fuse_conv0_w's 11 inputs.
blobs['blob156'] = tf.concat([blobs['img0_nomean_resize'], blobs['blob146'], blobs['blob147'], blobs['blob148'], blobs['blob149'], blobs['blob152'], blobs['blob155']], axis=3)
# Small encoder: conv0 at full resolution, two stride-2 stages (explicit
# pad + VALID), each conv followed by leaky ReLU (slope 0.1).
blobs['blob157'] = tf.nn.conv2d(blobs['blob156'], self.weights['fuse_conv0_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse_conv0_b']
blobs['blob157'] = self.leaky_relu(blobs['blob157'], 0.1)
blobs['blob158'] = tf.pad(blobs['blob157'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob158'] = tf.nn.conv2d(blobs['blob158'], self.weights['fuse_conv1_w'], strides=[1,2,2,1], padding="VALID") + self.weights['fuse_conv1_b']
blobs['blob158'] = self.leaky_relu(blobs['blob158'], 0.1)
blobs['blob159'] = tf.nn.conv2d(blobs['blob158'], self.weights['fuse_conv1_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse_conv1_1_b']
blobs['blob159'] = self.leaky_relu(blobs['blob159'], 0.1)
blobs['blob160'] = tf.pad(blobs['blob159'], [[0,0], [1,1], [1,1], [0,0]])
blobs['blob160'] = tf.nn.conv2d(blobs['blob160'], self.weights['fuse_conv2_w'], strides=[1,2,2,1], padding="VALID") + self.weights['fuse_conv2_b']
blobs['blob160'] = self.leaky_relu(blobs['blob160'], 0.1)
blobs['blob161'] = tf.nn.conv2d(blobs['blob160'], self.weights['fuse_conv2_1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse_conv2_1_b']
blobs['blob161'] = self.leaky_relu(blobs['blob161'], 0.1)
# Coarse fused flow, then decode back to full resolution (same
# deconv/upsample-flow/concat/interconv pattern as the other branches).
blobs['blob162'] = tf.nn.conv2d(blobs['blob161'], self.weights['fuse__Convolution5_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse__Convolution5_b']
blobs['blob163'] = tf.nn.conv2d_transpose(blobs['blob161'], self.weights['fuse_deconv1_w'], output_shape=[batch_size, ADAPTED_HEIGHT/2, ADAPTED_WIDTH/2, 32], strides=[1,2,2,1]) + self.weights['fuse_deconv1_b']
blobs['blob163'] = self.leaky_relu(blobs['blob163'], 0.1)
blobs['blob164'] = tf.nn.conv2d_transpose(blobs['blob162'], self.weights['fuse_upsample_flow2to1_w'], output_shape=[batch_size, ADAPTED_HEIGHT/2, ADAPTED_WIDTH/2, 2], strides=[1,2,2,1]) + self.weights['fuse_upsample_flow2to1_b']
blobs['blob165'] = tf.concat([blobs['blob159'], blobs['blob163'], blobs['blob164']], axis=3)
blobs['blob166'] = tf.nn.conv2d(blobs['blob165'], self.weights['fuse_interconv1_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse_interconv1_b']
blobs['blob167'] = tf.nn.conv2d(blobs['blob166'], self.weights['fuse__Convolution6_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse__Convolution6_b']
blobs['blob168'] = tf.nn.conv2d_transpose(blobs['blob165'], self.weights['fuse_deconv0_w'], output_shape=[batch_size, ADAPTED_HEIGHT/1, ADAPTED_WIDTH/1, 16], strides=[1,2,2,1]) + self.weights['fuse_deconv0_b']
blobs['blob168'] = self.leaky_relu(blobs['blob168'], 0.1)
blobs['blob169'] = tf.nn.conv2d_transpose(blobs['blob167'], self.weights['fuse_upsample_flow1to0_w'], output_shape=[batch_size, ADAPTED_HEIGHT, ADAPTED_WIDTH, 2], strides=[1,2,2,1]) + self.weights['fuse_upsample_flow1to0_b']
blobs['blob170'] = tf.concat([blobs['blob157'], blobs['blob168'], blobs['blob169']], axis=3)
blobs['blob171'] = tf.nn.conv2d(blobs['blob170'], self.weights['fuse_interconv0_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse_interconv0_b']
# Final fused flow at adapted resolution.
blobs['blob172'] = tf.nn.conv2d(blobs['blob171'], self.weights['fuse__Convolution7_w'], strides=[1,1,1,1], padding="SAME") + self.weights['fuse__Convolution7_b']
# Resize to the target output size and rescale the (u, v) components to
# compensate for the adapted->target resolution change.
blobs['predict_flow_resize'] = tf.image.resize_bilinear(blobs['blob172'], size=[TARGET_HEIGHT, TARGET_WIDTH], align_corners=True)
scale = tf.stack([SCALE_WIDTH, SCALE_HEIGHT])
scale = tf.reshape(scale, [1,1,1,2])
blobs['predict_flow_final'] = scale*blobs['predict_flow_resize']
# Expose every intermediate tensor for inspection and return them.
self.blobs = blobs
return blobs
def all_variables(self):
    """Return the static (name, shape) catalogue of every model variable.

    The names correspond to the keys of ``self.weights`` used while building
    the graph: the unprefixed first branch, the ``net2_``/``net3_``
    refinement branches, the ``netsd_`` small-displacement branch, and the
    ``fuse_`` fusion network, plus augmentation helpers (``img0s_aug_*``,
    ``scale_conv1_*``). Convolution weights are used as ``tf.nn.conv2d`` /
    ``tf.nn.conv2d_transpose`` filters, so 4-tuples are filter shapes and
    1-tuples are bias shapes.

    Returns:
        list[tuple[str, tuple[int, ...]]]: 234 ``(variable_name, shape)``
        pairs, in a fixed (arbitrary) order.
    """
    return [('netsd_deconv5_w', (4, 4, 512, 1024)),
            ('netsd_conv1_b', (64,)),
            ('netsd_upsample_flow5to4_w', (4, 4, 2, 2)),
            ('conv2_b', (128,)),
            ('fuse__Convolution5_w', (3, 3, 128, 2)),
            ('netsd_conv4_1_w', (3, 3, 512, 512)),
            ('netsd_interconv3_w', (3, 3, 386, 128)),
            ('netsd_deconv4_w', (4, 4, 256, 1026)),
            ('deconv4_b', (256,)),
            ('fuse_interconv0_w', (3, 3, 82, 16)),
            ('netsd_Convolution2_b', (2,)),
            ('net3_conv4_b', (512,)),
            ('net3_conv3_b', (256,)),
            ('net3_predict_conv2_w', (3, 3, 194, 2)),
            ('net3_predict_conv3_b', (2,)),
            ('conv6_1_w', (3, 3, 1024, 1024)),
            ('fuse_upsample_flow2to1_b', (2,)),
            ('Convolution1_w', (3, 3, 1024, 2)),
            ('net3_deconv3_w', (4, 4, 128, 770)),
            ('net2_deconv3_b', (128,)),
            ('fuse_conv1_w', (3, 3, 64, 64)),
            ('conv5_w', (3, 3, 512, 512)),
            ('Convolution4_w', (3, 3, 386, 2)),
            ('fuse_conv0_b', (64,)),
            ('net2_conv3_w', (5, 5, 128, 256)),
            ('upsample_flow4to3_b', (2,)),
            ('netsd_conv4_1_b', (512,)),
            ('fuse_upsample_flow2to1_w', (4, 4, 2, 2)),
            ('netsd_conv4_b', (512,)),
            ('net2_net2_upsample_flow3to2_b', (2,)),
            ('net3_predict_conv4_b', (2,)),
            ('fuse_upsample_flow1to0_b', (2,)),
            ('conv4_1_w', (3, 3, 512, 512)),
            ('deconv2_b', (64,)),
            ('net2_conv4_1_w', (3, 3, 512, 512)),
            ('net3_deconv4_w', (4, 4, 256, 1026)),
            ('net2_deconv5_b', (512,)),
            ('netsd_deconv5_b', (512,)),
            ('net2_deconv2_b', (64,)),
            ('net3_conv2_b', (128,)),
            ('conv_redir_w', (1, 1, 256, 32)),
            ('fuse_conv1_1_b', (128,)),
            ('net2_deconv5_w', (4, 4, 512, 1024)),
            ('net2_conv5_b', (512,)),
            ('net2_conv4_w', (3, 3, 256, 512)),
            ('net2_predict_conv6_w', (3, 3, 1024, 2)),
            ('netsd_conv5_b', (512,)),
            ('deconv4_w', (4, 4, 256, 1026)),
            ('net2_net2_upsample_flow4to3_b', (2,)),
            ('fuse__Convolution6_w', (3, 3, 32, 2)),
            ('net3_deconv2_w', (4, 4, 64, 386)),
            ('net2_conv6_1_w', (3, 3, 1024, 1024)),
            ('netsd_conv0_b', (64,)),
            ('netsd_conv5_1_w', (3, 3, 512, 512)),
            ('net2_conv6_1_b', (1024,)),
            ('net3_conv2_w', (5, 5, 64, 128)),
            ('net3_predict_conv6_w', (3, 3, 1024, 2)),
            ('net3_conv4_1_b', (512,)),
            ('net3_net3_upsample_flow4to3_w', (4, 4, 2, 2)),
            ('net2_deconv2_w', (4, 4, 64, 386)),
            ('deconv3_b', (128,)),
            ('netsd_interconv5_b', (512,)),
            ('net2_conv3_1_w', (3, 3, 256, 256)),
            ('netsd_interconv4_w', (3, 3, 770, 256)),
            ('net3_deconv3_b', (128,)),
            ('fuse_conv0_w', (3, 3, 11, 64)),
            ('net3_predict_conv6_b', (2,)),
            ('fuse_upsample_flow1to0_w', (4, 4, 2, 2)),
            ('netsd_deconv3_b', (128,)),
            ('net3_predict_conv5_w', (3, 3, 1026, 2)),
            ('netsd_conv5_w', (3, 3, 512, 512)),
            ('netsd_interconv5_w', (3, 3, 1026, 512)),
            ('netsd_Convolution3_w', (3, 3, 256, 2)),
            ('net2_predict_conv4_w', (3, 3, 770, 2)),
            ('deconv2_w', (4, 4, 64, 386)),
            ('net3_predict_conv5_b', (2,)),
            ('fuse__Convolution5_b', (2,)),
            ('fuse__Convolution7_w', (3, 3, 16, 2)),
            ('net2_net2_upsample_flow6to5_w', (4, 4, 2, 2)),
            ('netsd_conv3_b', (256,)),
            ('net3_conv6_w', (3, 3, 512, 1024)),
            ('net3_conv1_b', (64,)),
            ('netsd_Convolution4_b', (2,)),
            ('net3_conv3_w', (5, 5, 128, 256)),
            ('netsd_conv0_w', (3, 3, 6, 64)),
            ('net2_conv4_b', (512,)),
            ('net2_predict_conv3_w', (3, 3, 386, 2)),
            ('net3_net3_upsample_flow3to2_w', (4, 4, 2, 2)),
            ('fuse_conv1_1_w', (3, 3, 64, 128)),
            ('deconv5_b', (512,)),
            ('fuse__Convolution7_b', (2,)),
            ('net3_conv6_1_w', (3, 3, 1024, 1024)),
            ('net3_net3_upsample_flow5to4_w', (4, 4, 2, 2)),
            ('net3_conv4_w', (3, 3, 256, 512)),
            ('upsample_flow5to4_w', (4, 4, 2, 2)),
            ('conv4_1_b', (512,)),
            ('img0s_aug_b', (320, 448, 3, 1)),
            ('conv5_1_b', (512,)),
            ('net3_conv4_1_w', (3, 3, 512, 512)),
            ('upsample_flow5to4_b', (2,)),
            ('net3_conv3_1_b', (256,)),
            ('Convolution1_b', (2,)),
            ('upsample_flow4to3_w', (4, 4, 2, 2)),
            ('conv5_1_w', (3, 3, 512, 512)),
            ('conv3_1_b', (256,)),
            ('conv3_w', (5, 5, 128, 256)),
            ('net2_conv2_b', (128,)),
            ('net3_net3_upsample_flow6to5_w', (4, 4, 2, 2)),
            ('upsample_flow3to2_b', (2,)),
            ('netsd_Convolution5_w', (3, 3, 64, 2)),
            ('netsd_interconv2_w', (3, 3, 194, 64)),
            ('net2_predict_conv6_b', (2,)),
            ('net2_deconv4_w', (4, 4, 256, 1026)),
            ('scale_conv1_b', (2,)),
            ('net2_net2_upsample_flow5to4_w', (4, 4, 2, 2)),
            ('netsd_conv2_b', (128,)),
            ('netsd_conv2_1_b', (128,)),
            ('netsd_upsample_flow6to5_w', (4, 4, 2, 2)),
            ('net2_predict_conv5_b', (2,)),
            ('net3_conv6_1_b', (1024,)),
            ('netsd_conv6_w', (3, 3, 512, 1024)),
            ('Convolution4_b', (2,)),
            ('net2_predict_conv4_b', (2,)),
            ('fuse_deconv1_b', (32,)),
            ('conv3_1_w', (3, 3, 473, 256)),
            ('net3_deconv2_b', (64,)),
            ('netsd_conv6_b', (1024,)),
            ('net2_conv5_1_w', (3, 3, 512, 512)),
            ('net3_conv5_1_w', (3, 3, 512, 512)),
            ('deconv5_w', (4, 4, 512, 1024)),
            ('fuse_conv2_b', (128,)),
            ('netsd_conv1_1_b', (128,)),
            ('netsd_upsample_flow6to5_b', (2,)),
            ('Convolution5_w', (3, 3, 194, 2)),
            ('scale_conv1_w', (1, 1, 2, 2)),
            ('net2_net2_upsample_flow5to4_b', (2,)),
            ('conv6_1_b', (1024,)),
            ('fuse_conv2_1_b', (128,)),
            ('netsd_Convolution5_b', (2,)),
            ('netsd_conv3_1_b', (256,)),
            ('conv2_w', (5, 5, 64, 128)),
            ('fuse_conv2_w', (3, 3, 128, 128)),
            ('net2_conv2_w', (5, 5, 64, 128)),
            ('conv3_b', (256,)),
            ('net3_deconv5_w', (4, 4, 512, 1024)),
            ('img1s_aug_w', (1, 1, 1, 1)),
            ('netsd_conv2_w', (3, 3, 128, 128)),
            ('conv6_w', (3, 3, 512, 1024)),
            ('netsd_conv4_w', (3, 3, 256, 512)),
            ('net2_conv1_w', (7, 7, 12, 64)),
            ('netsd_Convolution1_w', (3, 3, 1024, 2)),
            ('netsd_conv1_w', (3, 3, 64, 64)),
            ('netsd_deconv4_b', (256,)),
            ('conv4_w', (3, 3, 256, 512)),
            ('conv5_b', (512,)),
            ('net3_deconv5_b', (512,)),
            ('netsd_interconv3_b', (128,)),
            ('net3_conv3_1_w', (3, 3, 256, 256)),
            ('net2_predict_conv5_w', (3, 3, 1026, 2)),
            ('Convolution3_b', (2,)),
            ('netsd_conv5_1_b', (512,)),
            ('netsd_interconv4_b', (256,)),
            ('conv4_b', (512,)),
            ('net3_net3_upsample_flow6to5_b', (2,)),
            ('Convolution5_b', (2,)),
            ('fuse_conv2_1_w', (3, 3, 128, 128)),
            ('net3_net3_upsample_flow4to3_b', (2,)),
            ('conv1_w', (7, 7, 3, 64)),
            ('upsample_flow6to5_b', (2,)),
            ('conv6_b', (1024,)),
            ('netsd_upsample_flow3to2_w', (4, 4, 2, 2)),
            ('net2_deconv3_w', (4, 4, 128, 770)),
            ('netsd_conv2_1_w', (3, 3, 128, 128)),
            ('netsd_Convolution3_b', (2,)),
            ('netsd_upsample_flow4to3_w', (4, 4, 2, 2)),
            ('fuse_interconv1_w', (3, 3, 162, 32)),
            ('netsd_upsample_flow4to3_b', (2,)),
            ('netsd_conv3_1_w', (3, 3, 256, 256)),
            ('netsd_deconv3_w', (4, 4, 128, 770)),
            ('net3_conv5_b', (512,)),
            ('net3_conv5_1_b', (512,)),
            ('net2_net2_upsample_flow4to3_w', (4, 4, 2, 2)),
            ('net2_net2_upsample_flow3to2_w', (4, 4, 2, 2)),
            ('net2_conv3_b', (256,)),
            ('netsd_conv6_1_w', (3, 3, 1024, 1024)),
            ('fuse_deconv0_b', (16,)),
            ('net2_predict_conv2_w', (3, 3, 194, 2)),
            ('net2_conv1_b', (64,)),
            ('net2_conv6_b', (1024,)),
            ('net3_predict_conv2_b', (2,)),
            ('net2_conv4_1_b', (512,)),
            ('netsd_Convolution4_w', (3, 3, 128, 2)),
            ('deconv3_w', (4, 4, 128, 770)),
            ('fuse_deconv1_w', (4, 4, 32, 128)),
            ('netsd_Convolution2_w', (3, 3, 512, 2)),
            ('netsd_Convolution1_b', (2,)),
            ('net2_conv3_1_b', (256,)),
            ('fuse_conv1_b', (64,)),
            ('net2_deconv4_b', (256,)),
            ('net3_predict_conv4_w', (3, 3, 770, 2)),
            ('Convolution3_w', (3, 3, 770, 2)),
            ('netsd_upsample_flow3to2_b', (2,)),
            ('net3_net3_upsample_flow3to2_b', (2,)),
            ('fuse_interconv0_b', (16,)),
            ('Convolution2_w', (3, 3, 1026, 2)),
            ('net2_conv6_w', (3, 3, 512, 1024)),
            ('netsd_conv3_w', (3, 3, 128, 256)),
            ('netsd_upsample_flow5to4_b', (2,)),
            ('net3_predict_conv3_w', (3, 3, 386, 2)),
            ('conv_redir_b', (32,)),
            ('net2_conv5_1_b', (512,)),
            ('upsample_flow6to5_w', (4, 4, 2, 2)),
            ('net2_net2_upsample_flow6to5_b', (2,)),
            ('net3_conv6_b', (1024,)),
            ('fuse__Convolution6_b', (2,)),
            ('Convolution2_b', (2,)),
            ('upsample_flow3to2_w', (4, 4, 2, 2)),
            ('net3_conv1_w', (7, 7, 12, 64)),
            ('fuse_deconv0_w', (4, 4, 16, 162)),
            ('img0s_aug_w', (1, 1, 1, 1)),
            ('netsd_conv1_1_w', (3, 3, 64, 128)),
            ('netsd_deconv2_b', (64,)),
            ('net2_conv5_w', (3, 3, 512, 512)),
            ('fuse_interconv1_b', (32,)),
            ('netsd_conv6_1_b', (1024,)),
            ('netsd_interconv2_b', (64,)),
            ('img1s_aug_b', (320, 448, 3, 1)),
            ('netsd_deconv2_w', (4, 4, 64, 386)),
            ('net2_predict_conv3_b', (2,)),
            ('net2_predict_conv2_b', (2,)),
            ('net3_deconv4_b', (256,)),
            ('net3_net3_upsample_flow5to4_b', (2,)),
            ('conv1_b', (64,)),
            ('net3_conv5_w', (3, 3, 512, 512))]
"tensorflow.nn.conv2d",
"tensorflow.contrib.graph_editor.reroute.add_control_inputs",
"tensorflow.image.resize_nearest_neighbor",
"tensorflow.shape",
"tensorflow.pad",
"tensorflow.reshape",
"tensorflow.get_variable",
"tensorflow.to_float",
"tensorflow.reduce_sum",
"tensorflow.ones",
"tensorflow.... | [((469, 489), 'tensorflow.maximum', 'tf.maximum', (['x', '(s * x)'], {}), '(x, s * x)\n', (479, 489), True, 'import tensorflow as tf\n'), ((1098, 1126), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(flow * target)'], {}), '(flow * target)\n', (1111, 1126), True, 'import tensorflow as tf\n'), ((1173, 1267), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', ([], {'learning_rate': 'learning_rate', 'beta1': '(0.95)', 'beta2': '(0.99)', 'epsilon': '(1e-08)'}), '(learning_rate=learning_rate, beta1=0.95, beta2=0.99,\n epsilon=1e-08)\n', (1195, 1267), True, 'import tensorflow as tf\n'), ((2109, 2166), 'tensorflow.contrib.graph_editor.reroute.add_control_inputs', 'ge.reroute.add_control_inputs', (['a_tensor.op', '[b_tensor.op]'], {}), '(a_tensor.op, [b_tensor.op])\n', (2138, 2166), True, 'import tensorflow.contrib.graph_editor as ge\n'), ((3190, 3236), 'numpy.array', 'np.array', (['[0.37655231, 0.39534855, 0.40119368]'], {}), '([0.37655231, 0.39534855, 0.40119368])\n', (3198, 3236), True, 'import numpy as np\n'), ((3382, 3490), 'tensorflow.image.resize_bilinear', 'tf.image.resize_bilinear', (["blobs['img0_nomean']"], {'size': '[ADAPTED_HEIGHT, ADAPTED_WIDTH]', 'align_corners': '(True)'}), "(blobs['img0_nomean'], size=[ADAPTED_HEIGHT,\n ADAPTED_WIDTH], align_corners=True)\n", (3406, 3490), True, 'import tensorflow as tf\n'), ((3525, 3633), 'tensorflow.image.resize_bilinear', 'tf.image.resize_bilinear', (["blobs['img1_nomean']"], {'size': '[ADAPTED_HEIGHT, ADAPTED_WIDTH]', 'align_corners': '(True)'}), "(blobs['img1_nomean'], size=[ADAPTED_HEIGHT,\n ADAPTED_WIDTH], align_corners=True)\n", (3549, 3633), True, 'import tensorflow as tf\n'), ((3665, 3734), 'tensorflow.pad', 'tf.pad', (["blobs['img0_nomean_resize']", '[[0, 0], [3, 3], [3, 3], [0, 0]]'], {}), "(blobs['img0_nomean_resize'], [[0, 0], [3, 3], [3, 3], [0, 0]])\n", (3671, 3734), True, 'import tensorflow as tf\n'), ((3965, 4034), 'tensorflow.pad', 'tf.pad', (["blobs['img1_nomean_resize']", 
'[[0, 0], [3, 3], [3, 3], [0, 0]]'], {}), "(blobs['img1_nomean_resize'], [[0, 0], [3, 3], [3, 3], [0, 0]])\n", (3971, 4034), True, 'import tensorflow as tf\n'), ((4265, 4322), 'tensorflow.pad', 'tf.pad', (["blobs['conv1a']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['conv1a'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (4271, 4322), True, 'import tensorflow as tf\n'), ((4553, 4610), 'tensorflow.pad', 'tf.pad', (["blobs['conv1b']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['conv1b'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (4559, 4610), True, 'import tensorflow as tf\n'), ((4841, 4898), 'tensorflow.pad', 'tf.pad', (["blobs['conv2a']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['conv2a'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (4847, 4898), True, 'import tensorflow as tf\n'), ((5129, 5186), 'tensorflow.pad', 'tf.pad', (["blobs['conv2b']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['conv2b'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (5135, 5186), True, 'import tensorflow as tf\n'), ((6156, 6178), 'tensorflow.concat', 'tf.concat', (['tmp'], {'axis': '(3)'}), '(tmp, axis=3)\n', (6165, 6178), True, 'import tensorflow as tf\n'), ((6497, 6552), 'tensorflow.concat', 'tf.concat', (["[blobs['conv_redir'], blobs['corr']]"], {'axis': '(3)'}), "([blobs['conv_redir'], blobs['corr']], axis=3)\n", (6506, 6552), True, 'import tensorflow as tf\n'), ((6793, 6851), 'tensorflow.pad', 'tf.pad', (["blobs['conv3_1']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['conv3_1'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (6799, 6851), True, 'import tensorflow as tf\n'), ((7290, 7348), 'tensorflow.pad', 'tf.pad', (["blobs['conv4_1']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['conv4_1'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (7296, 7348), True, 'import tensorflow as tf\n'), ((7787, 7845), 'tensorflow.pad', 'tf.pad', (["blobs['conv5_1']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['conv5_1'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (7793, 7845), True, 'import 
tensorflow as tf\n'), ((8994, 9085), 'tensorflow.concat', 'tf.concat', (["[blobs['conv5_1'], blobs['deconv5'], blobs['upsampled_flow6_to_5']]"], {'axis': '(3)'}), "([blobs['conv5_1'], blobs['deconv5'], blobs['upsampled_flow6_to_5'\n ]], axis=3)\n", (9003, 9085), True, 'import tensorflow as tf\n'), ((9115, 9173), 'tensorflow.pad', 'tf.pad', (["blobs['concat5']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['concat5'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (9121, 9173), True, 'import tensorflow as tf\n'), ((9920, 10011), 'tensorflow.concat', 'tf.concat', (["[blobs['conv4_1'], blobs['deconv4'], blobs['upsampled_flow5_to_4']]"], {'axis': '(3)'}), "([blobs['conv4_1'], blobs['deconv4'], blobs['upsampled_flow5_to_4'\n ]], axis=3)\n", (9929, 10011), True, 'import tensorflow as tf\n'), ((10747, 10838), 'tensorflow.concat', 'tf.concat', (["[blobs['conv3_1'], blobs['deconv3'], blobs['upsampled_flow4_to_3']]"], {'axis': '(3)'}), "([blobs['conv3_1'], blobs['deconv3'], blobs['upsampled_flow4_to_3'\n ]], axis=3)\n", (10756, 10838), True, 'import tensorflow as tf\n'), ((11557, 11647), 'tensorflow.concat', 'tf.concat', (["[blobs['conv2a'], blobs['deconv2'], blobs['upsampled_flow3_to_2']]"], {'axis': '(3)'}), "([blobs['conv2a'], blobs['deconv2'], blobs['upsampled_flow3_to_2']\n ], axis=3)\n", (11566, 11647), True, 'import tensorflow as tf\n'), ((11899, 12002), 'tensorflow.image.resize_bilinear', 'tf.image.resize_bilinear', (["blobs['blob41']"], {'size': '[ADAPTED_HEIGHT, ADAPTED_WIDTH]', 'align_corners': '(True)'}), "(blobs['blob41'], size=[ADAPTED_HEIGHT,\n ADAPTED_WIDTH], align_corners=True)\n", (11923, 12002), True, 'import tensorflow as tf\n'), ((12394, 12527), 'tensorflow.concat', 'tf.concat', (["[blobs['img0_nomean_resize'], blobs['img1_nomean_resize'], blobs['blob43'],\n blobs['blob46'], blobs['blob45']]"], {'axis': '(3)'}), "([blobs['img0_nomean_resize'], blobs['img1_nomean_resize'], blobs[\n 'blob43'], blobs['blob46'], blobs['blob45']], axis=3)\n", (12403, 12527), 
True, 'import tensorflow as tf\n'), ((13203, 13260), 'tensorflow.pad', 'tf.pad', (["blobs['blob47']", '[[0, 0], [3, 3], [3, 3], [0, 0]]'], {}), "(blobs['blob47'], [[0, 0], [3, 3], [3, 3], [0, 0]])\n", (13209, 13260), True, 'import tensorflow as tf\n'), ((13501, 13558), 'tensorflow.pad', 'tf.pad', (["blobs['blob48']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['blob48'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (13507, 13558), True, 'import tensorflow as tf\n'), ((13800, 13857), 'tensorflow.pad', 'tf.pad', (["blobs['blob49']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['blob49'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (13806, 13857), True, 'import tensorflow as tf\n'), ((14319, 14376), 'tensorflow.pad', 'tf.pad', (["blobs['blob51']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob51'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (14325, 14376), True, 'import tensorflow as tf\n'), ((14838, 14895), 'tensorflow.pad', 'tf.pad', (["blobs['blob53']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob53'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (14844, 14895), True, 'import tensorflow as tf\n'), ((15357, 15414), 'tensorflow.pad', 'tf.pad', (["blobs['blob55']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob55'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (15363, 15414), True, 'import tensorflow as tf\n'), ((16616, 16686), 'tensorflow.concat', 'tf.concat', (["[blobs['blob55'], blobs['blob59'], blobs['blob60']]"], {'axis': '(3)'}), "([blobs['blob55'], blobs['blob59'], blobs['blob60']], axis=3)\n", (16625, 16686), True, 'import tensorflow as tf\n'), ((17439, 17509), 'tensorflow.concat', 'tf.concat', (["[blobs['blob53'], blobs['blob63'], blobs['blob64']]"], {'axis': '(3)'}), "([blobs['blob53'], blobs['blob63'], blobs['blob64']], axis=3)\n", (17448, 17509), True, 'import tensorflow as tf\n'), ((18266, 18336), 'tensorflow.concat', 'tf.concat', (["[blobs['blob51'], blobs['blob67'], blobs['blob68']]"], {'axis': '(3)'}), "([blobs['blob51'], blobs['blob67'], 
blobs['blob68']], axis=3)\n", (18275, 18336), True, 'import tensorflow as tf\n'), ((19068, 19138), 'tensorflow.concat', 'tf.concat', (["[blobs['blob49'], blobs['blob71'], blobs['blob72']]"], {'axis': '(3)'}), "([blobs['blob49'], blobs['blob71'], blobs['blob72']], axis=3)\n", (19077, 19138), True, 'import tensorflow as tf\n'), ((19392, 19495), 'tensorflow.image.resize_bilinear', 'tf.image.resize_bilinear', (["blobs['blob75']"], {'size': '[ADAPTED_HEIGHT, ADAPTED_WIDTH]', 'align_corners': '(True)'}), "(blobs['blob75'], size=[ADAPTED_HEIGHT,\n ADAPTED_WIDTH], align_corners=True)\n", (19416, 19495), True, 'import tensorflow as tf\n'), ((19887, 20020), 'tensorflow.concat', 'tf.concat', (["[blobs['img0_nomean_resize'], blobs['img1_nomean_resize'], blobs['blob77'],\n blobs['blob80'], blobs['blob79']]"], {'axis': '(3)'}), "([blobs['img0_nomean_resize'], blobs['img1_nomean_resize'], blobs[\n 'blob77'], blobs['blob80'], blobs['blob79']], axis=3)\n", (19896, 20020), True, 'import tensorflow as tf\n'), ((20696, 20753), 'tensorflow.pad', 'tf.pad', (["blobs['blob81']", '[[0, 0], [3, 3], [3, 3], [0, 0]]'], {}), "(blobs['blob81'], [[0, 0], [3, 3], [3, 3], [0, 0]])\n", (20702, 20753), True, 'import tensorflow as tf\n'), ((20994, 21051), 'tensorflow.pad', 'tf.pad', (["blobs['blob82']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['blob82'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (21000, 21051), True, 'import tensorflow as tf\n'), ((21293, 21350), 'tensorflow.pad', 'tf.pad', (["blobs['blob83']", '[[0, 0], [2, 2], [2, 2], [0, 0]]'], {}), "(blobs['blob83'], [[0, 0], [2, 2], [2, 2], [0, 0]])\n", (21299, 21350), True, 'import tensorflow as tf\n'), ((21812, 21869), 'tensorflow.pad', 'tf.pad', (["blobs['blob85']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob85'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (21818, 21869), True, 'import tensorflow as tf\n'), ((22331, 22388), 'tensorflow.pad', 'tf.pad', (["blobs['blob87']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), 
"(blobs['blob87'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (22337, 22388), True, 'import tensorflow as tf\n'), ((22850, 22907), 'tensorflow.pad', 'tf.pad', (["blobs['blob89']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob89'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (22856, 22907), True, 'import tensorflow as tf\n'), ((24102, 24172), 'tensorflow.concat', 'tf.concat', (["[blobs['blob89'], blobs['blob93'], blobs['blob94']]"], {'axis': '(3)'}), "([blobs['blob89'], blobs['blob93'], blobs['blob94']], axis=3)\n", (24111, 24172), True, 'import tensorflow as tf\n'), ((24925, 24995), 'tensorflow.concat', 'tf.concat', (["[blobs['blob87'], blobs['blob97'], blobs['blob98']]"], {'axis': '(3)'}), "([blobs['blob87'], blobs['blob97'], blobs['blob98']], axis=3)\n", (24934, 24995), True, 'import tensorflow as tf\n'), ((25759, 25831), 'tensorflow.concat', 'tf.concat', (["[blobs['blob85'], blobs['blob101'], blobs['blob102']]"], {'axis': '(3)'}), "([blobs['blob85'], blobs['blob101'], blobs['blob102']], axis=3)\n", (25768, 25831), True, 'import tensorflow as tf\n'), ((26572, 26644), 'tensorflow.concat', 'tf.concat', (["[blobs['blob83'], blobs['blob105'], blobs['blob106']]"], {'axis': '(3)'}), "([blobs['blob83'], blobs['blob105'], blobs['blob106']], axis=3)\n", (26581, 26644), True, 'import tensorflow as tf\n'), ((27555, 27632), 'tensorflow.concat', 'tf.concat', (["[blobs['img0_nomean_resize'], blobs['img1_nomean_resize']]"], {'axis': '(3)'}), "([blobs['img0_nomean_resize'], blobs['img1_nomean_resize']], axis=3)\n", (27564, 27632), True, 'import tensorflow as tf\n'), ((27952, 28010), 'tensorflow.pad', 'tf.pad', (["blobs['blob111']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob111'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (27958, 28010), True, 'import tensorflow as tf\n'), ((28493, 28551), 'tensorflow.pad', 'tf.pad', (["blobs['blob113']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob113'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (28499, 28551), True, 'import 
tensorflow as tf\n'), ((29034, 29092), 'tensorflow.pad', 'tf.pad', (["blobs['blob115']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob115'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (29040, 29092), True, 'import tensorflow as tf\n'), ((29575, 29633), 'tensorflow.pad', 'tf.pad', (["blobs['blob117']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob117'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (29581, 29633), True, 'import tensorflow as tf\n'), ((30116, 30174), 'tensorflow.pad', 'tf.pad', (["blobs['blob119']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob119'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (30122, 30174), True, 'import tensorflow as tf\n'), ((30657, 30715), 'tensorflow.pad', 'tf.pad', (["blobs['blob121']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob121'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (30663, 30715), True, 'import tensorflow as tf\n'), ((31917, 31990), 'tensorflow.concat', 'tf.concat', (["[blobs['blob121'], blobs['blob125'], blobs['blob126']]"], {'axis': '(3)'}), "([blobs['blob121'], blobs['blob125'], blobs['blob126']], axis=3)\n", (31926, 31990), True, 'import tensorflow as tf\n'), ((32913, 32986), 'tensorflow.concat', 'tf.concat', (["[blobs['blob119'], blobs['blob130'], blobs['blob131']]"], {'axis': '(3)'}), "([blobs['blob119'], blobs['blob130'], blobs['blob131']], axis=3)\n", (32922, 32986), True, 'import tensorflow as tf\n'), ((33913, 33986), 'tensorflow.concat', 'tf.concat', (["[blobs['blob117'], blobs['blob135'], blobs['blob136']]"], {'axis': '(3)'}), "([blobs['blob117'], blobs['blob135'], blobs['blob136']], axis=3)\n", (33922, 33986), True, 'import tensorflow as tf\n'), ((34888, 34961), 'tensorflow.concat', 'tf.concat', (["[blobs['blob115'], blobs['blob140'], blobs['blob141']]"], {'axis': '(3)'}), "([blobs['blob115'], blobs['blob140'], blobs['blob141']], axis=3)\n", (34897, 34961), True, 'import tensorflow as tf\n'), ((35386, 35499), 'tensorflow.image.resize_nearest_neighbor', 
'tf.image.resize_nearest_neighbor', (["blobs['blob145']"], {'size': '[ADAPTED_HEIGHT, ADAPTED_WIDTH]', 'align_corners': '(False)'}), "(blobs['blob145'], size=[ADAPTED_HEIGHT,\n ADAPTED_WIDTH], align_corners=False)\n", (35418, 35499), True, 'import tensorflow as tf\n'), ((35532, 35645), 'tensorflow.image.resize_nearest_neighbor', 'tf.image.resize_nearest_neighbor', (["blobs['blob109']"], {'size': '[ADAPTED_HEIGHT, ADAPTED_WIDTH]', 'align_corners': '(False)'}), "(blobs['blob109'], size=[ADAPTED_HEIGHT,\n ADAPTED_WIDTH], align_corners=False)\n", (35564, 35645), True, 'import tensorflow as tf\n'), ((36630, 36794), 'tensorflow.concat', 'tf.concat', (["[blobs['img0_nomean_resize'], blobs['blob146'], blobs['blob147'], blobs[\n 'blob148'], blobs['blob149'], blobs['blob152'], blobs['blob155']]"], {'axis': '(3)'}), "([blobs['img0_nomean_resize'], blobs['blob146'], blobs['blob147'],\n blobs['blob148'], blobs['blob149'], blobs['blob152'], blobs['blob155']],\n axis=3)\n", (36639, 36794), True, 'import tensorflow as tf\n'), ((37036, 37094), 'tensorflow.pad', 'tf.pad', (["blobs['blob157']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob157'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (37042, 37094), True, 'import tensorflow as tf\n'), ((37573, 37631), 'tensorflow.pad', 'tf.pad', (["blobs['blob159']", '[[0, 0], [1, 1], [1, 1], [0, 0]]'], {}), "(blobs['blob159'], [[0, 0], [1, 1], [1, 1], [0, 0]])\n", (37579, 37631), True, 'import tensorflow as tf\n'), ((38812, 38885), 'tensorflow.concat', 'tf.concat', (["[blobs['blob159'], blobs['blob163'], blobs['blob164']]"], {'axis': '(3)'}), "([blobs['blob159'], blobs['blob163'], blobs['blob164']], axis=3)\n", (38821, 38885), True, 'import tensorflow as tf\n'), ((39777, 39850), 'tensorflow.concat', 'tf.concat', (["[blobs['blob157'], blobs['blob168'], blobs['blob169']]"], {'axis': '(3)'}), "([blobs['blob157'], blobs['blob168'], blobs['blob169']], axis=3)\n", (39786, 39850), True, 'import tensorflow as tf\n'), ((40235, 40337), 
'tensorflow.image.resize_bilinear', 'tf.image.resize_bilinear', (["blobs['blob172']"], {'size': '[TARGET_HEIGHT, TARGET_WIDTH]', 'align_corners': '(True)'}), "(blobs['blob172'], size=[TARGET_HEIGHT,\n TARGET_WIDTH], align_corners=True)\n", (40259, 40337), True, 'import tensorflow as tf\n'), ((40351, 40388), 'tensorflow.stack', 'tf.stack', (['[SCALE_WIDTH, SCALE_HEIGHT]'], {}), '([SCALE_WIDTH, SCALE_HEIGHT])\n', (40359, 40388), True, 'import tensorflow as tf\n'), ((40405, 40436), 'tensorflow.reshape', 'tf.reshape', (['scale', '[1, 1, 1, 2]'], {}), '(scale, [1, 1, 1, 2])\n', (40415, 40436), True, 'import tensorflow as tf\n'), ((281, 314), 'tensorflow.get_variable', 'tf.get_variable', (['key'], {'shape': 'shape'}), '(key, shape=shape)\n', (296, 314), True, 'import tensorflow as tf\n'), ((582, 636), 'tensorflow.stack', 'tf.stack', (['[flow[:, :, :, 1], flow[:, :, :, 0]]'], {'axis': '(3)'}), '([flow[:, :, :, 1], flow[:, :, :, 0]], axis=3)\n', (590, 636), True, 'import tensorflow as tf\n'), ((919, 967), 'tensorflow.stack', 'tf.stack', (['[f[:, :, :, 1], f[:, :, :, 0]]'], {'axis': '(3)'}), '([f[:, :, :, 1], f[:, :, :, 0]], axis=3)\n', (927, 967), True, 'import tensorflow as tf\n'), ((2818, 2836), 'tensorflow.to_float', 'tf.to_float', (['width'], {}), '(width)\n', (2829, 2836), True, 'import tensorflow as tf\n'), ((2839, 2865), 'tensorflow.to_float', 'tf.to_float', (['ADAPTED_WIDTH'], {}), '(ADAPTED_WIDTH)\n', (2850, 2865), True, 'import tensorflow as tf\n'), ((2890, 2909), 'tensorflow.to_float', 'tf.to_float', (['height'], {}), '(height)\n', (2901, 2909), True, 'import tensorflow as tf\n'), ((2912, 2939), 'tensorflow.to_float', 'tf.to_float', (['ADAPTED_HEIGHT'], {}), '(ADAPTED_HEIGHT)\n', (2923, 2939), True, 'import tensorflow as tf\n'), ((3757, 3854), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv1a']", "self.weights['conv1_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv1a'], self.weights['conv1_w'], strides=[1, 2, 2, 1],\n 
padding='VALID')\n", (3769, 3854), True, 'import tensorflow as tf\n'), ((4057, 4154), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv1b']", "self.weights['conv1_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv1b'], self.weights['conv1_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (4069, 4154), True, 'import tensorflow as tf\n'), ((4345, 4442), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv2a']", "self.weights['conv2_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv2a'], self.weights['conv2_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (4357, 4442), True, 'import tensorflow as tf\n'), ((4633, 4730), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv2b']", "self.weights['conv2_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv2b'], self.weights['conv2_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (4645, 4730), True, 'import tensorflow as tf\n'), ((4921, 5018), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv3a']", "self.weights['conv3_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv3a'], self.weights['conv3_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (4933, 5018), True, 'import tensorflow as tf\n'), ((5209, 5306), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv3b']", "self.weights['conv3_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv3b'], self.weights['conv3_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (5221, 5306), True, 'import tensorflow as tf\n'), ((6271, 6374), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv3a']", "self.weights['conv_redir_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""VALID"""'}), "(blobs['conv3a'], self.weights['conv_redir_w'], strides=[1, 1, \n 1, 1], padding='VALID')\n", (6283, 6374), True, 'import tensorflow as tf\n'), ((6581, 6680), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob16']", "self.weights['conv3_1_w']"], {'strides': '[1, 
1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob16'], self.weights['conv3_1_w'], strides=[1, 1, 1, \n 1], padding='SAME')\n", (6593, 6680), True, 'import tensorflow as tf\n'), ((6873, 6969), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv4']", "self.weights['conv4_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv4'], self.weights['conv4_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (6885, 6969), True, 'import tensorflow as tf\n'), ((7079, 7177), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv4']", "self.weights['conv4_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['conv4'], self.weights['conv4_1_w'], strides=[1, 1, 1, 1\n ], padding='SAME')\n", (7091, 7177), True, 'import tensorflow as tf\n'), ((7370, 7466), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv5']", "self.weights['conv5_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv5'], self.weights['conv5_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (7382, 7466), True, 'import tensorflow as tf\n'), ((7576, 7674), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv5']", "self.weights['conv5_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['conv5'], self.weights['conv5_1_w'], strides=[1, 1, 1, 1\n ], padding='SAME')\n", (7588, 7674), True, 'import tensorflow as tf\n'), ((7867, 7963), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv6']", "self.weights['conv6_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['conv6'], self.weights['conv6_w'], strides=[1, 2, 2, 1],\n padding='VALID')\n", (7879, 7963), True, 'import tensorflow as tf\n'), ((8073, 8171), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv6']", "self.weights['conv6_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['conv6'], self.weights['conv6_1_w'], strides=[1, 1, 1, 1\n ], padding='SAME')\n", (8085, 8171), True, 'import tensorflow as tf\n'), ((8292, 8397), 
'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['conv6_1']", "self.weights['Convolution1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['conv6_1'], self.weights['Convolution1_w'], strides=[1, \n 1, 1, 1], padding='SAME')\n", (8304, 8397), True, 'import tensorflow as tf\n'), ((8451, 8621), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['conv6_1']", "self.weights['deconv5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512]', 'strides': '[1, 2, 2, 1]'}), "(blobs['conv6_1'], self.weights['deconv5_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512],\n strides=[1, 2, 2, 1])\n", (8473, 8621), True, 'import tensorflow as tf\n'), ((8750, 8936), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['predict_flow6']", "self.weights['upsample_flow6to5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['predict_flow6'], self.weights[\n 'upsample_flow6to5_w'], output_shape=[batch_size, ADAPTED_HEIGHT / 32, \n ADAPTED_WIDTH / 32, 2], strides=[1, 2, 2, 1])\n", (8772, 8936), True, 'import tensorflow as tf\n'), ((9203, 9314), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['predict_flow5']", "self.weights['Convolution2_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""VALID"""'}), "(blobs['predict_flow5'], self.weights['Convolution2_w'],\n strides=[1, 1, 1, 1], padding='VALID')\n", (9215, 9314), True, 'import tensorflow as tf\n'), ((9377, 9547), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['concat5']", "self.weights['deconv4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256]', 'strides': '[1, 2, 2, 1]'}), "(blobs['concat5'], self.weights['deconv4_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256],\n strides=[1, 2, 2, 1])\n", (9399, 9547), True, 'import tensorflow as tf\n'), ((9676, 9862), 
'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['predict_flow5']", "self.weights['upsample_flow5to4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['predict_flow5'], self.weights[\n 'upsample_flow5to4_w'], output_shape=[batch_size, ADAPTED_HEIGHT / 16, \n ADAPTED_WIDTH / 16, 2], strides=[1, 2, 2, 1])\n", (9698, 9862), True, 'import tensorflow as tf\n'), ((10041, 10146), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['concat4']", "self.weights['Convolution3_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['concat4'], self.weights['Convolution3_w'], strides=[1, \n 1, 1, 1], padding='SAME')\n", (10053, 10146), True, 'import tensorflow as tf\n'), ((10208, 10376), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['concat4']", "self.weights['deconv3_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128]', 'strides': '[1, 2, 2, 1]'}), "(blobs['concat4'], self.weights['deconv3_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128],\n strides=[1, 2, 2, 1])\n", (10230, 10376), True, 'import tensorflow as tf\n'), ((10505, 10689), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['predict_flow4']", "self.weights['upsample_flow4to3_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['predict_flow4'], self.weights[\n 'upsample_flow4to3_w'], output_shape=[batch_size, ADAPTED_HEIGHT / 8, \n ADAPTED_WIDTH / 8, 2], strides=[1, 2, 2, 1])\n", (10527, 10689), True, 'import tensorflow as tf\n'), ((10868, 10973), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['concat3']", "self.weights['Convolution4_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['concat3'], self.weights['Convolution4_w'], strides=[1, \n 1, 1, 1], padding='SAME')\n", (10880, 10973), True, 'import tensorflow as tf\n'), ((11035, 
11202), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['concat3']", "self.weights['deconv2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64]', 'strides': '[1, 2, 2, 1]'}), "(blobs['concat3'], self.weights['deconv2_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64],\n strides=[1, 2, 2, 1])\n", (11057, 11202), True, 'import tensorflow as tf\n'), ((11323, 11507), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['predict_flow3']", "self.weights['upsample_flow3to2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['predict_flow3'], self.weights[\n 'upsample_flow3to2_w'], output_shape=[batch_size, ADAPTED_HEIGHT / 4, \n ADAPTED_WIDTH / 4, 2], strides=[1, 2, 2, 1])\n", (11345, 11507), True, 'import tensorflow as tf\n'), ((11677, 11782), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['concat2']", "self.weights['Convolution5_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['concat2'], self.weights['Convolution5_w'], strides=[1, \n 1, 1, 1], padding='SAME')\n", (11689, 11782), True, 'import tensorflow as tf\n'), ((13283, 13386), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob48']", "self.weights['net2_conv1_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob48'], self.weights['net2_conv1_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (13295, 13386), True, 'import tensorflow as tf\n'), ((13582, 13685), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob49']", "self.weights['net2_conv2_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob49'], self.weights['net2_conv2_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (13594, 13685), True, 'import tensorflow as tf\n'), ((13880, 13983), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob50']", "self.weights['net2_conv3_w']"], {'strides': '[1, 2, 2, 1]', 'padding': 
'"""VALID"""'}), "(blobs['blob50'], self.weights['net2_conv3_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (13892, 13983), True, 'import tensorflow as tf\n'), ((14098, 14201), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob50']", "self.weights['net2_conv3_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob50'], self.weights['net2_conv3_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (14110, 14201), True, 'import tensorflow as tf\n'), ((14399, 14502), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob52']", "self.weights['net2_conv4_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob52'], self.weights['net2_conv4_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (14411, 14502), True, 'import tensorflow as tf\n'), ((14617, 14720), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob52']", "self.weights['net2_conv4_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob52'], self.weights['net2_conv4_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (14629, 14720), True, 'import tensorflow as tf\n'), ((14918, 15021), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob54']", "self.weights['net2_conv5_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob54'], self.weights['net2_conv5_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (14930, 15021), True, 'import tensorflow as tf\n'), ((15136, 15239), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob54']", "self.weights['net2_conv5_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob54'], self.weights['net2_conv5_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (15148, 15239), True, 'import tensorflow as tf\n'), ((15437, 15540), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob56']", "self.weights['net2_conv6_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob56'], self.weights['net2_conv6_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", 
(15449, 15540), True, 'import tensorflow as tf\n'), ((15655, 15758), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob56']", "self.weights['net2_conv6_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob56'], self.weights['net2_conv6_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (15667, 15758), True, 'import tensorflow as tf\n'), ((15884, 15994), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob57']", "self.weights['net2_predict_conv6_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob57'], self.weights['net2_predict_conv6_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (15896, 15994), True, 'import tensorflow as tf\n'), ((16061, 16235), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob57']", "self.weights['net2_deconv5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob57'], self.weights['net2_deconv5_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512],\n strides=[1, 2, 2, 1])\n", (16083, 16235), True, 'import tensorflow as tf\n'), ((16353, 16549), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['predict_flow6']", "self.weights['net2_net2_upsample_flow6to5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['predict_flow6'], self.weights[\n 'net2_net2_upsample_flow6to5_w'], output_shape=[batch_size, \n ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 2], strides=[1, 2, 2, 1])\n", (16375, 16549), True, 'import tensorflow as tf\n'), ((16714, 16824), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob61']", "self.weights['net2_predict_conv5_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob61'], self.weights['net2_predict_conv5_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (16726, 16824), True, 'import tensorflow as tf\n'), ((16891, 17065), 
'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob61']", "self.weights['net2_deconv4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob61'], self.weights['net2_deconv4_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256],\n strides=[1, 2, 2, 1])\n", (16913, 17065), True, 'import tensorflow as tf\n'), ((17183, 17372), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob62']", "self.weights['net2_net2_upsample_flow5to4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob62'], self.weights[\n 'net2_net2_upsample_flow5to4_w'], output_shape=[batch_size, \n ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 2], strides=[1, 2, 2, 1])\n", (17205, 17372), True, 'import tensorflow as tf\n'), ((17545, 17655), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob65']", "self.weights['net2_predict_conv4_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob65'], self.weights['net2_predict_conv4_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (17557, 17655), True, 'import tensorflow as tf\n'), ((17722, 17894), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob65']", "self.weights['net2_deconv3_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob65'], self.weights['net2_deconv3_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128],\n strides=[1, 2, 2, 1])\n", (17744, 17894), True, 'import tensorflow as tf\n'), ((18012, 18199), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob66']", "self.weights['net2_net2_upsample_flow4to3_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob66'], self.weights[\n 'net2_net2_upsample_flow4to3_w'], 
output_shape=[batch_size, \n ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 2], strides=[1, 2, 2, 1])\n", (18034, 18199), True, 'import tensorflow as tf\n'), ((18364, 18474), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob69']", "self.weights['net2_predict_conv3_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob69'], self.weights['net2_predict_conv3_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (18376, 18474), True, 'import tensorflow as tf\n'), ((18541, 18712), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob69']", "self.weights['net2_deconv2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob69'], self.weights['net2_deconv2_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64],\n strides=[1, 2, 2, 1])\n", (18563, 18712), True, 'import tensorflow as tf\n'), ((18822, 19009), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob70']", "self.weights['net2_net2_upsample_flow3to2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob70'], self.weights[\n 'net2_net2_upsample_flow3to2_w'], output_shape=[batch_size, \n ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 2], strides=[1, 2, 2, 1])\n", (18844, 19009), True, 'import tensorflow as tf\n'), ((19166, 19276), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob73']", "self.weights['net2_predict_conv2_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob73'], self.weights['net2_predict_conv2_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (19178, 19276), True, 'import tensorflow as tf\n'), ((20776, 20879), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob82']", "self.weights['net3_conv1_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob82'], self.weights['net3_conv1_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (20788, 20879), 
True, 'import tensorflow as tf\n'), ((21075, 21178), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob83']", "self.weights['net3_conv2_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob83'], self.weights['net3_conv2_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (21087, 21178), True, 'import tensorflow as tf\n'), ((21373, 21476), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob84']", "self.weights['net3_conv3_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob84'], self.weights['net3_conv3_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (21385, 21476), True, 'import tensorflow as tf\n'), ((21591, 21694), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob84']", "self.weights['net3_conv3_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob84'], self.weights['net3_conv3_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (21603, 21694), True, 'import tensorflow as tf\n'), ((21892, 21995), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob86']", "self.weights['net3_conv4_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob86'], self.weights['net3_conv4_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (21904, 21995), True, 'import tensorflow as tf\n'), ((22110, 22213), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob86']", "self.weights['net3_conv4_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob86'], self.weights['net3_conv4_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (22122, 22213), True, 'import tensorflow as tf\n'), ((22411, 22514), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob88']", "self.weights['net3_conv5_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob88'], self.weights['net3_conv5_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (22423, 22514), True, 'import tensorflow as tf\n'), ((22629, 22732), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob88']", 
"self.weights['net3_conv5_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob88'], self.weights['net3_conv5_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (22641, 22732), True, 'import tensorflow as tf\n'), ((22930, 23033), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob90']", "self.weights['net3_conv6_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob90'], self.weights['net3_conv6_w'], strides=[1, 2, \n 2, 1], padding='VALID')\n", (22942, 23033), True, 'import tensorflow as tf\n'), ((23148, 23251), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob90']", "self.weights['net3_conv6_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob90'], self.weights['net3_conv6_1_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (23160, 23251), True, 'import tensorflow as tf\n'), ((23377, 23487), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob91']", "self.weights['net3_predict_conv6_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob91'], self.weights['net3_predict_conv6_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (23389, 23487), True, 'import tensorflow as tf\n'), ((23554, 23728), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob91']", "self.weights['net3_deconv5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob91'], self.weights['net3_deconv5_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512],\n strides=[1, 2, 2, 1])\n", (23576, 23728), True, 'import tensorflow as tf\n'), ((23846, 24035), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob92']", "self.weights['net3_net3_upsample_flow6to5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob92'], self.weights[\n 'net3_net3_upsample_flow6to5_w'], output_shape=[batch_size, \n 
ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 2], strides=[1, 2, 2, 1])\n", (23868, 24035), True, 'import tensorflow as tf\n'), ((24200, 24310), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob95']", "self.weights['net3_predict_conv5_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob95'], self.weights['net3_predict_conv5_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (24212, 24310), True, 'import tensorflow as tf\n'), ((24377, 24551), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob95']", "self.weights['net3_deconv4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob95'], self.weights['net3_deconv4_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256],\n strides=[1, 2, 2, 1])\n", (24399, 24551), True, 'import tensorflow as tf\n'), ((24669, 24858), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob96']", "self.weights['net3_net3_upsample_flow5to4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob96'], self.weights[\n 'net3_net3_upsample_flow5to4_w'], output_shape=[batch_size, \n ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 2], strides=[1, 2, 2, 1])\n", (24691, 24858), True, 'import tensorflow as tf\n'), ((25032, 25142), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob99']", "self.weights['net3_predict_conv4_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob99'], self.weights['net3_predict_conv4_w'], strides\n =[1, 1, 1, 1], padding='SAME')\n", (25044, 25142), True, 'import tensorflow as tf\n'), ((25210, 25382), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob99']", "self.weights['net3_deconv3_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob99'], self.weights['net3_deconv3_w'],\n 
output_shape=[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128],\n strides=[1, 2, 2, 1])\n", (25232, 25382), True, 'import tensorflow as tf\n'), ((25503, 25691), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob100']", "self.weights['net3_net3_upsample_flow4to3_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob100'], self.weights[\n 'net3_net3_upsample_flow4to3_w'], output_shape=[batch_size, \n ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 2], strides=[1, 2, 2, 1])\n", (25525, 25691), True, 'import tensorflow as tf\n'), ((25860, 25970), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob103']", "self.weights['net3_predict_conv3_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob103'], self.weights['net3_predict_conv3_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (25872, 25970), True, 'import tensorflow as tf\n'), ((26039, 26211), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob103']", "self.weights['net3_deconv2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob103'], self.weights['net3_deconv2_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64],\n strides=[1, 2, 2, 1])\n", (26061, 26211), True, 'import tensorflow as tf\n'), ((26324, 26512), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob104']", "self.weights['net3_net3_upsample_flow3to2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob104'], self.weights[\n 'net3_net3_upsample_flow3to2_w'], output_shape=[batch_size, \n ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 2], strides=[1, 2, 2, 1])\n", (26346, 26512), True, 'import tensorflow as tf\n'), ((26673, 26783), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob107']", "self.weights['net3_predict_conv2_w']"], 
{'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob107'], self.weights['net3_predict_conv2_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (26685, 26783), True, 'import tensorflow as tf\n'), ((27729, 27832), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob110']", "self.weights['netsd_conv0_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob110'], self.weights['netsd_conv0_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (27741, 27832), True, 'import tensorflow as tf\n'), ((28034, 28138), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob112']", "self.weights['netsd_conv1_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob112'], self.weights['netsd_conv1_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (28046, 28138), True, 'import tensorflow as tf\n'), ((28258, 28363), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob112']", "self.weights['netsd_conv1_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob112'], self.weights['netsd_conv1_1_w'], strides=[1,\n 1, 1, 1], padding='SAME')\n", (28270, 28363), True, 'import tensorflow as tf\n'), ((28575, 28679), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob114']", "self.weights['netsd_conv2_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob114'], self.weights['netsd_conv2_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (28587, 28679), True, 'import tensorflow as tf\n'), ((28799, 28904), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob114']", "self.weights['netsd_conv2_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob114'], self.weights['netsd_conv2_1_w'], strides=[1,\n 1, 1, 1], padding='SAME')\n", (28811, 28904), True, 'import tensorflow as tf\n'), ((29116, 29220), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob116']", "self.weights['netsd_conv3_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob116'], 
self.weights['netsd_conv3_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (29128, 29220), True, 'import tensorflow as tf\n'), ((29340, 29445), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob116']", "self.weights['netsd_conv3_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob116'], self.weights['netsd_conv3_1_w'], strides=[1,\n 1, 1, 1], padding='SAME')\n", (29352, 29445), True, 'import tensorflow as tf\n'), ((29657, 29761), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob118']", "self.weights['netsd_conv4_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob118'], self.weights['netsd_conv4_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (29669, 29761), True, 'import tensorflow as tf\n'), ((29881, 29986), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob118']", "self.weights['netsd_conv4_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob118'], self.weights['netsd_conv4_1_w'], strides=[1,\n 1, 1, 1], padding='SAME')\n", (29893, 29986), True, 'import tensorflow as tf\n'), ((30198, 30302), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob120']", "self.weights['netsd_conv5_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob120'], self.weights['netsd_conv5_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (30210, 30302), True, 'import tensorflow as tf\n'), ((30422, 30527), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob120']", "self.weights['netsd_conv5_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob120'], self.weights['netsd_conv5_1_w'], strides=[1,\n 1, 1, 1], padding='SAME')\n", (30434, 30527), True, 'import tensorflow as tf\n'), ((30739, 30843), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob122']", "self.weights['netsd_conv6_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob122'], self.weights['netsd_conv6_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (30751, 30843), 
True, 'import tensorflow as tf\n'), ((30963, 31068), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob122']", "self.weights['netsd_conv6_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob122'], self.weights['netsd_conv6_1_w'], strides=[1,\n 1, 1, 1], padding='SAME')\n", (30975, 31068), True, 'import tensorflow as tf\n'), ((31190, 31300), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob123']", "self.weights['netsd_Convolution1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob123'], self.weights['netsd_Convolution1_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (31202, 31300), True, 'import tensorflow as tf\n'), ((31369, 31545), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob123']", "self.weights['netsd_deconv5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob123'], self.weights['netsd_deconv5_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 512],\n strides=[1, 2, 2, 1])\n", (31391, 31545), True, 'import tensorflow as tf\n'), ((31667, 31852), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob124']", "self.weights['netsd_upsample_flow6to5_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 32, ADAPTED_WIDTH / 32, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob124'], self.weights[\n 'netsd_upsample_flow6to5_w'], output_shape=[batch_size, ADAPTED_HEIGHT /\n 32, ADAPTED_WIDTH / 32, 2], strides=[1, 2, 2, 1])\n", (31689, 31852), True, 'import tensorflow as tf\n'), ((32019, 32128), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob127']", "self.weights['netsd_interconv5_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob127'], self.weights['netsd_interconv5_w'], strides=\n [1, 1, 1, 1], padding='SAME')\n", (32031, 32128), True, 'import tensorflow as tf\n'), ((32194, 32304), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', 
(["blobs['blob128']", "self.weights['netsd_Convolution2_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob128'], self.weights['netsd_Convolution2_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (32206, 32304), True, 'import tensorflow as tf\n'), ((32365, 32541), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob127']", "self.weights['netsd_deconv4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob127'], self.weights['netsd_deconv4_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 256],\n strides=[1, 2, 2, 1])\n", (32387, 32541), True, 'import tensorflow as tf\n'), ((32663, 32848), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob129']", "self.weights['netsd_upsample_flow5to4_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 16, ADAPTED_WIDTH / 16, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob129'], self.weights[\n 'netsd_upsample_flow5to4_w'], output_shape=[batch_size, ADAPTED_HEIGHT /\n 16, ADAPTED_WIDTH / 16, 2], strides=[1, 2, 2, 1])\n", (32685, 32848), True, 'import tensorflow as tf\n'), ((33023, 33132), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob132']", "self.weights['netsd_interconv4_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob132'], self.weights['netsd_interconv4_w'], strides=\n [1, 1, 1, 1], padding='SAME')\n", (33035, 33132), True, 'import tensorflow as tf\n'), ((33198, 33308), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob133']", "self.weights['netsd_Convolution3_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob133'], self.weights['netsd_Convolution3_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (33210, 33308), True, 'import tensorflow as tf\n'), ((33369, 33543), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob132']", "self.weights['netsd_deconv3_w']"], {'output_shape': 
'[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob132'], self.weights['netsd_deconv3_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 128],\n strides=[1, 2, 2, 1])\n", (33391, 33543), True, 'import tensorflow as tf\n'), ((33665, 33848), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob134']", "self.weights['netsd_upsample_flow4to3_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 8, ADAPTED_WIDTH / 8, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob134'], self.weights[\n 'netsd_upsample_flow4to3_w'], output_shape=[batch_size, ADAPTED_HEIGHT /\n 8, ADAPTED_WIDTH / 8, 2], strides=[1, 2, 2, 1])\n", (33687, 33848), True, 'import tensorflow as tf\n'), ((34015, 34124), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob137']", "self.weights['netsd_interconv3_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob137'], self.weights['netsd_interconv3_w'], strides=\n [1, 1, 1, 1], padding='SAME')\n", (34027, 34124), True, 'import tensorflow as tf\n'), ((34190, 34300), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob138']", "self.weights['netsd_Convolution4_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob138'], self.weights['netsd_Convolution4_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (34202, 34300), True, 'import tensorflow as tf\n'), ((34361, 34534), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob137']", "self.weights['netsd_deconv2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob137'], self.weights['netsd_deconv2_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 64],\n strides=[1, 2, 2, 1])\n", (34383, 34534), True, 'import tensorflow as tf\n'), ((34648, 34831), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob139']", 
"self.weights['netsd_upsample_flow3to2_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 4, ADAPTED_WIDTH / 4, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob139'], self.weights[\n 'netsd_upsample_flow3to2_w'], output_shape=[batch_size, ADAPTED_HEIGHT /\n 4, ADAPTED_WIDTH / 4, 2], strides=[1, 2, 2, 1])\n", (34670, 34831), True, 'import tensorflow as tf\n'), ((34990, 35099), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob142']", "self.weights['netsd_interconv2_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob142'], self.weights['netsd_interconv2_w'], strides=\n [1, 1, 1, 1], padding='SAME')\n", (35002, 35099), True, 'import tensorflow as tf\n'), ((35165, 35275), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob143']", "self.weights['netsd_Convolution5_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob143'], self.weights['netsd_Convolution5_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (35177, 35275), True, 'import tensorflow as tf\n'), ((36815, 36917), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob156']", "self.weights['fuse_conv0_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob156'], self.weights['fuse_conv0_w'], strides=[1, 1,\n 1, 1], padding='SAME')\n", (36827, 36917), True, 'import tensorflow as tf\n'), ((37118, 37221), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob158']", "self.weights['fuse_conv1_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob158'], self.weights['fuse_conv1_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (37130, 37221), True, 'import tensorflow as tf\n'), ((37340, 37445), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob158']", "self.weights['fuse_conv1_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob158'], self.weights['fuse_conv1_1_w'], strides=[1, \n 1, 1, 1], padding='SAME')\n", (37352, 37445), True, 'import tensorflow as tf\n'), ((37655, 37758), 
'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob160']", "self.weights['fuse_conv2_w']"], {'strides': '[1, 2, 2, 1]', 'padding': '"""VALID"""'}), "(blobs['blob160'], self.weights['fuse_conv2_w'], strides=[1, 2,\n 2, 1], padding='VALID')\n", (37667, 37758), True, 'import tensorflow as tf\n'), ((37877, 37982), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob160']", "self.weights['fuse_conv2_1_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob160'], self.weights['fuse_conv2_1_w'], strides=[1, \n 1, 1, 1], padding='SAME')\n", (37889, 37982), True, 'import tensorflow as tf\n'), ((38110, 38220), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob161']", "self.weights['fuse__Convolution5_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob161'], self.weights['fuse__Convolution5_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (38122, 38220), True, 'import tensorflow as tf\n'), ((38289, 38461), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob161']", "self.weights['fuse_deconv1_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 2, ADAPTED_WIDTH / 2, 32]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob161'], self.weights['fuse_deconv1_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 2, ADAPTED_WIDTH / 2, 32],\n strides=[1, 2, 2, 1])\n", (38311, 38461), True, 'import tensorflow as tf\n'), ((38574, 38756), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob162']", "self.weights['fuse_upsample_flow2to1_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 2, ADAPTED_WIDTH / 2, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob162'], self.weights[\n 'fuse_upsample_flow2to1_w'], output_shape=[batch_size, ADAPTED_HEIGHT /\n 2, ADAPTED_WIDTH / 2, 2], strides=[1, 2, 2, 1])\n", (38596, 38756), True, 'import tensorflow as tf\n'), ((38914, 39022), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob165']", "self.weights['fuse_interconv1_w']"], {'strides': '[1, 1, 1, 1]', 
'padding': '"""SAME"""'}), "(blobs['blob165'], self.weights['fuse_interconv1_w'], strides=[\n 1, 1, 1, 1], padding='SAME')\n", (38926, 39022), True, 'import tensorflow as tf\n'), ((39087, 39197), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob166']", "self.weights['fuse__Convolution6_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob166'], self.weights['fuse__Convolution6_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", (39099, 39197), True, 'import tensorflow as tf\n'), ((39258, 39430), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob165']", "self.weights['fuse_deconv0_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT / 1, ADAPTED_WIDTH / 1, 16]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob165'], self.weights['fuse_deconv0_w'],\n output_shape=[batch_size, ADAPTED_HEIGHT / 1, ADAPTED_WIDTH / 1, 16],\n strides=[1, 2, 2, 1])\n", (39280, 39430), True, 'import tensorflow as tf\n'), ((39543, 39717), 'tensorflow.nn.conv2d_transpose', 'tf.nn.conv2d_transpose', (["blobs['blob167']", "self.weights['fuse_upsample_flow1to0_w']"], {'output_shape': '[batch_size, ADAPTED_HEIGHT, ADAPTED_WIDTH, 2]', 'strides': '[1, 2, 2, 1]'}), "(blobs['blob167'], self.weights[\n 'fuse_upsample_flow1to0_w'], output_shape=[batch_size, ADAPTED_HEIGHT,\n ADAPTED_WIDTH, 2], strides=[1, 2, 2, 1])\n", (39565, 39717), True, 'import tensorflow as tf\n'), ((39879, 39987), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob170']", "self.weights['fuse_interconv0_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob170'], self.weights['fuse_interconv0_w'], strides=[\n 1, 1, 1, 1], padding='SAME')\n", (39891, 39987), True, 'import tensorflow as tf\n'), ((40052, 40162), 'tensorflow.nn.conv2d', 'tf.nn.conv2d', (["blobs['blob171']", "self.weights['fuse__Convolution7_w']"], {'strides': '[1, 1, 1, 1]', 'padding': '"""SAME"""'}), "(blobs['blob171'], self.weights['fuse__Convolution7_w'],\n strides=[1, 1, 1, 1], padding='SAME')\n", 
(40064, 40162), True, 'import tensorflow as tf\n'), ((2294, 2339), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(x ** 2)'], {'axis': '(3)', 'keep_dims': '(True)'}), '(x ** 2, axis=3, keep_dims=True)\n', (2307, 2339), True, 'import tensorflow as tf\n'), ((2433, 2446), 'tensorflow.shape', 'tf.shape', (['im0'], {}), '(im0)\n', (2441, 2446), True, 'import tensorflow as tf\n'), ((2479, 2492), 'tensorflow.shape', 'tf.shape', (['im0'], {}), '(im0)\n', (2487, 2492), True, 'import tensorflow as tf\n'), ((2526, 2539), 'tensorflow.shape', 'tf.shape', (['im0'], {}), '(im0)\n', (2534, 2539), True, 'import tensorflow as tf\n'), ((5590, 5651), 'tensorflow.pad', 'tf.pad', (["blobs['conv3a']", '[[0, 0], [20, 20], [20, 20], [0, 0]]'], {}), "(blobs['conv3a'], [[0, 0], [20, 20], [20, 20], [0, 0]])\n", (5596, 5651), True, 'import tensorflow as tf\n'), ((5669, 5754), 'tensorflow.pad', 'tf.pad', (["blobs['conv3b']", '[[0, 0], [20 - di, 20 + di], [20 - dj, 20 + dj], [0, 0]]'], {}), "(blobs['conv3b'], [[0, 0], [20 - di, 20 + di], [20 - dj, 20 + dj], [0,\n 0]])\n", (5675, 5754), True, 'import tensorflow as tf\n'), ((2671, 2689), 'tensorflow.to_float', 'tf.to_float', (['width'], {}), '(width)\n', (2682, 2689), True, 'import tensorflow as tf\n'), ((2755, 2774), 'tensorflow.to_float', 'tf.to_float', (['height'], {}), '(height)\n', (2766, 2774), True, 'import tensorflow as tf\n'), ((5843, 5866), 'tensorflow.ones', 'tf.ones', (['[1, 1, 256, 1]'], {}), '([1, 1, 256, 1])\n', (5850, 5866), True, 'import tensorflow as tf\n')] |
"""
"""
import sys
import uuid
import base64
import fileinput
import datetime
from django.utils import timezone
from django.conf import settings
from django.shortcuts import get_object_or_404
from urlparse import urlparse, parse_qs
from APNSWrapper import *
from mdm.models import MDMDevice, DeviceCommand
def replaceAll(file, searchExp, replaceExp):
    """Replace every occurrence of searchExp with replaceExp in file, in place.

    fileinput with inplace=1 redirects stdout into the file being edited,
    so each line written to stdout replaces the original line.

    Fix: the fileinput stream was never closed; if an exception occurred
    mid-iteration, stdout stayed redirected and the backup file leaked.
    The try/finally guarantees fileinput.close() runs and stdout is restored.
    """
    fi = fileinput.input(file, inplace=1)
    try:
        for line in fi:
            if searchExp in line:
                line = line.replace(searchExp, replaceExp)
            sys.stdout.write(line)
    finally:
        fileinput.close()
def notify_device(device):
    """Send an empty 'mdm' push notification to wake the given MDM device.

    The device's stored token is base64-decoded and sent through APNS with
    the device's push magic as the 'mdm' property, per the MDM check-in
    protocol.
    """
    token = base64.b64decode(device.device_token)
    notification = APNSNotification()
    notification.token(token)
    notification.appendProperty(APNSProperty('mdm', str(device.push_magic)))
    sender = APNSNotificationWrapper(settings.APNS_CERT, False)
    sender.append(notification)
    sender.notify()
| [
"base64.b64decode",
"fileinput.input",
"sys.stdout.write"
] | [((371, 403), 'fileinput.input', 'fileinput.input', (['file'], {'inplace': '(1)'}), '(file, inplace=1)\n', (386, 403), False, 'import fileinput\n'), ((569, 606), 'base64.b64decode', 'base64.b64decode', (['device.device_token'], {}), '(device.device_token)\n', (585, 606), False, 'import base64\n'), ((498, 520), 'sys.stdout.write', 'sys.stdout.write', (['line'], {}), '(line)\n', (514, 520), False, 'import sys\n')] |
import math
import itertools
from operator import itemgetter
import json
import os
import random
from .geom import hflip_pattern, vflip_pattern, rot_pattern
from .patterns import (
get_pattern_size,
get_pattern_livecount,
get_grid_empty,
get_grid_pattern,
segment_pattern,
methuselah_quadrants_pattern,
pattern_union,
cloud_region,
)
from .utils import pattern2url, retry_on_failure
from .error import GollyXPatternsError, GollyXMapsError
##############
# Util methods
def get_rainbow_pattern_function_map():
    """Return the name -> generator mapping for rainbow (four-color) maps."""
    # Insertion order is kept identical to the historical literal, in case a
    # caller selects from the keys with a seeded RNG.
    pairs = [
        ("rainbowmath", rainbowmath_fourcolor),
        ("rainbow", rainbow_fourcolor),
        ("sunburst", sunburst_fourcolor),
        ("quadgaussian", quadgaussian_fourcolor),
        ("random", random_fourcolor),
        ("timebomb", timebomb_fourcolor),
        ("timebombredux", timebomb2_fourcolor),
        ("randommethuselahs", randommethuselahs_fourcolor),
        ("crabs", crabs_fourcolor),
        ("patiolights", patiolights_fourcolor),
        ("orchard", orchard_fourcolor),
        ("justyna", justyna_fourcolor),
        ("rabbits", rabbits_fourcolor),
        ("multum", multum_fourcolor),
        ("eights", eightx_fourcolor),
        # Need one more
    ]
    return dict(pairs)
def rainbow_jitteryrow_pattern(rows, cols, seed=None, methuselah=None, spacing=None):
    """
    Place a jittered row of methuselahs across the board, one vertical
    strip (quadrant) per team:

        +---------------+
        |Q1 |Q2 |Q3 |Q4 |
        +---------------+

    Parameters:
        rows, cols: grid dimensions
        seed: optional RNG seed for reproducibility
        methuselah: pattern name (defaults to "rheptomino")
        spacing: characteristic spacing L between shapes (defaults to 10)

    Returns a 4-tuple of pattern unions, one per team.
    """
    if seed is not None:
        random.seed(seed)

    # L is a characteristic length scale
    L = 10 if spacing is None else spacing
    if methuselah is None:
        methuselah = "rheptomino"

    centerx = cols // 2
    centery = rows // 2

    # Place one methuselah every L grid spaces,
    # up to the maximum multiple of 4 possible
    maxshapesperteam = (cols // 4) // L

    team_assignments = [0, 1, 2, 3]
    random.shuffle(team_assignments)
    rotdegs = [0, 90, 180, 270]
    patterns_list_all = [[], [], [], []]

    # x-offset formulas for the four strips (left to right); each takes the
    # per-shape random offset r drawn from [start, end].
    quadrant_xoffsets = [
        lambda r: centerx - centerx // 2 - r,  # Q1
        lambda r: centerx - r,                 # Q2
        lambda r: centerx + r,                 # Q3
        lambda r: centerx + centerx // 2 + r,  # Q4
    ]

    for i in range(maxshapesperteam):
        end = (i + 1) * L
        start = end - L // 2
        # One shape per strip per iteration.  The keyword-argument order
        # below matches the RNG draw order of the original (quadruplicated)
        # implementation, so seeded maps are unchanged.
        for q, xoff in enumerate(quadrant_xoffsets):
            pattern = get_grid_pattern(
                methuselah,
                rows,
                cols,
                xoffset=xoff(random.randint(start, end)),
                yoffset=centery + random.randint(-L, L),
                hflip=bool(random.getrandbits(1)),
                vflip=bool(random.getrandbits(1)),
                rotdeg=random.choice(rotdegs),
            )
            patterns_list_all[team_assignments[q]].append(pattern)

    return tuple(pattern_union(pl) for pl in patterns_list_all)
def rainbow_methuselah_quadrants_pattern(
    rows, cols, seed=None, methuselah_counts=None, fixed_methuselah=None
):
    """
    Add methuselahs to each quadrant, one quadrant per team.

    If the user does not specify any args,
    this fills the quadrants with lots of
    small methuselahs.

    The user can specify which methuselahs
    to use and how many to use, so e.g.
    can specify 1 methuselah per quadrant, etc.

    Parameters:
        rows, cols: grid dimensions
        seed: optional RNG seed for reproducibility
        methuselah_counts: allowed per-quadrant counts (each in 1,2,3,4,9,16)
        fixed_methuselah: if given, use only this named methuselah

    Returns a 4-tuple of pattern unions, one per team (live cells are
    distributed to teams in a serpentine order by pattern live count).

    Raises GollyXPatternsError for invalid counts or shapes that don't fit.
    """
    # set rng seed (optional)
    if seed is not None:
        random.seed(seed)

    small_methuselah_names = [
        "bheptomino",
        "cheptomino",
        "eheptomino",
        "piheptomino",
        "rpentomino",
    ]
    reg_methuselah_names = [
        "acorn",
        "bheptomino",
        "cheptomino",
        "eheptomino",
        "multuminparvo",
        "piheptomino",
        "rabbit",
        "rpentomino",
    ]

    # Big boards get denser defaults and are restricted to small shapes.
    BIGDIMLIMIT = 150
    mindim = min(rows, cols)

    if methuselah_counts is None:
        if mindim < BIGDIMLIMIT:
            methuselah_counts = [3, 4, 9]
        else:
            methuselah_counts = [3, 4, 9, 16]

    if fixed_methuselah is None:
        if mindim < BIGDIMLIMIT:
            methuselah_names = reg_methuselah_names + small_methuselah_names
        else:
            methuselah_names = small_methuselah_names
    else:
        methuselah_names = [fixed_methuselah]

    valid_mc = [1, 2, 3, 4, 9, 16]
    for mc in methuselah_counts:
        if mc not in valid_mc:
            # Fix: these were plain strings (missing the f prefix), and
            # joining ints directly would raise TypeError anyway.
            msg = f"Invalid methuselah counts passed: must be in {', '.join(str(m) for m in valid_mc)}\n"
            msg += f"you specified {', '.join(str(m) for m in methuselah_counts)}"
            raise GollyXPatternsError(msg)

    # Put a cluster of methuselahs in each quadrant,
    # one quadrant per team.
    # Procedure:
    # place random methuselah patterns in each quadrant corner
    # Store each quadrant and its upper left corner in (rows from top, cols from left) format
    quadrants = [
        (1, (0, cols // 2)),
        (2, (0, 0)),
        (3, (rows // 2, 0)),
        (4, (rows // 2, cols // 2)),
    ]

    rotdegs = [0, 90, 180, 270]
    all_methuselahs = []

    for iq, quad in enumerate(quadrants):
        count = random.choice(methuselah_counts)

        if count == 1:
            # Only one methuselah in this quadrant, so use the center
            jitterx = 4
            jittery = 4
            corner = quadrants[iq][1]
            y = corner[0] + rows // 4 + random.randint(-jittery, jittery)
            x = corner[1] + cols // 4 + random.randint(-jitterx, jitterx)
            meth = random.choice(methuselah_names)
            pattern = get_grid_pattern(
                meth,
                rows,
                cols,
                xoffset=x,
                yoffset=y,
                hflip=bool(random.getrandbits(1)),
                vflip=bool(random.getrandbits(1)),
                rotdeg=random.choice(rotdegs),
            )
            livecount = get_pattern_livecount(meth)
            all_methuselahs.append((livecount, pattern))

        elif count == 2 or count == 4:
            # Two or four methuselahs in this quadrant, so place at corners of a square
            # Form the square by cutting the quadrant into thirds
            if count == 4:
                jitterx = 3
                jittery = 3
            else:
                jitterx = 5
                jittery = 5
            corner = quadrants[iq][1]

            # Slices and partitions form the inside square
            nslices = 2
            nparts = nslices + 1
            # For count == 2, use either the main or the anti diagonal
            posdiag = bool(random.getrandbits(1))
            for a in range(1, nparts):
                for b in range(1, nparts):
                    proceed = False
                    if count == 2:
                        if (posdiag and a == b) or (
                            not posdiag and a == (nslices - b + 1)
                        ):
                            proceed = True
                    elif count == 4:
                        proceed = True
                    if proceed:
                        y = (
                            corner[0]
                            + a * ((rows // 2) // nparts)
                            + random.randint(-jittery, jittery)
                        )
                        x = (
                            corner[1]
                            + b * ((cols // 2) // nparts)
                            + random.randint(-jitterx, jitterx)
                        )
                        meth = random.choice(methuselah_names)
                        try:
                            pattern = get_grid_pattern(
                                meth,
                                rows,
                                cols,
                                xoffset=x,
                                yoffset=y,
                                hflip=bool(random.getrandbits(1)),
                                vflip=bool(random.getrandbits(1)),
                                rotdeg=random.choice(rotdegs),
                            )
                        except GollyXPatternsError:
                            raise GollyXPatternsError(
                                f"Error with methuselah {meth}: cannot fit"
                            )
                        livecount = get_pattern_livecount(meth)
                        all_methuselahs.append((livecount, pattern))

        elif count == 3 or count == 9:
            # Three or nine methuselahs, place these on a square with three points per side
            # or eight points total
            if count == 9:
                jitterx = 3
                jittery = 3
            else:
                jitterx = 5
                jittery = 5
            corner = quadrants[iq][1]
            nslices = 4

            for a in range(1, nslices):
                for b in range(1, nslices):
                    proceed = False
                    if count == 3:
                        if a == b:
                            proceed = True
                    elif count == 9:
                        proceed = True
                    if proceed:
                        y = (
                            corner[0]
                            + a * ((rows // 2) // nslices)
                            + random.randint(-jittery, jittery)
                        )
                        x = (
                            corner[1]
                            + b * ((cols // 2) // nslices)
                            + random.randint(-jitterx, jitterx)
                        )
                        meth = random.choice(methuselah_names)
                        try:
                            pattern = get_grid_pattern(
                                meth,
                                rows,
                                cols,
                                xoffset=x,
                                yoffset=y,
                                hflip=bool(random.getrandbits(1)),
                                vflip=bool(random.getrandbits(1)),
                                rotdeg=random.choice(rotdegs),
                            )
                        except GollyXPatternsError:
                            raise GollyXPatternsError(
                                f"Error with methuselah {meth}: cannot fit"
                            )
                        livecount = get_pattern_livecount(meth)
                        all_methuselahs.append((livecount, pattern))

        elif count == 16:
            # Sixteen methuselahs, place these on a 4x4 square
            jitterx = 2
            jittery = 2
            corner = quadrants[iq][1]
            nslices = 5

            for a in range(1, nslices):
                for b in range(1, nslices):
                    y = (
                        corner[0]
                        + a * ((rows // 2) // nslices)
                        + random.randint(-jittery, jittery)
                    )
                    x = (
                        corner[1]
                        + b * ((cols // 2) // nslices)
                        + random.randint(-jitterx, jitterx)
                    )
                    meth = random.choice(methuselah_names)
                    try:
                        pattern = get_grid_pattern(
                            meth,
                            rows,
                            cols,
                            xoffset=x,
                            yoffset=y,
                            hflip=bool(random.getrandbits(1)),
                            vflip=bool(random.getrandbits(1)),
                            rotdeg=random.choice(rotdegs),
                        )
                    except GollyXPatternsError:
                        raise GollyXPatternsError(
                            f"Error with methuselah {meth}: cannot fit"
                        )
                    livecount = get_pattern_livecount(meth)
                    all_methuselahs.append((livecount, pattern))

    random.shuffle(all_methuselahs)

    # Sort by number of live cells
    all_methuselahs.sort(key=itemgetter(0), reverse=True)

    # Deal patterns out to the four teams in serpentine order
    # (1 2 3 4 4 3 2 1 ...) so total live cells stay balanced.
    team1_patterns = []
    team2_patterns = []
    team3_patterns = []
    team4_patterns = []

    asc = [1, 2, 3, 4]
    ascrev = list(reversed(asc))
    serpentine_pattern = asc + ascrev

    for i, (_, methuselah_pattern) in enumerate(all_methuselahs):
        serpix = i % len(serpentine_pattern)
        serpteam = serpentine_pattern[serpix]
        if serpteam == 1:
            team1_patterns.append(methuselah_pattern)
        elif serpteam == 2:
            team2_patterns.append(methuselah_pattern)
        elif serpteam == 3:
            team3_patterns.append(methuselah_pattern)
        elif serpteam == 4:
            team4_patterns.append(methuselah_pattern)

    team1_pattern = pattern_union(team1_patterns)
    team2_pattern = pattern_union(team2_patterns)
    team3_pattern = pattern_union(team3_patterns)
    team4_pattern = pattern_union(team4_patterns)

    return team1_pattern, team2_pattern, team3_pattern, team4_pattern
#############
# Map methods
def random_fourcolor(rows, cols, seed=None):
    """
    Generate a random four-color list life initialization.

    Draws a random set of live cells (8-18% of the board), then splits
    them evenly into four point sets and renders each as a listlife URL.

    Returns: a tuple of four listlife strings, one per team.
    """
    if seed is not None:
        random.seed(seed)

    # Live-cell budget, rounded to a multiple of four so teams split evenly
    density = random.randint(8, 18) / 100.0
    ncells = rows * cols
    nlivecells = 4 * ((density * ncells) // 4)

    # Collect distinct live cells as (x, y) tuples
    points = set()
    while len(points) < nlivecells:
        randy = random.randint(0, rows - 1)
        randx = random.randint(0, cols - 1)
        points.add((randx, randy))
    points = list(points)

    quarter = len(points) // 4
    pattern_urls = []
    for team in range(4):
        # This team's quarter of the live cells
        team_points = set(points[team * quarter:(team + 1) * quarter])
        # Render the grid row by row: "o" alive, "." dead
        rowstrings = [
            "".join("o" if (x, y) in team_points else "." for x in range(cols))
            for y in range(rows)
        ]
        pattern_urls.append(pattern2url(rowstrings))

    return tuple(pattern_urls)
@retry_on_failure
def randommethuselahs_fourcolor(rows, cols, seed=None):
    """Four-color map: a random assortment of methuselahs in each quadrant."""
    if seed is not None:
        random.seed(seed)
    patterns = rainbow_methuselah_quadrants_pattern(rows, cols, seed)
    return (pattern2url(pat) for pat in patterns)
@retry_on_failure
def orchard_fourcolor(rows, cols, seed=None):
    """Four-color map: acorn methuselahs arranged in quadrant grids."""
    if seed is not None:
        random.seed(seed)
    # Only larger boards can fit the densest (16-per-quadrant) layout
    counts = [4, 9] if min(rows, cols) < 150 else [4, 9, 16]
    count = random.choice(counts)
    patterns = rainbow_methuselah_quadrants_pattern(
        rows, cols, seed, methuselah_counts=[count], fixed_methuselah="acorn"
    )
    return (pattern2url(p) for p in patterns)
@retry_on_failure
def justyna_fourcolor(rows, cols, seed=None):
    """Four-color map: one justyna methuselah per quadrant."""
    if seed is not None:
        random.seed(seed)
    # random.choice is kept even with a single option so the RNG sequence
    # matches the other map generators for a given seed
    count = random.choice([1])
    patterns = rainbow_methuselah_quadrants_pattern(
        rows, cols, seed, methuselah_counts=[count], fixed_methuselah="justyna"
    )
    return (pattern2url(p) for p in patterns)
@retry_on_failure
def rabbits_fourcolor(rows, cols, seed=None):
    """Four-color map: rabbit methuselahs in each quadrant."""
    if seed is not None:
        random.seed(seed)
    # Larger boards allow three per quadrant
    counts = [1, 2] if min(rows, cols) < 150 else [1, 2, 3]
    count = random.choice(counts)
    patterns = rainbow_methuselah_quadrants_pattern(
        rows, cols, seed, methuselah_counts=[count], fixed_methuselah="rabbit"
    )
    return (pattern2url(p) for p in patterns)
@retry_on_failure
def multum_fourcolor(rows, cols, seed=None):
    """Four-color map: multum in parvo methuselahs in each quadrant."""
    if seed is not None:
        random.seed(seed)
    # Larger boards allow more per quadrant
    counts = [1, 2] if min(rows, cols) < 150 else [2, 3, 4]
    count = random.choice(counts)
    patterns = rainbow_methuselah_quadrants_pattern(
        rows, cols, seed, methuselah_counts=[count], fixed_methuselah="multuminparvo"
    )
    return (pattern2url(p) for p in patterns)
@retry_on_failure
def eightx_fourcolor(rows, cols, seed=None):
    """Dispatch to a randomly chosen 'eight <shape>' jittered-row map."""
    # Order matches the original dict's insertion order so a given RNG
    # state selects the same generator
    generators = [
        _eightb_fourcolor,
        _eightc_fourcolor,
        _eighte_fourcolor,
        _eightr_fourcolor,
        _eightpi_fourcolor,
    ]
    return random.choice(generators)(rows, cols, seed)
def _eightb_fourcolor(rows, cols, seed=None):
    """Jittered row of B-heptominoes, one URL string per team."""
    if seed is not None:
        random.seed(seed)
    patterns = rainbow_jitteryrow_pattern(rows, cols, seed, "bheptomino")
    return (pattern2url(p) for p in patterns)
def _eightc_fourcolor(rows, cols, seed=None):
    """Jittered row of C-heptominoes, one URL string per team."""
    if seed is not None:
        random.seed(seed)
    patterns = rainbow_jitteryrow_pattern(rows, cols, seed, "cheptomino")
    return (pattern2url(p) for p in patterns)
def _eighte_fourcolor(rows, cols, seed=None):
    """Jittered row of E-heptominoes (tighter spacing), one URL per team."""
    if seed is not None:
        random.seed(seed)
    patterns = rainbow_jitteryrow_pattern(rows, cols, seed, "eheptomino", spacing=7)
    return (pattern2url(p) for p in patterns)
def _eightpi_fourcolor(rows, cols, seed=None):
    """Jittered row of pi-heptominoes, one URL string per team."""
    if seed is not None:
        random.seed(seed)
    patterns = rainbow_jitteryrow_pattern(rows, cols, seed, "piheptomino")
    return (pattern2url(p) for p in patterns)
def _eightr_fourcolor(rows, cols, seed=None):
    """Jittered row of R-pentominoes, one URL string per team."""
    if seed is not None:
        random.seed(seed)
    patterns = rainbow_jitteryrow_pattern(rows, cols, seed, "rpentomino")
    return (pattern2url(p) for p in patterns)
@retry_on_failure
def patiolights_fourcolor(rows, cols, seed=None):
    """
    Patio lights pattern: one horizontal "light string" per team — a solid
    line segment with 2x2 boxes (the lights) hung randomly above or below it.

    Fixes: the double loop that fills in the light string was duplicated
    verbatim (it rewrote the same cells a second time with no effect), and
    an unused inner helper (_get_bounds) was dead code; both removed.
    RNG draw order is unchanged, so seeded maps are identical.
    """
    if seed is not None:
        random.seed(seed)

    urls = []
    thickness = random.randint(2, 3)
    nteams = 4

    # Find the y locations of each light string:
    # Divide rows into nteams + 1 parts with nteams slices
    # and place the light strings at the slices, with vertical jitter.
    jittery = 5
    lightstring_ys = [
        ((i + 1) * rows) // (nteams + 1) + random.randint(-jittery, jittery)
        for i in range(nteams)
    ]
    # Randomize order of light string team assignments
    random.shuffle(lightstring_ys)

    for iteam in range(nteams):
        team_pattern = get_grid_empty(rows, cols, flat=False)

        # Assemble the light string: a solid horizontal band of height
        # thickness + 1 spanning the full width
        lightstring_y = lightstring_ys[iteam]
        for ix in range(0, cols):
            for iy in range(lightstring_y - 1, lightstring_y + thickness):
                team_pattern[iy][ix] = "o"

        # Add some lights to the string, offset a few rows above/below it
        jitterx = 4
        bounds = (lightstring_y - 1, lightstring_y + thickness)
        ylightstop = min(bounds) - random.randint(2, 3)
        ylightsbot = max(bounds) + random.randint(2, 3)
        ix = random.randint(4, 12)
        while ix < cols - 1:
            # Each light is a 2x2 block, hung below or above with 50/50 odds
            if random.random() < 0.50:
                team_pattern[ylightsbot][ix] = "o"
                team_pattern[ylightsbot][ix + 1] = "o"
                team_pattern[ylightsbot + 1][ix] = "o"
                team_pattern[ylightsbot + 1][ix + 1] = "o"
            else:
                team_pattern[ylightstop][ix] = "o"
                team_pattern[ylightstop][ix + 1] = "o"
                team_pattern[ylightstop - 1][ix] = "o"
                team_pattern[ylightstop - 1][ix + 1] = "o"
            ix += random.randint(10, 12) + random.randint(-jitterx, jitterx)

        urls.append(pattern2url(team_pattern))

    return tuple(urls)
@retry_on_failure
def rainbow_fourcolor(rows, cols, seed=None):
    """Four-color rainbow map: radial Gaussian bands, no vertical flip."""
    sunburst = False
    return _rainburst_fourcolor(rows, cols, seed, sunburst=sunburst)
@retry_on_failure
def sunburst_fourcolor(rows, cols, seed=None):
    """Four-color sunburst map: radial Gaussian bands with alternating flips."""
    sunburst = True
    return _rainburst_fourcolor(rows, cols, seed, sunburst=sunburst)
def _rainburst_fourcolor(rows, cols, seed=None, sunburst=False):
    """
    Create a Gaussian normal distribution in the top left and bottom right quadrants,
    then slice it into radial pieces, which makes a nice rainbow shape.

    Each of the 4 teams owns one slope band: (0, 1/g), (1/g, 1), (1, g),
    and (g, inf).  If sunburst is True, every other team's pattern is
    flipped vertically.  Returns a tuple of 4 pattern URLs, shuffled so the
    band-to-team mapping is random.
    """
    SMOL = 1e-12  # avoids division by zero when randx == centerx
    if seed is not None:
        random.seed(seed)
    # Algorithm:
    # generate Gaussian (x, y) points around the grid center,
    # compute the slope of each point relative to the center,
    # and keep the point for whichever team owns that slope band.
    density = random.randint(8, 18)/100.0
    nteams = 4
    ncells = rows * cols
    npointsperteam = (ncells//nteams)*density
    centerx = cols // 2
    centery = rows // 2
    g = 2.5
    # Lower bound of each team's band; the last band is open-ended above g.
    slope_checks = [
        0,
        1/g,
        1,
        g,
    ]
    urls = []
    for iteam in range(nteams):
        team_points = set()
        while len(team_points) < npointsperteam:
            randx = int(random.gauss(centerx, centerx // 2))
            randy = int(random.gauss(centery, centery // 2))
            slope = (randy - centery) / (randx - centerx + SMOL)
            # Keep the point if its slope falls in this team's band.
            # (The original spelled this as four identical if/elif branches.)
            if slope > slope_checks[iteam] and (
                iteam == nteams - 1 or slope < slope_checks[iteam + 1]
            ):
                team_points.add((randx, randy))
        # Render the team's points as a .o diagram
        team_pattern = []
        for y in range(rows):
            team_row = ["o" if (x, y) in team_points else "." for x in range(cols)]
            team_pattern.append("".join(team_row))
        if sunburst and iteam % 2 == 0:
            team_pattern = vflip_pattern(team_pattern)
        team_url = pattern2url(team_pattern)
        urls.append(team_url)
    random.shuffle(urls)
    return tuple(urls)
@retry_on_failure
def timebomb_fourcolor(rows, cols, seed=None):
    """Four-color timebomb map using the fixed default oscillator."""
    return _timebomb_fourcolor(rows, cols, seed=seed, revenge=False)
@retry_on_failure
def timebomb2_fourcolor(rows, cols, seed=None):
    """Four-color timebomb map drawing oscillators from the "revenge" pool."""
    return _timebomb_fourcolor(rows, cols, seed=seed, revenge=True)
def _timebomb_fourcolor(rows, cols, revenge, seed=None):
    """
    Give each of 4 teams one oscillator plus one "timebomb" pattern, placed
    in the four quadrants around the grid center.

    revenge : when True, the oscillator is drawn from a pool of five;
              otherwise the fixed "quadrupleburloaferimeter" is used.
    Returns a tuple of 4 pattern URLs in randomized team order.
    """
    if seed is not None:
        random.seed(seed)
    # Geometry
    # L = length scale for offsets from the grid center
    L = 20
    centerx = cols // 2
    centery = rows // 2
    # Each team gets one oscillator and one timebomb
    nteams = 4
    team_assignments = list(range(nteams))
    random.shuffle(team_assignments)
    def _get_oscillator_name():
        # Pool of oscillators in "revenge" mode, fixed choice otherwise
        if revenge:
            oscillators = ["airforce", "koksgalaxy", "dinnertable", "vring64", "harbor"]
            which_oscillator = random.choice(oscillators)
        else:
            which_oscillator = "quadrupleburloaferimeter"
        return which_oscillator
    rotdegs = [0, 90, 180, 270]
    urls = [None, None, None, None]
    for iteam in range(nteams):
        # Quadrant signs for offsets from the center:
        # QI: (+, +), QII: (-, +), QIII: (-, -), QIV: (+, -)
        a = 1 if iteam in (0, 3) else -1
        b = 1 if iteam in (0, 1) else -1
        # Oscillator at distance L, timebomb at distance 2L
        osc_x = centerx + a*L
        osc_y = centery + b*L
        bomb_x = centerx + 2*a*L
        bomb_y = centery + 2*b*L
        # jitter for patterns
        osc_jitter_x = 3
        osc_jitter_y = 3
        timebomb_jitter_x = 6
        timebomb_jitter_y = 6
        osc_pattern = get_grid_pattern(
            _get_oscillator_name(),
            rows,
            cols,
            xoffset=osc_x + random.randint(-osc_jitter_x, osc_jitter_x),
            yoffset=osc_y + random.randint(-osc_jitter_y, osc_jitter_y),
            rotdeg=random.choice(rotdegs),
        )
        bomb_pattern = get_grid_pattern(
            "timebomb",
            rows,
            cols,
            xoffset=bomb_x + random.randint(-timebomb_jitter_x, timebomb_jitter_x),
            yoffset=bomb_y + random.randint(-timebomb_jitter_y, timebomb_jitter_y),
            rotdeg=random.choice(rotdegs),
        )
        team_pattern = pattern_union([osc_pattern, bomb_pattern])
        team_url = pattern2url(team_pattern)
        team_ix = team_assignments[iteam]
        urls[team_ix] = team_url
    return tuple(urls)
def crabs_fourcolor(rows, cols, seed=None):
    """
    Scatter eight "crabstretcher" patterns on a 4x2 grid of anchor points,
    dealing them out round-robin to four randomly ordered teams.
    Returns a tuple of 4 pattern URLs, one per team.
    """
    if seed is not None:
        random.seed(seed)
    rotdegs = [0, 90, 180, 270]
    jitter = 1
    # 8 crabs total, anchored at 4 columns x 2 rows
    anchor_ys = [rows//4, 3*rows//4]
    anchor_xs = [cols//5, 2*cols//5, 3*cols//5, 4*cols//5]
    nteams = 4
    team_assignments = list(range(nteams))
    random.shuffle(team_assignments)
    crab_patterns = [[], [], [], []]
    for count, (anchor_x, anchor_y) in enumerate(itertools.product(anchor_xs, anchor_ys)):
        # Round-robin: every 4th crab lands on the same team slot
        team_ix = team_assignments[count % 4]
        crab_x = anchor_x + random.randint(-jitter, jitter)
        crab_y = anchor_y + random.randint(-jitter, jitter)
        flip_h = random.random() < 0.5
        flip_v = random.random() < 0.5
        spin = random.choice(rotdegs)
        crab = get_grid_pattern(
            "crabstretcher",
            rows,
            cols,
            xoffset=crab_x,
            yoffset=crab_y,
            hflip=flip_h,
            vflip=flip_v,
            rotdeg=spin,
        )
        crab_patterns[team_ix].append(crab)
    # Merge each team's crabs into one pattern and serialize
    pattern_unions = [pattern_union(team_crabs) for team_crabs in crab_patterns]
    return tuple(pattern2url(pu) for pu in pattern_unions)
def quadgaussian_fourcolor(rows, cols, seed=None):
    """
    One Gaussian blob of live cells per quadrant, one quadrant per team.
    Teams never overlap: a cell claimed by one team is skipped by the rest.
    Returns a tuple of 4 pattern URLs, one per team.
    """
    if seed is not None:
        random.seed(seed)
    # Lower bound of 0.10, upper bound of 0.15
    density = 0.10 + random.random() * 0.05
    ncells = rows * cols
    # Round down to a multiple of 4 so cells split evenly across teams.
    # int() fixes the original float arithmetic (float // float yields a
    # float); the values are identical, only the type changes.
    nlivecells = int(ncells * density) // 4 * 4
    nlivecellspt = nlivecells // 4
    # Variable blobbiness
    stdx = cols // random.randint(8, 16)
    stdy = rows // random.randint(8, 16)
    jitter = 5
    nteams = 4
    team_assignments = list(range(nteams))
    random.shuffle(team_assignments)
    centerxs = [cols//4, 3*cols//4]
    centerys = [rows//4, 3*rows//4]
    urls = [None, None, None, None]
    # Cells already claimed by any team, to prevent overlap
    master_points = set()
    for i, (centerx, centery) in enumerate(itertools.product(centerxs, centerys)):
        team_ix = team_assignments[i]
        cx = centerx + random.randint(-jitter, jitter)
        cy = centery + random.randint(-jitter, jitter)
        team_points = set()
        while len(team_points) < nlivecellspt:
            randx = int(random.gauss(cx, stdx))
            randy = int(random.gauss(cy, stdy))
            # Keep only in-bounds cells not already owned by another team
            if 0 <= randx < cols and 0 <= randy < rows:
                if (randx, randy) not in master_points:
                    team_points.add((randx, randy))
                    master_points.add((randx, randy))
        # Assemble the circle dot diagram for team
        team_pattern = []
        for y in range(rows):
            row = "".join("o" if (x, y) in team_points else "." for x in range(cols))
            team_pattern.append(row)
        team_url = pattern2url(team_pattern)
        urls[team_ix] = team_url
    return tuple(urls)
#@retry_on_failure
def rainbowmath_fourcolor(rows, cols, seed=None):
    """
    Four-color map built from the zero set of a randomly chosen bitwise /
    modular expression f(x, y): cells where f == 0 are live and are dealt
    out to the four teams by _expression_pattern.

    Returns a tuple of 4 pattern URLs.
    Raises GollyXPatternsError if the chosen expression has no roots on the grid.
    """
    if seed is not None:
        random.seed(seed)
    def is_prime(n):
        # Deterministic trial-division primality test (6k +/- 1 optimization)
        n = abs(n)
        if n == 2 or n == 3: return True
        if n < 2 or n%2 == 0: return False
        if n < 9: return True
        if n%3 == 0: return False
        r = int(n**0.5)
        # since all primes > 3 are of the form 6n ± 1
        # start with f=5 (which is prime)
        # and test f, f+2 for being prime
        # then loop by 6.
        f = 5
        while f <= r:
            if n % f == 0: return False
            if n % (f+2) == 0: return False
            f += 6
        return True
    def is_not_prime(n):
        return not is_prime(n)
    # Random choice of which form to use
    coin = random.randint(1,8)
    if coin == 1:
        p = random.choice([k*k for k in [5, 7, 9, 11]])
        f = lambda x, y: int(is_not_prime((x*x & y*y) % p))
    elif coin == 2:
        # Linked diagonals of boxes
        # NOTE(review): b is drawn but never used in f (both terms use a);
        # possibly intended as y//b -- confirm before changing, since
        # removing the draw would also shift the seeded random sequence.
        ab = [3, 4, 5]
        a = random.choice(ab)
        b = random.choice(ab)
        cs = [16, 18, 20, 22]
        c = random.choice(cs)
        p = 7
        f = lambda x, y: int((x//a ^ y//a)*c % p)
    elif coin == 3:
        # Linked diagonals of very large boxes
        # NOTE(review): b unused here too, same as the coin == 2 branch.
        ab = [9, 10, 11]
        a = random.choice(ab)
        b = random.choice(ab)
        cs = [16, 18, 20, 22]
        c = random.choice(cs)
        p = 7
        f = lambda x, y: int((x//a ^ y//a)*c % p)
    elif coin == 4:
        # Sterpinsky triangles
        ps = [7, 11, 13, 15, 35, 37]
        p = random.choice(ps)
        f = lambda x, y: int((x & y) % p)
    elif coin == 5:
        # This is a one-off that's in perfect sync and makes wild patterns
        a = 3
        b = 3
        p = 99
        f = lambda x, y: int((a**x)%p & (b**y)%p)
    elif coin == 6:
        a = random.randint(1,10)
        b = random.randint(1,10)
        p = 99
        f = lambda x, y: int(is_not_prime((a*x & b*y) % p))
    elif coin == 7:
        ps = [81, 83, 85, 87, 89, 91, 93, 95, 97, 99]
        p = random.choice(ps)
        f = lambda x, y: int(is_not_prime((x//(y+1) ^ y) % p))
    elif coin == 8:
        ps = [69, 99, 299, 699, 999]
        p = random.choice(ps)
        f = lambda x, y: int(is_not_prime((x*x//(y+1)) % p))
    # No translation of the expression's origin (hooks exist if ever needed)
    xoffset = 0
    yoffset = 0
    team_patterns = _expression_pattern(
        rows,
        cols,
        seed,
        f,
        xoffset=xoffset,
        yoffset=yoffset,
    )
    urls = [pattern2url(pat) for pat in team_patterns]
    # pattern2url returns "[]" for an empty grid; reject degenerate choices
    for url in urls:
        if url == "[]":
            raise GollyXPatternsError("Error with bitfield: everything is empty")
    return tuple(urls)
def _expression_pattern(
    rows,
    cols,
    seed,
    f_handle,
    xoffset=0,
    yoffset=0,
):
    """
    Rasterize the roots of f_handle onto four team grids.

    A cell (x, y) is alive when f_handle(x - xoffset, y - yoffset) == 0.
    Live cells are shuffled and dealt to the four teams in a serpentine
    order (e.g. 0,1,2,3,3,2,1,0,...) so each team gets a similar share.
    Returns a list of four .o diagrams (lists of lists of single chars).
    """
    nteams = 4
    # One empty .o grid per team (flat=False: list of lists of chars)
    team_patterns = [get_grid_empty(rows, cols, flat=False) for _ in range(nteams)]
    # Every (x, y) where the (shifted) expression evaluates to zero
    coordinates = [
        (xtrue, ytrue)
        for xtrue in range(0, cols)
        for ytrue in range(0, rows)
        if f_handle(xtrue - xoffset, ytrue - yoffset) == 0
    ]
    # Shuffle live cell cordinates
    random.shuffle(coordinates)
    # Deal live cells to teams in a randomized serpentine order
    team_order = list(range(nteams))
    random.shuffle(team_order)
    serpentine_pattern = list(team_order) + list(reversed(team_order))
    for i, (x, y) in enumerate(coordinates):
        team_ix = serpentine_pattern[i % len(serpentine_pattern)]
        team_patterns[team_ix][y][x] = "o"
    return team_patterns
| [
"random.choice",
"random.shuffle",
"itertools.product",
"random.seed",
"random.getrandbits",
"operator.itemgetter",
"random.random",
"random.randint",
"random.gauss"
] | [((1805, 1837), 'random.shuffle', 'random.shuffle', (['team_assignments'], {}), '(team_assignments)\n', (1819, 1837), False, 'import random\n'), ((13654, 13685), 'random.shuffle', 'random.shuffle', (['all_methuselahs'], {}), '(all_methuselahs)\n', (13668, 13685), False, 'import random\n'), ((16746, 16763), 'random.choice', 'random.choice', (['mc'], {}), '(mc)\n', (16759, 16763), False, 'import random\n'), ((17108, 17125), 'random.choice', 'random.choice', (['mc'], {}), '(mc)\n', (17121, 17125), False, 'import random\n'), ((17562, 17579), 'random.choice', 'random.choice', (['mc'], {}), '(mc)\n', (17575, 17579), False, 'import random\n'), ((18013, 18030), 'random.choice', 'random.choice', (['mc'], {}), '(mc)\n', (18026, 18030), False, 'import random\n'), ((20069, 20089), 'random.randint', 'random.randint', (['(2)', '(3)'], {}), '(2, 3)\n', (20083, 20089), False, 'import random\n'), ((20472, 20502), 'random.shuffle', 'random.shuffle', (['lightstring_ys'], {}), '(lightstring_ys)\n', (20486, 20502), False, 'import random\n'), ((24658, 24678), 'random.shuffle', 'random.shuffle', (['urls'], {}), '(urls)\n', (24672, 24678), False, 'import random\n'), ((25330, 25362), 'random.shuffle', 'random.shuffle', (['team_assignments'], {}), '(team_assignments)\n', (25344, 25362), False, 'import random\n'), ((27575, 27607), 'random.shuffle', 'random.shuffle', (['team_assignments'], {}), '(team_assignments)\n', (27589, 27607), False, 'import random\n'), ((28980, 29012), 'random.shuffle', 'random.shuffle', (['team_assignments'], {}), '(team_assignments)\n', (28994, 29012), False, 'import random\n'), ((31073, 31093), 'random.randint', 'random.randint', (['(1)', '(8)'], {}), '(1, 8)\n', (31087, 31093), False, 'import random\n'), ((33759, 33786), 'random.shuffle', 'random.shuffle', (['coordinates'], {}), '(coordinates)\n', (33773, 33786), False, 'import random\n'), ((33898, 33924), 'random.shuffle', 'random.shuffle', (['team_order'], {}), '(team_order)\n', (33912, 33924), False, 
'import random\n'), ((1328, 1345), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1339, 1345), False, 'import random\n'), ((5127, 5144), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (5138, 5144), False, 'import random\n'), ((6805, 6837), 'random.choice', 'random.choice', (['methuselah_counts'], {}), '(methuselah_counts)\n', (6818, 6837), False, 'import random\n'), ((15163, 15180), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (15174, 15180), False, 'import random\n'), ((15196, 15217), 'random.randint', 'random.randint', (['(8)', '(18)'], {}), '(8, 18)\n', (15210, 15217), False, 'import random\n'), ((15371, 15398), 'random.randint', 'random.randint', (['(0)', '(rows - 1)'], {}), '(0, rows - 1)\n', (15385, 15398), False, 'import random\n'), ((15415, 15442), 'random.randint', 'random.randint', (['(0)', '(cols - 1)'], {}), '(0, cols - 1)\n', (15429, 15442), False, 'import random\n'), ((16351, 16368), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (16362, 16368), False, 'import random\n'), ((16610, 16627), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (16621, 16627), False, 'import random\n'), ((17063, 17080), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (17074, 17080), False, 'import random\n'), ((17427, 17444), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (17438, 17444), False, 'import random\n'), ((17879, 17896), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (17890, 17896), False, 'import random\n'), ((18669, 18686), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (18680, 18686), False, 'import random\n'), ((18906, 18923), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (18917, 18923), False, 'import random\n'), ((19143, 19160), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (19154, 19160), False, 'import random\n'), ((19392, 19409), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (19403, 19409), False, 'import random\n'), 
((19630, 19647), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (19641, 19647), False, 'import random\n'), ((20019, 20036), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (20030, 20036), False, 'import random\n'), ((21417, 21438), 'random.randint', 'random.randint', (['(4)', '(12)'], {}), '(4, 12)\n', (21431, 21438), False, 'import random\n'), ((22704, 22721), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (22715, 22721), False, 'import random\n'), ((22886, 22907), 'random.randint', 'random.randint', (['(8)', '(18)'], {}), '(8, 18)\n', (22900, 22907), False, 'import random\n'), ((25068, 25085), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (25079, 25085), False, 'import random\n'), ((27330, 27347), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (27341, 27347), False, 'import random\n'), ((27690, 27727), 'itertools.product', 'itertools.product', (['centerxs', 'centerys'], {}), '(centerxs, centerys)\n', (27707, 27727), False, 'import itertools\n'), ((28580, 28597), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (28591, 28597), False, 'import random\n'), ((28839, 28860), 'random.randint', 'random.randint', (['(8)', '(16)'], {}), '(8, 16)\n', (28853, 28860), False, 'import random\n'), ((28879, 28900), 'random.randint', 'random.randint', (['(8)', '(16)'], {}), '(8, 16)\n', (28893, 28900), False, 'import random\n'), ((29194, 29231), 'itertools.product', 'itertools.product', (['centerxs', 'centerys'], {}), '(centerxs, centerys)\n', (29211, 29231), False, 'import itertools\n'), ((30409, 30426), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (30420, 30426), False, 'import random\n'), ((31124, 31171), 'random.choice', 'random.choice', (['[(k * k) for k in [5, 7, 9, 11]]'], {}), '([(k * k) for k in [5, 7, 9, 11]])\n', (31137, 31171), False, 'import random\n'), ((7190, 7221), 'random.choice', 'random.choice', (['methuselah_names'], {}), '(methuselah_names)\n', (7203, 7221), False, 'import random\n'), 
((13751, 13764), 'operator.itemgetter', 'itemgetter', (['(0)'], {}), '(0)\n', (13761, 13764), False, 'from operator import itemgetter\n'), ((20341, 20374), 'random.randint', 'random.randint', (['(-jittery)', 'jittery'], {}), '(-jittery, jittery)\n', (20355, 20374), False, 'import random\n'), ((21334, 21354), 'random.randint', 'random.randint', (['(2)', '(3)'], {}), '(2, 3)\n', (21348, 21354), False, 'import random\n'), ((21383, 21403), 'random.randint', 'random.randint', (['(2)', '(3)'], {}), '(2, 3)\n', (21397, 21403), False, 'import random\n'), ((25536, 25562), 'random.choice', 'random.choice', (['oscillators'], {}), '(oscillators)\n', (25549, 25562), False, 'import random\n'), ((27783, 27814), 'random.randint', 'random.randint', (['(-jitter)', 'jitter'], {}), '(-jitter, jitter)\n', (27797, 27814), False, 'import random\n'), ((27847, 27878), 'random.randint', 'random.randint', (['(-jitter)', 'jitter'], {}), '(-jitter, jitter)\n', (27861, 27878), False, 'import random\n'), ((28667, 28682), 'random.random', 'random.random', ([], {}), '()\n', (28680, 28682), False, 'import random\n'), ((29297, 29328), 'random.randint', 'random.randint', (['(-jitter)', 'jitter'], {}), '(-jitter, jitter)\n', (29311, 29328), False, 'import random\n'), ((29352, 29383), 'random.randint', 'random.randint', (['(-jitter)', 'jitter'], {}), '(-jitter, jitter)\n', (29366, 29383), False, 'import random\n'), ((31321, 31338), 'random.choice', 'random.choice', (['ab'], {}), '(ab)\n', (31334, 31338), False, 'import random\n'), ((31351, 31368), 'random.choice', 'random.choice', (['ab'], {}), '(ab)\n', (31364, 31368), False, 'import random\n'), ((31412, 31429), 'random.choice', 'random.choice', (['cs'], {}), '(cs)\n', (31425, 31429), False, 'import random\n'), ((2617, 2639), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (2630, 2639), False, 'import random\n'), ((3179, 3201), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (3192, 3201), False, 'import 
random\n'), ((3741, 3763), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (3754, 3763), False, 'import random\n'), ((4318, 4340), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (4331, 4340), False, 'import random\n'), ((7062, 7095), 'random.randint', 'random.randint', (['(-jittery)', 'jittery'], {}), '(-jittery, jittery)\n', (7076, 7095), False, 'import random\n'), ((7136, 7169), 'random.randint', 'random.randint', (['(-jitterx)', 'jitterx'], {}), '(-jitterx, jitterx)\n', (7150, 7169), False, 'import random\n'), ((21483, 21498), 'random.random', 'random.random', ([], {}), '()\n', (21496, 21498), False, 'import random\n'), ((21983, 22005), 'random.randint', 'random.randint', (['(10)', '(12)'], {}), '(10, 12)\n', (21997, 22005), False, 'import random\n'), ((22008, 22041), 'random.randint', 'random.randint', (['(-jitterx)', 'jitterx'], {}), '(-jitterx, jitterx)\n', (22022, 22041), False, 'import random\n'), ((23357, 23392), 'random.gauss', 'random.gauss', (['centerx', '(centerx // 2)'], {}), '(centerx, centerx // 2)\n', (23369, 23392), False, 'import random\n'), ((23418, 23453), 'random.gauss', 'random.gauss', (['centery', '(centery // 2)'], {}), '(centery, centery // 2)\n', (23430, 23453), False, 'import random\n'), ((26681, 26703), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (26694, 26703), False, 'import random\n'), ((27004, 27026), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (27017, 27026), False, 'import random\n'), ((28149, 28171), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (28162, 28171), False, 'import random\n'), ((29484, 29506), 'random.gauss', 'random.gauss', (['cx', 'stdx'], {}), '(cx, stdx)\n', (29496, 29506), False, 'import random\n'), ((29532, 29554), 'random.gauss', 'random.gauss', (['cy', 'stdy'], {}), '(cy, stdy)\n', (29544, 29554), False, 'import random\n'), ((31602, 31619), 'random.choice', 'random.choice', (['ab'], {}), '(ab)\n', 
(31615, 31619), False, 'import random\n'), ((31632, 31649), 'random.choice', 'random.choice', (['ab'], {}), '(ab)\n', (31645, 31649), False, 'import random\n'), ((31693, 31710), 'random.choice', 'random.choice', (['cs'], {}), '(cs)\n', (31706, 31710), False, 'import random\n'), ((2423, 2449), 'random.randint', 'random.randint', (['start', 'end'], {}), '(start, end)\n', (2437, 2449), False, 'import random\n'), ((2481, 2502), 'random.randint', 'random.randint', (['(-L)', 'L'], {}), '(-L, L)\n', (2495, 2502), False, 'import random\n'), ((2527, 2548), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (2545, 2548), False, 'import random\n'), ((2574, 2595), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (2592, 2595), False, 'import random\n'), ((2985, 3011), 'random.randint', 'random.randint', (['start', 'end'], {}), '(start, end)\n', (2999, 3011), False, 'import random\n'), ((3043, 3064), 'random.randint', 'random.randint', (['(-L)', 'L'], {}), '(-L, L)\n', (3057, 3064), False, 'import random\n'), ((3089, 3110), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (3107, 3110), False, 'import random\n'), ((3136, 3157), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (3154, 3157), False, 'import random\n'), ((3547, 3573), 'random.randint', 'random.randint', (['start', 'end'], {}), '(start, end)\n', (3561, 3573), False, 'import random\n'), ((3605, 3626), 'random.randint', 'random.randint', (['(-L)', 'L'], {}), '(-L, L)\n', (3619, 3626), False, 'import random\n'), ((3651, 3672), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (3669, 3672), False, 'import random\n'), ((3698, 3719), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (3716, 3719), False, 'import random\n'), ((4124, 4150), 'random.randint', 'random.randint', (['start', 'end'], {}), '(start, end)\n', (4138, 4150), False, 'import random\n'), ((4182, 4203), 'random.randint', 'random.randint', (['(-L)', 
'L'], {}), '(-L, L)\n', (4196, 4203), False, 'import random\n'), ((4228, 4249), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (4246, 4249), False, 'import random\n'), ((4275, 4296), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (4293, 4296), False, 'import random\n'), ((7508, 7530), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (7521, 7530), False, 'import random\n'), ((8192, 8213), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (8210, 8213), False, 'import random\n'), ((26544, 26587), 'random.randint', 'random.randint', (['(-osc_jitter_x)', 'osc_jitter_x'], {}), '(-osc_jitter_x, osc_jitter_x)\n', (26558, 26587), False, 'import random\n'), ((26617, 26660), 'random.randint', 'random.randint', (['(-osc_jitter_y)', 'osc_jitter_y'], {}), '(-osc_jitter_y, osc_jitter_y)\n', (26631, 26660), False, 'import random\n'), ((26846, 26899), 'random.randint', 'random.randint', (['(-timebomb_jitter_x)', 'timebomb_jitter_x'], {}), '(-timebomb_jitter_x, timebomb_jitter_x)\n', (26860, 26899), False, 'import random\n'), ((26930, 26983), 'random.randint', 'random.randint', (['(-timebomb_jitter_y)', 'timebomb_jitter_y'], {}), '(-timebomb_jitter_y, timebomb_jitter_y)\n', (26944, 26983), False, 'import random\n'), ((28063, 28078), 'random.random', 'random.random', ([], {}), '()\n', (28076, 28078), False, 'import random\n'), ((28106, 28121), 'random.random', 'random.random', ([], {}), '()\n', (28119, 28121), False, 'import random\n'), ((31880, 31897), 'random.choice', 'random.choice', (['ps'], {}), '(ps)\n', (31893, 31897), False, 'import random\n'), ((7410, 7431), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (7428, 7431), False, 'import random\n'), ((7461, 7482), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (7479, 7482), False, 'import random\n'), ((9133, 9164), 'random.choice', 'random.choice', (['methuselah_names'], {}), '(methuselah_names)\n', (9146, 
9164), False, 'import random\n'), ((32164, 32185), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (32178, 32185), False, 'import random\n'), ((32197, 32218), 'random.randint', 'random.randint', (['(1)', '(10)'], {}), '(1, 10)\n', (32211, 32218), False, 'import random\n'), ((8825, 8858), 'random.randint', 'random.randint', (['(-jittery)', 'jittery'], {}), '(-jittery, jittery)\n', (8839, 8858), False, 'import random\n'), ((9041, 9074), 'random.randint', 'random.randint', (['(-jitterx)', 'jitterx'], {}), '(-jitterx, jitterx)\n', (9055, 9074), False, 'import random\n'), ((11224, 11255), 'random.choice', 'random.choice', (['methuselah_names'], {}), '(methuselah_names)\n', (11237, 11255), False, 'import random\n'), ((12826, 12857), 'random.choice', 'random.choice', (['methuselah_names'], {}), '(methuselah_names)\n', (12839, 12857), False, 'import random\n'), ((32384, 32401), 'random.choice', 'random.choice', (['ps'], {}), '(ps)\n', (32397, 32401), False, 'import random\n'), ((10915, 10948), 'random.randint', 'random.randint', (['(-jittery)', 'jittery'], {}), '(-jittery, jittery)\n', (10929, 10948), False, 'import random\n'), ((11132, 11165), 'random.randint', 'random.randint', (['(-jitterx)', 'jitterx'], {}), '(-jitterx, jitterx)\n', (11146, 11165), False, 'import random\n'), ((12545, 12578), 'random.randint', 'random.randint', (['(-jittery)', 'jittery'], {}), '(-jittery, jittery)\n', (12559, 12578), False, 'import random\n'), ((12742, 12775), 'random.randint', 'random.randint', (['(-jitterx)', 'jitterx'], {}), '(-jitterx, jitterx)\n', (12756, 12775), False, 'import random\n'), ((32538, 32555), 'random.choice', 'random.choice', (['ps'], {}), '(ps)\n', (32551, 32555), False, 'import random\n'), ((9624, 9646), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (9637, 9646), False, 'import random\n'), ((9494, 9515), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (9512, 9515), False, 'import random\n'), ((9561, 
9582), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (9579, 9582), False, 'import random\n'), ((11715, 11737), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (11728, 11737), False, 'import random\n'), ((13277, 13299), 'random.choice', 'random.choice', (['rotdegs'], {}), '(rotdegs)\n', (13290, 13299), False, 'import random\n'), ((11585, 11606), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (11603, 11606), False, 'import random\n'), ((11652, 11673), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (11670, 11673), False, 'import random\n'), ((13155, 13176), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (13173, 13176), False, 'import random\n'), ((13218, 13239), 'random.getrandbits', 'random.getrandbits', (['(1)'], {}), '(1)\n', (13236, 13239), False, 'import random\n')] |
'''
Created on Jun 21, 2020
@author: ballance
'''
import vsc
from vsc_test_case import VscTestCase
from vsc.visitors.model_pretty_printer import ModelPrettyPrinter
class TestListScalar(VscTestCase):
    """Tests for scalar rand-list types (fixed- and random-size) in PyVSC."""

    # NOTE(review): this class-level item is never referenced by any test
    # below -- looks like a leftover or template; confirm before removing.
    @vsc.randobj
    class my_item_c(object):
        def __init__(self):
            self.fixed = vsc.rand_list_t(vsc.bit_t(8), sz=4)
            self.dynamic = vsc.randsz_list_t(vsc.bit_t(8))
            self.queue = vsc.randsz_list_t(vsc.bit_t(8))
    def test_randsz_smoke(self):
        """Random-size list: size constraint plus a relation between elements."""
        @vsc.randobj
        class my_item_c(object):
            def __init__(self):
                self.l = vsc.randsz_list_t(vsc.uint8_t())
            @vsc.constraint
            def l_c(self):
                # size in [2, 10]; second element is one more than the first
                self.l.size in vsc.rangelist(vsc.rng(2,10))
                self.l[1] == (self.l[0]+1)
        it = my_item_c()
        it.randomize()
        print("it.l.size=" + str(it.l.size))
        for i,v in enumerate(it.l):
            print("v[" + str(i) + "] = " + str(v))
        self.assertEqual(it.l[1], it.l[0]+1)
    def test_randsz_len(self):
        """Random-size list: len() of the solved list honors the size bounds."""
        @vsc.randobj
        class my_item_c(object):
            def __init__(self):
                self.l = vsc.randsz_list_t(vsc.uint8_t())
            @vsc.constraint
            def l_c(self):
                self.l.size in vsc.rangelist(vsc.rng(2,10))
                self.l[1] == (self.l[0]+1)
        it = my_item_c()
        it.randomize()
        self.assertGreaterEqual(len(it.l), 2)
        self.assertLessEqual(len(it.l), 10)
        print("it.l.size=" + str(it.l.size))
        for i,v in enumerate(it.l):
            print("v[" + str(i) + "] = " + str(v))
        self.assertEqual(it.l[1], it.l[0]+1)
    def test_randsz_foreach_idx(self):
        """foreach with idx=True over a random-size list: each element is prev+1."""
        @vsc.randobj
        class my_item_c(object):
            def __init__(self):
                self.l = vsc.randsz_list_t(vsc.uint8_t())
                self.a = vsc.rand_uint8_t()
            @vsc.constraint
            def l_c(self):
                self.l.size in vsc.rangelist(vsc.rng(2,10))
                # idx-based foreach (it=False): constrain by index, guard idx 0
                with vsc.foreach(self.l, it=False, idx=True) as idx:
                    with vsc.if_then(idx > 0):
                        self.l[idx] == self.l[idx-1]+1
        it = my_item_c()
        it.randomize()
        for i in range(len(it.l)):
            if i > 0:
                self.assertEqual(it.l[i], it.l[i-1]+1)
    def test_fixedsz_foreach_idx(self):
        """foreach over a fixed (non-rand) list excludes its values from a."""
        @vsc.randobj
        class my_item_c(object):
            def __init__(self):
                self.a = vsc.rand_uint8_t()
                self.b = vsc.rand_uint8_t()
                # NOTE(review): reassigning the list_t field with a plain
                # Python list appears to populate its contents -- confirm
                # that this relies on randobj attribute-assignment support.
                self.temp = vsc.list_t(vsc.uint8_t())
                self.temp = [1,3,4,12,13,14]
            @vsc.constraint
            def ab_c(self):
                # a in {1,2,3} but excluded from temp's values => a must be 2
                self.a in vsc.rangelist(1,2,3)
                with vsc.foreach(self.temp, idx=True) as i:
                    self.a != self.temp[i]
        it = my_item_c()
        for i in range(10):
            it.randomize()
            self.assertEqual(it.a, 2)
    # "disabled_" prefix keeps this out of test collection; sum constraints
    # on lists are apparently not working yet.
    def disabled_test_sum_simple(self):
        """List sum constraint: five nonzero elements summing to 5."""
        @vsc.randobj
        class my_item_c(object):
            def __init__(self):
                self.l = vsc.rand_list_t(vsc.uint8_t(), sz=5)
                self.a = vsc.rand_uint8_t()
            @vsc.constraint
            def sum_c(self):
                self.l.sum == 5
                with vsc.foreach(self.l) as it:
                    it != 0
        it = my_item_c()
        it.randomize()
        print("Model: " + ModelPrettyPrinter.print(it.get_model()))
        self.assertEqual(it.l.sum, 5)
| [
"vsc.bit_t",
"vsc.rng",
"vsc.rand_uint8_t",
"vsc.rangelist",
"vsc.uint8_t",
"vsc.if_then",
"vsc.foreach"
] | [((330, 342), 'vsc.bit_t', 'vsc.bit_t', (['(8)'], {}), '(8)\n', (339, 342), False, 'import vsc\n'), ((395, 407), 'vsc.bit_t', 'vsc.bit_t', (['(8)'], {}), '(8)\n', (404, 407), False, 'import vsc\n'), ((452, 464), 'vsc.bit_t', 'vsc.bit_t', (['(8)'], {}), '(8)\n', (461, 464), False, 'import vsc\n'), ((2126, 2144), 'vsc.rand_uint8_t', 'vsc.rand_uint8_t', ([], {}), '()\n', (2142, 2144), False, 'import vsc\n'), ((2851, 2869), 'vsc.rand_uint8_t', 'vsc.rand_uint8_t', ([], {}), '()\n', (2867, 2869), False, 'import vsc\n'), ((2895, 2913), 'vsc.rand_uint8_t', 'vsc.rand_uint8_t', ([], {}), '()\n', (2911, 2913), False, 'import vsc\n'), ((3642, 3660), 'vsc.rand_uint8_t', 'vsc.rand_uint8_t', ([], {}), '()\n', (3658, 3660), False, 'import vsc\n'), ((659, 672), 'vsc.uint8_t', 'vsc.uint8_t', ([], {}), '()\n', (670, 672), False, 'import vsc\n'), ((1319, 1332), 'vsc.uint8_t', 'vsc.uint8_t', ([], {}), '()\n', (1330, 1332), False, 'import vsc\n'), ((2086, 2099), 'vsc.uint8_t', 'vsc.uint8_t', ([], {}), '()\n', (2097, 2099), False, 'import vsc\n'), ((2332, 2371), 'vsc.foreach', 'vsc.foreach', (['self.l'], {'it': '(False)', 'idx': '(True)'}), '(self.l, it=False, idx=True)\n', (2343, 2371), False, 'import vsc\n'), ((2953, 2966), 'vsc.uint8_t', 'vsc.uint8_t', ([], {}), '()\n', (2964, 2966), False, 'import vsc\n'), ((3129, 3151), 'vsc.rangelist', 'vsc.rangelist', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (3142, 3151), False, 'import vsc\n'), ((3171, 3203), 'vsc.foreach', 'vsc.foreach', (['self.temp'], {'idx': '(True)'}), '(self.temp, idx=True)\n', (3182, 3203), False, 'import vsc\n'), ((3596, 3609), 'vsc.uint8_t', 'vsc.uint8_t', ([], {}), '()\n', (3607, 3609), False, 'import vsc\n'), ((3805, 3824), 'vsc.foreach', 'vsc.foreach', (['self.l'], {}), '(self.l)\n', (3816, 3824), False, 'import vsc\n'), ((808, 822), 'vsc.rng', 'vsc.rng', (['(2)', '(10)'], {}), '(2, 10)\n', (815, 822), False, 'import vsc\n'), ((1468, 1482), 'vsc.rng', 'vsc.rng', (['(2)', '(10)'], {}), '(2, 10)\n', (1475, 1482), 
False, 'import vsc\n'), ((2279, 2293), 'vsc.rng', 'vsc.rng', (['(2)', '(10)'], {}), '(2, 10)\n', (2286, 2293), False, 'import vsc\n'), ((2405, 2425), 'vsc.if_then', 'vsc.if_then', (['(idx > 0)'], {}), '(idx > 0)\n', (2416, 2425), False, 'import vsc\n')] |
import json
from configserver import ConfigServer, get_postgres_db
from configserver.errors import InvalidRouteUUIDError
from flask.testing import FlaskClient
import pytest
from peewee import SqliteDatabase
import logging
from uuid import uuid4
import functools
from typing import Iterable
@pytest.fixture(autouse=True)
def no_logs():
    """Silence INFO/DEBUG chatter from the root logger for every test."""
    root_logger = logging.getLogger()
    root_logger.setLevel(logging.WARNING)
@pytest.fixture()
def webhook_server():
    """
    Yield a ConfigServer backed by the Postgres test DB, using test auth.

    The server is closed in a finally block so it is released even when the
    test body raises (the original leaked the server on test failure).
    """
    with open("config.json") as config_file:
        config_JSON = json.load(config_file)
    server = ConfigServer(
        use_test_auth=True,
        db=get_postgres_db(),
        config_JSON=config_JSON
    )
    try:
        yield server
    finally:
        server.close()
@pytest.fixture()
def user_auth():
    """Request kwargs carrying auth headers for a unique throwaway user."""
    email = f"test_user{uuid4()}@<EMAIL>"
    headers = {"user": email}
    return {"headers": headers}
@pytest.fixture()
def router_app(webhook_server, user_auth):
    """A Flask test client whose HTTP verbs always carry the test user's auth headers."""
    test_client = webhook_server.app.app.test_client()  # type: FlaskClient
    # NOTE(review): these partialmethod descriptors are accessed on the *class*
    # (tests call e.g. router_app.post(...)), relying on partialmethod's
    # unbound-method fallback to forward the call to the already-bound client
    # verb with `user_auth` merged into the keywords — confirm against the
    # functools.partialmethod descriptor semantics before refactoring.
    class PatchedFlaskClient:
        get = functools.partialmethod(test_client.get, **user_auth)
        delete = functools.partialmethod(test_client.delete, **user_auth)
        post = functools.partialmethod(test_client.post, **user_auth)
        patch = functools.partialmethod(test_client.patch, **user_auth)
    return PatchedFlaskClient
@pytest.fixture()
def test_route_uuid(webhook_server: ConfigServer, router_app: FlaskClient) -> Iterable[str]:
    """Create a throwaway route, yield its uuid, delete it on teardown."""
    payload = {"name": "route", "destination": "http://127.0.0.1"}
    create_resp = router_app.post(
        "/create-route",
        data=json.dumps(payload),
        content_type='application/json'
    )
    uuid = json.loads(create_resp.data)["uuid"]
    try:
        yield uuid
    finally:
        router_app.delete(f"/routes/{uuid}")
def test_create_route(router_app: FlaskClient):
    """POST /create-route answers 201 Created."""
    payload = {"name": "route", "destination": "http://127.0.0.1"}
    resp = router_app.post(
        "/create-route",
        data=json.dumps(payload),
        content_type='application/json'
    )
    assert resp.status_code == 201
def test_get(router_app: FlaskClient, test_route_uuid: str):
    """An existing route is retrievable by uuid."""
    resp = router_app.get(f"/routes/{test_route_uuid}")
    assert resp.status_code == 200
def test_get_by_token(router_app: FlaskClient, test_route_uuid: str):
    """A route can be looked up through its generated token."""
    route = json.loads(router_app.get(f"/routes/{test_route_uuid}").data)
    resp = router_app.get(f"/routes/token/{route['token']}")
    assert resp.status_code == 200
def test_patch(router_app: FlaskClient, test_route_uuid: str):
    """PATCH renames a route and the new name is persisted."""
    patch_resp = router_app.patch(
        f"/routes/{test_route_uuid}",
        data=json.dumps({"name": "new-name"}),
        content_type='application/json',
    )
    assert patch_resp.status_code == 204
    stored = json.loads(router_app.get(f"/routes/{test_route_uuid}").data)
    assert stored["name"] == "new-name"
@pytest.mark.usefixtures("test_route_uuid")
def test_get_all(router_app: FlaskClient):
    """GET /routes lists exactly the one route created by the fixture."""
    resp = router_app.get("/routes")
    assert resp.status_code == 200
    routes = json.loads(resp.data)
    assert len(routes) == 1 and routes[0]["name"] == "route"
def test_delete(router_app: FlaskClient, test_route_uuid: str):
    """Deleting a route returns 204; later GETs on it return 404."""
    delete_resp = router_app.delete(f"/routes/{test_route_uuid}")
    assert delete_resp.status_code == 204
    get_resp = router_app.get(f"/routes/{test_route_uuid}")
    assert get_resp.status_code == 404
def test_regenerate(router_app: FlaskClient, test_route_uuid: str):
    """Regenerating replaces the route token with a fresh one."""
    old_token = json.loads(router_app.get(f"/routes/{test_route_uuid}").data)["token"]
    regen_resp = router_app.post(f"/routes/{test_route_uuid}/regenerate")
    assert regen_resp.status_code == 200
    assert json.loads(regen_resp.data)["token"] != old_token
def test_add_user_link(router_app: FlaskClient, test_route_uuid: str):
    """Linking a second user makes the route visible to that user."""
    other_user = {"headers": {"user": "<EMAIL>"}}
    link_resp = router_app.post(f"/links/{test_route_uuid}", **other_user)
    assert link_resp.status_code == 201
    routes = json.loads(router_app.get("/routes", **other_user).data)
    assert len(routes) == 1
def test_get_user_link(router_app: FlaskClient, test_route_uuid: str):
    """Only the owning user holds a link to the route."""
    other_user = {"headers": {"user": "<EMAIL>"}}
    assert router_app.get(f"/links/{test_route_uuid}", **other_user).status_code == 404
    assert router_app.get(f"/links/{test_route_uuid}").status_code == 200
def test_remove_user_link(router_app: FlaskClient, test_route_uuid: str):
    """Unlinking removes the route from the second user's listing."""
    other_user = {"headers": {"user": "<EMAIL>"}}
    test_add_user_link(router_app, test_route_uuid)
    unlink_resp = router_app.delete(f"/links/{test_route_uuid}", **other_user)
    assert unlink_resp.status_code == 204
    remaining = json.loads(router_app.get("/routes", **other_user).data)
    assert len(remaining) == 0
def test_get_route_stats(router_app: FlaskClient, test_route_uuid: str):
    """Per-route statistics endpoint is reachable."""
    stats_resp = router_app.get(f"/routes/{test_route_uuid}/statistics")
    assert stats_resp.status_code == 200
def test_get_route_logs(router_app: FlaskClient, test_route_uuid: str):
    """Per-route log endpoint is reachable."""
    logs_resp = router_app.get(f"/routes/{test_route_uuid}/logs")
    assert logs_resp.status_code == 200
@pytest.mark.usefixtures("test_route_uuid")
def test_all_routes_stats(router_app: FlaskClient):
    """Aggregate statistics endpoint answers 200 when a route exists."""
    assert router_app.get("/routes/statistics").status_code == 200
def test_all_routes_stats_with_no_stats(router_app: FlaskClient):
    """Aggregate statistics endpoint also answers 200 with no routes present."""
    resp = router_app.get("/routes/statistics")
    assert resp.status_code == 200
"logging.getLogger",
"json.loads",
"json.dumps",
"configserver.get_postgres_db",
"json.load",
"uuid.uuid4",
"pytest.mark.usefixtures",
"pytest.fixture",
"functools.partialmethod"
] | [((293, 321), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (307, 321), False, 'import pytest\n'), ((389, 405), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (403, 405), False, 'import pytest\n'), ((680, 696), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (694, 696), False, 'import pytest\n'), ((816, 832), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (830, 832), False, 'import pytest\n'), ((1299, 1315), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1313, 1315), False, 'import pytest\n'), ((2827, 2869), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""test_route_uuid"""'], {}), "('test_route_uuid')\n", (2850, 2869), False, 'import pytest\n'), ((4990, 5032), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""test_route_uuid"""'], {}), "('test_route_uuid')\n", (5013, 5032), False, 'import pytest\n'), ((3020, 3052), 'json.loads', 'json.loads', (['all_routes_resp.data'], {}), '(all_routes_resp.data)\n', (3030, 3052), False, 'import json\n'), ((495, 517), 'json.load', 'json.load', (['config_file'], {}), '(config_file)\n', (504, 517), False, 'import json\n'), ((996, 1049), 'functools.partialmethod', 'functools.partialmethod', (['test_client.get'], {}), '(test_client.get, **user_auth)\n', (1019, 1049), False, 'import functools\n'), ((1067, 1123), 'functools.partialmethod', 'functools.partialmethod', (['test_client.delete'], {}), '(test_client.delete, **user_auth)\n', (1090, 1123), False, 'import functools\n'), ((1139, 1193), 'functools.partialmethod', 'functools.partialmethod', (['test_client.post'], {}), '(test_client.post, **user_auth)\n', (1162, 1193), False, 'import functools\n'), ((1210, 1265), 'functools.partialmethod', 'functools.partialmethod', (['test_client.patch'], {}), '(test_client.patch, **user_auth)\n', (1233, 1265), False, 'import functools\n'), ((1646, 1680), 'json.loads', 'json.loads', (['create_route_resp.data'], {}), '(create_route_resp.data)\n', (1656, 
1680), False, 'import json\n'), ((341, 360), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (358, 360), False, 'import logging\n'), ((585, 602), 'configserver.get_postgres_db', 'get_postgres_db', ([], {}), '()\n', (600, 602), False, 'from configserver import ConfigServer, get_postgres_db\n'), ((1488, 1552), 'json.dumps', 'json.dumps', (["{'name': 'route', 'destination': 'http://127.0.0.1'}"], {}), "({'name': 'route', 'destination': 'http://127.0.0.1'})\n", (1498, 1552), False, 'import json\n'), ((1904, 1968), 'json.dumps', 'json.dumps', (["{'name': 'route', 'destination': 'http://127.0.0.1'}"], {}), "({'name': 'route', 'destination': 'http://127.0.0.1'})\n", (1914, 1968), False, 'import json\n'), ((3604, 3625), 'json.loads', 'json.loads', (['resp.data'], {}), '(resp.data)\n', (3614, 3625), False, 'import json\n'), ((780, 787), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (785, 787), False, 'from uuid import uuid4\n'), ((2607, 2639), 'json.dumps', 'json.dumps', (["{'name': 'new-name'}"], {}), "({'name': 'new-name'})\n", (2617, 2639), False, 'import json\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 7 14:40:40 2021
@author: victorsellemi
"""
import numpy as np
def filter_MA(Y, q=2):
    """
    Split a time series into a trend and a stationary residual using a
    centered moving-average (low-pass) filter.

    Parameters
    ----------
    Y : (T,) or (T, 1) array of observations
    q : half-width of the averaging window (the full window is 2*q + 1)

    Returns
    -------
    trend : low-frequency component; the first and last q samples are
        copied from Y unchanged (border preservation)
    signal : Y - trend, the stationary remainder
    """
    T = Y.shape[0]
    Q = 2 * q
    # Identity on the q border samples at each end (borders pass through).
    border = np.zeros((T, T))
    border[:q, :q] = np.eye(q)
    border[T - q:, T - q:] = np.eye(q)
    # Band matrix whose interior rows sum a (Q+1)-wide window.
    band = np.zeros((T, T))
    for row in range(T - Q):
        band[q + row, row:row + Q + 1] = 1.0
    # Linear filter: borders untouched, interior averaged.
    filt = border + band / (Q + 1)
    trend = filt.dot(Y)
    signal = Y - trend
    return trend, signal
| [
"numpy.eye",
"numpy.zeros",
"numpy.concatenate"
] | [((877, 897), 'numpy.zeros', 'np.zeros', (['(T - Q, T)'], {}), '((T - Q, T))\n', (885, 897), True, 'import numpy as np\n'), ((970, 1006), 'numpy.concatenate', 'np.concatenate', (['(p1, p2, p3)'], {'axis': '(0)'}), '((p1, p2, p3), axis=0)\n', (984, 1006), True, 'import numpy as np\n'), ((1060, 1073), 'numpy.eye', 'np.eye', (['(T - Q)'], {}), '(T - Q)\n', (1066, 1073), True, 'import numpy as np\n'), ((1080, 1100), 'numpy.zeros', 'np.zeros', (['(T - Q, 1)'], {}), '((T - Q, 1))\n', (1088, 1100), True, 'import numpy as np\n'), ((827, 836), 'numpy.eye', 'np.eye', (['q'], {}), '(q)\n', (833, 836), True, 'import numpy as np\n'), ((838, 858), 'numpy.zeros', 'np.zeros', (['(q, T - q)'], {}), '((q, T - q))\n', (846, 858), True, 'import numpy as np\n'), ((920, 940), 'numpy.zeros', 'np.zeros', (['(q, T - q)'], {}), '((q, T - q))\n', (928, 940), True, 'import numpy as np\n'), ((939, 948), 'numpy.eye', 'np.eye', (['q'], {}), '(q)\n', (945, 948), True, 'import numpy as np\n'), ((1387, 1403), 'numpy.zeros', 'np.zeros', (['(q, T)'], {}), '((q, T))\n', (1395, 1403), True, 'import numpy as np\n'), ((1407, 1423), 'numpy.zeros', 'np.zeros', (['(q, T)'], {}), '((q, T))\n', (1415, 1423), True, 'import numpy as np\n'), ((1319, 1339), 'numpy.zeros', 'np.zeros', (['(T - Q, 1)'], {}), '((T - Q, 1))\n', (1327, 1339), True, 'import numpy as np\n'), ((1185, 1205), 'numpy.zeros', 'np.zeros', (['(T - Q, 1)'], {}), '((T - Q, 1))\n', (1193, 1205), True, 'import numpy as np\n'), ((1236, 1249), 'numpy.eye', 'np.eye', (['(T - Q)'], {}), '(T - Q)\n', (1242, 1249), True, 'import numpy as np\n')] |
import sys
import pytest
import aiohttp_mako
from aiohttp import web
@pytest.fixture
def app():
    """An aiohttp application with mako set up and one in-memory template."""
    application = web.Application()
    lookup = aiohttp_mako.setup(
        application,
        input_encoding='utf-8',
        output_encoding='utf-8',
        default_filters=['decode.utf8'],
    )
    lookup.put_string('tplt.html',
                      "<html><body><h1>${head}</h1>${text}</body></html>")
    return application
| [
"aiohttp.web.Application",
"aiohttp_mako.setup"
] | [((110, 127), 'aiohttp.web.Application', 'web.Application', ([], {}), '()\n', (125, 127), False, 'from aiohttp import web\n'), ((141, 250), 'aiohttp_mako.setup', 'aiohttp_mako.setup', (['app'], {'input_encoding': '"""utf-8"""', 'output_encoding': '"""utf-8"""', 'default_filters': "['decode.utf8']"}), "(app, input_encoding='utf-8', output_encoding='utf-8',\n default_filters=['decode.utf8'])\n", (159, 250), False, 'import aiohttp_mako\n')] |
from fastapi import Depends
from fastapi.exceptions import HTTPException
from fastapi.security import OAuth2PasswordBearer
from app.models.users import User, UserRepository
get_token = OAuth2PasswordBearer(tokenUrl="/login")

async def get_user(
    token: str = Depends(get_token), users: UserRepository = Depends()
) -> User:
    """Return the user matching the bearer token, or raise HTTP 403."""
    user = await users.get(token=token)
    if not user:
        raise HTTPException(status_code=403, detail="Invalid token")
    return user
| [
"fastapi.Depends",
"fastapi.security.OAuth2PasswordBearer",
"fastapi.exceptions.HTTPException"
] | [((187, 226), 'fastapi.security.OAuth2PasswordBearer', 'OAuth2PasswordBearer', ([], {'tokenUrl': '"""/login"""'}), "(tokenUrl='/login')\n", (207, 226), False, 'from fastapi.security import OAuth2PasswordBearer\n'), ((266, 284), 'fastapi.Depends', 'Depends', (['get_token'], {}), '(get_token)\n', (273, 284), False, 'from fastapi import Depends\n'), ((310, 319), 'fastapi.Depends', 'Depends', ([], {}), '()\n', (317, 319), False, 'from fastapi import Depends\n'), ((456, 510), 'fastapi.exceptions.HTTPException', 'HTTPException', ([], {'status_code': '(403)', 'detail': '"""Invalid token"""'}), "(status_code=403, detail='Invalid token')\n", (469, 510), False, 'from fastapi.exceptions import HTTPException\n')] |
#!/usr/bin/env python
#
# Author: <NAME>.
# Email:
#
from __future__ import print_function
from collections import defaultdict
import sys
import DNS
import re
# SPF mechanisms of interest: 'ip4:'/'ip6:'/'include:' use ':', 'redirect=' uses '='.
RE_PARSE = re.compile(r'(ip4|ip6|include|redirect)[:=](.*)', re.IGNORECASE)
# Maximum depth when chasing include:/redirect= referrals in process().
MAX_RECURSION = 5
def dns_txt(domain):
    """Return *domain*'s TXT records as joined strings, or None on lookup failure."""
    try:
        records = DNS.dnslookup(domain, 'TXT')
    except DNS.ServerError as err:
        # Best-effort: report the failure and let the caller skip this domain.
        print(err, file=sys.stderr)
        return None
    return [''.join(record) for record in records]
def dns_parse(txt_field):
    """Group SPF mechanism values by mechanism name across all TXT records.

    Returns a defaultdict mapping 'ip4'/'ip6'/'include'/'redirect'
    (lowercased by RE_PARSE's captured group as matched) to sets of values.
    """
    parsed = defaultdict(set)
    for record in txt_field:
        for token in record.split():
            match = RE_PARSE.match(token)
            if match is None:
                continue
            parsed[match.group(1)].add(match.group(2))
    return parsed
def process(domain):
    """Collect every ip4/ip6 value authorized by *domain*'s SPF policy.

    Follows include:/redirect= referrals breadth-first, at most
    MAX_RECURSION levels deep. Returns a set of address strings.
    """
    pending = [domain]
    ip_addresses = set()
    for _ in range(MAX_RECURSION):
        referrals = set()
        for name in pending:
            records = dns_txt(name)
            if not records:
                continue
            spf = dns_parse(records)
            ip_addresses |= spf.get('ip4', set())
            ip_addresses |= spf.get('ip6', set())
            referrals |= spf.get('include', set())
            referrals |= spf.get('redirect', set())
        if not referrals:
            break
        pending = referrals
    return ip_addresses
if __name__ == '__main__':
    # Read one domain per line from the file named on the command line and
    # print the union of all SPF-authorized addresses, sorted.
    whitelist = set()
    with open(sys.argv[1]) as fd:
        for line in fd:
            whitelist.update(process(line.strip()))
    for ip in sorted(whitelist):
        print(ip)
| [
"collections.defaultdict",
"DNS.dnslookup",
"re.compile"
] | [((172, 235), 're.compile', 're.compile', (['"""(ip4|ip6|include|redirect)[:=](.*)"""', 're.IGNORECASE'], {}), "('(ip4|ip6|include|redirect)[:=](.*)', re.IGNORECASE)\n", (182, 235), False, 'import re\n'), ((525, 541), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (536, 541), False, 'from collections import defaultdict\n'), ((295, 323), 'DNS.dnslookup', 'DNS.dnslookup', (['domain', '"""TXT"""'], {}), "(domain, 'TXT')\n", (308, 323), False, 'import DNS\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 19 18:08:01 2020
@author: <NAME>
Implementação do ajuste do modelo SEIIHURD com separação de grupos. Necessita
de mais verificações e funções para simplificar o input. Baseado nas classes
disponíveis no modelos.py
"""
import numpy as np
from functools import reduce
import scipy.integrate as spi
from scipy.optimize import least_squares
from platypus import NSGAII, Problem, Real
from pyswarms.single.global_best import GlobalBestPSO
import pyswarms as ps
from pyswarms.backend.topology import Star
from pyswarms.utils.plotters import plot_cost_history
from itertools import repeat
import multiprocessing as mp
import copy
import joblib
'''
Social contact matrices from
PREM, Kiesha; COOK, Alex R.; JIT, Mark. Projecting social contact matrices in
152 countries using contact surveys and demographic data. PLoS computational
biology, v. 13, n. 9, p. e1005697, 2017.
'''
# Lower edge (in years) of the 16 five-year age bands used by the Prem et al.
# contact matrices: 0, 5, 10, ..., 75.
ages_Mu_min = 5 * np.arange(16)
Mu_house = np.array([[0.47868515, 0.50507561, 0.29848922, 0.15763748, 0.26276959,
0.40185462, 0.46855027, 0.42581354, 0.2150961 , 0.0856771 ,
0.08705463, 0.07551931, 0.05129175, 0.02344832, 0.00793644,
0.01072846],
[0.35580205, 0.77874482, 0.51392686, 0.21151069, 0.08597966,
0.28306027, 0.49982218, 0.52854893, 0.41220947, 0.15848728,
0.07491245, 0.07658339, 0.04772343, 0.02588962, 0.01125956,
0.01073152],
[0.25903114, 0.63488713, 1.36175618, 0.50016515, 0.11748191,
0.10264613, 0.24113458, 0.47274372, 0.54026417, 0.26708819,
0.11007723, 0.04406045, 0.02746409, 0.02825033, 0.02044872,
0.01214665],
[0.14223192, 0.24383932, 0.53761638, 1.05325205, 0.28778496,
0.10925453, 0.0651564 , 0.2432454 , 0.39011334, 0.41381277,
0.23194909, 0.07541471, 0.03428398, 0.02122257, 0.01033573,
0.00864859],
[0.27381886, 0.15430529, 0.16053062, 0.5104134 , 0.95175366,
0.3586594 , 0.09248672, 0.04774269, 0.15814197, 0.36581739,
0.25544811, 0.13338965, 0.03461345, 0.01062458, 0.00844199,
0.00868782],
[0.59409802, 0.26971847, 0.10669146, 0.18330524, 0.39561893,
0.81955947, 0.26376865, 0.06604084, 0.03824556, 0.11560004,
0.23218163, 0.15331788, 0.07336147, 0.02312255, 0.00412646,
0.01025778],
[0.63860889, 0.75760606, 0.43109156, 0.09913293, 0.13935789,
0.32056062, 0.65710277, 0.25488454, 0.1062129 , 0.0430932 ,
0.06880784, 0.09938458, 0.09010691, 0.02233902, 0.01155556,
0.00695246],
[0.56209348, 0.87334544, 0.75598244, 0.33199136, 0.07233271,
0.08674171, 0.20243583, 0.60062714, 0.17793601, 0.06307045,
0.04445926, 0.04082447, 0.06275133, 0.04051762, 0.01712777,
0.00598721],
[0.35751289, 0.66234582, 0.77180208, 0.54993616, 0.17368099,
0.07361914, 0.13016852, 0.19937327, 0.46551558, 0.15412263,
0.06123041, 0.0182514 , 0.04234381, 0.04312892, 0.01656267,
0.01175358],
[0.208131 , 0.41591452, 0.56510014, 0.67760241, 0.38146504,
0.14185001, 0.06160354, 0.12945701, 0.16470166, 0.41150841,
0.14596804, 0.04404807, 0.02395316, 0.01731295, 0.01469059,
0.02275339],
[0.30472548, 0.26744442, 0.41631962, 0.46516888, 0.41751365,
0.28520772, 0.13931619, 0.07682945, 0.11404965, 0.16122096,
0.33813266, 0.1349378 , 0.03755396, 0.01429426, 0.01356763,
0.02551792],
[0.52762004, 0.52787011, 0.33622117, 0.43037934, 0.36416323,
0.42655672, 0.33780201, 0.13492044, 0.0798784 , 0.15795568,
0.20367727, 0.33176385, 0.12256126, 0.05573807, 0.0124446 ,
0.02190564],
[0.53741472, 0.50750067, 0.3229994 , 0.30706704, 0.21340314,
0.27424513, 0.32838657, 0.26023515, 0.13222548, 0.07284901,
0.11950584, 0.16376401, 0.25560123, 0.09269703, 0.02451284,
0.00631762],
[0.37949376, 0.55324102, 0.47449156, 0.24796638, 0.19276924,
0.20675484, 0.3267867 , 0.39525729, 0.3070043 , 0.10088992,
0.10256839, 0.13016641, 0.1231421 , 0.24067708, 0.05475668,
0.01401368],
[0.16359554, 0.48536065, 0.40533723, 0.31542539, 0.06890518,
0.15670328, 0.12884062, 0.27912381, 0.25685832, 0.20143856,
0.12497647, 0.07565566, 0.10331686, 0.08830789, 0.15657321,
0.05744065],
[0.29555039, 0.39898035, 0.60257982, 0.5009724 , 0.13799378,
0.11716593, 0.14366306, 0.31602298, 0.34691652, 0.30960511,
0.31253708, 0.14557295, 0.06065554, 0.10654772, 0.06390924,
0.09827735]])
Mu_school = np.array([[3.21885854e-001, 4.31659966e-002, 7.88269419e-003,
8.09548363e-003, 5.35038146e-003, 2.18201974e-002,
4.01633514e-002, 2.99376002e-002, 1.40680283e-002,
1.66587853e-002, 9.47774696e-003, 7.41041622e-003,
1.28200661e-003, 7.79120405e-004, 8.23608272e-066,
6.37926405e-120],
[5.40133328e-002, 4.84870697e+000, 2.70046494e-001,
3.14778450e-002, 3.11206331e-002, 8.56826951e-002,
1.08251879e-001, 9.46101139e-002, 8.63528188e-002,
5.51141159e-002, 4.19385198e-002, 1.20958942e-002,
4.77242219e-003, 1.39787217e-003, 3.47452943e-004,
8.08973738e-039],
[4.56461982e-004, 1.04840235e+000, 6.09152459e+000,
1.98915822e-001, 1.99709921e-002, 6.68319525e-002,
6.58949586e-002, 9.70851505e-002, 9.54147078e-002,
6.70538232e-002, 4.24864096e-002, 1.98701346e-002,
5.11869429e-003, 7.27320438e-004, 4.93746124e-025,
1.82153965e-004],
[2.59613205e-003, 4.73315233e-002, 1.99337834e+000,
7.20040500e+000, 8.57326037e-002, 7.90668822e-002,
8.54208542e-002, 1.10816964e-001, 8.76955236e-002,
9.22975521e-002, 4.58035025e-002, 2.51130956e-002,
5.71391798e-003, 1.07818752e-003, 6.21174558e-033,
1.70710246e-070],
[7.19158720e-003, 2.48833195e-002, 9.89727235e-003,
8.76815025e-001, 4.33963352e-001, 5.05185217e-002,
3.30594492e-002, 3.81384107e-002, 2.34709676e-002,
2.67235372e-002, 1.32913985e-002, 9.00655556e-003,
6.94913059e-004, 1.25675951e-003, 1.77164197e-004,
1.21957619e-047],
[7.04119204e-003, 1.19412206e-001, 3.75016980e-002,
2.02193056e-001, 2.79822908e-001, 1.68610223e-001,
2.86939363e-002, 3.56961469e-002, 4.09234494e-002,
3.32290896e-002, 8.12074348e-003, 1.26152144e-002,
4.27869081e-003, 2.41737477e-003, 4.63116893e-004,
1.28597237e-003],
[1.41486320e-002, 3.86561429e-001, 2.55902236e-001,
1.69973534e-001, 4.98104010e-002, 8.98122446e-002,
7.95333394e-002, 5.19274611e-002, 5.46612930e-002,
2.64567137e-002, 2.03241595e-002, 2.96263220e-003,
5.42888613e-003, 4.47585970e-004, 1.65440335e-048,
3.11189454e-055],
[2.40945305e-002, 2.11030046e-001, 1.54767246e-001,
8.17929897e-002, 1.84061608e-002, 5.43009779e-002,
7.39351186e-002, 5.21677009e-002, 5.63267084e-002,
2.51807147e-002, 3.53972554e-003, 7.96646343e-003,
5.56929776e-004, 2.08530461e-003, 1.84428290e-123,
9.69555083e-067],
[7.81313905e-003, 1.14371898e-001, 9.09011945e-002,
3.80212104e-001, 8.54533192e-003, 2.62430162e-002,
2.51880009e-002, 3.22563508e-002, 6.73506045e-002,
2.24997143e-002, 2.39241043e-002, 6.50627191e-003,
5.50892674e-003, 4.78308850e-004, 4.81213215e-068,
2.40231425e-092],
[6.55265016e-002, 2.31163536e-001, 1.49970765e-001,
5.53563093e-001, 5.74032526e-003, 3.02865481e-002,
5.72506883e-002, 4.70559232e-002, 4.28736553e-002,
2.42614518e-002, 2.86665377e-002, 1.29570473e-002,
3.24362518e-003, 1.67930318e-003, 6.20916950e-134,
3.27297624e-072],
[1.72765646e-002, 3.43744913e-001, 4.30902785e-001,
4.74293073e-001, 5.39328187e-003, 1.44128740e-002,
3.95545363e-002, 3.73781860e-002, 4.56834488e-002,
5.92135906e-002, 2.91473801e-002, 1.54857502e-002,
4.53105390e-003, 8.87272668e-024, 1.23797452e-117,
5.64262349e-078],
[6.14363036e-002, 2.98367348e-001, 2.59092700e-001,
3.00800812e-001, 5.92454596e-003, 5.26458862e-002,
2.02188672e-002, 3.27897605e-002, 4.07753741e-002,
2.83422407e-002, 2.43657809e-002, 2.73993226e-002,
8.87990718e-003, 1.13279180e-031, 7.81960493e-004,
7.62467510e-004],
[3.63695643e-002, 5.96870355e-002, 3.05072624e-002,
1.45523978e-001, 1.26062984e-002, 1.69458169e-003,
1.55127292e-002, 4.22097670e-002, 9.21792425e-003,
1.42200652e-002, 1.10967529e-002, 5.77020348e-003,
2.04474044e-002, 1.11075734e-002, 4.42271199e-067,
2.12068625e-037],
[1.67937029e-003, 2.72971001e-002, 1.05886266e-002,
7.61087735e-032, 1.97191559e-003, 1.92885006e-003,
1.24343737e-002, 5.39297787e-003, 5.41684968e-003,
8.63502071e-003, 1.94554498e-003, 1.49082274e-002,
8.11781100e-003, 1.74395489e-002, 1.11239023e-002,
3.45693088e-126],
[1.28088348e-028, 5.11065200e-026, 1.93019797e-040,
7.60476035e-003, 2.63586947e-022, 1.69749024e-024,
1.25875005e-026, 7.62109877e-003, 7.84979948e-003,
2.11516023e-002, 3.52117832e-002, 2.14360383e-002,
7.73902109e-003, 8.01328325e-003, 7.91285055e-003,
2.13825814e-002],
[2.81655586e-094, 2.11305187e-002, 8.46562506e-042,
2.12592841e-002, 4.89802057e-036, 7.59232387e-003,
9.77247001e-069, 2.23108239e-060, 1.43715978e-048,
8.56015694e-060, 4.69469043e-042, 1.59822047e-046,
2.20978550e-083, 8.85861277e-107, 1.02042815e-080,
6.61413913e-113]])
Mu_work = np.array([[0.00000000e+000, 0.00000000e+000, 0.00000000e+000,
0.00000000e+000, 0.00000000e+000, 0.00000000e+000,
0.00000000e+000, 0.00000000e+000, 0.00000000e+000,
0.00000000e+000, 0.00000000e+000, 0.00000000e+000,
0.00000000e+000, 8.20604524e-092, 1.20585150e-005,
3.16436834e-125],
[0.00000000e+000, 1.16840561e-003, 9.90713236e-072,
4.42646396e-059, 2.91874286e-006, 9.98773031e-003,
2.58779981e-002, 5.66104376e-003, 2.12699812e-002,
5.72117462e-003, 1.48212306e-003, 1.23926126e-003,
1.28212945e-056, 1.34955578e-005, 7.64591325e-079,
2.38392073e-065],
[0.00000000e+000, 2.56552144e-003, 1.12756182e-001,
2.40351143e-002, 2.62981485e-002, 7.56512432e-003,
6.19587609e-002, 1.73269871e-002, 5.87405128e-002,
3.26749742e-002, 1.24709193e-002, 2.93054408e-008,
3.71596993e-017, 2.79780317e-053, 4.95800770e-006,
3.77718083e-102],
[0.00000000e+000, 1.07213881e-002, 4.28390448e-002,
7.22769090e-001, 5.93479736e-001, 3.39341952e-001,
3.17013715e-001, 2.89168861e-001, 3.11143180e-001,
2.34889238e-001, 1.32953769e-001, 6.01944097e-002,
1.47306181e-002, 8.34699602e-006, 2.85972822e-006,
1.88926122e-031],
[0.00000000e+000, 9.14252587e-003, 5.74508682e-002,
4.00000235e-001, 7.93386618e-001, 7.55975146e-001,
6.32277283e-001, 6.83601459e-001, 4.98506972e-001,
3.82309992e-001, 2.81363576e-001, 1.23338103e-001,
4.15708021e-002, 9.86113407e-006, 1.32609387e-005,
3.74318048e-006],
[0.00000000e+000, 1.04243481e-002, 7.34587492e-002,
3.49556755e-001, 7.50680101e-001, 1.25683393e+000,
9.01245714e-001, 8.63446835e-001, 7.70443641e-001,
5.17237071e-001, 4.09810981e-001, 1.80645400e-001,
5.51284783e-002, 1.60674627e-005, 1.01182608e-005,
3.01442534e-006],
[0.00000000e+000, 1.65842404e-002, 8.34076781e-002,
1.89301935e-001, 5.21246906e-001, 8.54460001e-001,
1.12054931e+000, 9.64310078e-001, 8.34675180e-001,
6.52534012e-001, 3.79383514e-001, 2.11198205e-001,
5.17285688e-002, 1.63795563e-005, 4.10100851e-006,
3.49478980e-006],
[0.00000000e+000, 1.11666639e-002, 5.03319748e-002,
3.70510313e-001, 4.24294782e-001, 7.87535547e-001,
8.45085693e-001, 1.14590365e+000, 1.07673077e+000,
7.13492115e-001, 5.00740004e-001, 1.90102207e-001,
3.59740115e-002, 1.22988530e-005, 9.13512833e-006,
6.02097416e-006],
[0.00000000e+000, 6.07792440e-003, 5.49337607e-002,
2.23499535e-001, 4.82353827e-001, 7.52291991e-001,
8.89187601e-001, 9.33765370e-001, 1.10492283e+000,
8.50124391e-001, 5.88941528e-001, 1.94947085e-001,
5.09477228e-002, 1.43626161e-005, 1.02721567e-005,
1.29503893e-005],
[0.00000000e+000, 3.31622551e-003, 7.01829848e-002,
2.67512972e-001, 3.14796392e-001, 5.41516885e-001,
6.95769048e-001, 7.50620518e-001, 7.50038547e-001,
7.00954088e-001, 4.35197983e-001, 2.11283335e-001,
3.88576200e-002, 1.62810370e-005, 1.08243610e-005,
6.09172339e-006],
[0.00000000e+000, 4.39576425e-004, 7.17737968e-002,
1.89254612e-001, 2.47832532e-001, 5.16027731e-001,
6.02783971e-001, 6.15949277e-001, 8.05581107e-001,
7.44063535e-001, 5.44855374e-001, 2.52198706e-001,
4.39235685e-002, 1.18079721e-005, 1.18226645e-005,
1.01613165e-005],
[0.00000000e+000, 4.91737561e-003, 1.08686672e-001,
1.24987806e-001, 1.64110983e-001, 3.00118829e-001,
4.18159745e-001, 3.86897613e-001, 4.77718241e-001,
3.60854250e-001, 3.22466456e-001, 1.92516925e-001,
4.07209694e-002, 1.34978304e-005, 6.58739925e-006,
6.65716756e-006],
[0.00000000e+000, 6.35447018e-004, 3.96329620e-002,
1.83072502e-002, 7.04596701e-002, 1.24861117e-001,
1.37834574e-001, 1.59845720e-001, 1.66933479e-001,
1.56084857e-001, 1.14949158e-001, 8.46570798e-002,
1.50879843e-002, 2.03019580e-005, 8.26102156e-006,
1.48398182e-005],
[7.60299521e-006, 3.36326754e-006, 7.64855296e-006,
2.27621532e-005, 3.14933351e-005, 7.89308410e-005,
7.24212842e-005, 2.91748203e-005, 6.61873732e-005,
5.95693238e-005, 7.70713500e-005, 5.30687748e-005,
4.66030117e-005, 1.41633235e-005, 2.49066205e-005,
1.19109038e-005],
[5.78863840e-055, 7.88785149e-042, 2.54830412e-006,
2.60648191e-005, 1.68036205e-005, 2.12446739e-005,
3.57267603e-005, 4.02377033e-005, 3.56401935e-005,
3.09769252e-005, 2.13053382e-005, 4.49709414e-005,
2.61368373e-005, 1.68266203e-005, 1.66514322e-005,
2.60822813e-005],
[2.35721271e-141, 9.06871674e-097, 1.18637122e-089,
9.39934076e-022, 4.66000452e-005, 4.69664011e-005,
4.69316082e-005, 8.42184044e-005, 2.77788168e-005,
1.03294378e-005, 1.06803618e-005, 7.26341826e-075,
1.10073971e-065, 1.02831671e-005, 5.16902994e-049,
8.28040509e-043]])
Mu_other = np.array([[0.95537734, 0.46860132, 0.27110607, 0.19447667, 0.32135073,
0.48782072, 0.54963024, 0.42195593, 0.27152038, 0.17864251,
0.20155642, 0.16358271, 0.1040159 , 0.0874149 , 0.05129938,
0.02153823],
[0.51023519, 2.17757364, 0.9022516 , 0.24304235, 0.20119518,
0.39689588, 0.47242431, 0.46949918, 0.37741651, 0.16843746,
0.12590504, 0.12682331, 0.11282247, 0.08222718, 0.03648526,
0.02404257],
[0.18585796, 1.11958124, 4.47729443, 0.67959759, 0.43936317,
0.36934142, 0.41566744, 0.44467286, 0.48797422, 0.28795385,
0.17659191, 0.10674831, 0.07175567, 0.07249261, 0.04815305,
0.03697862],
[0.09854482, 0.3514869 , 1.84902386, 5.38491613, 1.27425161,
0.59242579, 0.36578735, 0.39181798, 0.38131832, 0.31501028,
0.13275648, 0.06408612, 0.04499218, 0.04000664, 0.02232326,
0.01322698],
[0.13674436, 0.1973461 , 0.33264088, 2.08016394, 3.28810184,
1.29198125, 0.74642201, 0.44357051, 0.32781391, 0.35511243,
0.20132011, 0.12961 , 0.04994553, 0.03748657, 0.03841073,
0.02700581],
[0.23495203, 0.13839031, 0.14085679, 0.5347385 , 1.46021275,
1.85222022, 1.02681162, 0.61513602, 0.39086271, 0.32871844,
0.25938947, 0.13520412, 0.05101963, 0.03714278, 0.02177751,
0.00979745],
[0.23139098, 0.18634831, 0.32002214, 0.2477269 , 0.64111274,
0.93691022, 1.14560725, 0.73176025, 0.43760432, 0.31057135,
0.29406937, 0.20632155, 0.09044896, 0.06448983, 0.03041877,
0.02522842],
[0.18786196, 0.25090485, 0.21366969, 0.15358412, 0.35761286,
0.62390736, 0.76125666, 0.82975354, 0.54980593, 0.32778339,
0.20858991, 0.1607099 , 0.13218526, 0.09042909, 0.04990491,
0.01762718],
[0.12220241, 0.17968132, 0.31826246, 0.19846971, 0.34823183,
0.41563737, 0.55930999, 0.54070187, 0.5573184 , 0.31526474,
0.20194048, 0.09234293, 0.08377534, 0.05819374, 0.0414762 ,
0.01563101],
[0.03429527, 0.06388018, 0.09407867, 0.17418896, 0.23404519,
0.28879108, 0.34528852, 0.34507961, 0.31461973, 0.29954426,
0.21759668, 0.09684718, 0.06596679, 0.04274337, 0.0356891 ,
0.02459849],
[0.05092152, 0.10829561, 0.13898902, 0.2005828 , 0.35807132,
0.45181815, 0.32281821, 0.28014803, 0.30125545, 0.31260137,
0.22923948, 0.17657382, 0.10276889, 0.05555467, 0.03430327,
0.02064256],
[0.06739051, 0.06795035, 0.0826437 , 0.09522087, 0.23309189,
0.39055444, 0.39458465, 0.29290532, 0.27204846, 0.17810118,
0.24399007, 0.22146653, 0.13732849, 0.07585801, 0.03938794,
0.0190908 ],
[0.04337917, 0.05375367, 0.05230119, 0.08066901, 0.16619572,
0.25423056, 0.25580913, 0.27430323, 0.22478799, 0.16909017,
0.14284879, 0.17211604, 0.14336033, 0.10344522, 0.06797049,
0.02546014],
[0.04080687, 0.06113728, 0.04392062, 0.04488748, 0.12808591,
0.19886058, 0.24542711, 0.19678011, 0.17800136, 0.13147441,
0.13564091, 0.14280335, 0.12969805, 0.11181631, 0.05550193,
0.02956066],
[0.01432324, 0.03441212, 0.05604694, 0.10154456, 0.09204 ,
0.13341443, 0.13396901, 0.16682638, 0.18562675, 0.1299677 ,
0.09922375, 0.09634331, 0.15184583, 0.13541738, 0.1169359 ,
0.03805293],
[0.01972631, 0.02274412, 0.03797545, 0.02036785, 0.04357298,
0.05783639, 0.10706321, 0.07688271, 0.06969759, 0.08029393,
0.05466604, 0.05129046, 0.04648653, 0.06132882, 0.05004289,
0.03030569]])
def generate_reduced_matrices(age_sep, Ni):
    """
    Aggregate the 16-band contact matrices into len(age_sep)+1 age groups.

    age_sep : increasing age cut points (years) separating the groups
    Ni      : population of each original 5-year band (16 entries)

    Returns a (4, n, n) array, n = len(age_sep) + 1, holding the
    population-weighted house, school, work and other contact matrices.
    """
    n_groups = len(age_sep) + 1
    # Indexes of the 5-year bands covered by each reduced group.
    groups = [np.flatnonzero(ages_Mu_min <= age_sep[0])]
    groups += [np.flatnonzero((ages_Mu_min > age_sep[k - 1]) *
                              (ages_Mu_min <= age_sep[k]))
               for k in range(1, len(age_sep))]
    groups.append(np.flatnonzero(ages_Mu_min > age_sep[-1]))
    Ms = np.empty((4, n_groups, n_groups))
    matrices = (Mu_house, Mu_school, Mu_work, Mu_other)
    for i in range(n_groups):
        Ni_group = Ni[groups[i]]
        weight = Ni_group.sum()
        for j in range(n_groups):
            for m, Mu in enumerate(matrices):
                # Sum contacts towards group j, then average over group i
                # weighted by band populations.
                row_sums = Mu[groups[i]][:, groups[j]].sum(axis=1)
                Ms[m, i, j] = (Ni_group * row_sums).sum() / weight
    return Ms
class SEIIHURD_age:
    '''Group-structured SEIIHURD compartmental model
    (S, E, Ia, Is, H, U, R, D plus cumulative reported cases Nw),
    with companion fitting routines (PSO / least squares).'''
    def __init__(self,tamanhoPop,numeroProcessadores=None):
        # Population size(s) of the modeled group(s).
        self.N = tamanhoPop
        # Number of worker processes available to the fitting routines.
        self.numeroProcessadores = numeroProcessadores
        # Presumably the optimizer's sampled positions; None until a fit runs
        # — TODO(review): confirm once the fitting methods are in view.
        self.pos = None
    # 'pars' dict keys: beta(s), delta, kappa, p, gammaA, gammaS, h, epsilon,
    # gammaH, gammaU, muU, muH, wU, wH.
    # NOTE(review): the ODE below reads pars['xi'], not 'epsilon' — confirm.
    # Convention: beta_12 means group 2 infecting group 1 (1 = row, 2 = column).
def _SEIIHURD_age_eq(self, X, t, pars):
S, E, Ia, Is, H, U, R, D, Nw = np.split(X, 9)
StE = S * (pars['beta'] @ ((Ia * pars['delta'] + Is).reshape((-1,1)))).flatten()
dS = - StE
dE = StE - pars['kappa'] * E
dIa = (1. - pars['p']) * pars['kappa'] * E - pars['gammaA'] * Ia
dIs = pars['p'] * pars['kappa'] * E - pars['gammaS'] * Is
dH = pars['h'] * pars['xi'] * pars['gammaS'] * Is + (1 - pars['muU'] +\
pars['wU'] * pars['muU']) * pars['gammaU'] * U - pars['gammaH'] * H
dU = pars['h'] * (1 - pars['xi']) * pars['gammaS'] * Is + pars['wH'] *\
pars['gammaH'] * H - pars['gammaU'] * U
dR = pars['gammaA'] * Ia + (1. - pars['h']) * pars['gammaS'] * Is + \
(1 - pars['muH']) * (1 - pars['wH']) * pars['gammaH'] * H
dD = (1 - pars['wH']) * pars['muH'] * pars['gammaH'] * H + \
(1 - pars['wU']) * pars['muU'] * pars['gammaU'] * U
dNw = pars['p'] * pars['kappa'] * E
return np.r_[dS, dE, dIa, dIs, dH, dU, dR, dD, dNw]
def _call_ODE(self, ts, ppars):
betas = ppars['beta'].copy()
pars = copy.deepcopy(ppars)
if 'tcut' not in ppars.keys():
tcorte = None
else:
tcorte = pars['tcut']
if type(ts) in [int, float]:
ts = np.arange(ts)
if tcorte == None:
tcorte = [ts[-1]]
if type(betas) != list:
betas = [betas]
if tcorte[-1] < ts[-1]:
tcorte.append(ts[-1])
tcorte = [ts[0]] + tcorte
tcorte.sort()
Is0 = pars['x0'].reshape((3,-1)).sum(axis=0)
x0 = np.r_[1. - Is0, pars['x0'], np.zeros(4*len(Is0)), pars['x0'][2*len(Is0):]]
saida = x0.reshape((1,-1))
Y = saida.copy()
for i in range(1, len(tcorte)):
cut_last = False
pars['beta'] = betas[i-1]
t = ts[(ts >= tcorte[i-1]) * (ts<= tcorte[i])]
if len(t) > 0:
if t[0] > tcorte[i-1]:
t = np.r_[tcorte[i-1], t]
if t[-1] < tcorte[i]:
t = np.r_[t, tcorte[i]]
cut_last = True
Y = spi.odeint(self._SEIIHURD_age_eq, Y[-1], t, args=(pars,))
if cut_last:
saida = np.r_[saida, Y[1:-1]]
else:
saida = np.r_[saida, Y[1:]]
else:
Y = spi.odeint(self._SEIIHURD_age_eq, Y[-1], tcorte[i-1:i+1], args=(pars,))
return ts, saida
def _fill_paramPSO(self, paramPSO):
if 'options' not in paramPSO.keys():
paramPSO['options'] = {'c1': 0.1, 'c2': 0.3, 'w': 0.9,'k':5,'p':2}
if 'n_particles' not in paramPSO.keys():
paramPSO['n_particles'] = 300
if 'iter' not in paramPSO.keys():
paramPSO['iter'] = 1000
return paramPSO
def _prepare_input(self, data):
list_states = ['S', 'E', 'Ia', 'Is', 'H', 'U', 'R', 'D', 'Nw']
i_integ = list()
Y = list()
for ke in data.keys():
if ke == 't':
t = data[ke]
else:
Y.append(data[ke])
simb, num = ke.split("_")
n0 = self.nages * list_states.index(simb)
if '_ALL' in ke:
i_integ.append(list(range(n0,n0 + self.nages)))
else:
i_integ.append(int(num) + n0)
return i_integ, Y, t
def _prepare_conversor(self, p2f, pothers, bound):
padjus = list()
if bound != None:
bound_new = [[], []]
for i, par in enumerate(p2f):
if 'beta' in par:
if '_ALL' in par:
for l in range(len(pothers['beta'])):
for j in range(pothers['beta'][i].shape[0]):
for k in range(pothers['beta'][i].shape[1]):
padjus.append('beta_{}_{}_{}'.format(l,j,k))
if bound != None:
bound_new[0].append(bound[0][i])
bound_new[1].append(bound[1][i])
else:
padjus.append(par)
if bound != None:
bound_new[0].append(bound[0][i])
bound_new[1].append(bound[1][i])
elif '_ALL' in par:
name = par.split('_')[0]
for j in range(len(pothers[name])):
padjus.append('{}_{}'.format(name, j))
if bound != None:
bound_new[0].append(bound[0][i])
bound_new[1].append(bound[1][i])
else:
padjus.append(par)
if bound != None:
bound_new[0].append(bound[0][i])
bound_new[1].append(bound[1][i])
if bound != None:
bound_new[0] = np.array(bound_new[0])
bound_new[1] = np.array(bound_new[1])
return bound_new, padjus
def _conversor(self, coefs, pars0, padjus):
pars = copy.deepcopy(pars0)
for i, coef in enumerate(coefs):
if 'beta' in padjus[i]:
if '_M_' in padjus[i]:
indx = int(padjus[i].split('_')[-1])
pars['beta'][indx] = coef * pars['beta'][indx]
else:
indx = padjus[i].split('_')
pars['beta'][int(indx[1])][int(indx[2]), int(indx[3])] = coef
elif '_' in padjus[i]:
name, indx = padjus[i].split('_')
pars[name][int(indx)] = coef
else:
pars[padjus[i]] = coef
return pars
def objectiveFunction(self, coefs_list, stand_error=False, weights=None):
errsq = np.zeros(coefs_list.shape[0])
for i, coefs in enumerate(coefs_list):
errs = self._residuals(coefs, stand_error, weights)
errsq[i] = (errs*errs).mean()
return errsq
def _residuals(self, coefs, stand_error=False, weights=None):
if type(weights) == type(None):
weights = np.ones(len(self.Y))
error_func = (lambda x: np.sqrt(x+1)) if stand_error else (lambda x:np.ones_like(x))
errs = np.empty((0,))
ts, mY = self._call_ODE(self.t, self._conversor(coefs, self.pars_init, self.padjus))
for indY, indODE in enumerate(self.i_integ):
if type(indODE) == list:
temp = (self.N.reshape((1,-1)) * mY[:,indODE]).sum(axis=1)
errs = np.r_[errs, weights[indY] * ((self.Y[indY] - temp) / error_func(temp)) ]
else:
try:
errs = np.r_[errs, weights[indY] * ((self.Y[indY] - self.N[indODE%self.nages] * mY[:,indODE]) / error_func(mY[:,indODE])) ]
except:
print(self.t, self._conversor(coefs, self.pars_init, self.padjus))
raise
errs = errs[~np.isnan(errs)]
return errs
def prepare_to_fit(self, data, pars, pars_to_fit, bound=None, nages=1, stand_error=False):
self.pars_init = copy.deepcopy(pars)
self.nages = nages
self.i_integ, self.Y, self.t = self._prepare_input(data)
self.bound, self.padjus = self._prepare_conversor(pars_to_fit, pars, bound)
self.n_to_fit = len(self.padjus)
def fit(self, data, pars, pars_to_fit, bound=None, nages=2, paramPSO=dict(), stand_error=False):
'''
data: dictionary:
t -> times
X_N -> variable:
X is the simbol of the parameter: S, E, Ia, Is, H, U, R, D, Nw
N is the index of the age-group, starting on 0
pars: dictionary, with the variable names as keys.
pars_to_fit: the name of the parameters to fits, if the parameter is a list,
add _N with the index you want to if or _ALL to fit all
the 'beta' parameter has 3 indexes: beta_I_J_K, with I indicating the
which tcut it belongs and J_K indicating the position in the matrix.
the beta also has a option 'beta_M_I' that fits a multiplicative
constant of the infection matrix, without changing the relative weights
(the _M_ and _ALL_ options are incompatible by now, and _M_ requires
testing)
bound = intervalo de limite para procura de cada parametro, onde None = sem limite
bound => (lista_min_bound, lista_max_bound)
'''
paramPSO = self._fill_paramPSO(paramPSO)
self.prepare_to_fit(data, pars, pars_to_fit, bound=bound, nages=nages, stand_error=stand_error)
optimizer = ps.single.LocalBestPSO(n_particles=paramPSO['n_particles'], dimensions=self.n_to_fit, options=paramPSO['options'],bounds=self.bound)
cost = pos = None
cost, pos = optimizer.optimize(self.objectiveFunction,paramPSO['iter'], stand_error=stand_error, n_processes=self.numeroProcessadores)
self.pos = pos
self.pars_opt = self._conversor(pos, self.pars_init, self.padjus )
self.rmse = cost
self.optimize = optimizer
def fit_lsquares(self, data, pars, pars_to_fit, bound=None, nages=2, stand_error=False, init=None, nrand=10):
self.prepare_to_fit(data, pars, pars_to_fit, bound=bound, nages=nages, stand_error=stand_error)
if init == None:
cost_best = np.inf
res_best = None
#BUG: the parallel code does not work if PSO code had run previously
if type(self.pos) != type(None) or self.numeroProcessadores == None or self.numeroProcessadores <= 1:
for i in range(nrand):
print("{} / {}".format(i, nrand))
par0 = np.random.rand(self.n_to_fit)
par0 = self.bound[0] + par0 * (self.bound[1] - self.bound[0])
res = least_squares(self._residuals, par0, bounds=self.bound)
if res.cost < cost_best:
cost_best = res.cost
res_best = res
else:
par0 = np.random.rand(nrand, self.n_to_fit)
par0 = self.bound[0].reshape((1,-1)) + par0 * (self.bound[1] - self.bound[0]).reshape((1,-1))
f = lambda p0: least_squares(self._residuals, p0, bounds=self.bound)
all_res = joblib.Parallel(n_jobs=self.numeroProcessadores)(joblib.delayed(f)(p0,) for p0 in par0)
costs = np.array([res.cost for res in all_res])
cost_best = all_res[costs.argmin()].cost
res_best = all_res[costs.argmin()]
else:
res_best = least_squares(self._residuals, init, bounds=bound )
self.pos_ls = res_best.x
self.pars_opt_ls = self._conversor(res_best.x, self.pars_init, self.padjus )
self.rmse_ls = (res_best.fun**2).mean()
self.result_ls = res_best
def predict(self, t=None, coefs=None, model_output=False):
if type(t) == type(None):
t = self.t
if type(coefs) == type(None):
coefs = self.pos
elif type(coefs) == str and coefs == 'LS':
coefs = self.pos_ls
ts, mY = self._call_ODE(t, self._conversor(coefs, self.pars_init, self.padjus))
saida = np.zeros((len(ts), 0))
for i in self.i_integ:
if type(i) == list:
ytemp = (mY[:,i] *self.N.reshape((1,-1))).sum(axis=1)
else:
ytemp = mY[:,i] * self.N[i%self.nages]
saida = np.c_[saida, ytemp.reshape((-1,1))]
if model_output:
return ts, saida, mY
else:
return ts, saida
#ts, X = call_ODE(X0, tmax, betas, param, tcorte=tcorte)
#plt.plot(ts, X[:,:2], '.-')
| [
"numpy.ones_like",
"scipy.optimize.least_squares",
"numpy.sqrt",
"numpy.random.rand",
"scipy.integrate.odeint",
"numpy.flatnonzero",
"pyswarms.single.LocalBestPSO",
"joblib.Parallel",
"numpy.array",
"numpy.split",
"numpy.zeros",
"numpy.empty",
"numpy.isnan",
"copy.deepcopy",
"joblib.dela... | [((987, 4278), 'numpy.array', 'np.array', (['[[0.47868515, 0.50507561, 0.29848922, 0.15763748, 0.26276959, 0.40185462, \n 0.46855027, 0.42581354, 0.2150961, 0.0856771, 0.08705463, 0.07551931, \n 0.05129175, 0.02344832, 0.00793644, 0.01072846], [0.35580205, \n 0.77874482, 0.51392686, 0.21151069, 0.08597966, 0.28306027, 0.49982218,\n 0.52854893, 0.41220947, 0.15848728, 0.07491245, 0.07658339, 0.04772343,\n 0.02588962, 0.01125956, 0.01073152], [0.25903114, 0.63488713, \n 1.36175618, 0.50016515, 0.11748191, 0.10264613, 0.24113458, 0.47274372,\n 0.54026417, 0.26708819, 0.11007723, 0.04406045, 0.02746409, 0.02825033,\n 0.02044872, 0.01214665], [0.14223192, 0.24383932, 0.53761638, \n 1.05325205, 0.28778496, 0.10925453, 0.0651564, 0.2432454, 0.39011334, \n 0.41381277, 0.23194909, 0.07541471, 0.03428398, 0.02122257, 0.01033573,\n 0.00864859], [0.27381886, 0.15430529, 0.16053062, 0.5104134, 0.95175366,\n 0.3586594, 0.09248672, 0.04774269, 0.15814197, 0.36581739, 0.25544811, \n 0.13338965, 0.03461345, 0.01062458, 0.00844199, 0.00868782], [\n 0.59409802, 0.26971847, 0.10669146, 0.18330524, 0.39561893, 0.81955947,\n 0.26376865, 0.06604084, 0.03824556, 0.11560004, 0.23218163, 0.15331788,\n 0.07336147, 0.02312255, 0.00412646, 0.01025778], [0.63860889, \n 0.75760606, 0.43109156, 0.09913293, 0.13935789, 0.32056062, 0.65710277,\n 0.25488454, 0.1062129, 0.0430932, 0.06880784, 0.09938458, 0.09010691, \n 0.02233902, 0.01155556, 0.00695246], [0.56209348, 0.87334544, \n 0.75598244, 0.33199136, 0.07233271, 0.08674171, 0.20243583, 0.60062714,\n 0.17793601, 0.06307045, 0.04445926, 0.04082447, 0.06275133, 0.04051762,\n 0.01712777, 0.00598721], [0.35751289, 0.66234582, 0.77180208, \n 0.54993616, 0.17368099, 0.07361914, 0.13016852, 0.19937327, 0.46551558,\n 0.15412263, 0.06123041, 0.0182514, 0.04234381, 0.04312892, 0.01656267, \n 0.01175358], [0.208131, 0.41591452, 0.56510014, 0.67760241, 0.38146504,\n 0.14185001, 0.06160354, 0.12945701, 0.16470166, 0.41150841, 0.14596804,\n 
0.04404807, 0.02395316, 0.01731295, 0.01469059, 0.02275339], [\n 0.30472548, 0.26744442, 0.41631962, 0.46516888, 0.41751365, 0.28520772,\n 0.13931619, 0.07682945, 0.11404965, 0.16122096, 0.33813266, 0.1349378, \n 0.03755396, 0.01429426, 0.01356763, 0.02551792], [0.52762004, \n 0.52787011, 0.33622117, 0.43037934, 0.36416323, 0.42655672, 0.33780201,\n 0.13492044, 0.0798784, 0.15795568, 0.20367727, 0.33176385, 0.12256126, \n 0.05573807, 0.0124446, 0.02190564], [0.53741472, 0.50750067, 0.3229994,\n 0.30706704, 0.21340314, 0.27424513, 0.32838657, 0.26023515, 0.13222548,\n 0.07284901, 0.11950584, 0.16376401, 0.25560123, 0.09269703, 0.02451284,\n 0.00631762], [0.37949376, 0.55324102, 0.47449156, 0.24796638, \n 0.19276924, 0.20675484, 0.3267867, 0.39525729, 0.3070043, 0.10088992, \n 0.10256839, 0.13016641, 0.1231421, 0.24067708, 0.05475668, 0.01401368],\n [0.16359554, 0.48536065, 0.40533723, 0.31542539, 0.06890518, 0.15670328,\n 0.12884062, 0.27912381, 0.25685832, 0.20143856, 0.12497647, 0.07565566,\n 0.10331686, 0.08830789, 0.15657321, 0.05744065], [0.29555039, \n 0.39898035, 0.60257982, 0.5009724, 0.13799378, 0.11716593, 0.14366306, \n 0.31602298, 0.34691652, 0.30960511, 0.31253708, 0.14557295, 0.06065554,\n 0.10654772, 0.06390924, 0.09827735]]'], {}), '([[0.47868515, 0.50507561, 0.29848922, 0.15763748, 0.26276959, \n 0.40185462, 0.46855027, 0.42581354, 0.2150961, 0.0856771, 0.08705463, \n 0.07551931, 0.05129175, 0.02344832, 0.00793644, 0.01072846], [\n 0.35580205, 0.77874482, 0.51392686, 0.21151069, 0.08597966, 0.28306027,\n 0.49982218, 0.52854893, 0.41220947, 0.15848728, 0.07491245, 0.07658339,\n 0.04772343, 0.02588962, 0.01125956, 0.01073152], [0.25903114, \n 0.63488713, 1.36175618, 0.50016515, 0.11748191, 0.10264613, 0.24113458,\n 0.47274372, 0.54026417, 0.26708819, 0.11007723, 0.04406045, 0.02746409,\n 0.02825033, 0.02044872, 0.01214665], [0.14223192, 0.24383932, \n 0.53761638, 1.05325205, 0.28778496, 0.10925453, 0.0651564, 0.2432454, \n 0.39011334, 0.41381277, 
0.23194909, 0.07541471, 0.03428398, 0.02122257,\n 0.01033573, 0.00864859], [0.27381886, 0.15430529, 0.16053062, 0.5104134,\n 0.95175366, 0.3586594, 0.09248672, 0.04774269, 0.15814197, 0.36581739, \n 0.25544811, 0.13338965, 0.03461345, 0.01062458, 0.00844199, 0.00868782],\n [0.59409802, 0.26971847, 0.10669146, 0.18330524, 0.39561893, 0.81955947,\n 0.26376865, 0.06604084, 0.03824556, 0.11560004, 0.23218163, 0.15331788,\n 0.07336147, 0.02312255, 0.00412646, 0.01025778], [0.63860889, \n 0.75760606, 0.43109156, 0.09913293, 0.13935789, 0.32056062, 0.65710277,\n 0.25488454, 0.1062129, 0.0430932, 0.06880784, 0.09938458, 0.09010691, \n 0.02233902, 0.01155556, 0.00695246], [0.56209348, 0.87334544, \n 0.75598244, 0.33199136, 0.07233271, 0.08674171, 0.20243583, 0.60062714,\n 0.17793601, 0.06307045, 0.04445926, 0.04082447, 0.06275133, 0.04051762,\n 0.01712777, 0.00598721], [0.35751289, 0.66234582, 0.77180208, \n 0.54993616, 0.17368099, 0.07361914, 0.13016852, 0.19937327, 0.46551558,\n 0.15412263, 0.06123041, 0.0182514, 0.04234381, 0.04312892, 0.01656267, \n 0.01175358], [0.208131, 0.41591452, 0.56510014, 0.67760241, 0.38146504,\n 0.14185001, 0.06160354, 0.12945701, 0.16470166, 0.41150841, 0.14596804,\n 0.04404807, 0.02395316, 0.01731295, 0.01469059, 0.02275339], [\n 0.30472548, 0.26744442, 0.41631962, 0.46516888, 0.41751365, 0.28520772,\n 0.13931619, 0.07682945, 0.11404965, 0.16122096, 0.33813266, 0.1349378, \n 0.03755396, 0.01429426, 0.01356763, 0.02551792], [0.52762004, \n 0.52787011, 0.33622117, 0.43037934, 0.36416323, 0.42655672, 0.33780201,\n 0.13492044, 0.0798784, 0.15795568, 0.20367727, 0.33176385, 0.12256126, \n 0.05573807, 0.0124446, 0.02190564], [0.53741472, 0.50750067, 0.3229994,\n 0.30706704, 0.21340314, 0.27424513, 0.32838657, 0.26023515, 0.13222548,\n 0.07284901, 0.11950584, 0.16376401, 0.25560123, 0.09269703, 0.02451284,\n 0.00631762], [0.37949376, 0.55324102, 0.47449156, 0.24796638, \n 0.19276924, 0.20675484, 0.3267867, 0.39525729, 0.3070043, 0.10088992, \n 
0.10256839, 0.13016641, 0.1231421, 0.24067708, 0.05475668, 0.01401368],\n [0.16359554, 0.48536065, 0.40533723, 0.31542539, 0.06890518, 0.15670328,\n 0.12884062, 0.27912381, 0.25685832, 0.20143856, 0.12497647, 0.07565566,\n 0.10331686, 0.08830789, 0.15657321, 0.05744065], [0.29555039, \n 0.39898035, 0.60257982, 0.5009724, 0.13799378, 0.11716593, 0.14366306, \n 0.31602298, 0.34691652, 0.30960511, 0.31253708, 0.14557295, 0.06065554,\n 0.10654772, 0.06390924, 0.09827735]])\n', (995, 4278), True, 'import numpy as np\n'), ((4604, 8613), 'numpy.array', 'np.array', (['[[0.321885854, 0.0431659966, 0.00788269419, 0.00809548363, 0.00535038146, \n 0.0218201974, 0.0401633514, 0.0299376002, 0.0140680283, 0.0166587853, \n 0.00947774696, 0.00741041622, 0.00128200661, 0.000779120405, \n 8.23608272e-66, 6.37926405e-120], [0.0540133328, 4.84870697, \n 0.270046494, 0.031477845, 0.0311206331, 0.0856826951, 0.108251879, \n 0.0946101139, 0.0863528188, 0.0551141159, 0.0419385198, 0.0120958942, \n 0.00477242219, 0.00139787217, 0.000347452943, 8.08973738e-39], [\n 0.000456461982, 1.04840235, 6.09152459, 0.198915822, 0.0199709921, \n 0.0668319525, 0.0658949586, 0.0970851505, 0.0954147078, 0.0670538232, \n 0.0424864096, 0.0198701346, 0.00511869429, 0.000727320438, \n 4.93746124e-25, 0.000182153965], [0.00259613205, 0.0473315233, \n 1.99337834, 7.200405, 0.0857326037, 0.0790668822, 0.0854208542, \n 0.110816964, 0.0876955236, 0.0922975521, 0.0458035025, 0.0251130956, \n 0.00571391798, 0.00107818752, 6.21174558e-33, 1.70710246e-70], [\n 0.0071915872, 0.0248833195, 0.00989727235, 0.876815025, 0.433963352, \n 0.0505185217, 0.0330594492, 0.0381384107, 0.0234709676, 0.0267235372, \n 0.0132913985, 0.00900655556, 0.000694913059, 0.00125675951, \n 0.000177164197, 1.21957619e-47], [0.00704119204, 0.119412206, \n 0.037501698, 0.202193056, 0.279822908, 0.168610223, 0.0286939363, \n 0.0356961469, 0.0409234494, 0.0332290896, 0.00812074348, 0.0126152144, \n 0.00427869081, 0.00241737477, 0.000463116893, 
0.00128597237], [\n 0.014148632, 0.386561429, 0.255902236, 0.169973534, 0.049810401, \n 0.0898122446, 0.0795333394, 0.0519274611, 0.054661293, 0.0264567137, \n 0.0203241595, 0.0029626322, 0.00542888613, 0.00044758597, \n 1.65440335e-48, 3.11189454e-55], [0.0240945305, 0.211030046, \n 0.154767246, 0.0817929897, 0.0184061608, 0.0543009779, 0.0739351186, \n 0.0521677009, 0.0563267084, 0.0251807147, 0.00353972554, 0.00796646343,\n 0.000556929776, 0.00208530461, 1.8442829e-123, 9.69555083e-67], [\n 0.00781313905, 0.114371898, 0.0909011945, 0.380212104, 0.00854533192, \n 0.0262430162, 0.0251880009, 0.0322563508, 0.0673506045, 0.0224997143, \n 0.0239241043, 0.00650627191, 0.00550892674, 0.00047830885, \n 4.81213215e-68, 2.40231425e-92], [0.0655265016, 0.231163536, \n 0.149970765, 0.553563093, 0.00574032526, 0.0302865481, 0.0572506883, \n 0.0470559232, 0.0428736553, 0.0242614518, 0.0286665377, 0.0129570473, \n 0.00324362518, 0.00167930318, 6.2091695e-134, 3.27297624e-72], [\n 0.0172765646, 0.343744913, 0.430902785, 0.474293073, 0.00539328187, \n 0.014412874, 0.0395545363, 0.037378186, 0.0456834488, 0.0592135906, \n 0.0291473801, 0.0154857502, 0.0045310539, 8.87272668e-24, \n 1.23797452e-117, 5.64262349e-78], [0.0614363036, 0.298367348, 0.2590927,\n 0.300800812, 0.00592454596, 0.0526458862, 0.0202188672, 0.0327897605, \n 0.0407753741, 0.0283422407, 0.0243657809, 0.0273993226, 0.00887990718, \n 1.1327918e-31, 0.000781960493, 0.00076246751], [0.0363695643, \n 0.0596870355, 0.0305072624, 0.145523978, 0.0126062984, 0.00169458169, \n 0.0155127292, 0.042209767, 0.00921792425, 0.0142200652, 0.0110967529, \n 0.00577020348, 0.0204474044, 0.0111075734, 4.42271199e-67, \n 2.12068625e-37], [0.00167937029, 0.0272971001, 0.0105886266, \n 7.61087735e-32, 0.00197191559, 0.00192885006, 0.0124343737, \n 0.00539297787, 0.00541684968, 0.00863502071, 0.00194554498, \n 0.0149082274, 0.008117811, 0.0174395489, 0.0111239023, 3.45693088e-126],\n [1.28088348e-28, 5.110652e-26, 1.93019797e-40, 
0.00760476035, \n 2.63586947e-22, 1.69749024e-24, 1.25875005e-26, 0.00762109877, \n 0.00784979948, 0.0211516023, 0.0352117832, 0.0214360383, 0.00773902109,\n 0.00801328325, 0.00791285055, 0.0213825814], [2.81655586e-94, \n 0.0211305187, 8.46562506e-42, 0.0212592841, 4.89802057e-36, \n 0.00759232387, 9.77247001e-69, 2.23108239e-60, 1.43715978e-48, \n 8.56015694e-60, 4.69469043e-42, 1.59822047e-46, 2.2097855e-83, \n 8.85861277e-107, 1.02042815e-80, 6.61413913e-113]]'], {}), '([[0.321885854, 0.0431659966, 0.00788269419, 0.00809548363, \n 0.00535038146, 0.0218201974, 0.0401633514, 0.0299376002, 0.0140680283, \n 0.0166587853, 0.00947774696, 0.00741041622, 0.00128200661, \n 0.000779120405, 8.23608272e-66, 6.37926405e-120], [0.0540133328, \n 4.84870697, 0.270046494, 0.031477845, 0.0311206331, 0.0856826951, \n 0.108251879, 0.0946101139, 0.0863528188, 0.0551141159, 0.0419385198, \n 0.0120958942, 0.00477242219, 0.00139787217, 0.000347452943, \n 8.08973738e-39], [0.000456461982, 1.04840235, 6.09152459, 0.198915822, \n 0.0199709921, 0.0668319525, 0.0658949586, 0.0970851505, 0.0954147078, \n 0.0670538232, 0.0424864096, 0.0198701346, 0.00511869429, 0.000727320438,\n 4.93746124e-25, 0.000182153965], [0.00259613205, 0.0473315233, \n 1.99337834, 7.200405, 0.0857326037, 0.0790668822, 0.0854208542, \n 0.110816964, 0.0876955236, 0.0922975521, 0.0458035025, 0.0251130956, \n 0.00571391798, 0.00107818752, 6.21174558e-33, 1.70710246e-70], [\n 0.0071915872, 0.0248833195, 0.00989727235, 0.876815025, 0.433963352, \n 0.0505185217, 0.0330594492, 0.0381384107, 0.0234709676, 0.0267235372, \n 0.0132913985, 0.00900655556, 0.000694913059, 0.00125675951, \n 0.000177164197, 1.21957619e-47], [0.00704119204, 0.119412206, \n 0.037501698, 0.202193056, 0.279822908, 0.168610223, 0.0286939363, \n 0.0356961469, 0.0409234494, 0.0332290896, 0.00812074348, 0.0126152144, \n 0.00427869081, 0.00241737477, 0.000463116893, 0.00128597237], [\n 0.014148632, 0.386561429, 0.255902236, 0.169973534, 0.049810401, \n 
0.0898122446, 0.0795333394, 0.0519274611, 0.054661293, 0.0264567137, \n 0.0203241595, 0.0029626322, 0.00542888613, 0.00044758597, \n 1.65440335e-48, 3.11189454e-55], [0.0240945305, 0.211030046, \n 0.154767246, 0.0817929897, 0.0184061608, 0.0543009779, 0.0739351186, \n 0.0521677009, 0.0563267084, 0.0251807147, 0.00353972554, 0.00796646343,\n 0.000556929776, 0.00208530461, 1.8442829e-123, 9.69555083e-67], [\n 0.00781313905, 0.114371898, 0.0909011945, 0.380212104, 0.00854533192, \n 0.0262430162, 0.0251880009, 0.0322563508, 0.0673506045, 0.0224997143, \n 0.0239241043, 0.00650627191, 0.00550892674, 0.00047830885, \n 4.81213215e-68, 2.40231425e-92], [0.0655265016, 0.231163536, \n 0.149970765, 0.553563093, 0.00574032526, 0.0302865481, 0.0572506883, \n 0.0470559232, 0.0428736553, 0.0242614518, 0.0286665377, 0.0129570473, \n 0.00324362518, 0.00167930318, 6.2091695e-134, 3.27297624e-72], [\n 0.0172765646, 0.343744913, 0.430902785, 0.474293073, 0.00539328187, \n 0.014412874, 0.0395545363, 0.037378186, 0.0456834488, 0.0592135906, \n 0.0291473801, 0.0154857502, 0.0045310539, 8.87272668e-24, \n 1.23797452e-117, 5.64262349e-78], [0.0614363036, 0.298367348, 0.2590927,\n 0.300800812, 0.00592454596, 0.0526458862, 0.0202188672, 0.0327897605, \n 0.0407753741, 0.0283422407, 0.0243657809, 0.0273993226, 0.00887990718, \n 1.1327918e-31, 0.000781960493, 0.00076246751], [0.0363695643, \n 0.0596870355, 0.0305072624, 0.145523978, 0.0126062984, 0.00169458169, \n 0.0155127292, 0.042209767, 0.00921792425, 0.0142200652, 0.0110967529, \n 0.00577020348, 0.0204474044, 0.0111075734, 4.42271199e-67, \n 2.12068625e-37], [0.00167937029, 0.0272971001, 0.0105886266, \n 7.61087735e-32, 0.00197191559, 0.00192885006, 0.0124343737, \n 0.00539297787, 0.00541684968, 0.00863502071, 0.00194554498, \n 0.0149082274, 0.008117811, 0.0174395489, 0.0111239023, 3.45693088e-126],\n [1.28088348e-28, 5.110652e-26, 1.93019797e-40, 0.00760476035, \n 2.63586947e-22, 1.69749024e-24, 1.25875005e-26, 0.00762109877, \n 
0.00784979948, 0.0211516023, 0.0352117832, 0.0214360383, 0.00773902109,\n 0.00801328325, 0.00791285055, 0.0213825814], [2.81655586e-94, \n 0.0211305187, 8.46562506e-42, 0.0212592841, 4.89802057e-36, \n 0.00759232387, 9.77247001e-69, 2.23108239e-60, 1.43715978e-48, \n 8.56015694e-60, 4.69469043e-42, 1.59822047e-46, 2.2097855e-83, \n 8.85861277e-107, 1.02042815e-80, 6.61413913e-113]])\n', (4612, 8613), True, 'import numpy as np\n'), ((9755, 13508), 'numpy.array', 'np.array', (['[[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, \n 8.20604524e-92, 1.2058515e-05, 3.16436834e-125], [0.0, 0.00116840561, \n 9.90713236e-72, 4.42646396e-59, 2.91874286e-06, 0.00998773031, \n 0.0258779981, 0.00566104376, 0.0212699812, 0.00572117462, 0.00148212306,\n 0.00123926126, 1.28212945e-56, 1.34955578e-05, 7.64591325e-79, \n 2.38392073e-65], [0.0, 0.00256552144, 0.112756182, 0.0240351143, \n 0.0262981485, 0.00756512432, 0.0619587609, 0.0173269871, 0.0587405128, \n 0.0326749742, 0.0124709193, 2.93054408e-08, 3.71596993e-17, \n 2.79780317e-53, 4.9580077e-06, 3.77718083e-102], [0.0, 0.0107213881, \n 0.0428390448, 0.72276909, 0.593479736, 0.339341952, 0.317013715, \n 0.289168861, 0.31114318, 0.234889238, 0.132953769, 0.0601944097, \n 0.0147306181, 8.34699602e-06, 2.85972822e-06, 1.88926122e-31], [0.0, \n 0.00914252587, 0.0574508682, 0.400000235, 0.793386618, 0.755975146, \n 0.632277283, 0.683601459, 0.498506972, 0.382309992, 0.281363576, \n 0.123338103, 0.0415708021, 9.86113407e-06, 1.32609387e-05, \n 3.74318048e-06], [0.0, 0.0104243481, 0.0734587492, 0.349556755, \n 0.750680101, 1.25683393, 0.901245714, 0.863446835, 0.770443641, \n 0.517237071, 0.409810981, 0.1806454, 0.0551284783, 1.60674627e-05, \n 1.01182608e-05, 3.01442534e-06], [0.0, 0.0165842404, 0.0834076781, \n 0.189301935, 0.521246906, 0.854460001, 1.12054931, 0.964310078, \n 0.83467518, 0.652534012, 0.379383514, 0.211198205, 0.0517285688, \n 1.63795563e-05, 4.10100851e-06, 3.4947898e-06], [0.0, 0.0111666639, \n 
0.0503319748, 0.370510313, 0.424294782, 0.787535547, 0.845085693, \n 1.14590365, 1.07673077, 0.713492115, 0.500740004, 0.190102207, \n 0.0359740115, 1.2298853e-05, 9.13512833e-06, 6.02097416e-06], [0.0, \n 0.0060779244, 0.0549337607, 0.223499535, 0.482353827, 0.752291991, \n 0.889187601, 0.93376537, 1.10492283, 0.850124391, 0.588941528, \n 0.194947085, 0.0509477228, 1.43626161e-05, 1.02721567e-05, \n 1.29503893e-05], [0.0, 0.00331622551, 0.0701829848, 0.267512972, \n 0.314796392, 0.541516885, 0.695769048, 0.750620518, 0.750038547, \n 0.700954088, 0.435197983, 0.211283335, 0.03885762, 1.6281037e-05, \n 1.0824361e-05, 6.09172339e-06], [0.0, 0.000439576425, 0.0717737968, \n 0.189254612, 0.247832532, 0.516027731, 0.602783971, 0.615949277, \n 0.805581107, 0.744063535, 0.544855374, 0.252198706, 0.0439235685, \n 1.18079721e-05, 1.18226645e-05, 1.01613165e-05], [0.0, 0.00491737561, \n 0.108686672, 0.124987806, 0.164110983, 0.300118829, 0.418159745, \n 0.386897613, 0.477718241, 0.36085425, 0.322466456, 0.192516925, \n 0.0407209694, 1.34978304e-05, 6.58739925e-06, 6.65716756e-06], [0.0, \n 0.000635447018, 0.039632962, 0.0183072502, 0.0704596701, 0.124861117, \n 0.137834574, 0.15984572, 0.166933479, 0.156084857, 0.114949158, \n 0.0846570798, 0.0150879843, 2.0301958e-05, 8.26102156e-06, \n 1.48398182e-05], [7.60299521e-06, 3.36326754e-06, 7.64855296e-06, \n 2.27621532e-05, 3.14933351e-05, 7.8930841e-05, 7.24212842e-05, \n 2.91748203e-05, 6.61873732e-05, 5.95693238e-05, 7.707135e-05, \n 5.30687748e-05, 4.66030117e-05, 1.41633235e-05, 2.49066205e-05, \n 1.19109038e-05], [5.7886384e-55, 7.88785149e-42, 2.54830412e-06, \n 2.60648191e-05, 1.68036205e-05, 2.12446739e-05, 3.57267603e-05, \n 4.02377033e-05, 3.56401935e-05, 3.09769252e-05, 2.13053382e-05, \n 4.49709414e-05, 2.61368373e-05, 1.68266203e-05, 1.66514322e-05, \n 2.60822813e-05], [2.35721271e-141, 9.06871674e-97, 1.18637122e-89, \n 9.39934076e-22, 4.66000452e-05, 4.69664011e-05, 4.69316082e-05, \n 8.42184044e-05, 
2.77788168e-05, 1.03294378e-05, 1.06803618e-05, \n 7.26341826e-75, 1.10073971e-65, 1.02831671e-05, 5.16902994e-49, \n 8.28040509e-43]]'], {}), '([[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,\n 8.20604524e-92, 1.2058515e-05, 3.16436834e-125], [0.0, 0.00116840561, \n 9.90713236e-72, 4.42646396e-59, 2.91874286e-06, 0.00998773031, \n 0.0258779981, 0.00566104376, 0.0212699812, 0.00572117462, 0.00148212306,\n 0.00123926126, 1.28212945e-56, 1.34955578e-05, 7.64591325e-79, \n 2.38392073e-65], [0.0, 0.00256552144, 0.112756182, 0.0240351143, \n 0.0262981485, 0.00756512432, 0.0619587609, 0.0173269871, 0.0587405128, \n 0.0326749742, 0.0124709193, 2.93054408e-08, 3.71596993e-17, \n 2.79780317e-53, 4.9580077e-06, 3.77718083e-102], [0.0, 0.0107213881, \n 0.0428390448, 0.72276909, 0.593479736, 0.339341952, 0.317013715, \n 0.289168861, 0.31114318, 0.234889238, 0.132953769, 0.0601944097, \n 0.0147306181, 8.34699602e-06, 2.85972822e-06, 1.88926122e-31], [0.0, \n 0.00914252587, 0.0574508682, 0.400000235, 0.793386618, 0.755975146, \n 0.632277283, 0.683601459, 0.498506972, 0.382309992, 0.281363576, \n 0.123338103, 0.0415708021, 9.86113407e-06, 1.32609387e-05, \n 3.74318048e-06], [0.0, 0.0104243481, 0.0734587492, 0.349556755, \n 0.750680101, 1.25683393, 0.901245714, 0.863446835, 0.770443641, \n 0.517237071, 0.409810981, 0.1806454, 0.0551284783, 1.60674627e-05, \n 1.01182608e-05, 3.01442534e-06], [0.0, 0.0165842404, 0.0834076781, \n 0.189301935, 0.521246906, 0.854460001, 1.12054931, 0.964310078, \n 0.83467518, 0.652534012, 0.379383514, 0.211198205, 0.0517285688, \n 1.63795563e-05, 4.10100851e-06, 3.4947898e-06], [0.0, 0.0111666639, \n 0.0503319748, 0.370510313, 0.424294782, 0.787535547, 0.845085693, \n 1.14590365, 1.07673077, 0.713492115, 0.500740004, 0.190102207, \n 0.0359740115, 1.2298853e-05, 9.13512833e-06, 6.02097416e-06], [0.0, \n 0.0060779244, 0.0549337607, 0.223499535, 0.482353827, 0.752291991, \n 0.889187601, 0.93376537, 1.10492283, 0.850124391, 0.588941528, 
\n 0.194947085, 0.0509477228, 1.43626161e-05, 1.02721567e-05, \n 1.29503893e-05], [0.0, 0.00331622551, 0.0701829848, 0.267512972, \n 0.314796392, 0.541516885, 0.695769048, 0.750620518, 0.750038547, \n 0.700954088, 0.435197983, 0.211283335, 0.03885762, 1.6281037e-05, \n 1.0824361e-05, 6.09172339e-06], [0.0, 0.000439576425, 0.0717737968, \n 0.189254612, 0.247832532, 0.516027731, 0.602783971, 0.615949277, \n 0.805581107, 0.744063535, 0.544855374, 0.252198706, 0.0439235685, \n 1.18079721e-05, 1.18226645e-05, 1.01613165e-05], [0.0, 0.00491737561, \n 0.108686672, 0.124987806, 0.164110983, 0.300118829, 0.418159745, \n 0.386897613, 0.477718241, 0.36085425, 0.322466456, 0.192516925, \n 0.0407209694, 1.34978304e-05, 6.58739925e-06, 6.65716756e-06], [0.0, \n 0.000635447018, 0.039632962, 0.0183072502, 0.0704596701, 0.124861117, \n 0.137834574, 0.15984572, 0.166933479, 0.156084857, 0.114949158, \n 0.0846570798, 0.0150879843, 2.0301958e-05, 8.26102156e-06, \n 1.48398182e-05], [7.60299521e-06, 3.36326754e-06, 7.64855296e-06, \n 2.27621532e-05, 3.14933351e-05, 7.8930841e-05, 7.24212842e-05, \n 2.91748203e-05, 6.61873732e-05, 5.95693238e-05, 7.707135e-05, \n 5.30687748e-05, 4.66030117e-05, 1.41633235e-05, 2.49066205e-05, \n 1.19109038e-05], [5.7886384e-55, 7.88785149e-42, 2.54830412e-06, \n 2.60648191e-05, 1.68036205e-05, 2.12446739e-05, 3.57267603e-05, \n 4.02377033e-05, 3.56401935e-05, 3.09769252e-05, 2.13053382e-05, \n 4.49709414e-05, 2.61368373e-05, 1.68266203e-05, 1.66514322e-05, \n 2.60822813e-05], [2.35721271e-141, 9.06871674e-97, 1.18637122e-89, \n 9.39934076e-22, 4.66000452e-05, 4.69664011e-05, 4.69316082e-05, \n 8.42184044e-05, 2.77788168e-05, 1.03294378e-05, 1.06803618e-05, \n 7.26341826e-75, 1.10073971e-65, 1.02831671e-05, 5.16902994e-49, \n 8.28040509e-43]])\n', (9763, 13508), True, 'import numpy as np\n'), ((14907, 18188), 'numpy.array', 'np.array', (['[[0.95537734, 0.46860132, 0.27110607, 0.19447667, 0.32135073, 0.48782072, \n 0.54963024, 0.42195593, 0.27152038, 
0.17864251, 0.20155642, 0.16358271,\n 0.1040159, 0.0874149, 0.05129938, 0.02153823], [0.51023519, 2.17757364,\n 0.9022516, 0.24304235, 0.20119518, 0.39689588, 0.47242431, 0.46949918, \n 0.37741651, 0.16843746, 0.12590504, 0.12682331, 0.11282247, 0.08222718,\n 0.03648526, 0.02404257], [0.18585796, 1.11958124, 4.47729443, \n 0.67959759, 0.43936317, 0.36934142, 0.41566744, 0.44467286, 0.48797422,\n 0.28795385, 0.17659191, 0.10674831, 0.07175567, 0.07249261, 0.04815305,\n 0.03697862], [0.09854482, 0.3514869, 1.84902386, 5.38491613, 1.27425161,\n 0.59242579, 0.36578735, 0.39181798, 0.38131832, 0.31501028, 0.13275648,\n 0.06408612, 0.04499218, 0.04000664, 0.02232326, 0.01322698], [\n 0.13674436, 0.1973461, 0.33264088, 2.08016394, 3.28810184, 1.29198125, \n 0.74642201, 0.44357051, 0.32781391, 0.35511243, 0.20132011, 0.12961, \n 0.04994553, 0.03748657, 0.03841073, 0.02700581], [0.23495203, \n 0.13839031, 0.14085679, 0.5347385, 1.46021275, 1.85222022, 1.02681162, \n 0.61513602, 0.39086271, 0.32871844, 0.25938947, 0.13520412, 0.05101963,\n 0.03714278, 0.02177751, 0.00979745], [0.23139098, 0.18634831, \n 0.32002214, 0.2477269, 0.64111274, 0.93691022, 1.14560725, 0.73176025, \n 0.43760432, 0.31057135, 0.29406937, 0.20632155, 0.09044896, 0.06448983,\n 0.03041877, 0.02522842], [0.18786196, 0.25090485, 0.21366969, \n 0.15358412, 0.35761286, 0.62390736, 0.76125666, 0.82975354, 0.54980593,\n 0.32778339, 0.20858991, 0.1607099, 0.13218526, 0.09042909, 0.04990491, \n 0.01762718], [0.12220241, 0.17968132, 0.31826246, 0.19846971, \n 0.34823183, 0.41563737, 0.55930999, 0.54070187, 0.5573184, 0.31526474, \n 0.20194048, 0.09234293, 0.08377534, 0.05819374, 0.0414762, 0.01563101],\n [0.03429527, 0.06388018, 0.09407867, 0.17418896, 0.23404519, 0.28879108,\n 0.34528852, 0.34507961, 0.31461973, 0.29954426, 0.21759668, 0.09684718,\n 0.06596679, 0.04274337, 0.0356891, 0.02459849], [0.05092152, 0.10829561,\n 0.13898902, 0.2005828, 0.35807132, 0.45181815, 0.32281821, 0.28014803, \n 0.30125545, 
0.31260137, 0.22923948, 0.17657382, 0.10276889, 0.05555467,\n 0.03430327, 0.02064256], [0.06739051, 0.06795035, 0.0826437, 0.09522087,\n 0.23309189, 0.39055444, 0.39458465, 0.29290532, 0.27204846, 0.17810118,\n 0.24399007, 0.22146653, 0.13732849, 0.07585801, 0.03938794, 0.0190908],\n [0.04337917, 0.05375367, 0.05230119, 0.08066901, 0.16619572, 0.25423056,\n 0.25580913, 0.27430323, 0.22478799, 0.16909017, 0.14284879, 0.17211604,\n 0.14336033, 0.10344522, 0.06797049, 0.02546014], [0.04080687, \n 0.06113728, 0.04392062, 0.04488748, 0.12808591, 0.19886058, 0.24542711,\n 0.19678011, 0.17800136, 0.13147441, 0.13564091, 0.14280335, 0.12969805,\n 0.11181631, 0.05550193, 0.02956066], [0.01432324, 0.03441212, \n 0.05604694, 0.10154456, 0.09204, 0.13341443, 0.13396901, 0.16682638, \n 0.18562675, 0.1299677, 0.09922375, 0.09634331, 0.15184583, 0.13541738, \n 0.1169359, 0.03805293], [0.01972631, 0.02274412, 0.03797545, 0.02036785,\n 0.04357298, 0.05783639, 0.10706321, 0.07688271, 0.06969759, 0.08029393,\n 0.05466604, 0.05129046, 0.04648653, 0.06132882, 0.05004289, 0.03030569]]'], {}), '([[0.95537734, 0.46860132, 0.27110607, 0.19447667, 0.32135073, \n 0.48782072, 0.54963024, 0.42195593, 0.27152038, 0.17864251, 0.20155642,\n 0.16358271, 0.1040159, 0.0874149, 0.05129938, 0.02153823], [0.51023519,\n 2.17757364, 0.9022516, 0.24304235, 0.20119518, 0.39689588, 0.47242431, \n 0.46949918, 0.37741651, 0.16843746, 0.12590504, 0.12682331, 0.11282247,\n 0.08222718, 0.03648526, 0.02404257], [0.18585796, 1.11958124, \n 4.47729443, 0.67959759, 0.43936317, 0.36934142, 0.41566744, 0.44467286,\n 0.48797422, 0.28795385, 0.17659191, 0.10674831, 0.07175567, 0.07249261,\n 0.04815305, 0.03697862], [0.09854482, 0.3514869, 1.84902386, 5.38491613,\n 1.27425161, 0.59242579, 0.36578735, 0.39181798, 0.38131832, 0.31501028,\n 0.13275648, 0.06408612, 0.04499218, 0.04000664, 0.02232326, 0.01322698],\n [0.13674436, 0.1973461, 0.33264088, 2.08016394, 3.28810184, 1.29198125,\n 0.74642201, 0.44357051, 0.32781391, 
0.35511243, 0.20132011, 0.12961, \n 0.04994553, 0.03748657, 0.03841073, 0.02700581], [0.23495203, \n 0.13839031, 0.14085679, 0.5347385, 1.46021275, 1.85222022, 1.02681162, \n 0.61513602, 0.39086271, 0.32871844, 0.25938947, 0.13520412, 0.05101963,\n 0.03714278, 0.02177751, 0.00979745], [0.23139098, 0.18634831, \n 0.32002214, 0.2477269, 0.64111274, 0.93691022, 1.14560725, 0.73176025, \n 0.43760432, 0.31057135, 0.29406937, 0.20632155, 0.09044896, 0.06448983,\n 0.03041877, 0.02522842], [0.18786196, 0.25090485, 0.21366969, \n 0.15358412, 0.35761286, 0.62390736, 0.76125666, 0.82975354, 0.54980593,\n 0.32778339, 0.20858991, 0.1607099, 0.13218526, 0.09042909, 0.04990491, \n 0.01762718], [0.12220241, 0.17968132, 0.31826246, 0.19846971, \n 0.34823183, 0.41563737, 0.55930999, 0.54070187, 0.5573184, 0.31526474, \n 0.20194048, 0.09234293, 0.08377534, 0.05819374, 0.0414762, 0.01563101],\n [0.03429527, 0.06388018, 0.09407867, 0.17418896, 0.23404519, 0.28879108,\n 0.34528852, 0.34507961, 0.31461973, 0.29954426, 0.21759668, 0.09684718,\n 0.06596679, 0.04274337, 0.0356891, 0.02459849], [0.05092152, 0.10829561,\n 0.13898902, 0.2005828, 0.35807132, 0.45181815, 0.32281821, 0.28014803, \n 0.30125545, 0.31260137, 0.22923948, 0.17657382, 0.10276889, 0.05555467,\n 0.03430327, 0.02064256], [0.06739051, 0.06795035, 0.0826437, 0.09522087,\n 0.23309189, 0.39055444, 0.39458465, 0.29290532, 0.27204846, 0.17810118,\n 0.24399007, 0.22146653, 0.13732849, 0.07585801, 0.03938794, 0.0190908],\n [0.04337917, 0.05375367, 0.05230119, 0.08066901, 0.16619572, 0.25423056,\n 0.25580913, 0.27430323, 0.22478799, 0.16909017, 0.14284879, 0.17211604,\n 0.14336033, 0.10344522, 0.06797049, 0.02546014], [0.04080687, \n 0.06113728, 0.04392062, 0.04488748, 0.12808591, 0.19886058, 0.24542711,\n 0.19678011, 0.17800136, 0.13147441, 0.13564091, 0.14280335, 0.12969805,\n 0.11181631, 0.05550193, 0.02956066], [0.01432324, 0.03441212, \n 0.05604694, 0.10154456, 0.09204, 0.13341443, 0.13396901, 0.16682638, \n 0.18562675, 
0.1299677, 0.09922375, 0.09634331, 0.15184583, 0.13541738, \n 0.1169359, 0.03805293], [0.01972631, 0.02274412, 0.03797545, 0.02036785,\n 0.04357298, 0.05783639, 0.10706321, 0.07688271, 0.06969759, 0.08029393,\n 0.05466604, 0.05129046, 0.04648653, 0.06132882, 0.05004289, 0.03030569]])\n', (14915, 18188), True, 'import numpy as np\n'), ((961, 974), 'numpy.arange', 'np.arange', (['(16)'], {}), '(16)\n', (970, 974), True, 'import numpy as np\n'), ((18899, 18924), 'numpy.empty', 'np.empty', (['(4, nMat, nMat)'], {}), '((4, nMat, nMat))\n', (18907, 18924), True, 'import numpy as np\n'), ((18973, 19014), 'numpy.flatnonzero', 'np.flatnonzero', (['(ages_Mu_min <= age_sep[0])'], {}), '(ages_Mu_min <= age_sep[0])\n', (18987, 19014), True, 'import numpy as np\n'), ((19222, 19263), 'numpy.flatnonzero', 'np.flatnonzero', (['(ages_Mu_min > age_sep[-1])'], {}), '(ages_Mu_min > age_sep[-1])\n', (19236, 19263), True, 'import numpy as np\n'), ((20264, 20278), 'numpy.split', 'np.split', (['X', '(9)'], {}), '(X, 9)\n', (20272, 20278), True, 'import numpy as np\n'), ((21344, 21364), 'copy.deepcopy', 'copy.deepcopy', (['ppars'], {}), '(ppars)\n', (21357, 21364), False, 'import copy\n'), ((25461, 25481), 'copy.deepcopy', 'copy.deepcopy', (['pars0'], {}), '(pars0)\n', (25474, 25481), False, 'import copy\n'), ((26209, 26238), 'numpy.zeros', 'np.zeros', (['coefs_list.shape[0]'], {}), '(coefs_list.shape[0])\n', (26217, 26238), True, 'import numpy as np\n'), ((26671, 26685), 'numpy.empty', 'np.empty', (['(0,)'], {}), '((0,))\n', (26679, 26685), True, 'import numpy as np\n'), ((27548, 27567), 'copy.deepcopy', 'copy.deepcopy', (['pars'], {}), '(pars)\n', (27561, 27567), False, 'import copy\n'), ((29108, 29246), 'pyswarms.single.LocalBestPSO', 'ps.single.LocalBestPSO', ([], {'n_particles': "paramPSO['n_particles']", 'dimensions': 'self.n_to_fit', 'options': "paramPSO['options']", 'bounds': 'self.bound'}), "(n_particles=paramPSO['n_particles'], dimensions=self\n .n_to_fit, 
options=paramPSO['options'], bounds=self.bound)\n", (29130, 29246), True, 'import pyswarms as ps\n'), ((19080, 19156), 'numpy.flatnonzero', 'np.flatnonzero', (['((ages_Mu_min > age_sep[i - 1]) * (ages_Mu_min <= age_sep[i]))'], {}), '((ages_Mu_min > age_sep[i - 1]) * (ages_Mu_min <= age_sep[i]))\n', (19094, 19156), True, 'import numpy as np\n'), ((21532, 21545), 'numpy.arange', 'np.arange', (['ts'], {}), '(ts)\n', (21541, 21545), True, 'import numpy as np\n'), ((25287, 25309), 'numpy.array', 'np.array', (['bound_new[0]'], {}), '(bound_new[0])\n', (25295, 25309), True, 'import numpy as np\n'), ((25337, 25359), 'numpy.array', 'np.array', (['bound_new[1]'], {}), '(bound_new[1])\n', (25345, 25359), True, 'import numpy as np\n'), ((31106, 31156), 'scipy.optimize.least_squares', 'least_squares', (['self._residuals', 'init'], {'bounds': 'bound'}), '(self._residuals, init, bounds=bound)\n', (31119, 31156), False, 'from scipy.optimize import least_squares\n'), ((22410, 22467), 'scipy.integrate.odeint', 'spi.odeint', (['self._SEIIHURD_age_eq', 'Y[-1]', 't'], {'args': '(pars,)'}), '(self._SEIIHURD_age_eq, Y[-1], t, args=(pars,))\n', (22420, 22467), True, 'import scipy.integrate as spi\n'), ((22655, 22730), 'scipy.integrate.odeint', 'spi.odeint', (['self._SEIIHURD_age_eq', 'Y[-1]', 'tcorte[i - 1:i + 1]'], {'args': '(pars,)'}), '(self._SEIIHURD_age_eq, Y[-1], tcorte[i - 1:i + 1], args=(pars,))\n', (22665, 22730), True, 'import scipy.integrate as spi\n'), ((26595, 26609), 'numpy.sqrt', 'np.sqrt', (['(x + 1)'], {}), '(x + 1)\n', (26602, 26609), True, 'import numpy as np\n'), ((26639, 26654), 'numpy.ones_like', 'np.ones_like', (['x'], {}), '(x)\n', (26651, 26654), True, 'import numpy as np\n'), ((27383, 27397), 'numpy.isnan', 'np.isnan', (['errs'], {}), '(errs)\n', (27391, 27397), True, 'import numpy as np\n'), ((30551, 30587), 'numpy.random.rand', 'np.random.rand', (['nrand', 'self.n_to_fit'], {}), '(nrand, self.n_to_fit)\n', (30565, 30587), True, 'import numpy as np\n'), ((30921, 
30960), 'numpy.array', 'np.array', (['[res.cost for res in all_res]'], {}), '([res.cost for res in all_res])\n', (30929, 30960), True, 'import numpy as np\n'), ((30187, 30216), 'numpy.random.rand', 'np.random.rand', (['self.n_to_fit'], {}), '(self.n_to_fit)\n', (30201, 30216), True, 'import numpy as np\n'), ((30325, 30380), 'scipy.optimize.least_squares', 'least_squares', (['self._residuals', 'par0'], {'bounds': 'self.bound'}), '(self._residuals, par0, bounds=self.bound)\n', (30338, 30380), False, 'from scipy.optimize import least_squares\n'), ((30729, 30782), 'scipy.optimize.least_squares', 'least_squares', (['self._residuals', 'p0'], {'bounds': 'self.bound'}), '(self._residuals, p0, bounds=self.bound)\n', (30742, 30782), False, 'from scipy.optimize import least_squares\n'), ((30809, 30857), 'joblib.Parallel', 'joblib.Parallel', ([], {'n_jobs': 'self.numeroProcessadores'}), '(n_jobs=self.numeroProcessadores)\n', (30824, 30857), False, 'import joblib\n'), ((30858, 30875), 'joblib.delayed', 'joblib.delayed', (['f'], {}), '(f)\n', (30872, 30875), False, 'import joblib\n')] |
# -*- coding: utf-8 -*-
import importlib
import os
import sys

from gluon import current
from gluon.storage import Storage
__all__ = ("PluginLoader",
)
# Name of the plugin directory inside the application's modules folder
PLUGINS = "plugins"
# Module names to skip when scanning the plugin directory
IGNORE = ("skeleton", "__init__")
# Name of the setup function every plugin must provide
SETUP = "setup"
# Name of the module-level variable holding a plugin's version string
VERSION = "__version__"
# =============================================================================
class PluginLoader(object):
    """
    Simple plugin loader (experimental)

    Plugins are python modules or packages in the modules/plugins
    directory.

    Each plugin defines a setup() function which is called during
    the request cycle immediately before entering the controller.

    Plugins can be added by simply placing them in the plugins
    directory, without any code change required.

    The plugin directory will be scanned for new or updated plugins
    whenever a new session starts, or by calling explicitly:

        PluginLoader.detect(reset_all=True)

    NB the reloading of the plugins can only be enforced in the
    current interpreter thread - while other threads may still
    run the old version. Therefore, it is recommended to restart
    all threads (=reloading the server) after installing or updating
    a plugin.

    NB failing setup() methods will not be tried again until the next
    reload (new session, restart, or explicit call)

    session.s3.plugins contains a dict of all current plugins, like:

        {name: (version, status)}

    where:
        - name is the python module name of the plugin
        - version is the version string provided by the plugin (or
          "unknown" if not present)
        - status is:
            None = newly detected plugin, not set up yet
            True = plugin has been set up successfully
            False = plugin setup failed in the last attempt, deactivated
    """

    # -------------------------------------------------------------------------
    @classmethod
    def setup_all(cls, reload_all=False):
        """
        Setup all plugins

        @param reload_all: reload all plugins and reset the registry
        """

        if reload_all:
            cls.detect(reset_all=True)
        # Iterate over a snapshot of the names: load() can pop entries
        # from the registry (e.g. for modules that are not plugins),
        # which would break direct iteration of a dict view on Python 3
        for name in list(cls._registry().keys()):
            cls.load(name)

    # -------------------------------------------------------------------------
    @classmethod
    def detect(cls, reset_all=False):
        """
        Detect new plugins and update the registry

        @param reset_all: reset all entries in the registry
        """

        default = (None, None)
        if reset_all:
            # Discard any previous version/status info
            plugin = lambda name: default
        else:
            # Keep the known version/status for already-registered plugins
            registry = cls._registry()
            plugin = lambda name: registry.get(name, default)
        plugins = dict((name, plugin(name)) for name in cls._scan())
        cls._registry(plugins)

    # -------------------------------------------------------------------------
    @classmethod
    def load(cls, name, force=False):
        """
        Run the setup method of a particular plugin

        @param name: the name of the plugin
        @param force: enforce the plugin to be reloaded and its
                      setup method to be re-run regardless of the
                      previous status

        @return: True if the plugin has been set up successfully,
                 else False

        @raise NameError: if the plugin cannot be found
        """

        log = current.log

        registry = cls._registry()
        if name not in registry:
            cls.detect()
            # detect() installs a *new* registry dict in the session, so
            # the local reference must be refreshed before re-checking
            # (and before any writes below), otherwise we would keep
            # reading/updating a stale dict
            registry = cls._registry()
            if name not in registry:
                raise NameError("plugin '%s' not found" % name)

        # Get version and status info from registry
        plugin_info = registry[name]
        if force or not isinstance(plugin_info, tuple):
            version, status = None, None
        else:
            version, status = plugin_info

        if status is None:
            # Newly detected (or forced) => reload the module first
            new = True
            if not cls._reload(name):
                version, status = "unknown", False
            else:
                version, status = None, True
        else:
            new = False
        if status is False:
            # Skip plugins which have failed in previous attempts
            registry[name] = (version, status)
            return False

        status = True
        setup = None

        # Import manifest
        package = "%s.%s" % (PLUGINS, name)
        try:
            setup = getattr(__import__(package, fromlist=[SETUP]), SETUP)
        except (ImportError, AttributeError):
            # This may not be a plugin at all => remove from registry
            if new:
                log.debug("Plugin '%s' not found" % name)
            registry.pop(name, None)
            return False
        except SyntaxError:
            if new:
                log.error("Skipping invalid plugin '%s'" % name)
            if current.response.s3.debug:
                raise
            version, status = "invalid", False

        if version is None:
            # Update version info if plugin has been reloaded
            try:
                version = getattr(__import__(package, fromlist=[VERSION]), VERSION)
            except (ImportError, AttributeError):
                version = "unknown"

        if status and not callable(setup):
            # Is a module => find setup function
            try:
                setup = setup.setup
            except AttributeError:
                # No setup function found => treat as failed
                if new:
                    log.debug("No setup function found for plugin '%s'" % name)
                status = False

        if status:
            # Execute setup method
            if new:
                log.info("Setting up plugin '%s'" % name)
            try:
                setup()
            except Exception:
                log.error("Plugin '%s' setup failed" % name)
                if current.response.s3.debug:
                    raise
                status = False

        # Update the registry
        registry[name] = (version, status)

        return status

    # -------------------------------------------------------------------------
    @classmethod
    def _registry(cls, plugins=None):
        """
        Get (or replace) the current plugin registry

        @param plugins: the new registry
        """

        session_s3 = current.session.s3
        if plugins is not None:
            # Check against None (not truth) so that an empty scan
            # result still resets the registry
            registry = session_s3.plugins = plugins
        else:
            registry = session_s3.plugins
            if registry is None:
                # New session => run detect
                # - initialize registry first to prevent infinite recursion
                registry = session_s3.plugins = {}
                cls.detect()
        return registry

    # -------------------------------------------------------------------------
    @staticmethod
    def _scan():
        """
        Iterator scanning the plugin directory for available plugins

        @return: the names of the plugins
        """

        folder = current.request.folder
        path = os.path.join(folder, "modules", PLUGINS)
        names = os.listdir(path)
        for name in names:
            name_, extension = os.path.splitext(name)
            if name_ in IGNORE:
                continue
            path_ = os.path.join(path, name)
            # Accept both packages (directories) and plain modules (*.py)
            if os.path.isdir(path_) or extension == ".py":
                yield name_

    # -------------------------------------------------------------------------
    @staticmethod
    def _reload(name):
        """
        Reload a plugin

        @param name: the plugin name

        @note: this works only within the current thread, other
               threads may still be bound to the old version of
               the plugin
        """

        if name in IGNORE:
            return None

        success = True

        appname = current.request.application
        plugin_name = "applications.%s.modules.%s.%s" % (appname, PLUGINS, name)

        plugin = sys.modules.get(plugin_name)
        if plugin is not None:
            # Python 3 has no reload() built-in => use importlib.reload;
            # fall back to the built-in on Python 2, where importlib
            # does not provide reload()
            reload_module = getattr(importlib, "reload", None)
            if reload_module is None:
                reload_module = reload
            try:
                reload_module(plugin)
            except ImportError:
                current.log.error("Reloading plugin '%s' failed" % name)
                success = False

        return success
# =============================================================================
# Do a full scan of the plugin directory when this module is (re)loaded,
# i.e. whenever a new interpreter thread starts
PluginLoader.detect(reset_all=True)
# =============================================================================
| [
"os.listdir",
"os.path.join",
"os.path.splitext",
"gluon.current.log.error",
"os.path.isdir",
"sys.modules.get"
] | [((7235, 7275), 'os.path.join', 'os.path.join', (['folder', '"""modules"""', 'PLUGINS'], {}), "(folder, 'modules', PLUGINS)\n", (7247, 7275), False, 'import os\n'), ((7293, 7309), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (7303, 7309), False, 'import os\n'), ((8183, 8211), 'sys.modules.get', 'sys.modules.get', (['plugin_name'], {}), '(plugin_name)\n', (8198, 8211), False, 'import sys\n'), ((7369, 7391), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (7385, 7391), False, 'import os\n'), ((7470, 7494), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (7482, 7494), False, 'import os\n'), ((7510, 7530), 'os.path.isdir', 'os.path.isdir', (['path_'], {}), '(path_)\n', (7523, 7530), False, 'import os\n'), ((8339, 8395), 'gluon.current.log.error', 'current.log.error', (['("Reloading plugin \'%s\' failed" % name)'], {}), '("Reloading plugin \'%s\' failed" % name)\n', (8356, 8395), False, 'from gluon import current\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ccm.ui'
#
# Created by: PyQt5 UI code generator 5.13.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_CCMTask(object):
    """
    UI definition for the CCMTask main window.

    Auto-generated from 'ccm.ui' by the PyQt5 UI code generator
    (pyuic5) - manual edits will be overwritten on regeneration.
    """

    def setupUi(self, CCMTask):
        """
        Create and lay out all widgets of the CCMTask main window.

        @param CCMTask: the QMainWindow instance to populate
        """
        CCMTask.setObjectName("CCMTask")
        CCMTask.resize(712, 585)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Ignored, QtWidgets.QSizePolicy.Ignored)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(CCMTask.sizePolicy().hasHeightForWidth())
        CCMTask.setSizePolicy(sizePolicy)
        CCMTask.setAutoFillBackground(False)
        self.centralwidget = QtWidgets.QWidget(CCMTask)
        self.centralwidget.setObjectName("centralwidget")
        # Group box with the issue identification fields (AR/DTS id + info)
        self.issueBox = QtWidgets.QGroupBox(self.centralwidget)
        self.issueBox.setGeometry(QtCore.QRect(10, 110, 691, 55))
        self.issueBox.setObjectName("issueBox")
        self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.issueBox)
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.ARDTSEdit = QtWidgets.QLineEdit(self.issueBox)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.ARDTSEdit.sizePolicy().hasHeightForWidth())
        self.ARDTSEdit.setSizePolicy(sizePolicy)
        self.ARDTSEdit.setTabletTracking(True)
        self.ARDTSEdit.setObjectName("ARDTSEdit")
        self.horizontalLayout_3.addWidget(self.ARDTSEdit)
        spacerItem = QtWidgets.QSpacerItem(70, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_3.addItem(spacerItem)
        self.issueInfoEdit = QtWidgets.QLineEdit(self.issueBox)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.issueInfoEdit.sizePolicy().hasHeightForWidth())
        self.issueInfoEdit.setSizePolicy(sizePolicy)
        self.issueInfoEdit.setTabletTracking(True)
        self.issueInfoEdit.setObjectName("issueInfoEdit")
        self.horizontalLayout_3.addWidget(self.issueInfoEdit)
        self.label = QtWidgets.QLabel(self.issueBox)
        self.label.setText("")
        self.label.setObjectName("label")
        self.horizontalLayout_3.addWidget(self.label)
        # Group box with the issue detail editors (params / opkeys)
        self.issueDetailBox = QtWidgets.QGroupBox(self.centralwidget)
        self.issueDetailBox.setGeometry(QtCore.QRect(10, 170, 691, 401))
        self.issueDetailBox.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
        self.issueDetailBox.setTabletTracking(True)
        self.issueDetailBox.setObjectName("issueDetailBox")
        self.deletedParamsBox = QtWidgets.QGroupBox(self.issueDetailBox)
        self.deletedParamsBox.setGeometry(QtCore.QRect(500, 20, 161, 271))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.deletedParamsBox.sizePolicy().hasHeightForWidth())
        self.deletedParamsBox.setSizePolicy(sizePolicy)
        self.deletedParamsBox.setObjectName("deletedParamsBox")
        self.deletedParamsEdit = QtWidgets.QTextEdit(self.deletedParamsBox)
        self.deletedParamsEdit.setGeometry(QtCore.QRect(10, 20, 141, 231))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.deletedParamsEdit.sizePolicy().hasHeightForWidth())
        self.deletedParamsEdit.setSizePolicy(sizePolicy)
        self.deletedParamsEdit.setObjectName("deletedParamsEdit")
        # Second column of up to six line edits (opkey1Edit_2..opkey6Edit_2)
        self.opkeysBox_2 = QtWidgets.QGroupBox(self.issueDetailBox)
        self.opkeysBox_2.setGeometry(QtCore.QRect(10, 210, 153, 182))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.opkeysBox_2.sizePolicy().hasHeightForWidth())
        self.opkeysBox_2.setSizePolicy(sizePolicy)
        self.opkeysBox_2.setObjectName("opkeysBox_2")
        self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.opkeysBox_2)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.opkey1Edit_2 = QtWidgets.QLineEdit(self.opkeysBox_2)
        self.opkey1Edit_2.setTabletTracking(True)
        self.opkey1Edit_2.setText("")
        self.opkey1Edit_2.setPlaceholderText("")
        self.opkey1Edit_2.setObjectName("opkey1Edit_2")
        self.verticalLayout_2.addWidget(self.opkey1Edit_2)
        self.opkey2Edit_2 = QtWidgets.QLineEdit(self.opkeysBox_2)
        self.opkey2Edit_2.setTabletTracking(True)
        self.opkey2Edit_2.setText("")
        self.opkey2Edit_2.setPlaceholderText("")
        self.opkey2Edit_2.setObjectName("opkey2Edit_2")
        self.verticalLayout_2.addWidget(self.opkey2Edit_2)
        self.opkey3Edit_2 = QtWidgets.QLineEdit(self.opkeysBox_2)
        self.opkey3Edit_2.setTabletTracking(True)
        self.opkey3Edit_2.setText("")
        self.opkey3Edit_2.setPlaceholderText("")
        self.opkey3Edit_2.setObjectName("opkey3Edit_2")
        self.verticalLayout_2.addWidget(self.opkey3Edit_2)
        self.opkey4Edit_2 = QtWidgets.QLineEdit(self.opkeysBox_2)
        self.opkey4Edit_2.setTabletTracking(True)
        self.opkey4Edit_2.setText("")
        self.opkey4Edit_2.setPlaceholderText("")
        self.opkey4Edit_2.setObjectName("opkey4Edit_2")
        self.verticalLayout_2.addWidget(self.opkey4Edit_2)
        self.opkey5Edit_2 = QtWidgets.QLineEdit(self.opkeysBox_2)
        self.opkey5Edit_2.setTabletTracking(True)
        self.opkey5Edit_2.setText("")
        self.opkey5Edit_2.setPlaceholderText("")
        self.opkey5Edit_2.setObjectName("opkey5Edit_2")
        self.verticalLayout_2.addWidget(self.opkey5Edit_2)
        self.opkey6Edit_2 = QtWidgets.QLineEdit(self.opkeysBox_2)
        self.opkey6Edit_2.setTabletTracking(True)
        self.opkey6Edit_2.setText("")
        self.opkey6Edit_2.setPlaceholderText("")
        self.opkey6Edit_2.setClearButtonEnabled(False)
        self.opkey6Edit_2.setObjectName("opkey6Edit_2")
        self.verticalLayout_2.addWidget(self.opkey6Edit_2)
        self.splitter_2 = QtWidgets.QSplitter(self.issueDetailBox)
        self.splitter_2.setGeometry(QtCore.QRect(10, 20, 153, 182))
        self.splitter_2.setOrientation(QtCore.Qt.Vertical)
        self.splitter_2.setObjectName("splitter_2")
        # First column of up to six line edits (opkey1Edit..opkey6Edit)
        self.opkeysBox = QtWidgets.QGroupBox(self.splitter_2)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.opkeysBox.sizePolicy().hasHeightForWidth())
        self.opkeysBox.setSizePolicy(sizePolicy)
        self.opkeysBox.setObjectName("opkeysBox")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.opkeysBox)
        self.verticalLayout.setObjectName("verticalLayout")
        self.opkey1Edit = QtWidgets.QLineEdit(self.opkeysBox)
        self.opkey1Edit.setTabletTracking(True)
        self.opkey1Edit.setText("")
        self.opkey1Edit.setObjectName("opkey1Edit")
        self.verticalLayout.addWidget(self.opkey1Edit)
        self.opkey2Edit = QtWidgets.QLineEdit(self.opkeysBox)
        self.opkey2Edit.setTabletTracking(True)
        self.opkey2Edit.setText("")
        self.opkey2Edit.setObjectName("opkey2Edit")
        self.verticalLayout.addWidget(self.opkey2Edit)
        self.opkey3Edit = QtWidgets.QLineEdit(self.opkeysBox)
        self.opkey3Edit.setTabletTracking(True)
        self.opkey3Edit.setText("")
        self.opkey3Edit.setObjectName("opkey3Edit")
        self.verticalLayout.addWidget(self.opkey3Edit)
        self.opkey4Edit = QtWidgets.QLineEdit(self.opkeysBox)
        self.opkey4Edit.setTabletTracking(True)
        self.opkey4Edit.setText("")
        self.opkey4Edit.setObjectName("opkey4Edit")
        self.verticalLayout.addWidget(self.opkey4Edit)
        self.opkey5Edit = QtWidgets.QLineEdit(self.opkeysBox)
        self.opkey5Edit.setTabletTracking(True)
        self.opkey5Edit.setText("")
        self.opkey5Edit.setObjectName("opkey5Edit")
        self.verticalLayout.addWidget(self.opkey5Edit)
        self.opkey6Edit = QtWidgets.QLineEdit(self.opkeysBox)
        self.opkey6Edit.setTabletTracking(True)
        self.opkey6Edit.setText("")
        self.opkey6Edit.setClearButtonEnabled(False)
        self.opkey6Edit.setObjectName("opkey6Edit")
        self.verticalLayout.addWidget(self.opkey6Edit)
        self.splitter = QtWidgets.QSplitter(self.issueDetailBox)
        self.splitter.setGeometry(QtCore.QRect(190, 20, 291, 361))
        self.splitter.setOrientation(QtCore.Qt.Vertical)
        self.splitter.setObjectName("splitter")
        # Text editors for new and modified parameters, stacked in a splitter
        self.newParamsBox = QtWidgets.QGroupBox(self.splitter)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.newParamsBox.sizePolicy().hasHeightForWidth())
        self.newParamsBox.setSizePolicy(sizePolicy)
        self.newParamsBox.setObjectName("newParamsBox")
        self.newParamsEdit = QtWidgets.QTextEdit(self.newParamsBox)
        self.newParamsEdit.setGeometry(QtCore.QRect(10, 20, 271, 141))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.newParamsEdit.sizePolicy().hasHeightForWidth())
        self.newParamsEdit.setSizePolicy(sizePolicy)
        self.newParamsEdit.setPlaceholderText("")
        self.newParamsEdit.setObjectName("newParamsEdit")
        self.modifiedParamsBox = QtWidgets.QGroupBox(self.splitter)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.modifiedParamsBox.sizePolicy().hasHeightForWidth())
        self.modifiedParamsBox.setSizePolicy(sizePolicy)
        self.modifiedParamsBox.setObjectName("modifiedParamsBox")
        self.modifiedParamsEdit = QtWidgets.QTextEdit(self.modifiedParamsBox)
        self.modifiedParamsEdit.setGeometry(QtCore.QRect(10, 20, 271, 121))
        self.modifiedParamsEdit.setObjectName("modifiedParamsEdit")
        # Top row: branch-selection checkboxes and the Start button
        self.widget = QtWidgets.QWidget(self.centralwidget)
        self.widget.setGeometry(QtCore.QRect(22, 20, 661, 81))
        self.widget.setObjectName("widget")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.widget)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.branchSelectBox = QtWidgets.QGroupBox(self.widget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.branchSelectBox.sizePolicy().hasHeightForWidth())
        self.branchSelectBox.setSizePolicy(sizePolicy)
        self.branchSelectBox.setObjectName("branchSelectBox")
        self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.branchSelectBox)
        self.horizontalLayout_4.setObjectName("horizontalLayout_4")
        self.checkBox10x = QtWidgets.QCheckBox(self.branchSelectBox)
        self.checkBox10x.setChecked(True)
        self.checkBox10x.setObjectName("checkBox10x")
        self.horizontalLayout_4.addWidget(self.checkBox10x)
        self.checkBox9x = QtWidgets.QCheckBox(self.branchSelectBox)
        self.checkBox9x.setChecked(True)
        self.checkBox9x.setObjectName("checkBox9x")
        self.horizontalLayout_4.addWidget(self.checkBox9x)
        self.horizontalLayout.addWidget(self.branchSelectBox)
        spacerItem1 = QtWidgets.QSpacerItem(250, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem1)
        self.startButton = QtWidgets.QPushButton(self.widget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.startButton.sizePolicy().hasHeightForWidth())
        self.startButton.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setFamily("Consolas")
        font.setPointSize(14)
        self.startButton.setFont(font)
        self.startButton.setWhatsThis("")
        self.startButton.setObjectName("startButton")
        self.horizontalLayout.addWidget(self.startButton)
        CCMTask.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(CCMTask)
        self.statusbar.setObjectName("statusbar")
        CCMTask.setStatusBar(self.statusbar)

        self.retranslateUi(CCMTask)
        QtCore.QMetaObject.connectSlotsByName(CCMTask)

    def retranslateUi(self, CCMTask):
        """
        Set all translatable texts of the UI (titles, labels,
        placeholders) via the Qt translation mechanism.

        @param CCMTask: the QMainWindow instance populated by setupUi
        """
        _translate = QtCore.QCoreApplication.translate
        CCMTask.setWindowTitle(_translate("CCMTask", "CCMTask"))
        self.issueBox.setTitle(_translate("CCMTask", "需求信息"))
        self.ARDTSEdit.setPlaceholderText(_translate("CCMTask", "AR或者DTS编号"))
        self.issueInfoEdit.setPlaceholderText(_translate("CCMTask", "需求描述信息"))
        self.issueDetailBox.setTitle(_translate("CCMTask", "需求内容"))
        self.deletedParamsBox.setTitle(_translate("CCMTask", "删除参数"))
        self.opkeysBox_2.setTitle(_translate("CCMTask", "审核人列表"))
        self.opkeysBox.setTitle(_translate("CCMTask", "运营商列表"))
        self.opkey1Edit.setPlaceholderText(_translate("CCMTask", "OPkey1"))
        self.opkey2Edit.setPlaceholderText(_translate("CCMTask", "OPkey2"))
        self.opkey3Edit.setPlaceholderText(_translate("CCMTask", "OPkey3"))
        self.opkey4Edit.setPlaceholderText(_translate("CCMTask", "OPkey4"))
        self.opkey5Edit.setPlaceholderText(_translate("CCMTask", "OPkey5"))
        self.opkey6Edit.setPlaceholderText(_translate("CCMTask", "OPkey6"))
        self.newParamsBox.setTitle(_translate("CCMTask", "新增参数"))
        self.modifiedParamsBox.setTitle(_translate("CCMTask", "修改参数"))
        self.branchSelectBox.setTitle(_translate("CCMTask", "分支选择"))
        self.checkBox10x.setText(_translate("CCMTask", "10.x ALL"))
        self.checkBox9x.setText(_translate("CCMTask", "9.x ALL"))
        self.startButton.setText(_translate("CCMTask", "Start"))
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QTextEdit",
"PyQt5.QtGui.QFont",
"PyQt5.QtWidgets.QSpacerItem",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QSplitter",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QStatusBar",
... | [((391, 479), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Ignored', 'QtWidgets.QSizePolicy.Ignored'], {}), '(QtWidgets.QSizePolicy.Ignored, QtWidgets.QSizePolicy.\n Ignored)\n', (412, 479), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((754, 780), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['CCMTask'], {}), '(CCMTask)\n', (771, 780), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((863, 902), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (882, 902), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1051, 1087), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.issueBox'], {}), '(self.issueBox)\n', (1072, 1087), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1181, 1215), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.issueBox'], {}), '(self.issueBox)\n', (1200, 1215), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1237, 1325), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Expanding'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.\n Expanding)\n', (1258, 1325), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1716, 1810), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(70)', '(20)', 'QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Minimum'], {}), '(70, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.\n QSizePolicy.Minimum)\n', (1737, 1810), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1887, 1921), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.issueBox'], {}), '(self.issueBox)\n', (1906, 1921), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1943, 2035), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Expanding', 'QtWidgets.QSizePolicy.Expanding'], {}), '(QtWidgets.QSizePolicy.Expanding, QtWidgets.\n 
QSizePolicy.Expanding)\n', (1964, 2035), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2450, 2481), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.issueBox'], {}), '(self.issueBox)\n', (2466, 2481), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2639, 2678), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2658, 2678), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2972, 3012), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.issueDetailBox'], {}), '(self.issueDetailBox)\n', (2991, 3012), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3109, 3188), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (3130, 3188), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3519, 3561), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.deletedParamsBox'], {}), '(self.deletedParamsBox)\n', (3538, 3561), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3658, 3737), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (3679, 3737), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4066, 4106), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.issueDetailBox'], {}), '(self.issueDetailBox)\n', (4085, 4106), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4198, 4277), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (4219, 4277), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4587, 4626), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.opkeysBox_2'], {}), 
'(self.opkeysBox_2)\n', (4608, 4626), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4719, 4756), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox_2'], {}), '(self.opkeysBox_2)\n', (4738, 4756), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5037, 5074), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox_2'], {}), '(self.opkeysBox_2)\n', (5056, 5074), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5355, 5392), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox_2'], {}), '(self.opkeysBox_2)\n', (5374, 5392), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5673, 5710), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox_2'], {}), '(self.opkeysBox_2)\n', (5692, 5710), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5991, 6028), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox_2'], {}), '(self.opkeysBox_2)\n', (6010, 6028), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6309, 6346), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox_2'], {}), '(self.opkeysBox_2)\n', (6328, 6346), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6680, 6720), 'PyQt5.QtWidgets.QSplitter', 'QtWidgets.QSplitter', (['self.issueDetailBox'], {}), '(self.issueDetailBox)\n', (6699, 6720), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6925, 6961), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.splitter_2'], {}), '(self.splitter_2)\n', (6944, 6961), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6983, 7062), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (7004, 7062), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7362, 7399), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.opkeysBox'], {}), 
'(self.opkeysBox)\n', (7383, 7399), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7486, 7521), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox'], {}), '(self.opkeysBox)\n', (7505, 7521), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7739, 7774), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox'], {}), '(self.opkeysBox)\n', (7758, 7774), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7992, 8027), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox'], {}), '(self.opkeysBox)\n', (8011, 8027), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8245, 8280), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox'], {}), '(self.opkeysBox)\n', (8264, 8280), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8498, 8533), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox'], {}), '(self.opkeysBox)\n', (8517, 8533), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8751, 8786), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.opkeysBox'], {}), '(self.opkeysBox)\n', (8770, 8786), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9055, 9095), 'PyQt5.QtWidgets.QSplitter', 'QtWidgets.QSplitter', (['self.issueDetailBox'], {}), '(self.issueDetailBox)\n', (9074, 9095), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9296, 9330), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.splitter'], {}), '(self.splitter)\n', (9315, 9330), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9352, 9431), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (9373, 9431), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9742, 9780), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.newParamsBox'], {}), '(self.newParamsBox)\n', (9761, 9780), False, 
'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9873, 9952), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (9894, 9952), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10321, 10355), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.splitter'], {}), '(self.splitter)\n', (10340, 10355), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10377, 10456), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (10398, 10456), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10792, 10835), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.modifiedParamsBox'], {}), '(self.modifiedParamsBox)\n', (10811, 10835), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11002, 11039), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (11019, 11039), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11179, 11213), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.widget'], {}), '(self.widget)\n', (11200, 11213), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11370, 11402), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.widget'], {}), '(self.widget)\n', (11389, 11402), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11424, 11516), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Preferred)\n', (11445, 11516), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11839, 11882), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.branchSelectBox'], {}), '(self.branchSelectBox)\n', (11860, 
11882), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11978, 12019), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.branchSelectBox'], {}), '(self.branchSelectBox)\n', (11997, 12019), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12202, 12243), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.branchSelectBox'], {}), '(self.branchSelectBox)\n', (12221, 12243), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12480, 12575), 'PyQt5.QtWidgets.QSpacerItem', 'QtWidgets.QSpacerItem', (['(250)', '(20)', 'QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Minimum'], {}), '(250, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.\n QSizePolicy.Minimum)\n', (12501, 12575), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12649, 12683), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.widget'], {}), '(self.widget)\n', (12670, 12683), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12705, 12797), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Preferred)\n', (12726, 12797), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13031, 13044), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (13042, 13044), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13381, 13410), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['CCMTask'], {}), '(CCMTask)\n', (13401, 13410), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13551, 13597), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['CCMTask'], {}), '(CCMTask)\n', (13588, 13597), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((937, 967), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(110)', '(691)', '(55)'], {}), '(10, 110, 691, 55)\n', (949, 967), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2719, 2750), 
'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(170)', '(691)', '(401)'], {}), '(10, 170, 691, 401)\n', (2731, 2750), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2790, 2826), 'PyQt5.QtGui.QCursor', 'QtGui.QCursor', (['QtCore.Qt.ArrowCursor'], {}), '(QtCore.Qt.ArrowCursor)\n', (2803, 2826), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3055, 3086), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(500)', '(20)', '(161)', '(271)'], {}), '(500, 20, 161, 271)\n', (3067, 3086), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3605, 3635), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(20)', '(141)', '(231)'], {}), '(10, 20, 141, 231)\n', (3617, 3635), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4144, 4175), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(210)', '(153)', '(182)'], {}), '(10, 210, 153, 182)\n', (4156, 4175), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6757, 6787), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(20)', '(153)', '(182)'], {}), '(10, 20, 153, 182)\n', (6769, 6787), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9130, 9161), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(190)', '(20)', '(291)', '(361)'], {}), '(190, 20, 291, 361)\n', (9142, 9161), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9820, 9850), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(20)', '(271)', '(141)'], {}), '(10, 20, 271, 141)\n', (9832, 9850), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10880, 10910), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(10)', '(20)', '(271)', '(121)'], {}), '(10, 20, 271, 121)\n', (10892, 10910), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11072, 11101), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(22)', '(20)', '(661)', '(81)'], {}), '(22, 20, 661, 81)\n', (11084, 11101), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
import multiprocessing
import os
import re
import socket
import subprocess
import sys
import warnings
import six
from django.conf import settings
from django.core.management import base
from django import template
# Suppress DeprecationWarnings which clutter the output to the point of
# rendering it unreadable.
warnings.simplefilter('ignore')
# Name of this management sub-command, taken from the module name.
cmd_name = __name__.split('.')[-1]
# CURDIR: directory holding this module; PROJECT_PATH: two levels up;
# STATIC_PATH: 'static' directory next to the project directory.
CURDIR = os.path.realpath(os.path.dirname(__file__))
PROJECT_PATH = os.path.realpath(os.path.join(CURDIR, '../..'))
STATIC_PATH = os.path.realpath(os.path.join(PROJECT_PATH, '../static'))
# Known apache regular expression to retrieve its version
APACHE_VERSION_REG = r'Apache/(?P<version>[\d.]*)'
# Known apache commands to retrieve its version
APACHE2_VERSION_CMDS = (
    (('/usr/sbin/apache2ctl', '-V'), APACHE_VERSION_REG),
    (('/usr/sbin/apache2', '-v'), APACHE_VERSION_REG),
)
# Known apache log directory locations
APACHE_LOG_DIRS = (
    '/var/log/httpd',  # RHEL / Red Hat / CentOS / Fedora Linux
    '/var/log/apache2',  # Debian / Ubuntu Linux
)
# Default log directory
DEFAULT_LOG_DIR = '/var/log'
def _getattr(obj, name, default):
    """Like getattr but return `default` if None or False.

    Plain getattr(obj, name, default) falls back to `default` only when
    the attribute is missing; this variant also falls back whenever the
    attribute value is falsy (None, False, '', 0, ...).
    """
    return getattr(obj, name, default) or default
# Template context used to render the wsgi / apache vhost templates.
# Values here are import-time defaults; Command.handle() overrides many
# of them from command line options.
context = template.Context({
    'DJANGO_SETTINGS_MODULE': os.environ['DJANGO_SETTINGS_MODULE'],
    'HOSTNAME': socket.getfqdn(),
    'PROJECT_PATH': os.path.realpath(
        _getattr(settings, 'ROOT_PATH', PROJECT_PATH)),
    'STATIC_PATH': os.path.realpath(
        _getattr(settings, 'STATIC_ROOT', STATIC_PATH)),
    'SSLCERT': '/etc/pki/tls/certs/ca.crt',
    'SSLKEY': '/etc/pki/tls/private/ca.key',
    'CACERT': None,
    # One worker per CPU plus one.
    'PROCESSES': multiprocessing.cpu_count() + 1,
})
context['PROJECT_ROOT'] = os.path.dirname(context['PROJECT_PATH'])
context['PROJECT_DIR_NAME'] = os.path.basename(
    context['PROJECT_PATH'].split(context['PROJECT_ROOT'])[1])
context['PROJECT_NAME'] = context['PROJECT_DIR_NAME']
context['DEFAULT_WSGI_FILE'] = os.path.join(
    context['PROJECT_PATH'], 'wsgi.py')
context['WSGI_FILE'] = os.path.join(
    context['PROJECT_PATH'], 'horizon_wsgi.py')
# Virtual host name: replace the host part of the FQDN with the project
# name, keeping the original domain (or a placeholder when there is none).
VHOSTNAME = context['HOSTNAME'].split('.')
VHOSTNAME[0] = context['PROJECT_NAME']
context['VHOSTNAME'] = '.'.join(VHOSTNAME)
if len(VHOSTNAME) > 1:
    context['DOMAINNAME'] = '.'.join(VHOSTNAME[1:])
else:
    context['DOMAINNAME'] = 'openstack.org'
context['ADMIN'] = 'webmaster@%s' % context['DOMAINNAME']
# Detect a virtualenv so the generated config can activate it.
context['ACTIVATE_THIS'] = None
virtualenv = os.environ.get('VIRTUAL_ENV')
if virtualenv:
    activate_this = os.path.join(
        virtualenv, 'bin/activate_this.py')
    if os.path.exists(activate_this):
        context['ACTIVATE_THIS'] = activate_this
# Try to detect apache's version
# We fallback on 2.4.
context['APACHE2_VERSION'] = 2.4
APACHE2_VERSION = None
for cmd in APACHE2_VERSION_CMDS:
    if os.path.exists(cmd[0][0]):
        try:
            reg = re.compile(cmd[1])
            # Run e.g. "apache2ctl -V" and scan its output for "Apache/x.y".
            output = subprocess.check_output(cmd[0], stderr=subprocess.STDOUT)
            if isinstance(output, six.binary_type):
                output = output.decode()
            res = reg.search(output)
            if res:
                APACHE2_VERSION = res.group('version')
                break
        except subprocess.CalledProcessError:
            # Command exists but failed; try the next known command.
            pass
if APACHE2_VERSION:
    # Keep only "major.minor" as a float, e.g. "2.4.41" -> 2.4.
    ver_nums = APACHE2_VERSION.split('.')
    if len(ver_nums) >= 2:
        try:
            context['APACHE2_VERSION'] = float('.'.join(ver_nums[:2]))
        except ValueError:
            pass
def find_apache_log_dir():
    """Return the first known apache log directory present on this host.

    Falls back to DEFAULT_LOG_DIR when none of the distribution-specific
    locations in APACHE_LOG_DIRS exists.
    """
    return next(
        (d for d in APACHE_LOG_DIRS
         if os.path.exists(d) and os.path.isdir(d)),
        DEFAULT_LOG_DIR)
# Resolved once at import time; the --logdir option overrides it in handle().
context['LOGDIR'] = find_apache_log_dir()
class Command(base.BaseCommand):
    """Generate the project WSGI file or an apache vhost configuration.

    With --wsgi the WSGI file is written to disk; with --apache the vhost
    configuration is rendered on stdout so the caller can redirect it to
    the distribution-specific location.
    """
    args = ''
    # Fixed: "acpache" typo in the user-facing help text.
    help = """Create %(wsgi_file)s
or the contents of an apache %(p_name)s.conf file (on stdout).
The apache configuration is generated on stdout because the place of this
file is distribution dependent.
examples::
    manage.py %(cmd_name)s --wsgi # creates %(wsgi_file)s
    manage.py %(cmd_name)s --apache # creates an apache vhost conf file (on \
stdout).
    manage.py %(cmd_name)s --apache --ssl --mail=%(admin)s \
--project=%(p_name)s --hostname=%(hostname)s
To create an apache configuration file, redirect the output towards the
location you desire, e.g.::
    manage.py %(cmd_name)s --apache > \
/etc/httpd/conf.d/openstack_dashboard.conf
""" % {
        'cmd_name': cmd_name,
        'p_name': context['PROJECT_NAME'],
        'wsgi_file': context['WSGI_FILE'],
        'admin': context['ADMIN'],
        'hostname': context['VHOSTNAME'], }

    def add_arguments(self, parser):
        """Register the command line options for this sub-command."""
        # TODO(ygbo): Add an --nginx option.
        parser.add_argument(
            "-a", "--apache",
            default=False, action="store_true", dest="apache",
            help="generate an apache vhost configuration"
        )
        parser.add_argument(
            "--cacert",
            dest="cacert",
            help=("Use with the --apache and --ssl option to define the path"
                  " to the SSLCACertificateFile"),
            metavar="CACERT"
        )
        parser.add_argument(
            "-f", "--force",
            default=False, action="store_true", dest="force",
            help="force overwriting of an existing %s file" %
                 context['WSGI_FILE']
        )
        parser.add_argument(
            "-H", "--hostname",
            dest="hostname",
            help=("Use with the --apache option to define the server's"
                  " hostname (default : %s)") % context['VHOSTNAME'],
            metavar="HOSTNAME"
        )
        parser.add_argument(
            "--logdir",
            dest="logdir",
            help=("Use with the --apache option to define the path to "
                  "the apache log directory(default : %s)"
                  % context['LOGDIR']),
            # Fixed: metavar was copy-pasted as "CACERT".
            metavar="LOGDIR"
        )
        parser.add_argument(
            "-m", "--mail",
            dest="mail",
            help=("Use with the --apache option to define the web site"
                  " administrator's email (default : %s)") %
                 context['ADMIN'],
            metavar="MAIL"
        )
        parser.add_argument(
            "-n", "--namedhost",
            default=False, action="store_true", dest="namedhost",
            help=("Use with the --apache option. The apache vhost "
                  "configuration will work only when accessed with "
                  "the proper hostname (see --hostname).")
        )
        parser.add_argument(
            "--processes",
            dest="processes",
            help=("Use with the --apache option to define the number of "
                  "apache processes (by default the number of cpus +1 which "
                  "is %s on this machine).") % context['PROCESSES'],
            metavar="PROCESSES"
        )
        parser.add_argument(
            "-p", "--project",
            dest="project",
            help=("Use with the --apache option to define the project "
                  "name (default : %s)") % context['PROJECT_NAME'],
            metavar="PROJECT"
        )
        parser.add_argument(
            "-s", "--ssl",
            default=False, action="store_true", dest="ssl",
            help=("Use with the --apache option. The apache vhost "
                  "configuration will use an SSL configuration")
        )
        parser.add_argument(
            "--sslcert",
            dest="sslcert",
            help=("Use with the --apache and --ssl option to define "
                  "the path to the SSLCertificateFile (default : %s)"
                  ) % context['SSLCERT'],
            metavar="SSLCERT"
        )
        parser.add_argument(
            "--sslkey",
            dest="sslkey",
            help=("Use with the --apache and --ssl option to define "
                  "the path to the SSLCertificateKeyFile "
                  "(default : %s)") % context['SSLKEY'],
            metavar="SSLKEY"
        )
        parser.add_argument(
            "--apache-version",
            dest="apache_version",
            type=float,
            help=("Use with the --apache option to define the apache "
                  "major (as a floating point number) version "
                  "(default : %s)."
                  % context['APACHE2_VERSION']),
            metavar="APACHE_VERSION"
        )
        parser.add_argument(
            "-w", "--wsgi",
            default=False, action="store_true", dest="wsgi",
            help="generate the horizon.wsgi file"
        )

    def handle(self, *args, **options):
        """Render the WSGI file or the apache vhost configuration.

        Command line options override the module-level defaults stored in
        `context` before the matching template is rendered.
        """
        force = options.get('force')
        context['SSL'] = options.get('ssl')
        if options.get('mail'):
            context['ADMIN'] = options['mail']
        if options.get('cacert'):
            context['CACERT'] = options['cacert']
        if options.get('logdir'):
            context['LOGDIR'] = options['logdir'].rstrip('/')
        if options.get('processes'):
            context['PROCESSES'] = options['processes']
        if options.get('project'):
            context['PROJECT_NAME'] = options['project']
        if options.get('hostname'):
            context['VHOSTNAME'] = options['hostname']
        if options.get('sslcert'):
            context['SSLCERT'] = options['sslcert']
        if options.get('sslkey'):
            context['SSLKEY'] = options['sslkey']
        if options.get('apache_version'):
            context['APACHE2_VERSION'] = options['apache_version']
        if options.get('namedhost'):
            context['NAMEDHOST'] = context['VHOSTNAME']
        else:
            context['NAMEDHOST'] = '*'
        # Generate the WSGI.
        if options.get('wsgi'):
            with open(
                os.path.join(CURDIR, 'horizon.wsgi.template'), 'r'
            ) as fp:
                wsgi_template = template.Template(fp.read())
            # Never silently overwrite an existing file unless --force.
            if not os.path.exists(context['WSGI_FILE']) or force:
                with open(context['WSGI_FILE'], 'w') as fp:
                    fp.write(wsgi_template.render(context))
                print('Generated "%s"' % context['WSGI_FILE'])
            else:
                sys.exit('"%s" already exists, use --force to overwrite' %
                         context['WSGI_FILE'])
        # Generate the apache configuration.
        elif options.get('apache'):
            # first check if custom wsgi file exists, if not, use default:
            if not os.path.exists(context['WSGI_FILE']):
                context['WSGI_FILE'] = context['DEFAULT_WSGI_FILE']
            with open(
                os.path.join(CURDIR, 'apache_vhost.conf.template'), 'r'
            ) as fp:
                wsgi_template = template.Template(fp.read())
            sys.stdout.write(wsgi_template.render(context))
        else:
            self.print_help('manage.py', cmd_name)
| [
"subprocess.check_output",
"os.path.exists",
"socket.getfqdn",
"re.compile",
"os.path.join",
"os.environ.get",
"multiprocessing.cpu_count",
"os.path.dirname",
"os.path.isdir",
"sys.exit",
"warnings.simplefilter"
] | [((901, 932), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (922, 932), False, 'import warnings\n'), ((2572, 2612), 'os.path.dirname', 'os.path.dirname', (["context['PROJECT_PATH']"], {}), "(context['PROJECT_PATH'])\n", (2587, 2612), False, 'import os\n'), ((2810, 2858), 'os.path.join', 'os.path.join', (["context['PROJECT_PATH']", '"""wsgi.py"""'], {}), "(context['PROJECT_PATH'], 'wsgi.py')\n", (2822, 2858), False, 'import os\n'), ((2887, 2943), 'os.path.join', 'os.path.join', (["context['PROJECT_PATH']", '"""horizon_wsgi.py"""'], {}), "(context['PROJECT_PATH'], 'horizon_wsgi.py')\n", (2899, 2943), False, 'import os\n'), ((3305, 3334), 'os.environ.get', 'os.environ.get', (['"""VIRTUAL_ENV"""'], {}), "('VIRTUAL_ENV')\n", (3319, 3334), False, 'import os\n'), ((996, 1021), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1011, 1021), False, 'import os\n'), ((1055, 1084), 'os.path.join', 'os.path.join', (['CURDIR', '"""../.."""'], {}), "(CURDIR, '../..')\n", (1067, 1084), False, 'import os\n'), ((1117, 1156), 'os.path.join', 'os.path.join', (['PROJECT_PATH', '"""../static"""'], {}), "(PROJECT_PATH, '../static')\n", (1129, 1156), False, 'import os\n'), ((3370, 3418), 'os.path.join', 'os.path.join', (['virtualenv', '"""bin/activate_this.py"""'], {}), "(virtualenv, 'bin/activate_this.py')\n", (3382, 3418), False, 'import os\n'), ((3435, 3464), 'os.path.exists', 'os.path.exists', (['activate_this'], {}), '(activate_this)\n', (3449, 3464), False, 'import os\n'), ((3667, 3692), 'os.path.exists', 'os.path.exists', (['cmd[0][0]'], {}), '(cmd[0][0])\n', (3681, 3692), False, 'import os\n'), ((2177, 2193), 'socket.getfqdn', 'socket.getfqdn', ([], {}), '()\n', (2191, 2193), False, 'import socket\n'), ((2509, 2536), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (2534, 2536), False, 'import multiprocessing\n'), ((3725, 3743), 're.compile', 're.compile', (['cmd[1]'], {}), 
'(cmd[1])\n', (3735, 3743), False, 'import re\n'), ((3765, 3822), 'subprocess.check_output', 'subprocess.check_output', (['cmd[0]'], {'stderr': 'subprocess.STDOUT'}), '(cmd[0], stderr=subprocess.STDOUT)\n', (3788, 3822), False, 'import subprocess\n'), ((4406, 4429), 'os.path.exists', 'os.path.exists', (['log_dir'], {}), '(log_dir)\n', (4420, 4429), False, 'import os\n'), ((4434, 4456), 'os.path.isdir', 'os.path.isdir', (['log_dir'], {}), '(log_dir)\n', (4447, 4456), False, 'import os\n'), ((11067, 11152), 'sys.exit', 'sys.exit', (['(\'"%s" already exists, use --force to overwrite\' % context[\'WSGI_FILE\'])'], {}), '(\'"%s" already exists, use --force to overwrite\' % context[\'WSGI_FILE\']\n )\n', (11075, 11152), False, 'import sys\n'), ((10651, 10696), 'os.path.join', 'os.path.join', (['CURDIR', '"""horizon.wsgi.template"""'], {}), "(CURDIR, 'horizon.wsgi.template')\n", (10663, 10696), False, 'import os\n'), ((10803, 10839), 'os.path.exists', 'os.path.exists', (["context['WSGI_FILE']"], {}), "(context['WSGI_FILE'])\n", (10817, 10839), False, 'import os\n'), ((11349, 11385), 'os.path.exists', 'os.path.exists', (["context['WSGI_FILE']"], {}), "(context['WSGI_FILE'])\n", (11363, 11385), False, 'import os\n'), ((11495, 11545), 'os.path.join', 'os.path.join', (['CURDIR', '"""apache_vhost.conf.template"""'], {}), "(CURDIR, 'apache_vhost.conf.template')\n", (11507, 11545), False, 'import os\n')] |
import random
import numpy as np
import tensorflow as tf
from collections import deque
class PrioritizedReplayBuffer():
    """Prioritized Experience Replay (PER) buffer.

    Stores transitions together with per-transition priorities and samples
    mini-batches with probability proportional to priority ** scale.
    """

    def __init__(self, maxlen):
        """Create the buffer.

        Args:
            maxlen (int or str): capacity; the string "none" means unbounded
        """
        self.maxlen = None if maxlen == "none" else maxlen
        self.buffer = deque(maxlen=self.maxlen)
        self.priorities = deque(maxlen=self.maxlen)

    def add(self, experience):
        """Append one experience to the buffer.

        A new entry gets the current maximum priority (1 for an empty
        buffer) so it is likely to be sampled at least once.

        Args:
            experience (list): state, action, reward, next_state, done

        Returns:
            bool: True if the buffer was already full before the append
        """
        was_full = len(self.buffer) == self.maxlen
        self.buffer.append(experience)
        self.priorities.append(max(self.priorities, default=1))
        return was_full

    def get_probabilities(self, priority_scale):
        """Normalized sampling probabilities for all stored experiences.

        Args:
            priority_scale (float64): weighting exponent in [0, 1]

        Returns:
            numpy array: probabilities summing to 1
        """
        scaled = np.array(self.priorities) ** priority_scale
        return scaled / sum(scaled)

    def get_importance(self, probabilities):
        """Importance-sampling weights, normalized so the maximum is 1.

        Args:
            probabilities (numpy array): experience probabilities

        Returns:
            numpy array: normalized importance weights
        """
        raw = 1 / len(self.buffer) * 1 / probabilities
        return raw / max(raw)

    def sample(self, batch_size, priority_scale=1.0):
        """Draw a prioritized mini-batch.

        Args:
            batch_size (int): requested batch size (capped at buffer size)
            priority_scale (float, optional): exponent in [0, 1], default 1.0

        Returns:
            tuple: (samples array, importance weights, sampled indices)
        """
        draw_count = min(len(self.buffer), batch_size)
        probs = self.get_probabilities(priority_scale)
        chosen = random.choices(range(len(self.buffer)), k=draw_count,
                                weights=probs)
        batch = np.array(self.buffer, dtype=object)[chosen]
        weights = self.get_importance(probs[chosen])
        return batch, weights, chosen

    def set_priorities(self, indices, errors, offset=0.1):
        """Update priorities from TD errors.

        Args:
            indices (array): indices returned by sample()
            errors (array): corresponding losses
            offset (float, optional): small positive floor, default 0.1
        """
        for idx, err in zip(indices, errors):
            self.priorities[int(idx)] = abs(err) + offset

    def get_buffer_length(self):
        """Return the number of stored experiences."""
        return len(self.buffer)
| [
"numpy.array",
"collections.deque"
] | [((400, 425), 'collections.deque', 'deque', ([], {'maxlen': 'self.maxlen'}), '(maxlen=self.maxlen)\n', (405, 425), False, 'from collections import deque\n'), ((452, 477), 'collections.deque', 'deque', ([], {'maxlen': 'self.maxlen'}), '(maxlen=self.maxlen)\n', (457, 477), False, 'from collections import deque\n'), ((1243, 1268), 'numpy.array', 'np.array', (['self.priorities'], {}), '(self.priorities)\n', (1251, 1268), True, 'import numpy as np\n'), ((2482, 2517), 'numpy.array', 'np.array', (['self.buffer'], {'dtype': 'object'}), '(self.buffer, dtype=object)\n', (2490, 2517), True, 'import numpy as np\n')] |
#!/usr/bin/python3
# coding=utf-8
# 环境准备:pip install opencv_contrib_python
# 输入话题:tianbot_mini/image_raw/compressed
# 输出话题:roi
import sys
import os
import rospy
import sensor_msgs.msg
from cv_bridge import CvBridge
import cv2
import numpy as np
from sensor_msgs.msg import RegionOfInterest as ROI
from sensor_msgs.msg import CompressedImage
br = CvBridge()
class MessageItem(object):
    """Container pairing a video frame with its tracking metadata."""

    def __init__(self, frame, message):
        self._frame, self._message = frame, message

    def getFrame(self):
        """Return the stored image frame."""
        return self._frame

    def getMessage(self):
        """Return the stored message (dict, JSON-like)."""
        return self._message
class Tracker(object):
    '''
    Tracker module: follows a user-selected target across video frames.
    '''
    def __init__(self, tracker_type="TLD", draw_coord=True):
        '''
        Select and build the underlying OpenCV tracker implementation.

        :param tracker_type: algorithm name, one of self.tracker_types
        :param draw_coord: if True, draw the tracked box onto each frame
        '''
        # Read the OpenCV version: the tracker creation API changed in v3.
        (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')
        self.tracker_types = ['BOOSTING', 'MIL', 'KCF', 'TLD', 'MEDIANFLOW', 'GOTURN', "CSRT"]
        self.tracker_type = tracker_type
        self.isWorking = False
        self.draw_coord = draw_coord
        # Build the tracker: OpenCV < 3 has one generic factory, newer
        # versions expose one constructor per algorithm.
        if int(major_ver) < 3:
            self.tracker = cv2.Tracker_create(tracker_type)
        else:
            if tracker_type == 'BOOSTING':
                self.tracker = cv2.TrackerBoosting_create()
            if tracker_type == 'MIL':
                self.tracker = cv2.TrackerMIL_create()
            if tracker_type == 'KCF':
                self.tracker = cv2.TrackerKCF_create()
            if tracker_type == 'TLD':
                self.tracker = cv2.TrackerTLD_create()
            if tracker_type == 'MEDIANFLOW':
                self.tracker = cv2.TrackerMedianFlow_create()
            if tracker_type == 'GOTURN':
                self.tracker = cv2.TrackerGOTURN_create()
            if tracker_type == "CSRT":
                self.tracker = cv2.TrackerCSRT_create()
    def initWorking(self, frame, box):
        '''
        Initialize tracking.

        frame: the frame used to start the tracker
        box: region of interest (x, y, w, h) to follow
        '''
        if not self.tracker:
            raise Exception("追踪器未初始化")
        status = self.tracker.init(frame, box)
        if not status:
            raise Exception("追踪器工作初始化失败")
        self.coord = box
        self.isWorking = True
    def track(self, frame):
        '''
        Run one tracking step on `frame` and publish the resulting ROI.

        Returns a MessageItem holding the (possibly annotated) frame and
        a dict with the tracked coordinates, or None message when idle.

        NOTE(review): `pub` and `tld_roi` are module-level globals created
        in the __main__ section; calling track() before that setup would
        raise NameError — confirm intended usage.
        '''
        message = None
        if self.isWorking:
            status, self.coord = self.tracker.update(frame)
            if status:
                message = {"coord": [((int(self.coord[0]), int(self.coord[1])),
                                      (int(self.coord[0] + self.coord[2]), int(self.coord[1] + self.coord[3])))]}
                if self.draw_coord:
                    # Draw the tracked bounding box on the frame.
                    p1 = (int(self.coord[0]), int(self.coord[1]))
                    p2 = (int(self.coord[0] + self.coord[2]), int(self.coord[1] + self.coord[3]))
                    cv2.rectangle(frame, p1, p2, (255, 0, 0), 2, 1)
                    message['msg'] = self.tracker_type + " is tracking"
            # Update the ROI message; negative coordinates mean the target
            # left the image, so publish an empty ROI instead.
            if (int(self.coord[0]) <0 or int(self.coord[1]) <0):
                tld_roi.x_offset = 0
                tld_roi.y_offset = 0
                tld_roi.width = 0
                tld_roi.height = 0
            else:
                tld_roi.x_offset = int(self.coord[0])
                tld_roi.y_offset = int(self.coord[1])
                tld_roi.width = int(self.coord[2])
                tld_roi.height = int(self.coord[3])
            # Publish the ROI on the "roi" topic.
            pub.publish(tld_roi)
        return MessageItem(frame, message)
def compressed_detect_and_draw(compressed_imgmsg):
    """ROS subscriber callback: decode the compressed image into gFrame.

    The shared frame is refreshed only when the main loop requested a new
    frame (getFrame) or continuous tracking is active (loopGetFrame).
    """
    global br,gFrame,gCapStatus,getFrame,loopGetFrame
    if ((getFrame == True) or (loopGetFrame == True)):
        gFrame = br.compressed_imgmsg_to_cv2(compressed_imgmsg, "bgr8")
        gCapStatus = True
        # NOTE(review): getFrame stays True here, so the callback keeps
        # overwriting gFrame; presumably it was meant to be reset to False
        # to hold a single frame — confirm against the intended UX.
        getFrame = True
# Shared state between the ROS image callback and the main loop below.
gFrame = np.zeros((640,640,3), np.uint8)
gCapStatus = False
getFrame = True
loopGetFrame = False
if __name__ == '__main__':
    rospy.init_node('tbm_tld_tracker_node')
    rospy.Subscriber("/image_raw", sensor_msgs.msg.CompressedImage, compressed_detect_and_draw)
    pub = rospy.Publisher("roi",ROI,queue_size=10)
    tld_roi = ROI()
    # rate = rospy.Rate(10)
    # rate.sleep()
    # Step 1: let the user pick the frame on which the ROI is selected
    # ('n' renders the next frame, 'y' accepts the current one).
    print("按 n 渲染下一帧,按 y 设定当前帧作为ROI区域选择帧")
    while True:
        _key = cv2.waitKey(0) & 0xFF
        if(_key == ord('n')):
            # gCapStatus,gFrame = gVideoDevice.read()
            getFrame = True
        if(_key == ord('y')):
            break
        cv2.imshow("Pick frame",gFrame)
    # Step 2: let the user draw the region of interest.
    cv2.destroyWindow("Pick frame")
    gROI = cv2.selectROI("ROI frame",gFrame,False)
    if (not gROI):
        # Empty selection: abort.
        print("空框选,退出")
        quit()
    # Step 3: build the tracker and initialize it on the chosen region.
    gTracker = Tracker(tracker_type="TLD")
    gTracker.initWorking(gFrame,gROI)
    # Step 4: frame loop — track until the node shuts down or user quits.
    while not rospy.is_shutdown():
        # gCapStatus, gFrame = gVideoDevice.read()
        loopGetFrame = True
        if(gCapStatus):
            # Show the tracking result.
            _item = gTracker.track(gFrame)
            cv2.imshow("Track result",_item.getFrame())
            if _item.getMessage():
                # Print the tracking data.
                print(_item.getMessage())
            _key = cv2.waitKey(1) & 0xFF
            # 'q' or ESC quits; 'r' restarts tracking from the initial ROI.
            if (_key == ord('q')) | (_key == 27):
                break
            if (_key == ord('r')) :
                # User requested the initial ROI again.
                print("用户请求用初始ROI")
                gTracker = Tracker(tracker_type="TLD")
                gTracker.initWorking(gFrame, gROI)
        else:
            # Frame capture failed.
            print("捕获帧失败")
            quit()
| [
"cv2.rectangle",
"cv2.TrackerGOTURN_create",
"rospy.init_node",
"cv2.TrackerKCF_create",
"cv2.imshow",
"cv2.TrackerMedianFlow_create",
"cv2.__version__.split",
"cv2.TrackerMIL_create",
"cv2.Tracker_create",
"cv_bridge.CvBridge",
"rospy.Subscriber",
"cv2.waitKey",
"sensor_msgs.msg.RegionOfInt... | [((350, 360), 'cv_bridge.CvBridge', 'CvBridge', ([], {}), '()\n', (358, 360), False, 'from cv_bridge import CvBridge\n'), ((4002, 4035), 'numpy.zeros', 'np.zeros', (['(640, 640, 3)', 'np.uint8'], {}), '((640, 640, 3), np.uint8)\n', (4010, 4035), True, 'import numpy as np\n'), ((4124, 4163), 'rospy.init_node', 'rospy.init_node', (['"""tbm_tld_tracker_node"""'], {}), "('tbm_tld_tracker_node')\n", (4139, 4163), False, 'import rospy\n'), ((4168, 4263), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/image_raw"""', 'sensor_msgs.msg.CompressedImage', 'compressed_detect_and_draw'], {}), "('/image_raw', sensor_msgs.msg.CompressedImage,\n compressed_detect_and_draw)\n", (4184, 4263), False, 'import rospy\n'), ((4270, 4312), 'rospy.Publisher', 'rospy.Publisher', (['"""roi"""', 'ROI'], {'queue_size': '(10)'}), "('roi', ROI, queue_size=10)\n", (4285, 4312), False, 'import rospy\n'), ((4325, 4330), 'sensor_msgs.msg.RegionOfInterest', 'ROI', ([], {}), '()\n', (4328, 4330), True, 'from sensor_msgs.msg import RegionOfInterest as ROI\n'), ((4725, 4756), 'cv2.destroyWindow', 'cv2.destroyWindow', (['"""Pick frame"""'], {}), "('Pick frame')\n", (4742, 4756), False, 'import cv2\n'), ((4768, 4809), 'cv2.selectROI', 'cv2.selectROI', (['"""ROI frame"""', 'gFrame', '(False)'], {}), "('ROI frame', gFrame, False)\n", (4781, 4809), False, 'import cv2\n'), ((892, 918), 'cv2.__version__.split', 'cv2.__version__.split', (['"""."""'], {}), "('.')\n", (913, 918), False, 'import cv2\n'), ((4656, 4688), 'cv2.imshow', 'cv2.imshow', (['"""Pick frame"""', 'gFrame'], {}), "('Pick frame', gFrame)\n", (4666, 4688), False, 'import cv2\n'), ((4993, 5012), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (5010, 5012), False, 'import rospy\n'), ((1199, 1231), 'cv2.Tracker_create', 'cv2.Tracker_create', (['tracker_type'], {}), '(tracker_type)\n', (1217, 1231), False, 'import cv2\n'), ((4465, 4479), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (4476, 4479), 
False, 'import cv2\n'), ((1320, 1348), 'cv2.TrackerBoosting_create', 'cv2.TrackerBoosting_create', ([], {}), '()\n', (1346, 1348), False, 'import cv2\n'), ((1418, 1441), 'cv2.TrackerMIL_create', 'cv2.TrackerMIL_create', ([], {}), '()\n', (1439, 1441), False, 'import cv2\n'), ((1511, 1534), 'cv2.TrackerKCF_create', 'cv2.TrackerKCF_create', ([], {}), '()\n', (1532, 1534), False, 'import cv2\n'), ((1604, 1627), 'cv2.TrackerTLD_create', 'cv2.TrackerTLD_create', ([], {}), '()\n', (1625, 1627), False, 'import cv2\n'), ((1704, 1734), 'cv2.TrackerMedianFlow_create', 'cv2.TrackerMedianFlow_create', ([], {}), '()\n', (1732, 1734), False, 'import cv2\n'), ((1807, 1833), 'cv2.TrackerGOTURN_create', 'cv2.TrackerGOTURN_create', ([], {}), '()\n', (1831, 1833), False, 'import cv2\n'), ((1904, 1928), 'cv2.TrackerCSRT_create', 'cv2.TrackerCSRT_create', ([], {}), '()\n', (1926, 1928), False, 'import cv2\n'), ((5360, 5374), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (5371, 5374), False, 'import cv2\n'), ((2900, 2947), 'cv2.rectangle', 'cv2.rectangle', (['frame', 'p1', 'p2', '(255, 0, 0)', '(2)', '(1)'], {}), '(frame, p1, p2, (255, 0, 0), 2, 1)\n', (2913, 2947), False, 'import cv2\n')] |
# -*- coding:utf-8 -*-
# @Author: ZhaoWen <<EMAIL>>
# @Date: 2021/1/2
# @GiteePath: https://gitee.com/openeuler2020/team-1186152014
from method_analysis_utils.scanner import get_scanner,token_type
import os
import logging.config
from method_analysis_utils.complier import get_complier
# Configure logging from the external config file
logging.config.fileConfig('logging.conf')
# Module-level logger shared by the functions below.
logger = logging.getLogger()
def comfig_complier():
    """
    Assemble the complier.

    :return: a configured parser instance
    """
    # Nothing to configure beyond what get_complier() already does.
    return get_complier()
def config_scanner():
    """
    Assemble the scanner.

    :return: a configured scanner instance
    """
    scanner = get_scanner()
    # Reset scanner state before installing the token definitions.
    scanner.method_list = []
    scanner.left_single = 0
    scanner.right_single = 0
    # Token definitions keyed as the scanner expects.  A method header
    # carries: a name ([a-zA-Z]+), a parenthesized parameter list, a return
    # type, and an opening brace that marks the end of the header.
    token_type_dict = {
        # access control modifier
        "access_token": token_type("access_token", "default|public|protected|private"),
        # keyword
        "key_token": token_type("key_token", "final|abstract|static|synchronized"),
        # whether there is a following line
        "next_token": token_type("next_token", "[//]+"),
        # method continued from the previous line
        "next_method_token": token_type("next_method_token", "([a-zA-Z]+)\).*{"),
        # essential method-header token
        "imp_token": token_type("imp_token", "(.*)([a-zA-Z]+)(\s){0,}(\(.*\))[a-zA-Z\s]{0,}{"),
        # catch-all for irrelevant characters
        "invalid_token": token_type("invalid_token", ".*"),
        # interface declaration
        "interface_token": token_type("interface_token", "\s(interface)\s|\s(@interface)\s"),
        # class declaration
        "class_token": token_type("class_token", "(class)\s(.*){(.*)"),
        # package statement
        "package_token": token_type("package_token", "^package"),
        # opening brace
        "left_single_token": token_type("left_single_token", "(.*){(.*)"),
        # closing brace (NOTE: the token *name* keeps the original capital
        # 'S' spelling while the dict key is all lowercase -- preserved as-is)
        "right_single_token": token_type("right_Single_token", "(.*)}(.*)"),
        # both braces on one line
        "all_single_token": token_type("all_single_token", "(.*)}(.*){(.*)"),
    }
    scanner.set_token_type(token_type_dict)
    return scanner
def job_start(path):
    '''
    Entry point of the API analysis tool.

    :param path: path of the source file to scan
    :return: [file/class info, externally accessible API list,
              externally inaccessible API list, whether the file is an
              extractable class file]
    '''
    s = config_scanner()
    isClass = False
    ###### start scanning the source code #######
    s.read_file(path)
    method_list = s.find_method()
    # The last element of method_list flags whether this is an extractable
    # class file (True) or not (False) -- presumably set by find_method().
    if method_list.pop(-1):
        isClass = True
        for m in method_list:
            logging.info(m)
        logger.info("总共提取到:(" + str(len(method_list)) + ") 行")
    else:
        logging.info("不是待提取文件")
    s.close_file()
    ###########################
    #### start parsing the extracted method headers ####
    c = comfig_complier()
    # Two lists: one for externally accessible methods, the other for
    # methods that cannot be accessed from outside.
    public_list = []
    unpublic_list = []
    info_list = []
    c.complier_start()
    for i in method_list:
        if type(i) != dict:
            if c.complier_method(i):
                public_list.append(i)
                logger.info("public -> "+i)
            else:
                unpublic_list.append(i)
                logger.info("unpublic -> "+i)
        else:
            # dict entries carry package/class metadata instead of a method.
            try:
                info_list.append(i["package"].replace(";", "").strip())
                info_list.append(i["class"].replace("{", "").strip())
            except KeyError as e:
                logging.info(str(type(e))+"......"+str(e.args))
    c.complier_close()
    ###########################
    # file/class info | externally accessible API list | internal API list |
    # whether this is an extractable class file (i.e. not an interface file)
    return [info_list,public_list,unpublic_list,isClass]
"method_analysis_utils.scanner.get_scanner",
"method_analysis_utils.scanner.token_type",
"method_analysis_utils.complier.get_complier"
] | [((455, 469), 'method_analysis_utils.complier.get_complier', 'get_complier', ([], {}), '()\n', (467, 469), False, 'from method_analysis_utils.complier import get_complier\n'), ((601, 614), 'method_analysis_utils.scanner.get_scanner', 'get_scanner', ([], {}), '()\n', (612, 614), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1045, 1107), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""access_token"""', '"""default|public|protected|private"""'], {}), "('access_token', 'default|public|protected|private')\n", (1055, 1107), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1139, 1200), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""key_token"""', '"""final|abstract|static|synchronized"""'], {}), "('key_token', 'final|abstract|static|synchronized')\n", (1149, 1200), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1234, 1267), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""next_token"""', '"""[//]+"""'], {}), "('next_token', '[//]+')\n", (1244, 1267), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1312, 1364), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""next_method_token"""', '"""([a-zA-Z]+)\\\\).*{"""'], {}), "('next_method_token', '([a-zA-Z]+)\\\\).*{')\n", (1322, 1364), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1399, 1476), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""imp_token"""', '"""(.*)([a-zA-Z]+)(\\\\s){0,}(\\\\(.*\\\\))[a-zA-Z\\\\s]{0,}{"""'], {}), "('imp_token', '(.*)([a-zA-Z]+)(\\\\s){0,}(\\\\(.*\\\\))[a-zA-Z\\\\s]{0,}{')\n", (1409, 1476), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1525, 1558), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""invalid_token"""', '""".*"""'], {}), "('invalid_token', '.*')\n", (1535, 1558), False, 'from 
method_analysis_utils.scanner import get_scanner, token_type\n'), ((1595, 1664), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""interface_token"""', '"""\\\\s(interface)\\\\s|\\\\s(@interface)\\\\s"""'], {}), "('interface_token', '\\\\s(interface)\\\\s|\\\\s(@interface)\\\\s')\n", (1605, 1664), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1690, 1738), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""class_token"""', '"""(class)\\\\s(.*){(.*)"""'], {}), "('class_token', '(class)\\\\s(.*){(.*)')\n", (1700, 1738), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1771, 1810), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""package_token"""', '"""^package"""'], {}), "('package_token', '^package')\n", (1781, 1810), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1846, 1890), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""left_single_token"""', '"""(.*){(.*)"""'], {}), "('left_single_token', '(.*){(.*)')\n", (1856, 1890), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((1927, 1972), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""right_Single_token"""', '"""(.*)}(.*)"""'], {}), "('right_Single_token', '(.*)}(.*)')\n", (1937, 1972), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n'), ((2010, 2058), 'method_analysis_utils.scanner.token_type', 'token_type', (['"""all_single_token"""', '"""(.*)}(.*){(.*)"""'], {}), "('all_single_token', '(.*)}(.*){(.*)')\n", (2020, 2058), False, 'from method_analysis_utils.scanner import get_scanner, token_type\n')] |
from configparser import RawConfigParser
# Shared parser instance; the configuration file is read once at import time.
config = RawConfigParser()
config.read("configuration/config.ini")
class ReadConfig():
    """Static accessors for values in the 'common info' config section."""

    @staticmethod
    def getApplicationURL():
        """Return the base URL of the application."""
        return config.get('common info', 'baseURL')

    @staticmethod
    def getUserName():
        """Return the configured user name."""
        return config.get('common info', 'username')

    @staticmethod
    def getPassword():
        """Return the configured password."""
        return config.get('common info', 'password')
| [
"configparser.RawConfigParser"
] | [((51, 68), 'configparser.RawConfigParser', 'RawConfigParser', ([], {}), '()\n', (66, 68), False, 'from configparser import RawConfigParser\n')] |
from __future__ import print_function
import warnings
import numpy as np
C4 = 261.6  # Hz -- middle C
piano_max = 4186.01  # Hz -- highest key on a standard piano (C8)
piano_min = 27.5000  # Hz -- lowest key on a standard piano (A0)
__all__ = ['cent_per_value','get_f_min','get_f_max','FrequencyScale']
def cent_per_value(f_min, f_max, v_min, v_max):
    """
    Compute the scale step in cents per y value.

    Cents are a logarithmic unit of tone intervals
    (https://en.wikipedia.org/wiki/Cent_(music)).

    Parameters
    ----------
    f_min : float
        Minimum frequency.
    f_max : float
        Maximum frequency.
    v_min : float
        Minimum y value.
    v_max : float
        Maximum y value.

    Returns
    -------
    float
        A y-scale parameter in units of cents/y value.
    """
    # Total interval between f_min and f_max in cents (1200 cents per
    # octave), spread evenly across the y-value range.
    total_cents = 1200 * np.log2(f_max / f_min)
    return total_cents / (v_max - v_min)
def get_f_min(f_max, cents_per_value, v_min, v_max):
    """
    Compute the minimum frequency of a scale anchored at ``f_max``.

    Cents are a logarithmic unit of tone intervals
    (https://en.wikipedia.org/wiki/Cent_(music)).

    Parameters
    ----------
    f_max : float
        Maximum frequency.
    cents_per_value : float
        A y scale parameter in units of cents/y value.
    v_min : float
        Minimum y value.
    v_max : float
        Maximum y value.

    Returns
    -------
    float
        Minimum frequency.
    """
    # Number of octaves spanned by the y-value range at this rate.
    octaves = (v_max - v_min) * cents_per_value / 1200
    return f_max / 2 ** octaves
def get_f_max(f_min, cents_per_value, v_min, v_max):
    """
    Compute the maximum frequency of a scale anchored at ``f_min``.

    Cents are a logarithmic unit of tone intervals
    (https://en.wikipedia.org/wiki/Cent_(music)).

    Parameters
    ----------
    f_min : float
        Minimum frequency.
    cents_per_value : float
        A y scale parameter in units of cents/y value.
    v_min : float
        Minimum y value.
    v_max : float
        Maximum y value.

    Returns
    -------
    float
        Maximum frequency.
    """
    # Number of octaves spanned by the y-value range at this rate.
    octaves = (v_max - v_min) * cents_per_value / 1200
    return f_min * 2 ** octaves
class FrequencyScale(object):
    """
    Builds a frequency scale that maps y values to frequencies.

    Exactly two of the three frequency inputs must be supplied; the third
    is derived from the other two:

    - frequency_min and frequency_max
    - frequency_max and cents_per_value
    - frequency_min and cents_per_value

    Cents are a logarithmic unit of tone intervals
    (https://en.wikipedia.org/wiki/Cent_(music)).

    Parameters
    ----------
    value_min : float
        Minimum y value.
    value_max : float
        Maximum y value.
    frequency_min : float, optional
        Minimum frequency.
    frequency_max : float, optional
        Maximum frequency.
    cents_per_value : float, optional
        A y scale parameter in units of cents/y value.
    verbose : bool
        Flag to toggle printing functions.
    """

    def __init__(self, value_min, value_max,
                 frequency_min=None, frequency_max=None, cents_per_value=None,
                 verbose=False):
        if verbose:
            print('initial vals (fmin, fmax, vmin, vmax):',
                  frequency_min, frequency_max, value_min, value_max)

        # Record which of the optional inputs were actually supplied
        # (identity comparison `is not None` instead of `!= None`).
        self.y_inputs = []
        if frequency_min is not None:
            self.y_inputs.append('frequency_min')
        if frequency_max is not None:
            self.y_inputs.append('frequency_max')
        if cents_per_value is not None:
            self.y_inputs.append('cents_per_value')
        self.y_n_inputs = len(self.y_inputs)

        # Exactly two of the three inputs are required to pin down the scale.
        if self.y_n_inputs != 2:
            raise Exception('Frequency takes 2 of the frequency_min, frequency_max, and cents_per_value inputs. You inputted {} inputs, which were {}.'.format(
                self.y_n_inputs, self.y_inputs))

        # Derive whichever quantity was omitted from the two given ones.
        if cents_per_value is None:
            cents_per_value = cent_per_value(frequency_min, frequency_max,
                                            value_min, value_max)
        if frequency_min is None:
            frequency_min = get_f_min(frequency_max, cents_per_value,
                                      value_min, value_max)
        if frequency_max is None:
            frequency_max = get_f_max(frequency_min, cents_per_value,
                                      value_min, value_max)

        self.y_value_min = value_min
        self.y_value_max = value_max
        self.y_frequency_max = frequency_max
        self.y_frequency_min = frequency_min
        self.y_cents_per_value = cents_per_value

        # Warn (without failing) when the scale leaves the piano range or
        # the y range is inverted.
        if self.y_frequency_max > piano_max:
            warnings.warn('Your maximum frequency of {} Hz is above a pianos maximum of {} Hz.'.format(
                np.round(self.y_frequency_max, 2), piano_max))
        if self.y_frequency_min < piano_min:
            warnings.warn('Your minimum frequency of {} Hz is below a pianos minimum of {} Hz.'.format(
                np.round(self.y_frequency_min, 2), piano_min))
        if self.y_value_min > self.y_value_max:
            warnings.warn('Min y value is greater than max y value.')

        if verbose:
            print('initial vals (f_min, f_max, y_min, y_max):', self.y_frequency_min,
                  self.y_frequency_max, self.y_value_min, self.y_value_max)

        def freq(v):
            # Map a single y value to its frequency on this scale.
            return self.y_frequency_min * \
                2 ** ((v - self.y_value_min) * self.y_cents_per_value / 1200)

        # Helper attribute: translate a sequence of y values to frequencies.
        self.y_freq_translate_to_range = lambda array: list(map(freq, array))

        if verbose:
            print('Frequency Scale Built')
| [
"warnings.warn",
"numpy.log2",
"numpy.round"
] | [((841, 863), 'numpy.log2', 'np.log2', (['(f_max / f_min)'], {}), '(f_max / f_min)\n', (848, 863), True, 'import numpy as np\n'), ((5680, 5737), 'warnings.warn', 'warnings.warn', (['"""Min y value is greater than max y value."""'], {}), "('Min y value is greater than max y value.')\n", (5693, 5737), False, 'import warnings\n'), ((5361, 5394), 'numpy.round', 'np.round', (['self.y_frequency_max', '(2)'], {}), '(self.y_frequency_max, 2)\n', (5369, 5394), True, 'import numpy as np\n'), ((5573, 5606), 'numpy.round', 'np.round', (['self.y_frequency_min', '(2)'], {}), '(self.y_frequency_min, 2)\n', (5581, 5606), True, 'import numpy as np\n')] |
from apscheduler.schedulers.background import BackgroundScheduler
from des.ccd import start_pipeline
def download_queue():
    # Periodic job body: kick off the CCD download pipeline.
    start_pipeline()
# Background scheduler that re-runs the download job at a fixed interval.
scheduler = BackgroundScheduler()
scheduler.add_job(
    download_queue,
    'interval',
    # minutes=1
    seconds=20,
    max_instances=1,  # never let two pipeline runs overlap
    id='des_download_ccd'
)
scheduler.start()
| [
"des.ccd.start_pipeline",
"apscheduler.schedulers.background.BackgroundScheduler"
] | [((161, 182), 'apscheduler.schedulers.background.BackgroundScheduler', 'BackgroundScheduler', ([], {}), '()\n', (180, 182), False, 'from apscheduler.schedulers.background import BackgroundScheduler\n'), ((130, 146), 'des.ccd.start_pipeline', 'start_pipeline', ([], {}), '()\n', (144, 146), False, 'from des.ccd import start_pipeline\n')] |
#!/usr/bin/env python
import os
import sys
import re
import math
import random
import matplotlib.pyplot as plt
import numpy as np
from google.protobuf import text_format
sys.path.append(os.path.dirname(os.path.realpath(__file__))+"/../../build")
import gsbn_pb2
# Require one command-line argument: the output file name.
# argv[0] is always the script path, so fewer than 2 entries means the
# argument is missing.  The original `len(sys.argv) < 1` test could never
# be true, so the guard never fired and sys.argv[1] below would raise
# IndexError instead of printing usage.
if len(sys.argv) < 2:
    print("Arguments wrong! Please retry with command :")
    print("python "+os.path.realpath(__file__)+" <output file name>")
    sys.exit(-1)
# Output file path from the command line.
filename = sys.argv[1]
patterns = []
masks = []
# Hypercolumn / minicolumn dimensions of the network.
DIM_HCU = 10
DIM_MCU = 10
rd = gsbn_pb2.StimRawData()
# 0x7fffffff appears to act as a "no stimulus" sentinel for a column.
p = [0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,1,2,3,4,5,6,7,8,9]
patterns.append(p)
# NOTE(review): 0xfffffff below has one fewer 'f' than the 0x7fffffff
# sentinel used everywhere else -- possibly a typo; confirm intended value.
p = [0,1,2,3,4,5,6,7,8,0xfffffff]
patterns.append(p)
p = [0,1,2,3,4,5,6,7,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,1,2,3,4,5,6,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,1,2,3,4,5,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,1,2,3,4,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,1,2,3,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,1,2,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,1,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
p = [0,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff,0x7fffffff]
patterns.append(p)
# Two masks: all-off and all-on, one value per hypercolumn.
m = [0,0,0,0,0,0,0,0,0,0]
masks.append(m)
m = [1,1,1,1,1,1,1,1,1,1]
masks.append(m)
# Flatten patterns and masks row-major into the protobuf message.
for p in patterns:
    for v in p:
        rd.data.append(v)
for p in masks:
    for v in p:
        rd.mask.append(v)
rd.data_rows = len(patterns)
rd.data_cols = DIM_HCU
rd.mask_rows = len(masks)
rd.mask_cols = DIM_HCU
# Serialize the message to the requested output file.
with open(filename, "wb+") as f:
    f.write(rd.SerializeToString())
| [
"gsbn_pb2.StimRawData",
"os.path.realpath"
] | [((502, 524), 'gsbn_pb2.StimRawData', 'gsbn_pb2.StimRawData', ([], {}), '()\n', (522, 524), False, 'import gsbn_pb2\n'), ((204, 230), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (220, 230), False, 'import os\n'), ((359, 385), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (375, 385), False, 'import os\n')] |
#!/usr/bin/env python3
from geopy.geocoders import Nominatim
# Nominatim requires a custom user-agent string per its usage policy.
locator = Nominatim(user_agent="getcity")
# Forward-geocode the city name (performs a network request).
loc = locator.geocode("Munich")
print(loc.latitude, loc.longitude)
| [
"geopy.geocoders.Nominatim"
] | [((74, 105), 'geopy.geocoders.Nominatim', 'Nominatim', ([], {'user_agent': '"""getcity"""'}), "(user_agent='getcity')\n", (83, 105), False, 'from geopy.geocoders import Nominatim\n')] |
from django.contrib.auth.models import User
from django.db import models
from markdownx.models import MarkdownxField
class Category(models.Model):
    """
    Represents a COCO category (id, name, supercategory).
    """
    # Category id as assigned by the COCO dataset (not the DB primary key).
    coco_id = models.IntegerField(unique=True, db_index=True)
    name = models.CharField(max_length=50)
    supercategory = models.CharField(max_length=50)

    def __str__(self):
        return "Category {}: {} ({})".format(self.coco_id, self.name, self.supercategory)
class Task(models.Model):
    """
    Represents a Task.
    """
    # Task number shown to users; unique across tasks.
    number = models.IntegerField(unique=True, db_index=True)
    name = models.CharField(max_length=50)
    # Optional long description of the task.
    desc = models.TextField(blank=True, null=True)
    # Optional illustrative image accompanying the description.
    desc_image = models.ImageField(upload_to='task_images', blank=True, default=None, null=True)

    def __str__(self):
        return "Task {}: {}".format(self.number, self.name)
class Image(models.Model):
    """
    Represents an image in the dataset.
    """
    # Image id as assigned by the COCO dataset (not the DB primary key).
    coco_id = models.IntegerField(unique=True, db_index=True)
    # File path of the image -- presumably relative to the dataset root; confirm.
    path = models.CharField(max_length=200)
    # Dataset split the image belongs to (e.g. train/val) -- TODO confirm.
    set_name = models.CharField(max_length=10)
    width = models.IntegerField()
    height = models.IntegerField()
    # Tasks in which this image is used.
    related_tasks = models.ManyToManyField(Task)

    def __str__(self):
        return "Image {}".format(self.coco_id)
class Annot(models.Model):
    """
    Represents a COCO annotation for instances.
    """
    # Annotation id as assigned by the COCO dataset (not the DB primary key).
    coco_id = models.IntegerField(unique=True, db_index=True)
    image = models.ForeignKey(Image, on_delete=models.CASCADE)
    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    area = models.FloatField()
    iscrowd = models.BooleanField()
    # Bounding box stored as four scalar fields: top-left x/y, width, height.
    bbox_x = models.FloatField()
    bbox_y = models.FloatField()
    bbox_w = models.FloatField()
    bbox_h = models.FloatField()
    segmentation = models.TextField() # I am going to store the segmentation as a text field.
    # I will convert it into json on demand.

    def __str__(self):
        return "Annot {} ({})".format(self.coco_id, self.category)

    def get_bbox(self):
        """Return the bounding box as a [x, y, w, h] list."""
        return [self.bbox_x, self.bbox_y, self.bbox_w, self.bbox_h]

    def set_bbox(self, bbox):
        """Set the bounding box fields from an (x, y, w, h) sequence."""
        bbox = tuple(bbox)
        self.bbox_x, self.bbox_y, self.bbox_w, self.bbox_h = bbox
class Job(models.Model):
    """
    Represents a job (an annotation of the preferred objects) for an image by a user.
    """
    task = models.ForeignKey(Task, on_delete=models.CASCADE, db_index=True)
    image = models.ForeignKey(Image, on_delete=models.CASCADE, db_index=True)
    user = models.ForeignKey(User, on_delete=models.CASCADE, db_index=True)
    # Marks a job used for demonstration -- presumably shown to annotators
    # as an example; confirm against the views that read it.
    is_example = models.BooleanField(default=False, db_index=True)
    is_done = models.BooleanField(default=False, db_index=True)
    date_created = models.DateTimeField(auto_now_add=True)

    def __str__(self):
        return "Job[task={}, image={}, user={}]".format(self.task.name, self.image_id, self.user.first_name)
class PreferredAnnot(models.Model):
    """Links a Job to an Annot selected as preferred within that job."""
    job = models.ForeignKey(Job, on_delete=models.CASCADE, db_index=True)
    annot = models.ForeignKey(Annot, on_delete=models.CASCADE, db_index=True)
class AnnotationPolicy(models.Model):
    """Stores the annotation policy text as Markdown."""
    policy = MarkdownxField()
| [
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.DateTimeField",
"django.db.models.BooleanField",
"markdownx.models.MarkdownxField",
"django.db.models.ImageField",
"dja... | [((210, 257), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'unique': '(True)', 'db_index': '(True)'}), '(unique=True, db_index=True)\n', (229, 257), False, 'from django.db import models\n'), ((269, 300), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (285, 300), False, 'from django.db import models\n'), ((321, 352), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (337, 352), False, 'from django.db import models\n'), ((546, 593), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'unique': '(True)', 'db_index': '(True)'}), '(unique=True, db_index=True)\n', (565, 593), False, 'from django.db import models\n'), ((605, 636), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (621, 636), False, 'from django.db import models\n'), ((648, 687), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (664, 687), False, 'from django.db import models\n'), ((705, 784), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""task_images"""', 'blank': '(True)', 'default': 'None', 'null': '(True)'}), "(upload_to='task_images', blank=True, default=None, null=True)\n", (722, 784), False, 'from django.db import models\n'), ((967, 1014), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'unique': '(True)', 'db_index': '(True)'}), '(unique=True, db_index=True)\n', (986, 1014), False, 'from django.db import models\n'), ((1026, 1058), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1042, 1058), False, 'from django.db import models\n'), ((1074, 1105), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)'}), '(max_length=10)\n', (1090, 1105), False, 'from django.db import models\n'), ((1118, 1139), 
'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1137, 1139), False, 'from django.db import models\n'), ((1153, 1174), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1172, 1174), False, 'from django.db import models\n'), ((1195, 1223), 'django.db.models.ManyToManyField', 'models.ManyToManyField', (['Task'], {}), '(Task)\n', (1217, 1223), False, 'from django.db import models\n'), ((1402, 1449), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'unique': '(True)', 'db_index': '(True)'}), '(unique=True, db_index=True)\n', (1421, 1449), False, 'from django.db import models\n'), ((1462, 1512), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Image'], {'on_delete': 'models.CASCADE'}), '(Image, on_delete=models.CASCADE)\n', (1479, 1512), False, 'from django.db import models\n'), ((1528, 1581), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'on_delete': 'models.CASCADE'}), '(Category, on_delete=models.CASCADE)\n', (1545, 1581), False, 'from django.db import models\n'), ((1593, 1612), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (1610, 1612), False, 'from django.db import models\n'), ((1627, 1648), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (1646, 1648), False, 'from django.db import models\n'), ((1662, 1681), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (1679, 1681), False, 'from django.db import models\n'), ((1695, 1714), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (1712, 1714), False, 'from django.db import models\n'), ((1728, 1747), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (1745, 1747), False, 'from django.db import models\n'), ((1761, 1780), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (1778, 1780), False, 'from django.db import models\n'), ((1800, 1818), 'django.db.models.TextField', 'models.TextField', ([], {}), 
'()\n', (1816, 1818), False, 'from django.db import models\n'), ((2370, 2434), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Task'], {'on_delete': 'models.CASCADE', 'db_index': '(True)'}), '(Task, on_delete=models.CASCADE, db_index=True)\n', (2387, 2434), False, 'from django.db import models\n'), ((2447, 2512), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Image'], {'on_delete': 'models.CASCADE', 'db_index': '(True)'}), '(Image, on_delete=models.CASCADE, db_index=True)\n', (2464, 2512), False, 'from django.db import models\n'), ((2524, 2588), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'db_index': '(True)'}), '(User, on_delete=models.CASCADE, db_index=True)\n', (2541, 2588), False, 'from django.db import models\n'), ((2606, 2655), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'db_index': '(True)'}), '(default=False, db_index=True)\n', (2625, 2655), False, 'from django.db import models\n'), ((2670, 2719), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'db_index': '(True)'}), '(default=False, db_index=True)\n', (2689, 2719), False, 'from django.db import models\n'), ((2739, 2778), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2759, 2778), False, 'from django.db import models\n'), ((2960, 3023), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Job'], {'on_delete': 'models.CASCADE', 'db_index': '(True)'}), '(Job, on_delete=models.CASCADE, db_index=True)\n', (2977, 3023), False, 'from django.db import models\n'), ((3036, 3101), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Annot'], {'on_delete': 'models.CASCADE', 'db_index': '(True)'}), '(Annot, on_delete=models.CASCADE, db_index=True)\n', (3053, 3101), False, 'from django.db import models\n'), ((3155, 3171), 'markdownx.models.MarkdownxField', 'MarkdownxField', ([], {}), '()\n', (3169, 3171), 
False, 'from markdownx.models import MarkdownxField\n')] |
import json
from newsservice.models import News
from flask import (Blueprint, request)
bp = Blueprint('request', __name__)
@bp.route('/requestnews', methods=['GET', 'POST'])
def requestdb():
    """
    Receive filter values as JSON and use them to query the database.

    Builds a list of every database entry matching all non-empty filters,
    then converts that list to a JSON document.
    :return: JSON document containing all database entries which match the filter values.
    """
    payload = request.json
    articles = News.query.all()
    # (JSON key, predicate deciding whether an article passes that filter),
    # applied in the same order as the original cascade of checks.
    filter_specs = [
        ('id', lambda a: str(a.id) == payload['id']),
        ('tag', lambda a: a.tag == payload['tag']),
        ('author', lambda a: payload['author'] in a.author),
        ('title', lambda a: payload['title'] in a.title),
        ('text', lambda a: payload['text'] in a.text),
        ('facilityid', lambda a: payload['facilityid'] in a.facilityid),
        ('older', lambda a: a.time <= payload['older']),
        ('newer', lambda a: a.time >= payload['newer']),
    ]
    # An empty string means "filter not set"; skip it.
    for key, keep in filter_specs:
        if payload[key] != "":
            articles = [a for a in articles if keep(a)]
    data = []
    for article in articles:
        data.insert(0, {'id': article.id, 'title': article.title, 'author': article.author, 'time': article.time, 'tag': article.tag,
                        'text': article.text, 'facilityid': article.facilityid})
    return json.dumps(data)
| [
"json.dumps",
"flask.Blueprint",
"newsservice.models.News.query.all"
] | [((94, 124), 'flask.Blueprint', 'Blueprint', (['"""request"""', '__name__'], {}), "('request', __name__)\n", (103, 124), False, 'from flask import Blueprint, request\n'), ((559, 575), 'newsservice.models.News.query.all', 'News.query.all', ([], {}), '()\n', (573, 575), False, 'from newsservice.models import News\n'), ((1892, 1908), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1902, 1908), False, 'import json\n')] |
import re
import unittest
from rexlex import Lexer
from rexlex.lexer.itemclass import get_itemclass
class TestableLexer(Lexer):
    """Test tuple state transitions including #pop."""
    # No log level configured for this test lexer.
    LOGLEVEL = None
    # Whitespace between tokens is skipped rather than tokenized.
    re_skip = re.compile('\s+')
    # State -> rules; each rule is (token name, pattern[, next state]).
    # Presumably the optional third element pushes the named state -- confirm
    # against the rexlex documentation.
    tokendefs = {
        'root': [
            ('Root', 'a', 'bar'),
            ('Root', 'e'),
        ],
        'foo': [
            ('Foo', 'd'),
        ],
        'bar': [
            ('Bar', 'b', 'bar'),
            ('Bar', 'c', 'foo'),
        ],
    }
class TupleTransTest(unittest.TestCase):
    """Lexing 'abcde' should walk root -> bar -> bar -> foo -> root."""

    text = 'abcde'
    Item = get_itemclass(text)
    # One expected Item per input character, in lexing order.
    expected = [
        Item(start=0, end=1, token='Root'),
        Item(start=1, end=2, token='Bar'),
        Item(start=2, end=3, token='Bar'),
        Item(start=3, end=4, token='Foo'),
        Item(start=4, end=5, token='Root'),
    ]

    def test(self):
        actual = list(TestableLexer(self.text))
        self.assertEqual(actual, self.expected)
| [
"rexlex.lexer.itemclass.get_itemclass",
"re.compile"
] | [((222, 240), 're.compile', 're.compile', (['"""\\\\s+"""'], {}), "('\\\\s+')\n", (232, 240), False, 'import re\n'), ((575, 594), 'rexlex.lexer.itemclass.get_itemclass', 'get_itemclass', (['text'], {}), '(text)\n', (588, 594), False, 'from rexlex.lexer.itemclass import get_itemclass\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue May 30 16:43:10 2017

CNN image-classification training script (provided code).
Selects one of three datasets (finger / face / angle) via `flag`, loads
grayscale images from per-class folders and trains a small CNN.

@author: Minsooyeo
"""
import os
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
from PIL import Image as im
import numpy as np
import utills as ut
import tensorflow as tf
# Global TF session, used implicitly by .eval()/.run() later in the script.
sess = tf.InteractiveSession()
train_epoch = 5000  # NOTE: overwritten by train_count further down
# Dataset selector constants.
FLAG_FINGER = 0
FLAG_FACE = 1
FLAG_ANGLE = 2
flag = FLAG_ANGLE
# Per-dataset class count and image sub-folder (Windows-style separators).
# Fixed: compare int values with `==`, not `is` -- identity comparison of
# ints only works by accident via CPython's small-int cache.
if flag == FLAG_FINGER:
    class_num = 5
    additional_path = '\\finger\\'
elif flag == FLAG_FACE:
    class_num = 6
    additional_path = '\\face\\'
elif flag == FLAG_ANGLE:
    class_num = 4
    additional_path = '\\angle\\'
else:
    raise Exception("Unknown flag %d" %flag)
# define parameters and discover the image files for each class
data_length = []   # number of images per class
dir_image = []     # image file names per class
data = []          # image arrays per class
label = []         # one-hot label arrays per class
data_shape = [298, 298]  # width, height of every input image
current_pwd = os.getcwd()
for i in range(class_num):
    # class folders are named 1..class_num
    dir_image.append(ut.search(current_pwd + additional_path + str(i + 1)))
    data_length.append(len(dir_image[i]))
    data.append(np.zeros([data_length[i], data_shape[1], data_shape[0]]))
    label.append(np.zeros([data_length[i], class_num]))
    label[i][:, i] = 1  # one-hot encode class i
# load data: read every image and convert to grayscale by averaging the
# last (colour-channel) axis
for q in range(class_num):
    for i in range(data_length[q]):
        if i % 100 == 0:
            print("%dth data is opening" %i)
        data[q][i, :, :] = np.mean(im.open(current_pwd + additional_path + str(q + 1) + '\\' + dir_image[q][i]), -1)
# Stack per-class arrays into one data/label matrix.
# Fixed: `==` instead of `is` for int comparison (identity only works by
# accident via CPython's small-int cache).
if flag == FLAG_FINGER:
    rawdata = np.concatenate((data[0], data[1], data[2], data[3], data[4]), axis=0)
    raw_label = np.concatenate((label[0], label[1], label[2], label[3], label[4]), axis=0)
elif flag == FLAG_FACE:
    rawdata = np.concatenate((data[0], data[1], data[2], data[3], data[4], data[5]), axis=0)
    raw_label = np.concatenate((label[0], label[1], label[2], label[3], label[4], label[5]), axis=0)
elif flag == FLAG_ANGLE:
    rawdata = np.concatenate((data[0], data[1], data[2], data[3]), axis=0)
    raw_label = np.concatenate((label[0], label[1], label[2], label[3]), axis=0)
else:
    raise Exception("Unknown class number %d" %class_num)
# Free the per-class copies.
del data
del label
# Shuffle samples and labels with the same permutation, then flatten each
# image into a single feature vector.
total_data_poin = rawdata.shape[0]
permutation = np.random.permutation(total_data_poin)
rawdata = rawdata[permutation, :, :]
raw_label = raw_label[permutation, :]
rawdata = np.reshape(rawdata, [rawdata.shape[0], data_shape[0] * data_shape[1]])
########################################################################################################
# Train/test split sizes per dataset.
img_width = data_shape[0]
img_height = data_shape[1]
# Fixed: `==` instead of `is` for int comparison.
if flag == FLAG_FINGER:
    train_count = 5000  # Modify this to test finger recognition (tested with 2000 and 5000).
    test_count = 490
elif flag == FLAG_FACE:
    train_count = 2000  # With under 5000 train samples, or from overfitting, NaN problems occur. Do not change this value!
    test_count = 490
elif flag == FLAG_ANGLE:
    train_count = 6000  # With too few train samples, or from overfitting, NaN problems occur. Do not change this value!
    test_count = 1000
else:
    raise Exception("unknown flag %d" %flag)
# One training step per sample count.
train_epoch = train_count
# Slice train/test splits (variable names follow the MNIST tutorial).
TrainX = rawdata[:train_count]  # mnist.train.images
TrainY = raw_label[:train_count]  # mnist.train.labels
testX = rawdata[train_count:train_count+test_count]  # mnist.test.images
testY = raw_label[train_count:train_count+test_count]  # mnist.test.labels
# To test layer-count variants for finger classification, edit the branch below.
if flag == FLAG_FINGER:  # For finger classification, edit this part to test per-layer-count cases.
    CNNModel, x = ut._CNNModel(img_width=img_width, img_height=img_height,
                              kernel_info=[
                                  [3, 2, 32, True],
                                  [3, 2, 64, True],
                                  [3, 2, 128, True],
                                  [3, 2, 64, True],
                                  [3, 2, 128, True],
                                  # [3, 2, 128, True],
                              ])
elif flag == FLAG_FACE:  # For face recognition, two layers already separate the classes completely. Do not change the layer count.
    CNNModel, x = ut._CNNModel(img_width=img_width, img_height=img_height,
                              kernel_info=[
                                  [3, 2, 32, True],
                                  [3, 2, 64, True],
                                  # [3, 2, 128, True],
                                  # [3, 2, 64, True],
                                  # [3, 2, 128, True],
                                  # [3, 2, 128, True],
                              ])
elif flag == FLAG_ANGLE:
    CNNModel, x = ut._CNNModel(img_width=img_width, img_height=img_height,
                              kernel_info=[
                                  [1, 1, 32, True],
                                  # [1, 1, 64, True],
                                  # [1, 1, 128, True],
                                  # [1, 1, 64, True],
                                  # [1, 1, 128, True],
                                  # [3, 2, 128, True],
                              ])
else:
    raise Exception("Unknown flag %d" %flag)
# Assemble the rest of the network: flatten -> dropout -> softmax head,
# plus the training op and accuracy metric.
FlatModel = ut._FlatModel(CNNModel, fc_outlayer_count=128)
DropOut, keep_prob = ut._DropOut(FlatModel)
SoftMaxModel = ut._SoftMax(DropOut, label_count=class_num, fc_outlayer_count=128)
TrainStep, Accuracy, y_, correct_prediction = ut._SetAccuracy(SoftMaxModel, label_count=class_num)
# Initialise all variables and run the training loop.
sess.run(tf.global_variables_initializer())
for i in range(train_epoch):
    # Draw a mini-batch of 50 samples per step.
    tmp_trainX, tmp_trainY = ut.Nextbatch(TrainX, TrainY, 50)
    if i%100 == 0:
        # Evaluate on the current batch with dropout disabled (keep_prob=1.0).
        train_accuracy = Accuracy.eval(feed_dict={x: tmp_trainX, y_: tmp_trainY, keep_prob: 1.0})
        print("step %d, training accuracy %g"%(i, train_accuracy))
    # Gradient step with dropout keep probability 0.7.
    TrainStep.run(feed_dict={x: tmp_trainX, y_: tmp_trainY, keep_prob: 0.7})
# Final evaluation on (up to) 999 held-out test samples.
print("test accuracy %g" %Accuracy.eval(feed_dict={x: testX[1:1000, :], y_: testY[1:1000], keep_prob: 1.0}))
print("test accuracy %g" %Accuracy.eval(feed_dict={x: testX[1:1000, :], y_: testY[1:1000], keep_prob: 1.0})) | [
"tensorflow.InteractiveSession",
"numpy.reshape",
"utills._FlatModel",
"utills._DropOut",
"utills._SoftMax",
"os.getcwd",
"tensorflow.global_variables_initializer",
"utills._CNNModel",
"numpy.zeros",
"numpy.concatenate",
"utills.Nextbatch",
"utills._SetAccuracy",
"numpy.random.permutation"
] | [((305, 328), 'tensorflow.InteractiveSession', 'tf.InteractiveSession', ([], {}), '()\n', (326, 328), True, 'import tensorflow as tf\n'), ((809, 820), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (818, 820), False, 'import os\n'), ((2117, 2155), 'numpy.random.permutation', 'np.random.permutation', (['total_data_poin'], {}), '(total_data_poin)\n', (2138, 2155), True, 'import numpy as np\n'), ((2242, 2312), 'numpy.reshape', 'np.reshape', (['rawdata', '[rawdata.shape[0], data_shape[0] * data_shape[1]]'], {}), '(rawdata, [rawdata.shape[0], data_shape[0] * data_shape[1]])\n', (2252, 2312), True, 'import numpy as np\n'), ((4957, 5003), 'utills._FlatModel', 'ut._FlatModel', (['CNNModel'], {'fc_outlayer_count': '(128)'}), '(CNNModel, fc_outlayer_count=128)\n', (4970, 5003), True, 'import utills as ut\n'), ((5025, 5047), 'utills._DropOut', 'ut._DropOut', (['FlatModel'], {}), '(FlatModel)\n', (5036, 5047), True, 'import utills as ut\n'), ((5063, 5129), 'utills._SoftMax', 'ut._SoftMax', (['DropOut'], {'label_count': 'class_num', 'fc_outlayer_count': '(128)'}), '(DropOut, label_count=class_num, fc_outlayer_count=128)\n', (5074, 5129), True, 'import utills as ut\n'), ((5176, 5228), 'utills._SetAccuracy', 'ut._SetAccuracy', (['SoftMaxModel'], {'label_count': 'class_num'}), '(SoftMaxModel, label_count=class_num)\n', (5191, 5228), True, 'import utills as ut\n'), ((1423, 1492), 'numpy.concatenate', 'np.concatenate', (['(data[0], data[1], data[2], data[3], data[4])'], {'axis': '(0)'}), '((data[0], data[1], data[2], data[3], data[4]), axis=0)\n', (1437, 1492), True, 'import numpy as np\n'), ((1509, 1583), 'numpy.concatenate', 'np.concatenate', (['(label[0], label[1], label[2], label[3], label[4])'], {'axis': '(0)'}), '((label[0], label[1], label[2], label[3], label[4]), axis=0)\n', (1523, 1583), True, 'import numpy as np\n'), ((3361, 3531), 'utills._CNNModel', 'ut._CNNModel', ([], {'img_width': 'img_width', 'img_height': 'img_height', 'kernel_info': '[[3, 2, 32, True], [3, 2, 64, 
True], [3, 2, 128, True], [3, 2, 64, True], [\n 3, 2, 128, True]]'}), '(img_width=img_width, img_height=img_height, kernel_info=[[3, 2,\n 32, True], [3, 2, 64, True], [3, 2, 128, True], [3, 2, 64, True], [3, 2,\n 128, True]])\n', (3373, 3531), True, 'import utills as ut\n'), ((5239, 5272), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (5270, 5272), True, 'import tensorflow as tf\n'), ((5333, 5365), 'utills.Nextbatch', 'ut.Nextbatch', (['TrainX', 'TrainY', '(50)'], {}), '(TrainX, TrainY, 50)\n', (5345, 5365), True, 'import utills as ut\n'), ((982, 1038), 'numpy.zeros', 'np.zeros', (['[data_length[i], data_shape[1], data_shape[0]]'], {}), '([data_length[i], data_shape[1], data_shape[0]])\n', (990, 1038), True, 'import numpy as np\n'), ((1057, 1094), 'numpy.zeros', 'np.zeros', (['[data_length[i], class_num]'], {}), '([data_length[i], class_num])\n', (1065, 1094), True, 'import numpy as np\n'), ((1622, 1700), 'numpy.concatenate', 'np.concatenate', (['(data[0], data[1], data[2], data[3], data[4], data[5])'], {'axis': '(0)'}), '((data[0], data[1], data[2], data[3], data[4], data[5]), axis=0)\n', (1636, 1700), True, 'import numpy as np\n'), ((1717, 1805), 'numpy.concatenate', 'np.concatenate', (['(label[0], label[1], label[2], label[3], label[4], label[5])'], {'axis': '(0)'}), '((label[0], label[1], label[2], label[3], label[4], label[5]),\n axis=0)\n', (1731, 1805), True, 'import numpy as np\n'), ((3916, 4026), 'utills._CNNModel', 'ut._CNNModel', ([], {'img_width': 'img_width', 'img_height': 'img_height', 'kernel_info': '[[3, 2, 32, True], [3, 2, 64, True]]'}), '(img_width=img_width, img_height=img_height, kernel_info=[[3, 2,\n 32, True], [3, 2, 64, True]])\n', (3928, 4026), True, 'import utills as ut\n'), ((1841, 1901), 'numpy.concatenate', 'np.concatenate', (['(data[0], data[1], data[2], data[3])'], {'axis': '(0)'}), '((data[0], data[1], data[2], data[3]), axis=0)\n', (1855, 1901), True, 'import numpy as np\n'), ((1918, 
1982), 'numpy.concatenate', 'np.concatenate', (['(label[0], label[1], label[2], label[3])'], {'axis': '(0)'}), '((label[0], label[1], label[2], label[3]), axis=0)\n', (1932, 1982), True, 'import numpy as np\n'), ((4426, 4518), 'utills._CNNModel', 'ut._CNNModel', ([], {'img_width': 'img_width', 'img_height': 'img_height', 'kernel_info': '[[1, 1, 32, True]]'}), '(img_width=img_width, img_height=img_height, kernel_info=[[1, 1,\n 32, True]])\n', (4438, 4518), True, 'import utills as ut\n')] |
import numpy as np
from common import projection_back
EPS = 1e-9
def ilrma(mix, n_iter, n_basis=2, proj_back=True):
    """Implementation of ILRMA (Independent Low-Rank Matrix Analysis).
    This algorithm is called ILRMA1 in http://d-kitamura.net/pdf/misc/AlgorithmsForIndependentLowRankMatrixAnalysis.pdf
    It only works in determined case (n_sources == n_channels).
    Args:
        mix (numpy.ndarray): (n_frequencies, n_channels, n_frames)
            STFT representation of the observed signal.
        n_iter (int): Number of iterations.
        n_basis (int): Number of basis in the NMF model.
        proj_back (bool): If use back-projection technique.
    Returns:
        tuple[numpy.ndarray, numpy.ndarray]: Tuple of separated signal and
            separation matrix. The shapes of separated signal and separation
            matrix are (n_frequencies, n_sources, n_frames) and
            (n_sources, n_channels), respectively.
    """
    n_freq, n_src, n_frame = mix.shape
    # Demixing matrices start as identity (one per frequency bin).
    sep_mat = np.stack([np.eye(n_src, dtype=mix.dtype) for _ in range(n_freq)])
    # Nonnegative random initialisation of the NMF factors (basis @ act).
    basis = np.abs(np.random.randn(n_src, n_freq, n_basis))
    act = np.abs(np.random.randn(n_src, n_basis, n_frame))
    sep = sep_mat @ mix
    sep_pow = np.power(np.abs(sep), 2)  # (n_freq, n_src, n_frame)
    model = basis @ act  # (n_src, n_freq, n_frame)
    m_reci = 1 / model
    eye = np.tile(np.eye(n_src), (n_freq, 1, 1))
    for _ in range(n_iter):
        for src in range(n_src):
            # --- NMF basis update (multiplicative, clipped at EPS) ---
            h = (sep_pow[:, src, :] * m_reci[src]**2) @ act[src].T
            h /= m_reci[src] @ act[src].T
            h = np.sqrt(h, out=h)
            basis[src] *= h
            np.clip(basis[src], a_min=EPS, a_max=None, out=basis[src])
            model[src] = basis[src] @ act[src]
            m_reci[src] = 1 / model[src]
            # --- NMF activation update (same form, transposed) ---
            h = basis[src].T @ (sep_pow[:, src, :] * m_reci[src]**2)
            h /= basis[src].T @ m_reci[src]
            h = np.sqrt(h, out=h)
            act[src] *= h
            np.clip(act[src], a_min=EPS, a_max=None, out=act[src])
            model[src] = basis[src] @ act[src]
            m_reci[src] = 1 / model[src]
            # --- Demixing matrix update: weighted covariance per frequency
            # (EPS * eye regularises the solve against singular matrices) ---
            h = m_reci[src, :, :, None] @ np.ones((1, n_src))
            h = mix.conj() @ (mix.swapaxes(1, 2) * h)
            u_mat = h.swapaxes(1, 2) / n_frame
            h = sep_mat @ u_mat + EPS * eye
            sep_mat[:, src, :] = np.linalg.solve(h, eye[:, :, src]).conj()
            # Normalise the updated row: w <- w / sqrt(w^H U w).
            h = sep_mat[:, src, None, :] @ u_mat
            h = (h @ sep_mat[:, src, :, None].conj()).squeeze(2)
            sep_mat[:, src, :] = (sep_mat[:, src, :] / np.sqrt(h).conj())
        # Recompute separated spectra and power in place.
        np.matmul(sep_mat, mix, out=sep)
        np.power(np.abs(sep), 2, out=sep_pow)
        np.clip(sep_pow, a_min=EPS, a_max=None, out=sep_pow)
        # Per-source scale normalisation to keep the NMF model consistent
        # with the rescaled demixing filters.
        for src in range(n_src):
            lbd = np.sqrt(np.sum(sep_pow[:, src, :]) / n_freq / n_frame)
            sep_mat[:, src, :] /= lbd
            sep_pow[:, src, :] /= lbd ** 2
            model[src] /= lbd ** 2
            basis[src] /= lbd ** 2
    # Back-projection technique (resolve scale ambiguity against channel 0)
    if proj_back:
        z = projection_back(sep, mix[:, 0, :])
        sep *= np.conj(z[:, :, None])
    return sep, sep_mat
| [
"numpy.clip",
"numpy.abs",
"numpy.eye",
"numpy.linalg.solve",
"numpy.sqrt",
"numpy.ones",
"numpy.conj",
"numpy.sum",
"numpy.matmul",
"common.projection_back",
"numpy.random.randn"
] | [((1098, 1137), 'numpy.random.randn', 'np.random.randn', (['n_src', 'n_freq', 'n_basis'], {}), '(n_src, n_freq, n_basis)\n', (1113, 1137), True, 'import numpy as np\n'), ((1156, 1196), 'numpy.random.randn', 'np.random.randn', (['n_src', 'n_basis', 'n_frame'], {}), '(n_src, n_basis, n_frame)\n', (1171, 1196), True, 'import numpy as np\n'), ((1245, 1256), 'numpy.abs', 'np.abs', (['sep'], {}), '(sep)\n', (1251, 1256), True, 'import numpy as np\n'), ((1383, 1396), 'numpy.eye', 'np.eye', (['n_src'], {}), '(n_src)\n', (1389, 1396), True, 'import numpy as np\n'), ((2617, 2649), 'numpy.matmul', 'np.matmul', (['sep_mat', 'mix'], {'out': 'sep'}), '(sep_mat, mix, out=sep)\n', (2626, 2649), True, 'import numpy as np\n'), ((2704, 2756), 'numpy.clip', 'np.clip', (['sep_pow'], {'a_min': 'EPS', 'a_max': 'None', 'out': 'sep_pow'}), '(sep_pow, a_min=EPS, a_max=None, out=sep_pow)\n', (2711, 2756), True, 'import numpy as np\n'), ((3078, 3112), 'common.projection_back', 'projection_back', (['sep', 'mix[:, 0, :]'], {}), '(sep, mix[:, 0, :])\n', (3093, 3112), False, 'from common import projection_back\n'), ((3128, 3150), 'numpy.conj', 'np.conj', (['z[:, :, None]'], {}), '(z[:, :, None])\n', (3135, 3150), True, 'import numpy as np\n'), ((1023, 1053), 'numpy.eye', 'np.eye', (['n_src'], {'dtype': 'mix.dtype'}), '(n_src, dtype=mix.dtype)\n', (1029, 1053), True, 'import numpy as np\n'), ((1601, 1618), 'numpy.sqrt', 'np.sqrt', (['h'], {'out': 'h'}), '(h, out=h)\n', (1608, 1618), True, 'import numpy as np\n'), ((1659, 1717), 'numpy.clip', 'np.clip', (['basis[src]'], {'a_min': 'EPS', 'a_max': 'None', 'out': 'basis[src]'}), '(basis[src], a_min=EPS, a_max=None, out=basis[src])\n', (1666, 1717), True, 'import numpy as np\n'), ((1937, 1954), 'numpy.sqrt', 'np.sqrt', (['h'], {'out': 'h'}), '(h, out=h)\n', (1944, 1954), True, 'import numpy as np\n'), ((1993, 2047), 'numpy.clip', 'np.clip', (['act[src]'], {'a_min': 'EPS', 'a_max': 'None', 'out': 'act[src]'}), '(act[src], a_min=EPS, a_max=None, 
out=act[src])\n', (2000, 2047), True, 'import numpy as np\n'), ((2667, 2678), 'numpy.abs', 'np.abs', (['sep'], {}), '(sep)\n', (2673, 2678), True, 'import numpy as np\n'), ((2180, 2199), 'numpy.ones', 'np.ones', (['(1, n_src)'], {}), '((1, n_src))\n', (2187, 2199), True, 'import numpy as np\n'), ((2378, 2412), 'numpy.linalg.solve', 'np.linalg.solve', (['h', 'eye[:, :, src]'], {}), '(h, eye[:, :, src])\n', (2393, 2412), True, 'import numpy as np\n'), ((2589, 2599), 'numpy.sqrt', 'np.sqrt', (['h'], {}), '(h)\n', (2596, 2599), True, 'import numpy as np\n'), ((2817, 2843), 'numpy.sum', 'np.sum', (['sep_pow[:, src, :]'], {}), '(sep_pow[:, src, :])\n', (2823, 2843), True, 'import numpy as np\n')] |
import random
import math
from functools import reduce
import torch
import torch.nn as nn
def random_z_v(z_dim, z_num):
    """Draw a (z_num, z_dim) latent matrix from a zero-mean Normal, std 0.1."""
    mean = torch.zeros([z_num, z_dim])
    prior = torch.distributions.normal.Normal(mean, 0.1)
    return prior.sample()
class HyperNN(nn.Module):
    """Hypernetwork that generates the weights of a policy network (pnn).

    A bank of latent vectors ``z_v`` is pushed through a small MLP (``hnn``)
    whose outputs are reshaped into the pnn's parameter tensors.  With
    tiling, each pnn parameter is covered by several fixed-size output
    chunks; without it, one latent vector per parameter.  ``evolve`` mutates
    either the latent vectors or the hypernetwork weights.
    """
    def __init__(self, obs_space, action_space, pnn, tiling=64, shrink=1):
        super().__init__()
        self._tiling = tiling
        # Latent width; `shrink` scales hypernetwork capacity up or down.
        self.z_dim = int(32 * shrink)
        # Probability threshold used in evolve() to pick which part mutates.
        self.z_v_evolve_prob = 0.5
        self.pnn = pnn(obs_space, action_space)
        self.pnn_modules = list(dict(self.pnn.named_children()).keys())
        # Output width of hnn (= the tiling chunk size when tiling is set).
        self.out_features = self._get_out_features()
        self.z_num, self.z_indexer = self._get_z_num()
        in_size = int(128 * shrink)
        self.hnn = nn.Sequential(
            nn.Linear(self.z_dim, in_size),
            nn.ReLU(),
            nn.Linear(in_size, in_size),
            nn.ReLU(),
            nn.Linear(in_size, self.out_features),
        )
        # Buffer (not a Parameter): z vectors are evolved, not SGD-trained.
        self.register_buffer('z_v', random_z_v(self.z_dim, self.z_num))
        self.add_tensors = {}  # reusable noise tensors, keyed by shape
        self._init_nn()
    def forward(self, layer_index=None):
        """Generate weight chunks: all of them, or those of one pnn module.

        `layer_index` may be an int (index into pnn_modules) or a module
        name used as a key into z_indexer.
        """
        if layer_index is None:
            return [self.hnn(x) for x in self.z_v]
        else:
            if isinstance(layer_index, int):
                module_name = self.pnn_modules[layer_index]
            else:
                module_name = layer_index
            z_shard = self.z_indexer[module_name]
            return [self.hnn(x) for x in self.z_v[z_shard]]
    def evolve(self, sigma):
        """Mutate the model with Gaussian noise of scale `sigma`.

        With probability 1 - z_v_evolve_prob, perturb the z vectors of one
        randomly chosen pnn module; otherwise perturb the hnn weights.
        """
        coin_toss = random.random()
        if coin_toss > self.z_v_evolve_prob:
            # evolve z vector
            module_idx = math.floor(random.random() * len(self.pnn_modules))
            module_name = self.pnn_modules[module_idx]
            # perturb every z shard whose parameter name contains the module name
            for name in self.z_indexer:
                if module_name in name:
                    z_shard = self.z_indexer[name]
                    self.z_v[z_shard] += torch.distributions.normal.Normal(
                        torch.zeros([z_shard.stop - z_shard.start, self.z_dim]),
                        sigma
                    ).sample()
            self._update_pnn()
        else:
            # evolve weights (z vectors excluded by name)
            params = self.named_parameters()
            for name, tensor in sorted(params):
                if 'z_v' not in name:
                    to_add = self.add_tensors[tensor.size()]
                    to_add.normal_(0.0, sigma)
                    tensor.data.add_(to_add)
            self._update_pnn()
    def evaluate(self, env, max_eval, render=False, fps=60):
        """Delegate rollout evaluation to the generated policy network."""
        return self.pnn.evaluate(env, max_eval, render, fps)
    def _init_nn(self):
        """Kaiming-init hnn weights, zero the biases, pre-allocate noise buffers."""
        for name, tensor in self.named_parameters():
            if tensor.size() not in self.add_tensors:
                self.add_tensors[tensor.size()] = torch.Tensor(tensor.size())
            if 'weight' in name:
                nn.init.kaiming_normal_(tensor)
            elif 'z_v' not in name:
                tensor.data.zero_()
        self._update_pnn()
    # NOTE(review): the original comment here said "tiling not supported
    # (but it should be a bit faster, performance gain unclear)", yet a
    # tiling branch exists below -- verify which path is actually intended.
    def _update_pnn(self):
        """Regenerate all pnn parameters from the current z vectors."""
        weights = self()
        if self._tiling:
            for name, param in self.pnn.named_parameters():
                z_shard = self.z_indexer[name]
                param.data = self._shape_w(weights[z_shard], param.shape).data
        else:
            i = 0
            for name, param in self.pnn.named_parameters():
                param.data = self._shape_w(weights[i], param.shape).data
                i += 1
    def _shape_w(self, w, layer_shape):
        """Concatenate generated chunks, trim tail padding, reshape to layer_shape."""
        if isinstance(w, list):
            w = torch.cat(w)
        w = torch.Tensor(w)
        # keep only the first prod(layer_shape) entries (tiling over-produces)
        w = torch.narrow(w, 0, 0, reduce((lambda x, y: x * y), layer_shape))
        w = w.view(layer_shape)
        return w
    def _get_z_num(self):
        """Return total z-vector count and a name -> slice index into z_v."""
        z_num = 0
        z_indexer = {}
        # tiling
        for name, param in self.pnn.named_parameters():
            if self._tiling is not None:
                layer_shape = param.shape
                layer_size = reduce((lambda x, y: x * y), layer_shape, 1)
                # enough out_features-sized chunks to cover this parameter
                z_shard = math.ceil(layer_size / self.out_features)
                z_indexer[name] = slice(z_num, z_num + z_shard, 1)
                z_num += z_shard
            else:
                z_num += 1
        return z_num, z_indexer
    def _get_out_features(self):
        """hnn output width: the tiling chunk size, or the largest weight size."""
        if self._tiling is not None:
            return self._tiling
        ret = 0
        for name, param in self.pnn.named_parameters():
            if 'weight' in name:
                layer_shape = param.shape
                layer_size = reduce((lambda x, y: x * y), layer_shape)
                if layer_size > ret:
                    ret = layer_size
        return ret
| [
"torch.nn.ReLU",
"math.ceil",
"functools.reduce",
"torch.Tensor",
"torch.nn.init.kaiming_normal_",
"torch.nn.Linear",
"random.random",
"torch.zeros",
"torch.cat"
] | [((1585, 1600), 'random.random', 'random.random', ([], {}), '()\n', (1598, 1600), False, 'import random\n'), ((3713, 3728), 'torch.Tensor', 'torch.Tensor', (['w'], {}), '(w)\n', (3725, 3728), False, 'import torch\n'), ((813, 843), 'torch.nn.Linear', 'nn.Linear', (['self.z_dim', 'in_size'], {}), '(self.z_dim, in_size)\n', (822, 843), True, 'import torch.nn as nn\n'), ((857, 866), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (864, 866), True, 'import torch.nn as nn\n'), ((880, 907), 'torch.nn.Linear', 'nn.Linear', (['in_size', 'in_size'], {}), '(in_size, in_size)\n', (889, 907), True, 'import torch.nn as nn\n'), ((921, 930), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (928, 930), True, 'import torch.nn as nn\n'), ((944, 981), 'torch.nn.Linear', 'nn.Linear', (['in_size', 'self.out_features'], {}), '(in_size, self.out_features)\n', (953, 981), True, 'import torch.nn as nn\n'), ((3688, 3700), 'torch.cat', 'torch.cat', (['w'], {}), '(w)\n', (3697, 3700), False, 'import torch\n'), ((3763, 3802), 'functools.reduce', 'reduce', (['(lambda x, y: x * y)', 'layer_shape'], {}), '(lambda x, y: x * y, layer_shape)\n', (3769, 3802), False, 'from functools import reduce\n'), ((222, 249), 'torch.zeros', 'torch.zeros', (['[z_num, z_dim]'], {}), '([z_num, z_dim])\n', (233, 249), False, 'import torch\n'), ((2929, 2960), 'torch.nn.init.kaiming_normal_', 'nn.init.kaiming_normal_', (['tensor'], {}), '(tensor)\n', (2952, 2960), True, 'import torch.nn as nn\n'), ((4110, 4152), 'functools.reduce', 'reduce', (['(lambda x, y: x * y)', 'layer_shape', '(1)'], {}), '(lambda x, y: x * y, layer_shape, 1)\n', (4116, 4152), False, 'from functools import reduce\n'), ((4181, 4222), 'math.ceil', 'math.ceil', (['(layer_size / self.out_features)'], {}), '(layer_size / self.out_features)\n', (4190, 4222), False, 'import math\n'), ((4683, 4722), 'functools.reduce', 'reduce', (['(lambda x, y: x * y)', 'layer_shape'], {}), '(lambda x, y: x * y, layer_shape)\n', (4689, 4722), False, 'from functools 
import reduce\n'), ((1712, 1727), 'random.random', 'random.random', ([], {}), '()\n', (1725, 1727), False, 'import random\n'), ((2040, 2095), 'torch.zeros', 'torch.zeros', (['[z_shard.stop - z_shard.start, self.z_dim]'], {}), '([z_shard.stop - z_shard.start, self.z_dim])\n', (2051, 2095), False, 'import torch\n')] |
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# [0,0] = TN
# [1,1] = TP
# [0,1] = FP
# [1,0] = FN
# cm is a confusion matrix
# Accuracy: (TP + TN) / Total
def accuracy(cm: np.ndarray) -> float:
    """Return (TP + TN) / total from a 2x2 confusion matrix.

    Annotation corrected from pd.DataFrame: tuple indexing ``cm[0, 0]``
    requires a numpy array (as returned by confusion_matrix).
    """
    return (cm[0, 0] + cm[1, 1]) / cm.sum()
# Precision: TP / (TP + FP)
def precision(cm: np.ndarray) -> float:
    """Return precision TP / (TP + FP) from a 2x2 confusion matrix.

    Annotation corrected from pd.DataFrame: tuple indexing ``cm[1, 1]``
    requires a numpy array (as returned by confusion_matrix).
    """
    return cm[1, 1] / (cm[1, 1] + cm[0, 1])
# False positive rate: FP / N = FP / (FP + TN)
def false_positive(cm: np.ndarray) -> float:
    """Return the false positive rate FP / (FP + TN).

    Annotation corrected from pd.DataFrame: tuple indexing requires a
    numpy array (as returned by confusion_matrix).
    """
    return cm[0, 1] / (cm[0, 0] + cm[0, 1])
# True positive rate: TP / P = TP / (TP + FN)
# Equivalent to sensitivity/recall
def true_positive(cm: np.ndarray) -> float:
    """Return the true positive rate TP / (TP + FN), i.e. recall/sensitivity.

    Annotation corrected from pd.DataFrame: tuple indexing requires a
    numpy array (as returned by confusion_matrix).
    """
    return cm[1, 1] / (cm[1, 0] + cm[1, 1])
# F1 score: 2 * precision * recall / (precision + recall)
def f_score(cm: np.ndarray) -> float:
    """Return the F1 score: harmonic mean of precision and recall.

    Annotation corrected from pd.DataFrame (the helpers tuple-index, which
    requires a numpy array).  Precision and recall are now computed once
    each instead of twice, as in the original expression.
    """
    p = precision(cm)
    r = true_positive(cm)  # true_positive is recall/sensitivity
    return 2 * p * r / (p + r)
# Returns a confusion matrix for labels and predictions
# [[TN, FP],
# [FN, TP]]
def confusion_matrix(y, y_hat):
    """Build a 2x2 confusion matrix: rows are actual, columns predicted.

    Layout: [[TN, FP], [FN, TP]].  `y` and `y_hat` are numpy arrays of
    0/1 labels (possibly float-typed).
    """
    matrix = np.zeros((2, 2))
    rows = y.astype(int)
    cols = y_hat.astype(int)
    np.add.at(matrix, (rows, cols), 1)
    return matrix
def visualize_cm(cm):
    """Render a 2x2 confusion matrix as an annotated seaborn heatmap.

    Labels the axes 'Actual' (rows) and 'Predicted' (columns); the figure
    is created but not shown (caller is expected to call plt.show()).
    """
    df_cm = pd.DataFrame(cm, columns=['0', '1'], index=['0', '1'])
    df_cm.index.name = 'Actual'
    df_cm.columns.name = 'Predicted'
    plt.figure(figsize=(5, 3))
    sns.heatmap(df_cm, cmap='Blues', annot=True, annot_kws={'size': 16}, fmt='g')
# Function to return two shuffled arrays, is a deep copy
def shuffle(x, y):
    """Return deep copies of x and y permuted by the same random order.

    One random seed is drawn and re-applied before each shuffle so both
    copies receive an identical permutation; the inputs are untouched.
    Note: this reseeds numpy's global RNG as a side effect.
    """
    seed = np.random.randint(0, 10000)
    shuffled = []
    for source in (x, y):
        clone = source.copy()
        np.random.seed(seed)
        np.random.shuffle(clone)
        shuffled.append(clone)
    return shuffled[0], shuffled[1]
# Shuffles and splits data into two sets
# test split will be 1/size of the data
def split(x, y, size):
    """Shuffle x/y together, then carve off 1/size of the rows as a test set.

    Returns (x_train, x_test, y_train, y_test).
    """
    xs, ys = shuffle(x, y)
    x_cut = int(xs.shape[0] / size)
    y_cut = int(ys.shape[0] / size)
    return xs[x_cut:], xs[:x_cut], ys[y_cut:], ys[:y_cut]
def cross_validation(k, X, Y, model, lr=0.5, regularization=0, eps=1e-2, verbose=True):
    """k-fold cross-validation for a classifier class `model`.

    `model` is constructed as model(features, labels) and must expose
    fit(lr, verbose=, regularization=, eps=) and predict(features).
    Returns (mean accuracy over folds, array of per-fold confusion matrices).
    """
    # randomize X and Y by shuffling
    x, y = shuffle(X, Y)
    # split into k folds
    x_folds = np.array_split(x, k)
    y_folds = np.array_split(y, k)
    acc = 0
    f1 = 0
    prec = 0
    rec = 0
    cms = []
    for i in range(k):
        # fold i is the validation split; the remaining folds form training
        validation_features = x_folds[i]
        validation_labels = np.squeeze(y_folds[i])
        # NOTE(review): np.delete over the fold list assumes equal-sized
        # folds (len(X) divisible by k); ragged folds would not stack -- verify.
        train_features = np.delete(x_folds, i, axis=0)
        train_features = np.concatenate(train_features)
        train_labels = np.delete(y_folds, i, axis=0)
        train_labels = np.concatenate(train_labels)
        m = model(train_features, train_labels)
        m.fit(lr, verbose=False, regularization=regularization, eps=eps)
        predicted_labels = m.predict(validation_features)
        cm = confusion_matrix(validation_labels, predicted_labels)
        # accumulate per-fold metrics, averaged over k after the loop
        acc += accuracy(cm)
        f1 += f_score(cm)
        prec += precision(cm)
        rec += true_positive(cm)
        cms.append(cm)
    if verbose:
        print("Accuracy:", acc/k, "Precision:", prec/k, "Recall:", rec/k, "F1:", f1/k)
    # Return the accuracy and array of confusion matrices
    return acc/k, np.array(cms)
# assume 5 fold for now
def cross_validation_naive(k, df, model, label, cont=[], cat=[], bin=[]):
    """k-fold cross-validation for a DataFrame-based (naive Bayes style) model.

    `model` is constructed as model(train_df, label, cont, cat, bin) and must
    expose predict(row).  Returns (mean accuracy, array of confusion matrices).

    NOTE(review): `cont`/`cat`/`bin` are mutable default arguments (and `bin`
    shadows the builtin); they are only passed through here, so no aliasing
    bug occurs, but callers should still pass explicit lists.
    """
    df = df.copy(deep=True)
    # NOTE(review): shuffling df.values relies on it being a writable view of
    # the frame's data; on newer pandas it may be a copy -- verify rows really
    # get shuffled.
    np.random.shuffle(df.values)
    df = df.reset_index(drop=True)
    indices = np.arange(df.shape[0])
    indices = np.array_split(indices, k)
    acc = 0
    f1 = 0
    prec = 0
    rec = 0
    cms = []
    for i in range(k):
        # fold i validates; the remaining index folds train
        val = df.loc[indices[i]]
        train = df.loc[np.concatenate(np.delete(indices, i, axis=0))]
        m = model(train, label, cont, cat, bin)
        pred = val.apply(m.predict, axis=1)
        cm = confusion_matrix(val[label], pred)
        acc += accuracy(cm)
        f1 += f_score(cm)
        prec += precision(cm)
        rec += true_positive(cm)
        cms.append(cm)
    print("Accuracy:", acc / k, "Precision:", prec / k, "Recall:", rec / k, "F1:", f1 / k)
    # Return the accuracy and array of confusion matrices
    return acc / k, np.array(cms)
def cv_task_2(k, X, Y, model, lr=0.5, regularization=0, eps=1e-2, iterations=200):
    """k-fold CV recording train/validation accuracy after every epoch.

    Trains `model` one epoch at a time for `iterations` epochs per fold and
    samples accuracy on both splits after each epoch.  Returns a tuple of
    (train_acc_history, val_acc_history), each shaped (k, iterations).
    """
    # randomize X and Y by shuffling
    x, y = shuffle(X, Y)
    # split into k folds
    x_folds = np.array_split(x, k)
    y_folds = np.array_split(y, k)
    train_acc_history = np.empty([k, iterations])
    val_acc_history = np.empty([k, iterations])
    for i in range(k):
        val_features = x_folds[i]
        val_labels = np.squeeze(y_folds[i])
        # BUG FIX: np.delete was called without axis=, which flattens the
        # fold stack before deleting a single scalar element and breaks the
        # subsequent concatenate.  axis=0 removes fold i, matching
        # cross_validation above.
        train_features = np.delete(x_folds, i, axis=0)
        train_features = np.concatenate(train_features)
        train_labels = np.delete(y_folds, i, axis=0)
        train_labels = np.concatenate(train_labels)
        m = model(train_features, train_labels)
        costs = []
        train_accuracies = []
        val_accuracies = []
        # Train one epoch at a time so accuracy can be sampled per epoch.
        for j in range(iterations):
            # fit model for 1 iteration
            cost = m.fit(lr=lr, verbose=False, regularization=regularization, eps=None, epochs=1)
            costs.append(cost)
            # predict the labels and eval accuracy for train and val split
            val_pred_labels = m.predict(val_features)
            train_pred_labels = m.predict(train_features)
            cm_val = confusion_matrix(val_labels, val_pred_labels)
            cm_train = confusion_matrix(train_labels, train_pred_labels)
            val_accuracies.append(accuracy(cm_val))
            train_accuracies.append(accuracy(cm_train))
        # store the per-epoch accuracies for this fold
        train_acc_history[i] = np.array(train_accuracies)
        val_acc_history[i] = np.array(val_accuracies)
    return train_acc_history, val_acc_history
def grid_search(learning_rates, epsilons, lambdas, x, y, model):
    """Exhaustive search over (lr, eps, lambda) using 5-fold cross-validation.

    Returns (best mean accuracy, [best lr, best eps, best lambda]) and prints
    precision/recall/F1 averaged over the best configuration's folds.
    """
    max_acc = 0
    arg_max = [0, 0, 0]
    # BUG FIX: max_cm was only assigned inside the improvement branch, so it
    # was unbound (UnboundLocalError) if no configuration beat accuracy 0.
    max_cm = None
    for lr in learning_rates:
        for eps in epsilons:
            for regularization in lambdas:
                acc, cm = cross_validation(5, x, y, lr=lr, eps=eps, regularization=regularization, model=model, verbose=False)
                if acc > max_acc:
                    max_acc = acc
                    arg_max = [lr, eps, regularization]
                    max_cm = cm
    f1 = []
    prec = []
    rec = []
    if max_cm is not None:  # metrics only when a best configuration was found
        for cm in max_cm:
            f1.append(f_score(cm))
            prec.append(precision(cm))
            rec.append(true_positive(cm))
    f1 = np.mean(f1)
    prec = np.mean(prec)
    rec = np.mean(rec)
    print(arg_max)
    print("Accuracy:", max_acc, "Precision:", prec, "Recall:", rec, "F1:", f1)
    return max_acc, arg_max
"numpy.mean",
"numpy.delete",
"seaborn.heatmap",
"numpy.squeeze",
"numpy.array_split",
"numpy.array",
"numpy.random.randint",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.random.seed",
"numpy.empty",
"numpy.concatenate",
"pandas.DataFrame",
"numpy.arange",
"numpy.random.shuffle"
] | [((1018, 1034), 'numpy.zeros', 'np.zeros', (['(2, 2)'], {}), '((2, 2))\n', (1026, 1034), True, 'import numpy as np\n'), ((1142, 1196), 'pandas.DataFrame', 'pd.DataFrame', (['cm'], {'columns': "['0', '1']", 'index': "['0', '1']"}), "(cm, columns=['0', '1'], index=['0', '1'])\n", (1154, 1196), True, 'import pandas as pd\n'), ((1270, 1296), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 3)'}), '(figsize=(5, 3))\n', (1280, 1296), True, 'import matplotlib.pyplot as plt\n'), ((1301, 1378), 'seaborn.heatmap', 'sns.heatmap', (['df_cm'], {'cmap': '"""Blues"""', 'annot': '(True)', 'annot_kws': "{'size': 16}", 'fmt': '"""g"""'}), "(df_cm, cmap='Blues', annot=True, annot_kws={'size': 16}, fmt='g')\n", (1312, 1378), True, 'import seaborn as sns\n'), ((1512, 1539), 'numpy.random.randint', 'np.random.randint', (['(0)', '(10000)'], {}), '(0, 10000)\n', (1529, 1539), True, 'import numpy as np\n'), ((1544, 1564), 'numpy.random.seed', 'np.random.seed', (['rand'], {}), '(rand)\n', (1558, 1564), True, 'import numpy as np\n'), ((1569, 1594), 'numpy.random.shuffle', 'np.random.shuffle', (['x_copy'], {}), '(x_copy)\n', (1586, 1594), True, 'import numpy as np\n'), ((1599, 1619), 'numpy.random.seed', 'np.random.seed', (['rand'], {}), '(rand)\n', (1613, 1619), True, 'import numpy as np\n'), ((1624, 1649), 'numpy.random.shuffle', 'np.random.shuffle', (['y_copy'], {}), '(y_copy)\n', (1641, 1649), True, 'import numpy as np\n'), ((2225, 2245), 'numpy.array_split', 'np.array_split', (['x', 'k'], {}), '(x, k)\n', (2239, 2245), True, 'import numpy as np\n'), ((2260, 2280), 'numpy.array_split', 'np.array_split', (['y', 'k'], {}), '(y, k)\n', (2274, 2280), True, 'import numpy as np\n'), ((3390, 3418), 'numpy.random.shuffle', 'np.random.shuffle', (['df.values'], {}), '(df.values)\n', (3407, 3418), True, 'import numpy as np\n'), ((3469, 3491), 'numpy.arange', 'np.arange', (['df.shape[0]'], {}), '(df.shape[0])\n', (3478, 3491), True, 'import numpy as np\n'), ((3506, 3532), 
'numpy.array_split', 'np.array_split', (['indices', 'k'], {}), '(indices, k)\n', (3520, 3532), True, 'import numpy as np\n'), ((4387, 4407), 'numpy.array_split', 'np.array_split', (['x', 'k'], {}), '(x, k)\n', (4401, 4407), True, 'import numpy as np\n'), ((4422, 4442), 'numpy.array_split', 'np.array_split', (['y', 'k'], {}), '(y, k)\n', (4436, 4442), True, 'import numpy as np\n'), ((4468, 4493), 'numpy.empty', 'np.empty', (['[k, iterations]'], {}), '([k, iterations])\n', (4476, 4493), True, 'import numpy as np\n'), ((4516, 4541), 'numpy.empty', 'np.empty', (['[k, iterations]'], {}), '([k, iterations])\n', (4524, 4541), True, 'import numpy as np\n'), ((6602, 6613), 'numpy.mean', 'np.mean', (['f1'], {}), '(f1)\n', (6609, 6613), True, 'import numpy as np\n'), ((6625, 6638), 'numpy.mean', 'np.mean', (['prec'], {}), '(prec)\n', (6632, 6638), True, 'import numpy as np\n'), ((6649, 6661), 'numpy.mean', 'np.mean', (['rec'], {}), '(rec)\n', (6656, 6661), True, 'import numpy as np\n'), ((2436, 2458), 'numpy.squeeze', 'np.squeeze', (['y_folds[i]'], {}), '(y_folds[i])\n', (2446, 2458), True, 'import numpy as np\n'), ((2485, 2514), 'numpy.delete', 'np.delete', (['x_folds', 'i'], {'axis': '(0)'}), '(x_folds, i, axis=0)\n', (2494, 2514), True, 'import numpy as np\n'), ((2540, 2570), 'numpy.concatenate', 'np.concatenate', (['train_features'], {}), '(train_features)\n', (2554, 2570), True, 'import numpy as np\n'), ((2594, 2623), 'numpy.delete', 'np.delete', (['y_folds', 'i'], {'axis': '(0)'}), '(y_folds, i, axis=0)\n', (2603, 2623), True, 'import numpy as np\n'), ((2647, 2675), 'numpy.concatenate', 'np.concatenate', (['train_labels'], {}), '(train_labels)\n', (2661, 2675), True, 'import numpy as np\n'), ((3245, 3258), 'numpy.array', 'np.array', (['cms'], {}), '(cms)\n', (3253, 3258), True, 'import numpy as np\n'), ((4181, 4194), 'numpy.array', 'np.array', (['cms'], {}), '(cms)\n', (4189, 4194), True, 'import numpy as np\n'), ((4622, 4644), 'numpy.squeeze', 'np.squeeze', 
(['y_folds[i]'], {}), '(y_folds[i])\n', (4632, 4644), True, 'import numpy as np\n'), ((4671, 4692), 'numpy.delete', 'np.delete', (['x_folds', 'i'], {}), '(x_folds, i)\n', (4680, 4692), True, 'import numpy as np\n'), ((4718, 4748), 'numpy.concatenate', 'np.concatenate', (['train_features'], {}), '(train_features)\n', (4732, 4748), True, 'import numpy as np\n'), ((4772, 4801), 'numpy.delete', 'np.delete', (['y_folds', 'i'], {'axis': '(0)'}), '(y_folds, i, axis=0)\n', (4781, 4801), True, 'import numpy as np\n'), ((4825, 4853), 'numpy.concatenate', 'np.concatenate', (['train_labels'], {}), '(train_labels)\n', (4839, 4853), True, 'import numpy as np\n'), ((5760, 5786), 'numpy.array', 'np.array', (['train_accuracies'], {}), '(train_accuracies)\n', (5768, 5786), True, 'import numpy as np\n'), ((5816, 5840), 'numpy.array', 'np.array', (['val_accuracies'], {}), '(val_accuracies)\n', (5824, 5840), True, 'import numpy as np\n'), ((3691, 3720), 'numpy.delete', 'np.delete', (['indices', 'i'], {'axis': '(0)'}), '(indices, i, axis=0)\n', (3700, 3720), True, 'import numpy as np\n')] |
from __future__ import print_function
import numba.unittest_support as unittest
from numba.utils import PYVERSION
from .support import TestCase, enable_pyobj_flags
def build_set_usecase(*args):
    """Return a nullary function whose body is a set literal of *args*."""
    literal = ', '.join(repr(a) for a in args)
    source = """if 1:
    def build_set():
        return {%s}
    """ % literal
    namespace = {}
    exec(compile(source, '<>', 'exec'), namespace)
    return namespace['build_set']
# Skip marker for tests that exercise set-literal syntax, which Python
# only gained in 2.7.
needs_set_literals = unittest.skipIf(PYVERSION < (2, 7),
                                    "set literals unavailable before Python 2.7")
class SetTestCase(TestCase):
    """Tests compiling functions that build set literals (object mode)."""
    @needs_set_literals
    def test_build_set(self, flags=enable_pyobj_flags):
        """A homogeneous set literal with a duplicate element compiles and runs."""
        pyfunc = build_set_usecase(1, 2, 3, 2)
        self.run_nullary_func(pyfunc, flags=flags)
    @needs_set_literals
    def test_build_heterogenous_set(self, flags=enable_pyobj_flags):
        """Mixed int/float/complex elements are supported."""
        pyfunc = build_set_usecase(1, 2.0, 3j, 2)
        self.run_nullary_func(pyfunc, flags=flags)
        # Check that items are inserted in the right order (here the
        # result will be {2}, not {2.0})
        pyfunc = build_set_usecase(2.0, 2)
        got, expected = self.run_nullary_func(pyfunc, flags=flags)
        self.assertIs(type(got.pop()), type(expected.pop()))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| [
"numba.unittest_support.main",
"numba.unittest_support.skipIf"
] | [((423, 508), 'numba.unittest_support.skipIf', 'unittest.skipIf', (['(PYVERSION < (2, 7))', '"""set literals unavailable before Python 2.7"""'], {}), "(PYVERSION < (2, 7),\n 'set literals unavailable before Python 2.7')\n", (438, 508), True, 'import numba.unittest_support as unittest\n'), ((1261, 1276), 'numba.unittest_support.main', 'unittest.main', ([], {}), '()\n', (1274, 1276), True, 'import numba.unittest_support as unittest\n')] |
from pep272_encryption import PEP272Cipher, MODE_ECB
# RC4 is a stream cipher: it processes one byte at a time and accepts
# keys of any length (PEP 272 uses key_size == 0 for "variable").
block_size = 1
key_size = 0
def new(*args, **kwargs):
    """PEP 272 module-level factory: build and return an `RC4Cipher`."""
    return RC4Cipher(*args, **kwargs)
class RC4Cipher(PEP272Cipher):
    """RC4 stream cipher exposed through the PEP 272 block-cipher API.

    The key-scheduling algorithm (KSA) runs once in ``__init__``; every
    call to ``encrypt_block`` advances the PRGA by one step and XORs the
    resulting keystream byte into the single-byte block.
    """

    block_size = 1
    key_size = 0

    def __init__(self, key, mode=MODE_ECB, **kwargs):
        if mode != MODE_ECB:
            raise ValueError("Stream ciphers only support ECB mode")
        # KSA: permute the identity array 0..255 under the key bytes.
        state = list(range(256))
        swap = 0
        for idx in range(256):
            swap = (swap + state[idx] + key[idx % len(key)]) % 256
            state[idx], state[swap] = state[swap], state[idx]
        self.S = state
        self.i = self.j = 0
        PEP272Cipher.__init__(self, key, mode, **kwargs)

    def encrypt_block(self, key, block, **kwargs):
        # One PRGA step: advance (i, j), swap, derive keystream byte.
        self.i = (self.i + 1) % 256
        self.j = (self.j + self.S[self.i]) % 256
        self.S[self.i], self.S[self.j] = self.S[self.j], self.S[self.i]
        keystream = self.S[(self.S[self.i] + self.S[self.j]) % 256]
        return bytes([block[0] ^ keystream])

    def decrypt_block(self, key, block, **kwargs):
        # RC4 is symmetric: decryption applies the same XOR keystream.
        return self.encrypt_block(key, block, **kwargs)
# Known-answer test: first 16 keystream bytes of RC4 with key 01 02 03 04 05.
assert RC4Cipher(b'\x01\x02\x03\x04\x05').encrypt(b'\x00'*16) \
       == b"\xb29c\x05\xf0=\xc0'\xcc\xc3RJ\n\x11\x18\xa8"
| [
"pep272_encryption.PEP272Cipher.__init__"
] | [((600, 648), 'pep272_encryption.PEP272Cipher.__init__', 'PEP272Cipher.__init__', (['self', 'key', 'mode'], {}), '(self, key, mode, **kwargs)\n', (621, 648), False, 'from pep272_encryption import PEP272Cipher, MODE_ECB\n')] |
"""
color_scheme_matcher.
Licensed under MIT.
Copyright (C) 2012 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of
the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
---------------------
Original code has been heavily modifed by <NAME> <<EMAIL>> for the ExportHtml project.
Algorithm has been split out into a separate library and been enhanced with a number of features.
"""
from __future__ import absolute_import
import sublime
import re
from .rgba import RGBA
from os import path
from collections import namedtuple
from plistlib import readPlistFromBytes
class SchemeColors(
    namedtuple(
        'SchemeColors',
        ['fg', 'fg_simulated', 'bg', "bg_simulated", "style", "fg_selector", "bg_selector", "style_selectors"]
    )
):
    """Resolved colors for a scope: fg/bg (raw and transparency-simulated),
    the font style string, and the scheme selectors that supplied each part.

    Note: the ``verbose`` keyword previously passed to ``namedtuple`` was
    removed in Python 3.7 and made this class definition raise TypeError.
    """
class SchemeSelectors(namedtuple('SchemeSelectors', ['name', 'scope'])):
    """The (name, scope) pair of the color-scheme rule that matched.

    The ``verbose`` keyword was removed from ``namedtuple`` in Python 3.7,
    so it is no longer passed here.
    """
def sublime_format_path(pth):
    """Format a filesystem path for Sublime's internal (forward-slash) form.

    On Windows, a leading drive letter like ``C:\\`` or ``C:/`` becomes
    ``C/``; all backslashes are normalized to forward slashes.
    """
    drive = re.match(r"^([A-Za-z]{1}):(?:/|\\)(.*)", pth)
    if sublime.platform() == "windows" and drive is not None:
        pth = "%s/%s" % (drive.group(1), drive.group(2))
    return pth.replace("\\", "/")
class ColorSchemeMatcher(object):
    """Determine color scheme colors and style for text in a Sublime view buffer."""

    def __init__(self, scheme_file, color_filter=None):
        """Load and parse *scheme_file* (a tmTheme plist).

        ``color_filter`` receives the parsed plist and may rewrite it;
        the default filter is a no-op.
        """
        if color_filter is None:
            color_filter = self.filter
        self.color_scheme = path.normpath(scheme_file)
        self.scheme_file = path.basename(self.color_scheme)
        # Strip XML comments before parsing; some schemes embed comments
        # the plist parser cannot handle.
        self.plist_file = color_filter(
            readPlistFromBytes(
                re.sub(
                    br"^[\r\n\s]*<!--[\s\S]*?-->[\s\r\n]*|<!--[\s\S]*?-->", b'',
                    sublime.load_binary_resource(sublime_format_path(self.color_scheme))
                )
            )
        )
        self.scheme_file = scheme_file
        self.matched = {}
        self.parse_scheme()

    def filter(self, plist):
        """Dummy filter call that does nothing."""
        return plist

    def parse_scheme(self):
        """Parse the color scheme into special colors and per-scope rules."""
        # The global settings entry is the one with neither scope nor name.
        color_settings = {}
        for item in self.plist_file["settings"]:
            if item.get('scope', None) is None and item.get('name', None) is None:
                color_settings = item["settings"]
                break
        # Get general theme colors from color scheme file
        bground, bground_sim = self.strip_color(
            color_settings.get("background", '#FFFFFF'), simple_strip=True
        )
        # Need to set background so other colors can simulate their transparency.
        self.special_colors = {
            "background": {'color': bground, 'color_simulated': bground_sim}
        }
        fground, fground_sim = self.strip_color(color_settings.get("foreground", '#000000'))
        sbground = self.strip_color(color_settings.get("selection", fground))[0]
        sbground_sim = self.strip_color(color_settings.get("selection", fground_sim))[1]
        sfground, sfground_sim = self.strip_color(color_settings.get("selectionForeground", None))
        gbground = self.strip_color(color_settings.get("gutter", bground))[0]
        gbground_sim = self.strip_color(color_settings.get("gutter", bground_sim))[1]
        gfground = self.strip_color(color_settings.get("gutterForeground", fground))[0]
        gfground_sim = self.strip_color(color_settings.get("gutterForeground", fground_sim))[1]
        self.special_colors["foreground"] = {'color': fground, 'color_simulated': fground_sim}
        self.special_colors["background"] = {'color': bground, 'color_simulated': bground_sim}
        self.special_colors["selectionForeground"] = {'color': sfground, 'color_simulated': sfground_sim}
        self.special_colors["selection"] = {'color': sbground, 'color_simulated': sbground_sim}
        self.special_colors["gutter"] = {'color': gbground, 'color_simulated': gbground_sim}
        self.special_colors["gutterForeground"] = {'color': gfground, 'color_simulated': gfground_sim}
        # Create scope colors mapping from color scheme file
        self.colors = {}
        for item in self.plist_file["settings"]:
            name = item.get('name', '')
            scope = item.get('scope', None)
            color = None
            # Fix: reset bgcolor every iteration; previously a stale value
            # from the prior item could leak into this one (or raise
            # NameError when the first item had no 'settings' entry).
            bgcolor = None
            style = []
            if 'settings' in item and scope is not None:
                color = item['settings'].get('foreground', None)
                bgcolor = item['settings'].get('background', None)
                if 'fontStyle' in item['settings']:
                    for s in item['settings']['fontStyle'].split(' '):
                        if s == "bold" or s == "italic":  # or s == "underline":
                            style.append(s)
            if scope is not None and (color is not None or bgcolor is not None):
                fg, fg_sim = self.strip_color(color)
                bg, bg_sim = self.strip_color(bgcolor)
                self.colors[scope] = {
                    "name": name,
                    "scope": scope,
                    "color": fg,
                    "color_simulated": fg_sim,
                    "bgcolor": bg,
                    "bgcolor_simulated": bg_sim,
                    "style": style
                }

    def strip_color(self, color, simple_strip=False):
        """
        Strip transparency from the color value.

        Transparency can be stripped in one of two ways:
        - Simply mask off the alpha channel (simple_strip).
        - Apply the alpha channel against the scheme background, yielding
          the color actually seen by the eye.

        Returns (original_color, simulated_color) or (None, None).
        """
        if color is None or color.strip() == "":
            return None, None
        rgba = RGBA(color.replace(" ", ""))
        if not simple_strip:
            bground = self.special_colors['background']['color_simulated']
            rgba.apply_alpha(bground if bground != "" else "#FFFFFF")
        return color, rgba.get_rgb()

    def get_special_color(self, name, simulate_transparency=False):
        """
        Get the core colors (background, foreground) for the view and gutter.

        Get the visible look of the color by simulated transparency if requested.
        """
        return self.special_colors.get(name, {}).get('color_simulated' if simulate_transparency else 'color')

    def get_plist_file(self):
        """Get the plist file used during the process."""
        return self.plist_file

    def get_scheme_file(self):
        """Get the scheme file used during the process."""
        return self.scheme_file

    def guess_color(self, scope_key, selected=False, explicit_background=False):
        """
        Guess the colors and style of the text for the given Sublime scope.

        By default, we always fall back to the scheme's default background,
        but if desired, we can show that no background was explicitly
        specified by returning None.  This is done by enabling
        explicit_background, which will only show backgrounds that were
        explicitly specified (originally introduced for mdpopups so it
        could tell when no background was needed).
        """
        color = self.special_colors['foreground']['color']
        color_sim = self.special_colors['foreground']['color_simulated']
        bgcolor = self.special_colors['background']['color'] if not explicit_background else None
        bgcolor_sim = self.special_colors['background']['color_simulated'] if not explicit_background else None
        style = set([])
        color_selector = SchemeSelectors("foreground", "foreground")
        bg_selector = SchemeSelectors("background", "background")
        style_selectors = {"bold": SchemeSelectors("", ""), "italic": SchemeSelectors("", "")}
        if scope_key in self.matched:
            # Cached result from a previous query for this scope.
            color = self.matched[scope_key]["color"]
            color_sim = self.matched[scope_key]["color_simulated"]
            style = self.matched[scope_key]["style"]
            bgcolor = self.matched[scope_key]["bgcolor"]
            bgcolor_sim = self.matched[scope_key]["bgcolor_simulated"]
            selectors = self.matched[scope_key]["selectors"]
            color_selector = selectors["color"]
            bg_selector = selectors["background"]
            style_selectors = selectors["style"]
        else:
            # Track the best (most specific) match independently for the
            # foreground, background and style rules.
            best_match_bg = 0
            best_match_fg = 0
            best_match_style = 0
            for key in self.colors:
                match = sublime.score_selector(scope_key, key)
                if self.colors[key]["color"] is not None and match > best_match_fg:
                    best_match_fg = match
                    color = self.colors[key]["color"]
                    color_sim = self.colors[key]["color_simulated"]
                    color_selector = SchemeSelectors(self.colors[key]["name"], self.colors[key]["scope"])
                if self.colors[key]["style"] is not None and match > best_match_style:
                    best_match_style = match
                    for s in self.colors[key]["style"]:
                        style.add(s)
                        if s == "bold":
                            style_selectors["bold"] = SchemeSelectors(
                                self.colors[key]["name"], self.colors[key]["scope"]
                            )
                        elif s == "italic":
                            style_selectors["italic"] = SchemeSelectors(
                                self.colors[key]["name"], self.colors[key]["scope"]
                            )
                if self.colors[key]["bgcolor"] is not None and match > best_match_bg:
                    best_match_bg = match
                    bgcolor = self.colors[key]["bgcolor"]
                    bgcolor_sim = self.colors[key]["bgcolor_simulated"]
                    bg_selector = SchemeSelectors(self.colors[key]["name"], self.colors[key]["scope"])
            if len(style) == 0:
                style = ""
            else:
                style = ' '.join(style)
            self.matched[scope_key] = {
                "color": color,
                "bgcolor": bgcolor,
                "color_simulated": color_sim,
                "bgcolor_simulated": bgcolor_sim,
                "style": style,
                "selectors": {
                    "color": color_selector,
                    "background": bg_selector,
                    "style": style_selectors
                }
            }
        if selected:
            # Fix: the previous chained assignments ("color_sim = color = ..."
            # and "bgcolor_sim = color = ...") clobbered the foreground color —
            # the latter even overwrote it with the *background* selection color.
            if self.special_colors['selectionForeground']['color']:
                color = self.special_colors['selectionForeground']['color']
                color_sim = self.special_colors['selectionForeground']['color_simulated']
                style = ''
            if self.special_colors['selection']['color']:
                bgcolor = self.special_colors['selection']['color']
                bgcolor_sim = self.special_colors['selection']['color_simulated']
        return SchemeColors(
            color, color_sim, bgcolor, bgcolor_sim, style,
            color_selector, bg_selector, style_selectors
        )
| [
"collections.namedtuple",
"sublime.score_selector",
"re.match",
"os.path.normpath",
"os.path.basename",
"sublime.platform"
] | [((1530, 1679), 'collections.namedtuple', 'namedtuple', (['"""SchemeColors"""', "['fg', 'fg_simulated', 'bg', 'bg_simulated', 'style', 'fg_selector',\n 'bg_selector', 'style_selectors']"], {'verbose': '(False)'}), "('SchemeColors', ['fg', 'fg_simulated', 'bg', 'bg_simulated',\n 'style', 'fg_selector', 'bg_selector', 'style_selectors'], verbose=False)\n", (1540, 1679), False, 'from collections import namedtuple\n'), ((1757, 1820), 'collections.namedtuple', 'namedtuple', (['"""SchemeSelectors"""', "['name', 'scope']"], {'verbose': '(False)'}), "('SchemeSelectors', ['name', 'scope'], verbose=False)\n", (1767, 1820), False, 'from collections import namedtuple\n'), ((1939, 1985), 're.match', 're.match', (['"""^([A-Za-z]{1}):(?:/|\\\\\\\\)(.*)"""', 'pth'], {}), "('^([A-Za-z]{1}):(?:/|\\\\\\\\)(.*)', pth)\n", (1947, 1985), False, 'import re\n'), ((2425, 2451), 'os.path.normpath', 'path.normpath', (['scheme_file'], {}), '(scheme_file)\n', (2438, 2451), False, 'from os import path\n'), ((2479, 2511), 'os.path.basename', 'path.basename', (['self.color_scheme'], {}), '(self.color_scheme)\n', (2492, 2511), False, 'from os import path\n'), ((1992, 2010), 'sublime.platform', 'sublime.platform', ([], {}), '()\n', (2008, 2010), False, 'import sublime\n'), ((9628, 9666), 'sublime.score_selector', 'sublime.score_selector', (['scope_key', 'key'], {}), '(scope_key, key)\n', (9650, 9666), False, 'import sublime\n')] |
import re
class Node:
    """A network node whose IPv4 address is validated at construction."""

    def __init__(self, id, ip, hostname, type):
        self.id = id
        self.ip = ip
        self.hostname = hostname
        self.type = type
        self.validate()

    def validate(self):
        """Check ``self.ip``; raise Exception if it is not a valid IPv4 address.

        Fix: the original used the Python 2 builtin ``reduce``, which is a
        NameError on Python 3; ``all(...)`` expresses the same octet check.
        NOTE: ``self.illegal`` is True when the address is *valid* — the
        name is historical and kept for interface compatibility.
        """
        self.illegal = False
        if re.match("^(\d{1,3}\.){3}\d{1,3}$", self.ip):
            # Every dotted octet must fit in a byte.
            self.illegal = all(int(part) <= 255 for part in self.ip.split("."))
        if not self.illegal:
            raise Exception("IP Format Error, " + self.ip + " is illegal.")

    def __repr__(self):
        return str(self)

    def __str__(self):
        return "<IP: %s, id: %s, hostname: %s, type: %s>" % (self.ip, self.id, self.hostname, self.type)
# if __name__ == "__main__":
# a = Node(1, "192.168.1.300", 1, 1)
# a.validate()
| [
"re.match"
] | [((262, 309), 're.match', 're.match', (['"""^(\\\\d{1,3}\\\\.){3}\\\\d{1,3}$"""', 'self.ip'], {}), "('^(\\\\d{1,3}\\\\.){3}\\\\d{1,3}$', self.ip)\n", (270, 309), False, 'import re\n')] |
import math
def radix_sort(arr):
    """LSD radix sort (base 10) for non-negative integers.

    Sorts *arr* in place and also returns it.  Each pass distributes the
    values into ten buckets keyed on the current digit, then rebuilds the
    list; the loop stops once every quotient for the current digit is zero.
    """
    if arr == []:
        return arr
    base = 10
    digit = 1
    finished = False
    while not finished:
        finished = True
        buckets = [[] for _ in range(base)]
        # Distribute every value into the bucket for its current digit.
        for value in arr:
            quotient = math.floor(value / digit)
            buckets[quotient % base].append(value)
            if finished and quotient > 0:
                # At least one value still has higher digits to process.
                finished = False
        # Rebuild the array from the buckets, in bucket order.
        pos = 0
        for bucket in buckets:
            for value in bucket:
                arr[pos] = value
                pos += 1
        digit *= base
    return arr
return arr | [
"math.floor"
] | [((339, 364), 'math.floor', 'math.floor', (['(i / placement)'], {}), '(i / placement)\n', (349, 364), False, 'import math\n')] |
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from django.contrib.auth import views
from . import views
# Account URL routes: authentication, password management, dashboard,
# profile editing and registration.  Everything except the dashboard,
# edit and register views is handled by django.contrib.auth.
# NOTE(review): these function-based auth views were removed in Django 2.1
# (replaced by class-based views) — confirm the project pins Django < 2.1.
# NOTE(review): the `from django.contrib.auth import views` import above is
# immediately shadowed by `from . import views` and could be removed.
urlpatterns = [
    #Custom login view
    # url(r'^login/$', views.user_login, name='login'),
    #Builtin login view
    url(r'^login/$', auth_views.login, name='login'),
    url(r'^edit/$', views.edit, name='edit'),
    url(r'^logout/$', auth_views.logout, name='logout'),
    url(r'^logout_then_login/$', auth_views.logout_then_login, name='logout_then_login'),
    url(r'^$', views.dashboard, name='dashboard'),
    url(r'^password_change/$', auth_views.password_change, name='password_change'),
    url(r'^password_change/done/$', auth_views.password_change_done, name='password_change_done'),
    url(r'^password_reset/$', auth_views.password_reset, name='password_reset'),
    url(r'^password_reset/done/$', auth_views.password_reset_done, name='password_reset_done'),
    url(r'^password_reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$', auth_views.password_reset_confirm, name='password_reset_confirm'),
    url(r'^password_reset/complete/$', auth_views.password_reset_complete, name='password_reset_complete'),
    url(r'^register/$', views.register, name='register'),
]
| [
"django.conf.urls.url"
] | [((268, 315), 'django.conf.urls.url', 'url', (['"""^login/$"""', 'auth_views.login'], {'name': '"""login"""'}), "('^login/$', auth_views.login, name='login')\n", (271, 315), False, 'from django.conf.urls import url\n'), ((322, 361), 'django.conf.urls.url', 'url', (['"""^edit/$"""', 'views.edit'], {'name': '"""edit"""'}), "('^edit/$', views.edit, name='edit')\n", (325, 361), False, 'from django.conf.urls import url\n'), ((368, 418), 'django.conf.urls.url', 'url', (['"""^logout/$"""', 'auth_views.logout'], {'name': '"""logout"""'}), "('^logout/$', auth_views.logout, name='logout')\n", (371, 418), False, 'from django.conf.urls import url\n'), ((425, 513), 'django.conf.urls.url', 'url', (['"""^logout_then_login/$"""', 'auth_views.logout_then_login'], {'name': '"""logout_then_login"""'}), "('^logout_then_login/$', auth_views.logout_then_login, name=\n 'logout_then_login')\n", (428, 513), False, 'from django.conf.urls import url\n'), ((515, 559), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.dashboard'], {'name': '"""dashboard"""'}), "('^$', views.dashboard, name='dashboard')\n", (518, 559), False, 'from django.conf.urls import url\n'), ((566, 643), 'django.conf.urls.url', 'url', (['"""^password_change/$"""', 'auth_views.password_change'], {'name': '"""password_change"""'}), "('^password_change/$', auth_views.password_change, name='password_change')\n", (569, 643), False, 'from django.conf.urls import url\n'), ((650, 747), 'django.conf.urls.url', 'url', (['"""^password_change/done/$"""', 'auth_views.password_change_done'], {'name': '"""password_change_done"""'}), "('^password_change/done/$', auth_views.password_change_done, name=\n 'password_change_done')\n", (653, 747), False, 'from django.conf.urls import url\n'), ((749, 823), 'django.conf.urls.url', 'url', (['"""^password_reset/$"""', 'auth_views.password_reset'], {'name': '"""password_reset"""'}), "('^password_reset/$', auth_views.password_reset, name='password_reset')\n", (752, 823), False, 'from 
django.conf.urls import url\n'), ((830, 924), 'django.conf.urls.url', 'url', (['"""^password_reset/done/$"""', 'auth_views.password_reset_done'], {'name': '"""password_reset_done"""'}), "('^password_reset/done/$', auth_views.password_reset_done, name=\n 'password_reset_done')\n", (833, 924), False, 'from django.conf.urls import url\n'), ((926, 1067), 'django.conf.urls.url', 'url', (['"""^password_reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$"""', 'auth_views.password_reset_confirm'], {'name': '"""password_reset_confirm"""'}), "('^password_reset/confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$',\n auth_views.password_reset_confirm, name='password_reset_confirm')\n", (929, 1067), False, 'from django.conf.urls import url\n'), ((1070, 1176), 'django.conf.urls.url', 'url', (['"""^password_reset/complete/$"""', 'auth_views.password_reset_complete'], {'name': '"""password_reset_complete"""'}), "('^password_reset/complete/$', auth_views.password_reset_complete, name=\n 'password_reset_complete')\n", (1073, 1176), False, 'from django.conf.urls import url\n'), ((1178, 1229), 'django.conf.urls.url', 'url', (['"""^register/$"""', 'views.register'], {'name': '"""register"""'}), "('^register/$', views.register, name='register')\n", (1181, 1229), False, 'from django.conf.urls import url\n')] |
# -*- coding: utf-8 -*-
from pyfr.mpiutil import get_comm_rank_root
from pyfr.plugins.base import BasePlugin, init_csv
class DtStatsPlugin(BasePlugin):
    """Record per-step time-step statistics (dt, accept/reject, error) to CSV."""

    name = 'dtstats'
    systems = ['*']
    formulations = ['std']

    def __init__(self, intg, cfgsect, prefix):
        super().__init__(intg, cfgsect, prefix)

        # How often (in accepted steps) to flush the CSV to disk
        self.flushsteps = self.cfg.getint(self.cfgsect, 'flushsteps', 500)

        # Running step counter, pending rows and previous solution time
        self.count = 0
        self.stats = []
        self.tprev = intg.tcurr

        # MPI info; only the root rank opens the output file
        comm, rank, root = get_comm_rank_root()
        self.outf = init_csv(self.cfg, cfgsect, 'n,t,dt,action,error') if rank == root else None

    def __call__(self, intg):
        # Number each rejected/accepted step, continuing from the running total
        for n, (dt, act, err) in enumerate(intg.stepinfo, start=self.count):
            self.stats.append((n, self.tprev, dt, act, err))

        # Update the total step count and save the current time
        self.count += len(intg.stepinfo)
        self.tprev = intg.tcurr

        # Only the root rank (which owns the file handle) writes
        if self.outf:
            for row in self.stats:
                print(','.join(str(col) for col in row), file=self.outf)

            # Periodically flush to disk
            if intg.nacptsteps % self.flushsteps == 0:
                self.outf.flush()

            # Reset the pending rows
            self.stats = []
| [
"pyfr.plugins.base.init_csv",
"pyfr.mpiutil.get_comm_rank_root"
] | [((522, 542), 'pyfr.mpiutil.get_comm_rank_root', 'get_comm_rank_root', ([], {}), '()\n', (540, 542), False, 'from pyfr.mpiutil import get_comm_rank_root\n'), ((647, 697), 'pyfr.plugins.base.init_csv', 'init_csv', (['self.cfg', 'cfgsect', '"""n,t,dt,action,error"""'], {}), "(self.cfg, cfgsect, 'n,t,dt,action,error')\n", (655, 697), False, 'from pyfr.plugins.base import BasePlugin, init_csv\n')] |
import math
import random
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from fmt.pythonfmt.doubleintegrator import filter_reachable, gen_trajectory, show_trajectory
from fmt.pythonfmt.world import World
def dist2(p, q):
    """Euclidean distance between the position components of two states.

    States are laid out as [x, y, vx, vy].  Fix: the 1-based Julia
    original indexed p[1], p[2]; carried over to Python that pointed at
    (y, vx) — corrected here to the (x, y) position slots.
    """
    return math.sqrt((p[0] - q[0]) ** 2 + (p[1] - q[1]) ** 2)
# FMTree class
class FMTree:
    """Fast Marching Tree (FMT*) planner over 4-D double-integrator states.

    Attributes set in ``__init__``:
        s_init, s_goal : start / goal states [x, y, vx, vy]
        N              : number of samples
        Pset           : (N, 4) array of states; row 0 is the start,
                         the last row is the goal
        cost           : cost-to-come per node
        time           : optimal arrival time from each node's parent
        parent         : parent index per node
        bool_unvisit / bool_open / bool_closed : node-set membership flags
        world          : simulation world (bounds and collision checking)
        itr            : iteration counter
    """
    def __init__(self, s_init, s_goal, N, world):
        # Constructor: sample N valid states from the configuration space.
        print("initializing fmt ...")
        self.s_init = s_init
        self.s_goal = s_goal
        self.N = N
        self.world = world
        self.Pset = np.zeros((N, 4))
        self.Pset[0, :] = np.array(s_init)
        # NOTE(review): this helper shadows the builtins min/max and is
        # unused once the vectorised sampling below is in place.
        def myrn(min, max):
            return min + (max - min) * random.random()
        # Rejection-sample interior points in batches of 2N until the
        # slots 1 .. N-2 are filled with collision-free states.
        n = 1
        while True:
            num_ran = 2*N
            rp = np.empty((4, num_ran))
            rp[0, :] = np.random.default_rng().uniform(self.world.x_min[0], self.world.x_max[0], num_ran)
            rp[1, :] = np.random.default_rng().uniform(self.world.x_min[1], self.world.x_max[1], num_ran)
            rp[2, :] = np.random.default_rng().uniform(self.world.v_min[0], self.world.v_max[0], num_ran)
            rp[3, :] = np.random.default_rng().uniform(self.world.v_min[1], self.world.v_max[1], num_ran)
            for i_rp in range(0, num_ran):
                if self.world.isValid(rp[:, i_rp]):
                    self.Pset[n, :] = rp[:, i_rp]
                    n = n + 1
                if n == N-1:
                    break
            if n == N-1:
                break
        self.Pset[-1, :] = np.array(s_goal)  # last row is the goal, i.e. idx_goal = N - 1
        self.cost = np.zeros(N)
        self.time = np.zeros(N)
        self.parent = np.zeros(N, dtype=int)
        self.bool_unvisit = np.ones(N, dtype=np.bool_)
        self.bool_unvisit[0] = False
        self.bool_closed = np.zeros(N, dtype=np.bool_)
        self.bool_open = np.zeros(N, dtype=np.bool_)
        self.bool_open[0] = True
        self.itr = 0
        print("finish initializing")
    def show(self, ax):
        """Draw the current tree (node sets and parent trajectories) on *ax*."""
        print("drawing...")
        # Project every sampled state onto its (x, y) position.
        N = len(self.Pset)
        mat = np.zeros((2, N))
        for idx in range(0, N):
            mat[:, idx] = self.Pset[idx, 0:2]
        idxset_open = np.nonzero(self.bool_open)[0]
        idxset_closed = np.nonzero(self.bool_closed)[0]
        idxset_unvisit = np.nonzero(self.bool_unvisit)[0]
        # idxset_tree = setdiff(union(idxset_open, idxset_closed), [1])
        idxset_tree = np.concatenate((idxset_closed, idxset_open))  # unlike the Julia original, node 0 is not removed
        # Start, goal, open and closed node sets.
        ax.scatter(mat[0, 0], mat[1, 0], c='blue', s=20, zorder=100)
        ax.scatter(mat[0, -1], mat[1, -1], c='blue', s=20, zorder=101)
        ax.scatter(mat[0, idxset_open], mat[1, idxset_open], c='orange', s=5)
        ax.scatter(mat[0, idxset_closed], mat[1, idxset_closed], c='red', s=5)
        # ax.scatter(mat[0, idxset_unvisit], mat[1, idxset_unvisit], c='khaki', s=2)
        # Draw the optimal trajectory from each tree node's parent to it.
        for idx in idxset_tree:
            s0 = self.Pset[self.parent[idx]]
            s1 = self.Pset[idx]
            tau = self.time[idx]
            show_trajectory(s0, s1, tau, N_split=5, ax=ax)
        print("finish drawing")
    def solve(self, ax=None, show=False, save=False):
        """Grow the tree until the goal leaves the unvisited set.

        Returns the solution as an array of node indices from the goal
        back to the start (node 0).
        """
        # keep extending the node until the tree reaches the goal
        print("please set with_savefig=false if you want to measure the computation time")
        print("start solving")
        while True:
            if not self.extend():  # extension failed: open set exhausted
                break
            if self.itr % 40 == 1:
                print("itr: ", self.itr)
                if ax and show:
                    self.show(ax)
                    plt.pause(1)
                if ax and save:
                    plt.savefig("./fig/" + str(self.itr) + ".png")
                    # TODO: pass the figure in explicitly instead of relying on pyplot state
            if not self.bool_unvisit[-1]:
                break
        # NOTE(review): the unreachable-goal case is not handled; the
        # backtracking below assumes parent links lead to node 0.
        idx = -1
        idx_solution = [idx]
        while True:
            idx = self.parent[idx]
            idx_solution.append(idx)
            if idx == 0:
                break
        print("finish solving")
        return np.array(idx_solution)
    def extend(self):
        """One FMT* extension step; returns False when the open set is empty."""
        self.itr += 1
        r = 1.0  # presumably the reachability cost radius — TODO confirm
        # NOTE(review): idxset_open / idxset_unvisit are recomputed from the
        # flag arrays on every call; incremental bookkeeping would avoid it.
        idxset_open = np.nonzero(self.bool_open)[0]  # np.nonzero returns a tuple; take the index array
        if idxset_open.size == 0:  # cannot extend any further
            return False
        idxset_unvisit = np.nonzero(self.bool_unvisit)[0]
        idx_lowest = idxset_open[np.argmin(self.cost[idxset_open])]
        s_c = self.Pset[idx_lowest, :]
        # Forward-reachable unvisited neighbours of the lowest-cost open node.
        idxset_near, _, _ = filter_reachable(self.Pset, idxset_unvisit,
                                             self.Pset[idx_lowest], r, "F")
        for idx_near in idxset_near:
            # Backward-reachable open candidates that could parent idx_near.
            idxset_cand, distset_cand, timeset_cand = filter_reachable(self.Pset, idxset_open,
                                                                      self.Pset[idx_near], r, "B")
            if len(idxset_cand) == 0:
                # NOTE(review): this aborts the whole extension and returns
                # None (falsy), which solve() treats as overall failure;
                # `continue` was probably intended here — confirm.
                return
            idx_costmin = np.argmin(self.cost[idxset_cand] + distset_cand)
            cost_new = self.cost[idxset_cand[idx_costmin]] + distset_cand[idx_costmin]
            # optimal time for new connection
            time_new = timeset_cand[idx_costmin]
            idx_parent = idxset_cand[idx_costmin]
            waypoints = gen_trajectory(self.Pset[idx_parent], self.Pset[idx_near], time_new, 10)
            if self.world.isValid(waypoints):
                # Collision-free connection: move idx_near into the open set.
                self.bool_unvisit[idx_near] = False
                self.bool_open[idx_near] = True
                self.cost[idx_near] = cost_new
                self.time[idx_near] = time_new
                self.parent[idx_near] = idx_parent
        # The expanded node is closed.
        self.bool_open[idx_lowest] = False
        self.bool_closed[idx_lowest] = True
        return True
| [
"numpy.ones",
"numpy.random.default_rng",
"fmt.pythonfmt.doubleintegrator.gen_trajectory",
"fmt.pythonfmt.doubleintegrator.show_trajectory",
"math.sqrt",
"numpy.array",
"numpy.zeros",
"numpy.empty",
"numpy.concatenate",
"numpy.nonzero",
"numpy.argmin",
"random.random",
"matplotlib.pyplot.pau... | [((263, 313), 'math.sqrt', 'math.sqrt', (['((p[1] - q[1]) ** 2 + (p[2] - q[2]) ** 2)'], {}), '((p[1] - q[1]) ** 2 + (p[2] - q[2]) ** 2)\n', (272, 313), False, 'import math\n'), ((1135, 1151), 'numpy.zeros', 'np.zeros', (['(N, 4)'], {}), '((N, 4))\n', (1143, 1151), True, 'import numpy as np\n'), ((1178, 1194), 'numpy.array', 'np.array', (['s_init'], {}), '(s_init)\n', (1186, 1194), True, 'import numpy as np\n'), ((2399, 2415), 'numpy.array', 'np.array', (['s_goal'], {}), '(s_goal)\n', (2407, 2415), True, 'import numpy as np\n'), ((2478, 2489), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (2486, 2489), True, 'import numpy as np\n'), ((2510, 2521), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (2518, 2521), True, 'import numpy as np\n'), ((2544, 2566), 'numpy.zeros', 'np.zeros', (['N'], {'dtype': 'int'}), '(N, dtype=int)\n', (2552, 2566), True, 'import numpy as np\n'), ((2595, 2621), 'numpy.ones', 'np.ones', (['N'], {'dtype': 'np.bool_'}), '(N, dtype=np.bool_)\n', (2602, 2621), True, 'import numpy as np\n'), ((2686, 2713), 'numpy.zeros', 'np.zeros', (['N'], {'dtype': 'np.bool_'}), '(N, dtype=np.bool_)\n', (2694, 2713), True, 'import numpy as np\n'), ((2739, 2766), 'numpy.zeros', 'np.zeros', (['N'], {'dtype': 'np.bool_'}), '(N, dtype=np.bool_)\n', (2747, 2766), True, 'import numpy as np\n'), ((3089, 3105), 'numpy.zeros', 'np.zeros', (['(2, N)'], {}), '((2, N))\n', (3097, 3105), True, 'import numpy as np\n'), ((3445, 3489), 'numpy.concatenate', 'np.concatenate', (['(idxset_closed, idxset_open)'], {}), '((idxset_closed, idxset_open))\n', (3459, 3489), True, 'import numpy as np\n'), ((5546, 5568), 'numpy.array', 'np.array', (['idx_solution'], {}), '(idx_solution)\n', (5554, 5568), True, 'import numpy as np\n'), ((6125, 6199), 'fmt.pythonfmt.doubleintegrator.filter_reachable', 'filter_reachable', (['self.Pset', 'idxset_unvisit', 'self.Pset[idx_lowest]', 'r', '"""F"""'], {}), "(self.Pset, idxset_unvisit, self.Pset[idx_lowest], r, 'F')\n", 
(6141, 6199), False, 'from fmt.pythonfmt.doubleintegrator import filter_reachable, gen_trajectory, show_trajectory\n'), ((1373, 1395), 'numpy.empty', 'np.empty', (['(4, num_ran)'], {}), '((4, num_ran))\n', (1381, 1395), True, 'import numpy as np\n'), ((3207, 3233), 'numpy.nonzero', 'np.nonzero', (['self.bool_open'], {}), '(self.bool_open)\n', (3217, 3233), True, 'import numpy as np\n'), ((3261, 3289), 'numpy.nonzero', 'np.nonzero', (['self.bool_closed'], {}), '(self.bool_closed)\n', (3271, 3289), True, 'import numpy as np\n'), ((3318, 3347), 'numpy.nonzero', 'np.nonzero', (['self.bool_unvisit'], {}), '(self.bool_unvisit)\n', (3328, 3347), True, 'import numpy as np\n'), ((4072, 4118), 'fmt.pythonfmt.doubleintegrator.show_trajectory', 'show_trajectory', (['s0', 's1', 'tau'], {'N_split': '(5)', 'ax': 'ax'}), '(s0, s1, tau, N_split=5, ax=ax)\n', (4087, 4118), False, 'from fmt.pythonfmt.doubleintegrator import filter_reachable, gen_trajectory, show_trajectory\n'), ((5741, 5767), 'numpy.nonzero', 'np.nonzero', (['self.bool_open'], {}), '(self.bool_open)\n', (5751, 5767), True, 'import numpy as np\n'), ((5886, 5915), 'numpy.nonzero', 'np.nonzero', (['self.bool_unvisit'], {}), '(self.bool_unvisit)\n', (5896, 5915), True, 'import numpy as np\n'), ((5952, 5985), 'numpy.argmin', 'np.argmin', (['self.cost[idxset_open]'], {}), '(self.cost[idxset_open])\n', (5961, 5985), True, 'import numpy as np\n'), ((6337, 6406), 'fmt.pythonfmt.doubleintegrator.filter_reachable', 'filter_reachable', (['self.Pset', 'idxset_open', 'self.Pset[idx_near]', 'r', '"""B"""'], {}), "(self.Pset, idxset_open, self.Pset[idx_near], r, 'B')\n", (6353, 6406), False, 'from fmt.pythonfmt.doubleintegrator import filter_reachable, gen_trajectory, show_trajectory\n'), ((6565, 6613), 'numpy.argmin', 'np.argmin', (['(self.cost[idxset_cand] + distset_cand)'], {}), '(self.cost[idxset_cand] + distset_cand)\n', (6574, 6613), True, 'import numpy as np\n'), ((6955, 7027), 'fmt.pythonfmt.doubleintegrator.gen_trajectory', 
'gen_trajectory', (['self.Pset[idx_parent]', 'self.Pset[idx_near]', 'time_new', '(10)'], {}), '(self.Pset[idx_parent], self.Pset[idx_near], time_new, 10)\n', (6969, 7027), False, 'from fmt.pythonfmt.doubleintegrator import filter_reachable, gen_trajectory, show_trajectory\n'), ((1263, 1278), 'random.random', 'random.random', ([], {}), '()\n', (1276, 1278), False, 'import random\n'), ((1419, 1442), 'numpy.random.default_rng', 'np.random.default_rng', ([], {}), '()\n', (1440, 1442), True, 'import numpy as np\n'), ((1525, 1548), 'numpy.random.default_rng', 'np.random.default_rng', ([], {}), '()\n', (1546, 1548), True, 'import numpy as np\n'), ((1631, 1654), 'numpy.random.default_rng', 'np.random.default_rng', ([], {}), '()\n', (1652, 1654), True, 'import numpy as np\n'), ((1737, 1760), 'numpy.random.default_rng', 'np.random.default_rng', ([], {}), '()\n', (1758, 1760), True, 'import numpy as np\n'), ((5077, 5089), 'matplotlib.pyplot.pause', 'plt.pause', (['(1)'], {}), '(1)\n', (5086, 5089), True, 'import matplotlib.pyplot as plt\n')] |
# import unittest
import logging
from timeit import timeit
# Configure root logging so the demo logging.info calls below are visible.
logging.basicConfig(level=logging.INFO)
def memoize(function):
    """Decorator caching *function* results keyed by its positional args."""
    cache = {}

    def memo(*args):
        try:
            return cache[args]
        except KeyError:
            result = cache[args] = function(*args)
            return result

    return memo
import functools

@functools.lru_cache(maxsize=None)
def edit_distance_recursive(source, target):
    """Return the Levenshtein edit distance between *source* and *target*.

    Classic top-down recursive formulation; functools.lru_cache provides
    memoization (replacing the hand-rolled ``memoize`` decorator with the
    standard-library equivalent, which also preserves function metadata).
    """
    # Base cases: distance to/from the empty string is the other's length.
    if source == "":
        return len(target)
    if target == "":
        return len(source)
    # Substitution is free when the trailing characters already match.
    cost = 0 if source[-1] == target[-1] else 1
    return min(
        edit_distance_recursive(source[:-1], target) + 1,         # deletion
        edit_distance_recursive(source, target[:-1]) + 1,         # insertion
        edit_distance_recursive(source[:-1], target[:-1]) + cost  # substitution
    )
# Demo: log the distances for a few word pairs (recursive version).
logging.info(edit_distance_recursive("intention", "execution"))
logging.info(edit_distance_recursive("jackrabbits", "jackhammer"))
logging.info(edit_distance_recursive("ie", "e"))
def edit_distance_iterative(source, target):
    """Return the Levenshtein edit distance between *source* and *target*.

    Bottom-up dynamic programming.  Bug fix: the table must have
    (len(source)+1) x (len(target)+1) cells so that the final cell compares
    the FULL strings.  The original rows x columns table silently dropped
    the last character of each string, and for a length-1 string it skipped
    the fill loop entirely and returned a cell picked by stale loop
    variables.
    """
    rows = len(source) + 1      # prefixes of source, including ""
    columns = len(target) + 1   # prefixes of target, including ""
    # edit_distances[i][j] == distance(source[:i], target[:j])
    edit_distances = [[0] * columns for _ in range(rows)]
    for row in range(rows):
        edit_distances[row][0] = row        # delete all of source[:row]
    for column in range(columns):
        edit_distances[0][column] = column  # insert all of target[:column]
    for column in range(1, columns):
        for row in range(1, rows):
            cost = 0 if source[row - 1] == target[column - 1] else 1
            edit_distances[row][column] = min(
                edit_distances[row - 1][column] + 1,         # deletion
                edit_distances[row][column - 1] + 1,         # insertion
                edit_distances[row - 1][column - 1] + cost,  # substitution
            )
    return edit_distances[rows - 1][columns - 1]
# Demo: same pairs with the iterative version, then compare runtimes of
# both implementations over 100 repetitions each.
logging.info(edit_distance_iterative("intention", "execution"))
logging.info(edit_distance_iterative("jackrabbits", "jackhammer"))
logging.info(edit_distance_iterative("ie", "e"))
logging.info(timeit('edit_distance_recursive("intention", "execution")',
                    setup='from __main__ import edit_distance_recursive', number=100))
logging.info(timeit('edit_distance_iterative("intention", "execution")',
                    setup='from __main__ import edit_distance_iterative', number=100))
"logging.basicConfig",
"timeit.timeit"
] | [((60, 99), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (79, 99), False, 'import logging\n'), ((2032, 2162), 'timeit.timeit', 'timeit', (['"""edit_distance_recursive("intention", "execution")"""'], {'setup': '"""from __main__ import edit_distance_recursive"""', 'number': '(100)'}), '(\'edit_distance_recursive("intention", "execution")\', setup=\n \'from __main__ import edit_distance_recursive\', number=100)\n', (2038, 2162), False, 'from timeit import timeit\n'), ((2192, 2322), 'timeit.timeit', 'timeit', (['"""edit_distance_iterative("intention", "execution")"""'], {'setup': '"""from __main__ import edit_distance_iterative"""', 'number': '(100)'}), '(\'edit_distance_iterative("intention", "execution")\', setup=\n \'from __main__ import edit_distance_iterative\', number=100)\n', (2198, 2322), False, 'from timeit import timeit\n')] |
import sys
import os
import json
import hashlib
import logging
import base64
import shutil
from concurrent.futures import ProcessPoolExecutor
from subprocess import Popen, PIPE, STDOUT
from jvd.disassembler import DisassemblerAbstract
import logging as log
import traceback
from jvd.utils import read_gz_js, write_gz_js, which, check_output_ctx
import platform
from jvd.resources import require
import time
import threading
# Directory containing this module; used to locate the bundled IDA batch script.
SRC = os.path.split(os.path.realpath(__file__))[0]
IDA_script = os.path.join(SRC, 'ida_script.py')
# IDA is considered available when the 64-bit binary can be found on PATH.
ida_available = which('ida64.exe' if platform.system()
                      == 'Windows' else 'ida64') != None
# Text-mode (idat) binaries are used on non-Windows platforms.
ida64 = 'ida64' if platform.system() == 'Windows' else 'idat64'
ida32 = 'ida' if platform.system() == 'Windows' else 'idat'
class IDA(DisassemblerAbstract):
    """Disassembler backend driving IDA Pro in batch mode.

    Runs the bundled ``ida_script.py`` inside IDA (or uses the ``idaapi``
    module directly when this code is itself executing inside IDA's
    Python), and can optionally merge in Ghidra decompilation output.
    """

    def __init__(self):
        pass

    def _process(self, file, file_type, output_file_path, decompile=False, verbose=-1):
        """Disassemble *file* with IDA in batch mode.

        :param file: path to the binary or an existing IDA database.
        :param file_type: magic-style type string; selects the 32-/64-bit
            IDA binary and the database extension.
        :param output_file_path: destination for the result file; passed to
            the IDA-side script through the environment.
        :param decompile: when True, also run Ghidra and attach its
            decompiled source to the result.
        :param verbose: > 1 prints the IDA command line.
        :returns: ``(output_file_path, log)`` where *log* is IDA's output.
        """
        if not ida_available and 'idaapi' not in sys.modules:
            raise FileNotFoundError('IDA is not found!')
        log = None
        program = ida64
        extension = None
        if file_type.startswith('IDA '):
            # 32-bit database
            program = ida32
            extension = '.idb'
        elif file_type.startswith('FoxPro FPT'):
            # 64-bit database -- presumably .i64 files are detected as
            # 'FoxPro FPT' by the magic library; TODO confirm.
            program = ida64
            extension = '.i64'
        if extension:
            # IDA expects the database file to carry the proper extension.
            db = file + extension
            if not os.path.exists(db):
                shutil.copyfile(file, db)
            file = db
        cmd = [program, '-A', '-S{}'.format(IDA_script), file]
        # print(cmd)
        sub_env = os.environ.copy()
        # The batch script reads its destination path from the environment.
        sub_env["output_file_path"] = os.path.abspath(output_file_path)
        # print(cmd)
        # p = Popen(
        #     cmd,
        #     env=sub_env,
        #     stdout=PIPE,
        #     stderr=STDOUT)
        # log, _ = p.communicate(timeout=self.timeout)
        if verbose > 1:
            print(' '.join(cmd))
        with check_output_ctx(cmd, timeout=self.timeout, env=sub_env) as log:
            if not log:
                log = ''
        if decompile:
            # assuming that IDA does not support decompilation
            # transfer decompiled code to IDA
            jar = require('ghidrajar')
            java = require('jdk')
            from jvd.ghidra.decompiler import process as gh_process
            obj = read_gz_js(output_file_path)
            # Ghidra wants function entry points relative to the image base.
            func_entries = [f['addr_start']-obj['bin']['base']
                            for f in obj['functions']]
            output_file_path_gh = output_file_path + '.gh.gz'
            gh_process(java, jar, file, output_file_path_gh,
                       decompile=True, func_entries=func_entries)
            if os.path.exists(output_file_path_gh):
                obj_gh = read_gz_js(output_file_path_gh)
                src = obj_gh['functions_src']
                # Rebase Ghidra addresses onto IDA's image base before merging.
                base_diff = obj_gh['bin']['base'] - obj['bin']['base']
                for f in src:
                    f['addr_start'] = f['addr_start'] - base_diff
                obj['functions_src'] = src
                write_gz_js(obj, output_file_path)
        return output_file_path, log

    def context_init(self):
        """When running inside IDA, install a 0.5 s polling timer that calls
        ``jvd.client.search`` whenever the function under the cursor changes.

        :returns: True if the watcher was installed (inside IDA), else False.
        """
        if 'idaapi' in sys.modules:
            import idaapi
            self.f_current = None
            def _check():
                # Runs on IDA's UI thread (via execute_sync below).
                addr = idaapi.get_screen_ea()
                f_current = idaapi.get_func(addr)
                if f_current and f_current != self.f_current:
                    self.f_current = f_current
                    from jvd.client import search
                    search(self.context_function_info)
            def _step():
                idaapi.execute_sync(_check, idaapi.MFF_FAST)
                tt = threading.Timer(.5, _step)
                tt.daemon = True
                tt.start()
            _step()
            return True
        return False

    def _get_all_wrapped(self, **kwargs):
        """Run ``jvd.ida.ida_utils.get_all`` on IDA's main thread and return
        its result dict."""
        from jvd.ida.ida_utils import get_all
        import idaapi
        # this import cannot be moved to the header since it can
        # be only imported when running in context
        _bin = {}
        def _get():
            _bin.update(get_all(**kwargs))
        idaapi.execute_sync(_get, idaapi.MFF_FAST)
        return _bin

    def context_binary_info(self):
        """Return binary-level metadata, dropping the bulky strings/data maps."""
        _bin_info = self._get_all_wrapped(
            function_eas=None,
            with_blocks=False)['bin']
        return {
            k: v for k, v in _bin_info.items() if k not in ['strings', 'data', ]
        }

    def context_function_info(self):
        """Return function info (blocks, comments) keeping only the strings
        and data entries actually referenced by the instructions."""
        _all_info = self._get_all_wrapped(
            function_eas=None,
            with_blocks=True,
            current_ea=True
        )
        # Collect all data ('dr') and code ('cr') references made by the
        # instructions of every block.
        refs = set()
        for b in _all_info['blocks']:
            for i in b.get('ins', []):
                for r in i.get('dr', []) + i.get('cr', []):
                    refs.add(r)
        _cleaned_bin = {
            k: v for k, v in _all_info['bin'].items() if k not in [
                'strings', 'data', 'import_functions', 'export_functions',
                'import_modules', 'seg', 'entry_points']
        }
        _cleaned_bin['strings'] = {
            k: v for k, v in _all_info['bin']['strings'].items() if k in refs
        }
        # NOTE(review): this filters 'strings' again rather than 'data' --
        # looks like a copy/paste slip; confirm before relying on 'data'.
        _cleaned_bin['data'] = {
            k: v for k, v in _all_info['bin']['strings'].items() if k in refs
        }
        return {
            'bin': _cleaned_bin,
            'functions': _all_info['functions'],
            'blocks': _all_info['blocks'],
            'comments': _all_info['comments'],
        }
| [
"idaapi.get_func",
"os.path.exists",
"jvd.resources.require",
"idaapi.execute_sync",
"jvd.utils.write_gz_js",
"jvd.ghidra.decompiler.process",
"threading.Timer",
"jvd.ida.ida_utils.get_all",
"os.path.join",
"os.environ.copy",
"os.path.realpath",
"jvd.utils.read_gz_js",
"platform.system",
"... | [((490, 524), 'os.path.join', 'os.path.join', (['SRC', '"""ida_script.py"""'], {}), "(SRC, 'ida_script.py')\n", (502, 524), False, 'import os\n'), ((446, 472), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (462, 472), False, 'import os\n'), ((658, 675), 'platform.system', 'platform.system', ([], {}), '()\n', (673, 675), False, 'import platform\n'), ((720, 737), 'platform.system', 'platform.system', ([], {}), '()\n', (735, 737), False, 'import platform\n'), ((1641, 1658), 'os.environ.copy', 'os.environ.copy', ([], {}), '()\n', (1656, 1658), False, 'import os\n'), ((1697, 1730), 'os.path.abspath', 'os.path.abspath', (['output_file_path'], {}), '(output_file_path)\n', (1712, 1730), False, 'import os\n'), ((4206, 4248), 'idaapi.execute_sync', 'idaapi.execute_sync', (['_get', 'idaapi.MFF_FAST'], {}), '(_get, idaapi.MFF_FAST)\n', (4225, 4248), False, 'import idaapi\n'), ((2000, 2056), 'jvd.utils.check_output_ctx', 'check_output_ctx', (['cmd'], {'timeout': 'self.timeout', 'env': 'sub_env'}), '(cmd, timeout=self.timeout, env=sub_env)\n', (2016, 2056), False, 'from jvd.utils import read_gz_js, write_gz_js, which, check_output_ctx\n'), ((2264, 2284), 'jvd.resources.require', 'require', (['"""ghidrajar"""'], {}), "('ghidrajar')\n", (2271, 2284), False, 'from jvd.resources import require\n'), ((2304, 2318), 'jvd.resources.require', 'require', (['"""jdk"""'], {}), "('jdk')\n", (2311, 2318), False, 'from jvd.resources import require\n'), ((2405, 2433), 'jvd.utils.read_gz_js', 'read_gz_js', (['output_file_path'], {}), '(output_file_path)\n', (2415, 2433), False, 'from jvd.utils import read_gz_js, write_gz_js, which, check_output_ctx\n'), ((2627, 2722), 'jvd.ghidra.decompiler.process', 'gh_process', (['java', 'jar', 'file', 'output_file_path_gh'], {'decompile': '(True)', 'func_entries': 'func_entries'}), '(java, jar, file, output_file_path_gh, decompile=True,\n func_entries=func_entries)\n', (2637, 2722), True, 'from jvd.ghidra.decompiler 
import process as gh_process\n'), ((2757, 2792), 'os.path.exists', 'os.path.exists', (['output_file_path_gh'], {}), '(output_file_path_gh)\n', (2771, 2792), False, 'import os\n'), ((3100, 3134), 'jvd.utils.write_gz_js', 'write_gz_js', (['obj', 'output_file_path'], {}), '(obj, output_file_path)\n', (3111, 3134), False, 'from jvd.utils import read_gz_js, write_gz_js, which, check_output_ctx\n'), ((563, 580), 'platform.system', 'platform.system', ([], {}), '()\n', (578, 580), False, 'import platform\n'), ((1455, 1473), 'os.path.exists', 'os.path.exists', (['db'], {}), '(db)\n', (1469, 1473), False, 'import os\n'), ((1491, 1516), 'shutil.copyfile', 'shutil.copyfile', (['file', 'db'], {}), '(file, db)\n', (1506, 1516), False, 'import shutil\n'), ((2819, 2850), 'jvd.utils.read_gz_js', 'read_gz_js', (['output_file_path_gh'], {}), '(output_file_path_gh)\n', (2829, 2850), False, 'from jvd.utils import read_gz_js, write_gz_js, which, check_output_ctx\n'), ((3348, 3370), 'idaapi.get_screen_ea', 'idaapi.get_screen_ea', ([], {}), '()\n', (3368, 3370), False, 'import idaapi\n'), ((3399, 3420), 'idaapi.get_func', 'idaapi.get_func', (['addr'], {}), '(addr)\n', (3414, 3420), False, 'import idaapi\n'), ((3592, 3626), 'jvd.client.search', 'search', (['self.context_function_info'], {}), '(self.context_function_info)\n', (3598, 3626), False, 'from jvd.client import search\n'), ((3669, 3713), 'idaapi.execute_sync', 'idaapi.execute_sync', (['_check', 'idaapi.MFF_FAST'], {}), '(_check, idaapi.MFF_FAST)\n', (3688, 3713), False, 'import idaapi\n'), ((3735, 3762), 'threading.Timer', 'threading.Timer', (['(0.5)', '_step'], {}), '(0.5, _step)\n', (3750, 3762), False, 'import threading\n'), ((4178, 4195), 'jvd.ida.ida_utils.get_all', 'get_all', ([], {}), '(**kwargs)\n', (4185, 4195), False, 'from jvd.ida.ida_utils import get_all\n')] |
# Imports
import socket
import subprocess
import os
import requests
# from prettytable import PrettyTable
import getpass
import CONFIG
def send_message(text):
    """Post *text* to the configured Slack channel via chat.postMessage.

    Exits the program with an error message if the HTTP request fails.
    """
    try:
        requests.post('https://slack.com/api/chat.postMessage', {
            'token': CONFIG.SLACK_TOKEN,
            'channel': CONFIG.SLACK_CHANNEL_INFO,
            'text': text,
            'username': CONFIG.SLACK_BOT_NAME,
        })
    # Bug fix: requests raises requests.exceptions.ConnectionError, which is
    # NOT a subclass of the builtin ConnectionError previously caught here,
    # so network failures escaped the handler.  Catch the requests hierarchy.
    except requests.exceptions.RequestException:
        exit("Connection Error.")
def get_username():
    """Return the login name of the current user."""
    return getpass.getuser()
def get_hostname():
    """Return this machine's hostname."""
    return socket.gethostname()
def get_local_ip():
    """Return this machine's LAN IP address.

    Connecting a UDP socket sends no packets; it only makes the OS choose
    the outgoing interface, whose address is then read back.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as probe:
        probe.connect(('10.255.255.255', 1))
        return probe.getsockname()[0]
def get_connected_network():
    """Return the ESSID of the Wi-Fi network reported by ``iwgetid``."""
    # str(bytes) yields e.g. "b'wlan0    ESSID:\"MyNet\"\\n'"; the ESSID is
    # the first double-quoted token.  Linux-only (requires iwgetid).
    output = str(subprocess.check_output(['iwgetid']))
    network= output.split('"')[1]
    return network
def get_using_interface():
    """Return the wireless interface name from ``iwgetid`` output."""
    # NOTE(review): splitting str(bytes) on a space leaves the "b'" prefix
    # attached to the interface name (e.g. "b'wlan0") -- confirm intended.
    output = str(subprocess.check_output(['iwgetid']))
    network = output.split(' ')[0]
    return network
def get_device_uptime():
    """Return the ``uptime -p`` string with every word capitalized."""
    raw = os.popen('uptime -p').read()[:-1]
    words = [word.capitalize() for word in raw.split(' ')]
    return ' '.join(words).rstrip()
def get_ram_usage():
    """Return RAM usage from ``free -h`` as "<used> of <total>"."""
    # Invoke ``free`` once: the original ran it twice, which was slower and
    # could report an inconsistent total/used pair.  Row 1 is "Mem:";
    # column 1 is the total, column 2 the used amount.
    mem_row = os.popen('free -h').readlines()[1].split()
    total_m = mem_row[1]
    used_m = mem_row[2]
    return f'{used_m} of {total_m}'
# Collect the system snapshot once at import time.
username = get_username()
hostname = get_hostname()
local_ip = get_local_ip()
wifi = get_connected_network()
interface = get_using_interface()
device_uptime = get_device_uptime()
ram = get_ram_usage()
ssh_port = '*under_construction*'  # placeholder until SSH detection exists
# Human-readable report that will be posted to Slack.
INFORMATION = '''USERNAME: "{}"
HOSTNAME: "{}"
LOCAL IP: "{}"
CONNECTED NETWORK: "{}"
USING NETWORK INTERFACE: "{}"
DEVICE UPTIME: "{}"
RAM USAGE: "{}"
SSH PORT: "{}"'''.format(username, hostname, local_ip, wifi, interface, device_uptime, ram, ssh_port)
def make_table():
    """Placeholder for a PrettyTable rendering of the snapshot (disabled)."""
    # table = PrettyTable(['Hostname', 'Local IP', 'Wi-Fi', 'Interface', 'Uptime', 'RAM'])
    # data = ([hostname, local_ip, wifi, interface, device_uptime, ram])
    # table.add_row(data)
    # print(table)
    pass
# Ship the assembled report to the configured Slack channel.
send_message(INFORMATION)
| [
"subprocess.check_output",
"requests.post",
"socket.socket",
"os.popen",
"getpass.getuser",
"socket.gethostname"
] | [((480, 497), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (495, 497), False, 'import getpass\n'), ((530, 550), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (548, 550), False, 'import socket\n'), ((594, 642), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (607, 642), False, 'import socket\n'), ((177, 359), 'requests.post', 'requests.post', (['"""https://slack.com/api/chat.postMessage"""', "{'token': CONFIG.SLACK_TOKEN, 'channel': CONFIG.SLACK_CHANNEL_INFO, 'text':\n text, 'username': CONFIG.SLACK_BOT_NAME}"], {}), "('https://slack.com/api/chat.postMessage', {'token': CONFIG.\n SLACK_TOKEN, 'channel': CONFIG.SLACK_CHANNEL_INFO, 'text': text,\n 'username': CONFIG.SLACK_BOT_NAME})\n", (190, 359), False, 'import requests\n'), ((855, 891), 'subprocess.check_output', 'subprocess.check_output', (["['iwgetid']"], {}), "(['iwgetid'])\n", (878, 891), False, 'import subprocess\n'), ((991, 1027), 'subprocess.check_output', 'subprocess.check_output', (["['iwgetid']"], {}), "(['iwgetid'])\n", (1014, 1027), False, 'import subprocess\n'), ((1127, 1148), 'os.popen', 'os.popen', (['"""uptime -p"""'], {}), "('uptime -p')\n", (1135, 1148), False, 'import os\n'), ((1341, 1360), 'os.popen', 'os.popen', (['"""free -h"""'], {}), "('free -h')\n", (1349, 1360), False, 'import os\n'), ((1399, 1418), 'os.popen', 'os.popen', (['"""free -h"""'], {}), "('free -h')\n", (1407, 1418), False, 'import os\n')] |
import typing as t
import warnings
from .error_handler import MissingExtensionError, MissingExtensionWarning
class ExtensionMixin:
"""
A base class for mixing in custom classes (extensions) into another classes.
"""
AUTHOR = "unknown"
NAME = "unknown"
ID = f"{AUTHOR}-{NAME}"
SOFT_DEPENDENCIES = []
HARD_DEPENCENDIES = []
@classmethod
def get_dependencies(cls) -> t.Dict[str, t.List[object]]:
"""
This should return the following `dict`:
```python
{
"hard": [<class>, <class>, ...],
"soft": [<class>, <class>, ...]
}
```
A dependency is anything that you can pass into `FlarumUser(extensions=[...])` (e. g. an extension class).
#### Hard-dependencies:
- Will raise an error when they're not found. It is impossible for the extension to function without these.
#### Soft-dependencies:
- Will raise just a warning. It is possible for the extension to function without these, although with limitations
(such that some functions might be unavailable).
"""
return {
"soft": cls.SOFT_DEPENDENCIES,
"hard": cls.HARD_DEPENCENDIES
}
@classmethod
def mixin(cls, class_to_patch: object, class_to_mix_in: object, skip_protected: bool=True):
"""
A function to mix-in/merge properties, methods, functions, etc... of one class into another.
This skips all functions and properties starting with `__` (double underscore), unless `skip_protected` is False.
This sets/overwrites attributes of `class_to_patch` to attributes of `class_to_mix_in` (monkey-patch).
### Example:
```python
extension.mixin(myclass, pyflarum_class)
```
"""
for property, value in vars(class_to_mix_in).items():
if property.startswith('__') and skip_protected:
continue
setattr(class_to_patch, f'{property}', value)
def mixin_extensions(extensions: t.List[t.Type[ExtensionMixin]]) -> None:
for extension in extensions:
dependencies = extension.get_dependencies()
hard = dependencies.get("hard", None)
soft = dependencies.get("soft", None)
if hard and len(hard) > 0:
for hard_dependency in hard:
if hard_dependency not in extensions:
raise MissingExtensionError(f'`{extension}` hardly depends on `{hard_dependency}`. Please, include that extension too in your extension list.')
extension.mixin()
if soft and len(soft) > 0:
for soft_dependency in soft:
if soft_dependency not in extensions:
warnings.warn(f'`{extension}` softly depends on `{soft_dependency}`. Some features might be unavailable.', MissingExtensionWarning)
| [
"warnings.warn"
] | [((2853, 2994), 'warnings.warn', 'warnings.warn', (['f"""`{extension}` softly depends on `{soft_dependency}`. Some features might be unavailable."""', 'MissingExtensionWarning'], {}), "(\n f'`{extension}` softly depends on `{soft_dependency}`. Some features might be unavailable.'\n , MissingExtensionWarning)\n", (2866, 2994), False, 'import warnings\n')] |
import numpy as np
import h5py
import pyglib.basic.units as units
import pyglib.basic.splot as splot
'''
Equation of state.
'''
def Murnaghan(parameters, vol):
    """Murnaghan equation of state E(V); equation from PRB 28, 5480 (1983).

    parameters = [E0, B0, BP, V0]; vol may be a scalar or a numpy array.
    """
    e0, b0, bp, v0 = parameters[0], parameters[1], parameters[2], parameters[3]
    return e0 + b0 * vol / bp * (((v0 / vol) ** bp) / (bp - 1) + 1) \
        - v0 * b0 / (bp - 1.0)
def Murnaghan_pv(parameters, vol):
    """Murnaghan pressure P(V) for parameters [E0, B0, BP, V0]."""
    b0, bp, v0 = parameters[1], parameters[2], parameters[3]
    return b0 / bp * ((v0 / vol) ** bp - 1.0)
def eos_fit_fun(pars, y, x):
    """Residual y - Murnaghan(pars, x); the objective minimized by leastsq."""
    return y - Murnaghan(pars, x)
def get_ev_fit(v, e):
    """Fit the Murnaghan EOS to (v, e) data; v in \A^3, e in eV.

    Returns the fitted parameters [E0, B0, BP, V0].
    Based on http://gilgamesh.cheme.cmu.edu/doc/software/jacapo/
    appendices/appendix-eos.html
    """
    from scipy.optimize import leastsq
    # A parabola e = a*v^2 + b*v + c is a poor fit but gives analytic
    # starting guesses (np.polyfit replaces the needless pylab import,
    # which pulled in all of matplotlib just for this one function):
    #   V0 = -b/2a  (where dE/dV = 0)
    #   E0 = a*V0^2 + b*V0 + c  (the minimum energy)
    #   B0 = V0 * d2E/dV2 = 2a*V0
    #   BP ~ 4 from experience.
    a, b, c = np.polyfit(v, e, 2)
    v0 = -b / (2 * a)
    e0 = a * v0**2 + b * v0 + c
    b0 = 2 * a * v0
    bP = 4
    # initial guesses in the same order used in the Murnaghan function
    x0 = [e0, b0, bP, v0]
    murnpars, ier = leastsq(eos_fit_fun, x0, args=(e, v))
    return murnpars
def h5get_mfit_ev(nmesh_fac=10, fsave='results.h5', path='/lapw'):
    '''Calculate and save Murnaghan fitting results in *fsave*.
    Interpolated e-v and p-v data, on a volume mesh denser than the
    original one by a factor of *nmesh_fac*, are also stored, and e-v /
    p-v plots are written to PDF files.
    '''
    # Get e,v data previously stored in the HDF5 file.
    with h5py.File(fsave, 'r') as f:
        e_list = f[path+'/etot_list'][...]
        v_list = f['/vol_list'][...]
    # Fit the Murnaghan EOS and evaluate it on the dense volume mesh.
    murnpars = get_ev_fit(v_list, e_list)
    vh = np.linspace(v_list[0], v_list[-1], nmesh_fac * len(v_list) - 1)
    eh = Murnaghan(murnpars, vh)
    ph = Murnaghan_pv(murnpars, vh)*units.eVA_GPa
    # Overwrite any previous fit results under <path>/eosfit.
    with h5py.File(fsave, 'a') as f:
        if path+'/eosfit' in f:
            del f[path+'/eosfit']
        f[path+'/eosfit/e0'] = murnpars[0]
        f[path+'/eosfit/b0'] = murnpars[1]
        f[path+'/eosfit/bp'] = murnpars[2]
        f[path+'/eosfit/v0'] = murnpars[3]
        f[path+'/eosfit/v_list'] = vh
        f[path+'/eosfit/e_list'] = eh
        f[path+'/eosfit/p_list'] = ph
    splot.xy2_plot([v_list, vh], [e_list, eh], ['o', '-'], ['raw', 'fitting'],
            xlabel='V ($\AA^3$/primitive cell)',
            ylabel='E (eV/primitive cell)', fsave=path+'_evfit.pdf')
    splot.xy_plot(vh, ph, xlabel='V ($\AA^3$/primitive cell)',
            ylabel='P (GPa)', fsave=path+'_pvfit.pdf')
def eos_spline(v, e, tol):
    """Spline-interpolate E(V) and derive pressure and bulk modulus.

    v in \A^3, e in eV.  Returns (vh, eh, ph, bh) on a 10x denser volume
    mesh: energies (eV), pressures P = -dE/dV (GPa) and bulk moduli
    B = V * d2E/dV2 (GPa).
    """
    from scipy.interpolate import UnivariateSpline
    s = UnivariateSpline(v, e, k=3, s=tol)
    vh = np.linspace(v[0], v[-1], 10 * len(v) - 1)
    eh = [s.derivatives(x)[0] for x in vh]
    ph = [-s.derivatives(x)[1] * units.eVA_GPa for x in vh]
    # Bug fix: B must be evaluated AT volume x.  The original multiplied by
    # vh[i] where i was the float loop value, which raises on a numpy array
    # (arrays only accept integer indices).
    bh = [s.derivatives(x)[2] * x * units.eVA_GPa for x in vh]
    return vh, eh, ph, bh
| [
"pyglib.basic.splot.xy_plot",
"pylab.polyfit",
"pyglib.basic.splot.xy2_plot",
"h5py.File",
"scipy.optimize.leastsq",
"scipy.interpolate.UnivariateSpline"
] | [((1148, 1164), 'pylab.polyfit', 'polyfit', (['v', 'e', '(2)'], {}), '(v, e, 2)\n', (1155, 1164), False, 'from pylab import polyfit\n'), ((1844, 1881), 'scipy.optimize.leastsq', 'leastsq', (['eos_fit_fun', 'x0'], {'args': '(e, v)'}), '(eos_fit_fun, x0, args=(e, v))\n', (1851, 1881), False, 'from scipy.optimize import leastsq\n'), ((2900, 3079), 'pyglib.basic.splot.xy2_plot', 'splot.xy2_plot', (['[v_list, vh]', '[e_list, eh]', "['o', '-']", "['raw', 'fitting']"], {'xlabel': '"""V ($\\\\AA^3$/primitive cell)"""', 'ylabel': '"""E (eV/primitive cell)"""', 'fsave': "(path + '_evfit.pdf')"}), "([v_list, vh], [e_list, eh], ['o', '-'], ['raw', 'fitting'],\n xlabel='V ($\\\\AA^3$/primitive cell)', ylabel='E (eV/primitive cell)',\n fsave=path + '_evfit.pdf')\n", (2914, 3079), True, 'import pyglib.basic.splot as splot\n'), ((3097, 3206), 'pyglib.basic.splot.xy_plot', 'splot.xy_plot', (['vh', 'ph'], {'xlabel': '"""V ($\\\\AA^3$/primitive cell)"""', 'ylabel': '"""P (GPa)"""', 'fsave': "(path + '_pvfit.pdf')"}), "(vh, ph, xlabel='V ($\\\\AA^3$/primitive cell)', ylabel=\n 'P (GPa)', fsave=path + '_pvfit.pdf')\n", (3110, 3206), True, 'import pyglib.basic.splot as splot\n'), ((3413, 3447), 'scipy.interpolate.UnivariateSpline', 'UnivariateSpline', (['v', 'e'], {'k': '(3)', 's': 'tol'}), '(v, e, k=3, s=tol)\n', (3429, 3447), False, 'from scipy.interpolate import UnivariateSpline\n'), ((2185, 2206), 'h5py.File', 'h5py.File', (['fsave', '"""r"""'], {}), "(fsave, 'r')\n", (2194, 2206), False, 'import h5py\n'), ((2516, 2537), 'h5py.File', 'h5py.File', (['fsave', '"""a"""'], {}), "(fsave, 'a')\n", (2525, 2537), False, 'import h5py\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 27 23:58:37 2020
@author: manal
"""
import numpy as np
import GPy
from GPy.kern.src.stationary import Stationary
class Cosine_prod(Stationary):
    """
    Cosine kernel:
    Product of 1D Cosine kernels
    .. math::
        &k(x,x')_i = \sigma^2 \prod_{j=1}^{dimension} \cos(x_{i,j}-x_{i,j}')
        &x,x' \in \mathcal{M}_{n,dimension}
        &k \in \mathcal{M}_{n,n}
    """
    def __init__(self, input_dim, variance=1., lengthscale=None, ARD=False, active_dims=None, name='Cosine_prod'):
        super(Cosine_prod, self).__init__(input_dim, variance, lengthscale, ARD, active_dims, name)

    def K_of_r(self, dist):
        """Kernel value from the pairwise-difference tensor *dist* (n1 x n2 x d)."""
        n = dist.shape[2]
        p = 1
        # l = self.lengthscale
        for k in range(n):
            p*= np.cos(dist[:,:,k])#/l)
        return self.variance * p

    def K(self, X, X2):
        """Covariance matrix between row sets X and X2."""
        dist = X[:,None,:]-X2[None,:,:]
        return self.K_of_r(dist)

    def dK_dr(self,dist,dimX):
        """Derivative of K w.r.t. the dimX-th coordinate difference."""
        n = dist.shape[2]
        m = dist.shape[0]
        # l = self.lengthscale
        dK = np.zeros((m,m,n))
        for i in range(n):
            dK[:,:,i]= np.cos(dist[:,:,i])#/l)
        # d/dx cos(x) = -sin(x) in the differentiated dimension only.
        dK[:,:,dimX] = -np.sin(dist[:,:,dimX])#/l)
        return self.variance * np.prod(dK,2)#/l

    def dK_dX(self, X, X2, dimX):
        """Gradient of K w.r.t. X along dimension dimX."""
        dist = X[:,None,:]-X2[None,:,:]
        dK_dr = self.dK_dr(dist,dimX)
        return dK_dr

    def dK_dX2(self,X,X2,dimX2):
        """Gradient w.r.t. X2: the sign flips because k depends on X - X2."""
        return -self.dK_dX(X,X2, dimX2)

    def dK2_dXdX2(self, X, X2, dimX, dimX2):
        """Mixed second derivative w.r.t. X (dimX) and X2 (dimX2)."""
        dist = X[:,None,:]-X2[None,:,:]
        K = self.K_of_r(dist)
        n = dist.shape[2]
        m = dist.shape[0]
        # l = self.lengthscale
        dK = np.zeros((m,m,n))
        for i in range(n):
            dK[:,:,i]= np.cos(dist[:,:,i])#/l)
        dK[:,:,dimX] = np.sin(dist[:,:,dimX])#/l)
        dK[:,:,dimX2] = np.sin(dist[:,:,dimX2])#/l)
        # Same-dimension case reduces to K itself; cross terms pick up two
        # sine factors with a negative sign.
        return ((dimX==dimX2)*K - (dimX!=dimX2)*np.prod(dK,2))#/(l**2)
| [
"numpy.sin",
"numpy.prod",
"numpy.zeros",
"numpy.cos"
] | [((1126, 1145), 'numpy.zeros', 'np.zeros', (['(m, m, n)'], {}), '((m, m, n))\n', (1134, 1145), True, 'import numpy as np\n'), ((1749, 1768), 'numpy.zeros', 'np.zeros', (['(m, m, n)'], {}), '((m, m, n))\n', (1757, 1768), True, 'import numpy as np\n'), ((1864, 1888), 'numpy.sin', 'np.sin', (['dist[:, :, dimX]'], {}), '(dist[:, :, dimX])\n', (1870, 1888), True, 'import numpy as np\n'), ((1915, 1940), 'numpy.sin', 'np.sin', (['dist[:, :, dimX2]'], {}), '(dist[:, :, dimX2])\n', (1921, 1940), True, 'import numpy as np\n'), ((839, 860), 'numpy.cos', 'np.cos', (['dist[:, :, k]'], {}), '(dist[:, :, k])\n', (845, 860), True, 'import numpy as np\n'), ((1194, 1215), 'numpy.cos', 'np.cos', (['dist[:, :, i]'], {}), '(dist[:, :, i])\n', (1200, 1215), True, 'import numpy as np\n'), ((1242, 1266), 'numpy.sin', 'np.sin', (['dist[:, :, dimX]'], {}), '(dist[:, :, dimX])\n', (1248, 1266), True, 'import numpy as np\n'), ((1300, 1314), 'numpy.prod', 'np.prod', (['dK', '(2)'], {}), '(dK, 2)\n', (1307, 1314), True, 'import numpy as np\n'), ((1817, 1838), 'numpy.cos', 'np.cos', (['dist[:, :, i]'], {}), '(dist[:, :, i])\n', (1823, 1838), True, 'import numpy as np\n'), ((1991, 2005), 'numpy.prod', 'np.prod', (['dK', '(2)'], {}), '(dK, 2)\n', (1998, 2005), True, 'import numpy as np\n')] |
import logging
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s (%(threadName)s-%(process)d) %(message)s")
__version__ = "2.2.0"
| [
"logging.basicConfig"
] | [((16, 137), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(asctime)s %(levelname)s (%(threadName)s-%(process)d) %(message)s"""'}), "(level=logging.INFO, format=\n '%(asctime)s %(levelname)s (%(threadName)s-%(process)d) %(message)s')\n", (35, 137), False, 'import logging\n')] |
import logging
import time
from pathlib import Path
from subprocess import call
import cli
def parse_args(args):
    """Build and apply the CLI parser.

    Two subcommands ('slurm' and 'default') select how the target script is
    launched; the chosen one is available as ``args.dtype``.
    """
    parser = cli.argparser()
    subparsers = parser.add_subparsers(
        help="Arguments for specific action.", dest="dtype"
    )
    subparsers.required = True
    # --- 'slurm' subcommand: submit the script through sbatch -------------
    slurm = subparsers.add_parser("slurm", help='use SLURM to submit jobs')
    slurm.add_argument(
        "script", type=str,
        help='path to script to run'
    )
    slurm.add_argument(
        "--python", default=f'{Path.home()}/anaconda3/envs/deep/bin/python', type=str,
        help='path to ext python to run program with'
    )
    slurm.add_argument(
        "--task", action='append',
        help='any additional flags you want to run the script with'
    )
    slurm.add_argument(
        "--taskname", action='append',
        help='allies name for each task'
    )
    slurm.add_argument(
        "--outdir", default='/clusterfs/fiona/thayer/opticalaberrations/models', type=str,
        help='output directory'
    )
    slurm.add_argument(
        "--partition", default='abc', type=str,
    )
    slurm.add_argument(
        "--qos", default='abc_high', type=str,
        help='using `abc_high` for unlimited runtime',
    )
    slurm.add_argument(
        "--gpus", default=1, type=int,
        help='number of GPUs to use for this job'
    )
    slurm.add_argument(
        "--cpus", default=5, type=int,
        help='number of CPUs to use for this job'
    )
    slurm.add_argument(
        "--mem", default='160G', type=str,
        help='requested RAM to use for this job'
    )
    slurm.add_argument(
        "--name", default='train', type=str,
        help='allies name for this job'
    )
    slurm.add_argument(
        "--job", default='job.slm', type=str,
        help='path to slurm job template'
    )
    slurm.add_argument(
        "--constraint", default=None, type=str,
        help='select a specific node type eg. titan'
    )
    # --- 'default' subcommand: run the script with plain python -----------
    default = subparsers.add_parser("default", help='run a job using default python')
    default.add_argument(
        "script", type=str,
        help='path to script to run'
    )
    default.add_argument(
        "--python", default=f'{Path.home()}/anaconda3/envs/deep/bin/python', type=str,
        help='path to ext python to run program with'
    )
    default.add_argument(
        "--flags", default='', type=str,
        help='any additional flags you want to run the script with'
    )
    default.add_argument(
        "--outdir", default='/clusterfs/fiona/thayer/opticalaberrations/models', type=str,
        help='output directory'
    )
    default.add_argument(
        "--name", default='train', type=str,
        help='allies name for this job'
    )
    return parser.parse_args(args)
def main(args=None):
    """Assemble and execute the launch command (direct run or sbatch).

    NOTE(review): both branches build the command as a single string and run
    it with shell=True, so argument values are interpreted by the shell --
    acceptable for trusted CLI use, but not for untrusted input.
    """
    args = parse_args(args)
    outdir = Path(f"{args.outdir}/{args.name}")
    outdir.mkdir(exist_ok=True, parents=True)
    # Wrap the script in /usr/bin/time to capture a resource-usage profile.
    profiler = f"/usr/bin/time -v -o {outdir}/{args.script.split('.')[0]}_profile.log "
    if args.dtype == 'default':
        # Run locally, teeing stdout/stderr to a log file in outdir.
        sjob = profiler
        sjob += f"{args.python} "
        sjob += f"{args.script} "
        sjob += f" --outdir {outdir} {args.flags} 2>&1 | tee {outdir}/{args.script.split('.')[0]}.log"
        call([sjob], shell=True)
    elif args.dtype == 'slurm':
        # Submit through sbatch; per-task commands are passed to the job
        # template via exported TASK_<i> environment variables.
        sjob = '/usr/bin/sbatch '
        sjob += f' --qos={args.qos} '
        sjob += f' --partition={args.partition} '
        if args.constraint is not None:
            sjob += f" -C '{args.constraint}' "
        sjob += f' --gres=gpu:{args.gpus} '
        sjob += f' --cpus-per-task={args.cpus} '
        sjob += f" --mem='{args.mem}' "
        sjob += f" --job-name={args.name} "
        sjob += f" --output={outdir}/{args.script.split('.')[0]}.log"
        sjob += f" --export=ALL,"
        sjob += f"PROFILER='{profiler}',"
        sjob += f"SCRIPT='{args.script}',"
        sjob += f"PYTHON='{args.python}',"
        sjob += f"JOBS='{len(args.task)}',"
        for i, (t, n) in enumerate(zip(args.task, args.taskname)):
            sjob += f"TASK_{i + 1}='{profiler} {args.python} {args.script} --cpu_workers -1 --gpu_workers -1 --outdir {outdir/n} {t}'"
            sjob += ',' if i < len(args.task)-1 else ' '
        sjob += args.job
        call([sjob], shell=True)
    else:
        logging.error('Unknown action')
# Script entry point.
if __name__ == "__main__":
    main()
| [
"pathlib.Path",
"pathlib.Path.home",
"subprocess.call",
"cli.argparser",
"logging.error"
] | [((129, 144), 'cli.argparser', 'cli.argparser', ([], {}), '()\n', (142, 144), False, 'import cli\n'), ((2845, 2879), 'pathlib.Path', 'Path', (['f"""{args.outdir}/{args.name}"""'], {}), "(f'{args.outdir}/{args.name}')\n", (2849, 2879), False, 'from pathlib import Path\n'), ((3250, 3274), 'subprocess.call', 'call', (['[sjob]'], {'shell': '(True)'}), '([sjob], shell=True)\n', (3254, 3274), False, 'from subprocess import call\n'), ((4265, 4289), 'subprocess.call', 'call', (['[sjob]'], {'shell': '(True)'}), '([sjob], shell=True)\n', (4269, 4289), False, 'from subprocess import call\n'), ((4309, 4340), 'logging.error', 'logging.error', (['"""Unknown action"""'], {}), "('Unknown action')\n", (4322, 4340), False, 'import logging\n'), ((512, 523), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (521, 523), False, 'from pathlib import Path\n'), ((2212, 2223), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (2221, 2223), False, 'from pathlib import Path\n')] |
from dataclasses import dataclass, field
from typing import Any, Dict, List
from aiographql.client.error import GraphQLError
from aiographql.client.request import GraphQLRequestContainer
@dataclass(frozen=True)
class GraphQLBaseResponse(GraphQLRequestContainer):
    # Raw JSON body returned by the GraphQL endpoint.
    json: Dict[str, Any] = field(default_factory=dict)
@dataclass(frozen=True)
class GraphQLResponse(GraphQLBaseResponse):
    """
    GraphQL response wrapping the returned data and any errors.  It also
    carries a copy of the :class:`GraphQLRequest` that produced it.
    """

    @property
    def errors(self) -> List[GraphQLError]:
        """
        A list of :class:`GraphQLError` objects if server responded with query errors.
        """
        raw_errors = self.json.get("errors", [])
        return [GraphQLError.load(item) for item in raw_errors]

    @property
    def data(self) -> Dict[str, Any]:
        """The data payload the server responded with."""
        return self.json.get("data", {})

    @property
    def query(self) -> str:
        """The query string used to produce this response."""
        return self.request.query
| [
"dataclasses.dataclass",
"dataclasses.field",
"aiographql.client.error.GraphQLError.load"
] | [((191, 213), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (200, 213), False, 'from dataclasses import dataclass, field\n'), ((324, 346), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (333, 346), False, 'from dataclasses import dataclass, field\n'), ((293, 320), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (298, 320), False, 'from dataclasses import dataclass, field\n'), ((761, 785), 'aiographql.client.error.GraphQLError.load', 'GraphQLError.load', (['error'], {}), '(error)\n', (778, 785), False, 'from aiographql.client.error import GraphQLError\n')] |
# Fichier main de gestion des ressources du robot
from micropython import const
from machine import *
from DRV8833 import *
from BME280 import *
import pycom
import time
import os
# Global variables for the motors and the H-bridge driver
DRV8833_Sleep_pin = "P20" # SLEEP pin
DRV8833_AIN1 = "P22" # Motor A PWM input: AIN1
DRV8833_AIN2 = "P21" # Motor A PWM input: AIN2
DRV8833_BIN1 = "P19" # Motor B PWM input: BIN1
DRV8833_BIN2 = "P12" # Motor B PWM input: BIN2
# Wheel rotation speed levels
V_MAX = 1.0
V_MOYEN = 0.5
V_MIN = 0.25
# ---------------------------------------------------------------------------
# Routines de déplacements du robot
def Avancer(vitesse):
    """Drive forward: spin both wheels towards the front at ``vitesse``."""
    Moteur_Gauche.Cmde_moteur(SENS_ANTI_HORAIRE, vitesse)
    Moteur_Droit.Cmde_moteur(SENS_HORAIRE, vitesse)
def Reculer(vitesse):
    """Drive backwards: spin both wheels towards the rear at ``vitesse``."""
    Moteur_Gauche.Cmde_moteur(SENS_HORAIRE, vitesse)
    Moteur_Droit.Cmde_moteur(SENS_ANTI_HORAIRE, vitesse)
def Pivoter_droite(vitesse):
    """Pivot right on the spot: both motors anti-clockwise at ``vitesse``."""
    Moteur_Gauche.Cmde_moteur(SENS_ANTI_HORAIRE, vitesse)
    Moteur_Droit.Cmde_moteur(SENS_ANTI_HORAIRE, vitesse)
def Pivoter_gauche(vitesse):
    """Pivot left on the spot: both motors clockwise at ``vitesse``."""
    Moteur_Gauche.Cmde_moteur(SENS_HORAIRE, vitesse)
    Moteur_Droit.Cmde_moteur(SENS_HORAIRE, vitesse)
def Arret():
    """Stop the robot: command both motors with a zero speed."""
    Moteur_Gauche.Cmde_moteur(SENS_HORAIRE, 0)
    Moteur_Droit.Cmde_moteur(SENS_HORAIRE, 0)
# ------------------------------------------------------------------------
# Motor initialisation
# IN1_pin : DRV8833 PWM input 1
# IN2_pin : DRV8833 PWM input 2
# sleep_pin : SLP pin used to disable the DRV8833 H-bridges
# timer_number : one of [0,1,2,3]; timer used to generate the PWM signal
# freq : PWM signal frequency
# num_channel_pwm_In1 : PWM channel id associated with pin In1_pin
# num_channel_pwm_In2 : PWM channel id associated with pin In2_pin
# DRV8833 (In1_pin, In2_pin, sleep_pin, timer_number, freq, num_channel_pwm_In1, num_channel_pwm_In2)
Moteur_Gauche = DRV8833(
    DRV8833_AIN1, DRV8833_AIN2, DRV8833_Sleep_pin, 1, 500, 0, 1
)  # On the Encoder1 connector
Moteur_Droit = DRV8833(
    DRV8833_BIN1, DRV8833_BIN2, DRV8833_Sleep_pin, 1, 500, 2, 3
)  # On the Encoder2 connector
Arret()
# I2C bus setup and BME280 environment-sensor probe
bus_i2c = I2C()
bus_i2c.init(I2C.MASTER, baudrate=400000)
adr = bus_i2c.scan()
Id_BME280 = bus_i2c.readfrom_mem(BME280_I2C_ADR, BME280_CHIP_ID_ADDR, 1)
capteur_BME280 = BME280(BME280_I2C_ADR, bus_i2c)  # -- sensor calibration below
capteur_BME280.Calibration_Param_Load()
# Real-time clock, seeded with an arbitrary start date
rtc = RTC()
rtc.init((2020, 10, 26, 0, 0, 0, 0, 0))
jour = rtc.now()
date = "Date : " + str(jour[0]) + "/" + str(jour[1]) + "/" + str(jour[2])
print("L'adresse du périphérique I2C est :", adr)
print("Valeur ID BME280 :", hex(Id_BME280[0]))
# Main loop: report time + environment readings, then run a fixed
# movement demo sequence (forward, back, pivot right/left, stop).
while True:
    jour = rtc.now()
    temps = str(jour[3]) + "h " + str(jour[4]) + "m " + str(jour[5]) + "s"
    temp = capteur_BME280.read_temp()
    humi = capteur_BME280.read_humidity()
    pres = capteur_BME280.read_pression()
    print("-------------------------------------------------------------------")
    print(
        "Temps passé :",
        temps,
        "- Température :",
        "%.2f" % temp,
        "- Humidité :",
        "%.2f" % humi,
        "- Préssion :",
        "%.2f" % pres,
    )
    print("--------------")
    print("-> Démarage")
    print("-Avancer")
    Avancer(V_MIN)
    time.sleep(2)
    print("-Reculer")
    Reculer(V_MIN)
    time.sleep(2)
    print("-Pivoter droite")
    Pivoter_droite(V_MIN)
    time.sleep(2)
    print("-Pivoter gauche")
    Pivoter_gauche(V_MIN)
    time.sleep(2)
    print("-> Arret")
    Arret()
    time.sleep(2)
"""
Index = 0
while True :
    print('Index : ', Index)
    # Définition d'une séquence de mouvements
    time.sleep(0.25)
    Index +=1
"""
| [
"time.sleep"
] | [((3406, 3419), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3416, 3419), False, 'import time\n'), ((3468, 3481), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3478, 3481), False, 'import time\n'), ((3544, 3557), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3554, 3557), False, 'import time\n'), ((3620, 3633), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3630, 3633), False, 'import time\n'), ((3675, 3688), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (3685, 3688), False, 'import time\n')] |
from json import dumps
from typing import Callable
from flask.json import jsonify
from flask.wrappers import Response
from flask_verify.verify_json import verify_json_response
from pytest import raises
@verify_json_response
def _view_function_response() -> Response:
    """
    Endpoint that already returns a well-formed JSON Response.
    Positive case: the decorator should pass it through untouched.
    """
    payload = dumps({"message": "This is a JSON."})
    return Response(payload, status=200, content_type='application/json')
@verify_json_response
def _view_function_response_failure() -> Response:
    """
    Endpoint returning a malformed (non-JSON) response.
    Negative case: should raise, surfacing to the client as a 500.
    """
    return Response("This is obviously not JSON.", status=200,
                    content_type='plain/text')
@verify_json_response
def _view_function_tuple(dictionary: dict) -> tuple[dict, int]:
    """
    Endpoint returning a ``(dict, status)`` tuple; the decorator should
    convert it into a proper JSON Response.
    """
    return (dictionary, 200)
@verify_json_response
def _view_function_tuple_failure() -> tuple[Callable, int]:
    """
    Endpoint whose payload (a function) cannot be serialised to JSON;
    the decorator should raise a TypeError.
    """
    unserialisable = lambda x: 1  # noqa: E731 - deliberately not JSON-able
    return unserialisable, 20
@verify_json_response
def _view_function_tuple_pack() -> tuple[dict, int, int]:
    """
    Endpoint returning one tuple item too many; the decorator should
    raise a TypeError.
    """
    return {"msg": "This is a JSON."}, 200, 0
@verify_json_response
def _view_function_invalid_status() -> tuple[dict, str]:
    """
    Endpoint whose second tuple item is not an integer status code;
    the decorator should raise a TypeError.
    """
    return {"msg": "This is okay."}, "This is not a status."
def test_already_response() -> None:
    """A view that already returns a Response must come through unchanged."""
    expected = Response(dumps({"message": "This is a JSON."}),
                        status=200, content_type='application/json')
    actual = _view_function_response()
    assert actual.response == expected.response
    assert actual.status_code == expected.status_code
    assert actual.content_type == expected.content_type
def test_non_json_response() -> None:
    """A Response whose content type is not JSON must raise a TypeError."""
    with raises(TypeError):
        _view_function_response_failure()
def test_tuple_response() -> None:
    """A ``(dict, status)`` tuple must be auto-converted into a JSON Response."""
    body = {"message": "This should be converted to JSON."}
    expected = Response(dumps(body), status=200, content_type='application/json')
    actual = _view_function_tuple(body)
    assert actual.content_type == expected.content_type
    assert actual.status_code == expected.status_code
    assert actual.response == expected.response
def test_tuple_response_fail() -> None:
    """Every malformed tuple-returning view must raise a TypeError."""
    for failing_view in (_view_function_invalid_status,
                         _view_function_tuple_failure,
                         _view_function_tuple_pack):
        with raises(TypeError):
            failing_view()
| [
"json.dumps",
"pytest.raises",
"flask.wrappers.Response"
] | [((779, 857), 'flask.wrappers.Response', 'Response', (['"""This is obviously not JSON."""'], {'content_type': '"""plain/text"""', 'status': '(200)'}), "('This is obviously not JSON.', content_type='plain/text', status=200)\n", (787, 857), False, 'from flask.wrappers import Response\n'), ((421, 458), 'json.dumps', 'dumps', (["{'message': 'This is a JSON.'}"], {}), "({'message': 'This is a JSON.'})\n", (426, 458), False, 'from json import dumps\n'), ((2032, 2069), 'json.dumps', 'dumps', (["{'message': 'This is a JSON.'}"], {}), "({'message': 'This is a JSON.'})\n", (2037, 2069), False, 'from json import dumps\n'), ((2464, 2481), 'pytest.raises', 'raises', (['TypeError'], {}), '(TypeError)\n', (2470, 2481), False, 'from pytest import raises\n'), ((2820, 2837), 'json.dumps', 'dumps', (['dictionary'], {}), '(dictionary)\n', (2825, 2837), False, 'from json import dumps\n'), ((3392, 3409), 'pytest.raises', 'raises', (['TypeError'], {}), '(TypeError)\n', (3398, 3409), False, 'from pytest import raises\n')] |
import numpy as np
import sys
import cv2
sys.path.append("../")
from utils.config import config
class TestLoader:
    """Sequential (optionally shuffled) loader serving one image batch at a time.

    ``imdb`` is a list of image file paths; each :meth:`next` call loads the
    image at the current cursor with ``cv2.imread`` and advances the cursor.
    """

    def __init__(self, imdb, batch_size=1, shuffle=False):
        self.imdb = imdb
        self.batch_size = batch_size
        self.shuffle = shuffle
        self.size = len(imdb)  # total number of samples
        self.cur = 0
        self.data = None
        self.label = None
        self.reset()
        self.get_batch()

    def reset(self):
        """Rewind the cursor and, if requested, shuffle the sample list in place."""
        self.cur = 0
        if self.shuffle:
            np.random.shuffle(self.imdb)

    def iter_next(self):
        """True while at least one full batch remains beyond the cursor."""
        return self.cur + self.batch_size <= self.size

    def __iter__(self):
        return self

    def __next__(self):
        return self.next()

    def next(self):
        """Return the current batch data and advance; raise StopIteration when done."""
        if not self.iter_next():
            raise StopIteration
        self.get_batch()
        self.cur += self.batch_size
        return self.data

    def getindex(self):
        """Index of the current batch (float, mirroring the original true division)."""
        return self.cur / self.batch_size

    def getpad(self):
        """Number of missing samples in the final, possibly short, batch."""
        overshoot = self.cur + self.batch_size - self.size
        return overshoot if overshoot > 0 else 0

    def get_batch(self):
        """Load the image at the current cursor position into ``self.data``."""
        image_path = self.imdb[self.cur]
        self.data = cv2.imread(image_path)
| [
"numpy.random.shuffle",
"sys.path.append",
"cv2.imread"
] | [((41, 63), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (56, 63), False, 'import sys\n'), ((1226, 1242), 'cv2.imread', 'cv2.imread', (['imdb'], {}), '(imdb)\n', (1236, 1242), False, 'import cv2\n'), ((516, 544), 'numpy.random.shuffle', 'np.random.shuffle', (['self.imdb'], {}), '(self.imdb)\n', (533, 544), True, 'import numpy as np\n')] |
import time
from unittest import mock
import pytest
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ValidationError
from django.db import IntegrityError
from django.http import Http404
from django.test import RequestFactory, TestCase
from django.urls import reverse
from wagtail.admin.edit_handlers import ObjectList
from wagtail.core.blocks.stream_block import StreamBlockValidationError
from wagtail.core.models import Collection
from wagtail.images import get_image_model
from wagtail.images.tests.utils import get_test_image_file
from wagtail.tests.utils import WagtailPageTests, WagtailTestUtils
from wagtail_factories import ImageFactory
from core.mixins import AuthenticatedUserRequired
from core.models import (
AbstractObjectHash,
CaseStudyRelatedPages,
Country,
CuratedListPage,
DetailPage,
IndustryTag,
InterstitialPage,
LandingPage,
LessonPlaceholderPage,
ListPage,
MagnaPageChooserPanel,
Product,
Region,
Tag,
TopicPage,
case_study_body_validation,
)
from domestic.models import DomesticDashboard, DomesticHomePage, GreatDomesticHomePage
from tests.helpers import SetUpLocaleMixin, make_test_video
from tests.unit.core import factories
from .factories import (
CaseStudyFactory,
DetailPageFactory,
LessonPlaceholderPageFactory,
StructurePageFactory,
TopicPageFactory,
)
def test_object_hash():
    """generate_content_hash returns the MD5 hex digest of the file-like object's bytes."""
    mocked_file = mock.Mock()
    mocked_file.read.return_value = b'foo'
    # md5(b'foo') == acbd18db4cc2f85cedef654fccc4a4d8. Local renamed from
    # `hash`, which shadowed the builtin of the same name.
    content_hash = AbstractObjectHash.generate_content_hash(mocked_file)
    assert content_hash == 'acbd18db4cc2f85cedef654fccc4a4d8'
@pytest.mark.django_db
def test_detail_page_can_mark_as_read(client, domestic_homepage, user, domestic_site, mock_get_user_profile):
    """Viewing a lesson records a read hit when the list page tracks progress."""
    # given the user has not read a lesson
    client.force_login(user)
    list_page = factories.ListPageFactory(parent=domestic_homepage, record_read_progress=True)
    curated_list_page = factories.CuratedListPageFactory(parent=list_page)
    topic_page = factories.TopicPageFactory(parent=curated_list_page)
    detail_page = factories.DetailPageFactory(parent=topic_page)
    client.get(detail_page.url)
    # then the progress is saved
    read_hit = detail_page.page_views.get()
    assert read_hit.sso_id == str(user.pk)
    assert read_hit.list_page == list_page
@pytest.mark.django_db
def test_detail_page_cannot_mark_as_read(client, domestic_homepage, user, domestic_site, mock_get_user_profile):
    """No read hit is recorded when the list page has record_read_progress=False."""
    # given the user has not read a lesson
    client.force_login(user)
    list_page = factories.ListPageFactory(parent=domestic_homepage, record_read_progress=False)
    curated_list_page = factories.CuratedListPageFactory(parent=list_page)
    topic_page = factories.TopicPageFactory(parent=curated_list_page)
    detail_page = factories.DetailPageFactory(parent=topic_page)
    client.get(detail_page.url)
    # then no progress is recorded (progress tracking is off for this list page)
    assert detail_page.page_views.count() == 0
@pytest.mark.django_db
def test_detail_page_anon_user_not_marked_as_read(client, domestic_homepage, domestic_site, mock_get_user_profile):
    """Anonymous visits never record read progress."""
    # given the user has not read a lesson
    clp = factories.CuratedListPageFactory(parent=domestic_homepage)
    topic_page = factories.TopicPageFactory(parent=clp)
    detail_page = factories.DetailPageFactory(parent=topic_page)
    client.get(detail_page.url)
    # then the progress is unaffected
    assert detail_page.page_views.count() == 0
@pytest.mark.django_db
def test_curated_list_page_has_link_in_context_back_to_parent(
    client,
    domestic_homepage,
    domestic_site,
    mock_export_plan_detail_list,
    patch_get_user_lesson_completed,
    user,
    mock_get_user_profile,
):
    """The module page renders a hyperlink back to its parent learning homepage."""
    list_page = factories.ListPageFactory(
        parent=domestic_homepage, record_read_progress=False, slug='example-learning-homepage'
    )
    curated_list_page = factories.CuratedListPageFactory(parent=list_page, slug='example-module')
    expected_url = list_page.url
    assert expected_url == '/example-learning-homepage/'
    client.force_login(user)  # because unauthed users get redirected
    resp = client.get(curated_list_page.url)
    # Make a more precise string to search for: one that's marked up as a
    # hyperlink target, at least
    expected_link_string = f'href="{expected_url}"'
    assert expected_link_string.encode('utf-8') in resp.content
@pytest.mark.django_db
@pytest.mark.parametrize(
'querystring_to_add,expected_backlink_value',
(
('', None),
('?return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F', '/export-plan/1/about-your-business/'),
(
'?return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar',
'/export-plan/1/about-your-business/?foo=bar',
),
(
'?bam=baz&return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar',
'/export-plan/1/about-your-business/?foo=bar', # NB: bam=baz should not be here
),
('?bam=baz&return-link=example%2Fexport-plan%2Fpath%2F%3Ffoo%3Dbar', None),
(
(
'?bam=baz&return-link=https%3A%2F%2Fphishing.example.com'
'%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'
),
None,
),
(
(
'?bam=baz&return-link=%3A%2F%2Fphishing.example.com'
'%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'
),
None,
),
('?bam=baz', None),
(
'?bam=baz&return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar',
'/export-plan/1/about-your-business/?foo=bar',
),
),
ids=(
'no backlink querystring present',
'backlink querystring present without encoded querystring of its own',
'backlink querystring present WITH encoded querystring of its own',
'backlink querystring present WITH encoded querystring and other args',
'backlink querystring present WITH bad payload - path does not start with / ',
'backlink querystring present WITH bad payload - path is a full URL',
'backlink querystring present WITH bad payload - path is a URL with flexible proto',
'backlink querystring NOT present BUT another querystring is',
'backlink querystring present WITH OTHER QUERYSTRING TOO',
),
)
def test_detail_page_get_context_handles_backlink_querystring_appropriately(
    rf, domestic_homepage, domestic_site, user, querystring_to_add, expected_backlink_value, export_plan_data
):
    """Only safe, relative `return-link` query values surface as `backlink` in context."""
    list_page = factories.ListPageFactory(parent=domestic_homepage, record_read_progress=False)
    curated_list_page = factories.CuratedListPageFactory(parent=list_page)
    topic_page = factories.TopicPageFactory(parent=curated_list_page)
    detail_page = factories.DetailPageFactory(parent=topic_page, template='learn/detail_page.html')
    lesson_page_url = detail_page.url
    if querystring_to_add:
        lesson_page_url += querystring_to_add
    request = rf.get(lesson_page_url)
    request.user = user
    context = detail_page.get_context(request)
    if expected_backlink_value is None:
        assert 'backlink' not in context
    else:
        assert context.get('backlink') == expected_backlink_value
@pytest.mark.django_db
@pytest.mark.parametrize(
'backlink_path,expected',
(
(None, None),
('', None),
('/export-plan/1/about-your-business/', 'About your business'),
('/export-plan/1/business-objectives/', 'Business objectives'),
('/export-plan/1/target-markets-research/', 'Target markets research'),
('/export-plan/1/adapting-your-product/', 'Adapting your product'),
('/export-plan/1/marketing-approach/', 'Marketing approach'),
('/export-plan/1/costs-and-pricing/', 'Costs and pricing'),
('/export-plan/1/funding-and-credit/', 'Funding and credit'),
('/export-plan/1/getting-paid/', 'Getting paid'),
('/export-plan/1/travel-plan/', 'Travel plan'),
('/export-plan/1/business-risk/', 'Business risk'),
('/export-plan/1/adapting-your-product/?foo=bar', 'Adapting your product'),
('/export-plan/', None),
('/path/that/will/not/match/anything/', None),
),
ids=(
'no backlink',
'empty string backlink',
'Seeking: About your business',
'Seeking: Business objectives',
'Seeking: Target markets research',
'Seeking: Adapting your product',
'Seeking: Marketing approach',
'Seeking: Costs and pricing',
'Seeking: Getting paid',
'Seeking: Funding and credit',
'Seeking: Travel plan',
'Seeking: Business risk',
'Valid backlink with querystring does not break name lookup',
'backlink for real page that is not an export plan step',
'backlink for a non-existent page',
),
)
def test_detail_page_get_context_gets_backlink_title_based_on_backlink(
    backlink_path,
    expected,
    en_locale,
):
    """_get_backlink_title maps known export-plan step paths to human-readable titles."""
    detail_page = factories.DetailPageFactory(template='learn/detail_page.html')
    assert detail_page._get_backlink_title(backlink_path) == expected
@pytest.mark.django_db
def test_case_study__str_method():
    """__str__ falls back to summary_context when title is empty, else uses title."""
    case_study = CaseStudyFactory(title='', summary_context='Test Co')
    assert f'{case_study}' == 'Test Co'
    case_study = CaseStudyFactory(title='Alice and Bob export to every continent', summary_context='Test Co')
    assert f'{case_study}' == 'Alice and Bob export to every continent'
@pytest.mark.django_db
def test_case_study__timestamps():
    """created/modified match on creation; a later save only bumps modified."""
    case_study = CaseStudyFactory(summary_context='Test Co')
    created = case_study.created
    # NOTE(review): this reads `.created` again — presumably `case_study.modified`
    # was intended; the equality assert below passes either way. Confirm intent.
    modified = case_study.created
    assert created == modified
    time.sleep(1)  # Forgive this - we need to have a real, later save
    case_study.save()
    case_study.refresh_from_db()
    assert case_study.created == created
    assert case_study.modified > modified
# Expected validation messages raised by `case_study_body_validation`
# (asserted against in the parametrised test below).
_case_study_top_level_error_message = (
    'This block must contain one Media section (with one or two items in it) and one Text section.'
)
_case_study_one_video_only_error_message = 'Only one video may be used in a case study.'
_case_study_video_order_error_message = 'The video must come before a still image.'
@pytest.mark.django_db
@pytest.mark.parametrize(
'block_type_values,exception_message',
(
(['text'], _case_study_top_level_error_message),
([('media', ('video',))], _case_study_top_level_error_message),
([], None),
(['text', 'text'], _case_study_top_level_error_message),
(['text', ('media', ('video', 'image'))], _case_study_top_level_error_message),
([('media', ('video',)), ('media', ('video',))], _case_study_top_level_error_message),
(['text', ('media', ('video', 'image')), 'text'], _case_study_top_level_error_message),
([('media', ('video', 'image')), 'text', ('media', ('video', 'image'))], _case_study_top_level_error_message),
([('media', ('video', 'image')), 'text'], None),
([('media', ('video',)), 'text'], None),
([('media', ('image',)), 'text'], None),
([('media', ('image', 'image')), 'text'], None),
([('media', ('image', 'video')), 'text'], _case_study_video_order_error_message),
([('media', ('video', 'video')), 'text'], _case_study_one_video_only_error_message),
(['quote', ('media', ('video', 'image')), 'text'], None),
(['quote', 'quote', ('media', ('video', 'image')), 'text'], None),
),
ids=(
'1. Top-level check: text node only: not fine',
'2. Top-level check: media node only: not fine',
'3. Top-level check: no nodes: fine - requirement is done at a higher level',
'4. Top-level check: two text nodes: not fine',
'5. Top-level check: text before media: not fine',
'6. Top-level check: two media nodes: not fine',
'7. Top-level check: text, media, text: not fine',
'8. Top-level check: media, text, media: not fine',
'9. media node (video and image) and text node: fine',
'10. media node (video only) and text node: fine',
'11. media node (image only) and text node: fine',
'12. media node (two images) and text node: fine',
'13. media node (image before video) and text node: not fine',
'14. media node (two videos) and text node: not fine',
'15. quote node, media node (video and image) and text node: fine',
'16. 2 quote nodes, media node (video and image) and text node: fine',
),
)
def test_case_study_body_validation(block_type_values, exception_message):
    """case_study_body_validation accepts exactly one media + one text section."""

    def _create_block(block_type):
        # Stand-in for a StreamField block: only block_type/value are inspected.
        mock_block = mock.Mock()
        mock_block.block_type = block_type
        return mock_block

    value = []
    for block_spec in block_type_values:
        if type(block_spec) == tuple:
            # (parent_type, (child_type, ...)) -> parent block with child blocks
            parent_block = _create_block(block_spec[0])
            children = []
            for subblock_spec in block_spec[1]:
                children.append(_create_block(subblock_spec))
            parent_block.value = children
            value.append(parent_block)
        else:
            value.append(_create_block(block_spec))
    if exception_message:
        with pytest.raises(StreamBlockValidationError) as ctx:
            case_study_body_validation(value)
        assert ctx.message == exception_message
    else:
        # should not blow up
        case_study_body_validation(value)
class LandingPageTests(WagtailPageTests):
    """Page-tree creation rules for LandingPage."""

    def test_can_be_created_under_homepage(self):
        self.assertAllowedParentPageTypes(
            LandingPage,
            {
                DomesticHomePage,
                GreatDomesticHomePage,
            },
        )

    def test_allowed_subtypes(self):
        # Renamed from `test_can_be_created_under_landing_page`: this asserts
        # which pages may be created *under* a LandingPage, matching the
        # naming convention of the sibling *PageTests classes.
        self.assertAllowedSubpageTypes(LandingPage, {ListPage, InterstitialPage, DomesticDashboard})
class ListPageTests(WagtailPageTests):
    """Page-tree creation rules for ListPage."""

    def test_can_be_created_under_landing_page(self):
        self.assertAllowedParentPageTypes(ListPage, {LandingPage})

    def test_allowed_subtypes(self):
        self.assertAllowedSubpageTypes(ListPage, {CuratedListPage})
class CuratedListPageTests(WagtailPageTests):
    """Page-tree creation rules for CuratedListPage."""

    def test_can_be_created_under_list_page(self):
        self.assertAllowedParentPageTypes(CuratedListPage, {ListPage})

    def test_allowed_subtypes(self):
        self.assertAllowedSubpageTypes(CuratedListPage, {TopicPage})
@pytest.mark.django_db
def test_curatedlistpage_count_detail_pages(curated_list_pages_with_lessons):
    """count_detail_pages counts real lessons only, ignoring placeholder pages."""
    data = curated_list_pages_with_lessons
    clp_1 = data[0][0]
    clp_2 = data[1][0]
    assert clp_1.count_detail_pages == 2  # 2 pages, placeholder ignored
    assert clp_2.count_detail_pages == 1  # 1 page only, no placeholders at all
class TopicPageTests(WagtailPageTests):
    """Page-tree creation rules for TopicPage."""

    def test_parent_page_types(self):
        self.assertAllowedParentPageTypes(TopicPage, {CuratedListPage})

    def test_allowed_subtypes(self):
        self.assertAllowedSubpageTypes(
            TopicPage,
            {
                DetailPage,
                LessonPlaceholderPage,
            },
        )
@pytest.mark.django_db
def test_topic_page_redirects_to_module(
    rf,
    domestic_homepage,
    domestic_site,
):
    """TopicPage.serve/serve_preview always redirect to the parent module page."""
    # The topic pages should never render their own content - they are basically
    # scaffolding to give us a sensible page tree. As such they shouldn't be
    # rendered
    list_page = factories.ListPageFactory(parent=domestic_homepage, record_read_progress=False)
    curated_list_page = factories.CuratedListPageFactory(parent=list_page)
    topic_page = TopicPageFactory(
        parent=curated_list_page,
    )
    # Check that we have the page tree set up correctly, else this is None
    assert curated_list_page.url is not None
    for page_method in ('serve', 'serve_preview'):
        request = rf.get(topic_page.url)
        resp = getattr(topic_page, page_method)(request)
        # NOTE(review): `_headers` is a private HttpResponse attribute (removed
        # in Django 3.2+); prefer `resp['Location']` — confirm Django version.
        assert resp._headers['location'] == ('Location', curated_list_page.url)
class LessonPlaceholderPageTests(WagtailPageTests):
    """Page-tree creation rules for LessonPlaceholderPage (no children allowed)."""

    def test_parent_page_types(self):
        self.assertAllowedParentPageTypes(LessonPlaceholderPage, {TopicPage})

    def test_allowed_subtypes(self):
        self.assertAllowedSubpageTypes(LessonPlaceholderPage, {})
@pytest.mark.django_db
def test_context_cms_generic_page(rf, domestic_homepage):
    """get_context always exposes the page itself under the `page` key."""
    assert 'page' in domestic_homepage.get_context(rf)
@pytest.mark.django_db
def test_placeholder_page_redirects_to_module(
    rf,
    domestic_homepage,
    domestic_site,
):
    """LessonPlaceholderPage.serve/serve_preview redirect to the parent module page."""
    # The topic pages should never render their own content and instead redirect
    list_page = factories.ListPageFactory(parent=domestic_homepage, record_read_progress=False)
    curated_list_page = factories.CuratedListPageFactory(parent=list_page)
    topic_page = TopicPageFactory(
        parent=curated_list_page,
    )
    placeholder_page = LessonPlaceholderPageFactory(parent=topic_page)
    # Check that we have the page tree set up correctly, else this is None
    assert curated_list_page.url is not None
    for page_method in ('serve', 'serve_preview'):
        request = rf.get(placeholder_page.url)
        resp = getattr(placeholder_page, page_method)(request)
        assert resp._headers['location'] == ('Location', curated_list_page.url)
@pytest.mark.django_db
def test_structure_page_redirects_to_http404(
    rf,
    domestic_homepage,
    domestic_site,
):
    """StructurePage.serve/serve_preview raise Http404 rather than rendering."""
    # The structure pages should never render their own content and instead return Http404
    structure_page = StructurePageFactory(parent=domestic_homepage)
    for page_method in ('serve', 'serve_preview'):
        request = rf.get('/foo/')
        with pytest.raises(Http404):
            getattr(structure_page, page_method)(request)
class DetailPageTests(SetUpLocaleMixin, WagtailPageTests):
    """Page-tree and hero-field validation rules for DetailPage."""

    def test_parent_page_types(self):
        self.assertAllowedParentPageTypes(DetailPage, {TopicPage})

    def test_detail_page_creation_for_single_hero_image(self):
        detail_page = DetailPageFactory(hero=[('Image', ImageFactory())])
        # `assertTrue` replaces the long-deprecated `assert_` alias (removed in
        # Python 3.12); behaviour is identical (second positional arg was msg).
        self.assertTrue(detail_page)

    def test_validation_kick_for_multiple_hero_image(self):
        with pytest.raises(ValidationError):
            detail_page = DetailPageFactory(hero=[('Image', ImageFactory()), ('Image', ImageFactory())])
            # Unreachable when the factory raises (as expected); kept to mirror
            # the original structure.
            self.assertTrue(detail_page)
@pytest.mark.django_db
def test_redirection_for_unauthenticated_user(
    client,
    domestic_homepage,
    domestic_site,
    mock_export_plan_detail_list,
    patch_get_user_lesson_completed,
    user,
    mock_get_user_profile,
):
    """Every AuthenticatedUserRequired page 302s anonymous users to signup with `next` set."""
    landing_page = factories.LandingPageFactory(parent=domestic_homepage)
    interstitial_page = factories.InterstitialPageFactory(parent=landing_page)
    list_page = factories.ListPageFactory(parent=domestic_homepage)
    curated_list_page = factories.CuratedListPageFactory(parent=list_page)
    topic_page = factories.TopicPageFactory(parent=curated_list_page)
    detail_page = factories.DetailPageFactory(parent=topic_page)
    pages = [
        landing_page,
        interstitial_page,
        list_page,
        curated_list_page,
        detail_page,
    ]
    for page in pages:
        assert isinstance(page, AuthenticatedUserRequired)
    for page in pages:
        response = client.get(page.url, follow=False)
        assert response.status_code == 302
        assert response._headers['location'] == ('Location', f'/signup/?next={page.url}')
    # Show an authenticated user can still get in there
    client.force_login(user)
    for page in pages:
        response = client.get(page.url, follow=False)
        assert response.status_code == 200
class TestImageAltRendition(TestCase, WagtailTestUtils):
    """Renditions of the custom image model must carry the image's alt text."""

    def setUp(self):
        self.login()
        root_collection, _ = Collection.objects.get_or_create(name='Root', depth=0)
        great_image_collection = root_collection.add_child(name='Great Images')
        # Create an image with alt text
        AltTextImage = get_image_model() # Noqa
        self.image = AltTextImage.objects.create(
            title='Test image', file=get_test_image_file(), alt_text='smart alt text', collection=great_image_collection
        )

    def test_image_alt_rendition(self):
        rendition = self.image.get_rendition('width-100')
        assert rendition.alt == 'smart alt text'
        assert self.image.title != rendition.alt
class TestGreatMedia(TestCase):
    """Exercises the media model's `sources` and `subtitles` properties."""

    def test_sources_mp4_with_no_transcript(self):
        media = make_test_video()
        self.assertEqual(
            media.sources,
            [
                {
                    'src': '/media/movie.mp4',
                    'type': 'video/mp4',
                    'transcript': None,
                }
            ],
        )

    def test_sources_mp4_with_transcript(self):
        media = make_test_video(transcript='A test transcript text')
        self.assertEqual(
            media.sources,
            [
                {
                    'src': '/media/movie.mp4',
                    'type': 'video/mp4',
                    'transcript': 'A test transcript text',
                }
            ],
        )

    def test_subtitles__present(self):
        media = make_test_video()
        media.subtitles_en = 'Dummy subtitles content'
        media.save()
        self.assertTrue(media.subtitles_en)
        expected = [
            {
                'srclang': 'en',
                'label': 'English',
                'url': reverse('core:subtitles-serve', args=[media.id, 'en']),
                'default': False,
            },
        ]
        self.assertEqual(media.subtitles, expected)

    def test_subtitles__not_present(self):
        media = make_test_video()
        self.assertFalse(media.subtitles_en)
        self.assertEqual(media.subtitles, [])
class TestSmallSnippets(TestCase):
    """__str__ (and Country slug) behaviour for the small snippet models."""

    # Most snippets are generally small models. Move them out of this test case
    # into their own if/when they gain any custom methods beyond __str__
    def test_region(self):
        region = Region.objects.create(name='Test Region')
        self.assertEqual(region.name, 'Test Region')
        self.assertEqual(f'{region}', 'Test Region')  # tests __str__

    def test_country(self):
        region = Region.objects.create(name='Test Region')
        # NB: slugs are not automatically set.
        # The SlugField is about validation, not auto-population by default
        country1 = Country.objects.create(
            name='Test Country',
            slug='test-country',
        )
        country2 = Country.objects.create(
            name='Other Country',
            slug='other-country',
            region=region,
        )
        country_unicode = Country.objects.create(
            name='Téßt Country',
            slug='tt-country',
        )
        self.assertEqual(country1.name, 'Test Country')
        self.assertEqual(country1.slug, 'test-country')
        self.assertEqual(country1.region, None)
        self.assertEqual(f'{country1}', 'Test Country')  # tests __str__
        self.assertEqual(country2.name, 'Other Country')
        self.assertEqual(country2.slug, 'other-country')
        self.assertEqual(country2.region, region)
        self.assertEqual(country_unicode.name, 'Téßt Country')
        # by default, ASCII only - https://docs.djangoproject.com/en/2.2/ref/utils/#django.utils.text.slugify
        self.assertEqual(country_unicode.slug, 'tt-country')
        self.assertEqual(country_unicode.region, None)
        self.assertEqual(f'{country_unicode}', 'Téßt Country')  # tests __str__

    def test_country_sets_slug_on_save(self):
        country = Country.objects.create(name='Test Country')
        country.refresh_from_db()
        self.assertEqual(country.slug, 'test-country')
        # Slug is set only on first save, if not already set
        country_2 = Country.objects.create(name='Another Country')
        self.assertEqual(country_2.slug, 'another-country')
        country_2.name = 'Changed country name'
        country_2.save()
        country_2.refresh_from_db()
        self.assertEqual(
            country_2.slug,
            'another-country',
            'Slug should not have changed',
        )
        # Can specify slug up-front
        country_3 = Country.objects.create(
            name='Country Three',
            slug='somewhere',
        )
        country_3.refresh_from_db()
        self.assertEqual(country_3.slug, 'somewhere')
        # Can't reuse slug
        with self.assertRaises(IntegrityError):
            Country.objects.create(name='Test Country')

    def test_product(self):
        product = Product.objects.create(name='Test Product')
        self.assertEqual(product.name, 'Test Product')
        self.assertEqual(f'{product}', 'Test Product')  # tests __str__

    def test_tag(self):
        tag = Tag.objects.create(name='Test Tag')
        self.assertEqual(tag.name, 'Test Tag')
        self.assertEqual(f'{tag}', 'Test Tag')  # tests __str__

    def test_industry_tag(self):
        tag = IndustryTag.objects.create(name='Test IndustryTag')
        self.assertEqual(tag.name, 'Test IndustryTag')
        self.assertEqual(f'{tag}', 'Test IndustryTag')  # tests __str__
class TestMagnaPageChooserPanel(SetUpLocaleMixin, TestCase):
    """MagnaPageChooserPanel binds to a page FK and restricts choosable page types."""

    def setUp(self):
        self.request = RequestFactory().get('/')
        user = AnonymousUser()  # technically, Anonymous users cannot access the admin
        self.request.user = user
        model = CaseStudyRelatedPages  # a model with a foreign key to Page which we want to render as a page chooser
        # a MagnaPageChooserPanel class that works on CaseStudyRelatedPages's 'page' field
        self.edit_handler = ObjectList(
            [MagnaPageChooserPanel('page', [DetailPage, CuratedListPage, TopicPage])]
        ).bind_to(model=model, request=self.request)
        self.my_page_chooser_panel = self.edit_handler.children[0]
        # build a form class containing the fields that MagnaPageChooserPanel wants
        self.PageChooserForm = self.edit_handler.get_form_class()
        # a test instance of CaseStudyRelatedPages, pointing at a freshly created detail page
        self.detail_page = DetailPageFactory(slug='detail-page')
        self.test_instance = model.objects.create(page=self.detail_page)
        self.form = self.PageChooserForm(instance=self.test_instance)
        self.page_chooser_panel = self.my_page_chooser_panel.bind_to(instance=self.test_instance, form=self.form)

    def test_magna_page_chooser_panel_target_models(self):
        result = (
            MagnaPageChooserPanel('page', [DetailPage, CuratedListPage, TopicPage])
            .bind_to(model=MagnaPageChooserPanel)
            .target_models()
        )
        self.assertEqual(result, [DetailPage, CuratedListPage, TopicPage])

    def test_magna_page_chooser_panel_render_as_empty_field(self):
        # An unsaved instance has no page selected, so the chooser renders empty
        test_instance = CaseStudyRelatedPages()
        form = self.PageChooserForm(instance=test_instance)
        page_chooser_panel = self.my_page_chooser_panel.bind_to(instance=test_instance, form=form, request=self.request)
        result = page_chooser_panel.render_as_field()
        self.assertIn('<span class="title"></span>', result)
        self.assertIn('Choose a page', result)
| [
"core.models.Country.objects.create",
"tests.unit.core.factories.CuratedListPageFactory",
"tests.helpers.make_test_video",
"time.sleep",
"tests.unit.core.factories.TopicPageFactory",
"wagtail.core.models.Collection.objects.get_or_create",
"wagtail.images.tests.utils.get_test_image_file",
"wagtail_fact... | [((4414, 6025), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""querystring_to_add,expected_backlink_value"""', "(('', None), ('?return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F',\n '/export-plan/1/about-your-business/'), (\n '?return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar',\n '/export-plan/1/about-your-business/?foo=bar'), (\n '?bam=baz&return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , '/export-plan/1/about-your-business/?foo=bar'), (\n '?bam=baz&return-link=example%2Fexport-plan%2Fpath%2F%3Ffoo%3Dbar',\n None), (\n '?bam=baz&return-link=https%3A%2F%2Fphishing.example.com%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , None), (\n '?bam=baz&return-link=%3A%2F%2Fphishing.example.com%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , None), ('?bam=baz', None), (\n '?bam=baz&return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , '/export-plan/1/about-your-business/?foo=bar'))"], {'ids': "('no backlink querystring present',\n 'backlink querystring present without encoded querystring of its own',\n 'backlink querystring present WITH encoded querystring of its own',\n 'backlink querystring present WITH encoded querystring and other args',\n 'backlink querystring present WITH bad payload - path does not start with / '\n , 'backlink querystring present WITH bad payload - path is a full URL',\n 'backlink querystring present WITH bad payload - path is a URL with flexible proto'\n , 'backlink querystring NOT present BUT another querystring is',\n 'backlink querystring present WITH OTHER QUERYSTRING TOO')"}), "('querystring_to_add,expected_backlink_value', (('',\n None), ('?return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F',\n '/export-plan/1/about-your-business/'), (\n '?return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar',\n '/export-plan/1/about-your-business/?foo=bar'), (\n 
'?bam=baz&return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , '/export-plan/1/about-your-business/?foo=bar'), (\n '?bam=baz&return-link=example%2Fexport-plan%2Fpath%2F%3Ffoo%3Dbar',\n None), (\n '?bam=baz&return-link=https%3A%2F%2Fphishing.example.com%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , None), (\n '?bam=baz&return-link=%3A%2F%2Fphishing.example.com%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , None), ('?bam=baz', None), (\n '?bam=baz&return-link=%2Fexport-plan%2F1%2Fabout-your-business%2F%3Ffoo%3Dbar'\n , '/export-plan/1/about-your-business/?foo=bar')), ids=(\n 'no backlink querystring present',\n 'backlink querystring present without encoded querystring of its own',\n 'backlink querystring present WITH encoded querystring of its own',\n 'backlink querystring present WITH encoded querystring and other args',\n 'backlink querystring present WITH bad payload - path does not start with / '\n , 'backlink querystring present WITH bad payload - path is a full URL',\n 'backlink querystring present WITH bad payload - path is a URL with flexible proto'\n , 'backlink querystring NOT present BUT another querystring is',\n 'backlink querystring present WITH OTHER QUERYSTRING TOO'))\n", (4437, 6025), False, 'import pytest\n'), ((7344, 8772), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""backlink_path,expected"""', "((None, None), ('', None), ('/export-plan/1/about-your-business/',\n 'About your business'), ('/export-plan/1/business-objectives/',\n 'Business objectives'), ('/export-plan/1/target-markets-research/',\n 'Target markets research'), ('/export-plan/1/adapting-your-product/',\n 'Adapting your product'), ('/export-plan/1/marketing-approach/',\n 'Marketing approach'), ('/export-plan/1/costs-and-pricing/',\n 'Costs and pricing'), ('/export-plan/1/funding-and-credit/',\n 'Funding and credit'), ('/export-plan/1/getting-paid/', 'Getting paid'),\n ('/export-plan/1/travel-plan/', 'Travel plan'), (\n 
'/export-plan/1/business-risk/', 'Business risk'), (\n '/export-plan/1/adapting-your-product/?foo=bar',\n 'Adapting your product'), ('/export-plan/', None), (\n '/path/that/will/not/match/anything/', None))"], {'ids': "('no backlink', 'empty string backlink', 'Seeking: About your business',\n 'Seeking: Business objectives', 'Seeking: Target markets research',\n 'Seeking: Adapting your product', 'Seeking: Marketing approach',\n 'Seeking: Costs and pricing', 'Seeking: Getting paid',\n 'Seeking: Funding and credit', 'Seeking: Travel plan',\n 'Seeking: Business risk',\n 'Valid backlink with querystring does not break name lookup',\n 'backlink for real page that is not an export plan step',\n 'backlink for a non-existent page')"}), "('backlink_path,expected', ((None, None), ('', None),\n ('/export-plan/1/about-your-business/', 'About your business'), (\n '/export-plan/1/business-objectives/', 'Business objectives'), (\n '/export-plan/1/target-markets-research/', 'Target markets research'),\n ('/export-plan/1/adapting-your-product/', 'Adapting your product'), (\n '/export-plan/1/marketing-approach/', 'Marketing approach'), (\n '/export-plan/1/costs-and-pricing/', 'Costs and pricing'), (\n '/export-plan/1/funding-and-credit/', 'Funding and credit'), (\n '/export-plan/1/getting-paid/', 'Getting paid'), (\n '/export-plan/1/travel-plan/', 'Travel plan'), (\n '/export-plan/1/business-risk/', 'Business risk'), (\n '/export-plan/1/adapting-your-product/?foo=bar',\n 'Adapting your product'), ('/export-plan/', None), (\n '/path/that/will/not/match/anything/', None)), ids=('no backlink',\n 'empty string backlink', 'Seeking: About your business',\n 'Seeking: Business objectives', 'Seeking: Target markets research',\n 'Seeking: Adapting your product', 'Seeking: Marketing approach',\n 'Seeking: Costs and pricing', 'Seeking: Getting paid',\n 'Seeking: Funding and credit', 'Seeking: Travel plan',\n 'Seeking: Business risk',\n 'Valid backlink with querystring does not break name 
lookup',\n 'backlink for real page that is not an export plan step',\n 'backlink for a non-existent page'))\n", (7367, 8772), False, 'import pytest\n'), ((10346, 12461), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""block_type_values,exception_message"""', "((['text'], _case_study_top_level_error_message), ([('media', ('video',))],\n _case_study_top_level_error_message), ([], None), (['text', 'text'],\n _case_study_top_level_error_message), (['text', ('media', ('video',\n 'image'))], _case_study_top_level_error_message), ([('media', ('video',\n )), ('media', ('video',))], _case_study_top_level_error_message), ([\n 'text', ('media', ('video', 'image')), 'text'],\n _case_study_top_level_error_message), ([('media', ('video', 'image')),\n 'text', ('media', ('video', 'image'))],\n _case_study_top_level_error_message), ([('media', ('video', 'image')),\n 'text'], None), ([('media', ('video',)), 'text'], None), ([('media', (\n 'image',)), 'text'], None), ([('media', ('image', 'image')), 'text'],\n None), ([('media', ('image', 'video')), 'text'],\n _case_study_video_order_error_message), ([('media', ('video', 'video')),\n 'text'], _case_study_one_video_only_error_message), (['quote', ('media',\n ('video', 'image')), 'text'], None), (['quote', 'quote', ('media', (\n 'video', 'image')), 'text'], None))"], {'ids': "('1. Top-level check: text node only: not fine',\n '2. Top-level check: media node only: not fine',\n '3. Top-level check: no nodes: fine - requirement is done at a higher level'\n , '4. Top-level check: two text nodes: not fine',\n '5. Top-level check: text before media: not fine',\n '6. Top-level check: two media nodes: not fine',\n '7. Top-level check: text, media, text: not fine',\n '8. Top-level check: media, text, media: not fine',\n '9. media node (video and image) and text node: fine',\n '10. media node (video only) and text node: fine',\n '11. media node (image only) and text node: fine',\n '12. 
media node (two images) and text node: fine',\n '13. media node (image before video) and text node: not fine',\n '14. media node (two videos) and text node: not fine',\n '15. quote node, media node (video and image) and text node: fine',\n '16. 2 quote nodes, media node (video and image) and text node: fine')"}), "('block_type_values,exception_message', ((['text'],\n _case_study_top_level_error_message), ([('media', ('video',))],\n _case_study_top_level_error_message), ([], None), (['text', 'text'],\n _case_study_top_level_error_message), (['text', ('media', ('video',\n 'image'))], _case_study_top_level_error_message), ([('media', ('video',\n )), ('media', ('video',))], _case_study_top_level_error_message), ([\n 'text', ('media', ('video', 'image')), 'text'],\n _case_study_top_level_error_message), ([('media', ('video', 'image')),\n 'text', ('media', ('video', 'image'))],\n _case_study_top_level_error_message), ([('media', ('video', 'image')),\n 'text'], None), ([('media', ('video',)), 'text'], None), ([('media', (\n 'image',)), 'text'], None), ([('media', ('image', 'image')), 'text'],\n None), ([('media', ('image', 'video')), 'text'],\n _case_study_video_order_error_message), ([('media', ('video', 'video')),\n 'text'], _case_study_one_video_only_error_message), (['quote', ('media',\n ('video', 'image')), 'text'], None), (['quote', 'quote', ('media', (\n 'video', 'image')), 'text'], None)), ids=(\n '1. Top-level check: text node only: not fine',\n '2. Top-level check: media node only: not fine',\n '3. Top-level check: no nodes: fine - requirement is done at a higher level'\n , '4. Top-level check: two text nodes: not fine',\n '5. Top-level check: text before media: not fine',\n '6. Top-level check: two media nodes: not fine',\n '7. Top-level check: text, media, text: not fine',\n '8. Top-level check: media, text, media: not fine',\n '9. media node (video and image) and text node: fine',\n '10. media node (video only) and text node: fine',\n '11. 
media node (image only) and text node: fine',\n '12. media node (two images) and text node: fine',\n '13. media node (image before video) and text node: not fine',\n '14. media node (two videos) and text node: not fine',\n '15. quote node, media node (video and image) and text node: fine',\n '16. 2 quote nodes, media node (video and image) and text node: fine'))\n", (10369, 12461), False, 'import pytest\n'), ((1453, 1464), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1462, 1464), False, 'from unittest import mock\n'), ((1519, 1572), 'core.models.AbstractObjectHash.generate_content_hash', 'AbstractObjectHash.generate_content_hash', (['mocked_file'], {}), '(mocked_file)\n', (1559, 1572), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((1851, 1929), 'tests.unit.core.factories.ListPageFactory', 'factories.ListPageFactory', ([], {'parent': 'domestic_homepage', 'record_read_progress': '(True)'}), '(parent=domestic_homepage, record_read_progress=True)\n', (1876, 1929), False, 'from tests.unit.core import factories\n'), ((1954, 2004), 'tests.unit.core.factories.CuratedListPageFactory', 'factories.CuratedListPageFactory', ([], {'parent': 'list_page'}), '(parent=list_page)\n', (1986, 2004), False, 'from tests.unit.core import factories\n'), ((2022, 2074), 'tests.unit.core.factories.TopicPageFactory', 'factories.TopicPageFactory', ([], {'parent': 'curated_list_page'}), '(parent=curated_list_page)\n', (2048, 2074), False, 'from tests.unit.core import factories\n'), ((2093, 2139), 'tests.unit.core.factories.DetailPageFactory', 'factories.DetailPageFactory', ([], {'parent': 'topic_page'}), '(parent=topic_page)\n', (2120, 2139), False, 'from tests.unit.core import factories\n'), ((2563, 2642), 'tests.unit.core.factories.ListPageFactory', 
'factories.ListPageFactory', ([], {'parent': 'domestic_homepage', 'record_read_progress': '(False)'}), '(parent=domestic_homepage, record_read_progress=False)\n', (2588, 2642), False, 'from tests.unit.core import factories\n'), ((2667, 2717), 'tests.unit.core.factories.CuratedListPageFactory', 'factories.CuratedListPageFactory', ([], {'parent': 'list_page'}), '(parent=list_page)\n', (2699, 2717), False, 'from tests.unit.core import factories\n'), ((2735, 2787), 'tests.unit.core.factories.TopicPageFactory', 'factories.TopicPageFactory', ([], {'parent': 'curated_list_page'}), '(parent=curated_list_page)\n', (2761, 2787), False, 'from tests.unit.core import factories\n'), ((2806, 2852), 'tests.unit.core.factories.DetailPageFactory', 'factories.DetailPageFactory', ([], {'parent': 'topic_page'}), '(parent=topic_page)\n', (2833, 2852), False, 'from tests.unit.core import factories\n'), ((3161, 3219), 'tests.unit.core.factories.CuratedListPageFactory', 'factories.CuratedListPageFactory', ([], {'parent': 'domestic_homepage'}), '(parent=domestic_homepage)\n', (3193, 3219), False, 'from tests.unit.core import factories\n'), ((3237, 3275), 'tests.unit.core.factories.TopicPageFactory', 'factories.TopicPageFactory', ([], {'parent': 'clp'}), '(parent=clp)\n', (3263, 3275), False, 'from tests.unit.core import factories\n'), ((3294, 3340), 'tests.unit.core.factories.DetailPageFactory', 'factories.DetailPageFactory', ([], {'parent': 'topic_page'}), '(parent=topic_page)\n', (3321, 3340), False, 'from tests.unit.core import factories\n'), ((3730, 3848), 'tests.unit.core.factories.ListPageFactory', 'factories.ListPageFactory', ([], {'parent': 'domestic_homepage', 'record_read_progress': '(False)', 'slug': '"""example-learning-homepage"""'}), "(parent=domestic_homepage, record_read_progress=\n False, slug='example-learning-homepage')\n", (3755, 3848), False, 'from tests.unit.core import factories\n'), ((3882, 3955), 'tests.unit.core.factories.CuratedListPageFactory', 
'factories.CuratedListPageFactory', ([], {'parent': 'list_page', 'slug': '"""example-module"""'}), "(parent=list_page, slug='example-module')\n", (3914, 3955), False, 'from tests.unit.core import factories\n'), ((6612, 6691), 'tests.unit.core.factories.ListPageFactory', 'factories.ListPageFactory', ([], {'parent': 'domestic_homepage', 'record_read_progress': '(False)'}), '(parent=domestic_homepage, record_read_progress=False)\n', (6637, 6691), False, 'from tests.unit.core import factories\n'), ((6716, 6766), 'tests.unit.core.factories.CuratedListPageFactory', 'factories.CuratedListPageFactory', ([], {'parent': 'list_page'}), '(parent=list_page)\n', (6748, 6766), False, 'from tests.unit.core import factories\n'), ((6784, 6836), 'tests.unit.core.factories.TopicPageFactory', 'factories.TopicPageFactory', ([], {'parent': 'curated_list_page'}), '(parent=curated_list_page)\n', (6810, 6836), False, 'from tests.unit.core import factories\n'), ((6855, 6941), 'tests.unit.core.factories.DetailPageFactory', 'factories.DetailPageFactory', ([], {'parent': 'topic_page', 'template': '"""learn/detail_page.html"""'}), "(parent=topic_page, template=\n 'learn/detail_page.html')\n", (6882, 6941), False, 'from tests.unit.core import factories\n'), ((9085, 9147), 'tests.unit.core.factories.DetailPageFactory', 'factories.DetailPageFactory', ([], {'template': '"""learn/detail_page.html"""'}), "(template='learn/detail_page.html')\n", (9112, 9147), False, 'from tests.unit.core import factories\n'), ((9796, 9809), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (9806, 9809), False, 'import time\n'), ((15516, 15595), 'tests.unit.core.factories.ListPageFactory', 'factories.ListPageFactory', ([], {'parent': 'domestic_homepage', 'record_read_progress': '(False)'}), '(parent=domestic_homepage, record_read_progress=False)\n', (15541, 15595), False, 'from tests.unit.core import factories\n'), ((15620, 15670), 'tests.unit.core.factories.CuratedListPageFactory', 'factories.CuratedListPageFactory', 
([], {'parent': 'list_page'}), '(parent=list_page)\n', (15652, 15670), False, 'from tests.unit.core import factories\n'), ((16734, 16813), 'tests.unit.core.factories.ListPageFactory', 'factories.ListPageFactory', ([], {'parent': 'domestic_homepage', 'record_read_progress': '(False)'}), '(parent=domestic_homepage, record_read_progress=False)\n', (16759, 16813), False, 'from tests.unit.core import factories\n'), ((16838, 16888), 'tests.unit.core.factories.CuratedListPageFactory', 'factories.CuratedListPageFactory', ([], {'parent': 'list_page'}), '(parent=list_page)\n', (16870, 16888), False, 'from tests.unit.core import factories\n'), ((18720, 18774), 'tests.unit.core.factories.LandingPageFactory', 'factories.LandingPageFactory', ([], {'parent': 'domestic_homepage'}), '(parent=domestic_homepage)\n', (18748, 18774), False, 'from tests.unit.core import factories\n'), ((18799, 18853), 'tests.unit.core.factories.InterstitialPageFactory', 'factories.InterstitialPageFactory', ([], {'parent': 'landing_page'}), '(parent=landing_page)\n', (18832, 18853), False, 'from tests.unit.core import factories\n'), ((18870, 18921), 'tests.unit.core.factories.ListPageFactory', 'factories.ListPageFactory', ([], {'parent': 'domestic_homepage'}), '(parent=domestic_homepage)\n', (18895, 18921), False, 'from tests.unit.core import factories\n'), ((18946, 18996), 'tests.unit.core.factories.CuratedListPageFactory', 'factories.CuratedListPageFactory', ([], {'parent': 'list_page'}), '(parent=list_page)\n', (18978, 18996), False, 'from tests.unit.core import factories\n'), ((19014, 19066), 'tests.unit.core.factories.TopicPageFactory', 'factories.TopicPageFactory', ([], {'parent': 'curated_list_page'}), '(parent=curated_list_page)\n', (19040, 19066), False, 'from tests.unit.core import factories\n'), ((19085, 19131), 'tests.unit.core.factories.DetailPageFactory', 'factories.DetailPageFactory', ([], {'parent': 'topic_page'}), '(parent=topic_page)\n', (19112, 19131), False, 'from tests.unit.core 
import factories\n'), ((12744, 12755), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (12753, 12755), False, 'from unittest import mock\n'), ((13494, 13527), 'core.models.case_study_body_validation', 'case_study_body_validation', (['value'], {}), '(value)\n', (13520, 13527), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((19899, 19953), 'wagtail.core.models.Collection.objects.get_or_create', 'Collection.objects.get_or_create', ([], {'name': '"""Root"""', 'depth': '(0)'}), "(name='Root', depth=0)\n", (19931, 19953), False, 'from wagtail.core.models import Collection\n'), ((20098, 20115), 'wagtail.images.get_image_model', 'get_image_model', ([], {}), '()\n', (20113, 20115), False, 'from wagtail.images import get_image_model\n'), ((20603, 20620), 'tests.helpers.make_test_video', 'make_test_video', ([], {}), '()\n', (20618, 20620), False, 'from tests.helpers import SetUpLocaleMixin, make_test_video\n'), ((20942, 20994), 'tests.helpers.make_test_video', 'make_test_video', ([], {'transcript': '"""A test transcript text"""'}), "(transcript='A test transcript text')\n", (20957, 20994), False, 'from tests.helpers import SetUpLocaleMixin, make_test_video\n'), ((21328, 21345), 'tests.helpers.make_test_video', 'make_test_video', ([], {}), '()\n', (21343, 21345), False, 'from tests.helpers import SetUpLocaleMixin, make_test_video\n'), ((21820, 21837), 'tests.helpers.make_test_video', 'make_test_video', ([], {}), '()\n', (21835, 21837), False, 'from tests.helpers import SetUpLocaleMixin, make_test_video\n'), ((22164, 22205), 'core.models.Region.objects.create', 'Region.objects.create', ([], {'name': '"""Test Region"""'}), "(name='Test Region')\n", (22185, 22205), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, 
CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((22376, 22417), 'core.models.Region.objects.create', 'Region.objects.create', ([], {'name': '"""Test Region"""'}), "(name='Test Region')\n", (22397, 22417), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((22560, 22624), 'core.models.Country.objects.create', 'Country.objects.create', ([], {'name': '"""Test Country"""', 'slug': '"""test-country"""'}), "(name='Test Country', slug='test-country')\n", (22582, 22624), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((22679, 22765), 'core.models.Country.objects.create', 'Country.objects.create', ([], {'name': '"""Other Country"""', 'slug': '"""other-country"""', 'region': 'region'}), "(name='Other Country', slug='other-country', region=\n region)\n", (22701, 22765), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((22834, 22896), 'core.models.Country.objects.create', 'Country.objects.create', ([], {'name': '"""Téßt Country"""', 'slug': '"""tt-country"""'}), "(name='Téßt Country', slug='tt-country')\n", (22856, 22896), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, 
InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((23768, 23811), 'core.models.Country.objects.create', 'Country.objects.create', ([], {'name': '"""Test Country"""'}), "(name='Test Country')\n", (23790, 23811), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((23983, 24029), 'core.models.Country.objects.create', 'Country.objects.create', ([], {'name': '"""Another Country"""'}), "(name='Another Country')\n", (24005, 24029), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((24395, 24457), 'core.models.Country.objects.create', 'Country.objects.create', ([], {'name': '"""Country Three"""', 'slug': '"""somewhere"""'}), "(name='Country Three', slug='somewhere')\n", (24417, 24457), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((24762, 24805), 'core.models.Product.objects.create', 'Product.objects.create', ([], {'name': '"""Test Product"""'}), "(name='Test Product')\n", (24784, 24805), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((24973, 25008), 
'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'name': '"""Test Tag"""'}), "(name='Test Tag')\n", (24991, 25008), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((25169, 25220), 'core.models.IndustryTag.objects.create', 'IndustryTag.objects.create', ([], {'name': '"""Test IndustryTag"""'}), "(name='Test IndustryTag')\n", (25195, 25220), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((25497, 25512), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (25510, 25512), False, 'from django.contrib.auth.models import AnonymousUser\n'), ((27030, 27053), 'core.models.CaseStudyRelatedPages', 'CaseStudyRelatedPages', ([], {}), '()\n', (27051, 27053), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((13299, 13340), 'pytest.raises', 'pytest.raises', (['StreamBlockValidationError'], {}), '(StreamBlockValidationError)\n', (13312, 13340), False, 'import pytest\n'), ((13361, 13394), 'core.models.case_study_body_validation', 'case_study_body_validation', (['value'], {}), '(value)\n', (13387, 13394), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, 
case_study_body_validation\n'), ((17782, 17804), 'pytest.raises', 'pytest.raises', (['Http404'], {}), '(Http404)\n', (17795, 17804), False, 'import pytest\n'), ((18282, 18312), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (18295, 18312), False, 'import pytest\n'), ((24671, 24714), 'core.models.Country.objects.create', 'Country.objects.create', ([], {'name': '"""Test Country"""'}), "(name='Test Country')\n", (24693, 24714), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((20211, 20232), 'wagtail.images.tests.utils.get_test_image_file', 'get_test_image_file', ([], {}), '()\n', (20230, 20232), False, 'from wagtail.images.tests.utils import get_test_image_file\n'), ((21593, 21647), 'django.urls.reverse', 'reverse', (['"""core:subtitles-serve"""'], {'args': "[media.id, 'en']"}), "('core:subtitles-serve', args=[media.id, 'en'])\n", (21600, 21647), False, 'from django.urls import reverse\n'), ((25456, 25472), 'django.test.RequestFactory', 'RequestFactory', ([], {}), '()\n', (25470, 25472), False, 'from django.test import RequestFactory, TestCase\n'), ((18150, 18164), 'wagtail_factories.ImageFactory', 'ImageFactory', ([], {}), '()\n', (18162, 18164), False, 'from wagtail_factories import ImageFactory\n'), ((25866, 25937), 'core.models.MagnaPageChooserPanel', 'MagnaPageChooserPanel', (['"""page"""', '[DetailPage, CuratedListPage, TopicPage]'], {}), "('page', [DetailPage, CuratedListPage, TopicPage])\n", (25887, 25937), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((26702, 26773), 
'core.models.MagnaPageChooserPanel', 'MagnaPageChooserPanel', (['"""page"""', '[DetailPage, CuratedListPage, TopicPage]'], {}), "('page', [DetailPage, CuratedListPage, TopicPage])\n", (26723, 26773), False, 'from core.models import AbstractObjectHash, CaseStudyRelatedPages, Country, CuratedListPage, DetailPage, IndustryTag, InterstitialPage, LandingPage, LessonPlaceholderPage, ListPage, MagnaPageChooserPanel, Product, Region, Tag, TopicPage, case_study_body_validation\n'), ((18374, 18388), 'wagtail_factories.ImageFactory', 'ImageFactory', ([], {}), '()\n', (18386, 18388), False, 'from wagtail_factories import ImageFactory\n'), ((18401, 18415), 'wagtail_factories.ImageFactory', 'ImageFactory', ([], {}), '()\n', (18413, 18415), False, 'from wagtail_factories import ImageFactory\n')] |
# Copyright (C) 2013 <NAME>
"""
This file provides a mpirun work-around for clusters that do not have the ibrun
command.
"""
import os, stat
class random_manningsn(object):
    """
    This class is an implementation of
    :class:`polyadcirc.run_framework.random_manningsn` that provides a
    ``mpirun`` based work-around for clusters that do not have ibrun. It is
    probably system dependent and might need to be modified.
    """
    def __init__(self, script_name, fdir):
        """
        :param string script_name: name of the bash script to generate
        :param string fdir: directory in which the script and rankfiles are
            written
        """
        self.script_name = script_name
        self.base_dir = fdir
        # Per-job ADCIRC run directories; job ``i`` runs inside ``rf_dirs[i]``.
        self.rf_dirs = ['dirone', 'dirtwo', 'dirthree']

    def write_run_script_no_ibrun(self, num_procs, num_jobs, procs_pnode, TpN,
                                  screenout=True, num_writers=None):
        """
        Creates a bash script called ``self.script_name`` in ``self.base_dir``
        and a set of rankfiles named ``rankfile_n`` to run multiple
        non-interacting parallel programs in parallel.

        :type num_procs: int
        :param num_procs: number of processes per job
        :type num_jobs: int
        :param num_jobs: number of jobs to run
        :param int procs_pnode: number of processors per node
        :param bool screenout: flag (True -- write ``ADCIRC`` output to
            screen, False -- write ``ADCIRC`` output to temp file)
        :param int num_writers: number of MPI processes to dedicate soley to
            the task of writing ascii files
        :param int TpN: number of tasks (processors to use) per node (wayness)

        :rtype: string
        :returns: name of bash script for running a batch of jobs within our
            processor allotment
        """
        tmp_file = self.script_name.partition('.')[0] + '.tmp'
        script_path = os.path.join(self.base_dir, self.script_name)
        with open(script_path, 'w') as f:
            f.write('#!/bin/bash\n')
            # One backgrounded mpirun per job; was ``xrange`` (Python 2 only).
            for i in range(num_jobs):
                rankfile = 'rankfile{:d}'.format(i)
                line = 'mpirun -machinefile $TMP/machines -rf '
                line += rankfile + ' -np {:d} '.format(num_procs)
                line += './padcirc -I {0} -O {0} '.format(self.rf_dirs[i])
                if num_writers:
                    line += '-W ' + str(num_writers) + ' '
                if not screenout:
                    line += '> ' + tmp_file
                line += ' &\n'
                f.write(line)
                self._write_rankfile(rankfile, i, num_procs, procs_pnode, TpN)
            # Block until every backgrounded job has finished.
            f.write('wait\n')
        # Mark the generated script executable for its owner.
        curr_stat = os.stat(script_path)
        os.chmod(script_path, curr_stat.st_mode | stat.S_IXUSR)
        return self.script_name

    def _write_rankfile(self, rankfile, i, num_procs, procs_pnode, TpN):
        """
        Write ``rankfile`` in ``self.base_dir`` mapping each MPI rank of job
        ``i`` to a node offset and processor slot(s).

        Uses floor division (``//``) so the arithmetic stays integral on
        Python 3; on Python 2 the original ``/`` on ints was already floor
        division.
        """
        with open(os.path.join(self.base_dir, rankfile), 'w') as frank:
            for j in range(num_procs):
                global_rank = i * num_procs + j
                if TpN == procs_pnode:
                    # One processor slot per task.
                    line = 'rank {:d}=n+{:d} slot={:d}'.format(
                        j, global_rank // procs_pnode,
                        global_rank % procs_pnode)
                else:
                    # Each MPI process is bound to a contiguous range of slots.
                    ppp = procs_pnode // TpN  # processors per process
                    start = (global_rank * ppp) % procs_pnode
                    line = 'rank {:d}=n+{:d} slot={:d}-{:d}'.format(
                        j, global_rank // TpN, start, start + ppp - 1)
                if j < num_procs - 1:
                    line += '\n'
                frank.write(line)
| [
"os.path.join"
] | [((3939, 3984), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.script_name'], {}), '(self.base_dir, self.script_name)\n', (3951, 3984), False, 'import os, stat\n'), ((4003, 4048), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.script_name'], {}), '(self.base_dir, self.script_name)\n', (4015, 4048), False, 'import os, stat\n'), ((1833, 1878), 'os.path.join', 'os.path.join', (['self.base_dir', 'self.script_name'], {}), '(self.base_dir, self.script_name)\n', (1845, 1878), False, 'import os, stat\n'), ((2828, 2865), 'os.path.join', 'os.path.join', (['self.base_dir', 'rankfile'], {}), '(self.base_dir, rankfile)\n', (2840, 2865), False, 'import os, stat\n')] |
import unittest
import logging
from flask import Flask
@unittest.skip("needs refactoring")
class driftTestCase(unittest.TestCase):
def setUp(self):
self.app = Flask(__name__)
logging.basicConfig(level="ERROR")
self.app.testing = True
self.test_client = self.app.test_client()
def tearDown(self):
pass
def test_flasksetup(self):
# Run minimal setup
# flasksetup(self.app, options=[])
pass
def test_all(self):
# Run with all options
# flasksetup(self.app)
pass
if __name__ == "__main__":
unittest.main()
| [
"unittest.main",
"unittest.skip",
"logging.basicConfig",
"flask.Flask"
] | [((59, 93), 'unittest.skip', 'unittest.skip', (['"""needs refactoring"""'], {}), "('needs refactoring')\n", (72, 93), False, 'import unittest\n'), ((604, 619), 'unittest.main', 'unittest.main', ([], {}), '()\n', (617, 619), False, 'import unittest\n'), ((175, 190), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (180, 190), False, 'from flask import Flask\n'), ((199, 233), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': '"""ERROR"""'}), "(level='ERROR')\n", (218, 233), False, 'import logging\n')] |
import os
import yaml
import xlrd
from openpyxl import load_workbook
from util_func import securely_check_dir
class ExcelHandler:
def __init__(self, config):
self.config = config
securely_check_dir('forms')
securely_check_dir('att')
securely_check_dir('config')
self.subject = []
for item in self.config['responds'].keys():
if not item.startswith('default'):
self.subject.append(item)
self.handle_config = []
config_root = 'config'
for _, _, files in os.walk(config_root):
for file in files:
subject, _ = os.path.splitext(file)
if subject != 'top' and not subject.endswith('-old'):
with open(os.path.join(config_root, file)) as fp:
subject_config = yaml.load(fp.read())
self.handle_config.append({'subject_name': subject, 'config': subject_config})
def handle(self):
att_root = 'att'
for subject_config in self.handle_config:
subject = subject_config['subject_name']
config = subject_config['config']
if os.path.exists(os.path.join(att_root, subject)):
for _, _, files in os.walk(os.path.join(att_root, subject)):
for f in files:
short_name, ext = os.path.splitext(f)
if not short_name.endswith('-old') and not f.startswith('.'):
workbook = load_workbook(os.path.join(att_root, subject, f))
sheet_names = config.keys()
for sheet_name in sheet_names:
from_row = config[sheet_name]['header']['row']['to'] + 1
from_column = config[sheet_name]['column']['from']
sheet = workbook[sheet_name]
content = []
tmp_work_book = xlrd.open_workbook(os.path.join(att_root, subject, f))
tmp_sheet = tmp_work_book.sheet_by_name(sheet_name)
lines = tmp_sheet.nrows
tmp_work_book.release_resources()
for i in range(from_row, lines + 1):
row = [val.value for val in sheet[i]][from_column - 1:-1]
content.append(row)
form_workbook = load_workbook(
os.path.join('forms', subject, config[sheet_name]['destination_file']))
form_sheet = form_workbook[sheet_name]
tmp_work_book = xlrd.open_workbook(
os.path.join('forms', subject, config[sheet_name]['destination_file']))
tmp_sheet = tmp_work_book.sheet_by_name(sheet_name)
lines = tmp_sheet.nrows
tmp_work_book.release_resources()
for i in range(len(content)):
for j in range(len(content[i])):
form_sheet[lines + i + 1][j].value = content[i][j]
form_workbook.save(
os.path.join('forms', subject, config[sheet_name]['destination_file']))
form_workbook.close()
workbook.close()
os.rename(os.path.join(att_root, subject, f),
os.path.join(att_root, subject, '{}{}{}'.format(short_name, '-old', ext)))
if __name__ == '__main__':
config_file = 'config/top.yml'
if not os.path.exists(config_file):
print('No top.yml file found!')
exit(-1)
with open(config_file, encoding='utf-8') as f:
config_file = yaml.load(f.read())
excel_handler = ExcelHandler(config_file)
excel_handler.handle()
| [
"os.path.exists",
"os.path.join",
"os.path.splitext",
"util_func.securely_check_dir",
"os.walk"
] | [((204, 231), 'util_func.securely_check_dir', 'securely_check_dir', (['"""forms"""'], {}), "('forms')\n", (222, 231), False, 'from util_func import securely_check_dir\n'), ((240, 265), 'util_func.securely_check_dir', 'securely_check_dir', (['"""att"""'], {}), "('att')\n", (258, 265), False, 'from util_func import securely_check_dir\n'), ((274, 302), 'util_func.securely_check_dir', 'securely_check_dir', (['"""config"""'], {}), "('config')\n", (292, 302), False, 'from util_func import securely_check_dir\n'), ((562, 582), 'os.walk', 'os.walk', (['config_root'], {}), '(config_root)\n', (569, 582), False, 'import os\n'), ((3871, 3898), 'os.path.exists', 'os.path.exists', (['config_file'], {}), '(config_file)\n', (3885, 3898), False, 'import os\n'), ((644, 666), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (660, 666), False, 'import os\n'), ((1195, 1226), 'os.path.join', 'os.path.join', (['att_root', 'subject'], {}), '(att_root, subject)\n', (1207, 1226), False, 'import os\n'), ((1272, 1303), 'os.path.join', 'os.path.join', (['att_root', 'subject'], {}), '(att_root, subject)\n', (1284, 1303), False, 'import os\n'), ((1384, 1403), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (1400, 1403), False, 'import os\n'), ((767, 798), 'os.path.join', 'os.path.join', (['config_root', 'file'], {}), '(config_root, file)\n', (779, 798), False, 'import os\n'), ((1543, 1577), 'os.path.join', 'os.path.join', (['att_root', 'subject', 'f'], {}), '(att_root, subject, f)\n', (1555, 1577), False, 'import os\n'), ((3647, 3681), 'os.path.join', 'os.path.join', (['att_root', 'subject', 'f'], {}), '(att_root, subject, f)\n', (3659, 3681), False, 'import os\n'), ((2039, 2073), 'os.path.join', 'os.path.join', (['att_root', 'subject', 'f'], {}), '(att_root, subject, f)\n', (2051, 2073), False, 'import os\n'), ((2599, 2669), 'os.path.join', 'os.path.join', (['"""forms"""', 'subject', "config[sheet_name]['destination_file']"], {}), "('forms', subject, 
config[sheet_name]['destination_file'])\n", (2611, 2669), False, 'import os\n'), ((2846, 2916), 'os.path.join', 'os.path.join', (['"""forms"""', 'subject', "config[sheet_name]['destination_file']"], {}), "('forms', subject, config[sheet_name]['destination_file'])\n", (2858, 2916), False, 'import os\n'), ((3434, 3504), 'os.path.join', 'os.path.join', (['"""forms"""', 'subject', "config[sheet_name]['destination_file']"], {}), "('forms', subject, config[sheet_name]['destination_file'])\n", (3446, 3504), False, 'import os\n')] |
from ipware.ip import get_ip
from ipware.utils import is_private_ip
def is_private_ip_from_request(request) -> bool:
return is_private_ip(get_ip(request))
| [
"ipware.ip.get_ip"
] | [((144, 159), 'ipware.ip.get_ip', 'get_ip', (['request'], {}), '(request)\n', (150, 159), False, 'from ipware.ip import get_ip\n')] |
import urllib.request
from bs4 import BeautifulSoup
import matplotlib.pyplot as plt
from PIL import Image
import os
def image_poster(title_address):
url = f'{title_address}'
req = urllib.request.Request(url)
res = urllib.request.urlopen(url).read()
soup = BeautifulSoup(res, 'html.parser')
soup = soup.find("div", class_="poster")
# img의 경로를 받아온다
imgUrl = soup.find("img")["src"]
# urlretrieve는 다운로드 함수
# img.alt는 이미지 대체 텍스트
urllib.request.urlretrieve(imgUrl, soup.find("img")["alt"] + '.jpg')
plt.show()
| [
"bs4.BeautifulSoup",
"matplotlib.pyplot.show"
] | [((275, 308), 'bs4.BeautifulSoup', 'BeautifulSoup', (['res', '"""html.parser"""'], {}), "(res, 'html.parser')\n", (288, 308), False, 'from bs4 import BeautifulSoup\n'), ((543, 553), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (551, 553), True, 'import matplotlib.pyplot as plt\n')] |
# -*- coding: utf-8 -*-
"""Nowruz at SemEval 2022: Tackling Cloze Tests with Transformers and Ordinal Regression
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1RXkjBpzNJtc0WhhrKMjU-50rd5uSviX3
"""
import torch
import torch.nn as nn
from torch.functional import F
from datasets import Dataset
import transformers as ts
from transformers import AutoTokenizer , AutoModelForSequenceClassification
from transformers import TrainingArguments, Trainer
from transformers import DataCollatorWithPadding
from transformers import create_optimizer
from transformers.file_utils import ModelOutput
from transformers.modeling_outputs import SequenceClassifierOutput
from coral_pytorch.layers import CoralLayer
from coral_pytorch.losses import coral_loss
from coral_pytorch.dataset import levels_from_labelbatch
from coral_pytorch.dataset import proba_to_label
from dataclasses import dataclass
from typing import Optional, Tuple
import numpy as np
import pandas as pd
from scipy import stats
import sys
from data_loader import (
retrieve_instances_from_dataset,
retrieve_labels_from_dataset_for_classification,
retrieve_labels_from_dataset_for_ranking,
write_predictions_to_file,
)
"""#Preparing Data"""
def loadDataset(dataPath , labelPath=None , scoresPath=None):
dataset = pd.read_csv(dataPath, sep="\t", quoting=3)
ids , sentences , fillers = retrieve_instances_from_dataset(dataset)
#Creating dictionaries to convert datas to Huggingface Dataset
datasetDict = {
"id": ids,
"sentence": sentences,
"filler": fillers,
}
labels = None
if labelPath != None:
labels = pd.read_csv(labelPath, sep="\t", header=None, names=["Id", "Label"])
labels = retrieve_labels_from_dataset_for_classification(labels)
datasetDict["labels"] = labels
scores = None
if scoresPath != None:
scores = pd.read_csv(scoresPath, sep="\t", header=None, names=["Id", "Label"])
scores = retrieve_labels_from_dataset_for_ranking(scores)
datasetDict["scores"] = scores
#Removing Periods if fillers appear at the end of the sentence (because if we don't period will be considered last word piece of the filler)
for index , _ in enumerate(fillers):
fillers[index].replace("." , "")
#Creating Huggingface Datasets from Dictionaries
dataset = Dataset.from_dict(datasetDict)
return dataset
"""#Preprocessing"""
def preprocessDataset(dataset , tokenizer):
def addToDict(dict_1 , dict_2 , columns_1=[] , columns_2=["input_ids" , "attention_mask"]):
for item_1 , item_2 in zip(columns_1 , columns_2):
dict_1[item_1] = dict_2.pop(item_2)
def mappingFunction(dataset):
outputDict = {}
cleanedSentence = dataset["sentence"].replace("\n" , " ").replace("(...)" , "").strip()
sentenceWithFiller = cleanedSentence.replace("[MASK]" , dataset["filler"].strip()).strip()
tokenized_sentence = tokenizer(sentenceWithFiller)
addToDict(outputDict , tokenized_sentence , ["input_ids" , "attention_mask"])
#Getting the index of the last word piece of the filler
if "cls_token" in tokenizer.special_tokens_map.keys():
filler_indecies = len(tokenizer(tokenizer.special_tokens_map["cls_token"] + " " + cleanedSentence.split("[MASK]")[0].strip() + " " + dataset["filler"].strip() , add_special_tokens=False)["input_ids"]) - 1
elif "bos_token" in tokenizer.special_tokens_map.keys():
filler_indecies = len(tokenizer(tokenizer.special_tokens_map["bos_token"] + " " + cleanedSentence.split("[MASK]")[0].strip() + " " + dataset["filler"].strip() , add_special_tokens=False)["input_ids"]) - 1
else:
filler_indecies = len(tokenizer(cleanedSentence.split("[MASK]")[0].strip() + " " + dataset["filler"].strip() , add_special_tokens=False)["input_ids"]) - 1
outputDict["filler_indecies"] = filler_indecies
return outputDict
return dataset.map(mappingFunction , batched=False)
"""#Model Definition"""
@dataclass
class CustomOutput(ModelOutput):
loss: Optional[torch.FloatTensor] = None
logits: torch.FloatTensor = None
classificationOutput: torch.FloatTensor = None
regressionOutput: torch.FloatTensor = None
class SequenceClassificationModel(nn.Module):
def __init__(self,
encoder,
dim,
use_coral=False,
use_cls=True,
supportPooledRepresentation=False,
mode="both",
num_labels=3,
num_ranks=5,
lambda_c=0.5,
lambda_r=0.5,
dropout_rate=0.2):
super().__init__()
#mode can be one of these: ["both" , "classification" , "regression"]
self.encoder = encoder
self.dim = dim
self.use_coral = use_coral
self.use_cls = use_cls
self.supportPooledRepresentation = supportPooledRepresentation
self.mode = mode
self.num_labels = num_labels
self.num_ranks = num_ranks
self.lambda_c = lambda_c
self.lambda_r = lambda_r
self.dropout_rate = dropout_rate
if self.use_cls:
self.pre_classifier = nn.Linear(self.dim*2 , self.dim , bias=True)
else:
self.pre_classifier = nn.Linear(self.dim , self.dim , bias=True)
self.dropout = nn.Dropout(p=self.dropout_rate , inplace=False)
self.regressionHead = CoralLayer(self.dim , self.num_ranks)
if use_coral:
self.classificationHead = CoralLayer(self.dim , self.num_labels)
else:
self.classificationHead = nn.Linear(self.dim , self.num_labels , bias=True)
def forward(
self,
input_ids,
attention_mask,
filler_indecies,
labels=None,
scores=None,
**args):
device = self.encoder.device
# Getting fillers representation from pre-trained transformer (encoder)
sentence_embedding = self.encoder(
input_ids=input_ids,
attention_mask=attention_mask,
)
#Getting Fillers Representation
filler_tokens = sentence_embedding[0][filler_indecies[0] , filler_indecies[1]]
fillers = filler_tokens[: , 0 , :]
#Concatenating [CLS] output with Filler output if the model supports [CLS]
pooled_output = None
if self.use_cls:
if self.supportPooledRepresentation:
pooled_output = torch.concat((sentence_embedding[1] , fillers) , dim=-1)
else:
pooled_output = torch.concat((sentence_embedding[0][: , 0 , :] , fillers) , dim=-1)
else:
pooled_output = fillers
#Passing Pooled Output to another dense layer followed by activation function and dropout
pooled_output = self.pre_classifier(pooled_output)
pooled_output = nn.GELU()(pooled_output)
pooled_output = self.dropout(pooled_output)
#Passing the final output to the classificationHead and RegressionHead
classificationOutput = self.classificationHead(pooled_output)
regressionOutput = self.regressionHead(pooled_output)
totalLoss = None
classification_loss = None
regression_loss = None
#Computing classification loss
if labels != None and (self.mode.lower() == "both" or self.mode.lower() == "classification"):
if self.use_coral:
levels = levels_from_labelbatch(labels.view(-1) , self.num_labels).to(device)
classification_loss = coral_loss(classificationOutput.view(-1 , self.num_labels - 1) , levels.view(-1 , self.num_labels - 1))
else:
loss_fct = nn.CrossEntropyLoss()
classification_loss = loss_fct(classificationOutput.view(-1 , self.num_labels) , labels.view(-1))
#Computing regression loss
if scores != None and (self.mode.lower() == "both" or self.mode.lower() == "regression"):
levels = levels_from_labelbatch(scores.view(-1) , self.num_ranks).to(device)
regression_loss = coral_loss(regressionOutput.view(-1 , self.num_ranks - 1) , levels.view(-1 , self.num_ranks - 1))
if self.mode.lower() == "both" and (labels != None and scores != None):
totalLoss = (self.lambda_c * classification_loss) + (self.lambda_r * regression_loss)
elif self.mode.lower() == "classification" and labels != None:
totalLoss = classification_loss
elif self.mode.lower() == "regression" and scores != None:
totalLoss = regression_loss
outputs = torch.concat((classificationOutput , regressionOutput) , dim=-1)
finalClassificationOutput = torch.sigmoid(classificationOutput)
finalRegressionOutput = torch.sigmoid(regressionOutput)
finalClassificationOutput = proba_to_label(finalClassificationOutput.cpu().detach()).numpy()
finalRegressionOutput = torch.sum(finalRegressionOutput.cpu().detach() , dim=-1).numpy() + 1
return CustomOutput(
loss=totalLoss,
logits=outputs,
classificationOutput=finalClassificationOutput,
regressionOutput=finalRegressionOutput,
)
def model_init(encoderPath=None,
dimKey=None,
customEncoder=None,
customDim=None,
mode="both",
use_coral=True,
use_cls=True,
supportPooledRepresentation=False,
freezeEmbedding=True,
num_labels=3,
num_ranks=5,
lambda_c=0.5,
lambda_r=0.5,
dropout_rate=0.2,):
encoder = ts.AutoModel.from_pretrained(encoderPath) if encoderPath != None else customEncoder
dim = encoder.config.to_dict()[dimKey] if dimKey != None else customDim
model = SequenceClassificationModel(
encoder,
dim,
use_coral=use_coral,
use_cls=use_cls,
supportPooledRepresentation=supportPooledRepresentation,
mode=mode,
num_labels=num_labels,
num_ranks=num_ranks,
lambda_c=lambda_c,
lambda_r=lambda_r,
dropout_rate=dropout_rate,
)
try:
if freezeEmbedding:
for param in model.encoder.embeddings.parameters():
param.requires_grad = False
except:
print("The embedding layer name is different in this model, try to find the name of the emebdding layer and freeze it manually")
return model
def makeTrainer(model,
trainDataset,
data_collator,
tokenizer,
outputsPath,
learning_rate=1.90323e-05,
scheduler="cosine",
save_steps=5000,
batch_size=8,
num_epochs=5,
weight_decay=0.00123974,
roundingType="F"):
def data_collator_fn(items , columns=[]):
data_collator_input = {
"input_ids": items[columns[0]],
"attention_mask": items[columns[1]]
}
result = data_collator(data_collator_input)
items[columns[0]] = result["input_ids"]
items[columns[1]] = result["attention_mask"]
def collate_function(items):
outputDict = {
key: [] for key in items[0].keys()
}
for item in items:
for key in item.keys():
outputDict[key].append(item[key])
data_collator_fn(outputDict , ["input_ids" , "attention_mask"])
#Removing unnecessary Items from outputDict
columns = ["sentence" , "filler" , "id"]
for item in columns:
try:
outputDict.pop(item)
except:
pass
#Adding New Columns
if "labels" in outputDict.keys():
outputDict["labels"] = torch.tensor(outputDict.pop("labels"))
if "scores" in outputDict.keys():
if roundingType == "F":
outputDict["scores"] = torch.tensor(outputDict.pop("scores") , dtype=torch.int32) - 1
elif roundingType == "R":
outputDict["scores"] = torch.tensor([round(score) for score in outputDict.pop("scores")] , dtype=torch.int32) - 1
filler_indecies = torch.tensor(outputDict.pop("filler_indecies")).view(-1 , 1)
outputDict["filler_indecies"] = (torch.arange(filler_indecies.shape[0]).view(-1 , 1) , filler_indecies)
return outputDict
training_args = TrainingArguments(
outputsPath,
learning_rate= learning_rate,
lr_scheduler_type=scheduler,
save_steps=save_steps,
per_device_train_batch_size=batch_size,
num_train_epochs=num_epochs,
weight_decay=weight_decay,
remove_unused_columns=False,
)
trainer = Trainer(
model=model,
args=training_args,
train_dataset=trainDataset,
tokenizer=tokenizer,
data_collator=collate_function,
)
return trainer , collate_function
"""#Evaluating on Val Dataset"""
def evaluateModel(
model,
dataset,
collate_function,
):
model.eval()
#Passing the inputs through model
labels = []
scores = []
for item in dataset:
sample_input = collate_function([item])
outputs = model(input_ids=sample_input["input_ids"].to(model.encoder.device),
attention_mask=sample_input["attention_mask"].to(model.encoder.device),
filler_indecies=sample_input["filler_indecies"],
scores=None)
labels.append(outputs["classificationOutput"][0])
scores.append(outputs["regressionOutput"][0])
#Computing Accuracy
count = 0
correctCount = 0
for prediction , target in zip(labels , dataset["labels"]):
count += 1
correctCount += 1 if prediction == target else 0
accuracy = (correctCount / count)
#Computing Spearman
scores = np.array(scores , dtype=np.float32)
valScores = np.array(dataset["scores"] , dtype=np.float32)
spearman = stats.spearmanr(scores.reshape(-1 , 1) , valScores.reshape(-1 , 1))
return (labels , scores) , accuracy , spearman
"""#Making Predictions on Test Dataset"""
def predictOnTestDataset(
model,
dataset,
collate_function,
labelsPath=None,
scoresPath=None,
):
model.eval()
ids = []
classification_predictions = []
ranking_predictions = []
for item in dataset:
sample_input = collate_function([item])
outputs = model(input_ids=sample_input["input_ids"].to(model.encoder.device),
attention_mask=sample_input["attention_mask"].to(model.encoder.device),
filler_indecies=sample_input["filler_indecies"],
scores=None,
labels=None)
ids.append(item["id"])
classification_predictions.append(outputs["classificationOutput"][0])
ranking_predictions.append(outputs["regressionOutput"][0])
if labelsPath != None:
open(labelsPath , mode="wb")
write_predictions_to_file(labelsPath , ids , classification_predictions , "classification")
if scoresPath != None:
open(scoresPath , mode="wb")
write_predictions_to_file(scoresPath , ids , ranking_predictions , "ranking")
return ids , classification_predictions , ranking_predictions
"""#Inference"""
def inference(
model,
sentences,
fillers,
tokenizer,
collate_function
):
model.eval()
datasetDict = {
"sentence": sentences,
"filler": fillers,
}
dataset = Dataset.from_dict(datasetDict)
tokenizedDataset = preprocessDataset(dataset , tokenizer)
finalInput = collate_function(tokenizedDataset)
outputs = model(
input_ids=finalInput["input_ids"].to(model.encoder.device),
attention_mask=finalInput["attention_mask"].to(model.encoder.device),
filler_indecies=finalInput["filler_indecies"],
)
finalLabels = []
for item in outputs["classificationOutput"].reshape(-1):
if item == 0:
finalLabels.append("Implausible")
elif item == 1:
finalLabels.append("Neutral")
elif item == 2:
finalLabels.append("Plausible")
finalLabels = np.array(finalLabels)
return {
"labels": finalLabels,
"scores": outputs["regressionOutput"],
}
| [
"transformers.AutoModel.from_pretrained",
"torch.nn.Dropout",
"torch.nn.GELU",
"transformers.TrainingArguments",
"pandas.read_csv",
"torch.nn.CrossEntropyLoss",
"datasets.Dataset.from_dict",
"torch.sigmoid",
"data_loader.retrieve_labels_from_dataset_for_classification",
"numpy.array",
"data_load... | [((1360, 1402), 'pandas.read_csv', 'pd.read_csv', (['dataPath'], {'sep': '"""\t"""', 'quoting': '(3)'}), "(dataPath, sep='\\t', quoting=3)\n", (1371, 1402), True, 'import pandas as pd\n'), ((1436, 1476), 'data_loader.retrieve_instances_from_dataset', 'retrieve_instances_from_dataset', (['dataset'], {}), '(dataset)\n', (1467, 1476), False, 'from data_loader import retrieve_instances_from_dataset, retrieve_labels_from_dataset_for_classification, retrieve_labels_from_dataset_for_ranking, write_predictions_to_file\n'), ((2373, 2403), 'datasets.Dataset.from_dict', 'Dataset.from_dict', (['datasetDict'], {}), '(datasetDict)\n', (2390, 2403), False, 'from datasets import Dataset\n'), ((12310, 12558), 'transformers.TrainingArguments', 'TrainingArguments', (['outputsPath'], {'learning_rate': 'learning_rate', 'lr_scheduler_type': 'scheduler', 'save_steps': 'save_steps', 'per_device_train_batch_size': 'batch_size', 'num_train_epochs': 'num_epochs', 'weight_decay': 'weight_decay', 'remove_unused_columns': '(False)'}), '(outputsPath, learning_rate=learning_rate,\n lr_scheduler_type=scheduler, save_steps=save_steps,\n per_device_train_batch_size=batch_size, num_train_epochs=num_epochs,\n weight_decay=weight_decay, remove_unused_columns=False)\n', (12327, 12558), False, 'from transformers import TrainingArguments, Trainer\n'), ((12615, 12740), 'transformers.Trainer', 'Trainer', ([], {'model': 'model', 'args': 'training_args', 'train_dataset': 'trainDataset', 'tokenizer': 'tokenizer', 'data_collator': 'collate_function'}), '(model=model, args=training_args, train_dataset=trainDataset,\n tokenizer=tokenizer, data_collator=collate_function)\n', (12622, 12740), False, 'from transformers import TrainingArguments, Trainer\n'), ((13701, 13735), 'numpy.array', 'np.array', (['scores'], {'dtype': 'np.float32'}), '(scores, dtype=np.float32)\n', (13709, 13735), True, 'import numpy as np\n'), ((13751, 13796), 'numpy.array', 'np.array', (["dataset['scores']"], {'dtype': 
'np.float32'}), "(dataset['scores'], dtype=np.float32)\n", (13759, 13796), True, 'import numpy as np\n'), ((15307, 15337), 'datasets.Dataset.from_dict', 'Dataset.from_dict', (['datasetDict'], {}), '(datasetDict)\n', (15324, 15337), False, 'from datasets import Dataset\n'), ((15936, 15957), 'numpy.array', 'np.array', (['finalLabels'], {}), '(finalLabels)\n', (15944, 15957), True, 'import numpy as np\n'), ((1693, 1761), 'pandas.read_csv', 'pd.read_csv', (['labelPath'], {'sep': '"""\t"""', 'header': 'None', 'names': "['Id', 'Label']"}), "(labelPath, sep='\\t', header=None, names=['Id', 'Label'])\n", (1704, 1761), True, 'import pandas as pd\n'), ((1775, 1830), 'data_loader.retrieve_labels_from_dataset_for_classification', 'retrieve_labels_from_dataset_for_classification', (['labels'], {}), '(labels)\n', (1822, 1830), False, 'from data_loader import retrieve_instances_from_dataset, retrieve_labels_from_dataset_for_classification, retrieve_labels_from_dataset_for_ranking, write_predictions_to_file\n'), ((1922, 1991), 'pandas.read_csv', 'pd.read_csv', (['scoresPath'], {'sep': '"""\t"""', 'header': 'None', 'names': "['Id', 'Label']"}), "(scoresPath, sep='\\t', header=None, names=['Id', 'Label'])\n", (1933, 1991), True, 'import pandas as pd\n'), ((2005, 2053), 'data_loader.retrieve_labels_from_dataset_for_ranking', 'retrieve_labels_from_dataset_for_ranking', (['scores'], {}), '(scores)\n', (2045, 2053), False, 'from data_loader import retrieve_instances_from_dataset, retrieve_labels_from_dataset_for_classification, retrieve_labels_from_dataset_for_ranking, write_predictions_to_file\n'), ((5273, 5319), 'torch.nn.Dropout', 'nn.Dropout', ([], {'p': 'self.dropout_rate', 'inplace': '(False)'}), '(p=self.dropout_rate, inplace=False)\n', (5283, 5319), True, 'import torch.nn as nn\n'), ((5348, 5384), 'coral_pytorch.layers.CoralLayer', 'CoralLayer', (['self.dim', 'self.num_ranks'], {}), '(self.dim, self.num_ranks)\n', (5358, 5384), False, 'from coral_pytorch.layers import 
CoralLayer\n'), ((8287, 8349), 'torch.concat', 'torch.concat', (['(classificationOutput, regressionOutput)'], {'dim': '(-1)'}), '((classificationOutput, regressionOutput), dim=-1)\n', (8299, 8349), False, 'import torch\n'), ((8385, 8420), 'torch.sigmoid', 'torch.sigmoid', (['classificationOutput'], {}), '(classificationOutput)\n', (8398, 8420), False, 'import torch\n'), ((8449, 8480), 'torch.sigmoid', 'torch.sigmoid', (['regressionOutput'], {}), '(regressionOutput)\n', (8462, 8480), False, 'import torch\n'), ((9335, 9376), 'transformers.AutoModel.from_pretrained', 'ts.AutoModel.from_pretrained', (['encoderPath'], {}), '(encoderPath)\n', (9363, 9376), True, 'import transformers as ts\n'), ((14787, 14879), 'data_loader.write_predictions_to_file', 'write_predictions_to_file', (['labelsPath', 'ids', 'classification_predictions', '"""classification"""'], {}), "(labelsPath, ids, classification_predictions,\n 'classification')\n", (14812, 14879), False, 'from data_loader import retrieve_instances_from_dataset, retrieve_labels_from_dataset_for_classification, retrieve_labels_from_dataset_for_ranking, write_predictions_to_file\n'), ((14944, 15018), 'data_loader.write_predictions_to_file', 'write_predictions_to_file', (['scoresPath', 'ids', 'ranking_predictions', '"""ranking"""'], {}), "(scoresPath, ids, ranking_predictions, 'ranking')\n", (14969, 15018), False, 'from data_loader import retrieve_instances_from_dataset, retrieve_labels_from_dataset_for_classification, retrieve_labels_from_dataset_for_ranking, write_predictions_to_file\n'), ((5127, 5171), 'torch.nn.Linear', 'nn.Linear', (['(self.dim * 2)', 'self.dim'], {'bias': '(True)'}), '(self.dim * 2, self.dim, bias=True)\n', (5136, 5171), True, 'import torch.nn as nn\n'), ((5210, 5250), 'torch.nn.Linear', 'nn.Linear', (['self.dim', 'self.dim'], {'bias': '(True)'}), '(self.dim, self.dim, bias=True)\n', (5219, 5250), True, 'import torch.nn as nn\n'), ((5437, 5474), 'coral_pytorch.layers.CoralLayer', 'CoralLayer', 
(['self.dim', 'self.num_labels'], {}), '(self.dim, self.num_labels)\n', (5447, 5474), False, 'from coral_pytorch.layers import CoralLayer\n'), ((5518, 5565), 'torch.nn.Linear', 'nn.Linear', (['self.dim', 'self.num_labels'], {'bias': '(True)'}), '(self.dim, self.num_labels, bias=True)\n', (5527, 5565), True, 'import torch.nn as nn\n'), ((6673, 6682), 'torch.nn.GELU', 'nn.GELU', ([], {}), '()\n', (6680, 6682), True, 'import torch.nn as nn\n'), ((6296, 6350), 'torch.concat', 'torch.concat', (['(sentence_embedding[1], fillers)'], {'dim': '(-1)'}), '((sentence_embedding[1], fillers), dim=-1)\n', (6308, 6350), False, 'import torch\n'), ((6389, 6452), 'torch.concat', 'torch.concat', (['(sentence_embedding[0][:, 0, :], fillers)'], {'dim': '(-1)'}), '((sentence_embedding[0][:, 0, :], fillers), dim=-1)\n', (6401, 6452), False, 'import torch\n'), ((7442, 7463), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (7461, 7463), True, 'import torch.nn as nn\n'), ((12196, 12234), 'torch.arange', 'torch.arange', (['filler_indecies.shape[0]'], {}), '(filler_indecies.shape[0])\n', (12208, 12234), False, 'import torch\n')] |
'''
This provides the view functions for the /api/libraries endpoints
'''
import flask
from flask import current_app
class ApiEndpoint(object):
def __init__(self, blueprint):
blueprint.add_url_rule("/libraries/", view_func = self.get_libraries)
blueprint.add_url_rule("/libraries/<int:collection_id>", view_func = self.get_library)
def get_libraries(self):
kwdb = current_app.kwdb
query_pattern = flask.request.args.get('pattern', "*").strip().lower()
libraries = kwdb.get_collections(query_pattern)
return flask.jsonify(libraries=libraries)
def get_library(self, collection_id):
# if collection_id is a library _name_, redirect
print("get_library: collection_id=", collection_id)
kwdb = current_app.kwdb
collection = kwdb.get_collection(collection_id)
if collection is None:
flask.abort(404)
return flask.jsonify(collection=collection)
| [
"flask.abort",
"flask.request.args.get",
"flask.jsonify"
] | [((569, 603), 'flask.jsonify', 'flask.jsonify', ([], {'libraries': 'libraries'}), '(libraries=libraries)\n', (582, 603), False, 'import flask\n'), ((927, 963), 'flask.jsonify', 'flask.jsonify', ([], {'collection': 'collection'}), '(collection=collection)\n', (940, 963), False, 'import flask\n'), ((895, 911), 'flask.abort', 'flask.abort', (['(404)'], {}), '(404)\n', (906, 911), False, 'import flask\n'), ((442, 480), 'flask.request.args.get', 'flask.request.args.get', (['"""pattern"""', '"""*"""'], {}), "('pattern', '*')\n", (464, 480), False, 'import flask\n')] |
''' A module for defining and producing the linekey object, which is used
to determine and store information about data format in a CRREL
ice mass balance buoy.'''
class linekey:
def __init__(self,date_index = 0):
self.date_index = date_index
self.value_index = []
self.phenomena_names = []
self.lon_flip_ew = (False,-1,-1)
self.lat_flip_ns = (False,-1,-1)
self.vertical_scale = 1.
self.fliplon = False
def add_value_index(self,phenomenon,index):
self.value_index.append(index)
self.phenomena_names.append(phenomenon)
def ns(self,index_flippee,index_flipper):
self.lat_flip_ns = (True,index_flippee,index_flipper)
def ew(self,index_flippee,index_flipper):
self.lon_flip_ew = (True,index_flippee,index_flipper)
def get_temp_linekey(data_file):
import csv
fileh = open(data_file)
rows = csv.reader(fileh)
found_key = False
found_date = False
for row in rows:
print(row)
for (i,strtest) in enumerate(row):
if ('Date' in strtest) or ('DATE' in strtest):
key = linekey(date_index = i)
found_date = True
break
if found_date:
temp_codes = {}
temp_type = ''
for (i,strtest) in enumerate(row):
result = classify_temp_header(strtest)
if result[0]==1:
if temp_type == 'subjective':
print('Unable to determine temperature type')
return None
temp_type = 'objective'
prefix = 'TO'
if result[0]==2:
if temp_type == 'objective':
print('Unable to determine temperature type')
return None
temp_type = 'subjective'
prefix = 'TS'
temp_codes[i] = classify_temp_header(strtest)
if result[0]!=0:
key.add_value_index(prefix+str(result[1]),i)
break
return key
def get_linekey(data_file, variable_list, buoy_name):
    """Build a linekey mapping each name in *variable_list* to its column
    in *data_file*.

    Header titles for each variable come from dictionaries.title_dic().
    Columns measured in cm get vertical_scale 0.01; a 'Longitude (W)'
    header or an E/W (N/S) hemisphere column triggers sign flipping of
    longitude (latitude).  Returns None when no date column is found.
    """
    import dictionaries
    import csv
    fileh = open(data_file)
    rows = csv.reader(fileh)
    found_key = False
    found_date = False
    td = dictionaries.title_dic()
    variable_keys_list = [td[variable_name] for variable_name in variable_list]
    vertical_scale = 1.
    fliplon = False
    for row in rows:
        if not found_key:
            for (i,strtest) in enumerate(row):
                if ('Date' in strtest) or ('DATE' in strtest):
                    key = linekey(date_index = i)
                    found_date = True
                    break
        if found_date:
            for (varno,variable_keys) in enumerate(variable_keys_list):
                found_key = False
                for string in variable_keys:
                    for (i,strtest) in enumerate(row):
                        if (string == strtest.strip()):
                            key.add_value_index(variable_list[varno],i)
                            found_key = True
                            i_key = i
                            if '(cm)' in string:
                                vertical_scale = 0.01
                            if '(m)' in string:
                                vertical_scale = 1.
                            if string=='Longitude (W)':
                                fliplon = True
                # Missing variables are registered with column -1.
                if not found_key:
                    key.add_value_index(variable_list[varno],-1)
                if variable_list[varno]=='latitude':
                    for (i,strtest) in enumerate(row):
                        if (strtest == 'N/S'):
                            key.ns(i_key,i)
                if variable_list[varno]=='longitude':
                    for (i,strtest) in enumerate(row):
                        if (strtest == 'E/W'):
                            key.ew(i_key,i)
            if True in [('units are cm') in item for item in row]:
                vertical_scale = 0.01
            if 'E/W' in row and 'longitude' in key.phenomena_names:
                i_flipper = row.index('E/W')
                i_flippee = key.value_index[key.phenomena_names.index('longitude')]
                key.ew(i_flippee,i_flipper)
            if 'N/S' in row and 'latitude' in key.phenomena_names:
                i_flipper = row.index('N/S')
                i_flippee = key.value_index[key.phenomena_names.index('latitude')]
                # BUGFIX: the latitude/N-S branch previously called key.ew(),
                # clobbering the longitude flip instead of setting the
                # latitude one.
                key.ns(i_flippee,i_flipper)
    if not found_date:
        print('Could not find date')
        fileh.close()
        return None
    key.vertical_scale = vertical_scale
    key.fliplon = fliplon
    fileh.close()
    return key
def classify_temp_header(string):
    """Classify one column header as a temperature column.

    Returns a (kind, number) pair:
      (1, depth)  -- header is itself a number ("objective" temperature),
      (2, index)  -- header looks like 'T3', 'T3(C)', 'TEMP3' or 'Temp 3'
                     ("subjective" temperature),
      (0, 0)      -- not a temperature header.
    """
    import functions
    if functions.is_number(string):
        return (1, float(string))
    if string[0:1] == 'T' and string[-3:] == '(C)' and functions.is_number(string[1:-3]):
        return (2, int(string[1:-3]))
    if string[0:1] == 'T' and functions.is_number(string[1:]):
        return (2, int(string[1:]))
    if len(string) >= 4:
        if string[0:4] == 'TEMP' and functions.is_number(string[4:]):
            return (2, int(string[4:]))
        if string[0:5] == 'Temp ' and functions.is_number(string[5:]):
            return (2, int(string[5:]))
    return (0, 0)
| [
"dictionaries.title_dic",
"functions.is_number",
"csv.reader"
] | [((930, 947), 'csv.reader', 'csv.reader', (['fileh'], {}), '(fileh)\n', (940, 947), False, 'import csv\n'), ((2444, 2461), 'csv.reader', 'csv.reader', (['fileh'], {}), '(fileh)\n', (2454, 2461), False, 'import csv\n'), ((2521, 2545), 'dictionaries.title_dic', 'dictionaries.title_dic', ([], {}), '()\n', (2543, 2545), False, 'import dictionaries\n'), ((5194, 5221), 'functions.is_number', 'functions.is_number', (['string'], {}), '(string)\n', (5213, 5221), False, 'import functions\n'), ((5338, 5371), 'functions.is_number', 'functions.is_number', (['string[1:-3]'], {}), '(string[1:-3])\n', (5357, 5371), False, 'import functions\n'), ((5465, 5496), 'functions.is_number', 'functions.is_number', (['string[1:]'], {}), '(string[1:])\n', (5484, 5496), False, 'import functions\n'), ((5620, 5651), 'functions.is_number', 'functions.is_number', (['string[4:]'], {}), '(string[4:])\n', (5639, 5651), False, 'import functions\n'), ((5758, 5789), 'functions.is_number', 'functions.is_number', (['string[5:]'], {}), '(string[5:])\n', (5777, 5789), False, 'import functions\n')] |
from flask import Flask, render_template, session, redirect, url_for
app = Flask(__name__)
# NOTE(review): '<PASSWORD>' looks like a scrubbed placeholder -- supply a
# real random secret (e.g. from the environment) before deploying; Flask
# signs session cookies with this value.
app.config['SECRET_KEY'] = '<PASSWORD>'
@app.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')
@app.route('/set-background/<mode>')
def set_background(mode):
    """Remember the requested background *mode* in the session, then go home."""
    session['mode'] = mode
    return redirect(url_for('index'))
@app.route('/drop-session')
def drop_session():
    """Forget any stored background mode, then go home."""
    session.pop('mode', None)
    return redirect(url_for('index'))
"flask.render_template",
"flask.session.pop",
"flask.url_for",
"flask.Flask"
] | [((78, 93), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (83, 93), False, 'from flask import Flask, render_template, session, redirect, url_for\n'), ((180, 209), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (195, 209), False, 'from flask import Flask, render_template, session, redirect, url_for\n'), ((401, 426), 'flask.session.pop', 'session.pop', (['"""mode"""', 'None'], {}), "('mode', None)\n", (412, 426), False, 'from flask import Flask, render_template, session, redirect, url_for\n'), ((326, 342), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (333, 342), False, 'from flask import Flask, render_template, session, redirect, url_for\n'), ((448, 464), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (455, 464), False, 'from flask import Flask, render_template, session, redirect, url_for\n')] |
import random
from collections import deque
import networkx as nx
from lib import puzzle
def draw_grid(grid):
    """Render *grid* (a dict mapping (y, x) -> single character) as text.

    The drawn window always includes (0, 0), extends one extra row and
    column past the maximum coordinates, and fills missing cells with '#'.
    Each row ends with a newline.
    """
    # Include 0 so the origin is always covered, matching an empty grid too.
    ys = [0] + [y for y, _ in grid]
    xs = [0] + [x for _, x in grid]
    rows = []
    for y in range(min(ys), max(ys) + 2):
        row = ''.join(grid.get((y, x), '#') for x in range(min(xs), max(xs) + 2))
        rows.append(row + '\n')
    return ''.join(rows)
def construct_graph(grid, position, keys):
    """Flood-fill *grid* from *position* through walkable cells.

    Walkable cells are floor '.', the entrance '@' and key tiles a-z
    (doors block).  Returns (graph, possible_keys): a networkx graph of
    adjacent walkable cells reachable from *position*, and the positions
    of reachable cells whose character is in *keys*.
    """
    g = nx.Graph()
    queue = deque([position])
    visited = set()
    movable = {'.', '@', *[chr(x) for x in range(ord('a'), ord('z') + 1)]}
    possible_keys = []
    while len(queue) > 0:
        n = queue.popleft()
        if n in visited:
            # A cell can be enqueued by several neighbours before it is
            # visited; skip re-processing so keys are not reported twice.
            continue
        visited.add(n)
        if n in grid and grid[n] in keys:
            possible_keys.append(n)
        for y, x in [(n[0] + 1, n[1]), (n[0] - 1, n[1]), (n[0], n[1] + 1), (n[0], n[1] - 1)]:
            if (y, x) in grid and grid[(y, x)] in movable:
                g.add_edge(n, (y, x))
                if (y, x) not in visited:
                    queue.append((y, x))
    return g, possible_keys
def new_state_path_length(t):
    """Sort key for search states: the length of the path stored as the
    last element of the state tuple."""
    *_, path = t
    return len(path)
class Day18(puzzle.Puzzle):
    """Advent of Code 2019 day 18: collect every key in a maze of doors.

    part1 runs a randomised branch-and-bound search over
    (grid, position, keys_collected, path) states.
    """
    year = '2019'
    day = '18'
    def get_data(self):
        """Return the raw puzzle input text."""
        data = self.input_data
        return data
    def part1(self):
        """Search for the shortest path collecting all keys.

        States live on a deque; `b_next` toggles between popping from the
        front (breadth-first) and the back (depth-first).  Branches are
        pruned against the best complete length seen so far.
        """
        data = self.get_data()
        g = nx.Graph()
        keys = {}           # key char -> (y, x)
        pos_to_key = {}     # (y, x) -> key char
        doors = {}          # door char (A-Z) -> (y, x)
        position = (0, 0)
        grid = {}           # (y, x) -> tile char for every non-wall cell
        # Parse the maze: walls are omitted from `grid` entirely.
        for y, row in enumerate(data.splitlines()):
            for x, c in enumerate(row):
                if c == '#':
                    continue
                if c == '.':
                    grid[(y, x)] = '.'
                    continue
                if c == '@':
                    position = (y, x)
                    grid[(y, x)] = c
                if ord(c) in set(range(ord('a'), ord('z') + 1)):
                    keys[c] = (y, x)
                    pos_to_key[(y, x)] = c
                    grid[(y, x)] = c
                if ord(c) in set(range(ord('A'), ord('Z') + 1)):
                    doors[c] = (y, x)
                    grid[(y, x)] = c
        paths = []
        # Each state: (grid snapshot, current position, keys collected, path so far).
        state = deque([(dict(grid), position, set(), [position])])
        # NOTE(review): hand-tuned initial upper bound for pruning -- looks
        # input-specific; confirm it is valid for other inputs.
        shortest_path = 5423
        b_next = True
        counted = 0  # NOTE(review): never updated below.
        discarded = 0    # states pruned on path length
        discarded2 = 0   # states pruned on the length/keys ratio heuristic
        reached_end = 0  # states with no reachable new key
        reached_end2 = 0 # states that collected every key
        count = 0
        while len(state) > 0:
            count += 1
            if count % 100 == 0:
                # Periodic progress report; also force a breadth-first pop.
                print(f'{count}, states: {len(state)}, shortest path: {shortest_path}, discarded: {discarded}, discarded2: {discarded2}, reached end: {reached_end}, reached end2: {reached_end2}')
                b_next = True
            if b_next == True:
                current_grid, position, keys_collected, path = state.popleft()
                #print(f'b_next, {len(paths)}')
            else:
                current_grid, position, keys_collected, path = state.pop()
            #print(f'At position {position}, keys collected {keys_collected}')
            #print(draw_grid(current_grid))
            #print(len(keys_collected), len(keys))
            if len(path) >= shortest_path:
                b_next = True
                discarded += 1
                continue
            if len(keys_collected) == len(keys):
                # Every key collected: candidate solution.
                reached_end2 += 1
                if len(path) < shortest_path:
                    shortest_path = len(path)
                    print(f'new shortest path {shortest_path}, paths: {len(paths)}, discarded: {discarded}')
                #b_next = True
                continue
            graph, possible_keys = construct_graph(current_grid, position, keys)
            #print(f'possible keys: {possible_keys}, collected: {keys_collected}')
            b_next = False
            new_states = []
            for key_pos in possible_keys:
                #print(f'Adding path to {key_pos}')
                path_to_key = nx.shortest_path(graph, position, key_pos)[1:]
                new_path = path + path_to_key
                if len(new_path) >= shortest_path:
                    #b_next = True
                    discarded += 1
                    continue
                # Heuristic prune: average steps per key already exceeds the
                # best solution's average.
                if (len(new_path) / (len(keys_collected) + 1)) >= (shortest_path / len(pos_to_key)):
                    #b_next = True
                    discarded2 += 1
                    continue
                new_position = key_pos
                new_keys_collected = set(keys_collected)
                new_keys_collected.add(current_grid[key_pos])
                #print(f'new keys collected {new_keys_collected}')
                key = current_grid[key_pos]
                new_grid = dict(current_grid)
                new_grid[position] = '.'
                # Picking up the key opens its matching door, if any.
                if key.upper() in doors:
                    new_grid[doors[pos_to_key[key_pos].upper()]] = '.'
                new_grid[key_pos] = '@'
                new_states.append((
                    new_grid,
                    new_position,
                    new_keys_collected,
                    new_path,
                ))
            #for new_state in sorted(new_states, key=new_state_path_length):
            # Shuffle successor order to diversify the depth-first dives.
            for new_state in random.sample(new_states, len(new_states)):
                state.append(new_state)
            if len(new_states) == 0:
                reached_end += 1
                b_next = True
        print(draw_grid(grid))
        print(paths)
        print(len(paths))
        lengths = []
        # NOTE(review): `paths` is never appended to above, so min(lengths)
        # below raises ValueError on an empty sequence -- the answer appears
        # to be read from the printed `shortest_path` instead; verify.
        for path in paths:
            print(len(path) - 1)
            lengths.append(len(path) - 1)
        return min(lengths)
    def part2(self):
        """Not implemented."""
        return None
    def main(self):
        """Print the answers for both parts."""
        print(f'Part 1 Answer: {self.part1()}')
        print(f'Part 2 Answer: {self.part2()}')
| [
"networkx.shortest_path",
"collections.deque",
"networkx.Graph"
] | [((715, 725), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (723, 725), True, 'import networkx as nx\n'), ((735, 752), 'collections.deque', 'deque', (['[position]'], {}), '([position])\n', (740, 752), False, 'from collections import deque\n'), ((1580, 1590), 'networkx.Graph', 'nx.Graph', ([], {}), '()\n', (1588, 1590), True, 'import networkx as nx\n'), ((4204, 4246), 'networkx.shortest_path', 'nx.shortest_path', (['graph', 'position', 'key_pos'], {}), '(graph, position, key_pos)\n', (4220, 4246), True, 'import networkx as nx\n')] |
# --------------
# Import packages
import numpy as np
import pandas as pd
from scipy.stats import mode
# code starts here
# Load the loans dataset and split columns by dtype.
# NOTE(review): `path` is not defined in this file -- it is assumed to be
# injected by the grading environment before this script runs.
bank=pd.read_csv(path)
categorical_var=bank.select_dtypes(include='object')
print(categorical_var)
numerical_var=bank.select_dtypes(include='number')
print(numerical_var)
# code ends here
# --------------
# Drop the identifier column and fill missing values with each column's mode.
banks=bank.drop(['Loan_ID'],axis=1)
print(banks.isnull().sum())
bank_mode=banks.mode().iloc[0]
print(bank_mode)
banks.fillna(bank_mode,inplace=True)
print(banks.isnull().sum())
# --------------
# Code starts here
# Mean loan amount per (Gender, Married, Self_Employed) group.
import pandas as pd
import numpy as np
avg_loan_amount=pd.pivot_table(banks,index=['Gender','Married','Self_Employed'],
                 values= ['LoanAmount'],aggfunc='mean')
print(avg_loan_amount)
# code ends here
# --------------
# code starts here
# Approval percentages for self-employed vs. non-self-employed applicants.
yes=(banks['Loan_Status']=='Y') & (banks['Self_Employed']=='Yes')
loan_approved_se=banks[yes].count()[0]
no=(banks['Loan_Status']=='Y') & (banks['Self_Employed']=='No')
loan_approved_nse=banks[no].count()[0]
Loan_Status_count=banks['Loan_Status'].count()
percentage_se=100*loan_approved_se/Loan_Status_count
percentage_nse=100*loan_approved_nse/Loan_Status_count
print(percentage_nse,percentage_se)
# code ends here
# --------------
# code starts here
# Count loans whose term is 25 years or longer (term is stored in months).
loan_term=banks['Loan_Amount_Term'].apply(lambda x: int(x)/12)
big_loan_term=len(loan_term[loan_term>=25])
print(big_loan_term)
# code ends here
# --------------
# code starts here
# Mean income and credit history per loan status.
# NOTE(review): tuple-style column selection after groupby is deprecated in
# newer pandas; the modern spelling is [['ApplicantIncome','Credit_History']].
loan_groupby=banks.groupby(['Loan_Status'])['ApplicantIncome','Credit_History']
mean_values=loan_groupby.mean()
print(mean_values)
# code ends here
| [
"pandas.pivot_table",
"pandas.read_csv"
] | [((133, 150), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (144, 150), True, 'import pandas as pd\n'), ((611, 722), 'pandas.pivot_table', 'pd.pivot_table', (['banks'], {'index': "['Gender', 'Married', 'Self_Employed']", 'values': "['LoanAmount']", 'aggfunc': '"""mean"""'}), "(banks, index=['Gender', 'Married', 'Self_Employed'], values=\n ['LoanAmount'], aggfunc='mean')\n", (625, 722), True, 'import pandas as pd\n')] |
# -*- coding: utf-8 -*-
""" Example definition of a borehole. A top-view plot of the borehole is
created and the borehole resistance is computed.
"""
from __future__ import absolute_import, division, print_function
import pygfunction as gt
from numpy import pi
def main():
    """Define a single U-tube borehole, compute its effective borehole
    thermal resistance, and save a top view of the pipe layout to PDF."""
    # Borehole dimensions
    H = 400. # Borehole length (m)
    D = 5. # Borehole buried depth (m)
    r_b = 0.0875 # Borehole radius (m)
    # Pipe dimensions
    rp_out = 0.0133 # Pipe outer radius (m)
    rp_in = 0.0108 # Pipe inner radius (m)
    D_s = 0.029445 # Shank spacing (m)
    epsilon = 1.0e-6 # Pipe roughness (m)
    # Pipe positions
    # Single U-tube [(x_in, y_in), (x_out, y_out)]
    pos = [(-D_s, 0.), (D_s, 0.)]
    # Define a borehole
    borehole = gt.boreholes.Borehole(H, D, r_b, x=0., y=0.)
    k_p = 0.4 # Pipe thermal conductivity (W/m.K)
    k_s = 2.0 # Ground thermal conductivity (W/m.K)
    k_g = 1.0 # Grout thermal conductivity (W/m.K)
    # Fluid properties
    m_flow = 0.25 # Total fluid mass flow rate per borehole (kg/s)
    cp_f = 3977. # Fluid specific isobaric heat capacity (J/kg.K)
    den_f = 1015. # Fluid density (kg/m3)
    visc_f = 0.00203 # Fluid dynamic viscosity (kg/m.s)
    k_f = 0.492 # Fluid thermal conductivity (W/m.K)
    # Pipe thermal resistance
    R_p = gt.pipes.conduction_thermal_resistance_circular_pipe(rp_in,
                                                          rp_out,
                                                          k_p)
    # Fluid to inner pipe wall thermal resistance (Single U-tube)
    h_f = gt.pipes.convective_heat_transfer_coefficient_circular_pipe(m_flow,
                                                                rp_in,
                                                                visc_f,
                                                                den_f,
                                                                k_f,
                                                                cp_f,
                                                                epsilon)
    # Convert the film coefficient to a resistance per unit borehole length.
    R_f = 1.0 / (h_f * 2 * pi * rp_in)
    SingleUTube = gt.pipes.SingleUTube(
        pos, rp_in, rp_out, borehole, k_s, k_g, R_f + R_p)
    Rb = gt.pipes.borehole_thermal_resistance(SingleUTube, m_flow, cp_f)
    print('Borehole thermal resistance: {0:.4f} m.K/W'.format(Rb))
    # Check the geometry to make sure it is physically possible
    #
    # This class method is automatically called at the instanciation of the
    # pipe object and raises an error if the pipe geometry is invalid. It is
    # manually called here for demonstration.
    check = SingleUTube._check_geometry()
    print('The geometry of the borehole is valid (realistic/possible): '
          + str(check))
    # Create a borehole top view
    fig = SingleUTube.visualize_pipes()
    # Save the figure as a pdf
    fig.savefig('borehole-top-view.pdf')
if __name__ == '__main__':
    main()
| [
"pygfunction.pipes.convective_heat_transfer_coefficient_circular_pipe",
"pygfunction.pipes.SingleUTube",
"pygfunction.pipes.borehole_thermal_resistance",
"pygfunction.boreholes.Borehole",
"pygfunction.pipes.conduction_thermal_resistance_circular_pipe"
] | [((792, 838), 'pygfunction.boreholes.Borehole', 'gt.boreholes.Borehole', (['H', 'D', 'r_b'], {'x': '(0.0)', 'y': '(0.0)'}), '(H, D, r_b, x=0.0, y=0.0)\n', (813, 838), True, 'import pygfunction as gt\n'), ((1357, 1429), 'pygfunction.pipes.conduction_thermal_resistance_circular_pipe', 'gt.pipes.conduction_thermal_resistance_circular_pipe', (['rp_in', 'rp_out', 'k_p'], {}), '(rp_in, rp_out, k_p)\n', (1409, 1429), True, 'import pygfunction as gt\n'), ((1632, 1745), 'pygfunction.pipes.convective_heat_transfer_coefficient_circular_pipe', 'gt.pipes.convective_heat_transfer_coefficient_circular_pipe', (['m_flow', 'rp_in', 'visc_f', 'den_f', 'k_f', 'cp_f', 'epsilon'], {}), '(m_flow, rp_in,\n visc_f, den_f, k_f, cp_f, epsilon)\n', (1691, 1745), True, 'import pygfunction as gt\n'), ((2220, 2291), 'pygfunction.pipes.SingleUTube', 'gt.pipes.SingleUTube', (['pos', 'rp_in', 'rp_out', 'borehole', 'k_s', 'k_g', '(R_f + R_p)'], {}), '(pos, rp_in, rp_out, borehole, k_s, k_g, R_f + R_p)\n', (2240, 2291), True, 'import pygfunction as gt\n'), ((2311, 2374), 'pygfunction.pipes.borehole_thermal_resistance', 'gt.pipes.borehole_thermal_resistance', (['SingleUTube', 'm_flow', 'cp_f'], {}), '(SingleUTube, m_flow, cp_f)\n', (2347, 2374), True, 'import pygfunction as gt\n')] |
# -*- coding: UTF-8 -*-
from random import randint
import math
from project import matplt,database
from geopy.geocoders import Nominatim
from geopy import exc
import os, shutil
categ_coef = 17960
geolocator = Nominatim()
def clean_temp_folder(folder):
    """Delete every regular file directly inside *folder*.

    Subdirectories (and anything inside them) are left untouched.
    """
    for entry in os.listdir(folder):
        full_path = os.path.join(folder, entry)
        if os.path.isfile(full_path):
            os.unlink(full_path)
def data_validation(list_data_for_valid):
    """Coerce every element of *list_data_for_valid* to float in place.

    Elements that cannot be converted (bad strings, None, ...) are replaced
    with 0.  Returns the same list object for call-chaining.

    BUGFIX: the original loop rebound the loop variable instead of writing
    back into the list, so the validation was a complete no-op.
    """
    for idx, value in enumerate(list_data_for_valid):
        try:
            list_data_for_valid[idx] = float(value)
        except (TypeError, ValueError):
            # TypeError covers non-stringy values such as None.
            list_data_for_valid[idx] = 0
    return list_data_for_valid
def plotting(post_plot_distr):
    """Plot histograms and scatter plots for three randomly chosen metrics.

    *post_plot_distr* is expected to be a sequence of dict-like records with
    the keys listed in `metrics` (presumably real-estate listings) -- TODO
    confirm against the caller.  Returns
    [hist_path, scatter_path, [hist_id, scatter_id_1, scatter_id_2]] as
    produced by matplt.plot_hist / matplt.plot_scatter.
    """
    # collecting data for plot
    metrics = ['price','room','all_area','livin_area','kitch_area','all_floors','year']
    to_return_plot_data = []
    # Pick three distinct metrics at random (chosen ones are removed below).
    for i in range(3):
        num_of_metr = randint(0,len(metrics)-1)
        plt_data = []
        # NOTE(review): this inner loop shadows the outer `i`; harmless here
        # since the outer index is unused.
        for i in range(0,len(post_plot_distr)):
            try :
                plt_data.append(post_plot_distr[i][metrics[num_of_metr]])
            except IndexError:
                break
        to_return_plot_data.append([metrics[num_of_metr],plt_data])
        metrics.remove(metrics[num_of_metr])
    path_img_hist = matplt.plot_hist(to_return_plot_data)
    path_img_scatter = matplt.plot_scatter(to_return_plot_data)
    return [path_img_hist[0],path_img_scatter[0],[path_img_hist[1],path_img_scatter[1],path_img_scatter[2]]]
def calculating(street, num_build):
    """Geocode the address '<street> <num_build>' in Kyiv via Nominatim.

    Returns [latitude, longitude]; [0, 0] when the address is not found
    or the geocoder times out.
    """
    try:
        location = geolocator.geocode("Киев, " + street + ' ' + num_build)
    except exc.GeocoderTimedOut:
        return [0, 0]
    if location is None:
        return [0, 0]
    return [location.latitude, location.longitude]
def choose_full_info_row(list_df_na):
    """Randomly sample *list_df_na* until a record with all of price, street,
    distr, all_area, all_floors and room filled in is found.

    Returns that record, or None when sampling happens to hit the
    out-of-range index first (see NOTE below).
    """
    indicator = False
    data_for_posting = None
    while indicator == False:
        # NOTE(review): randint's upper bound is inclusive, so rand_n can be
        # len(list_df_na) -- the resulting IndexError is (ab)used below as
        # the loop's only other exit, which means the function can return
        # None at random even when complete records exist.  Confirm whether
        # that is intended before changing it.
        rand_n = randint(0,len(list_df_na))
        try:
            if list_df_na[rand_n]['price'] != '' and list_df_na[rand_n]['street'] != ''and list_df_na[rand_n]['distr'] != ''and list_df_na[rand_n]['all_area'] != ''and list_df_na[rand_n]['all_floors'] != ''and list_df_na[rand_n]['room'] != '':
                data_for_posting = list_df_na[rand_n]
                indicator = True
        except IndexError:
            indicator = True
    return data_for_posting
| [
"os.listdir",
"project.matplt.plot_hist",
"geopy.geocoders.Nominatim",
"os.path.join",
"os.path.isfile",
"os.unlink",
"project.matplt.plot_scatter"
] | [((211, 222), 'geopy.geocoders.Nominatim', 'Nominatim', ([], {}), '()\n', (220, 222), False, 'from geopy.geocoders import Nominatim\n'), ((276, 294), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (286, 294), False, 'import os, shutil\n'), ((1213, 1250), 'project.matplt.plot_hist', 'matplt.plot_hist', (['to_return_plot_data'], {}), '(to_return_plot_data)\n', (1229, 1250), False, 'from project import matplt, database\n'), ((1278, 1318), 'project.matplt.plot_scatter', 'matplt.plot_scatter', (['to_return_plot_data'], {}), '(to_return_plot_data)\n', (1297, 1318), False, 'from project import matplt, database\n'), ((316, 346), 'os.path.join', 'os.path.join', (['folder', 'the_file'], {}), '(folder, the_file)\n', (328, 346), False, 'import os, shutil\n'), ((358, 383), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (372, 383), False, 'import os, shutil\n'), ((397, 417), 'os.unlink', 'os.unlink', (['file_path'], {}), '(file_path)\n', (406, 417), False, 'import os, shutil\n')] |
#!/usr/bin/env python
import os
import re
import argparse
re_junk = re.compile(r'[._-]')
re_spaces = re.compile(r'\s\s+')
def print_rename(old_filename, new_filename):
    """Report a (possibly hypothetical) rename as 'old -> new' on stdout."""
    print(f'{old_filename} -> {new_filename}')
def print_and_rename(old_path, new_path):
    """Report the rename on stdout, then actually perform it on disk."""
    print_rename(old_path, new_path)
    os.rename(old_path, new_path)
def get_new_path(old_path):
    """Return *old_path* with a titlecased, lightly sanitized basename.

    Only the basename is touched -- parent directories and the extension
    are preserved.  Sanitizing replaces '.', '_' and '-' with spaces,
    collapses runs of whitespace, and trims the ends.  Hidden files
    (leading '.') and names that sanitize to nothing are returned as-is.

    :param old_path: the path to rename
    :return: titlecased and a little bit sanitized new path
    """
    dirpart, filepart = os.path.split(old_path)
    if filepart.startswith('.'):
        # Leave dotfiles alone entirely.
        return old_path
    base, ext = os.path.splitext(filepart)
    cleaned = re_spaces.sub(' ', re_junk.sub(' ', base)).strip()
    if not cleaned:
        return old_path
    return os.path.join(dirpart, cleaned.title() + ext)
def titlecase(old_path, rename_function):
    """Rename *old_path* to its titlecased form via *rename_function*.

    Does nothing when the path does not exist or when sanitizing leaves
    the name unchanged.
    """
    if not os.path.exists(old_path):
        return
    new_path = get_new_path(old_path)
    if new_path != old_path:
        rename_function(old_path, new_path)
def main():
    """CLI entry point: titlecase every path given on the command line."""
    parser = argparse.ArgumentParser(description='Rename files to "titlecased" and "sanitized"')
    parser.add_argument('-n', '--dry-run', action='store_true', help="Print what would happen, don't rename")
    parser.add_argument('paths', nargs='+')
    args = parser.parse_args()
    # Dry runs only report; real runs report and rename.
    renamer = print_rename if args.dry_run else print_and_rename
    for path in args.paths:
        titlecase(path, renamer)
if __name__ == '__main__':
    main()
| [
"os.path.exists",
"argparse.ArgumentParser",
"re.compile",
"os.rename",
"os.path.splitext",
"os.path.split"
] | [((76, 95), 're.compile', 're.compile', (['"""[._-]"""'], {}), "('[._-]')\n", (86, 95), False, 'import re\n'), ((110, 131), 're.compile', 're.compile', (['"""\\\\s\\\\s+"""'], {}), "('\\\\s\\\\s+')\n", (120, 131), False, 'import re\n'), ((330, 359), 'os.rename', 'os.rename', (['old_path', 'new_path'], {}), '(old_path, new_path)\n', (339, 359), False, 'import os\n'), ((877, 900), 'os.path.split', 'os.path.split', (['old_path'], {}), '(old_path)\n', (890, 900), False, 'import os\n'), ((979, 1005), 'os.path.splitext', 'os.path.splitext', (['filepart'], {}), '(filepart)\n', (995, 1005), False, 'import os\n'), ((1447, 1535), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Rename files to "titlecased" and "sanitized\\""""'}), '(description=\n \'Rename files to "titlecased" and "sanitized"\')\n', (1470, 1535), False, 'import argparse\n'), ((1244, 1268), 'os.path.exists', 'os.path.exists', (['old_path'], {}), '(old_path)\n', (1258, 1268), False, 'import os\n')] |
# Copyright 2018-2020 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import threading
from typing import Dict, NamedTuple, Optional, List, Tuple
from blinker import Signal
class UploadedFileRec(NamedTuple):
    """Metadata and raw bytes for an uploaded file. Immutable."""
    id: str  # unique ID for this upload
    name: str  # original filename
    type: str  # file type (presumably a MIME string -- confirm against caller)
    data: bytes  # raw file contents
class UploadedFile(io.BytesIO):
    """A mutable uploaded file.

    Extends BytesIO, which has copy-on-write semantics when initialized
    with `bytes`, so the record's payload is not duplicated up front.
    """
    def __init__(self, record: UploadedFileRec):
        # The copy-on-write behaviour is a CPython optimization (see
        # https://hg.python.org/cpython/rev/79a5fbe2c78f) and is not
        # mentioned in the Python docs.
        super().__init__(record.data)
        self.id = record.id
        self.name = record.name
        self.type = record.type
        self.size = len(record.data)
class UploadedFileManager(object):
    """Holds files uploaded by users of the running Streamlit app,
    and emits an event signal when a file is added.
    """
    def __init__(self):
        # (session_id, widget_id) -> records stored for that widget's uploader.
        self._files_by_id: Dict[Tuple[str, str], List[UploadedFileRec]] = {}
        # (session_id, widget_id) -> number of files the widget expects.
        self._file_counts_by_id: Dict[Tuple[str, str], int] = {}
        # Prevents concurrent access to the _files_by_id dict.
        # In remove_session_files(), we iterate over the dict's keys. It's
        # an error to mutate a dict while iterating; this lock prevents that.
        self._files_lock = threading.Lock()
        self.on_files_updated = Signal(
            doc="""Emitted when a file list is added to the manager or updated.
            Parameters
            ----------
            session_id : str
                The session_id for the session whose files were updated.
            """
        )
    def _on_files_updated(self, session_id: str, widget_id: str):
        """Emit on_files_updated for *session_id*.

        When an expected file count has been registered for this widget
        (via update_file_count), the signal is only sent once the number
        of stored files matches it; otherwise every change is reported.
        """
        files_by_widget = session_id, widget_id
        if files_by_widget in self._file_counts_by_id:
            expected_file_count: int = self._file_counts_by_id[files_by_widget]
            actual_file_count: int = (
                len(self._files_by_id[files_by_widget])
                if files_by_widget in self._files_by_id
                else 0
            )
            if expected_file_count == actual_file_count:
                self.on_files_updated.send(session_id)
        else:
            self.on_files_updated.send(session_id)
    def _add_files(
        self,
        session_id: str,
        widget_id: str,
        files: List[UploadedFileRec],
    ):
        """
        Add a list of files to the FileManager. Does not emit any signals
        """
        files_by_widget = session_id, widget_id
        with self._files_lock:
            # Append to any list already stored for this widget.
            file_list = self._files_by_id.get(files_by_widget, None)
            if file_list:
                files = file_list + files
            self._files_by_id[files_by_widget] = files
    def add_files(
        self,
        session_id: str,
        widget_id: str,
        files: List[UploadedFileRec],
    ) -> None:
        """Add a list of files to the FileManager.
        The "on_file_added" Signal will be emitted after the list is added.
        Parameters
        ----------
        session_id : str
            The session ID of the report that owns the files.
        widget_id : str
            The widget ID of the FileUploader that created the files.
        files : List[UploadedFileRec]
            The file records to add.
        """
        self._add_files(session_id, widget_id, files)
        self._on_files_updated(session_id, widget_id)
    def get_files(
        self, session_id: str, widget_id: str
    ) -> Optional[List[UploadedFileRec]]:
        """Return the file list with the given ID, or None if the ID doesn't exist.
        Parameters
        ----------
        session_id : str
            The session ID of the report that owns the file.
        widget_id : str
            The widget ID of the FileUploader that created the file.
        Returns
        -------
        list of UploadedFileRec or None
        """
        files_by_widget = session_id, widget_id
        with self._files_lock:
            return self._files_by_id.get(files_by_widget, None)
    def remove_file(self, session_id: str, widget_id: str, file_id: str) -> None:
        """Remove the file list with the given ID, if it exists."""
        files_by_widget = session_id, widget_id
        with self._files_lock:
            file_list = self._files_by_id[files_by_widget]
            self._files_by_id[files_by_widget] = [
                file for file in file_list if file.id != file_id
            ]
            # Only signal when a file was actually removed.
            if len(file_list) != len(self._files_by_id[files_by_widget]):
                self._on_files_updated(session_id, widget_id)
    def _remove_files(self, session_id: str, widget_id: str) -> None:
        """Remove the file list for the provided widget in the
        provided session, if it exists.
        Does not emit any signals.
        """
        files_by_widget = session_id, widget_id
        # Reset the expected count so a stale value can't suppress signals.
        self.update_file_count(session_id, widget_id, 0)
        with self._files_lock:
            self._files_by_id.pop(files_by_widget, None)
    def remove_files(self, session_id: str, widget_id: str) -> None:
        """Remove the file list for the provided widget in the
        provided session, if it exists.
        Parameters
        ----------
        session_id : str
            The session ID of the report that owns the file.
        widget_id : str
            The widget ID of the FileUploader that created the file.
        """
        self._remove_files(session_id, widget_id)
        self._on_files_updated(session_id, widget_id)
    def remove_session_files(self, session_id: str) -> None:
        """Remove all files that belong to the given report.
        Parameters
        ----------
        session_id : str
            The session ID of the report whose files we're removing.
        """
        # Copy the keys into a list, because we'll be mutating the dictionary.
        with self._files_lock:
            all_ids = list(self._files_by_id.keys())
        for files_id in all_ids:
            if files_id[0] == session_id:
                self.remove_files(*files_id)
    def replace_files(
        self,
        session_id: str,
        widget_id: str,
        files: List[UploadedFileRec],
    ) -> None:
        """Removes the file list for the provided widget in the
        provided session, if it exists and add the provided files
        to the widget in the session
        Parameters
        ----------
        session_id : str
            The session ID of the report that owns the file.
        widget_id : str
            The widget ID of the FileUploader that created the file.
        files : List[UploadedFileRec]
            The files to add.
        """
        self._remove_files(session_id, widget_id)
        self._add_files(session_id, widget_id, files)
        self._on_files_updated(session_id, widget_id)
    def update_file_count(
        self,
        session_id: str,
        widget_id: str,
        file_count: int,
    ) -> None:
        """Record how many files the given widget expects; may emit
        on_files_updated if the stored files now match that count."""
        files_by_widget = session_id, widget_id
        self._file_counts_by_id[files_by_widget] = file_count
        self._on_files_updated(session_id, widget_id)
| [
"threading.Lock",
"blinker.Signal"
] | [((2137, 2153), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2151, 2153), False, 'import threading\n'), ((2186, 2436), 'blinker.Signal', 'Signal', ([], {'doc': '"""Emitted when a file list is added to the manager or updated.\n\n Parameters\n ----------\n session_id : str\n The session_id for the session whose files were updated.\n """'}), '(doc=\n """Emitted when a file list is added to the manager or updated.\n\n Parameters\n ----------\n session_id : str\n The session_id for the session whose files were updated.\n """\n )\n', (2192, 2436), False, 'from blinker import Signal\n')] |
# encoding: utf-8
"""
build transform
"""
#import torchvision.transforms as T
#from PIL import Image
#from .transforms import RandomErasing,RandomErasingCorner
from .data_preprocessing import TrainAugmentation_albu,TestAugmentation_albu,TrainAugmentation_bone,TestAugmentation_bone
import torchvision.transforms as transforms
from data.transforms.RandAugment.augmentations import RandAugment,Lighting
_IMAGENET_PCA = {
'eigval': [0.2175, 0.0188, 0.0045],
'eigvec': [
[-0.5675, 0.7192, 0.4009],
[-0.5808, -0.0045, -0.8140],
[-0.5836, -0.6948, 0.4203],
]
}
def get_transform(resize, phase='train'):
    """Build the torchvision preprocessing pipeline.

    *resize* is the target (height, width).  The image is first resized to
    resize / 0.875, then randomly cropped/flipped/jittered for the 'train'
    phase or center-cropped for every other phase, and finally converted
    to a normalized tensor (ImageNet statistics).
    """
    upscaled = (int(resize[0] / 0.875), int(resize[1] / 0.875))
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
    if phase == 'train':
        return transforms.Compose([
            transforms.Resize(size=upscaled),
            transforms.RandomCrop(resize),
            transforms.RandomHorizontalFlip(0.5),
            transforms.ColorJitter(brightness=0.126, saturation=0.5),
            transforms.ToTensor(),
            normalize,
        ])
    return transforms.Compose([
        transforms.Resize(size=upscaled),
        transforms.CenterCrop(resize),
        transforms.ToTensor(),
        normalize,
    ])
def build_transforms(cfg, is_train=True, weak_aug=False, n_aug=1):
    """Build the data-augmentation transform selected by *cfg*.

    When cfg.INPUT.USE_FGTFMS is set, the generic torchvision pipeline from
    get_transform() is used.  Otherwise the dataset-specific albumentations
    pipelines are built: ISIC (with optional weak augmentation for training)
    or BoneXray.  Raises ValueError for an unknown dataset name.
    """
    if cfg.INPUT.USE_FGTFMS is True:
        if is_train is True:
            transform = get_transform(cfg.INPUT.SIZE_TRAIN_PRED, 'train')
        else:
            transform = get_transform(cfg.INPUT.SIZE_TRAIN_PRED, 'val')
        return transform
    if cfg.DATASETS.NAMES == 'ISIC':
        if is_train is True:
            if weak_aug is False:
                transform = TrainAugmentation_albu(sz_hw = cfg.INPUT.SIZE_TRAIN_IN, \
                    mean = cfg.INPUT.PIXEL_MEAN, std = cfg.INPUT.PIXEL_STD, \
                    crp_scale = cfg.INPUT.CRP_SCALE, crp_ratio = cfg.INPUT.CRP_RATIO, n_aug = n_aug,out_augpos = cfg.DATASETS.OUT_AUGPOS)
            else:
                # Weak augmentation: gentler crop scale, no output-position aug.
                transform = TrainAugmentation_albu(sz_hw = cfg.INPUT.SIZE_TRAIN_IN, \
                    mean = cfg.INPUT.PIXEL_MEAN, std = cfg.INPUT.PIXEL_STD, \
                    crp_scale = cfg.INPUT.CRP_SCALE_WEAK, crp_ratio = cfg.INPUT.CRP_RATIO,weak_aug = True, n_aug = n_aug)
        else:
            transform = TestAugmentation_albu(size = cfg.INPUT.SIZE_TRAIN_IN, mean = cfg.INPUT.PIXEL_MEAN, std = cfg.INPUT.PIXEL_STD,out_augpos = cfg.DATASETS.OUT_AUGPOS)
    elif cfg.DATASETS.NAMES == 'BoneXray':
        if is_train is True:
            transform = TrainAugmentation_bone(sz_in_hw = cfg.INPUT.SIZE_TRAIN_IN, sz_out_hw = cfg.INPUT.SIZE_TRAIN_PRED, \
                mean = cfg.INPUT.PIXEL_MEAN, std = cfg.INPUT.PIXEL_STD, \
                minmax_h = cfg.INPUT.MINMAX_H, w2h_ratio = cfg.INPUT.W2H_RATIO)
        else:
            transform = TestAugmentation_bone(sz_in_hw = cfg.INPUT.SIZE_TRAIN_IN,sz_out_hw = cfg.INPUT.SIZE_TRAIN_PRED, mean = cfg.INPUT.PIXEL_MEAN, std = cfg.INPUT.PIXEL_STD)
    else:
        # BUGFIX: message was a plain string; the f-prefix was missing, so the
        # dataset name was never interpolated.
        raise ValueError(f'unknown transform for dataset {cfg.DATASETS.NAMES}')
    return transform
| [
"torchvision.transforms.CenterCrop",
"torchvision.transforms.RandomHorizontalFlip",
"torchvision.transforms.RandomCrop",
"torchvision.transforms.ColorJitter",
"torchvision.transforms.Normalize",
"torchvision.transforms.ToTensor"
] | [((804, 833), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['resize'], {}), '(resize)\n', (825, 833), True, 'import torchvision.transforms as transforms\n'), ((847, 883), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', (['(0.5)'], {}), '(0.5)\n', (878, 883), True, 'import torchvision.transforms as transforms\n'), ((897, 953), 'torchvision.transforms.ColorJitter', 'transforms.ColorJitter', ([], {'brightness': '(0.126)', 'saturation': '(0.5)'}), '(brightness=0.126, saturation=0.5)\n', (919, 953), True, 'import torchvision.transforms as transforms\n'), ((967, 988), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (986, 988), True, 'import torchvision.transforms as transforms\n'), ((1080, 1155), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (1100, 1155), True, 'import torchvision.transforms as transforms\n'), ((1451, 1480), 'torchvision.transforms.CenterCrop', 'transforms.CenterCrop', (['resize'], {}), '(resize)\n', (1472, 1480), True, 'import torchvision.transforms as transforms\n'), ((1494, 1515), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1513, 1515), True, 'import torchvision.transforms as transforms\n'), ((1529, 1604), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.485, 0.456, 0.406]', 'std': '[0.229, 0.224, 0.225]'}), '(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n', (1549, 1604), True, 'import torchvision.transforms as transforms\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The module file for nxos_l3_interfaces
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
module: nxos_l3_interfaces
short_description: L3 interfaces resource module
description: This module manages Layer-3 interfaces attributes of NX-OS Interfaces.
version_added: 1.0.0
author: <NAME> (@trishnaguha)
notes:
- Tested against NXOS 7.3.(0)D1(1) on VIRL
options:
running_config:
description:
- This option is used only with state I(parsed).
- The value of this option should be the output received from the NX-OS device
by executing the command B(show running-config | section '^interface').
- The state I(parsed) reads the configuration from C(running_config) option and
transforms it into Ansible structured data as per the resource module's argspec
and the value is then returned in the I(parsed) key within the result.
type: str
config:
description: A dictionary of Layer-3 interface options
type: list
elements: dict
suboptions:
name:
description:
- Full name of L3 interface, i.e. Ethernet1/1.
type: str
required: true
dot1q:
description:
- Configures IEEE 802.1Q VLAN encapsulation on a subinterface.
type: int
ipv4:
description:
- IPv4 address and attributes of the L3 interface.
type: list
elements: dict
suboptions:
address:
description:
- IPV4 address of the L3 interface.
type: str
tag:
description:
- URIB route tag value for local/direct routes.
type: int
secondary:
description:
- A boolean attribute to manage addition of secondary IP address.
type: bool
default: false
ipv6:
description:
- IPv6 address and attributes of the L3 interface.
type: list
elements: dict
suboptions:
address:
description:
- IPV6 address of the L3 interface.
type: str
tag:
description:
- URIB route tag value for local/direct routes.
type: int
redirects:
description:
- Enables/disables ip redirects
type: bool
unreachables:
description:
        - Enables/disables ip unreachables
type: bool
evpn_multisite_tracking:
description:
- VxLAN evpn multisite Interface tracking. Supported only on selected model.
type: str
version_added: 1.1.0
choices:
- fabric-tracking
- dci-tracking
state:
description:
- The state of the configuration after module completion.
- The state I(overridden) would override the IP address configuration
of all interfaces on the device with the provided configuration in
the task. Use caution with this state as you may loose access to the
device.
type: str
choices:
- merged
- replaced
- overridden
- deleted
- gathered
- rendered
- parsed
default: merged
"""
EXAMPLES = """
# Using merged
# Before state:
# -------------
#
# interface Ethernet1/6
- name: Merge provided configuration with device configuration.
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/6
ipv4:
- address: 192.168.1.1/24
tag: 5
- address: 10.1.1.1/24
secondary: true
tag: 10
ipv6:
- address: fd5d:12c9:2201:2::1/64
tag: 6
- name: Ethernet1/7.42
dot1q: 42
redirects: false
unreachables: false
state: merged
# After state:
# ------------
#
# interface Ethernet1/6
# ip address 192.168.22.1/24 tag 5
# ip address 10.1.1.1/24 secondary tag 10
# interface Ethernet1/6
# ipv6 address fd5d:12c9:2201:2::1/64 tag 6
# interface Ethernet1/7.42
# encapsulation dot1q 42
# no ip redirects
# no ip unreachables
# Using replaced
# Before state:
# -------------
#
# interface Ethernet1/6
# ip address 192.168.22.1/24
# ipv6 address "fd5d:fdf8:f53e:61e4::18/64"
- name: Replace device configuration of specified L3 interfaces with provided configuration.
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/6
ipv4:
- address: 192.168.22.3/24
state: replaced
# After state:
# ------------
#
# interface Ethernet1/6
# ip address 192.168.22.3/24
# Using overridden
# Before state:
# -------------
#
# interface Ethernet1/2
# ip address 192.168.22.1/24
# interface Ethernet1/6
# ipv6 address "fd5d:fdf8:f53e:61e4::18/64"
- name: Override device configuration of all L3 interfaces on device with provided
configuration.
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/2
ipv4: 192.168.22.3/4
state: overridden
# After state:
# ------------
#
# interface Ethernet1/2
# ipv4 address 192.168.22.3/24
# interface Ethernet1/6
# Using deleted
# Before state:
# -------------
#
# interface Ethernet1/6
# ip address 192.168.22.1/24
# interface Ethernet1/2
# ipv6 address "fd5d:12c9:2201:1::1/64"
- name: Delete L3 attributes of given interfaces (This won't delete the interface
itself).
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/6
- name: Ethernet1/2
state: deleted
# After state:
# ------------
#
# interface Ethernet1/6
# interface Ethernet1/2
# Using rendered
- name: Use rendered state to convert task input to device specific commands
cisco.nxos.nxos_l3_interfaces:
config:
- name: Ethernet1/800
ipv4:
- address: 192.168.1.100/24
tag: 5
- address: 10.1.1.1/24
secondary: true
tag: 10
- name: Ethernet1/800
ipv6:
- address: fd5d:12c9:2201:2::1/64
tag: 6
state: rendered
# Task Output (redacted)
# -----------------------
# rendered:
# - "interface Ethernet1/800"
# - "ip address 192.168.1.100/24 tag 5"
# - "ip address 10.1.1.1/24 secondary tag 10"
# - "interface Ethernet1/800"
# - "ipv6 address fd5d:12c9:2201:2::1/64 tag 6"
# Using parsed
# parsed.cfg
# ------------
# interface Ethernet1/800
# ip address 192.168.1.100/24 tag 5
# ip address 10.1.1.1/24 secondary tag 10
# no ip redirects
# interface Ethernet1/801
# ipv6 address fd5d:fc00:db20:35b:7399::5/64 tag 6
# ip unreachables
# interface mgmt0
# ip address dhcp
# vrf member management
- name: Use parsed state to convert externally supplied config to structured format
cisco.nxos.nxos_l3_interfaces:
running_config: "{{ lookup('file', 'parsed.cfg') }}"
state: parsed
# Task output (redacted)
# -----------------------
# parsed:
# - name: Ethernet1/800
# ipv4:
# - address: 192.168.1.100/24
# tag: 5
# - address: 10.1.1.1/24
# secondary: True
# tag: 10
# redirects: False
# - name: Ethernet1/801
# ipv6:
# - address: fd5d:12c9:2201:2::1/64
# tag: 6
# unreachables: True
# Using gathered
# Existing device config state
# -------------------------------
# interface Ethernet1/1
# ip address 192.0.2.100/24
# interface Ethernet1/2
# no ip redirects
# ip address 203.0.113.10/24
# ip unreachables
# ipv6 address 2001:db8::1/32
- name: Gather l3_interfaces facts from the device using nxos_l3_interfaces
cisco.nxos.nxos_l3_interfaces:
state: gathered
# Task output (redacted)
# -----------------------
# gathered:
# - name: Ethernet1/1
# ipv4:
# - address: 192.0.2.100/24
# - name: Ethernet1/2
# ipv4:
# - address: 203.0.113.10/24
# ipv6:
# - address: 2001:db8::1/32
# redirects: False
# unreachables: True
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
commands:
description: The set of commands pushed to the remote device.
returned: always
type: list
sample: ['interface Ethernet1/2', 'ip address 192.168.0.1/2']
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.argspec.l3_interfaces.l3_interfaces import (
L3_interfacesArgs,
)
from ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.config.l3_interfaces.l3_interfaces import (
L3_interfaces,
)
def main():
    """Entry point for module execution.

    :returns: the result from module invocation
    """
    # Every config-consuming state needs `config`; parsing needs raw config.
    state_requirements = [
        ("state", state, ("config",))
        for state in ("merged", "replaced", "overridden", "rendered")
    ]
    state_requirements.append(("state", "parsed", ("running_config",)))

    module = AnsibleModule(
        argument_spec=L3_interfacesArgs.argument_spec,
        required_if=state_requirements,
        mutually_exclusive=[("config", "running_config")],
        supports_check_mode=True,
    )

    module.exit_json(**L3_interfaces(module).execute_module())


if __name__ == "__main__":
    main()
| [
"ansible.module_utils.basic.AnsibleModule",
"ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.config.l3_interfaces.l3_interfaces.L3_interfaces"
] | [((9832, 9992), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'L3_interfacesArgs.argument_spec', 'required_if': 'required_if', 'mutually_exclusive': 'mutually_exclusive', 'supports_check_mode': '(True)'}), '(argument_spec=L3_interfacesArgs.argument_spec, required_if=\n required_if, mutually_exclusive=mutually_exclusive, supports_check_mode\n =True)\n', (9845, 9992), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((10036, 10057), 'ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.config.l3_interfaces.l3_interfaces.L3_interfaces', 'L3_interfaces', (['module'], {}), '(module)\n', (10049, 10057), False, 'from ansible_collections.cisco.nxos.plugins.module_utils.network.nxos.config.l3_interfaces.l3_interfaces import L3_interfaces\n')] |
# MIT License
#
# Copyright (c) 2018 <NAME>, <EMAIL>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import json
import pytest
import tests.resources
from bscetl.jobs.exporters.traces_item_exporter import traces_item_exporter
from bscetl.jobs.extract_geth_traces_job import ExtractGethTracesJob
from tests.helpers import compare_lines_ignore_order, read_file
RESOURCE_GROUP = 'test_extract_geth_traces_job'
def read_resource(resource_group, file_name):
return tests.resources.read_resource([RESOURCE_GROUP, resource_group], file_name)
@pytest.mark.parametrize('resource_group', [
'block_without_transactions',
'block_with_create',
'block_with_suicide',
'block_with_subtraces',
'block_with_error',
])
def test_extract_traces_job(tmpdir, resource_group):
output_file = str(tmpdir.join('actual_traces.csv'))
geth_traces_content = read_resource(resource_group, 'geth_traces.json')
traces_iterable = (json.loads(line) for line in geth_traces_content.splitlines())
job = ExtractGethTracesJob(
traces_iterable=traces_iterable,
batch_size=2,
item_exporter=traces_item_exporter(output_file),
max_workers=5
)
job.run()
print('=====================')
print(read_file(output_file))
compare_lines_ignore_order(
read_resource(resource_group, 'expected_traces.csv'), read_file(output_file)
)
| [
"pytest.mark.parametrize",
"json.loads",
"tests.helpers.read_file",
"bscetl.jobs.exporters.traces_item_exporter.traces_item_exporter"
] | [((1557, 1725), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""resource_group"""', "['block_without_transactions', 'block_with_create', 'block_with_suicide',\n 'block_with_subtraces', 'block_with_error']"], {}), "('resource_group', ['block_without_transactions',\n 'block_with_create', 'block_with_suicide', 'block_with_subtraces',\n 'block_with_error'])\n", (1580, 1725), False, 'import pytest\n'), ((1950, 1966), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1960, 1966), False, 'import json\n'), ((2253, 2275), 'tests.helpers.read_file', 'read_file', (['output_file'], {}), '(output_file)\n', (2262, 2275), False, 'from tests.helpers import compare_lines_ignore_order, read_file\n'), ((2371, 2393), 'tests.helpers.read_file', 'read_file', (['output_file'], {}), '(output_file)\n', (2380, 2393), False, 'from tests.helpers import compare_lines_ignore_order, read_file\n'), ((2130, 2163), 'bscetl.jobs.exporters.traces_item_exporter.traces_item_exporter', 'traces_item_exporter', (['output_file'], {}), '(output_file)\n', (2150, 2163), False, 'from bscetl.jobs.exporters.traces_item_exporter import traces_item_exporter\n')] |
"""Module to add Employee fields to the User admin interface."""
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import User
from .models import Employee
class EmployeeInline(admin.StackedInline):
    """Inline editor attaching exactly one Employee record to the User form."""

    model = Employee
    verbose_name_plural = 'employee'
    max_num = 1
    can_delete = False
class UserAdmin(BaseUserAdmin):
    """User admin extended with the inline Employee fields (ssn, salary, last_updated)."""

    inlines = (EmployeeInline,)
# Swap out the stock User admin for our extended version so the Employee
# inline appears on every User change page.
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
| [
"django.contrib.admin.site.unregister",
"django.contrib.admin.site.register"
] | [((520, 547), 'django.contrib.admin.site.unregister', 'admin.site.unregister', (['User'], {}), '(User)\n', (541, 547), False, 'from django.contrib import admin\n'), ((548, 584), 'django.contrib.admin.site.register', 'admin.site.register', (['User', 'UserAdmin'], {}), '(User, UserAdmin)\n', (567, 584), False, 'from django.contrib import admin\n')] |
# Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from dataclasses import dataclass
from pants.backend.codegen.thrift.apache.subsystem import ApacheThriftSubsystem
from pants.backend.codegen.thrift.target_types import ThriftSourceField
from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest
from pants.engine.environment import Environment, EnvironmentRequest
from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix, Snapshot
from pants.engine.internals.selectors import Get, MultiGet
from pants.engine.process import (
BinaryNotFoundError,
BinaryPathRequest,
BinaryPaths,
BinaryPathTest,
Process,
ProcessCacheScope,
ProcessResult,
)
from pants.engine.rules import collect_rules, rule
from pants.engine.target import TransitiveTargets, TransitiveTargetsRequest
from pants.source.source_root import SourceRootsRequest, SourceRootsResult
from pants.util.logging import LogLevel
from pants.util.strutil import bullet_list
logger = logging.getLogger(__name__)
@dataclass(frozen=True)
class GenerateThriftSourcesRequest:
    """Request to run the Apache Thrift compiler for one language generator."""

    thrift_source_field: ThriftSourceField  # the `thrift_source` field to generate from
    lang_id: str  # thrift generator id passed to `--gen`, e.g. `py`
    lang_options: tuple[str, ...]  # generator options appended as `--gen <id>:<opt>,...`
    lang_name: str  # human-readable language name, used only in the process description
@dataclass(frozen=True)
class GeneratedThriftSources:
    """Snapshot of the files produced by one thrift code-generation run."""

    snapshot: Snapshot
@dataclass(frozen=True)
class ApacheThriftSetup:
    """Absolute path to a `thrift` binary with the expected version."""

    path: str
@rule
async def generate_apache_thrift_sources(
    request: GenerateThriftSourcesRequest,
    thrift: ApacheThriftSetup,
) -> GeneratedThriftSources:
    """Run the Apache Thrift compiler over the requested target's sources.

    Generates ``request.lang_id`` code from the target's thrift files, passing
    a ``-I`` include path for every source root of the transitive thrift
    dependencies so cross-file `include` statements resolve.
    """
    output_dir = "_generated_files"
    transitive_targets, empty_output_dir_digest = await MultiGet(
        Get(TransitiveTargets, TransitiveTargetsRequest([request.thrift_source_field.address])),
        Get(Digest, CreateDigest([Directory(output_dir)])),
    )
    # Transitive sources are needed for includes; only the target's own
    # sources are passed to the compiler for generation.
    transitive_sources, target_sources = await MultiGet(
        Get(
            SourceFiles,
            SourceFilesRequest(
                tgt[ThriftSourceField]
                for tgt in transitive_targets.closure
                if tgt.has_field(ThriftSourceField)
            ),
        ),
        Get(SourceFiles, SourceFilesRequest([request.thrift_source_field])),
    )
    sources_roots = await Get(
        SourceRootsResult,
        SourceRootsRequest,
        SourceRootsRequest.for_files(transitive_sources.snapshot.files),
    )
    deduped_source_root_paths = sorted({sr.path for sr in sources_roots.path_to_root.values()})
    input_digest = await Get(
        Digest,
        MergeDigests(
            [
                transitive_sources.snapshot.digest,
                target_sources.snapshot.digest,
                empty_output_dir_digest,
            ]
        ),
    )
    # Thrift's `--gen` flag takes `<lang>[:<opt>[,<opt>...]]`.
    options_str = ""
    if request.lang_options:
        options_str = f":{','.join(request.lang_options)}"
    maybe_include_paths = []
    for path in deduped_source_root_paths:
        maybe_include_paths.extend(["-I", path])
    args = [
        thrift.path,
        "-out",
        output_dir,
        *maybe_include_paths,
        "--gen",
        f"{request.lang_id}{options_str}",
        *target_sources.snapshot.files,
    ]
    result = await Get(
        ProcessResult,
        Process(
            args,
            input_digest=input_digest,
            output_directories=(output_dir,),
            description=f"Generating {request.lang_name} sources from {request.thrift_source_field.address}.",
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the scratch directory prefix so generated files land at the root
    # of the returned snapshot.
    output_snapshot = await Get(Snapshot, RemovePrefix(result.output_digest, output_dir))
    return GeneratedThriftSources(output_snapshot)
@rule
async def setup_thrift_tool(apache_thrift: ApacheThriftSubsystem) -> ApacheThriftSetup:
    """Locate a `thrift` binary whose version matches `[apache-thrift].expected_version`.

    Searches `[apache-thrift].thrift_search_paths`, runs `thrift -version` on
    every candidate, and returns the first one with the expected
    `major.minor` version. Raises `BinaryNotFoundError` when no candidate is
    found or none has the expected version.
    """
    env = await Get(Environment, EnvironmentRequest(["PATH"]))
    search_paths = apache_thrift.thrift_search_paths(env)
    all_thrift_binary_paths = await Get(
        BinaryPaths,
        BinaryPathRequest(
            search_path=search_paths,
            binary_name="thrift",
            test=BinaryPathTest(["-version"]),
        ),
    )
    if not all_thrift_binary_paths.paths:
        raise BinaryNotFoundError(
            "Cannot find any `thrift` binaries using the option "
            f"`[apache-thrift].thrift_search_paths`: {list(search_paths)}\n\n"
            "To fix, please install Apache Thrift (https://thrift.apache.org/) with the version "
            f"{apache_thrift.expected_version} (set by `[apache-thrift].expected_version`) and ensure "
            "that it is discoverable via `[apache-thrift].thrift_search_paths`."
        )
    # Run `thrift -version` on every candidate concurrently. Cached only per
    # Pants run so a changed system binary is re-checked after a restart.
    version_results = await MultiGet(
        Get(
            ProcessResult,
            Process(
                (binary_path.path, "-version"),
                description=f"Determine Apache Thrift version for {binary_path.path}",
                level=LogLevel.DEBUG,
                cache_scope=ProcessCacheScope.PER_RESTART_SUCCESSFUL,
            ),
        )
        for binary_path in all_thrift_binary_paths.paths
    )
    invalid_versions = []
    for binary_path, version_result in zip(all_thrift_binary_paths.paths, version_results):
        try:
            # Output looks like "Thrift version 0.15.0"; take the third token
            # and compare only major.minor against the expected version.
            _raw_version = version_result.stdout.decode("utf-8").split()[2]
            _version_components = _raw_version.split(".")  # e.g. [1, 17] or [1, 17, 1]
            version = f"{_version_components[0]}.{_version_components[1]}"
        except IndexError:
            raise AssertionError(
                f"Failed to parse `thrift -version` output for {binary_path}. Please open a bug at "
                f"https://github.com/pantsbuild/pants/issues/new/choose with the below data:"
                f"\n\n"
                f"{version_result}"
            )
        if version == apache_thrift.expected_version:
            # First match wins.
            return ApacheThriftSetup(binary_path.path)
        logger.debug(
            f"The Thrift binary at {binary_path.path} has version {version}, but this "
            f"project is using {apache_thrift.expected_version} "
            "(set by `[apache-thrift].expected_version`). Ignoring."
        )
        invalid_versions.append((binary_path.path, version))
    invalid_versions_str = bullet_list(
        f"{path}: {version}" for path, version in sorted(invalid_versions)
    )
    raise BinaryNotFoundError(
        "Cannot find a `thrift` binary with the expected version of "
        f"{apache_thrift.expected_version} (set by `[apache-thrift].expected_version`).\n\n"
        f"Found these `thrift` binaries, but they had different versions:\n\n"
        f"{invalid_versions_str}\n\n"
        "To fix, please install the expected version (https://thrift.apache.org/) and ensure "
        "that it is discoverable via the option `[apache-thrift].thrift_search_paths`, or change "
        "`[apache-thrift].expected_version`."
    )
def rules():
    """Return the rule entries this module contributes to the Pants engine."""
    return collect_rules()
| [
"logging.getLogger",
"pants.core.util_rules.source_files.SourceFilesRequest",
"pants.engine.fs.MergeDigests",
"dataclasses.dataclass",
"pants.engine.fs.RemovePrefix",
"pants.source.source_root.SourceRootsRequest.for_files",
"pants.engine.target.TransitiveTargetsRequest",
"pants.engine.process.BinaryPa... | [((1146, 1173), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1163, 1173), False, 'import logging\n'), ((1177, 1199), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1186, 1199), False, 'from dataclasses import dataclass\n'), ((1352, 1374), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1361, 1374), False, 'from dataclasses import dataclass\n'), ((1431, 1453), 'dataclasses.dataclass', 'dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (1440, 1453), False, 'from dataclasses import dataclass\n'), ((6385, 6857), 'pants.engine.process.BinaryNotFoundError', 'BinaryNotFoundError', (['f"""Cannot find a `thrift` binary with the expected version of {apache_thrift.expected_version} (set by `[apache-thrift].expected_version`).\n\nFound these `thrift` binaries, but they had different versions:\n\n{invalid_versions_str}\n\nTo fix, please install the expected version (https://thrift.apache.org/) and ensure that it is discoverable via the option `[apache-thrift].thrift_search_paths`, or change `[apache-thrift].expected_version`."""'], {}), '(\n f"""Cannot find a `thrift` binary with the expected version of {apache_thrift.expected_version} (set by `[apache-thrift].expected_version`).\n\nFound these `thrift` binaries, but they had different versions:\n\n{invalid_versions_str}\n\nTo fix, please install the expected version (https://thrift.apache.org/) and ensure that it is discoverable via the option `[apache-thrift].thrift_search_paths`, or change `[apache-thrift].expected_version`."""\n )\n', (6404, 6857), False, 'from pants.engine.process import BinaryNotFoundError, BinaryPathRequest, BinaryPaths, BinaryPathTest, Process, ProcessCacheScope, ProcessResult\n'), ((6958, 6973), 'pants.engine.rules.collect_rules', 'collect_rules', ([], {}), '()\n', (6971, 6973), False, 'from pants.engine.rules import collect_rules, rule\n'), ((2389, 
2452), 'pants.source.source_root.SourceRootsRequest.for_files', 'SourceRootsRequest.for_files', (['transitive_sources.snapshot.files'], {}), '(transitive_sources.snapshot.files)\n', (2417, 2452), False, 'from pants.source.source_root import SourceRootsRequest, SourceRootsResult\n'), ((2611, 2723), 'pants.engine.fs.MergeDigests', 'MergeDigests', (['[transitive_sources.snapshot.digest, target_sources.snapshot.digest,\n empty_output_dir_digest]'], {}), '([transitive_sources.snapshot.digest, target_sources.snapshot.\n digest, empty_output_dir_digest])\n', (2623, 2723), False, 'from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix, Snapshot\n'), ((3321, 3530), 'pants.engine.process.Process', 'Process', (['args'], {'input_digest': 'input_digest', 'output_directories': '(output_dir,)', 'description': 'f"""Generating {request.lang_name} sources from {request.thrift_source_field.address}."""', 'level': 'LogLevel.DEBUG'}), "(args, input_digest=input_digest, output_directories=(output_dir,),\n description=\n f'Generating {request.lang_name} sources from {request.thrift_source_field.address}.'\n , level=LogLevel.DEBUG)\n", (3328, 3530), False, 'from pants.engine.process import BinaryNotFoundError, BinaryPathRequest, BinaryPaths, BinaryPathTest, Process, ProcessCacheScope, ProcessResult\n'), ((3638, 3684), 'pants.engine.fs.RemovePrefix', 'RemovePrefix', (['result.output_digest', 'output_dir'], {}), '(result.output_digest, output_dir)\n', (3650, 3684), False, 'from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix, Snapshot\n'), ((3866, 3894), 'pants.engine.environment.EnvironmentRequest', 'EnvironmentRequest', (["['PATH']"], {}), "(['PATH'])\n", (3884, 3894), False, 'from pants.engine.environment import Environment, EnvironmentRequest\n'), ((1780, 1843), 'pants.engine.target.TransitiveTargetsRequest', 'TransitiveTargetsRequest', (['[request.thrift_source_field.address]'], {}), 
'([request.thrift_source_field.address])\n', (1804, 1843), False, 'from pants.engine.target import TransitiveTargets, TransitiveTargetsRequest\n'), ((2236, 2285), 'pants.core.util_rules.source_files.SourceFilesRequest', 'SourceFilesRequest', (['[request.thrift_source_field]'], {}), '([request.thrift_source_field])\n', (2254, 2285), False, 'from pants.core.util_rules.source_files import SourceFiles, SourceFilesRequest\n'), ((4132, 4160), 'pants.engine.process.BinaryPathTest', 'BinaryPathTest', (["['-version']"], {}), "(['-version'])\n", (4146, 4160), False, 'from pants.engine.process import BinaryNotFoundError, BinaryPathRequest, BinaryPaths, BinaryPathTest, Process, ProcessCacheScope, ProcessResult\n'), ((4785, 4981), 'pants.engine.process.Process', 'Process', (["(binary_path.path, '-version')"], {'description': 'f"""Determine Apache Thrift version for {binary_path.path}"""', 'level': 'LogLevel.DEBUG', 'cache_scope': 'ProcessCacheScope.PER_RESTART_SUCCESSFUL'}), "((binary_path.path, '-version'), description=\n f'Determine Apache Thrift version for {binary_path.path}', level=\n LogLevel.DEBUG, cache_scope=ProcessCacheScope.PER_RESTART_SUCCESSFUL)\n", (4792, 4981), False, 'from pants.engine.process import BinaryNotFoundError, BinaryPathRequest, BinaryPaths, BinaryPathTest, Process, ProcessCacheScope, ProcessResult\n'), ((1880, 1901), 'pants.engine.fs.Directory', 'Directory', (['output_dir'], {}), '(output_dir)\n', (1889, 1901), False, 'from pants.engine.fs import CreateDigest, Digest, Directory, MergeDigests, RemovePrefix, Snapshot\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration: creates the DeliriumUser and Post tables."""

    # No prior migrations — this is the app's first migration.
    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='DeliriumUser',
            fields=[
                ('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
                ('username', models.CharField(max_length=255, verbose_name='Имя пользователя', default='')),
                ('avatar', models.CharField(max_length=255, verbose_name='Аватара', default='')),
            ],
            options={
                'verbose_name_plural': 'Пользователи Delirium',
                'verbose_name': 'Пользователь Delirium',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(primary_key=True, verbose_name='ID', auto_created=True, serialize=False)),
                ('topic', models.CharField(max_length=255, verbose_name='Топик', default='')),
                ('posted_at', models.DateTimeField(verbose_name='Время')),
                ('is_registered', models.BooleanField(verbose_name='Зарегистрирован', default=False)),
                ('username', models.CharField(max_length=255, verbose_name='Имя пользователя (в посте)', default='')),
                ('text', models.TextField(verbose_name='Пост', default='')),
                # Nullable: a post may exist without a resolved DeliriumUser.
                ('user', models.ForeignKey(blank=True, to='delirium.DeliriumUser', related_name='posts', null=True)),
            ],
            options={
                'verbose_name_plural': 'Посты',
                'verbose_name': 'Пост',
            },
            bases=(models.Model,),
        ),
    ]
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((304, 397), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'verbose_name': '"""ID"""', 'auto_created': '(True)', 'serialize': '(False)'}), "(primary_key=True, verbose_name='ID', auto_created=True,\n serialize=False)\n", (320, 397), False, 'from django.db import models, migrations\n'), ((425, 502), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""Имя пользователя"""', 'default': '""""""'}), "(max_length=255, verbose_name='Имя пользователя', default='')\n", (441, 502), False, 'from django.db import models, migrations\n'), ((532, 600), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""Аватара"""', 'default': '""""""'}), "(max_length=255, verbose_name='Аватара', default='')\n", (548, 600), False, 'from django.db import models, migrations\n'), ((923, 1016), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'verbose_name': '"""ID"""', 'auto_created': '(True)', 'serialize': '(False)'}), "(primary_key=True, verbose_name='ID', auto_created=True,\n serialize=False)\n", (939, 1016), False, 'from django.db import models, migrations\n'), ((1041, 1107), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""Топик"""', 'default': '""""""'}), "(max_length=255, verbose_name='Топик', default='')\n", (1057, 1107), False, 'from django.db import models, migrations\n'), ((1140, 1182), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'verbose_name': '"""Время"""'}), "(verbose_name='Время')\n", (1160, 1182), False, 'from django.db import models, migrations\n'), ((1219, 1285), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'verbose_name': '"""Зарегистрирован"""', 'default': '(False)'}), "(verbose_name='Зарегистрирован', default=False)\n", (1238, 1285), False, 'from django.db import models, migrations\n'), ((1317, 1408), 'django.db.models.CharField', 
'models.CharField', ([], {'max_length': '(255)', 'verbose_name': '"""Имя пользователя (в посте)"""', 'default': '""""""'}), "(max_length=255, verbose_name='Имя пользователя (в посте)',\n default='')\n", (1333, 1408), False, 'from django.db import models, migrations\n'), ((1432, 1481), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""Пост"""', 'default': '""""""'}), "(verbose_name='Пост', default='')\n", (1448, 1481), False, 'from django.db import models, migrations\n'), ((1509, 1604), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'to': '"""delirium.DeliriumUser"""', 'related_name': '"""posts"""', 'null': '(True)'}), "(blank=True, to='delirium.DeliriumUser', related_name=\n 'posts', null=True)\n", (1526, 1604), False, 'from django.db import models, migrations\n')] |
import os
import salt.utils.platform
from tests.support.mock import patch
from tests.support.unit import TestCase, skipIf
try:
import salt.utils.win_system as win_system
except Exception as exc: # pylint: disable=broad-except
win_system = exc
class WinSystemImportTestCase(TestCase):
    """
    Importing ``salt.utils.win_system`` must never raise, even on Linux.
    """

    def test_import(self):
        # The module-level try/except stores the import-time exception in
        # ``win_system`` instead of the module; surface it as a failure here.
        if not isinstance(win_system, Exception):
            return
        raise Exception(
            "Importing win_system caused traceback: {}".format(win_system)
        )
@skipIf(not salt.utils.platform.is_windows(), "Only test on Windows systems")
class WinSystemTestCase(TestCase):
    """
    Test cases for salt.utils.win_system
    """

    def test_get_computer_name(self):
        """
        Should return the computer name
        """
        with patch("win32api.GetComputerNameEx", return_value="FAKENAME"):
            self.assertEqual(win_system.get_computer_name(), "FAKENAME")

    def test_get_computer_name_fail(self):
        """
        If it fails, it returns False
        """
        with patch("win32api.GetComputerNameEx", return_value=None):
            self.assertFalse(win_system.get_computer_name())

    def test_get_pending_computer_name(self):
        """
        Will return the pending computer name if one is pending
        """
        expected = "PendingName"
        patch_value = {"vdata": expected}
        with patch("salt.utils.win_reg.read_value", return_value=patch_value):
            result = win_system.get_pending_computer_name()
            self.assertEqual(expected, result)

    def test_get_pending_computer_name_none(self):
        """
        Will return the None if the pending computer is the current name
        """
        patch_value = {"vdata": os.environ.get("COMPUTERNAME")}
        with patch("salt.utils.win_reg.read_value", return_value=patch_value):
            self.assertIsNone(win_system.get_pending_computer_name())

    def test_get_pending_computer_name_false(self):
        """
        Will return False if there is no pending computer name
        """
        with patch("salt.utils.win_reg.read_value", return_value=False):
            self.assertIsNone(win_system.get_pending_computer_name())

    def test_get_pending_component_servicing(self):
        """
        If none of the keys exist, should return False
        """
        with patch("salt.utils.win_reg.key_exists", return_value=False):
            self.assertFalse(win_system.get_pending_component_servicing())

    def test_get_pending_component_servicing_true_1(self):
        """
        If the RebootPending key exists, should return True
        """
        with patch("salt.utils.win_reg.key_exists", side_effect=[True]):
            self.assertTrue(win_system.get_pending_component_servicing())

    def test_get_pending_component_servicing_true_2(self):
        """
        If the RebootInProgress key exists, should return True
        """
        with patch("salt.utils.win_reg.key_exists", side_effect=[False, True]):
            self.assertTrue(win_system.get_pending_component_servicing())

    def test_get_pending_component_servicing_true_3(self):
        """
        If the PackagesPending key exists, should return True
        """
        with patch("salt.utils.win_reg.key_exists", side_effect=[False, False, True]):
            self.assertTrue(win_system.get_pending_component_servicing())

    def test_get_pending_domain_join(self):
        """
        If none of the keys exist, should return False
        """
        with patch("salt.utils.win_reg.key_exists", return_value=False):
            self.assertFalse(win_system.get_pending_domain_join())

    def test_get_pending_domain_join_true_1(self):
        """
        If the AvoidSpnSet key exists, should return True
        """
        with patch("salt.utils.win_reg.key_exists", side_effect=[True]):
            self.assertTrue(win_system.get_pending_domain_join())

    def test_get_pending_domain_join_true_2(self):
        """
        If the JoinDomain key exists, should return True
        """
        with patch("salt.utils.win_reg.key_exists", side_effect=[False, True]):
            self.assertTrue(win_system.get_pending_domain_join())

    def test_get_pending_file_rename_false_1(self):
        """
        If none of the value names exist, should return False
        """
        patched_return = {"success": False}
        with patch("salt.utils.win_reg.read_value", return_value=patched_return):
            self.assertFalse(win_system.get_pending_file_rename())

    def test_get_pending_file_rename_false_2(self):
        """
        If one of the value names exists but is not set, should return False
        """
        patched_return = {"success": True, "vdata": "(value not set)"}
        with patch("salt.utils.win_reg.read_value", return_value=patched_return):
            self.assertFalse(win_system.get_pending_file_rename())

    def test_get_pending_file_rename_true_1(self):
        """
        If one of the value names exists and is set, should return True
        """
        patched_return = {"success": True, "vdata": "some value"}
        with patch("salt.utils.win_reg.read_value", return_value=patched_return):
            self.assertTrue(win_system.get_pending_file_rename())

    def test_get_pending_servermanager_false_1(self):
        """
        If the CurrentRebootAttempts value name does not exist, should return
        False
        """
        patched_return = {"success": False}
        with patch("salt.utils.win_reg.read_value", return_value=patched_return):
            self.assertFalse(win_system.get_pending_servermanager())

    def test_get_pending_servermanager_false_2(self):
        """
        If the CurrentRebootAttempts value name exists but is not an integer,
        should return False
        """
        patched_return = {"success": True, "vdata": "(value not set)"}
        with patch("salt.utils.win_reg.read_value", return_value=patched_return):
            # Fixed copy-paste bug: this test previously asserted on
            # get_pending_file_rename(), so get_pending_servermanager was
            # never actually exercised here.
            self.assertFalse(win_system.get_pending_servermanager())

    def test_get_pending_servermanager_true(self):
        """
        If the CurrentRebootAttempts value name exists and is an integer,
        should return True
        """
        patched_return = {"success": True, "vdata": 1}
        with patch("salt.utils.win_reg.read_value", return_value=patched_return):
            # Fixed copy-paste bug: this test previously asserted on
            # get_pending_file_rename() instead of the function under test.
            self.assertTrue(win_system.get_pending_servermanager())

    def test_get_pending_dvd_reboot(self):
        """
        If the DVDRebootSignal value name does not exist, should return False
        """
        with patch("salt.utils.win_reg.value_exists", return_value=False):
            self.assertFalse(win_system.get_pending_dvd_reboot())

    def test_get_pending_dvd_reboot_true(self):
        """
        If the DVDRebootSignal value name exists, should return True
        """
        with patch("salt.utils.win_reg.value_exists", return_value=True):
            self.assertTrue(win_system.get_pending_dvd_reboot())

    def test_get_pending_update(self):
        """
        If none of the keys exist and there are not subkeys, should return False
        """
        with patch("salt.utils.win_reg.key_exists", return_value=False), patch(
            "salt.utils.win_reg.list_keys", return_value=[]
        ):
            self.assertFalse(win_system.get_pending_update())

    def test_get_pending_update_true_1(self):
        """
        If the RebootRequired key exists, should return True
        """
        with patch("salt.utils.win_reg.key_exists", side_effect=[True]):
            self.assertTrue(win_system.get_pending_update())

    def test_get_pending_update_true_2(self):
        """
        If the PostRebootReporting key exists, should return True
        """
        with patch("salt.utils.win_reg.key_exists", side_effect=[False, True]):
            self.assertTrue(win_system.get_pending_update())

    def test_get_reboot_required_witnessed_false_1(self):
        """
        The ``Reboot Required`` value name does not exist, should return False
        """
        patched_data = {"vdata": None}
        with patch("salt.utils.win_reg.read_value", return_value=patched_data):
            self.assertFalse(win_system.get_reboot_required_witnessed())

    def test_get_reboot_required_witnessed_false_2(self):
        """
        The ``Reboot required`` value name is set to 0, should return False
        """
        patched_data = {"vdata": 0}
        with patch("salt.utils.win_reg.read_value", return_value=patched_data):
            self.assertFalse(win_system.get_reboot_required_witnessed())

    def test_get_reboot_required_witnessed_true(self):
        """
        The ``Reboot required`` value name is set to 1, should return True
        """
        patched_data = {"vdata": 1}
        with patch("salt.utils.win_reg.read_value", return_value=patched_data):
            self.assertTrue(win_system.get_reboot_required_witnessed())

    def test_set_reboot_required_witnessed(self):
        """
        The call to ``set_value`` should return True and should be called with
        the specified parameters
        """
        with patch("salt.utils.win_reg.set_value", return_value=True) as sv:
            self.assertTrue(win_system.set_reboot_required_witnessed())
            sv.assert_called_once_with(
                hive="HKLM",
                key=win_system.MINION_VOLATILE_KEY,
                volatile=True,
                vname=win_system.REBOOT_REQUIRED_NAME,
                vdata=1,
                vtype="REG_DWORD",
            )

    def test_get_pending_update_exe_volatile_false_1(self):
        """
        If UpdateExeVolatile value name is 0, should return False
        """
        patched_data = {"success": True, "vdata": 0}
        with patch("salt.utils.win_reg.read_value", return_value=patched_data):
            self.assertFalse(win_system.get_pending_update_exe_volatile())

    def test_get_pending_update_exe_volatile_false_2(self):
        """
        If UpdateExeVolatile value name is not present, should return False
        """
        patched_data = {"success": False}
        with patch("salt.utils.win_reg.read_value", return_value=patched_data):
            self.assertFalse(win_system.get_pending_update_exe_volatile())

    def test_get_pending_update_exe_volatile_true_1(self):
        """
        If UpdateExeVolatile value name is not 0, should return True
        """
        patched_data = {"success": True, "vdata": 1}
        with patch("salt.utils.win_reg.read_value", return_value=patched_data):
            self.assertTrue(win_system.get_pending_update_exe_volatile())

    def test_get_pending_reboot(self):
        """
        If all functions return Falsy data, should return False
        """
        with patch(
            "salt.utils.win_system.get_pending_update", return_value=False
        ), patch("salt.utils.win_update.needs_reboot", return_value=False), patch(
            "salt.utils.win_system.get_pending_update_exe_volatile", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_file_rename", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_servermanager", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_component_servicing", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_dvd_reboot", return_value=False
        ), patch(
            "salt.utils.win_system.get_reboot_required_witnessed", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_computer_name", return_value=None
        ), patch(
            "salt.utils.win_system.get_pending_domain_join", return_value=False
        ):
            self.assertFalse(win_system.get_pending_reboot())

    def test_get_pending_reboot_true_1(self):
        """
        If any boolean returning functions return True, should return True
        """
        with patch(
            "salt.utils.win_system.get_pending_update", return_value=False
        ), patch("salt.utils.win_update.needs_reboot", return_value=False), patch(
            "salt.utils.win_system.get_pending_update_exe_volatile", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_file_rename", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_servermanager", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_component_servicing", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_dvd_reboot", return_value=False
        ), patch(
            "salt.utils.win_system.get_reboot_required_witnessed", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_computer_name", return_value=None
        ), patch(
            "salt.utils.win_system.get_pending_domain_join", return_value=True
        ):
            self.assertTrue(win_system.get_pending_reboot())

    def test_get_pending_reboot_true_2(self):
        """
        If a computer name is returned, should return True
        """
        with patch(
            "salt.utils.win_system.get_pending_update", return_value=False
        ), patch("salt.utils.win_update.needs_reboot", return_value=False), patch(
            "salt.utils.win_system.get_pending_update_exe_volatile", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_file_rename", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_servermanager", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_component_servicing", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_dvd_reboot", return_value=False
        ), patch(
            "salt.utils.win_system.get_reboot_required_witnessed", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_computer_name",
            return_value="pending name",
        ):
            self.assertTrue(win_system.get_pending_reboot())

    def test_get_pending_reboot_details(self):
        """
        All items False should return a dictionary with all items False
        """
        with patch(
            "salt.utils.win_system.get_pending_update", return_value=False
        ), patch("salt.utils.win_update.needs_reboot", return_value=False), patch(
            "salt.utils.win_system.get_pending_update_exe_volatile", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_file_rename", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_servermanager", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_component_servicing", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_dvd_reboot", return_value=False
        ), patch(
            "salt.utils.win_system.get_reboot_required_witnessed", return_value=False
        ), patch(
            "salt.utils.win_system.get_pending_computer_name", return_value=None
        ), patch(
            "salt.utils.win_system.get_pending_domain_join", return_value=False
        ):
            expected = {
                "Pending Component Servicing": False,
                "Pending Computer Rename": False,
                "Pending DVD Reboot": False,
                "Pending File Rename": False,
                "Pending Join Domain": False,
                "Pending ServerManager": False,
                "Pending Update": False,
                "Pending Windows Update": False,
                "Reboot Required Witnessed": False,
                "Volatile Update Exe": False,
            }
            result = win_system.get_pending_reboot_details()
            self.assertDictEqual(expected, result)

    def test_get_pending_reboot_details_true(self):
        """
        All items True should return a dictionary with all items True
        """
        with patch(
            "salt.utils.win_system.get_pending_update", return_value=True
        ), patch("salt.utils.win_update.needs_reboot", return_value=True), patch(
            "salt.utils.win_system.get_pending_update_exe_volatile", return_value=True
        ), patch(
            "salt.utils.win_system.get_pending_file_rename", return_value=True
        ), patch(
            "salt.utils.win_system.get_pending_servermanager", return_value=True
        ), patch(
            "salt.utils.win_system.get_pending_component_servicing", return_value=True
        ), patch(
            "salt.utils.win_system.get_pending_dvd_reboot", return_value=True
        ), patch(
            "salt.utils.win_system.get_reboot_required_witnessed", return_value=True
        ), patch(
            "salt.utils.win_system.get_pending_computer_name",
            return_value="pending name",
        ), patch(
            "salt.utils.win_system.get_pending_domain_join", return_value=True
        ):
            expected = {
                "Pending Component Servicing": True,
                "Pending Computer Rename": True,
                "Pending DVD Reboot": True,
                "Pending File Rename": True,
                "Pending Join Domain": True,
                "Pending ServerManager": True,
                "Pending Update": True,
                "Pending Windows Update": True,
                "Reboot Required Witnessed": True,
                "Volatile Update Exe": True,
            }
            result = win_system.get_pending_reboot_details()
            self.assertDictEqual(expected, result)
| [
"salt.utils.win_system.get_pending_computer_name",
"salt.utils.win_system.get_pending_domain_join",
"tests.support.mock.patch",
"salt.utils.win_system.get_computer_name",
"salt.utils.win_system.set_reboot_required_witnessed",
"os.environ.get",
"salt.utils.win_system.get_reboot_required_witnessed",
"sa... | [((865, 925), 'tests.support.mock.patch', 'patch', (['"""win32api.GetComputerNameEx"""'], {'return_value': '"""FAKENAME"""'}), "('win32api.GetComputerNameEx', return_value='FAKENAME')\n", (870, 925), False, 'from tests.support.mock import patch\n'), ((1119, 1173), 'tests.support.mock.patch', 'patch', (['"""win32api.GetComputerNameEx"""'], {'return_value': 'None'}), "('win32api.GetComputerNameEx', return_value=None)\n", (1124, 1173), False, 'from tests.support.mock import patch\n'), ((1459, 1523), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patch_value'}), "('salt.utils.win_reg.read_value', return_value=patch_value)\n", (1464, 1523), False, 'from tests.support.mock import patch\n'), ((1546, 1584), 'salt.utils.win_system.get_pending_computer_name', 'win_system.get_pending_computer_name', ([], {}), '()\n', (1582, 1584), True, 'import salt.utils.win_system as win_system\n'), ((1813, 1843), 'os.environ.get', 'os.environ.get', (['"""COMPUTERNAME"""'], {}), "('COMPUTERNAME')\n", (1827, 1843), False, 'import os\n'), ((1858, 1922), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patch_value'}), "('salt.utils.win_reg.read_value', return_value=patch_value)\n", (1863, 1922), False, 'from tests.support.mock import patch\n'), ((2147, 2205), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': '(False)'}), "('salt.utils.win_reg.read_value', return_value=False)\n", (2152, 2205), False, 'from tests.support.mock import patch\n'), ((2422, 2480), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'return_value': '(False)'}), "('salt.utils.win_reg.key_exists', return_value=False)\n", (2427, 2480), False, 'from tests.support.mock import patch\n'), ((2714, 2772), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'side_effect': '[True]'}), "('salt.utils.win_reg.key_exists', 
side_effect=[True])\n", (2719, 2772), False, 'from tests.support.mock import patch\n'), ((3008, 3073), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'side_effect': '[False, True]'}), "('salt.utils.win_reg.key_exists', side_effect=[False, True])\n", (3013, 3073), False, 'from tests.support.mock import patch\n'), ((3308, 3380), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'side_effect': '[False, False, True]'}), "('salt.utils.win_reg.key_exists', side_effect=[False, False, True])\n", (3313, 3380), False, 'from tests.support.mock import patch\n'), ((3593, 3651), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'return_value': '(False)'}), "('salt.utils.win_reg.key_exists', return_value=False)\n", (3598, 3651), False, 'from tests.support.mock import patch\n'), ((3867, 3925), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'side_effect': '[True]'}), "('salt.utils.win_reg.key_exists', side_effect=[True])\n", (3872, 3925), False, 'from tests.support.mock import patch\n'), ((4139, 4204), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'side_effect': '[False, True]'}), "('salt.utils.win_reg.key_exists', side_effect=[False, True])\n", (4144, 4204), False, 'from tests.support.mock import patch\n'), ((4468, 4535), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_return'}), "('salt.utils.win_reg.read_value', return_value=patched_return)\n", (4473, 4535), False, 'from tests.support.mock import patch\n'), ((4842, 4909), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_return'}), "('salt.utils.win_reg.read_value', return_value=patched_return)\n", (4847, 4909), False, 'from tests.support.mock import patch\n'), ((5205, 5272), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], 
{'return_value': 'patched_return'}), "('salt.utils.win_reg.read_value', return_value=patched_return)\n", (5210, 5272), False, 'from tests.support.mock import patch\n'), ((5568, 5635), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_return'}), "('salt.utils.win_reg.read_value', return_value=patched_return)\n", (5573, 5635), False, 'from tests.support.mock import patch\n'), ((5975, 6042), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_return'}), "('salt.utils.win_reg.read_value', return_value=patched_return)\n", (5980, 6042), False, 'from tests.support.mock import patch\n'), ((6356, 6423), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_return'}), "('salt.utils.win_reg.read_value', return_value=patched_return)\n", (6361, 6423), False, 'from tests.support.mock import patch\n'), ((6650, 6710), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.value_exists"""'], {'return_value': '(False)'}), "('salt.utils.win_reg.value_exists', return_value=False)\n", (6655, 6710), False, 'from tests.support.mock import patch\n'), ((6933, 6992), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.value_exists"""'], {'return_value': '(True)'}), "('salt.utils.win_reg.value_exists', return_value=True)\n", (6938, 6992), False, 'from tests.support.mock import patch\n'), ((7217, 7275), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'return_value': '(False)'}), "('salt.utils.win_reg.key_exists', return_value=False)\n", (7222, 7275), False, 'from tests.support.mock import patch\n'), ((7277, 7331), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.list_keys"""'], {'return_value': '[]'}), "('salt.utils.win_reg.list_keys', return_value=[])\n", (7282, 7331), False, 'from tests.support.mock import patch\n'), ((7562, 7620), 'tests.support.mock.patch', 'patch', 
(['"""salt.utils.win_reg.key_exists"""'], {'side_effect': '[True]'}), "('salt.utils.win_reg.key_exists', side_effect=[True])\n", (7567, 7620), False, 'from tests.support.mock import patch\n'), ((7833, 7898), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.key_exists"""'], {'side_effect': '[False, True]'}), "('salt.utils.win_reg.key_exists', side_effect=[False, True])\n", (7838, 7898), False, 'from tests.support.mock import patch\n'), ((8175, 8240), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_data'}), "('salt.utils.win_reg.read_value', return_value=patched_data)\n", (8180, 8240), False, 'from tests.support.mock import patch\n'), ((8523, 8588), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_data'}), "('salt.utils.win_reg.read_value', return_value=patched_data)\n", (8528, 8588), False, 'from tests.support.mock import patch\n'), ((8867, 8932), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_data'}), "('salt.utils.win_reg.read_value', return_value=patched_data)\n", (8872, 8932), False, 'from tests.support.mock import patch\n'), ((9206, 9262), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.set_value"""'], {'return_value': '(True)'}), "('salt.utils.win_reg.set_value', return_value=True)\n", (9211, 9262), False, 'from tests.support.mock import patch\n'), ((9840, 9905), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_data'}), "('salt.utils.win_reg.read_value', return_value=patched_data)\n", (9845, 9905), False, 'from tests.support.mock import patch\n'), ((10198, 10263), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_data'}), "('salt.utils.win_reg.read_value', return_value=patched_data)\n", (10203, 10263), False, 'from tests.support.mock import patch\n'), 
((10559, 10624), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_reg.read_value"""'], {'return_value': 'patched_data'}), "('salt.utils.win_reg.read_value', return_value=patched_data)\n", (10564, 10624), False, 'from tests.support.mock import patch\n'), ((10841, 10910), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update', return_value=False)\n", (10846, 10910), False, 'from tests.support.mock import patch\n'), ((10934, 10997), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_update.needs_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_update.needs_reboot', return_value=False)\n", (10939, 10997), False, 'from tests.support.mock import patch\n'), ((10999, 11086), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update_exe_volatile"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update_exe_volatile', return_value\n =False)\n", (11004, 11086), False, 'from tests.support.mock import patch\n'), ((11105, 11179), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_file_rename"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_file_rename', return_value=False)\n", (11110, 11179), False, 'from tests.support.mock import patch\n'), ((11203, 11279), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_servermanager"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_servermanager', return_value=False)\n", (11208, 11279), False, 'from tests.support.mock import patch\n'), ((11303, 11390), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_component_servicing"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_component_servicing', return_value\n =False)\n", (11308, 11390), False, 'from tests.support.mock import patch\n'), ((11409, 11482), 
'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_dvd_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_dvd_reboot', return_value=False)\n", (11414, 11482), False, 'from tests.support.mock import patch\n'), ((11506, 11591), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_reboot_required_witnessed"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_reboot_required_witnessed', return_value=False\n )\n", (11511, 11591), False, 'from tests.support.mock import patch\n'), ((11610, 11685), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_computer_name"""'], {'return_value': 'None'}), "('salt.utils.win_system.get_pending_computer_name', return_value=None)\n", (11615, 11685), False, 'from tests.support.mock import patch\n'), ((11709, 11783), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_domain_join"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_domain_join', return_value=False)\n", (11714, 11783), False, 'from tests.support.mock import patch\n'), ((12028, 12097), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update', return_value=False)\n", (12033, 12097), False, 'from tests.support.mock import patch\n'), ((12121, 12184), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_update.needs_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_update.needs_reboot', return_value=False)\n", (12126, 12184), False, 'from tests.support.mock import patch\n'), ((12186, 12273), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update_exe_volatile"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update_exe_volatile', return_value\n =False)\n", (12191, 12273), False, 'from tests.support.mock import patch\n'), ((12292, 12366), 
'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_file_rename"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_file_rename', return_value=False)\n", (12297, 12366), False, 'from tests.support.mock import patch\n'), ((12390, 12466), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_servermanager"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_servermanager', return_value=False)\n", (12395, 12466), False, 'from tests.support.mock import patch\n'), ((12490, 12577), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_component_servicing"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_component_servicing', return_value\n =False)\n", (12495, 12577), False, 'from tests.support.mock import patch\n'), ((12596, 12669), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_dvd_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_dvd_reboot', return_value=False)\n", (12601, 12669), False, 'from tests.support.mock import patch\n'), ((12693, 12778), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_reboot_required_witnessed"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_reboot_required_witnessed', return_value=False\n )\n", (12698, 12778), False, 'from tests.support.mock import patch\n'), ((12797, 12872), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_computer_name"""'], {'return_value': 'None'}), "('salt.utils.win_system.get_pending_computer_name', return_value=None)\n", (12802, 12872), False, 'from tests.support.mock import patch\n'), ((12896, 12969), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_domain_join"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_domain_join', return_value=True)\n", (12901, 12969), False, 'from tests.support.mock import patch\n'), 
((13197, 13266), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update', return_value=False)\n", (13202, 13266), False, 'from tests.support.mock import patch\n'), ((13290, 13353), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_update.needs_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_update.needs_reboot', return_value=False)\n", (13295, 13353), False, 'from tests.support.mock import patch\n'), ((13355, 13442), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update_exe_volatile"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update_exe_volatile', return_value\n =False)\n", (13360, 13442), False, 'from tests.support.mock import patch\n'), ((13461, 13535), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_file_rename"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_file_rename', return_value=False)\n", (13466, 13535), False, 'from tests.support.mock import patch\n'), ((13559, 13635), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_servermanager"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_servermanager', return_value=False)\n", (13564, 13635), False, 'from tests.support.mock import patch\n'), ((13659, 13746), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_component_servicing"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_component_servicing', return_value\n =False)\n", (13664, 13746), False, 'from tests.support.mock import patch\n'), ((13765, 13838), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_dvd_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_dvd_reboot', return_value=False)\n", (13770, 13838), False, 'from tests.support.mock import patch\n'), ((13862, 
13947), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_reboot_required_witnessed"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_reboot_required_witnessed', return_value=False\n )\n", (13867, 13947), False, 'from tests.support.mock import patch\n'), ((13966, 14056), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_computer_name"""'], {'return_value': '"""pending name"""'}), "('salt.utils.win_system.get_pending_computer_name', return_value=\n 'pending name')\n", (13971, 14056), False, 'from tests.support.mock import patch\n'), ((14306, 14375), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update', return_value=False)\n", (14311, 14375), False, 'from tests.support.mock import patch\n'), ((14399, 14462), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_update.needs_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_update.needs_reboot', return_value=False)\n", (14404, 14462), False, 'from tests.support.mock import patch\n'), ((14464, 14551), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update_exe_volatile"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_update_exe_volatile', return_value\n =False)\n", (14469, 14551), False, 'from tests.support.mock import patch\n'), ((14570, 14644), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_file_rename"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_file_rename', return_value=False)\n", (14575, 14644), False, 'from tests.support.mock import patch\n'), ((14668, 14744), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_servermanager"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_servermanager', return_value=False)\n", (14673, 14744), False, 'from tests.support.mock import 
patch\n'), ((14768, 14855), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_component_servicing"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_component_servicing', return_value\n =False)\n", (14773, 14855), False, 'from tests.support.mock import patch\n'), ((14874, 14947), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_dvd_reboot"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_dvd_reboot', return_value=False)\n", (14879, 14947), False, 'from tests.support.mock import patch\n'), ((14971, 15056), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_reboot_required_witnessed"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_reboot_required_witnessed', return_value=False\n )\n", (14976, 15056), False, 'from tests.support.mock import patch\n'), ((15075, 15150), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_computer_name"""'], {'return_value': 'None'}), "('salt.utils.win_system.get_pending_computer_name', return_value=None)\n", (15080, 15150), False, 'from tests.support.mock import patch\n'), ((15174, 15248), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_domain_join"""'], {'return_value': '(False)'}), "('salt.utils.win_system.get_pending_domain_join', return_value=False)\n", (15179, 15248), False, 'from tests.support.mock import patch\n'), ((15809, 15848), 'salt.utils.win_system.get_pending_reboot_details', 'win_system.get_pending_reboot_details', ([], {}), '()\n', (15846, 15848), True, 'import salt.utils.win_system as win_system\n'), ((16060, 16128), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_update', return_value=True)\n", (16065, 16128), False, 'from tests.support.mock import patch\n'), ((16152, 16214), 'tests.support.mock.patch', 'patch', 
(['"""salt.utils.win_update.needs_reboot"""'], {'return_value': '(True)'}), "('salt.utils.win_update.needs_reboot', return_value=True)\n", (16157, 16214), False, 'from tests.support.mock import patch\n'), ((16216, 16302), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_update_exe_volatile"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_update_exe_volatile', return_value\n =True)\n", (16221, 16302), False, 'from tests.support.mock import patch\n'), ((16321, 16394), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_file_rename"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_file_rename', return_value=True)\n", (16326, 16394), False, 'from tests.support.mock import patch\n'), ((16418, 16493), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_servermanager"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_servermanager', return_value=True)\n", (16423, 16493), False, 'from tests.support.mock import patch\n'), ((16517, 16603), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_component_servicing"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_component_servicing', return_value\n =True)\n", (16522, 16603), False, 'from tests.support.mock import patch\n'), ((16622, 16694), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_dvd_reboot"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_dvd_reboot', return_value=True)\n", (16627, 16694), False, 'from tests.support.mock import patch\n'), ((16718, 16797), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_reboot_required_witnessed"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_reboot_required_witnessed', return_value=True)\n", (16723, 16797), False, 'from tests.support.mock import patch\n'), ((16821, 16911), 'tests.support.mock.patch', 'patch', 
(['"""salt.utils.win_system.get_pending_computer_name"""'], {'return_value': '"""pending name"""'}), "('salt.utils.win_system.get_pending_computer_name', return_value=\n 'pending name')\n", (16826, 16911), False, 'from tests.support.mock import patch\n'), ((16943, 17016), 'tests.support.mock.patch', 'patch', (['"""salt.utils.win_system.get_pending_domain_join"""'], {'return_value': '(True)'}), "('salt.utils.win_system.get_pending_domain_join', return_value=True)\n", (16948, 17016), False, 'from tests.support.mock import patch\n'), ((17567, 17606), 'salt.utils.win_system.get_pending_reboot_details', 'win_system.get_pending_reboot_details', ([], {}), '()\n', (17604, 17606), True, 'import salt.utils.win_system as win_system\n'), ((956, 986), 'salt.utils.win_system.get_computer_name', 'win_system.get_computer_name', ([], {}), '()\n', (984, 986), True, 'import salt.utils.win_system as win_system\n'), ((1204, 1234), 'salt.utils.win_system.get_computer_name', 'win_system.get_computer_name', ([], {}), '()\n', (1232, 1234), True, 'import salt.utils.win_system as win_system\n'), ((1954, 1992), 'salt.utils.win_system.get_pending_computer_name', 'win_system.get_pending_computer_name', ([], {}), '()\n', (1990, 1992), True, 'import salt.utils.win_system as win_system\n'), ((2237, 2275), 'salt.utils.win_system.get_pending_computer_name', 'win_system.get_pending_computer_name', ([], {}), '()\n', (2273, 2275), True, 'import salt.utils.win_system as win_system\n'), ((2511, 2555), 'salt.utils.win_system.get_pending_component_servicing', 'win_system.get_pending_component_servicing', ([], {}), '()\n', (2553, 2555), True, 'import salt.utils.win_system as win_system\n'), ((2802, 2846), 'salt.utils.win_system.get_pending_component_servicing', 'win_system.get_pending_component_servicing', ([], {}), '()\n', (2844, 2846), True, 'import salt.utils.win_system as win_system\n'), ((3103, 3147), 'salt.utils.win_system.get_pending_component_servicing', 'win_system.get_pending_component_servicing', 
([], {}), '()\n', (3145, 3147), True, 'import salt.utils.win_system as win_system\n'), ((3410, 3454), 'salt.utils.win_system.get_pending_component_servicing', 'win_system.get_pending_component_servicing', ([], {}), '()\n', (3452, 3454), True, 'import salt.utils.win_system as win_system\n'), ((3682, 3718), 'salt.utils.win_system.get_pending_domain_join', 'win_system.get_pending_domain_join', ([], {}), '()\n', (3716, 3718), True, 'import salt.utils.win_system as win_system\n'), ((3955, 3991), 'salt.utils.win_system.get_pending_domain_join', 'win_system.get_pending_domain_join', ([], {}), '()\n', (3989, 3991), True, 'import salt.utils.win_system as win_system\n'), ((4234, 4270), 'salt.utils.win_system.get_pending_domain_join', 'win_system.get_pending_domain_join', ([], {}), '()\n', (4268, 4270), True, 'import salt.utils.win_system as win_system\n'), ((4566, 4602), 'salt.utils.win_system.get_pending_file_rename', 'win_system.get_pending_file_rename', ([], {}), '()\n', (4600, 4602), True, 'import salt.utils.win_system as win_system\n'), ((4940, 4976), 'salt.utils.win_system.get_pending_file_rename', 'win_system.get_pending_file_rename', ([], {}), '()\n', (4974, 4976), True, 'import salt.utils.win_system as win_system\n'), ((5302, 5338), 'salt.utils.win_system.get_pending_file_rename', 'win_system.get_pending_file_rename', ([], {}), '()\n', (5336, 5338), True, 'import salt.utils.win_system as win_system\n'), ((5666, 5704), 'salt.utils.win_system.get_pending_servermanager', 'win_system.get_pending_servermanager', ([], {}), '()\n', (5702, 5704), True, 'import salt.utils.win_system as win_system\n'), ((6073, 6109), 'salt.utils.win_system.get_pending_file_rename', 'win_system.get_pending_file_rename', ([], {}), '()\n', (6107, 6109), True, 'import salt.utils.win_system as win_system\n'), ((6453, 6489), 'salt.utils.win_system.get_pending_file_rename', 'win_system.get_pending_file_rename', ([], {}), '()\n', (6487, 6489), True, 'import salt.utils.win_system as win_system\n'), 
((6741, 6776), 'salt.utils.win_system.get_pending_dvd_reboot', 'win_system.get_pending_dvd_reboot', ([], {}), '()\n', (6774, 6776), True, 'import salt.utils.win_system as win_system\n'), ((7022, 7057), 'salt.utils.win_system.get_pending_dvd_reboot', 'win_system.get_pending_dvd_reboot', ([], {}), '()\n', (7055, 7057), True, 'import salt.utils.win_system as win_system\n'), ((7384, 7415), 'salt.utils.win_system.get_pending_update', 'win_system.get_pending_update', ([], {}), '()\n', (7413, 7415), True, 'import salt.utils.win_system as win_system\n'), ((7650, 7681), 'salt.utils.win_system.get_pending_update', 'win_system.get_pending_update', ([], {}), '()\n', (7679, 7681), True, 'import salt.utils.win_system as win_system\n'), ((7928, 7959), 'salt.utils.win_system.get_pending_update', 'win_system.get_pending_update', ([], {}), '()\n', (7957, 7959), True, 'import salt.utils.win_system as win_system\n'), ((8271, 8313), 'salt.utils.win_system.get_reboot_required_witnessed', 'win_system.get_reboot_required_witnessed', ([], {}), '()\n', (8311, 8313), True, 'import salt.utils.win_system as win_system\n'), ((8619, 8661), 'salt.utils.win_system.get_reboot_required_witnessed', 'win_system.get_reboot_required_witnessed', ([], {}), '()\n', (8659, 8661), True, 'import salt.utils.win_system as win_system\n'), ((8962, 9004), 'salt.utils.win_system.get_reboot_required_witnessed', 'win_system.get_reboot_required_witnessed', ([], {}), '()\n', (9002, 9004), True, 'import salt.utils.win_system as win_system\n'), ((9298, 9340), 'salt.utils.win_system.set_reboot_required_witnessed', 'win_system.set_reboot_required_witnessed', ([], {}), '()\n', (9338, 9340), True, 'import salt.utils.win_system as win_system\n'), ((9936, 9980), 'salt.utils.win_system.get_pending_update_exe_volatile', 'win_system.get_pending_update_exe_volatile', ([], {}), '()\n', (9978, 9980), True, 'import salt.utils.win_system as win_system\n'), ((10294, 10338), 'salt.utils.win_system.get_pending_update_exe_volatile', 
'win_system.get_pending_update_exe_volatile', ([], {}), '()\n', (10336, 10338), True, 'import salt.utils.win_system as win_system\n'), ((10654, 10698), 'salt.utils.win_system.get_pending_update_exe_volatile', 'win_system.get_pending_update_exe_volatile', ([], {}), '()\n', (10696, 10698), True, 'import salt.utils.win_system as win_system\n'), ((11836, 11867), 'salt.utils.win_system.get_pending_reboot', 'win_system.get_pending_reboot', ([], {}), '()\n', (11865, 11867), True, 'import salt.utils.win_system as win_system\n'), ((13021, 13052), 'salt.utils.win_system.get_pending_reboot', 'win_system.get_pending_reboot', ([], {}), '()\n', (13050, 13052), True, 'import salt.utils.win_system as win_system\n'), ((14116, 14147), 'salt.utils.win_system.get_pending_reboot', 'win_system.get_pending_reboot', ([], {}), '()\n', (14145, 14147), True, 'import salt.utils.win_system as win_system\n')] |
"""Tests for the bradley_terry module"""
# Copyright 2019 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import unittest
from ..bradley_terry import get_bt_summation_terms, get_bt_derivatives, sum
from .assertions import assert_close
class TestBradleyTerryFunctions(unittest.TestCase):
    """Tests for functions in the bradley_terry module"""

    def setUp(self):
        # Promote numpy warnings to exceptions so silent numeric problems fail loudly.
        np.seterr(all="raise")
        # Bind the shared assert_close helper to this instance as a bound method.
        self.assert_close = assert_close.__get__(self, self.__class__)

    def test_get_bt_summation_terms(self):
        """Test get_bt_summation_terms()"""
        gamma = np.array([1.0, 2.0])
        adversary_gamma = np.array([1.0, 2.0])
        d1, d2 = get_bt_summation_terms(gamma, adversary_gamma)
        self.assert_close([0.5, 0.5], d1, "d1")
        self.assert_close([0.25, 0.25], d2, "d2")

    def test_sum(self):
        """Test sum()"""
        x = np.array([1.0, 2.0, 4.0, 8.0])
        self.assertEqual(15.0, sum(x, 0, 4))
        self.assertEqual(0.0, sum(x, 0, 0))
        self.assertEqual(6.0, sum(x, 1, 3))
        self.assertEqual(7.0, sum(x, 0, 3))

    def test_sum_error_compensation(self):
        """Test sum() error compensation"""
        # BUG FIX: this method was previously also named test_sum, which
        # redefined (and therefore silently disabled) the basic test_sum
        # above -- unittest only sees the last definition of a name.
        x = np.full([10], 0.1)
        self.assertEqual(1.0, sum(x, 0, 10))
        x = np.array([1e100, -1.0, -1e100, 1.0])
        self.assertEqual(0.0, sum(x, 0, 4))
        x = np.array([1e100, 1.0, -1e100, 1.0])
        self.assertEqual(2.0, sum(x, 0, 4))

    def test_get_bt_derivatives_single_win(self):
        """Test get_bt_derivatives() with a single win"""
        slices = [(0, 1)]
        wins = np.array([1.0])
        gamma = np.array([1.0])
        adversary_gamma = np.array([1.0])
        d1, d2 = get_bt_derivatives(slices, wins, gamma, adversary_gamma)
        self.assert_close([0.5], d1, "d1")
        self.assert_close([-0.25], d2, "d2")

    def test_get_bt_derivatives_single_loss(self):
        """Test get_bt_derivatives() with a single loss"""
        slices = [(0, 1)]
        wins = np.array([0.0])
        gamma = np.array([1.0])
        adversary_gamma = np.array([1.0])
        d1, d2 = get_bt_derivatives(slices, wins, gamma, adversary_gamma)
        self.assert_close([-0.5], d1, "d1")
        self.assert_close([-0.25], d2, "d2")

    def test_get_bt_derivatives_four_losses(self):
        """Test get_bt_derivatives() with four losses"""
        slices = [(0, 4)]
        wins = np.array([0.0])
        gamma = np.array([4.0, 4.0, 4.0, 4.0])
        adversary_gamma = np.array([1.0, 1.0, 1.0, 1.0])
        d1, d2 = get_bt_derivatives(slices, wins, gamma, adversary_gamma)
        self.assert_close([-3.2], d1, "d1")
        self.assert_close([-0.64], d2, "d2")

    def test_get_bt_derivatives_no_ascents(self):
        """Test get_bt_derivatives() with no ascents"""
        slices = [(0, 0)]
        wins = np.array([])
        gamma = np.array([])
        adversary_gamma = np.array([])
        d1, d2 = get_bt_derivatives(slices, wins, gamma, adversary_gamma)
        self.assert_close([0.0], d1, "d1")
        self.assert_close([0.0], d2, "d2")

    def test_get_bt_derivatives(self):
        """Test get_bt_derivatives() with multiple slices"""
        slices = [(0, 1), (1, 4)]
        wins = np.array([1.0, 2.0])
        gamma = np.array([6.0, 4.0, 4.0, 4.0])
        adversary_gamma = np.array([6.0, 4.0, 12.0, 12.0])
        d1, d2 = get_bt_derivatives(slices, wins, gamma, adversary_gamma)
        self.assert_close([0.5, 1.0], d1, "d1")
        self.assert_close([-0.25, -0.625], d2, "d2")
| [
"numpy.array",
"numpy.seterr",
"numpy.full"
] | [((903, 925), 'numpy.seterr', 'np.seterr', ([], {'all': '"""raise"""'}), "(all='raise')\n", (912, 925), True, 'import numpy as np\n'), ((1101, 1121), 'numpy.array', 'np.array', (['[1.0, 2.0]'], {}), '([1.0, 2.0])\n', (1109, 1121), True, 'import numpy as np\n'), ((1148, 1168), 'numpy.array', 'np.array', (['[1.0, 2.0]'], {}), '([1.0, 2.0])\n', (1156, 1168), True, 'import numpy as np\n'), ((1393, 1423), 'numpy.array', 'np.array', (['[1.0, 2.0, 4.0, 8.0]'], {}), '([1.0, 2.0, 4.0, 8.0])\n', (1401, 1423), True, 'import numpy as np\n'), ((1682, 1700), 'numpy.full', 'np.full', (['[10]', '(0.1)'], {}), '([10], 0.1)\n', (1689, 1700), True, 'import numpy as np\n'), ((1758, 1796), 'numpy.array', 'np.array', (['[1e+100, -1.0, -1e+100, 1.0]'], {}), '([1e+100, -1.0, -1e+100, 1.0])\n', (1766, 1796), True, 'import numpy as np\n'), ((1851, 1888), 'numpy.array', 'np.array', (['[1e+100, 1.0, -1e+100, 1.0]'], {}), '([1e+100, 1.0, -1e+100, 1.0])\n', (1859, 1888), True, 'import numpy as np\n'), ((2081, 2096), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (2089, 2096), True, 'import numpy as np\n'), ((2113, 2128), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (2121, 2128), True, 'import numpy as np\n'), ((2155, 2170), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (2163, 2170), True, 'import numpy as np\n'), ((2485, 2500), 'numpy.array', 'np.array', (['[0.0]'], {}), '([0.0])\n', (2493, 2500), True, 'import numpy as np\n'), ((2517, 2532), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (2525, 2532), True, 'import numpy as np\n'), ((2559, 2574), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (2567, 2574), True, 'import numpy as np\n'), ((2888, 2903), 'numpy.array', 'np.array', (['[0.0]'], {}), '([0.0])\n', (2896, 2903), True, 'import numpy as np\n'), ((2920, 2950), 'numpy.array', 'np.array', (['[4.0, 4.0, 4.0, 4.0]'], {}), '([4.0, 4.0, 4.0, 4.0])\n', (2928, 2950), True, 'import numpy as np\n'), ((2977, 3007), 'numpy.array', 'np.array', 
(['[1.0, 1.0, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.0, 1.0])\n', (2985, 3007), True, 'import numpy as np\n'), ((3319, 3331), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3327, 3331), True, 'import numpy as np\n'), ((3348, 3360), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3356, 3360), True, 'import numpy as np\n'), ((3387, 3399), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (3395, 3399), True, 'import numpy as np\n'), ((3710, 3730), 'numpy.array', 'np.array', (['[1.0, 2.0]'], {}), '([1.0, 2.0])\n', (3718, 3730), True, 'import numpy as np\n'), ((3747, 3777), 'numpy.array', 'np.array', (['[6.0, 4.0, 4.0, 4.0]'], {}), '([6.0, 4.0, 4.0, 4.0])\n', (3755, 3777), True, 'import numpy as np\n'), ((3804, 3836), 'numpy.array', 'np.array', (['[6.0, 4.0, 12.0, 12.0]'], {}), '([6.0, 4.0, 12.0, 12.0])\n', (3812, 3836), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
"""Log Analysis Project for Full Stack Nanodegree by Udacity"""
import psycopg2
DBNAME = "news"
def three_most_popular_articles():
    """Query the news database and print the top three most viewed articles.

    Relies on the pre-created database view VIEW_top_three_articles, which
    yields (title, view_count) rows already ordered and limited to three.
    """
    conn = psycopg2.connect(database=DBNAME)
    cur = conn.cursor()
    # BUG FIX: the original executed the bare view name (not valid SQL) and
    # then read from the undefined names `cursor` and `results`.
    cur.execute('SELECT * FROM VIEW_top_three_articles')
    results = cur.fetchall()
    cur.close()
    conn.close()
    print()
    print('Three most popular articles of all time')
    print('=======================================')
    for result in results:
        print('"{title}" - {count} views'
              .format(title=result[0], count=result[1]))
    print()
    return
def most_popular_authors():
    """Query the news database and print the three most viewed authors.

    Relies on the pre-created database view VIEW_most_popular_authors,
    which yields (author, view_count) rows ordered by views.
    """
    conn = psycopg2.connect(database=DBNAME)
    cur = conn.cursor()
    # BUG FIX: the original executed the bare view name (not valid SQL) and
    # then read from the undefined names `cursor` and `results`.
    cur.execute('SELECT * FROM VIEW_most_popular_authors')
    results = cur.fetchall()
    cur.close()
    conn.close()
    print()
    print('Three most popular authors')
    print('=======================================')
    for result in results:
        print('"{author}" - {count} views'
              .format(author=result[0], count=result[1]))
    print()
    return
def days_with_high_errors():
    """Query the news database and print days where over 1% of requests failed.

    Relies on the pre-created view VIEW_days_with_over_one_percent_errors,
    which yields (day, error_rate) rows.
    """
    conn = psycopg2.connect(database=DBNAME)
    cur = conn.cursor()
    # BUG FIX: the original executed the bare view name (not valid SQL) and
    # then read from the undefined names `cursor` and `results`.
    cur.execute('SELECT * FROM VIEW_days_with_over_one_percent_errors')
    results = cur.fetchall()
    cur.close()
    conn.close()
    print()
    print('Days with over 1% errors')
    print('=======================================')
    for result in results:
        print('"{day}" - {error_rate} errors'
              .format(day=result[0], error_rate=result[1]))
    print()
    return
def main():
    """Run each log-analysis report in sequence."""
    reports = (
        three_most_popular_articles,
        most_popular_authors,
        days_with_high_errors,
    )
    for report in reports:
        report()


if __name__ == '__main__':
    main()
| [
"psycopg2.connect"
] | [((235, 268), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': 'DBNAME'}), '(database=DBNAME)\n', (251, 268), False, 'import psycopg2\n'), ((799, 832), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': 'DBNAME'}), '(database=DBNAME)\n', (815, 832), False, 'import psycopg2\n'), ((1358, 1391), 'psycopg2.connect', 'psycopg2.connect', ([], {'database': 'DBNAME'}), '(database=DBNAME)\n', (1374, 1391), False, 'import psycopg2\n')] |
import torch
import torch.nn as nn
import pytorch_lightning as pl
from torchvision.models import (
alexnet,
vgg16_bn,
resnet18,
resnet34,
resnet50,
densenet121,
densenet161,
)
from torch.nn import functional as F
from pytorch_lightning.metrics.functional import accuracy, precision_recall
class MarsModel(pl.LightningModule):
    """Image-classification LightningModule wrapping a torchvision backbone.

    Expected hyper_param keys: "model", "pretrained", "transfer_learning",
    "num_classes", "optimizer" ("adam" or "sgd"), "learning_rate", "momentum".
    """

    # Supported torchvision constructors, keyed by the config "model" name.
    # Replaces the original duplicated if/elif ladder.
    _BACKBONES = {
        "resnet18": resnet18,
        "resnet34": resnet34,
        "resnet50": resnet50,
        "alexnet": alexnet,
        "vgg16": vgg16_bn,
        "densenet121": densenet121,
        "densenet161": densenet161,
    }

    def __init__(self, hyper_param):
        super().__init__()
        self.momentum = hyper_param["momentum"]
        self.optimizer = hyper_param["optimizer"]
        self.lr = hyper_param["learning_rate"]
        self.num_classes = hyper_param["num_classes"]

        model_name = hyper_param["model"]
        if model_name not in self._BACKBONES:
            print("Invalid model name, exiting...")
            exit()

        self.net = self._BACKBONES[model_name](
            pretrained=hyper_param["pretrained"]
        )
        if hyper_param["transfer_learning"] is True:
            # Freeze the pretrained feature extractor so only the new head
            # trains.
            # BUG FIX: set_parameter_requires_grad is a module-level function,
            # but the original called self.set_parameter_requires_grad(...),
            # which raises AttributeError at construction time.
            set_parameter_requires_grad(self.net)

        # Swap the final classifier layer for one sized to our class count.
        # Which attribute holds the classifier differs per architecture.
        num_classes = hyper_param["num_classes"]
        if model_name.startswith("resnet"):
            in_features = self.net.fc.in_features
            self.net.fc = nn.Linear(in_features, num_classes)
        elif model_name in ("alexnet", "vgg16"):
            in_features = self.net.classifier[6].in_features
            self.net.classifier[6] = nn.Linear(in_features, num_classes)
        else:  # densenet121 / densenet161
            in_features = self.net.classifier.in_features
            self.net.classifier = nn.Linear(in_features, num_classes)

    def forward(self, x):
        """Run the wrapped backbone on a batch of images."""
        return self.net(x)

    def training_step(self, batch, batch_idx):
        """Compute cross-entropy loss for one training batch."""
        x, y = batch
        y_hat = self(x)
        loss = F.cross_entropy(y_hat, y)
        return {"loss": loss}

    def validation_step(self, batch, batch_idx):
        """Compute loss, accuracy, precision and recall for one val batch."""
        x, y = batch
        y_hat = self(x)
        loss = F.cross_entropy(y_hat, y)
        acc = accuracy(torch.argmax(y_hat, dim=1), y, num_classes=self.num_classes)
        # reduction="none" keeps per-class precision/recall tensors.
        prec, recall = precision_recall(
            F.softmax(y_hat, dim=1), y, num_classes=self.num_classes, reduction="none"
        )
        return {
            "val_loss": loss,
            "val_acc": acc,
            "val_prec": prec,
            "val_recall": recall,
        }

    def validation_epoch_end(self, outputs):
        """Average the per-batch validation metrics over the epoch."""
        avg_loss = torch.stack([x["val_loss"] for x in outputs]).mean()
        avg_acc = torch.stack([x["val_acc"] for x in outputs]).mean()
        return {
            "val_loss": avg_loss,
            "progress_bar": {"val_loss": avg_loss, "val_acc": avg_acc},
        }

    def test_step(self, batch, batch_idx):
        """Compute cross-entropy loss for one test batch."""
        x, y = batch
        y_hat = self(x)
        loss = F.cross_entropy(y_hat, y)
        return {"test_loss": loss}

    def test_epoch_end(self, outputs):
        """Average the per-batch test losses over the epoch."""
        avg_loss = torch.stack([x["test_loss"] for x in outputs]).mean()
        logs = {"test_loss": avg_loss}
        return {"test_loss": avg_loss, "log": logs}

    def configure_optimizers(self):
        """Build the optimizer over parameters that still require gradients.

        With transfer learning enabled, only the replaced classifier head
        remains trainable.
        """
        params_to_update = []
        print("Params to learn:")
        for name, param in self.net.named_parameters():
            if param.requires_grad:
                params_to_update.append(param)
                print("\t", name)
        if self.optimizer == "adam":
            optimizer = torch.optim.Adam(params_to_update, lr=self.lr)
        elif self.optimizer == "sgd":
            optimizer = torch.optim.SGD(
                params_to_update, lr=self.lr, momentum=self.momentum
            )
        else:
            print("Invalid optimizer, exiting...")
            exit()
        return optimizer
def set_parameter_requires_grad(model):
    """Freeze every parameter of *model* so no gradients are computed for it."""
    for frozen_param in model.parameters():
        frozen_param.requires_grad = False
| [
"torch.optim.Adam",
"torch.optim.SGD",
"torchvision.models.densenet161",
"torch.stack",
"torchvision.models.resnet18",
"torchvision.models.alexnet",
"torchvision.models.resnet34",
"torchvision.models.vgg16_bn",
"torch.nn.Linear",
"torch.nn.functional.cross_entropy",
"torchvision.models.densenet1... | [((3826, 3851), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['y_hat', 'y'], {}), '(y_hat, y)\n', (3841, 3851), True, 'from torch.nn import functional as F\n'), ((3992, 4017), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['y_hat', 'y'], {}), '(y_hat, y)\n', (4007, 4017), True, 'from torch.nn import functional as F\n'), ((4814, 4839), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['y_hat', 'y'], {}), '(y_hat, y)\n', (4829, 4839), True, 'from torch.nn import functional as F\n'), ((743, 789), 'torchvision.models.resnet18', 'resnet18', ([], {'pretrained': "hyper_param['pretrained']"}), "(pretrained=hyper_param['pretrained'])\n", (751, 789), False, 'from torchvision.models import alexnet, vgg16_bn, resnet18, resnet34, resnet50, densenet121, densenet161\n'), ((980, 1027), 'torch.nn.Linear', 'nn.Linear', (['num_ftrs', "hyper_param['num_classes']"], {}), "(num_ftrs, hyper_param['num_classes'])\n", (989, 1027), True, 'import torch.nn as nn\n'), ((4041, 4067), 'torch.argmax', 'torch.argmax', (['y_hat'], {'dim': '(1)'}), '(y_hat, dim=1)\n', (4053, 4067), False, 'import torch\n'), ((4155, 4178), 'torch.nn.functional.softmax', 'F.softmax', (['y_hat'], {'dim': '(1)'}), '(y_hat, dim=1)\n', (4164, 4178), True, 'from torch.nn import functional as F\n'), ((5423, 5469), 'torch.optim.Adam', 'torch.optim.Adam', (['params_to_update'], {'lr': 'self.lr'}), '(params_to_update, lr=self.lr)\n', (5439, 5469), False, 'import torch\n'), ((1154, 1200), 'torchvision.models.resnet34', 'resnet34', ([], {'pretrained': "hyper_param['pretrained']"}), "(pretrained=hyper_param['pretrained'])\n", (1162, 1200), False, 'from torchvision.models import alexnet, vgg16_bn, resnet18, resnet34, resnet50, densenet121, densenet161\n'), ((1390, 1437), 'torch.nn.Linear', 'nn.Linear', (['num_ftrs', "hyper_param['num_classes']"], {}), "(num_ftrs, hyper_param['num_classes'])\n", (1399, 1437), True, 'import torch.nn as nn\n'), ((4454, 4499), 
'torch.stack', 'torch.stack', (["[x['val_loss'] for x in outputs]"], {}), "([x['val_loss'] for x in outputs])\n", (4465, 4499), False, 'import torch\n'), ((4525, 4569), 'torch.stack', 'torch.stack', (["[x['val_acc'] for x in outputs]"], {}), "([x['val_acc'] for x in outputs])\n", (4536, 4569), False, 'import torch\n'), ((4934, 4980), 'torch.stack', 'torch.stack', (["[x['test_loss'] for x in outputs]"], {}), "([x['test_loss'] for x in outputs])\n", (4945, 4980), False, 'import torch\n'), ((5532, 5601), 'torch.optim.SGD', 'torch.optim.SGD', (['params_to_update'], {'lr': 'self.lr', 'momentum': 'self.momentum'}), '(params_to_update, lr=self.lr, momentum=self.momentum)\n', (5547, 5601), False, 'import torch\n'), ((1564, 1610), 'torchvision.models.resnet50', 'resnet50', ([], {'pretrained': "hyper_param['pretrained']"}), "(pretrained=hyper_param['pretrained'])\n", (1572, 1610), False, 'from torchvision.models import alexnet, vgg16_bn, resnet18, resnet34, resnet50, densenet121, densenet161\n'), ((1800, 1847), 'torch.nn.Linear', 'nn.Linear', (['num_ftrs', "hyper_param['num_classes']"], {}), "(num_ftrs, hyper_param['num_classes'])\n", (1809, 1847), True, 'import torch.nn as nn\n'), ((1972, 2017), 'torchvision.models.alexnet', 'alexnet', ([], {'pretrained': "hyper_param['pretrained']"}), "(pretrained=hyper_param['pretrained'])\n", (1979, 2017), False, 'from torchvision.models import alexnet, vgg16_bn, resnet18, resnet34, resnet50, densenet121, densenet161\n'), ((2229, 2276), 'torch.nn.Linear', 'nn.Linear', (['num_ftrs', "hyper_param['num_classes']"], {}), "(num_ftrs, hyper_param['num_classes'])\n", (2238, 2276), True, 'import torch.nn as nn\n'), ((2400, 2446), 'torchvision.models.vgg16_bn', 'vgg16_bn', ([], {'pretrained': "hyper_param['pretrained']"}), "(pretrained=hyper_param['pretrained'])\n", (2408, 2446), False, 'from torchvision.models import alexnet, vgg16_bn, resnet18, resnet34, resnet50, densenet121, densenet161\n'), ((2658, 2705), 'torch.nn.Linear', 'nn.Linear', 
(['num_ftrs', "hyper_param['num_classes']"], {}), "(num_ftrs, hyper_param['num_classes'])\n", (2667, 2705), True, 'import torch.nn as nn\n'), ((2839, 2888), 'torchvision.models.densenet121', 'densenet121', ([], {'pretrained': "hyper_param['pretrained']"}), "(pretrained=hyper_param['pretrained'])\n", (2850, 2888), False, 'from torchvision.models import alexnet, vgg16_bn, resnet18, resnet34, resnet50, densenet121, densenet161\n'), ((3094, 3141), 'torch.nn.Linear', 'nn.Linear', (['num_ftrs', "hyper_param['num_classes']"], {}), "(num_ftrs, hyper_param['num_classes'])\n", (3103, 3141), True, 'import torch.nn as nn\n'), ((3275, 3324), 'torchvision.models.densenet161', 'densenet161', ([], {'pretrained': "hyper_param['pretrained']"}), "(pretrained=hyper_param['pretrained'])\n", (3286, 3324), False, 'from torchvision.models import alexnet, vgg16_bn, resnet18, resnet34, resnet50, densenet121, densenet161\n'), ((3530, 3577), 'torch.nn.Linear', 'nn.Linear', (['num_ftrs', "hyper_param['num_classes']"], {}), "(num_ftrs, hyper_param['num_classes'])\n", (3539, 3577), True, 'import torch.nn as nn\n')] |
import gi
import time
import os
import json
gi.require_version('Notify', '0.7')
gi.require_version('Gtk', '3.0')
from gi.repository import Notify, Gtk
from gi.repository import Gio, GLib, GObject, Peas
from gi.repository import RB
from pypresence import Presence
from status_prefs import discord_status_prefs
class discord_status_dev(GObject.Object, Peas.Activatable):
    # NOTE(review): everything in this class body runs once at class-definition
    # time (module import), not per plugin instance -- including the Discord
    # connection attempt and the blocking retry dialog below.
    # Load plugin settings from settings.json located next to this file.
    path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "settings.json")
    with open(path) as file:
        settings = json.load(file)
    show_notifs = settings["show_notifs"]
    time_style = settings["time_style"]
    try:
        Notify.init("Rhythmbox")
    except:
        print("Failed to init Notify. Is the notificaion service running?")
    is_streaming = False
    # Discord Rich Presence client; the string is this plugin's application id.
    RPC = Presence("589905203533185064")
    connected = False
    gave_up = False
    try:
        RPC.connect()
        try:
            if show_notifs:
                Notify.Notification.new("Rhythmbox Discord Status Plugin", "Connected to Discord").show()
                Notify.uninit()
        except:
            print("Failed to init Notify. Is the notificaion service running?")
        connected = True
    except ConnectionRefusedError:
        # Discord client is not running (or refused the local IPC connection).
        try:
            if show_notifs:
                Notify.Notification.new("Rhythmbox Discord Status Plugin", "Failed to connect to discord: ConnectionRefused. Is discord open?").show()
                Notify.uninit()
        except:
            print("Failed to init Notify. Is the notificaion service running?")
        if show_notifs:
            # Keep offering a retry dialog until we connect or the user cancels.
            while not connected and not gave_up:
                dialog = Gtk.Dialog(title = "Discord Rhythmbox Status Plugin",
                            parent = None,
                            buttons = (Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                            Gtk.STOCK_OK, Gtk.ResponseType.OK)
                            )
                hbox = Gtk.HBox()
                label = Gtk.Label("\nFailed to connect to the discord client. Make sure that discord is open. Retry?\n")
                hbox.pack_start(label, True, True, 0)
                dialog.vbox.pack_start(hbox, True, True, 0)
                dialog.vbox.show_all()
                response = dialog.run()
                if (response == Gtk.ResponseType.OK):
                    try:
                        RPC.connect()
                        connected = True
                    except ConnectionRefusedError:
                        print('Failed to retry connection to discord')
                elif (response == Gtk.ResponseType.CANCEL):
                    gave_up = True
                dialog.destroy()
            else:
                pass
            # NOTE(review): the dialog is destroyed again after the while/else;
            # this second destroy looks redundant -- confirm before removing.
            dialog.destroy()
    # GObject type name registered for this plugin.
    __gtype_name__ = 'DiscordStatusPlugin'
    # The Rhythmbox shell object Peas injects into the plugin.
    object = GObject.property(type=GObject.Object)
    # Playback bookkeeping shared by the signal handlers below.
    start_date = None
    playing_date = None
    is_playing = False
    def __init__ (self):
        """Initialise the GObject base; plugin wiring happens in do_activate."""
        GObject.Object.__init__ (self)
def do_activate(self):
shell = self.object
sp = shell.props.shell_player
self.psc_id = sp.connect ('playing-song-changed',
self.playing_entry_changed)
self.pc_id = sp.connect ('playing-changed',
self.playing_changed)
self.ec_id = sp.connect ('elapsed-changed',
self.elapsed_changed)
self.pspc_id = sp.connect ('playing-song-property-changed',
self.playing_song_property_changed)
self.RPC.update(state="Playback Stopped", details="Rhythmbox Status Plugin", large_image="rhythmbox", small_image="stop", small_text="Stopped")
    def do_deactivate(self):
        """Plugin teardown: disconnect all handlers and clear/close the RPC."""
        shell = self.object
        sp = shell.props.shell_player
        sp.disconnect (self.psc_id)
        sp.disconnect (self.pc_id)
        sp.disconnect (self.ec_id)
        sp.disconnect (self.pspc_id)
        # Remove this process's presence from Discord before closing the pipe.
        self.RPC.clear(pid=os.getpid())
        self.RPC.close()
def get_info(self, sp):
album = None
title = None
artist = None
duration = None
if not sp.get_playing_entry().get_string(RB.RhythmDBPropType.ALBUM):
album = 'Unknown'
else:
album = sp.get_playing_entry().get_string(RB.RhythmDBPropType.ALBUM)
if not sp.get_playing_entry().get_string(RB.RhythmDBPropType.TITLE):
title = 'Unknown'
else:
title = sp.get_playing_entry().get_string(RB.RhythmDBPropType.TITLE)
if not sp.get_playing_entry().get_string(RB.RhythmDBPropType.ARTIST):
artist = 'Unknown'
else:
artist = sp.get_playing_entry().get_string(RB.RhythmDBPropType.ARTIST)
if not sp.get_playing_entry().get_ulong(RB.RhythmDBPropType.DURATION):
duration = 0
else:
duration = sp.get_playing_entry().get_ulong(RB.RhythmDBPropType.DURATION)
if len(album) < 2:
album = "%s" %(album)
return [album, title, artist, duration]
def playing_song_property_changed(self, sp, uri, property, old, newvalue):
print("playing_song_property_changed: %s %s %s %s" %(uri, property, old, newvalue))
info = self.get_info(sp)
if property == "rb:stream-song-title":
self.is_streaming = True
self.update_streaming_rpc(info, newvalue)
    def update_streaming_rpc(self, info, d):
        """Publish a 'Streaming' presence: state is the stream/song title
        (``info[1]``), details is ``d``; both truncated to 127 characters."""
        self.RPC.update(state=info[1][0:127], details=d[0:127], large_image="rhythmbox", small_image="play", small_text="Streaming", start=int(time.time()))
    def playing_entry_changed(self, sp, entry):
        """Signal handler: the playing song changed; push a fresh presence."""
        if sp.get_playing_entry():
            # Reset both clocks to "now" for the new track.
            self.start_date = int(time.time())
            self.playing_date = self.start_date
            info = self.get_info(sp)
            album = info[0]
            title = info[1]
            artist = info[2]
            duration = info[3]
            # duration == 0 is treated as "this is a stream".
            # NOTE(review): assumes self.is_streaming was initialized
            # elsewhere before the first call -- TODO confirm.
            if duration == 0 and not self.is_streaming:
                self.update_streaming_rpc(info, "Unknown - Unknown")
                # NOTE(review): no return here, so the 'Playing' update below
                # overwrites this streaming status -- confirm intended.
            elif duration == 0 and self.is_streaming:
                self.update_streaming_rpc(info, "Unknown - Unknown")
                return
            else:
                self.is_streaming = False
            details="%s - %s" %(title, artist)
            self.is_playing = True
            start_time = int(time.time())
            pos = sp.get_playing_time().time
            # Publish an end timestamp (countdown) only when time_style == 1.
            end_time = (start_time + duration - pos) if self.time_style == 1 else None
            self.RPC.update(state=album[0:127], details=details[0:127], large_image="rhythmbox", small_image="play", small_text="Playing", start=start_time, end=end_time)
    def playing_changed(self, sp, playing):
        """Signal handler: play/pause toggled; update the Discord presence."""
        album = None
        title = None
        artist = None
        if sp.get_playing_entry():
            info = self.get_info(sp)
            album = info[0]
            title = info[1]
            artist = info[2]
            duration = info[3]
            # duration == 0 marks a stream.
            if duration == 0 and not self.is_streaming:
                self.update_streaming_rpc(info, "Unknown - Unknown")
            elif duration == 0:
                return
            else:
                self.is_streaming = False
        # NOTE(review): when there is no playing entry, album/title stay None
        # and `duration` is unbound, so the slicing/arithmetic below would
        # raise unless the 'not playing and no entry' branch is the only one
        # reachable in that state -- confirm.
        details="%s - %s" %(title, artist)
        start_time = int(time.time())
        pos = sp.get_playing_time().time
        # End timestamp (countdown) only when time_style == 1.
        end_time = (start_time + duration - pos) if self.time_style == 1 else None
        if playing:
            self.is_playing = True
            self.RPC.update(state=album[0:127], details=details[0:127], large_image="rhythmbox", small_image="play", small_text="Playing", start=start_time, end=end_time)
        elif not playing and not sp.get_playing_entry():
            # Stopped: nothing queued at all.
            self.is_playing = False
            self.RPC.update(state="Playback Stopped", details="Rhythmbox Status Plugin", large_image="rhythmbox", small_image="stop", small_text="Stopped")
        else:
            # Paused with a current entry.
            self.is_playing = False
            self.RPC.update(state=album[0:127], details=details[0:127], large_image="rhythmbox", small_image="pause", small_text="Paused")
    def elapsed_changed(self, sp, elapsed):
        """Signal handler fired as playback time advances; resyncs on seeks."""
        # Nothing to do when stopped or when timestamps are disabled.
        if not self.playing_date or not self.is_playing or self.time_style == 0:
            return
        else:
            # Advance our local playback clock by one tick.
            self.playing_date += 1
        # If the local clock still matches the reported elapsed time, the
        # user did not seek and the published timestamps remain valid.
        if self.playing_date - elapsed == self.start_date:
            return
        else:
            if sp.get_playing_entry() and self.is_playing and not elapsed == 0:
                # Seek detected: resync the clock and re-publish the presence.
                self.playing_date = self.start_date + elapsed
                info = self.get_info(sp)
                album = info[0]
                title = info[1]
                artist = info[2]
                duration = info[3]
                if duration == 0 and not self.is_streaming:
                    self.update_streaming_rpc(info, "Unknown - Unknown")
                elif duration == 0:
                    return
                else:
                    self.is_streaming = False
                details="%s - %s" %(title, artist)
                start_time = int(time.time())
                pos = sp.get_playing_time().time
                end_time = (start_time + duration - pos) if self.time_style == 1 else None
                self.RPC.update(state=album[0:127], details=details[0:127], large_image="rhythmbox", small_image="play", small_text="Playing", start=start_time, end=end_time)
| [
"gi.repository.Gtk.Dialog",
"pypresence.Presence",
"gi.repository.Gtk.HBox",
"gi.repository.Notify.uninit",
"gi.require_version",
"gi.repository.GObject.Object.__init__",
"os.path.realpath",
"gi.repository.Gtk.Label",
"gi.repository.Notify.init",
"os.getpid",
"gi.repository.Notify.Notification.n... | [((44, 79), 'gi.require_version', 'gi.require_version', (['"""Notify"""', '"""0.7"""'], {}), "('Notify', '0.7')\n", (62, 79), False, 'import gi\n'), ((80, 112), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (98, 112), False, 'import gi\n'), ((744, 774), 'pypresence.Presence', 'Presence', (['"""589905203533185064"""'], {}), "('589905203533185064')\n", (752, 774), False, 'from pypresence import Presence\n'), ((2517, 2554), 'gi.repository.GObject.property', 'GObject.property', ([], {'type': 'GObject.Object'}), '(type=GObject.Object)\n', (2533, 2554), False, 'from gi.repository import Gio, GLib, GObject, Peas\n'), ((498, 513), 'json.load', 'json.load', (['file'], {}), '(file)\n', (507, 513), False, 'import json\n'), ((605, 629), 'gi.repository.Notify.init', 'Notify.init', (['"""Rhythmbox"""'], {}), "('Rhythmbox')\n", (616, 629), False, 'from gi.repository import Notify, Gtk\n'), ((2645, 2674), 'gi.repository.GObject.Object.__init__', 'GObject.Object.__init__', (['self'], {}), '(self)\n', (2668, 2674), False, 'from gi.repository import Gio, GLib, GObject, Peas\n'), ((409, 435), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (425, 435), False, 'import os\n'), ((978, 993), 'gi.repository.Notify.uninit', 'Notify.uninit', ([], {}), '()\n', (991, 993), False, 'from gi.repository import Notify, Gtk\n'), ((3596, 3607), 'os.getpid', 'os.getpid', ([], {}), '()\n', (3605, 3607), False, 'import os\n'), ((5256, 5267), 'time.time', 'time.time', ([], {}), '()\n', (5265, 5267), False, 'import time\n'), ((5811, 5822), 'time.time', 'time.time', ([], {}), '()\n', (5820, 5822), False, 'import time\n'), ((6623, 6634), 'time.time', 'time.time', ([], {}), '()\n', (6632, 6634), False, 'import time\n'), ((1316, 1331), 'gi.repository.Notify.uninit', 'Notify.uninit', ([], {}), '()\n', (1329, 1331), False, 'from gi.repository import Notify, Gtk\n'), ((1499, 1661), 
'gi.repository.Gtk.Dialog', 'Gtk.Dialog', ([], {'title': '"""Discord Rhythmbox Status Plugin"""', 'parent': 'None', 'buttons': '(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.ResponseType.OK)'}), "(title='Discord Rhythmbox Status Plugin', parent=None, buttons=(\n Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL, Gtk.STOCK_OK, Gtk.\n ResponseType.OK))\n", (1509, 1661), False, 'from gi.repository import Notify, Gtk\n'), ((1797, 1807), 'gi.repository.Gtk.HBox', 'Gtk.HBox', ([], {}), '()\n', (1805, 1807), False, 'from gi.repository import Notify, Gtk\n'), ((1825, 1933), 'gi.repository.Gtk.Label', 'Gtk.Label', (['"""\nFailed to connect to the discord client. Make sure that discord is open. Retry?\n"""'], {}), '(\n """\nFailed to connect to the discord client. Make sure that discord is open. Retry?\n"""\n )\n', (1834, 1933), False, 'from gi.repository import Notify, Gtk\n'), ((5136, 5147), 'time.time', 'time.time', ([], {}), '()\n', (5145, 5147), False, 'import time\n'), ((8165, 8176), 'time.time', 'time.time', ([], {}), '()\n', (8174, 8176), False, 'import time\n'), ((880, 966), 'gi.repository.Notify.Notification.new', 'Notify.Notification.new', (['"""Rhythmbox Discord Status Plugin"""', '"""Connected to Discord"""'], {}), "('Rhythmbox Discord Status Plugin',\n 'Connected to Discord')\n", (903, 966), False, 'from gi.repository import Notify, Gtk\n'), ((1173, 1304), 'gi.repository.Notify.Notification.new', 'Notify.Notification.new', (['"""Rhythmbox Discord Status Plugin"""', '"""Failed to connect to discord: ConnectionRefused. Is discord open?"""'], {}), "('Rhythmbox Discord Status Plugin',\n 'Failed to connect to discord: ConnectionRefused. Is discord open?')\n", (1196, 1304), False, 'from gi.repository import Notify, Gtk\n')] |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class SqlTuningAdvisorTaskSummaryFindingCounts(object):
    """Finding counts for the SQL Tuning Advisor summary report.

    Each attribute is the count (``int``) of distinct SQL statements that fall
    into the corresponding recommendation category.
    """

    def __init__(self, **kwargs):
        """Create a new instance; every count defaults to None.

        Supported keyword arguments (all ``int``), mirroring the properties:
        ``recommended_sql_profile``, ``implemented_sql_profile``, ``index``,
        ``restructure``, ``statistics``, ``alternate_plan``.
        """
        # Type table and JSON-key table consumed by the OCI model machinery.
        self.swagger_types = {
            'recommended_sql_profile': 'int',
            'implemented_sql_profile': 'int',
            'index': 'int',
            'restructure': 'int',
            'statistics': 'int',
            'alternate_plan': 'int'
        }
        self.attribute_map = {
            'recommended_sql_profile': 'recommendedSqlProfile',
            'implemented_sql_profile': 'implementedSqlProfile',
            'index': 'index',
            'restructure': 'restructure',
            'statistics': 'statistics',
            'alternate_plan': 'alternatePlan'
        }
        self._recommended_sql_profile = None
        self._implemented_sql_profile = None
        self._index = None
        self._restructure = None
        self._statistics = None
        self._alternate_plan = None

    @property
    def recommended_sql_profile(self):
        """int: count of distinct SQL statements with recommended SQL profiles."""
        return self._recommended_sql_profile

    @recommended_sql_profile.setter
    def recommended_sql_profile(self, value):
        self._recommended_sql_profile = value

    @property
    def implemented_sql_profile(self):
        """int: count of distinct SQL statements with implemented SQL profiles."""
        return self._implemented_sql_profile

    @implemented_sql_profile.setter
    def implemented_sql_profile(self, value):
        self._implemented_sql_profile = value

    @property
    def index(self):
        """int: count of distinct SQL statements with index recommendations."""
        return self._index

    @index.setter
    def index(self, value):
        self._index = value

    @property
    def restructure(self):
        """int: count of distinct SQL statements with restructure SQL recommendations."""
        return self._restructure

    @restructure.setter
    def restructure(self, value):
        self._restructure = value

    @property
    def statistics(self):
        """int: count of distinct SQL statements with stale/missing optimizer statistics recommendations."""
        return self._statistics

    @statistics.setter
    def statistics(self, value):
        self._statistics = value

    @property
    def alternate_plan(self):
        """int: count of distinct SQL statements with alternative plan recommendations."""
        return self._alternate_plan

    @alternate_plan.setter
    def alternate_plan(self, value):
        self._alternate_plan = value

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        # Equal iff the other object is non-None and has the same attributes.
        return other is not None and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| [
"oci.util.formatted_flat_dict"
] | [((7845, 7870), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (7864, 7870), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')] |
from project.settings import INSTALLED_APPS, ALLOWED_HOSTS, BASE_DIR
import os
# Overlay on the base project settings: register the webpack loader and the
# local app, open up host matching, and point the static/webpack config at
# the built Vue bundle.
for _extra_app in ('webpack_loader', 'app'):
    INSTALLED_APPS.append(_extra_app)
ALLOWED_HOSTS.append('*')

STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'app', 'vueapp', 'dist', 'static'),
]

WEBPACK_LOADER = {
    'DEFAULT': {
        'BUNDLE_DIR_NAME': 'static/vueapp/',
        'STATS_FILE': os.path.join(BASE_DIR, 'app', 'vueapp', 'webpack-stats.json'),
    },
}

# Addresses treated as internal (e.g. for the debug toolbar).
INTERNAL_IPS = (
    '0.0.0.0',
    '127.0.0.1',
)
"project.settings.INSTALLED_APPS.append",
"project.settings.ALLOWED_HOSTS.append",
"os.path.join"
] | [((80, 119), 'project.settings.INSTALLED_APPS.append', 'INSTALLED_APPS.append', (['"""webpack_loader"""'], {}), "('webpack_loader')\n", (101, 119), False, 'from project.settings import INSTALLED_APPS, ALLOWED_HOSTS, BASE_DIR\n'), ((122, 150), 'project.settings.INSTALLED_APPS.append', 'INSTALLED_APPS.append', (['"""app"""'], {}), "('app')\n", (143, 150), False, 'from project.settings import INSTALLED_APPS, ALLOWED_HOSTS, BASE_DIR\n'), ((154, 179), 'project.settings.ALLOWED_HOSTS.append', 'ALLOWED_HOSTS.append', (['"""*"""'], {}), "('*')\n", (174, 179), False, 'from project.settings import INSTALLED_APPS, ALLOWED_HOSTS, BASE_DIR\n'), ((297, 354), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""app"""', '"""vueapp"""', '"""dist"""', '"""static"""'], {}), "(BASE_DIR, 'app', 'vueapp', 'dist', 'static')\n", (309, 354), False, 'import os\n'), ((460, 521), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""app"""', '"""vueapp"""', '"""webpack-stats.json"""'], {}), "(BASE_DIR, 'app', 'vueapp', 'webpack-stats.json')\n", (472, 521), False, 'import os\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import numpy as np
from PyQt5 import QtWidgets as QtWid
import pyqtgraph as pg
from dvg_pyqtgraph_threadsafe import PlotCurve
USE_OPENGL = True
if USE_OPENGL:
    print("OpenGL acceleration: Enabled")
    # Set all three pyqtgraph config flags with a single call.
    pg.setConfigOptions(useOpenGL=True, antialias=True, enableExperimental=True)
# ------------------------------------------------------------------------------
# MainWindow
# ------------------------------------------------------------------------------
class MainWindow(QtWid.QWidget):
    """Single-plot demo window embedding a thread-safe pyqtgraph curve."""

    def __init__(self, parent=None, **kwargs):
        super().__init__(parent, **kwargs)
        self.setGeometry(350, 50, 800, 660)
        self.setWindowTitle("Demo: dvg_pyqtgraph_threadsafe")
        # GraphicsLayoutWidget
        self.gw = pg.GraphicsLayoutWidget()
        self.plot_1 = self.gw.addPlot()
        self.plot_1.showGrid(x=1, y=1)
        self.plot_1.setRange(
            xRange=[0, 5], yRange=[0, 4], disableAutoRange=True,
        )
        # Wrap the plain pyqtgraph curve in a thread-safe PlotCurve.
        self.tscurve = PlotCurve(
            linked_curve=self.plot_1.plot(
                pen=pg.mkPen(color=[255, 255, 0], width=3)
            ),
        )
        # The y-data deliberately contains a NaN -- presumably to exercise
        # the curve's NaN/gap handling; confirm against the library docs.
        x = np.array([0, 1, 2, 3, 4])
        y = np.array([0, 1, np.nan, 3, 3])
        # x = np.array([np.nan] * 5)
        # y = np.array([np.nan] * 5)
        self.tscurve.setData(x, y)
        self.tscurve.update()
        # Round up full window
        hbox = QtWid.QHBoxLayout(self)
        hbox.addWidget(self.gw, 1)
# ------------------------------------------------------------------------------
# Main
# ------------------------------------------------------------------------------
if __name__ == "__main__":
app = QtWid.QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
| [
"PyQt5.QtWidgets.QHBoxLayout",
"pyqtgraph.setConfigOptions",
"numpy.array",
"PyQt5.QtWidgets.QApplication",
"pyqtgraph.GraphicsLayoutWidget",
"pyqtgraph.mkPen"
] | [((267, 302), 'pyqtgraph.setConfigOptions', 'pg.setConfigOptions', ([], {'useOpenGL': '(True)'}), '(useOpenGL=True)\n', (286, 302), True, 'import pyqtgraph as pg\n'), ((307, 342), 'pyqtgraph.setConfigOptions', 'pg.setConfigOptions', ([], {'antialias': '(True)'}), '(antialias=True)\n', (326, 342), True, 'import pyqtgraph as pg\n'), ((347, 391), 'pyqtgraph.setConfigOptions', 'pg.setConfigOptions', ([], {'enableExperimental': '(True)'}), '(enableExperimental=True)\n', (366, 391), True, 'import pyqtgraph as pg\n'), ((1764, 1792), 'PyQt5.QtWidgets.QApplication', 'QtWid.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (1782, 1792), True, 'from PyQt5 import QtWidgets as QtWid\n'), ((852, 877), 'pyqtgraph.GraphicsLayoutWidget', 'pg.GraphicsLayoutWidget', ([], {}), '()\n', (875, 877), True, 'import pyqtgraph as pg\n'), ((1238, 1263), 'numpy.array', 'np.array', (['[0, 1, 2, 3, 4]'], {}), '([0, 1, 2, 3, 4])\n', (1246, 1263), True, 'import numpy as np\n'), ((1276, 1306), 'numpy.array', 'np.array', (['[0, 1, np.nan, 3, 3]'], {}), '([0, 1, np.nan, 3, 3])\n', (1284, 1306), True, 'import numpy as np\n'), ((1494, 1517), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWid.QHBoxLayout', (['self'], {}), '(self)\n', (1511, 1517), True, 'from PyQt5 import QtWidgets as QtWid\n'), ((1161, 1199), 'pyqtgraph.mkPen', 'pg.mkPen', ([], {'color': '[255, 255, 0]', 'width': '(3)'}), '(color=[255, 255, 0], width=3)\n', (1169, 1199), True, 'import pyqtgraph as pg\n')] |
from turtle import forward, left, right, width, color, clearscreen
# Draw a six-fold star/snowflake-like figure with turtle graphics.
clearscreen()
color("lightblue")
width(3)

# One arm of the figure expressed as a table of (command, amount) steps,
# in the same order as the original inline sequence.
_ARM = (
    (forward, 50), (left, 60), (forward, 25), (left, 180),
    (forward, 25), (left, 60), (forward, 25), (left, 180),
    (forward, 25), (right, 120), (forward, 25), (left, 180),
    (forward, 75), (left, 120),
)
for _ in range(6):
    for _op, _amount in _ARM:
        _op(_amount)
| [
"turtle.width",
"turtle.color",
"turtle.forward",
"turtle.right",
"turtle.left",
"turtle.clearscreen"
] | [((69, 82), 'turtle.clearscreen', 'clearscreen', ([], {}), '()\n', (80, 82), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((83, 101), 'turtle.color', 'color', (['"""lightblue"""'], {}), "('lightblue')\n", (88, 101), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((102, 110), 'turtle.width', 'width', (['(3)'], {}), '(3)\n', (107, 110), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((135, 146), 'turtle.forward', 'forward', (['(50)'], {}), '(50)\n', (142, 146), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((151, 159), 'turtle.left', 'left', (['(60)'], {}), '(60)\n', (155, 159), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((164, 175), 'turtle.forward', 'forward', (['(25)'], {}), '(25)\n', (171, 175), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((180, 189), 'turtle.left', 'left', (['(180)'], {}), '(180)\n', (184, 189), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((194, 205), 'turtle.forward', 'forward', (['(25)'], {}), '(25)\n', (201, 205), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((210, 218), 'turtle.left', 'left', (['(60)'], {}), '(60)\n', (214, 218), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((223, 234), 'turtle.forward', 'forward', (['(25)'], {}), '(25)\n', (230, 234), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((239, 248), 'turtle.left', 'left', (['(180)'], {}), '(180)\n', (243, 248), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((253, 264), 'turtle.forward', 'forward', (['(25)'], {}), '(25)\n', (260, 264), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((269, 279), 'turtle.right', 'right', (['(120)'], {}), '(120)\n', (274, 279), False, 'from 
turtle import forward, left, right, width, color, clearscreen\n'), ((284, 295), 'turtle.forward', 'forward', (['(25)'], {}), '(25)\n', (291, 295), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((300, 309), 'turtle.left', 'left', (['(180)'], {}), '(180)\n', (304, 309), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((314, 325), 'turtle.forward', 'forward', (['(75)'], {}), '(75)\n', (321, 325), False, 'from turtle import forward, left, right, width, color, clearscreen\n'), ((330, 339), 'turtle.left', 'left', (['(120)'], {}), '(120)\n', (334, 339), False, 'from turtle import forward, left, right, width, color, clearscreen\n')] |
import numpy as np
import itertools
from .contrib import compress_filter, smooth, residual_model
from .contrib import reduce_interferences
def expectation_maximization(y, x, iterations=2, verbose=0, eps=None):
    r"""Expectation maximization algorithm, for refining source separation
    estimates.
    This algorithm allows to make source separation results better by
    enforcing multichannel consistency for the estimates. This usually means
    a better perceptual quality in terms of spatial artifacts.
    The implementation follows the details presented in [1]_, taking
    inspiration from the original EM algorithm proposed in [2]_ and its
    weighted refinement proposed in [3]_, [4]_.
    It works by iteratively:
    * Re-estimate source parameters (power spectral densities and spatial
      covariance matrices) through :func:`get_local_gaussian_model`.
    * Separate again the mixture with the new parameters by first computing
      the new modelled mixture covariance matrices with :func:`get_mix_model`,
      prepare the Wiener filters through :func:`wiener_gain` and apply them
      with :func:`apply_filter``.
    References
    ----------
    .. [1] <NAME> and <NAME> and <NAME> and <NAME> and <NAME> and
        <NAME> and <NAME>, "Improving music source separation based
        on deep neural networks through data augmentation and network
        blending." 2017 IEEE International Conference on Acoustics, Speech
        and Signal Processing (ICASSP). IEEE, 2017.
    .. [2] <NAME> and <NAME> and R.Gribonval. "Under-determined
        reverberant audio source separation using a full-rank spatial
        covariance model." IEEE Transactions on Audio, Speech, and Language
        Processing 18.7 (2010): 1830-1840.
    .. [3] <NAME> and <NAME> and <NAME>. "Multichannel audio source
        separation with deep neural networks." IEEE/ACM Transactions on Audio,
        Speech, and Language Processing 24.9 (2016): 1652-1664.
    .. [4] <NAME> and <NAME> and <NAME>. "Multichannel music
        separation with deep neural networks." 2016 24th European Signal
        Processing Conference (EUSIPCO). IEEE, 2016.
    .. [5] <NAME> and <NAME> and <NAME> "Kernel additive models for
        source separation." IEEE Transactions on Signal Processing
        62.16 (2014): 4298-4310.
    Parameters
    ----------
    y: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
        initial estimates for the sources
    x: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        complex STFT of the mixture signal
    iterations: int [scalar]
        number of iterations for the EM algorithm.
    verbose: boolean
        display some information if True
    eps: float or None [scalar]
        The epsilon value to use for regularization and filters.
        If None, the default will use the epsilon of np.real(x) dtype.
    Returns
    -------
    y: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
        estimated sources after iterations
    v: np.ndarray [shape=(nb_frames, nb_bins, nb_sources)]
        estimated power spectral densities
    R: np.ndarray [shape=(nb_bins, nb_channels, nb_channels, nb_sources)]
        estimated spatial covariance matrices
    Note
    -----
    * You need an initial estimate for the sources to apply this
      algorithm. This is precisely what the :func:`wiener` function does.
    * This algorithm *is not* an implementation of the "exact" EM
      proposed in [1]_. In particular, it does not compute the posterior
      covariance matrices the same (exact) way. Instead, it uses the
      simplified approximate scheme initially proposed in [5]_ and further
      refined in [3]_, [4]_, that boils down to just take the empirical
      covariance of the recent source estimates, followed by a weighted
      average for the update of the spatial covariance matrix. It has been
      empirically demonstrated that this simplified algorithm is more
      robust for music separation.
    Warning
    -------
    It is *very* important to make sure `x.dtype` is `np.complex`
    if you want double precision, because this function will **not**
    do such conversion for you from `np.complex64`, in case you want the
    smaller RAM usage on purpose.
    It is usually always better in terms of quality to have double
    precision, by e.g. calling :func:`expectation_maximization`
    with ``x.astype(np.complex)``.
    This is notably needed if you let common deep learning frameworks like
    PyTorch or TensorFlow do the STFT, because this usually happens in
    single precision.
    """
    # to avoid dividing by zero
    if eps is None:
        eps = np.finfo(np.real(x[0]).dtype).eps
    # dimensions
    (nb_frames, nb_bins, nb_channels) = x.shape
    nb_sources = y.shape[-1]
    # allocate the spatial covariance matrices and PSD
    R = np.zeros((nb_bins, nb_channels, nb_channels, nb_sources), x.dtype)
    v = np.zeros((nb_frames, nb_bins, nb_sources))
    if verbose:
        print('Number of iterations: ', iterations)
    # Tiny multiple of the identity added to each mixture covariance so the
    # matrix inversion below stays well conditioned.
    regularization = np.sqrt(eps) * (
        np.tile(np.eye(nb_channels, dtype=np.complex64),
                (1, nb_bins, 1, 1)))
    for it in range(iterations):
        # constructing the mixture covariance matrix. Doing it with a loop
        # to avoid storing anytime in RAM the whole 6D tensor
        if verbose:
            print('EM, iteration %d' % (it+1))
        for j in range(nb_sources):
            # update the spectrogram model for source j
            v[..., j], R[..., j] = get_local_gaussian_model(
                y[..., j],
                eps)
        # Re-separate frame by frame with the refreshed model.
        for t in range(nb_frames):
            Cxx = get_mix_model(v[None, t, ...], R)
            Cxx += regularization
            inv_Cxx = _invert(Cxx, eps)
            # separate the sources
            for j in range(nb_sources):
                W_j = wiener_gain(v[None, t, ..., j], R[..., j], inv_Cxx)
                y[t, ..., j] = apply_filter(x[None, t, ...], W_j)[0]
    return y, v, R
def wiener(v, x, iterations=1, use_softmask=True, eps=None):
    """Wiener-based separation for multichannel audio.
    The method uses the (possibly multichannel) spectrograms `v` of the
    sources to separate the (complex) Short Term Fourier Transform `x` of the
    mix. Separation is done in a sequential way by:
    * Getting an initial estimate. This can be done in two ways: either by
      directly using the spectrograms with the mixture phase, or
      by using :func:`softmask`.
    * Refining these initial estimates through a call to
      :func:`expectation_maximization`.
    This implementation also allows to specify the epsilon value used for
    regularization. It is based on [1]_, [2]_, [3]_, [4]_.
    References
    ----------
    .. [1] <NAME> and <NAME> and <NAME> and <NAME> and <NAME> and
        <NAME> and <NAME>, "Improving music source separation based
        on deep neural networks through data augmentation and network
        blending." 2017 IEEE International Conference on Acoustics, Speech
        and Signal Processing (ICASSP). IEEE, 2017.
    .. [2] <NAME> and <NAME> and <NAME>. "Multichannel audio source
        separation with deep neural networks." IEEE/ACM Transactions on Audio,
        Speech, and Language Processing 24.9 (2016): 1652-1664.
    .. [3] <NAME> and <NAME> and <NAME>. "Multichannel music
        separation with deep neural networks." 2016 24th European Signal
        Processing Conference (EUSIPCO). IEEE, 2016.
    .. [4] <NAME> and <NAME> and <NAME> "Kernel additive models for
        source separation." IEEE Transactions on Signal Processing
        62.16 (2014): 4298-4310.
    Parameters
    ----------
    v: np.ndarray [shape=(nb_frames, nb_bins, {1,nb_channels}, nb_sources)]
        spectrograms of the sources. This is a nonnegative tensor that is
        usually the output of the actual separation method of the user. The
        spectrograms may be mono, but they need to be 4-dimensional in all
        cases.
    x: np.ndarray [complex, shape=(nb_frames, nb_bins, nb_channels)]
        STFT of the mixture signal.
    iterations: int [scalar]
        number of iterations for the EM algorithm
    use_softmask: boolean
        * if `False`, then the mixture phase will directly be used with the
          spectrogram as initial estimates.
        * if `True`, a softmasking strategy will be used as described in
          :func:`softmask`.
    eps: {None, float}
        Epsilon value to use for computing the separations. This is used
        whenever division with a model energy is performed, i.e. when
        softmasking and when iterating the EM.
        It can be understood as the energy of the additional white noise
        that is taken out when separating.
        If `None`, the default value is taken as `np.finfo(np.real(x[0])).eps`.
    Returns
    -------
    y: np.ndarray
            [complex, shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
        STFT of estimated sources
    Note
    ----
    * Be careful that you need *magnitude spectrogram estimates* for the
      case `softmask==False`.
    * We recommend to use `softmask=False` only if your spectrogram model is
      pretty good, e.g. when the output of a deep neural net. In the case
      it is not so great, opt for an initial softmasking strategy.
    * The epsilon value will have a huge impact on performance. If it's large,
      only the parts of the signal with a significant energy will be kept in
      the sources. This epsilon then directly controls the energy of the
      reconstruction error.
    Warning
    -------
    As in :func:`expectation_maximization`, we recommend converting the
    mixture `x` to double precision `np.complex` *before* calling
    :func:`wiener`.
    """
    if use_softmask:
        y = softmask(v, x, eps=eps)
    else:
        # Combine the magnitude estimates with the mixture phase directly.
        y = v * np.exp(1j*np.angle(x[..., None]))
    if not iterations:
        return y
    # we need to refine the estimates. Scales down the estimates for
    # numerical stability
    max_abs = max(1, np.abs(x).max()/10.)
    # NOTE: x is divided in place, so the caller's array is scaled too.
    x /= max_abs
    y = expectation_maximization(y/max_abs, x, iterations, eps=eps)[0]
    return y*max_abs
def softmask(v, x, logit=None, eps=None):
    """Separates a mixture with a ratio mask, using the provided sources
    spectrograms estimates. Additionally allows compressing the mask with
    a logit function for soft binarization.
    The filter does *not* take multichannel correlations into account.

    The masking strategy can be traced back to the work of Wiener in the
    case of *power* spectrograms [1]_. In the case of *fractional* spectrograms
    like magnitude, this filter is often referred to a "ratio mask", and
    has been shown to be the optimal separation procedure under alpha-stable
    assumptions [2]_.

    References
    ----------
    .. [1] Wiener, "Extrapolation, Inerpolation, and Smoothing of Stationary
        Time Series." 1949.
    .. [2] Liutkus and Badeau. "Generalized Wiener filtering with
        fractional power spectrograms." 2015 IEEE International Conference on
        Acoustics, Speech and Signal Processing (ICASSP). IEEE, 2015.

    Parameters
    ----------
    v: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_sources)]
        spectrograms of the sources
    x: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        mixture signal
    logit: {None, float between 0 and 1}
        enable a compression of the filter. If not None, it is the threshold
        value for the logit function: a softmask above this threshold is
        brought closer to 1, and a softmask below is brought closer to 0.
    eps: {None, float}
        regularization added to the total energy to avoid division by zero.
        If `None`, defaults to the machine epsilon of `x`'s real dtype.

    Returns
    -------
    ndarray, shape=(nb_frames, nb_bins, nb_channels, nb_sources)
        estimated sources
    """
    # to avoid dividing by zero
    if eps is None:
        eps = np.finfo(np.real(x[0]).dtype).eps
    total_energy = np.sum(v, axis=-1, keepdims=True)
    # ratio mask; renamed from `filter` to avoid shadowing the builtin
    mask = v/(eps + total_energy.astype(x.dtype))
    if logit is not None:
        mask = compress_filter(mask, eps, thresh=logit, multichannel=False)
    return mask * x[..., None]
def _invert(M, eps):
"""
Invert matrices, with special fast handling of the 1x1 and 2x2 cases.
Will generate errors if the matrices are singular: user must handle this
through his own regularization schemes.
Parameters
----------
M: np.ndarray [shape=(..., nb_channels, nb_channels)]
matrices to invert: must be square along the last two dimensions
eps: [scalar]
regularization parameter to use _only in the case of matrices
bigger than 2x2
Returns
-------
invM: np.ndarray, [shape=M.shape]
inverses of M
"""
nb_channels = M.shape[-1]
if nb_channels == 1:
# scalar case
invM = 1.0/(M+eps)
elif nb_channels == 2:
# two channels case: analytical expression
det = (
M[..., 0, 0]*M[..., 1, 1] -
M[..., 0, 1]*M[..., 1, 0])
invDet = 1.0/(det)
invM = np.empty_like(M)
invM[..., 0, 0] = invDet*M[..., 1, 1]
invM[..., 1, 0] = -invDet*M[..., 1, 0]
invM[..., 0, 1] = -invDet*M[..., 0, 1]
invM[..., 1, 1] = invDet*M[..., 0, 0]
else:
# general case : no use of analytical expression (slow!)
invM = np.linalg.pinv(M, eps)
return invM
def wiener_gain(v_j, R_j, inv_Cxx):
    """
    Compute the Wiener gain for separating one source, given all parameters.

    The returned matrix, applied to the mix, yields the posterior mean of
    the source, as in [1]_.

    References
    ----------
    .. [1] Duong et al. "Under-determined reverberant audio source separation
        using a full-rank spatial covariance model." IEEE Transactions on
        Audio, Speech, and Language Processing 18.7 (2010): 1830-1840.

    Parameters
    ----------
    v_j: np.ndarray
        power spectral density of the target source (broadcast against the
        trailing channel axes of the gain).
    R_j: np.ndarray [shape=(nb_bins, nb_channels, nb_channels)]
        spatial covariance matrix of the target source
    inv_Cxx: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        inverse of the mixture covariance matrices

    Returns
    -------
    G: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        wiener filtering matrices; apply to the mix (e.g. through
        :func:`apply_filter`) to get the target source estimate.
    """
    nb_channels = R_j.shape[1]
    # accumulate G = v_j * (R_j inv_Cxx), entry by entry
    G = np.zeros_like(inv_Cxx)
    for row, col, k in itertools.product(range(nb_channels), repeat=3):
        G[..., row, col] += (R_j[None, :, row, k] * inv_Cxx[..., k, col])
    G *= v_j[..., None, None]
    return G
def apply_filter(x, W):
    """
    Apply a filtering matrix to the mixture: a per-bin matrix multiplication.

    Parameters
    ----------
    x: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        STFT of the signal on which to apply the filter.
    W: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        filtering matrices, as returned, e.g. by :func:`wiener_gain`

    Returns
    -------
    y_hat: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        filtered signal
    """
    nb_channels = W.shape[-1]
    # sum the per-channel contributions; complex start value keeps the
    # result complex even for an empty channel range
    return sum((W[..., ch] * x[..., ch, None] for ch in range(nb_channels)),
               0 + 0j)
def get_mix_model(v, R):
    """
    Compute the model covariance of a mixture based on local Gaussian models:
    the sum over sources of v[..., j] * R[..., j].

    Parameters
    ----------
    v: np.ndarray [shape=(nb_frames, nb_bins, nb_sources)]
        Power spectral densities for the sources
    R: np.ndarray [shape=(nb_bins, nb_channels, nb_channels, nb_sources)]
        Spatial covariance matrices of each sources

    Returns
    -------
    Cxx: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
        Covariance matrix for the mixture
    """
    nb_frames, nb_bins, nb_sources = v.shape
    nb_channels = R.shape[1]
    Cxx = np.zeros((nb_frames, nb_bins, nb_channels, nb_channels), R.dtype)
    # accumulate each source's weighted spatial covariance
    for src in range(nb_sources):
        Cxx += v[..., src, None, None] * R[None, ..., src]
    return Cxx
def _covariance(y_j):
"""
Compute the empirical covariance for a source.
Parameters
----------
y_j: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)].
complex stft of the source.
Returns
-------
Cj: np.ndarray [shape=(nb_frames, nb_bins, nb_channels, nb_channels)]
just y_j * conj(y_j.T): empirical covariance for each TF bin.
"""
(nb_frames, nb_bins, nb_channels) = y_j.shape
Cj = np.zeros((nb_frames, nb_bins, nb_channels, nb_channels),
y_j.dtype)
for (i1, i2) in itertools.product(*(range(nb_channels),)*2):
Cj[..., i1, i2] += y_j[..., i1] * np.conj(y_j[..., i2])
return Cj
def get_local_gaussian_model(y_j, eps=1.):
    r"""
    Compute the local Gaussian model [1]_ for a source given the complex STFT:
    first the power spectral densities, then the spatial covariance matrix,
    as done in [1]_, [2]_.

    References
    ----------
    .. [1] Duong et al. "Under-determined reverberant audio source separation
        using a full-rank spatial covariance model." IEEE Transactions on
        Audio, Speech, and Language Processing 18.7 (2010): 1830-1840.
    .. [2] Liutkus et al. "Low bitrate informed source separation of realistic
        mixtures." 2013 IEEE International Conference on Acoustics, Speech
        and Signal Processing. IEEE, 2013.

    Parameters
    ----------
    y_j: np.ndarray [shape=(nb_frames, nb_bins, nb_channels)]
        complex stft of the source.
    eps: float [scalar]
        regularization term

    Returns
    -------
    v_j: np.ndarray [shape=(nb_frames, nb_bins)]
        power spectral density of the source
    R_J: np.ndarray [shape=(nb_bins, nb_channels, nb_channels)]
        Spatial covariance matrix of the source
    """
    # PSD: squared magnitude averaged over channels
    v_j = np.mean(np.abs(y_j)**2, axis=2)
    # accumulate the empirical covariance frame by frame, weighted by the
    # (eps-regularized) PSD
    R_j = 0
    weight = eps
    for frame in range(y_j.shape[0]):
        R_j = R_j + _covariance(y_j[None, frame, ...])
        weight = weight + v_j[None, frame, ...]
    R_j /= weight[..., None, None]
    return v_j, R_j
| [
"numpy.abs",
"numpy.eye",
"numpy.sqrt",
"numpy.linalg.pinv",
"numpy.conj",
"numpy.angle",
"numpy.sum",
"numpy.zeros",
"numpy.real",
"numpy.empty_like",
"numpy.zeros_like"
] | [((4979, 5045), 'numpy.zeros', 'np.zeros', (['(nb_bins, nb_channels, nb_channels, nb_sources)', 'x.dtype'], {}), '((nb_bins, nb_channels, nb_channels, nb_sources), x.dtype)\n', (4987, 5045), True, 'import numpy as np\n'), ((5054, 5096), 'numpy.zeros', 'np.zeros', (['(nb_frames, nb_bins, nb_sources)'], {}), '((nb_frames, nb_bins, nb_sources))\n', (5062, 5096), True, 'import numpy as np\n'), ((12060, 12093), 'numpy.sum', 'np.sum', (['v'], {'axis': '(-1)', 'keepdims': '(True)'}), '(v, axis=-1, keepdims=True)\n', (12066, 12093), True, 'import numpy as np\n'), ((14777, 14799), 'numpy.zeros_like', 'np.zeros_like', (['inv_Cxx'], {}), '(inv_Cxx)\n', (14790, 14799), True, 'import numpy as np\n'), ((16346, 16411), 'numpy.zeros', 'np.zeros', (['(nb_frames, nb_bins, nb_channels, nb_channels)', 'R.dtype'], {}), '((nb_frames, nb_bins, nb_channels, nb_channels), R.dtype)\n', (16354, 16411), True, 'import numpy as np\n'), ((16965, 17032), 'numpy.zeros', 'np.zeros', (['(nb_frames, nb_bins, nb_channels, nb_channels)', 'y_j.dtype'], {}), '((nb_frames, nb_bins, nb_channels, nb_channels), y_j.dtype)\n', (16973, 17032), True, 'import numpy as np\n'), ((5187, 5199), 'numpy.sqrt', 'np.sqrt', (['eps'], {}), '(eps)\n', (5194, 5199), True, 'import numpy as np\n'), ((5224, 5263), 'numpy.eye', 'np.eye', (['nb_channels'], {'dtype': 'np.complex64'}), '(nb_channels, dtype=np.complex64)\n', (5230, 5263), True, 'import numpy as np\n'), ((13200, 13216), 'numpy.empty_like', 'np.empty_like', (['M'], {}), '(M)\n', (13213, 13216), True, 'import numpy as np\n'), ((13493, 13515), 'numpy.linalg.pinv', 'np.linalg.pinv', (['M', 'eps'], {}), '(M, eps)\n', (13507, 13515), True, 'import numpy as np\n'), ((17158, 17179), 'numpy.conj', 'np.conj', (['y_j[..., i2]'], {}), '(y_j[..., i2])\n', (17165, 17179), True, 'import numpy as np\n'), ((18377, 18388), 'numpy.abs', 'np.abs', (['y_j'], {}), '(y_j)\n', (18383, 18388), True, 'import numpy as np\n'), ((4795, 4808), 'numpy.real', 'np.real', (['x[0]'], {}), 
'(x[0])\n', (4802, 4808), True, 'import numpy as np\n'), ((10034, 10056), 'numpy.angle', 'np.angle', (['x[..., None]'], {}), '(x[..., None])\n', (10042, 10056), True, 'import numpy as np\n'), ((10216, 10225), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (10222, 10225), True, 'import numpy as np\n'), ((12016, 12029), 'numpy.real', 'np.real', (['x[0]'], {}), '(x[0])\n', (12023, 12029), True, 'import numpy as np\n')] |
# coding=utf-8
import logging
from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, \
LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize
logger = logging.getLogger(__name__)
class LegendasTVSubtitle(_LegendasTVSubtitle):
    """LegendasTV subtitle that keeps only lightweight archive metadata."""

    def __init__(self, language, type, title, year, imdb_id, season, archive, name):
        super(LegendasTVSubtitle, self).__init__(
            language, type, title, year, imdb_id, season, archive, name)
        # Drop the downloaded payload; keep only the archive's name/link.
        self.archive.content = None
        self.release_info = archive.name
        self.page_link = archive.link

    def make_picklable(self):
        """Strip the archive payload so the object can be pickled."""
        self.archive.content = None
        return self

    def get_matches(self, video, hearing_impaired=False):
        matches = set()
        if self.type == 'episode' and isinstance(video, Episode):
            # series name
            if video.series and sanitize(self.title) == sanitize(video.series):
                matches.add('series')
            # year (original series without a year also counts as a match)
            if (video.original_series and self.year is None
                    or video.year and video.year == self.year):
                matches.add('year')
            # imdb id of the series
            if video.series_imdb_id and self.imdb_id == video.series_imdb_id:
                matches.add('series_imdb_id')
        elif self.type == 'movie' and isinstance(video, Movie):
            # movie title
            if video.title and sanitize(self.title) == sanitize(video.title):
                matches.add('title')
            # year
            if video.year and self.year == video.year:
                matches.add('year')
            # imdb id
            if video.imdb_id and self.imdb_id == video.imdb_id:
                matches.add('imdb_id')
        # release-name based matches
        matches |= guess_matches(
            video, guessit(self.name, {'type': self.type, 'single_value': True}))
        return matches
class LegendasTVProvider(_LegendasTVProvider):
    """LegendasTV provider that discards the archive payload after download."""
    # Use the local subclass so subtitles can be stripped of archive content.
    subtitle_class = LegendasTVSubtitle
    def download_subtitle(self, subtitle):
        """Download via the upstream provider, then drop the cached archive
        bytes (presumably to free memory / keep the object picklable --
        see LegendasTVSubtitle.make_picklable)."""
        super(LegendasTVProvider, self).download_subtitle(subtitle)
        subtitle.archive.content = None
| [
"logging.getLogger",
"subliminal.providers.legendastv.guessit",
"subliminal.providers.legendastv.sanitize"
] | [((226, 253), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (243, 253), False, 'import logging\n'), ((1821, 1882), 'subliminal.providers.legendastv.guessit', 'guessit', (['self.name', "{'type': self.type, 'single_value': True}"], {}), "(self.name, {'type': self.type, 'single_value': True})\n", (1828, 1882), False, 'from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize\n'), ((921, 941), 'subliminal.providers.legendastv.sanitize', 'sanitize', (['self.title'], {}), '(self.title)\n', (929, 941), False, 'from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize\n'), ((945, 967), 'subliminal.providers.legendastv.sanitize', 'sanitize', (['video.series'], {}), '(video.series)\n', (953, 967), False, 'from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize\n'), ((1444, 1464), 'subliminal.providers.legendastv.sanitize', 'sanitize', (['self.title'], {}), '(self.title)\n', (1452, 1464), False, 'from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize\n'), ((1468, 1489), 'subliminal.providers.legendastv.sanitize', 'sanitize', (['video.title'], {}), '(video.title)\n', (1476, 1489), False, 'from subliminal.providers.legendastv import LegendasTVSubtitle as _LegendasTVSubtitle, LegendasTVProvider as _LegendasTVProvider, Episode, Movie, guess_matches, guessit, sanitize\n')] |
from django.shortcuts import render, redirect, get_object_or_404
import json
from django.http import HttpResponse, Http404
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from django.utils import timezone
# Create your views here.
from socialnetwork.forms import *
from socialnetwork.models import *
from socialnetwork.forms import ProfileForm, UpdateProfileForm
from socialnetwork.models import Profile
from allauth.account.views import SignupView, LoginView
from .models import User
import requests
from notifications.signals import notify
from notifications.models import Notification
import datetime
class MySignupView(SignupView):
    """allauth SignupView rendered with a project-local template."""
    # NOTE(review): the *signup* view points at 'login.html' while MyLoginView
    # points at 'register.html' -- these look swapped; confirm against the
    # actual templates before changing.
    template_name = 'templates/login.html'
class MyLoginView(LoginView):
    """allauth LoginView rendered with a project-local template."""
    # NOTE(review): the *login* view points at 'register.html' while
    # MySignupView points at 'login.html' -- these look swapped; verify.
    template_name = 'templates/register.html'
@login_required
def user_profile(request):
    """Render the logged-in user's own profile page.

    Ensures a Profile row exists for the user, then renders the profile
    edit forms together with the user's following list.
    """
    # Ensure a Profile exists; the (profile, created) tuple itself is unused,
    # so it is deliberately not bound to a misleading name.
    Profile.objects.get_or_create(user=request.user)
    current_user = get_object_or_404(Profile, user=request.user)
    context = {
        'form': ProfileForm(),
        'userform': UpdateProfileForm(),
        'p': current_user,
        'following': current_user.following.all(),
    }
    return render(request, 'user_profile.html', context)
# return a list of usernames
def get_following_list(user_profile):
    """Return the usernames of everyone *user_profile* follows.

    Note: this is a plain helper, not a Django view. The previous
    ``@login_required`` decorator was misapplied -- it treats its first
    argument as an HttpRequest, so decorating a helper that receives a
    Profile was incorrect; the decorator has been removed.
    """
    return [followed.user.username for followed in user_profile.following.all()]
@login_required
def get_photo(request, id):
    """Serve the picture stored on the Profile with primary key *id*."""
    profile = get_object_or_404(Profile, id=id)
    # A row can exist without an uploaded picture; treat that as missing.
    if not profile.picture:
        raise Http404
    return HttpResponse(profile.picture, content_type=profile.content_type)
@login_required
def get_profile(request, username):
    """Render a profile page for *username*: the user's own page when it is
    their profile, otherwise another user's page with a follow indicator.
    """
    context = {}
    # Make sure a profile is created for new users.
    Profile.objects.get_or_create(user=request.user)
    request_user = get_object_or_404(Profile, user=request.user)
    try:
        profile = get_object_or_404(Profile, user__username=username)
    except Http404:
        # Narrowed from a bare `except:` -- get_object_or_404 raises Http404.
        context["message"] = "User does not exist."
        return render(request, 'error.html', context)
    context['p'] = profile
    logs_of_profile = Log.objects.filter(user_id=profile.user.id)
    following_list = profile.following.all()
    follower_list = profile.follower.all()
    context['following'] = following_list
    context['followers'] = follower_list
    context['bookmarked_logs'] = profile.bookmarked_logs.all()
    context['logs_created_by_user'] = logs_of_profile
    context['num_followers'] = len(follower_list)
    context['num_followings'] = len(following_list)
    context['num_logs'] = len(logs_of_profile)
    # Own profile: render the editable view.
    if (request.user.username == profile.user.username
            and request.user.email == profile.user.email):
        return render(request, 'user_profile.html', context)
    # Someone else's profile: expose whether the viewer already follows them.
    context['following_status'] = profile in request_user.following.all()
    return render(request, 'other_profile.html', context)
@login_required
def home(request):
    """Render the map home page with marker data for the user's own logs and
    for other users' (visible) logs, serialized to JSON for the template."""
    self_logs = Log.objects.filter(user_id=request.user.id)
    other_logs = Log.objects.exclude(user_id=request.user.id)
    self_geo = [
        [log.location.lat, log.location.lng, log.location.placeID,
         str(log.picture), log.id]
        for log in self_logs
    ]
    # Only publicly visible logs from other users are exposed.
    other_geo = [
        [log.location.lat, log.location.lng, log.location.placeID,
         str(log.picture), log.id]
        for log in other_logs if log.visibility
    ]
    context = {
        "self_geoinfo": json.dumps(self_geo),
        "other_geoinfo": json.dumps(other_geo),
    }
    return render(request, 'home.html', context)
@login_required
def filter_date(request):
    """Return logs created between POSTed 'start_date' and 'end_date'
    (inclusive, format YYYY-MM-DD), serialized as JSON.
    """
    if request.method != 'POST':
        return render(request, 'error.html',
                      {'message': "You must use a POST request for this operation"})
    if 'start_date' not in request.POST or 'end_date' not in request.POST:
        return render(request, 'error.html',
                      {'message': "Some critical data is missing! Please try again."})
    try:
        start_date = datetime.datetime.strptime(
            request.POST['start_date'], '%Y-%m-%d').date()
        end_date = datetime.datetime.strptime(request.POST['end_date'], '%Y-%m-%d')
        # Add one day so the chosen end date is included in the range.
        end_date = (end_date + datetime.timedelta(days=1)).date()
        if start_date > end_date:
            return render(request, 'error.html',
                          {'message': "Start date must be earlier than end date"})
        filter_logs = Log.objects.filter(
            creation_time__range=(start_date, end_date))
    except ValueError:
        # Raised by strptime on malformed dates (unused binding removed).
        return render(request, 'error.html', {'message': "ValueError"})
    return HttpResponse(serialize_log(filter_logs, request), content_type='application/json')
@login_required
def filtered_stream(request):
    # Shell page for the date-filtered stream; presumably populated
    # client-side (see filter_date) -- confirm with the template.
    return render(request, 'filtered_stream.html', {})
@login_required
def get_one_log(request, log_id):
    """Return the single log *log_id*, serialized as JSON (404 if absent)."""
    log = get_object_or_404(Log, id=log_id)
    return HttpResponse(serialize_log([log], request), content_type='application/json')
@login_required
def get_user_logs(request, user_id):
    """Return all logs created by user *user_id*, serialized as JSON."""
    logs_by_user = Log.objects.filter(user__id=user_id)
    return HttpResponse(serialize_log(logs_by_user, request), content_type='application/json')
@login_required
def get_logs(request):
    """Return every log in the system, serialized as JSON."""
    all_logs = Log.objects.all()
    return HttpResponse(serialize_log(all_logs, request), content_type='application/json')
@login_required
def get_bookmark_logs(request):
    """Return the requesting user's bookmarked logs, serialized as JSON."""
    viewer = get_object_or_404(Profile, user=request.user)
    bookmarks = viewer.bookmarked_logs.all()
    return HttpResponse(serialize_log(bookmarks, request), content_type='application/json')
def serialize_log(logs, request):
    """Serialize *logs* to a JSON string, annotating each log with per-viewer
    state (follow / bookmark / like status relative to ``request.user``).

    Fix: comments were previously found by iterating over *every* Comment in
    the database for each log (O(logs x comments)); they are now fetched
    with a per-log ORM filter.
    """
    viewer = get_object_or_404(Profile, user=request.user)
    following = viewer.following.all()
    bookmarks = viewer.bookmarked_logs.all()
    all_logs = []
    for log in logs:
        creator = log.user
        creator_profile, _ = Profile.objects.get_or_create(user=log.user)
        comments = [_serialize_comment(c)
                    for c in Comment.objects.filter(of_log=log)]
        all_logs.append({
            'user_id': creator.id,
            'already_followed': creator_profile in following,
            'log_id': log.id,
            'username': creator.username,
            'profile_pic': str(creator_profile.picture),
            'log_title': log.log_title,
            'log_text': log.log_text,
            'log_location': log.location.location_name,
            'date': log.creation_time.isoformat(),
            'log_pic': str(log.picture),
            'bookmark_status': log in bookmarks,
            'num_likes': len(log.liked_users.all()),
            'already_liked': viewer in log.liked_users.all(),
            'comments': comments,
            'is_self': creator_profile == viewer,
            'visibility': log.visibility,
        })
    return json.dumps(all_logs)


def _serialize_comment(comment_item):
    """Serialize one Comment (with its author's profile picture) to a dict."""
    commentor_profile = get_object_or_404(Profile, user=comment_item.created_by)
    return {
        'comment_id': comment_item.id,
        'text': comment_item.comment_content,
        'date': comment_item.created_at.isoformat(),
        'comment_profile_pic': str(commentor_profile.picture),
        'username': comment_item.created_by.username,
        'user_fn': comment_item.created_by.first_name,
        'user_ln': comment_item.created_by.last_name,
    }
def _username_taken(request, username):
    """True if *username* already belongs to a user other than request.user."""
    exists = User.objects.filter(username=username).count() > 0
    return exists and username != request.user.username


@login_required
def add_profile(request):
    """Update the logged-in user's account fields (name/username) and their
    Profile (bio/picture), then render the refreshed profile page.

    The previously duplicated username-uniqueness check is factored into
    ``_username_taken``.
    """
    context = {}
    if request.method != 'POST':
        return render(request, 'error.html',
                      {'message': "You must use a POST request for this operation"})
    user_form = UpdateProfileForm(request.POST)
    if not user_form.is_valid():
        # Partial update: apply only the account fields that were provided.
        if 'first_name' in request.POST and request.POST['first_name']:
            request.user.first_name = request.POST['first_name']
        if 'last_name' in request.POST and request.POST['last_name']:
            request.user.last_name = request.POST['last_name']
        if 'username' in request.POST and request.POST['username']:
            if _username_taken(request, request.POST['username']):
                context['message'] = 'Username already exists.'
                return render(request, 'error.html', context)
            request.user.username = request.POST['username']
        request.user.save()
    else:
        request.user.first_name = request.POST['first_name']
        request.user.last_name = request.POST['last_name']
        if _username_taken(request, request.POST['username']):
            context['message'] = 'Username already exists.'
            return render(request, 'error.html', context)
        request.user.username = request.POST['username']
        request.user.save()
    new_item = Profile.objects.get(user=request.user)
    form = ProfileForm(request.POST, request.FILES, instance=new_item)
    if not form.is_valid():
        # Partial update of the profile fields (bio and/or picture).
        if 'bio' in request.POST and request.POST['bio']:
            new_item.bio = request.POST['bio']
        if 'picture' in form.cleaned_data:
            new_item.picture = form.cleaned_data['picture']
            new_item.content_type = form.cleaned_data['picture'].content_type
        new_item.save()
        context['p'] = new_item
        return get_profile(request, request.user.username)
    # Valid form: overwrite all profile fields.
    # NOTE(review): 'pic' is not the model field used elsewhere ('picture'),
    # so the uploaded image may not be persisted on this path -- confirm.
    new_item.pic = form.cleaned_data['picture']
    new_item.bio = form.cleaned_data['bio']
    new_item.content_type = form.cleaned_data['picture'].content_type
    new_item.save()
    context['message'] = 'Item #{0} saved.'.format(new_item.id)
    context['p'] = new_item
    return get_profile(request, request.user.username)
@login_required
def follow(request, id):
    """Follow the Profile with primary key *id* and render their profile."""
    context = {}
    try:
        other_user = get_object_or_404(Profile, id=id)
    except Http404:
        # Narrowed from a bare `except:` -- get_object_or_404 raises Http404.
        context["message"] = "The user profile you are trying to follow doesn't exist."
        return render(request, 'error.html', context)
    current_user = request.user
    # `other_user` is a Profile; keep both sides of the relation in sync.
    current_user.profile.following.add(other_user)
    current_user.save()
    other_user.follower.add(current_user.profile)
    other_user.save()
    context['following_status'] = True
    context['p'] = other_user
    return get_profile(request, other_user.user.username)
@login_required
def ajax_follow(request):
    """AJAX endpoint: follow the user given by POST['user_id'] and return the
    refreshed logs JSON (via get_logs)."""
    if request.method != 'POST':
        return render(request, 'error.html',
                      {'message': "You must use a POST request for this operation"})
    if 'user_id' not in request.POST or not request.POST['user_id']:
        return render(request, 'error.html',
                      {'message': "The user you are trying to follow should not have empty ID."})
    user_id = request.POST['user_id']
    if user_id.isnumeric():
        target = get_object_or_404(Profile, user_id=user_id)
        me = get_object_or_404(Profile, user=request.user)
        # Users cannot follow themselves; re-following is a no-op.
        if me != target and target not in me.following.all():
            me.following.add(target)
            me.save()
            target.follower.add(me)
            target.save()
    return get_logs(request)
@login_required
def ajax_unfollow(request):
    """AJAX endpoint: unfollow the user given by POST['user_id'] and return
    the refreshed logs JSON (via get_logs)."""
    if request.method != 'POST':
        return render(request, 'error.html',
                      {'message': "You must use a POST request for this operation"})
    if 'user_id' not in request.POST or not request.POST['user_id']:
        return render(request, 'error.html',
                      {'message': "The user you are trying to follow should not have empty ID."})
    user_id = request.POST['user_id']
    if user_id.isnumeric():
        target = get_object_or_404(Profile, user_id=user_id)
        me = get_object_or_404(Profile, user=request.user)
        # Users cannot unfollow themselves; unfollowing a stranger is a no-op.
        if me != target and target in me.following.all():
            me.following.remove(target)
            me.save()
            target.follower.remove(me)
            target.save()
    return get_logs(request)
@login_required
def unfollow(request, id):
    """Unfollow the Profile with primary key *id* and render their profile."""
    context = {}
    try:
        other_user = get_object_or_404(Profile, id=id)
    except Http404:
        # Narrowed from a bare `except:` -- get_object_or_404 raises Http404.
        context["message"] = "The user profile you are trying to unfollow doesn't exist."
        return render(request, 'error.html', context)
    current_user = request.user
    # Keep both sides of the relation in sync.
    current_user.profile.following.remove(other_user)
    current_user.save()
    other_user.follower.remove(current_user.profile)
    other_user.save()
    context['following_status'] = False
    context['p'] = other_user
    return get_profile(request, other_user.user.username)
@login_required
def add_comment(request):
    """Create a comment on a log and notify the log's owner.

    Fix: a non-numeric log_id other than the sentinel 'xxxx' previously fell
    off the end of the function, returning None (a server error in Django);
    it now returns an explicit error page.
    """
    if request.method != 'POST':
        return render(request, 'error.html',
                      {'message': "You must use a POST request for this operation"})
    if 'comment_text' not in request.POST or not request.POST['comment_text']:
        return render(request, 'error.html', {'message': "You comment should not be empty."})
    if 'log_id' not in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "Comment needs to be made on a log."})
    logid = request.POST['log_id']
    if logid.isnumeric():
        belong_to_log = Log.objects.get(id=logid)
        new_comment = Comment(comment_content=request.POST['comment_text'],
                              created_by=request.user,
                              created_at=timezone.now(),
                              of_log=belong_to_log)
        new_comment.save()
        # Notify the log owner about the new reply.
        notify.send(sender=request.user, recipient=belong_to_log.user,
                    verb='your log: <i>{}</i> has a new reply from <strong>{}</strong>: "{}"'.format(
                        belong_to_log.log_title,
                        new_comment.created_by.username,
                        new_comment.comment_content),
                    description="Comment",
                    target=belong_to_log)
        return get_logs(request)
    if logid == 'xxxx':
        return render(request, 'error.html',
                      {'message': "Please dont' make changes to comment field name"})
    # Previously an implicit `return None`; respond explicitly instead.
    return render(request, 'error.html', {'message': "Invalid log id."})
@login_required
def log_editor(request):
    """Render an empty log editor for the map point given by POST['latLng'],
    creating the Location row on first use.

    Fixes: the bare `except:` around json.loads is narrowed, non-dict JSON
    payloads are rejected instead of crashing, and float() TypeErrors
    (e.g. lat given as a list) are caught.
    """
    context = {}
    if request.method != 'POST':
        context['message'] = "The page you try to visit only accepts POST request."
        return render(request, 'error.html', context)
    if 'latLng' not in request.POST or not request.POST['latLng']:
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    try:
        latLng = json.loads(request.POST['latLng'])
    except (ValueError, TypeError):
        # json.JSONDecodeError is a ValueError; narrowed from a bare except.
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    if not isinstance(latLng, dict) or 'lat' not in latLng or 'lng' not in latLng:
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    try:
        lat = float(latLng['lat'])
        lng = float(latLng['lng'])
    except (ValueError, TypeError):
        context['message'] = "Some critical data is wrong! Please try again."
        return render(request, 'error.html', context)
    try:
        location = Location.objects.get(lat=lat, lng=lng)
        context['location_name'] = location.location_name
        context['placeID'] = location.placeID
    except Location.DoesNotExist:
        # First log at this point: take the client-supplied name/placeID or
        # fall back to reverse geocoding and a synthetic id.
        if 'location_name' in request.POST and 'placeID' in request.POST:
            context['location_name'] = request.POST['location_name']
            context['placeID'] = request.POST['placeID']
        else:
            context['location_name'] = getLocationNameFromLatLng(latLng)
            context['placeID'] = str(latLng['lat']) + str(latLng['lng'])
        location = Location(
            placeID=context['placeID'],
            location_name=context['location_name'],
            lat=lat,
            lng=lng)
        location.save()
    # Fresh editor state for a brand-new log.
    context['log_id'] = 0
    context['log_title'] = ''
    context['log_text'] = ''
    context['visibility'] = True
    logs = Log.objects.filter(location=location)
    context['log_num'] = len(logs)
    context['user_num'] = len({log.user for log in logs})
    return render(request, 'log_editor.html', context)
@login_required
def edit_log(request, log_id):
    """Render the editor pre-filled with an existing log (owner only).

    Fix: the location-stats loop previously reused the name `log`,
    shadowing the log being edited.
    """
    context = {}
    log = get_object_or_404(Log, id=log_id)
    if log.user != request.user:
        context['message'] = "You cannot edit other user's log."
        return render(request, 'error.html', context)
    context['log_id'] = log.id
    context['log_title'] = log.log_title
    context['log_text'] = log.log_text
    context['visibility'] = log.visibility
    context['placeID'] = log.location.placeID
    context['location_name'] = log.location.location_name
    # Stats for every log at this location (all users).
    location_logs = Log.objects.filter(location=log.location)
    context['log_num'] = len(location_logs)
    context['user_num'] = len({other.user for other in location_logs})
    return render(request, 'log_editor.html', context)
@login_required
def add_log(request, log_id):
    """Create or update the log *log_id* from the posted EditorForm.

    If a log with *log_id* exists it is updated (owner only); otherwise a
    new log is created at the posted Location. On form errors the editor is
    re-rendered with the entered values and a list of error messages.
    """
    context = {}
    if request.method == 'POST':
        form = EditorForm(request.POST, request.FILES)
        # The Location must already exist (created by log_editor).
        try:
            location = Location.objects.get(placeID=request.POST['placeID'])
        except Location.DoesNotExist:
            location = None
        if not location:
            context['message'] = 'Location not found.'
            return render(request, 'error.html', context)
        # Only the owner may edit an existing log.
        try:
            log = Log.objects.get(id=log_id)
            if log.user.id != request.user.id:
                context['message'] = "You cannot edit other user's log."
                return render(request, 'error.html', context)
        except Log.DoesNotExist:
            log = None
        if not form.is_valid():
            # Build human-readable error messages mirroring the form checks.
            error_messages = []
            if 'log_title' in request.POST and len(request.POST['log_title']) > 200:
                error_messages.append("Log title exceeds max length (200).")
            # NOTE(review): threshold is 20000000 but the message says 20000
            # -- one of the two looks like a typo; confirm against the model.
            if 'log_text' in request.POST and len(request.POST['log_text']) > 20000000:
                error_messages.append("Log text exceeds max length (20000).")
            if 'picture' not in form.cleaned_data and 'picture' in request.FILES:
                if not hasattr(request.FILES['picture'], 'content_type'):
                    error_messages.append('You must upload a picture.')
                elif not request.FILES['picture'].content_type or not request.FILES['picture'].content_type.startswith(
                        'image'):
                    error_messages.append('File type is not image.')
                elif request.FILES['picture'].size > 2500000:
                    error_messages.append('Cover image exceeds max size (2500000).')
            # Re-render the editor with whatever the user had typed.
            context['log_id'] = log_id
            if 'log_title' in request.POST:
                context['log_title'] = request.POST['log_title']
            else:
                context['log_title'] = ''
            if 'log_text' in request.POST:
                context['log_text'] = request.POST['log_text']
            else:
                context['log_text'] = ''
            # NOTE(review): checkbox present => visibility False; presumably
            # the checkbox means "private" -- confirm with the template.
            if 'visibility' in request.POST:
                context['visibility'] = False
            else:
                context['visibility'] = True
            context['placeID'] = form.cleaned_data['placeID']
            context['location_name'] = location.location_name
            context['error_messages'] = error_messages
            return render(request, 'log_editor.html', context)
        # Valid form: update the existing log, or create a new one.
        try:
            log = Log.objects.get(id=log_id)
            log.log_title = form.cleaned_data['log_title']
            log.log_text = form.cleaned_data['log_text']
            if form.cleaned_data['picture']:
                log.picture = form.cleaned_data['picture']
                log.content_type = form.cleaned_data['picture'].content_type
            if 'visibility' in request.POST:
                log.visibility = False
            else:
                log.visibility = True
            log.save()
        except Log.DoesNotExist:
            new_log = Log(log_title=form.cleaned_data['log_title'],
                          log_text=form.cleaned_data['log_text'],
                          user=request.user,
                          location=location)
            if form.cleaned_data['picture']:
                new_log.picture = form.cleaned_data['picture']
                new_log.content_type = form.cleaned_data['picture'].content_type
            if 'visibility' in request.POST:
                new_log.visibility = False
            else:
                new_log.visibility = True
            new_log.save()
        return redirect(reverse('home'))
    else:
        context['message'] = "The page you try to visit only accepts POST request."
        return render(request, 'error.html', context)
@login_required
def get_picture(request, log_id):
    """Return the stored cover image of a log as an HTTP response.

    Raises Http404 when the log does not exist or has no picture attached.
    """
    entry = get_object_or_404(Log, id=log_id)
    # A row can outlive its uploaded picture (e.g. manual deletion leaving a
    # dangling reference), so the presence check is still required even though
    # form validation demands an upload.
    if entry.picture:
        return HttpResponse(entry.picture, content_type=entry.content_type)
    raise Http404
@login_required
def log_display(request):
    """Render the logs recorded at a map coordinate supplied via POST.

    Expects a POST field ``latLng`` holding a JSON object of the form
    ``{"lat": <number>, "lng": <number>}``.  Shows every public log at that
    location plus the requesting user's own private ones, together with the
    total log count and the number of distinct authors.
    """
    if request.method != 'POST':
        context = {'message': "The page you try to visit only accepts POST request."}
        return render(request, 'error.html', context)
    context = {}
    if 'latLng' not in request.POST or not request.POST['latLng']:
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    try:
        # Parse once and reuse; the original re-parsed the same payload later.
        latLng = json.loads(request.POST['latLng'])
    except ValueError:  # narrowed from a bare except; json.loads raises ValueError
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    if 'lat' not in latLng or 'lng' not in latLng:
        context['message'] = "Some critical data is missing! Please try again."
        return render(request, 'error.html', context)
    try:
        lat = float(latLng['lat'])
        lng = float(latLng['lng'])
    except (TypeError, ValueError):
        # TypeError added: float(None) / float([]) would previously crash.
        context['message'] = "Some critical data is wrong! Please try again."
        return render(request, 'error.html', context)
    # .first() instead of [0] so an unknown coordinate yields an error page
    # rather than an IndexError (server error).
    location = Location.objects.filter(lat=lat, lng=lng).first()
    if location is None:
        context['message'] = "Some critical data is wrong! Please try again."
        return render(request, 'error.html', context)
    logs_to_display = list(Log.objects.filter(location=location, visibility=True))
    logs_to_display.extend(Log.objects.filter(location=location, user=request.user, visibility=False))
    context['logs'] = logs_to_display
    logs = Log.objects.filter(location=location)
    context['log_num'] = len(logs)
    # Number of distinct users who have logged at this location.
    context['user_num'] = len({log.user for log in logs})
    return render(request, 'log_display.html', context)
def getLocationNameFromLatLng(latLng):
    """Reverse-geocode a coordinate into a human-readable address.

    Calls the Google Maps Geocoding API and returns the formatted address
    of the first (nearest) match for ``latLng``, a mapping with ``'lat'``
    and ``'lng'`` keys.  Response schema documented at
    https://maps.googleapis.com/maps/api/geocode/json.
    """
    URL = "https://maps.googleapis.com/maps/api/geocode/json"
    # The API expects a single "lat,lng" string parameter.
    PARAMS = {
        'latlng': "{},{}".format(latLng['lat'], latLng['lng']),
        'key': '<KEY>',
    }
    # Send the GET request and decode the JSON response body.
    r = requests.get(url=URL, params=PARAMS)
    data = r.json()
    # The first result is the closest match to the supplied coordinate.
    # (The original also extracted the result's lat/lng into unused locals.)
    return data['results'][0]['formatted_address']
@login_required
def travel_stream(request):
    """Render the travel-stream page for the logged-in user."""
    # Ensure a Profile row exists before the stream page queries it.
    # (Return value intentionally unused; the call is for its side effect.)
    Profile.objects.get_or_create(user=request.user)
    return render(request, 'travel_stream.html', {})
@login_required
def bookmark_stream(request):
    """Render the bookmarked-logs stream page for the logged-in user."""
    # Ensure a Profile row exists before the stream page queries it.
    # (Return value intentionally unused; the call is for its side effect.)
    Profile.objects.get_or_create(user=request.user)
    return render(request, 'bookmark_stream.html', {})
@login_required
def show_all_user_stream(request, user_id):
    """Render the stream of a single user's logs, identified by ``user_id``."""
    # Ensure a Profile row exists before the stream page queries it.
    # (Return value intentionally unused; the call is for its side effect.)
    Profile.objects.get_or_create(user=request.user)
    return render(request, 'user_stream.html', {'user_id': user_id})
@login_required
def one_log(request, log_id):
    """Render the page for a single log, or an error page if it doesn't exist."""
    try:
        # Existence check only; the page fetches the log content itself.
        get_object_or_404(Log, id=log_id)
    except Http404:
        # Narrowed from a bare ``except:`` which also swallowed unrelated
        # errors (e.g. database failures) and misreported them as "not found".
        context = {"message": "The log you are trying to display doesn't exist"}
        return render(request, 'error.html', context)
    return render(request, 'one_log.html', {'log_id': log_id})
@login_required
def my_notifications(request):
    """Render the notifications page; the template pulls its own data."""
    return render(request, 'my_notifications.html', {})
# Add this log to User's bookmarked collection
@login_required
def add_bookmark(request):
    """Bookmark the log named by POST['log_id'] for the requesting user.

    Returns the refreshed log list on success, or an error page when the
    request is malformed, the id is not numeric, or the log is already
    bookmarked.
    """
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "The log you are trying to bookmark shall not be empty."})
    logid = request.POST['log_id']
    if not logid.isnumeric():
        # Bug fix: previously only the literal 'xxxx' was handled; any other
        # non-numeric id fell through and the view returned None (server error).
        # Now consistent with remove_bookmark's else branch.
        return render(request, 'error.html', {'message': "Please dont' make changes to comment field name"})
    log_trying_to_bookmark = Log.objects.get(id=logid)
    request_user = get_object_or_404(Profile, user=request.user)
    if log_trying_to_bookmark in request_user.bookmarked_logs.all():
        return render(request, 'error.html', {'message': "Log is already bookmarked, please check your collection"})
    request_user.bookmarked_logs.add(log_trying_to_bookmark)
    request_user.save()
    return get_logs(request)
# Remove this log from User's bookmarked collection
@login_required
def remove_bookmark(request):
    """Drop the log named by POST['log_id'] from the user's bookmarks.

    Returns the refreshed log list on success, or an error page when the
    request is malformed, the id is not numeric, or the log was never
    bookmarked.
    """
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "The log you are trying to bookmark shall not be empty."})
    logid = request.POST['log_id']
    if not logid.isnumeric():
        return render(request, 'error.html', {'message':
                                               "Please dont' make changes to comment field name"})
    target = Log.objects.get(id=logid)
    profile = get_object_or_404(Profile, user=request.user)
    if target not in profile.bookmarked_logs.all():
        return render(request, 'error.html', {'message': "You can not remove a collection that is not bookmarked."})
    profile.bookmarked_logs.remove(target)
    profile.save()
    return get_logs(request)
# Like this log, add liked users to this log
@login_required
def like_log(request):
    """Record that the requesting user likes the log in POST['log_id'].

    On success the log's author is notified and the refreshed log list is
    returned; malformed requests get an error page.
    """
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "The log you are trying to like shall not be empty."})
    logid = request.POST['log_id']
    if not logid.isnumeric():
        # Bug fix: previously only the literal 'xxxx' was handled; any other
        # non-numeric id fell through and the view returned None (server error).
        return render(request, 'error.html', {'message': "Please dont' make changes to comment field name"})
    log_trying_to_like = Log.objects.get(id=logid)
    request_user = get_object_or_404(Profile, user=request.user)
    if request_user in log_trying_to_like.liked_users.all():
        return render(request, 'error.html', {'message': "You already liked this Log"})
    log_trying_to_like.liked_users.add(request_user)
    log_trying_to_like.save()
    # Tell the log's author about the new like.
    notify.send(sender=request.user, recipient=log_trying_to_like.user,
                verb='Wow! Your log: <i>{}</i> is liked by <strong>{}</strong>.'.format(
                    log_trying_to_like.log_title,
                    request.user.username),
                description="Like",
                target=log_trying_to_like)
    return get_logs(request)
# Unlike this log, remove request user from liked_users of this Log
@login_required
def unlike_log(request):
    """Remove the requesting user from the liked_users of POST['log_id'].

    Returns the refreshed log list on success, or an error page when the
    request is malformed, the id is not numeric, or the log isn't liked.
    """
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if not 'log_id' in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "The log you are trying to like shall not be empty."})
    logid = request.POST['log_id']
    if not logid.isnumeric():
        # Bug fix: previously only the literal 'xxxx' was handled; any other
        # non-numeric id fell through and the view returned None (server error).
        return render(request, 'error.html', {'message': "Please dont' make changes to comment field name"})
    log_trying_to_unlike = Log.objects.get(id=logid)
    request_user = get_object_or_404(Profile, user=request.user)
    if request_user not in log_trying_to_unlike.liked_users.all():
        return render(request, 'error.html', {'message':
                                               "You can't unlike this Log since it's not liked."})
    log_trying_to_unlike.liked_users.remove(request_user)
    log_trying_to_unlike.save()
    return get_logs(request)
@login_required
def mark_as_read_action(request):
    """Mark a notification as read, then redirect to the log it references.

    Requires a POST with non-empty 'notification_id' and 'log_id' fields;
    anything else renders an error page.
    """
    if request.method != 'POST':
        return render(request, 'error.html', {'message': "You must use a POST request for this operation"})
    if 'notification_id' not in request.POST or not request.POST['notification_id'] or 'log_id' not in request.POST or not request.POST['log_id']:
        return render(request, 'error.html', {'message': "Some critical data is missing. Please try again!"})
    notification_id = request.POST['notification_id']
    try:
        log_id = int(request.POST['log_id'])
    except ValueError:
        return render(request, 'error.html', {'message': "Some critical data is wrong. Please try again!"})
    notification = get_object_or_404(Notification, id=notification_id)
    # Only flip and persist when it is actually unread, avoiding a no-op save.
    if notification.unread:
        notification.unread = False
        notification.save()
    return redirect(reverse('one_log', kwargs={'log_id': log_id}))
@login_required
def about(request):
    """Render the static about page."""
    # Bug fix: the original return line was corrupted by stray text fused
    # onto the end of the file (`| [` followed by non-Python data).
    return render(request, 'about.html', {})
"django.shortcuts.render",
"json.loads",
"datetime.datetime.strptime",
"socialnetwork.models.Profile.objects.get_or_create",
"django.http.HttpResponse",
"django.shortcuts.get_object_or_404",
"json.dumps",
"django.urls.reverse",
"requests.get",
"django.utils.timezone.now",
"socialnetwork.forms.Pr... | [((864, 912), 'socialnetwork.models.Profile.objects.get_or_create', 'Profile.objects.get_or_create', ([], {'user': 'request.user'}), '(user=request.user)\n', (893, 912), False, 'from socialnetwork.models import Profile\n'), ((952, 965), 'socialnetwork.forms.ProfileForm', 'ProfileForm', ([], {}), '()\n', (963, 965), False, 'from socialnetwork.forms import ProfileForm, UpdateProfileForm\n'), ((992, 1011), 'socialnetwork.forms.UpdateProfileForm', 'UpdateProfileForm', ([], {}), '()\n', (1009, 1011), False, 'from socialnetwork.forms import ProfileForm, UpdateProfileForm\n'), ((1031, 1076), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (1048, 1076), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1213, 1258), 'django.shortcuts.render', 'render', (['request', '"""user_profile.html"""', 'context'], {}), "(request, 'user_profile.html', context)\n", (1219, 1258), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1556, 1589), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'id': 'id'}), '(Profile, id=id)\n', (1573, 1589), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((1738, 1796), 'django.http.HttpResponse', 'HttpResponse', (['item.picture'], {'content_type': 'item.content_type'}), '(item.picture, content_type=item.content_type)\n', (1750, 1796), False, 'from django.http import HttpResponse, Http404\n'), ((1926, 1974), 'socialnetwork.models.Profile.objects.get_or_create', 'Profile.objects.get_or_create', ([], {'user': 'request.user'}), '(user=request.user)\n', (1955, 1974), False, 'from socialnetwork.models import Profile\n'), ((1994, 2039), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (2011, 2039), False, 'from django.shortcuts import render, redirect, 
get_object_or_404\n'), ((3421, 3467), 'django.shortcuts.render', 'render', (['request', '"""other_profile.html"""', 'context'], {}), "(request, 'other_profile.html', context)\n", (3427, 3467), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3870, 3889), 'json.dumps', 'json.dumps', (['self_ls'], {}), '(self_ls)\n', (3880, 3889), False, 'import json\n'), ((4200, 4220), 'json.dumps', 'json.dumps', (['other_ls'], {}), '(other_ls)\n', (4210, 4220), False, 'import json\n'), ((4272, 4309), 'django.shortcuts.render', 'render', (['request', '"""home.html"""', 'context'], {}), "(request, 'home.html', context)\n", (4278, 4309), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((5562, 5605), 'django.shortcuts.render', 'render', (['request', '"""filtered_stream.html"""', '{}'], {}), "(request, 'filtered_stream.html', {})\n", (5568, 5605), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((5689, 5722), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Log'], {'id': 'log_id'}), '(Log, id=log_id)\n', (5706, 5722), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6359, 6404), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (6376, 6404), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6627, 6672), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (6644, 6672), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((8962, 8982), 'json.dumps', 'json.dumps', (['all_logs'], {}), '(all_logs)\n', (8972, 8982), False, 'import json\n'), ((9227, 9258), 'socialnetwork.forms.UpdateProfileForm', 'UpdateProfileForm', (['request.POST'], {}), '(request.POST)\n', (9244, 9258), False, 'from socialnetwork.forms import ProfileForm, 
UpdateProfileForm\n'), ((10580, 10618), 'socialnetwork.models.Profile.objects.get', 'Profile.objects.get', ([], {'user': 'request.user'}), '(user=request.user)\n', (10599, 10618), False, 'from socialnetwork.models import Profile\n'), ((10630, 10689), 'socialnetwork.forms.ProfileForm', 'ProfileForm', (['request.POST', 'request.FILES'], {'instance': 'new_item'}), '(request.POST, request.FILES, instance=new_item)\n', (10641, 10689), False, 'from socialnetwork.forms import ProfileForm, UpdateProfileForm\n'), ((19508, 19541), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Log'], {'id': 'log_id'}), '(Log, id=log_id)\n', (19525, 19541), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((20164, 20207), 'django.shortcuts.render', 'render', (['request', '"""log_editor.html"""', 'context'], {}), "(request, 'log_editor.html', context)\n", (20170, 20207), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((24106, 24139), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Log'], {'id': 'log_id'}), '(Log, id=log_id)\n', (24123, 24139), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((24373, 24429), 'django.http.HttpResponse', 'HttpResponse', (['log.picture'], {'content_type': 'log.content_type'}), '(log.picture, content_type=log.content_type)\n', (24385, 24429), False, 'from django.http import HttpResponse, Http404\n'), ((26832, 26868), 'requests.get', 'requests.get', ([], {'url': 'URL', 'params': 'PARAMS'}), '(url=URL, params=PARAMS)\n', (26844, 26868), False, 'import requests\n'), ((27524, 27572), 'socialnetwork.models.Profile.objects.get_or_create', 'Profile.objects.get_or_create', ([], {'user': 'request.user'}), '(user=request.user)\n', (27553, 27572), False, 'from socialnetwork.models import Profile\n'), ((27584, 27625), 'django.shortcuts.render', 'render', (['request', '"""travel_stream.html"""', '{}'], {}), "(request, 'travel_stream.html', {})\n", (27590, 
27625), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((27763, 27811), 'socialnetwork.models.Profile.objects.get_or_create', 'Profile.objects.get_or_create', ([], {'user': 'request.user'}), '(user=request.user)\n', (27792, 27811), False, 'from socialnetwork.models import Profile\n'), ((27823, 27866), 'django.shortcuts.render', 'render', (['request', '"""bookmark_stream.html"""', '{}'], {}), "(request, 'bookmark_stream.html', {})\n", (27829, 27866), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((28018, 28066), 'socialnetwork.models.Profile.objects.get_or_create', 'Profile.objects.get_or_create', ([], {'user': 'request.user'}), '(user=request.user)\n', (28047, 28066), False, 'from socialnetwork.models import Profile\n'), ((28078, 28135), 'django.shortcuts.render', 'render', (['request', '"""user_stream.html"""', "{'user_id': user_id}"], {}), "(request, 'user_stream.html', {'user_id': user_id})\n", (28084, 28135), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((28424, 28475), 'django.shortcuts.render', 'render', (['request', '"""one_log.html"""', "{'log_id': log_id}"], {}), "(request, 'one_log.html', {'log_id': log_id})\n", (28430, 28475), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((28553, 28602), 'django.shortcuts.render', 'render', (['request', '"""my_notifications.html"""', 'context'], {}), "(request, 'my_notifications.html', context)\n", (28559, 28602), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((34763, 34796), 'django.shortcuts.render', 'render', (['request', '"""about.html"""', '{}'], {}), "(request, 'about.html', {})\n", (34769, 34796), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2120, 2171), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user__username': 'username'}), '(Profile, user__username=username)\n', (2137, 2171), False, 
'from django.shortcuts import render, redirect, get_object_or_404\n'), ((3116, 3161), 'django.shortcuts.render', 'render', (['request', '"""user_profile.html"""', 'context'], {}), "(request, 'user_profile.html', context)\n", (3122, 3161), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((5410, 5506), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (5416, 5506), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((6961, 7005), 'socialnetwork.models.Profile.objects.get_or_create', 'Profile.objects.get_or_create', ([], {'user': 'log.user'}), '(user=log.user)\n', (6990, 7005), False, 'from socialnetwork.models import Profile\n'), ((9117, 9213), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (9123, 9213), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((12030, 12063), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'id': 'id'}), '(Profile, id=id)\n', (12047, 12063), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((12648, 12744), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (12654, 12744), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((12826, 12935), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'The user you are trying to follow should not have empty ID.'}"], {}), "(request, 'error.html', {'message':\n 
'The user you are trying to follow should not have empty ID.'})\n", (12832, 12935), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((13028, 13071), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user_id': 'user_id'}), '(Profile, user_id=user_id)\n', (13045, 13071), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((13103, 13148), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (13120, 13148), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((13845, 13941), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (13851, 13941), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((14023, 14132), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'The user you are trying to follow should not have empty ID.'}"], {}), "(request, 'error.html', {'message':\n 'The user you are trying to follow should not have empty ID.'})\n", (14029, 14132), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((14225, 14268), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user_id': 'user_id'}), '(Profile, user_id=user_id)\n', (14242, 14268), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((14300, 14345), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (14317, 14345), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15005, 15038), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'id': 'id'}), '(Profile, id=id)\n', (15022, 15038), 
False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15603, 15699), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (15609, 15699), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15791, 15869), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You comment should not be empty.'}"], {}), "(request, 'error.html', {'message': 'You comment should not be empty.'})\n", (15797, 15869), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15953, 16038), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'Comment needs to be made on a log.'}"], {}), "(request, 'error.html', {'message': 'Comment needs to be made on a log.'}\n )\n", (15959, 16038), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19240, 19283), 'django.shortcuts.render', 'render', (['request', '"""log_editor.html"""', 'context'], {}), "(request, 'log_editor.html', context)\n", (19246, 19283), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19393, 19431), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (19399, 19431), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((19655, 19693), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (19661, 19693), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((24005, 24043), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (24011, 24043), False, 'from django.shortcuts import render, redirect, 
get_object_or_404\n'), ((25432, 25466), 'json.loads', 'json.loads', (["request.POST['latLng']"], {}), "(request.POST['latLng'])\n", (25442, 25466), False, 'import json\n'), ((26051, 26095), 'django.shortcuts.render', 'render', (['request', '"""log_display.html"""', 'context'], {}), "(request, 'log_display.html', context)\n", (26057, 26095), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((26226, 26264), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (26232, 26264), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((28213, 28246), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Log'], {'id': 'log_id'}), '(Log, id=log_id)\n', (28230, 28246), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((28743, 28839), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (28749, 28839), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((28919, 29023), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'The log you are trying to bookmark shall not be empty.'}"], {}), "(request, 'error.html', {'message':\n 'The log you are trying to bookmark shall not be empty.'})\n", (28925, 29023), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((29164, 29209), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (29181, 29209), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((29910, 30006), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 
'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (29916, 30006), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((30086, 30190), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'The log you are trying to bookmark shall not be empty.'}"], {}), "(request, 'error.html', {'message':\n 'The log you are trying to bookmark shall not be empty.'})\n", (30092, 30190), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((30329, 30374), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (30346, 30374), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((30820, 30917), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', '{\'message\': "Please dont\' make changes to comment field name"}'], {}), '(request, \'error.html\', {\'message\':\n "Please dont\' make changes to comment field name"})\n', (30826, 30917), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((31098, 31194), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (31104, 31194), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((31274, 31374), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'The log you are trying to like shall not be empty.'}"], {}), "(request, 'error.html', {'message':\n 'The log you are trying to like shall not be empty.'})\n", (31280, 31374), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((31511, 31556), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (31528, 
31556), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((32699, 32795), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (32705, 32795), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((32875, 32975), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'The log you are trying to like shall not be empty.'}"], {}), "(request, 'error.html', {'message':\n 'The log you are trying to like shall not be empty.'})\n", (32881, 32975), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((33114, 33159), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'request.user'}), '(Profile, user=request.user)\n', (33131, 33159), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((34369, 34420), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Notification'], {'id': 'notification_id'}), '(Notification, id=notification_id)\n', (34386, 34420), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((34621, 34717), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You must use a POST request for this operation'}"], {}), "(request, 'error.html', {'message':\n 'You must use a POST request for this operation'})\n", (34627, 34717), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((2264, 2302), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (2270, 2302), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4594, 4632), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', 
context)\n", (4600, 4632), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4786, 4850), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["request.POST['end_date']", '"""%Y-%m-%d"""'], {}), "(request.POST['end_date'], '%Y-%m-%d')\n", (4812, 4850), False, 'import datetime\n'), ((9912, 9950), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (9918, 9950), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((10440, 10478), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (10446, 10478), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((12179, 12217), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (12185, 12217), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((15156, 15194), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (15162, 15194), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((16905, 17002), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', '{\'message\': "Please dont\' make changes to comment field name"}'], {}), '(request, \'error.html\', {\'message\':\n "Please dont\' make changes to comment field name"})\n', (16911, 17002), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((17266, 17304), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (17272, 17304), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((17339, 17373), 'json.loads', 'json.loads', (["request.POST['latLng']"], {}), "(request.POST['latLng'])\n", (17349, 17373), False, 'import 
json\n'), ((17691, 17729), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (17697, 17729), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((20617, 20655), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (20623, 20655), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((22671, 22714), 'django.shortcuts.render', 'render', (['request', '"""log_editor.html"""', 'context'], {}), "(request, 'log_editor.html', context)\n", (22677, 22714), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((23879, 23894), 'django.urls.reverse', 'reverse', (['"""home"""'], {}), "('home')\n", (23886, 23894), False, 'from django.urls import reverse\n'), ((24702, 24740), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (24708, 24740), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((24776, 24810), 'json.loads', 'json.loads', (["request.POST['latLng']"], {}), "(request.POST['latLng'])\n", (24786, 24810), False, 'import json\n'), ((25128, 25166), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (25134, 25166), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((28374, 28412), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (28380, 28412), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((29376, 29481), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'Log is already bookmarked, please check your collection'}"], {}), "(request, 'error.html', {'message':\n 'Log is already bookmarked, please check your 
collection'})\n", (29382, 29481), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((29668, 29765), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', '{\'message\': "Please dont\' make changes to comment field name"}'], {}), '(request, \'error.html\', {\'message\':\n "Please dont\' make changes to comment field name"})\n', (29674, 29765), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((30543, 30648), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You can not remove a collection that is not bookmarked.'}"], {}), "(request, 'error.html', {'message':\n 'You can not remove a collection that is not bookmarked.'})\n", (30549, 30648), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((31685, 31757), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'You already liked this Log'}"], {}), "(request, 'error.html', {'message': 'You already liked this Log'})\n", (31691, 31757), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((32351, 32448), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', '{\'message\': "Please dont\' make changes to comment field name"}'], {}), '(request, \'error.html\', {\'message\':\n "Please dont\' make changes to comment field name"})\n', (32357, 32448), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((33250, 33347), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', '{\'message\': "You can\'t unlike this Log since it\'s not liked."}'], {}), '(request, \'error.html\', {\'message\':\n "You can\'t unlike this Log since it\'s not liked."})\n', (33256, 33347), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((33593, 33690), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', '{\'message\': "Please dont\' make changes to comment field 
name"}'], {}), '(request, \'error.html\', {\'message\':\n "Please dont\' make changes to comment field name"})\n', (33599, 33690), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((33992, 34090), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'Some critical data is missing. Please try again!'}"], {}), "(request, 'error.html', {'message':\n 'Some critical data is missing. Please try again!'})\n", (33998, 34090), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((34549, 34594), 'django.urls.reverse', 'reverse', (['"""one_log"""'], {'kwargs': "{'log_id': log_id}"}), "('one_log', kwargs={'log_id': log_id})\n", (34556, 34594), False, 'from django.urls import reverse\n'), ((4983, 5073), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'Start date must be earlier than end date'}"], {}), "(request, 'error.html', {'message':\n 'Start date must be earlier than end date'})\n", (4989, 5073), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((5229, 5285), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'ValueError'}"], {}), "(request, 'error.html', {'message': 'ValueError'})\n", (5235, 5285), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((7618, 7674), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Profile'], {'user': 'comment_item.created_by'}), '(Profile, user=comment_item.created_by)\n', (7635, 7674), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((16318, 16332), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (16330, 16332), False, 'from django.utils import timezone\n'), ((17493, 17531), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (17499, 17531), False, 'from django.shortcuts import render, redirect, 
get_object_or_404\n'), ((17938, 17976), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (17944, 17976), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((20858, 20896), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (20864, 20896), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((24930, 24968), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (24936, 24968), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((25375, 25413), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', 'context'], {}), "(request, 'error.html', context)\n", (25381, 25413), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((34253, 34349), 'django.shortcuts.render', 'render', (['request', '"""error.html"""', "{'message': 'Some critical data is wrong. Please try again!'}"], {}), "(request, 'error.html', {'message':\n 'Some critical data is wrong. Please try again!'})\n", (34259, 34349), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((4672, 4738), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (["request.POST['start_date']", '"""%Y-%m-%d"""'], {}), "(request.POST['start_date'], '%Y-%m-%d')\n", (4698, 4738), False, 'import datetime\n'), ((4886, 4912), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4904, 4912), False, 'import datetime\n')] |
"""
Run capture as a separate process
"""
import time
from barcap.barcode import BarcodeCapture
def main():
# Default camera index
camera_index = 0
# Camera selection routine
try:
from .device_list import select_camera, camera_list
# Get camera list
dev_list = camera_list()
# Select a camera
camera_index = select_camera(len(dev_list))
except:
print('Unable to run camera selection routine!')
# Start capture
# print(f'camera_index: {camera_index}')
capture = BarcodeCapture(camera=camera_index)
capture.start()
# Run capture loop
while capture.is_alive():
if capture.new:
# Debugging
print(f'output: {capture.output}')
# Debugging
time_stamp = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(capture.last_epoch))
print(f'last capture: {time_stamp}')
# # Stop capture on the first output reading
# capture.stop()
# break
time.sleep(0.1)
| [
"barcap.barcode.BarcodeCapture",
"time.localtime",
"time.sleep"
] | [((549, 584), 'barcap.barcode.BarcodeCapture', 'BarcodeCapture', ([], {'camera': 'camera_index'}), '(camera=camera_index)\n', (563, 584), False, 'from barcap.barcode import BarcodeCapture\n'), ((1040, 1055), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (1050, 1055), False, 'import time\n'), ((839, 873), 'time.localtime', 'time.localtime', (['capture.last_epoch'], {}), '(capture.last_epoch)\n', (853, 873), False, 'import time\n')] |
import os
import requests
from typing import Optional, List
from pydantic import Field, validator
from dbt_cloud.command.command import DbtCloudAccountCommand
from dbt_cloud.field import JOB_ID_FIELD
class DbtCloudJobRunCommand(DbtCloudAccountCommand):
"""Triggers a dbt Cloud job run and returns a status JSON response."""
job_id: int = JOB_ID_FIELD
cause: str = Field(
default="Triggered via API",
description="A text description of the reason for running this job",
)
git_sha: Optional[str] = Field(
description="The git sha to check out before running this job"
)
git_branch: Optional[str] = Field(
description="The git branch to check out before running this job"
)
schema_override: Optional[str] = Field(
description="Override the destination schema in the configured target for this job"
)
dbt_version_override: Optional[str] = Field(
description="Override the version of dbt used to run this job"
)
threads_override: Optional[int] = Field(
description="Override the number of threads used to run this job"
)
target_name_override: Optional[str] = Field(
description="Override the target.name context variable used when running this job"
)
generate_docs_override: Optional[bool] = Field(
description="Override whether or not this job generates docs (true=yes, false=no)"
)
timeout_seconds_override: Optional[int] = Field(
description="Override the timeout in seconds for this job"
)
steps_override: Optional[List[str]] = Field(
description="Override the list of steps for this job"
)
@validator("steps_override")
def check_steps_override_is_none_if_empty(cls, value):
return value or None
@property
def api_url(self) -> str:
return f"{super().api_url}/jobs/{self.job_id}/run/"
def execute(self) -> requests.Response:
response = requests.post(
url=self.api_url,
headers=self.request_headers,
json=self.get_payload(),
)
return response
| [
"pydantic.Field",
"pydantic.validator"
] | [((379, 487), 'pydantic.Field', 'Field', ([], {'default': '"""Triggered via API"""', 'description': '"""A text description of the reason for running this job"""'}), "(default='Triggered via API', description=\n 'A text description of the reason for running this job')\n", (384, 487), False, 'from pydantic import Field, validator\n'), ((535, 604), 'pydantic.Field', 'Field', ([], {'description': '"""The git sha to check out before running this job"""'}), "(description='The git sha to check out before running this job')\n", (540, 604), False, 'from pydantic import Field, validator\n'), ((651, 723), 'pydantic.Field', 'Field', ([], {'description': '"""The git branch to check out before running this job"""'}), "(description='The git branch to check out before running this job')\n", (656, 723), False, 'from pydantic import Field, validator\n'), ((775, 870), 'pydantic.Field', 'Field', ([], {'description': '"""Override the destination schema in the configured target for this job"""'}), "(description=\n 'Override the destination schema in the configured target for this job')\n", (780, 870), False, 'from pydantic import Field, validator\n'), ((922, 991), 'pydantic.Field', 'Field', ([], {'description': '"""Override the version of dbt used to run this job"""'}), "(description='Override the version of dbt used to run this job')\n", (927, 991), False, 'from pydantic import Field, validator\n'), ((1044, 1116), 'pydantic.Field', 'Field', ([], {'description': '"""Override the number of threads used to run this job"""'}), "(description='Override the number of threads used to run this job')\n", (1049, 1116), False, 'from pydantic import Field, validator\n'), ((1173, 1267), 'pydantic.Field', 'Field', ([], {'description': '"""Override the target.name context variable used when running this job"""'}), "(description=\n 'Override the target.name context variable used when running this job')\n", (1178, 1267), False, 'from pydantic import Field, validator\n'), ((1322, 1416), 
'pydantic.Field', 'Field', ([], {'description': '"""Override whether or not this job generates docs (true=yes, false=no)"""'}), "(description=\n 'Override whether or not this job generates docs (true=yes, false=no)')\n", (1327, 1416), False, 'from pydantic import Field, validator\n'), ((1472, 1537), 'pydantic.Field', 'Field', ([], {'description': '"""Override the timeout in seconds for this job"""'}), "(description='Override the timeout in seconds for this job')\n", (1477, 1537), False, 'from pydantic import Field, validator\n'), ((1594, 1654), 'pydantic.Field', 'Field', ([], {'description': '"""Override the list of steps for this job"""'}), "(description='Override the list of steps for this job')\n", (1599, 1654), False, 'from pydantic import Field, validator\n'), ((1675, 1702), 'pydantic.validator', 'validator', (['"""steps_override"""'], {}), "('steps_override')\n", (1684, 1702), False, 'from pydantic import Field, validator\n')] |
# Generated by Django 3.0.3 on 2020-10-14 12:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelzoo', '0023_model_con_mod_dims'),
]
operations = [
migrations.RemoveField(
model_name='model',
name='author_email',
),
migrations.RemoveField(
model_name='model',
name='author_name',
),
migrations.RemoveField(
model_name='model',
name='description',
),
migrations.RemoveField(
model_name='model',
name='paper',
),
migrations.RemoveField(
model_name='model',
name='readme_txt',
),
migrations.RemoveField(
model_name='model',
name='screenshot',
),
migrations.RemoveField(
model_name='model',
name='short_description',
),
migrations.AlterField(
model_name='model',
name='modality',
field=models.CharField(blank=True, choices=[('CT', 'CT'), ('Ultrasound', 'Ultrasound'), ('MRI', 'MRI'), ('PET', 'PET'), ('X-Ray', 'X-Ray')], default='', max_length=15),
),
]
| [
"django.db.migrations.RemoveField",
"django.db.models.CharField"
] | [((236, 299), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""model"""', 'name': '"""author_email"""'}), "(model_name='model', name='author_email')\n", (258, 299), False, 'from django.db import migrations, models\n'), ((344, 406), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""model"""', 'name': '"""author_name"""'}), "(model_name='model', name='author_name')\n", (366, 406), False, 'from django.db import migrations, models\n'), ((451, 513), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""model"""', 'name': '"""description"""'}), "(model_name='model', name='description')\n", (473, 513), False, 'from django.db import migrations, models\n'), ((558, 614), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""model"""', 'name': '"""paper"""'}), "(model_name='model', name='paper')\n", (580, 614), False, 'from django.db import migrations, models\n'), ((659, 720), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""model"""', 'name': '"""readme_txt"""'}), "(model_name='model', name='readme_txt')\n", (681, 720), False, 'from django.db import migrations, models\n'), ((765, 826), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""model"""', 'name': '"""screenshot"""'}), "(model_name='model', name='screenshot')\n", (787, 826), False, 'from django.db import migrations, models\n'), ((871, 939), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""model"""', 'name': '"""short_description"""'}), "(model_name='model', name='short_description')\n", (893, 939), False, 'from django.db import migrations, models\n'), ((1086, 1255), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('CT', 'CT'), ('Ultrasound', 'Ultrasound'), ('MRI', 'MRI'), ('PET', 'PET'),\n ('X-Ray', 'X-Ray')]", 'default': '""""""', 
'max_length': '(15)'}), "(blank=True, choices=[('CT', 'CT'), ('Ultrasound',\n 'Ultrasound'), ('MRI', 'MRI'), ('PET', 'PET'), ('X-Ray', 'X-Ray')],\n default='', max_length=15)\n", (1102, 1255), False, 'from django.db import migrations, models\n')] |
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import os
from time import time, localtime, strftime
import numpy as np
from scipy.io import savemat
from dotmap import DotMap
from src.modeling.trainers import BNN_trainer
from src.misc.DotmapUtils import get_required_argument
from src.misc.Agent import Agent
from src.modeling.trainers.registry import get_config
from src.controllers.MPC import MPC
SAVE_EVERY = 25
class MBExperiment:
def __init__(self, args):
"""Initializes class instance.
Argument:
params (DotMap): A DotMap containing the following:
.sim_cfg:
.env (gym.env): Environment for this experiment
.task_hor (int): Task horizon
.stochastic (bool): (optional) If True, agent adds noise to its actions.
Must provide noise_std (see below). Defaults to False.
.noise_std (float): for stochastic agents, noise of the form N(0, noise_std^2I)
will be added.
.exp_cfg:
.ntrain_iters (int): Number of training iterations to be performed.
.nrollouts_per_iter (int): (optional) Number of rollouts done between training
iterations. Defaults to 1.
.ninit_rollouts (int): (optional) Number of initial rollouts. Defaults to 1.
.policy (controller): Policy that will be trained.
.log_cfg:
.logdir (str): Parent of directory path where experiment data will be saved.
Experiment will be saved in logdir/<date+time of experiment start>
.nrecord (int): (optional) Number of rollouts to record for every iteration.
Defaults to 0.
.neval (int): (optional) Number of rollouts for performance evaluation.
Defaults to 1.
"""
self.args = args
self.env_config = get_config(self.args.env)(self.args)
self.env = self.env_config.env
self.agent = Agent(self.args, self.env)
self.model = self.env_config.nn_constructor()
self.model_trainer = BNN_trainer(self.args, self.model)
self.policy = MPC(
self.env_config, self.args, self.model_trainer
) # TODO: Convert MPC and make an object here; we need a get controller here
def run_experiment(self):
"""Perform experiment."""
# os.makedirs(self.logdir, exist_ok=True)
traj_obs, traj_acs, traj_rets, traj_rews = [], [], [], []
# Perform initial rollouts
samples = []
for i in range(self.args.ninit_rollouts):
samples.append(self.agent.sample(self.args.task_hor, self.policy))
traj_obs.append(samples[-1]["obs"])
traj_acs.append(samples[-1]["ac"])
traj_rews.append(samples[-1]["rewards"])
if self.args.ninit_rollouts > 0:
self.policy.train(
[sample["obs"] for sample in samples],
[sample["ac"] for sample in samples],
[sample["rewards"] for sample in samples],
)
# Training loop
for i in range(self.args.ntrain_iters):
print(
"####################################################################"
)
print("Starting training iteration %d." % (i + 1))
# iter_dir = os.path.join(self.logdir, "train_iter%d" % (i + 1))
# os.makedirs(iter_dir, exist_ok=True)
samples = []
for j in range(self.args.n_record):
samples.append(
self.agent.sample(
self.args.task_hor,
self.policy,
None
# os.path.join(self.args.output_dir, "rollout%d.mp4" % j),
)
)
# if self.args.nrecord > 0:
# for item in filter(lambda f: f.endswith(".json"), os.listdir(iter_dir)):
# os.remove(os.path.join(iter_dir, item))
for j in range(
max(self.args.n_eval, self.args.nrollouts_per_iter) - self.args.n_record
):
samples.append(self.agent.sample(self.args.task_hor, self.policy))
print(
"Rewards obtained:",
[sample["reward_sum"] for sample in samples[: self.args.n_eval]],
)
traj_obs.extend(
[sample["obs"] for sample in samples[: self.args.nrollouts_per_iter]]
)
traj_acs.extend(
[sample["ac"] for sample in samples[: self.args.nrollouts_per_iter]]
)
traj_rets.extend(
[sample["reward_sum"] for sample in samples[: self.args.n_eval]]
)
traj_rews.extend(
[
sample["rewards"]
for sample in samples[: self.args.nrollouts_per_iter]
]
)
samples = samples[: self.args.nrollouts_per_iter]
savemat(
os.path.join(self.args.output_dir, "logs.mat"),
{
"observations": traj_obs,
"actions": traj_acs,
"returns": traj_rets,
"rewards": traj_rews,
},
)
if i < self.args.ntrain_iters - 1:
self.policy.train(
[sample["obs"] for sample in samples],
[sample["ac"] for sample in samples],
[sample["rewards"] for sample in samples],
)
# Delete iteration directory if not used
if len(os.listdir(self.args.output_dir)) == 0:
os.rmdir(self.args.output_dir)
| [
"os.listdir",
"src.modeling.trainers.BNN_trainer",
"os.path.join",
"src.modeling.trainers.registry.get_config",
"src.controllers.MPC.MPC",
"os.rmdir",
"src.misc.Agent.Agent"
] | [((2186, 2212), 'src.misc.Agent.Agent', 'Agent', (['self.args', 'self.env'], {}), '(self.args, self.env)\n', (2191, 2212), False, 'from src.misc.Agent import Agent\n'), ((2296, 2330), 'src.modeling.trainers.BNN_trainer', 'BNN_trainer', (['self.args', 'self.model'], {}), '(self.args, self.model)\n', (2307, 2330), False, 'from src.modeling.trainers import BNN_trainer\n'), ((2353, 2404), 'src.controllers.MPC.MPC', 'MPC', (['self.env_config', 'self.args', 'self.model_trainer'], {}), '(self.env_config, self.args, self.model_trainer)\n', (2356, 2404), False, 'from src.controllers.MPC import MPC\n'), ((2088, 2113), 'src.modeling.trainers.registry.get_config', 'get_config', (['self.args.env'], {}), '(self.args.env)\n', (2098, 2113), False, 'from src.modeling.trainers.registry import get_config\n'), ((5271, 5317), 'os.path.join', 'os.path.join', (['self.args.output_dir', '"""logs.mat"""'], {}), "(self.args.output_dir, 'logs.mat')\n", (5283, 5317), False, 'import os\n'), ((5951, 5981), 'os.rmdir', 'os.rmdir', (['self.args.output_dir'], {}), '(self.args.output_dir)\n', (5959, 5981), False, 'import os\n'), ((5895, 5927), 'os.listdir', 'os.listdir', (['self.args.output_dir'], {}), '(self.args.output_dir)\n', (5905, 5927), False, 'import os\n')] |
from eblib import libcollect
# Create a LibCollect object
lc = libcollect.LibCollect()
# Prepare arguments for do_collect
#
# Path to the script (can be absolute or relative)
scriptname = 'plotting_data_monitor.pyw'
# Ask the resulting distribution to be placed in
# directory distrib
targetdir = 'distrib'
# Specify which libraries to exclude from the
# distribution (because you know they're installed
# on the target machine)
excludes = ["PyQt4",
"numpy",
"serial",
"pywin",
"win32api",
"win32com"]
# This does the actual work
# See the documentation of LibCollect for more options
#
lc.do_collect( scriptname,
targetdir,
excludes,
verbose=True)
| [
"eblib.libcollect.LibCollect"
] | [((67, 90), 'eblib.libcollect.LibCollect', 'libcollect.LibCollect', ([], {}), '()\n', (88, 90), False, 'from eblib import libcollect\n')] |
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
import boto3
import pytest
from botocore.stub import Stubber
@pytest.fixture()
def test_datadir(request, datadir):
"""
Inject the datadir with resources for the specific test function.
If the test function is declared in a class then datadir is ClassName/FunctionName
otherwise it is only FunctionName.
"""
function_name = request.function.__name__
if not request.cls:
return datadir / function_name
class_name = request.cls.__name__
return datadir / "{0}/{1}".format(class_name, function_name)
@pytest.fixture()
def boto3_stubber(mocker, boto3_stubber_path):
"""
Create a function to easily mock boto3 clients.
To mock a boto3 service simply pass the name of the service to mock and
the mocked requests, where mocked_requests is an object containing the method to mock,
the response to return and the expected params for the boto3 method that gets called.
The function makes use of botocore.Stubber to mock the boto3 API calls.
Multiple boto3 services can be mocked as part of the same test.
:param boto3_stubber_path is the path of the boto3 import to mock. (e.g. pcluster.config.validators.boto3)
"""
__tracebackhide__ = True
created_stubbers = []
mocked_clients = {}
mocked_client_factory = mocker.patch(boto3_stubber_path, autospec=True)
# use **kwargs to skip parameters passed to the boto3.client other than the "service"
# e.g. boto3.client("ec2", region_name=region, ...) --> x = ec2
mocked_client_factory.client.side_effect = lambda x, **kwargs: mocked_clients[x]
def _boto3_stubber(service, mocked_requests):
client = boto3.client(service)
stubber = Stubber(client)
# Save a ref to the stubber so that we can deactivate it at the end of the test.
created_stubbers.append(stubber)
# Attach mocked requests to the Stubber and activate it.
if not isinstance(mocked_requests, list):
mocked_requests = [mocked_requests]
for mocked_request in mocked_requests:
if mocked_request.generate_error:
stubber.add_client_error(
mocked_request.method,
service_message=mocked_request.response,
expected_params=mocked_request.expected_params,
service_error_code=mocked_request.error_code,
)
else:
stubber.add_response(
mocked_request.method, mocked_request.response, expected_params=mocked_request.expected_params
)
stubber.activate()
# Add stubber to the collection of mocked clients. This allows to mock multiple clients.
# Mocking twice the same client will replace the previous one.
mocked_clients[service] = client
return client
# yield allows to return the value and then continue the execution when the test is over.
# Used for resources cleanup.
yield _boto3_stubber
# Assert that all mocked requests were consumed and deactivate all stubbers.
for stubber in created_stubbers:
stubber.assert_no_pending_responses()
stubber.deactivate()
| [
"pytest.fixture",
"boto3.client",
"botocore.stub.Stubber"
] | [((618, 634), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (632, 634), False, 'import pytest\n'), ((1100, 1116), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (1114, 1116), False, 'import pytest\n'), ((2214, 2235), 'boto3.client', 'boto3.client', (['service'], {}), '(service)\n', (2226, 2235), False, 'import boto3\n'), ((2254, 2269), 'botocore.stub.Stubber', 'Stubber', (['client'], {}), '(client)\n', (2261, 2269), False, 'from botocore.stub import Stubber\n')] |