code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
# -*- coding: utf-8 -*-
"""\
This is a python port of "Goose" orignialy licensed to Gravity.com
under one or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.
Python port was written by <NAME>
Gravity.com licenses this file
to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
try:
import urllib2 as request
except ImportError:
from urllib import request
class HtmlFetcher(object):
    """Fetch the raw HTML body of a URL using the configured user agent."""

    def __init__(self):
        pass

    def get_http_client(self):
        # Placeholder kept for API compatibility; no pooled client is used.
        pass

    def get_html(self, config, url):
        """Download *url* and return the raw response bytes.

        Uses ``config.browser_user_agent`` as the User-agent header.
        Returns None on any fetch failure instead of raising.
        """
        headers = {'User-agent': config.browser_user_agent}
        req = request.Request(url, headers=headers)
        try:
            result = request.urlopen(req).read()
        except Exception:
            # Best-effort fetch: swallow network/HTTP errors, signal with
            # None. (Narrowed from a bare except, which also trapped
            # KeyboardInterrupt/SystemExit.)
            return None
        return result
| [
"urllib.request.Request",
"urllib.request.urlopen"
] | [((1178, 1215), 'urllib.request.Request', 'request.Request', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (1193, 1215), False, 'from urllib import request\n'), ((1251, 1271), 'urllib.request.urlopen', 'request.urlopen', (['req'], {}), '(req)\n', (1266, 1271), False, 'from urllib import request\n')] |
"""
Helper functions.
"""
import os
import json
import six
import argparse
import subprocess
def ensure_dir(d, verbose=True):
    """Create directory *d* (and any missing parents) if it does not exist.

    Args:
        d: path of the directory to create.
        verbose: if True, print a message when the directory is created.
    """
    if not os.path.exists(d):
        if verbose:
            print("Directory {} do not exist; creating...".format(d))
        # exist_ok=True closes the TOCTOU race: another process may create
        # d between the exists() check above and this call.
        os.makedirs(d, exist_ok=True)
class FileLogger(object):
    """Append-only logger that re-opens its file on every write.

    Keeping the file handle short-lived lets other processes read the
    log while the program is still running.
    """

    def __init__(self, filename, header=None):
        self.filename = filename
        # Always start from a fresh file.
        if os.path.exists(filename):
            os.remove(filename)
        if header is not None:
            with open(filename, 'w') as out:
                out.write(str(header) + "\n")

    def log(self, message):
        """Append *message* (followed by a newline) to the log file."""
        with open(self.filename, 'a') as out:
            out.write(str(message) + "\n")
def print_arguments(args):
    """Print all parsed command-line arguments, sorted by name.

    Args:
        args: an ``argparse.Namespace`` (or any object usable with vars()).
    """
    print('----------- Configuration Arguments -----------')
    # vars(args).items() works on both Python 2 and 3, removing the
    # dependency on six.iteritems.
    for arg, value in sorted(vars(args).items()):
        print('%s: %s' % (arg, value))
    print('------------------------------------------------')
def unpack_raw_data(raw_data, batch_size=32):
    """Flatten (sentence, aspect) pairs and order each batch by length.

    Every aspect of every record becomes one example; examples are then
    grouped into fixed-size batches, and within each batch sorted so
    that longer token sequences come first (ties broken by later index
    first), which eases sequence padding downstream.
    """
    # One example per (sentence, aspect) pair.
    flat = [
        {'token': rec['token'], 'aspect': asp, 'polarity': asp['polarity']}
        for rec in raw_data
        for asp in rec['aspects']
    ]
    result = []
    for start in range(0, len(flat), batch_size):
        batch = flat[start:start + batch_size]
        # Longest-first within the batch; same (length, index) key as a
        # reverse sort over zipped (item, length, index) triples.
        order = sorted(
            range(len(batch)),
            key=lambda i: (len(batch[i]['token']), i),
            reverse=True,
        )
        result.extend(batch[i] for i in order)
    return result
| [
"os.path.exists",
"os.makedirs",
"os.remove"
] | [((138, 155), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (152, 155), False, 'import os\n'), ((255, 269), 'os.makedirs', 'os.makedirs', (['d'], {}), '(d)\n', (266, 269), False, 'import os\n'), ((472, 496), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (486, 496), False, 'import os\n'), ((544, 563), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (553, 563), False, 'import os\n')] |
# Django
from django.db import models
# Utilities
from utils.models import CommonModel
class Author(CommonModel):
    """Database model for a book author.

    Only the author's display name is stored on this model.
    """

    # Required human-readable name (blank/null values are rejected).
    name = models.CharField(
        'name',
        help_text="Name author",
        max_length=150,
        blank=False,
        null=False,
    )

    def __str__(self):
        """Use the author's name as the string representation."""
        return self.name
| [
"django.db.models.CharField"
] | [((215, 310), 'django.db.models.CharField', 'models.CharField', (['"""name"""'], {'max_length': '(150)', 'blank': '(False)', 'null': '(False)', 'help_text': '"""Name author"""'}), "('name', max_length=150, blank=False, null=False, help_text\n ='Name author')\n", (231, 310), False, 'from django.db import models\n')] |
from PyQt5 import uic, QtWidgets
import mysql.connector
from reportlab.pdfgen import canvas
global_id = 0  # id of the product currently being edited (set by editar_dados)
# Open the MySQL connection shared by every screen of the application.
banco_de_dados = mysql.connector.connect(
    host="localhost",
    user="root",
    password="",
    database='cadastro_produtos'
)
def funcao_principal():
    """Read the product form, insert the product into MySQL, clear the form."""
    # Fetch the values typed into the form fields
    codigo = formulario.lineEdit.text()
    descricao = formulario.lineEdit_2.text()
    preco = formulario.lineEdit_3.text()
    categoria = ''
    # Check which radio button option is selected
    if formulario.radioButton.isChecked():
        categoria = 'Informática'
    elif formulario.radioButton_2.isChecked():
        categoria = 'Alimentos'
    else:
        categoria = 'Eletrônicos'
    # Echo the submitted data on the terminal
    print('='*30)
    print('Código: ', codigo)
    print('Descrição: ', descricao)
    print('Categoria: ', categoria)
    print('Preço: ', preco)
    print('='*30)
    cursor = banco_de_dados.cursor() # create the cursor
    comando_SQL = "INSERT INTO produtos (codigo,descricao,categoria,preco) VALUES (%s,%s,%s,%s)"# insert the data into the MySQL database
    dados = (str(codigo), str(descricao), str(categoria), str(preco))# bind the values
    cursor.execute(comando_SQL,dados)
    banco_de_dados.commit()
    # Clear the form fields after submitting
    formulario.lineEdit.setText('')
    formulario.lineEdit_2.setText('')
    formulario.lineEdit_3.setText('')
    formulario.radioButton.setCheckable(False)
def chama_listagem():
    """Switch from the form window to the listing window and fill the table."""
    formulario.close()
    listagem.show()
    cursor = banco_de_dados.cursor() # create the cursor
    comando_SQL = "SELECT * FROM produtos"
    cursor.execute(comando_SQL)
    dados_lidos = cursor.fetchall()
    print(dados_lidos)
    # One table row per product, five columns (id, codigo, descricao, categoria, preco)
    listagem.tableWidget.setRowCount(len(dados_lidos))
    listagem.tableWidget.setColumnCount(5)
    for linha in range(0, len(dados_lidos)):
        for coluna in range(0,5):
            listagem.tableWidget.setItem(linha,coluna,QtWidgets.QTableWidgetItem(str(dados_lidos[linha][coluna])))
def gerar_pdf():
    """Export every product row into 'listagem_produtos.pdf' via reportlab."""
    cursor = banco_de_dados.cursor() # create the cursor
    comando_SQL = "SELECT * FROM produtos"
    cursor.execute(comando_SQL)
    dados_lidos = cursor.fetchall()
    y=0
    pdf = canvas.Canvas('listagem_produtos.pdf')
    # Title and column headers (reportlab coordinates grow upward from the
    # bottom-left corner of the page).
    pdf.setFont('Times-Bold', 25)
    pdf.drawString(200,800, 'Produtos cadastrados: ')
    pdf.setFont('Times-Bold', 18)
    pdf.drawString(10,750, 'ID')
    pdf.drawString(110,750, 'CÓDIGO')
    pdf.drawString(210,750, 'DESCRIÇÃO')
    pdf.drawString(310,750, 'PREÇO')
    pdf.drawString(410,750, 'CATEGORIA')
    # One 50pt-spaced row per product, columns aligned with the headers.
    for i in range(0, len(dados_lidos)):
        y += 50
        pdf.drawString(10,750 - y, str(dados_lidos[i][0]))
        pdf.drawString(110,750 - y, str(dados_lidos[i][1]))
        pdf.drawString(210,750 - y, str(dados_lidos[i][2]))
        pdf.drawString(310,750 - y, str(dados_lidos[i][3]))
        pdf.drawString(410,750 - y, str(dados_lidos[i][4]))
    pdf.save()
    print('PDF foi gerado com sucesso!')
def excluir_dados():
    """Delete the selected row from the table widget and from the database."""
    linha = listagem.tableWidget.currentRow()
    listagem.tableWidget.removeRow(linha)
    cursor = banco_de_dados.cursor()  # create the cursor
    cursor.execute('SELECT id FROM produtos')
    ids = cursor.fetchall()  # renamed from `id` to avoid shadowing the builtin
    valor_id = ids[linha][0]
    # Parameterized query instead of an f-string: the driver handles value
    # quoting, preventing SQL injection.
    cursor.execute('DELETE FROM produtos WHERE id = %s', (valor_id,))
    banco_de_dados.commit()
def editar_dados():
    """Open the edit window pre-filled with the selected product's data."""
    global global_id
    linha = listagem.tableWidget.currentRow()
    cursor = banco_de_dados.cursor()  # create the cursor
    cursor.execute('SELECT id FROM produtos')
    ids = cursor.fetchall()  # renamed from `id` to avoid shadowing the builtin
    valor_id = ids[linha][0]
    # Parameterized query instead of string concatenation (SQL injection safe).
    cursor.execute('SELECT * FROM produtos WHERE id = %s', (valor_id,))
    produto = cursor.fetchall()
    tela_editar.show()
    # Remember which product is being edited so salvar_dados_editados can
    # target the right row.
    global_id = valor_id
    tela_editar.lineEdit.setText(str(produto[0][0]))
    tela_editar.lineEdit_2.setText(str(produto[0][1]))
    tela_editar.lineEdit_3.setText(str(produto[0][2]))
    tela_editar.lineEdit_4.setText(str(produto[0][3]))
    tela_editar.lineEdit_5.setText(str(produto[0][4]))
def salvar_dados_editados():
    """Persist the edited product fields and refresh the listing window."""
    # Capture the id of the product being edited (set by editar_dados)
    global global_id
    # Values currently present in the edit-window fields
    codigo = tela_editar.lineEdit_2.text()
    descricao = tela_editar.lineEdit_3.text()
    preco = tela_editar.lineEdit_4.text()
    categoria = tela_editar.lineEdit_5.text()
    # Update the database row. Parameterized query instead of str.format:
    # these values come straight from user input, so building SQL by string
    # formatting was an SQL-injection hole.
    cursor = banco_de_dados.cursor()
    cursor.execute(
        "UPDATE produtos SET codigo = %s, preco = %s, categoria = %s, descricao = %s WHERE id = %s",
        (codigo, preco, categoria, descricao, global_id),
    )
    banco_de_dados.commit()
    print('='*20)
    print('Produto alterado com sucesso!')
    print('='*20)
    # Refresh the windows
    tela_editar.close()
    listagem.close()
    chama_listagem()
app=QtWidgets.QApplication([])# create the application
formulario=uic.loadUi("formulario01.ui")# load the form window
listagem=uic.loadUi("listagem.ui")  # product listing window
tela_editar = uic.loadUi('tela_salvar.ui')  # edit/save window
# Wire each button to its handler.
formulario.pushButton.clicked.connect(funcao_principal)
formulario.pushButton_2.clicked.connect(chama_listagem)
listagem.pushButton.clicked.connect(gerar_pdf)
listagem.pushButton_2.clicked.connect(excluir_dados)
listagem.pushButton_3.clicked.connect(editar_dados)
tela_editar.pushButton.clicked.connect(salvar_dados_editados)
formulario.show()
app.exec() | [
"PyQt5.uic.loadUi",
"PyQt5.QtWidgets.QApplication",
"reportlab.pdfgen.canvas.Canvas"
] | [((4847, 4873), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['[]'], {}), '([])\n', (4869, 4873), False, 'from PyQt5 import uic, QtWidgets\n'), ((4902, 4931), 'PyQt5.uic.loadUi', 'uic.loadUi', (['"""formulario01.ui"""'], {}), "('formulario01.ui')\n", (4912, 4931), False, 'from PyQt5 import uic, QtWidgets\n'), ((4959, 4984), 'PyQt5.uic.loadUi', 'uic.loadUi', (['"""listagem.ui"""'], {}), "('listagem.ui')\n", (4969, 4984), False, 'from PyQt5 import uic, QtWidgets\n'), ((4999, 5027), 'PyQt5.uic.loadUi', 'uic.loadUi', (['"""tela_salvar.ui"""'], {}), "('tela_salvar.ui')\n", (5009, 5027), False, 'from PyQt5 import uic, QtWidgets\n'), ((2267, 2305), 'reportlab.pdfgen.canvas.Canvas', 'canvas.Canvas', (['"""listagem_produtos.pdf"""'], {}), "('listagem_produtos.pdf')\n", (2280, 2305), False, 'from reportlab.pdfgen import canvas\n')] |
#!/usr/bin/env python2
import argparse
from PIL import Image
from onnx import onnx
import os
import sys
import struct
def read_array_from_tensor(filename):
    """Load a 1xN float32 ONNX TensorProto file and return its values.

    Returns a tuple of floats unpacked from the tensor's raw data.
    """
    tensor = onnx.TensorProto()
    with open(filename, 'rb') as fp:
        tensor.ParseFromString(fp.read())
    # Only a single-row float tensor (data_type 1 == FLOAT) is supported.
    assert tensor.data_type == 1
    assert len(tensor.dims) == 2
    assert tensor.dims[0] == 1
    # Element count is the product of the two dimensions.
    count = tensor.dims[0] * tensor.dims[1]
    return struct.unpack('%sf' % count, tensor.raw_data)
def read_array_from_dimg(filename):
    """Parse the first line of an NVDLA .dimg dump into a list of floats."""
    with open(filename, 'r') as fp:
        line = fp.readline()
    # The line is space-separated with a trailing separator, so the final
    # (non-numeric) token is discarded.
    values = line.split(' ')[:-1]
    return [float(v) for v in values]
def read_array(filename):
    """Dispatch on file extension: .dimg text dumps vs ONNX tensor files."""
    _, ext = os.path.splitext(filename)
    if ext == '.dimg':
        return read_array_from_dimg(filename)
    return read_array_from_tensor(filename)
def main():
    """CLI entry point: print the top-N (index, score) ranks of a file."""
    parser = argparse.ArgumentParser(description='Display prediction ranks of given file.')
    parser.add_argument('input', type=str, metavar='INPUT',
                        help='ONNX tensor for NVDLA rawdump file')
    parser.add_argument('-n', type=int, metavar='N', default=5,
                        help='Max line of ranks')
    args = parser.parse_args()
    # Highest score first; ties keep their original relative order.
    ranked = sorted(enumerate(read_array(args.input)),
                    key=lambda pair: pair[1], reverse=True)
    for idx, value in ranked[:min(args.n, len(ranked))]:
        sys.stdout.write('{}: {}\n'.format(idx, value))
    return 0
# Script entry point: exit with main()'s return code.
if __name__ == '__main__':
    sys.exit(main())
| [
"onnx.onnx.TensorProto",
"struct.unpack",
"os.path.splitext",
"argparse.ArgumentParser"
] | [((171, 189), 'onnx.onnx.TensorProto', 'onnx.TensorProto', ([], {}), '()\n', (187, 189), False, 'from onnx import onnx\n'), ((512, 556), 'struct.unpack', 'struct.unpack', (["('%sf' % size)", 'tensor.raw_data'], {}), "('%sf' % size, tensor.raw_data)\n", (525, 556), False, 'import struct\n'), ((963, 1041), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Display prediction ranks of given file."""'}), "(description='Display prediction ranks of given file.')\n", (986, 1041), False, 'import argparse\n'), ((774, 800), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (790, 800), False, 'import os\n')] |
from django.urls import path
from rest_framework_jwt.views import obtain_jwt_token
from rest_framework import routers
from .views import (BankingUserCreateViewSet, BankingUserVerifyViewSet,
BankAccountViewSet, TransactionViewSet)
# Router registration provides the standard viewset routes under /account/.
router = routers.SimpleRouter()
router.register('account', BankAccountViewSet)
urlpatterns = [
    path(
        'auth/register/',
        BankingUserCreateViewSet.as_view({'post': 'create'}),
        name='user-register'
    ),
    path(
        'auth/verify/',
        BankingUserVerifyViewSet.as_view({'post': 'verify'}),
        name='user-verify'
    ),
    # NOTE(review): this name duplicates the verify route above; it likely
    # should be 'user-login' — confirm no reverse() lookups depend on it.
    path('auth/login/', obtain_jwt_token, name='user-verify'),
    path('accounts/', BankAccountViewSet.as_view({
        'post': 'create',
        'get': 'list'})),
    path('accounts/<int:pk>/', BankAccountViewSet.as_view({
        'get': 'retrieve',
        'put': 'update',
        'delete': 'destroy'})),
    # NOTE(review): name 'account-deposit' looks stale for a generic
    # transactions endpoint — verify before renaming.
    path('transactions/', TransactionViewSet.as_view({
        'post': 'do_transact',
        'get': 'list'}),
        name='account-deposit'),
]
| [
"rest_framework.routers.SimpleRouter",
"django.urls.path"
] | [((261, 283), 'rest_framework.routers.SimpleRouter', 'routers.SimpleRouter', ([], {}), '()\n', (281, 283), False, 'from rest_framework import routers\n'), ((616, 673), 'django.urls.path', 'path', (['"""auth/login/"""', 'obtain_jwt_token'], {'name': '"""user-verify"""'}), "('auth/login/', obtain_jwt_token, name='user-verify')\n", (620, 673), False, 'from django.urls import path\n')] |
import os
import flopy.utils.binaryfile as bf
class ReadConcentration:
    """Read MT3D concentration results (MT3D001.UCN) from a model workspace.

    All read_* methods return an empty/zero fallback instead of raising
    when the UCN file is missing or unreadable.
    """

    # Full path of the UCN file, or None if not found in the workspace.
    _filename = None

    def __init__(self, workspace):
        """Locate the MT3D001.UCN file (case-insensitive) inside *workspace*."""
        for file in os.listdir(workspace):
            if file.upper() == "MT3D001.UCN":
                self._filename = os.path.join(workspace, file)

    def read_times(self):
        """Return the list of output times, or [] if the file cannot be read."""
        try:
            ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
            return ucn_obj.get_times()
        except Exception:
            # Narrowed from a bare except: still best-effort, but no longer
            # swallows KeyboardInterrupt/SystemExit.
            return []

    def read_number_of_layers(self):
        """Return the layer count of the model, or 0 on failure."""
        try:
            ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
            number_of_layers = ucn_obj.get_data().shape[0]
            return number_of_layers
        except Exception:
            return 0

    def read_layer(self, totim, layer):
        """Return the concentration grid of *layer* at time *totim*.

        Values are rounded to 2 decimals; no-data values (< -999) are
        mapped to None. Returns [] on failure.
        """
        try:
            ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
            data = ucn_obj.get_data(totim=totim, mflay=layer).tolist()
            for i in range(len(data)):
                for j in range(len(data[i])):
                    data[i][j] = round(data[i][j], 2)
                    if data[i][j] < -999:
                        data[i][j] = None
            return data
        except Exception:
            return []

    def read_ts(self, layer, row, column):
        """Return the time series at one cell, or [] on failure."""
        try:
            ucn_obj = bf.UcnFile(filename=self._filename, precision='single')
            return ucn_obj.get_ts(idx=(layer, row, column)).tolist()
        except Exception:
            return []
| [
"os.listdir",
"os.path.join",
"flopy.utils.binaryfile.UcnFile"
] | [((150, 171), 'os.listdir', 'os.listdir', (['workspace'], {}), '(workspace)\n', (160, 171), False, 'import os\n'), ((357, 412), 'flopy.utils.binaryfile.UcnFile', 'bf.UcnFile', ([], {'filename': 'self._filename', 'precision': '"""single"""'}), "(filename=self._filename, precision='single')\n", (367, 412), True, 'import flopy.utils.binaryfile as bf\n'), ((563, 618), 'flopy.utils.binaryfile.UcnFile', 'bf.UcnFile', ([], {'filename': 'self._filename', 'precision': '"""single"""'}), "(filename=self._filename, precision='single')\n", (573, 618), True, 'import flopy.utils.binaryfile as bf\n'), ((827, 882), 'flopy.utils.binaryfile.UcnFile', 'bf.UcnFile', ([], {'filename': 'self._filename', 'precision': '"""single"""'}), "(filename=self._filename, precision='single')\n", (837, 882), True, 'import flopy.utils.binaryfile as bf\n'), ((1318, 1373), 'flopy.utils.binaryfile.UcnFile', 'bf.UcnFile', ([], {'filename': 'self._filename', 'precision': '"""single"""'}), "(filename=self._filename, precision='single')\n", (1328, 1373), True, 'import flopy.utils.binaryfile as bf\n'), ((252, 281), 'os.path.join', 'os.path.join', (['workspace', 'file'], {}), '(workspace, file)\n', (264, 281), False, 'import os\n')] |
from unittest.mock import patch, MagicMock, PropertyMock
import pytest
from cincoconfig.core import ConfigFormat
from cincoconfig.formats.json import JsonConfigFormat
from cincoconfig.formats.bson import BsonConfigFormat
from cincoconfig.formats.yaml import YamlConfigFormat
from cincoconfig.formats.xml import XmlConfigFormat
from cincoconfig.formats.pickle import PickleConfigFormat
class TestFormatRegistry:
    """Unit tests for the ConfigFormat registry class-methods.

    The tests poke at the name-mangled private attributes
    (``_ConfigFormat__registry`` / ``_ConfigFormat__initialized``) to
    reset and inspect registry state between tests.
    """

    def setup_method(self, _):
        # Reset the registry to a pristine, uninitialized state before
        # every test so tests cannot leak registrations into each other.
        ConfigFormat._ConfigFormat__registry = {}
        ConfigFormat._ConfigFormat__initialized = False

    def test_register(self):
        # register() should store the format class under the given key.
        fmt = MagicMock
        ConfigFormat.register('blah', fmt)
        assert ConfigFormat._ConfigFormat__registry['blah'] is fmt

    def test_get(self):
        # get() should instantiate the registered factory with the kwargs.
        fmt = MagicMock()
        fmt.return_value = 'hello'
        ConfigFormat._ConfigFormat__registry['blah'] = fmt
        ConfigFormat._ConfigFormat__initialized = True
        check = ConfigFormat.get('blah', x=1, y='2')
        fmt.assert_called_once_with(x=1, y='2')
        assert check == 'hello'

    @patch.object(ConfigFormat, 'initialize_registry')
    def test_get_initialize(self, mock_init):
        # get() must lazily initialize the registry when not yet initialized.
        ConfigFormat._ConfigFormat__registry['blah'] = MagicMock()
        ConfigFormat.get('blah')
        mock_init.assert_called_once()

    def test_get_no_exists(self):
        # Unknown format names propagate the KeyError.
        with pytest.raises(KeyError):
            ConfigFormat.get('asdfasdfasdf')

    def test_base_formats(self):
        # initialize_registry() registers exactly the built-in formats.
        ConfigFormat.initialize_registry()
        assert ConfigFormat._ConfigFormat__registry == {
            'json': JsonConfigFormat,
            'yaml': YamlConfigFormat,
            'bson': BsonConfigFormat,
            'pickle': PickleConfigFormat,
            'xml': XmlConfigFormat
        }

    def test_initialize_cache(self):
        # A second initialize_registry() call must not rebuild the registry.
        ConfigFormat.initialize_registry()
        reg = ConfigFormat._ConfigFormat__registry = object()
        ConfigFormat.initialize_registry()
        assert ConfigFormat._ConfigFormat__registry is reg
| [
"cincoconfig.core.ConfigFormat.get",
"unittest.mock.MagicMock",
"cincoconfig.core.ConfigFormat.register",
"cincoconfig.core.ConfigFormat.initialize_registry",
"pytest.raises",
"unittest.mock.patch.object"
] | [((1055, 1104), 'unittest.mock.patch.object', 'patch.object', (['ConfigFormat', '"""initialize_registry"""'], {}), "(ConfigFormat, 'initialize_registry')\n", (1067, 1104), False, 'from unittest.mock import patch, MagicMock, PropertyMock\n'), ((613, 647), 'cincoconfig.core.ConfigFormat.register', 'ConfigFormat.register', (['"""blah"""', 'fmt'], {}), "('blah', fmt)\n", (634, 647), False, 'from cincoconfig.core import ConfigFormat\n'), ((754, 765), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (763, 765), False, 'from unittest.mock import patch, MagicMock, PropertyMock\n'), ((932, 968), 'cincoconfig.core.ConfigFormat.get', 'ConfigFormat.get', (['"""blah"""'], {'x': '(1)', 'y': '"""2"""'}), "('blah', x=1, y='2')\n", (948, 968), False, 'from cincoconfig.core import ConfigFormat\n'), ((1206, 1217), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1215, 1217), False, 'from unittest.mock import patch, MagicMock, PropertyMock\n'), ((1226, 1250), 'cincoconfig.core.ConfigFormat.get', 'ConfigFormat.get', (['"""blah"""'], {}), "('blah')\n", (1242, 1250), False, 'from cincoconfig.core import ConfigFormat\n'), ((1450, 1484), 'cincoconfig.core.ConfigFormat.initialize_registry', 'ConfigFormat.initialize_registry', ([], {}), '()\n', (1482, 1484), False, 'from cincoconfig.core import ConfigFormat\n'), ((1790, 1824), 'cincoconfig.core.ConfigFormat.initialize_registry', 'ConfigFormat.initialize_registry', ([], {}), '()\n', (1822, 1824), False, 'from cincoconfig.core import ConfigFormat\n'), ((1895, 1929), 'cincoconfig.core.ConfigFormat.initialize_registry', 'ConfigFormat.initialize_registry', ([], {}), '()\n', (1927, 1929), False, 'from cincoconfig.core import ConfigFormat\n'), ((1338, 1361), 'pytest.raises', 'pytest.raises', (['KeyError'], {}), '(KeyError)\n', (1351, 1361), False, 'import pytest\n'), ((1375, 1407), 'cincoconfig.core.ConfigFormat.get', 'ConfigFormat.get', (['"""asdfasdfasdf"""'], {}), "('asdfasdfasdf')\n", (1391, 1407), False, 'from 
cincoconfig.core import ConfigFormat\n')] |
from DaPy.core import Series, SeriesSet
from DaPy.core import is_seq
from copy import copy
def proba2label(seq, labels):
    """Convert model probability outputs into class labels.

    Multi-column outputs are treated as multi-class (argmax), a single
    column as a binary score thresholded at 0.5.
    """
    # Wrap plain sequences so that .shape is available.
    if not hasattr(seq, 'shape'):
        seq = SeriesSet(seq)
    if seq.shape[1] > 1:
        return clf_multilabel(seq, labels)
    return clf_binlabel(seq, labels)
def clf_multilabel(seq, groupby=None):
    """Map per-row argmax column indices to label names.

    *groupby* may be a sequence of label names (mapped by position) or a
    dict of {index: name}; unknown indices fall through unchanged.
    """
    if is_seq(groupby):
        # Positional label list -> {index: name} lookup table.
        groupby = dict(enumerate(map(str, groupby)))
    if not groupby:
        groupby = {}
    assert isinstance(groupby, dict), '`labels` must be a list of str or dict object.'
    max_ind = seq.argmax(axis=1).T.tolist()[0]
    return Series(groupby.get(int(ind), ind) for ind in max_ind)
def clf_binlabel(seq, labels, cutpoint=0.5):
    """Binary labelling: labels[0] for scores >= cutpoint, labels[1] otherwise."""
    mapped = [labels[0] if score >= cutpoint else labels[1] for score in seq]
    return Series(mapped)
class BaseClassifier(object):
    """Common plumbing shared by the classifier models.

    Subclasses are expected to provide ``self._engine`` (matrix backend)
    and ``self._forecast`` (the forward pass).
    """

    def __init__(self):
        self._labels = []

    @property
    def labels(self):
        """A shallow copy of the class labels, safe for callers to mutate."""
        return copy(self._labels)

    def _calculate_accuracy(self, predict, target):
        """Fraction of rows whose predicted argmax matches the target argmax."""
        pred = predict.argmax(axis=1).T.tolist()[0]
        true = target.argmax(axis=1).T.tolist()[0]
        hits = sum(1.0 for p, t in zip(pred, true) if p == t)
        return hits / len(predict)

    def predict_proba(self, X):
        """Predict your own data with fitted model.

        Paremeter
        ---------
        data : matrix
            The new data that you expect to predict.

        Return
        ------
        Matrix: the predict result of your data.
        """
        X = self._engine.mat(X)
        return self._forecast(X)

    def predict(self, X):
        """Predict your data with a fitted model and return the label.

        Parameter
        ---------
        data : matrix
            the data that you expect to predict

        Return
        ------
        Series : the labels of each record
        """
        return proba2label(self.predict_proba(X), self._labels)
| [
"DaPy.core.is_seq",
"copy.copy",
"DaPy.core.SeriesSet",
"DaPy.core.Series"
] | [((343, 358), 'DaPy.core.is_seq', 'is_seq', (['groupby'], {}), '(groupby)\n', (349, 358), False, 'from DaPy.core import is_seq\n'), ((712, 774), 'DaPy.core.Series', 'Series', (['(labels[0] if _ >= cutpoint else labels[1] for _ in seq)'], {}), '(labels[0] if _ >= cutpoint else labels[1] for _ in seq)\n', (718, 774), False, 'from DaPy.core import Series, SeriesSet\n'), ((176, 190), 'DaPy.core.SeriesSet', 'SeriesSet', (['seq'], {}), '(seq)\n', (185, 190), False, 'from DaPy.core import Series, SeriesSet\n'), ((909, 927), 'copy.copy', 'copy', (['self._labels'], {}), '(self._labels)\n', (913, 927), False, 'from copy import copy\n')] |
import os
import shutil
try:
input = raw_input
except NameError:
pass
def uniform_meta(f):
    """Copy one XMP sidecar's settings next to every ARW raw frame under *f*.

    Walks the tree collecting .arw/.ARW files; when an XMP file is found,
    asks the user whether to apply it. Raises FileNotFoundError if no XMP
    file exists in the tree.
    """
    xmp_path = ""
    f_list = []
    for root, dirs, files in os.walk(f):
        for name in files:  # renamed: the old loop variable shadowed parameter f
            ext = name.split(".")[-1].upper()
            if ext == "ARW":
                f_list.append(os.path.join(root, name))
            if ext == "XMP":
                a = input("Found XMP file %s, apply to all other frames? y/n " % name)
                if a == "y":
                    xmp_path = os.path.join(root, name)
                else:
                    quit("Aborted")
    if xmp_path == "":
        raise FileNotFoundError("XMP file not found. Aborting.")
    for src in f_list:
        # Replace only the extension. The old str.replace("ARW", "xmp")
        # rewrote every "ARW" occurrence anywhere in the path, and silently
        # no-op'd for lower-case .arw files (dst == src).
        dst = os.path.splitext(src)[0] + ".xmp"
        try:
            shutil.copyfile(xmp_path, dst)
            print("%s ~> %s" % (xmp_path, dst))
        except shutil.Error:
            # e.g. SameFileError when the sidecar already sits at dst.
            pass
# Interactive entry point: ask for the folder path, then run the sync.
f = input("Please input training example folder path:")
uniform_meta(f)
| [
"shutil.copyfile",
"os.path.join",
"os.walk"
] | [((165, 175), 'os.walk', 'os.walk', (['f'], {}), '(f)\n', (172, 175), False, 'import os\n'), ((836, 866), 'shutil.copyfile', 'shutil.copyfile', (['xmp_path', 'dst'], {}), '(xmp_path, dst)\n', (851, 866), False, 'import shutil\n'), ((281, 302), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (293, 302), False, 'import os\n'), ((539, 560), 'os.path.join', 'os.path.join', (['root', 'f'], {}), '(root, f)\n', (551, 560), False, 'import os\n')] |
import numpy as np
import matplotlib.pyplot as plt
import os
import cv2
from tensorflow.python.keras.callbacks import TensorBoard
from tqdm import tqdm
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten
from tensorflow.keras.layers import Conv2D, MaxPooling2D
import random
import pickle
import logging
import logging.config
import sys
import time
# Log everything at INFO and above to stdout via dictConfig.
LOGGING = {
    'version': 1,
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'stream': sys.stdout,
        }
    },
    'root': {
        'handlers': ['console'],
        'level': 'INFO'
    }
}
logging.config.dictConfig(LOGGING)
DATADIR = "/home/pedro/Área de Trabalho/PetImages"  # root of the training images
CATEGORIES = ["Cachorro", "Gato"]  # class index 0 = dog, 1 = cat
training_data = []
IMG_SIZE = 50  # images are resized to IMG_SIZE x IMG_SIZE grayscale
def prepare(filepath):
    """Load an image file and shape it for the CNN.

    Returns a (1, IMG_SIZE, IMG_SIZE, 1) grayscale array.

    Raises:
        FileNotFoundError: if the image cannot be read.
    """
    img_array = cv2.imread(filepath, cv2.IMREAD_GRAYSCALE)  # read in the image, convert to grayscale
    if img_array is None:
        # cv2.imread silently returns None on a bad path; fail loudly
        # instead of letting cv2.resize raise a cryptic error.
        raise FileNotFoundError("Could not read image: %s" % filepath)
    new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE))  # resize image to match model's expected sizing
    return new_array.reshape(-1, IMG_SIZE, IMG_SIZE, 1)
# def loadData():
#
# for category in CATEGORIES: # do dogs and cats
# path = os.path.join(DATADIR, category) # create path to dogs and cats
#
# for img in os.listdir(path): # iterate over each image per dogs and cats
#
# img_array = cv2.imread(os.path.join(path, img), cv2.IMREAD_GRAYSCALE) # convert to array
# plt.imshow(img_array, cmap='gray') # graph it
# plt.show() # display!
#
# break # we just want one for now so break
# break #...and one more!
# def create_training_data():
# for category in CATEGORIES: # do dogs and cats
#
# path = os.path.join(DATADIR, category) # create path to dogs and cats
# class_num = CATEGORIES.index(category) # get the classification (0 or a 1). 0=dog 1=cat
#
# for img in tqdm(os.listdir(path)): # iterate over each image per dogs and cats
# try:
# img_array = cv2.imread(os.path.join(path,img) ,cv2.IMREAD_GRAYSCALE) # convert to array
# new_array = cv2.resize(img_array, (IMG_SIZE, IMG_SIZE)) # resize to normalize data size
# training_data.append([new_array, class_num]) # add this to our training_data
# except Exception as e:
# pass
#except OSError as e:
# print("OSErrroBad img most likely", e, os.path.join(path,img))
#except Exception as e:
# print("general exception", e, os.path.join(path,img))
# def main():
# loadData()
# create_training_data()
# print(len(training_data))
# shuffle_train_and_save()
# train3()
#open_model_64x3()
#
#
# def shuffle_train_and_save():
# random.shuffle(training_data)
# for sample in training_data[:10]:
# print(sample[1])
#
# X = []
# y = []
# for features, label in training_data:
# X.append(features)
# y.append(label)
#
# X = np.array(X).reshape(-1, IMG_SIZE, IMG_SIZE, 1)
#
# pickle_out = open("X.pickle", "wb")
# pickle.dump(X, pickle_out)
# pickle_out.close()
#
# pickle_out = open("y.pickle", "wb")
# pickle.dump(y, pickle_out)
# pickle_out.close()
def open_data():
    """Load the pickled training set and scale pixel values to [0, 1].

    Returns:
        (X, y): feature array (float, scaled by 1/255) and label list.
    """
    # `with` guarantees the file handles are closed (the old code leaked them).
    with open("X.pickle", "rb") as pickle_in:
        X = pickle.load(pickle_in)
    with open("y.pickle", "rb") as pickle_in:
        y = pickle.load(pickle_in)
    X = X / 255.0  # normalize 8-bit grayscale to [0, 1]
    return X, y
#
# def model(X, y):
#
# model = Sequential()
#
# model.add(Conv2D(256, (3, 3), input_shape=X.shape[1:]))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# model.add(Conv2D(256, (3, 3)))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# model.add(Flatten()) # this converts our 3D feature maps to 1D feature vectors
#
# model.add(Dense(64))
#
# model.add(Dense(1))
# model.add(Activation('sigmoid'))
#
# model.compile(loss='binary_crossentropy',
# optimizer='adam',
# metrics=['accuracy'])
#
# model.fit(X, y, batch_size=32, epochs=3, validation_split=0.3)
#
#
# def train():
#
# NAME = "Cats-vs-dogs-64x2-CNN"
# X,y = open_data()
# model = Sequential()
#
# model.add(Conv2D(64, (3, 3), input_shape=X.shape[1:]))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# model.add(Conv2D(64, (3, 3)))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# model.add(Flatten()) # this converts our 3D feature maps to 1D feature vectors
# model.add(Dense(64))
# model.add(Activation('relu'))
#
# model.add(Dense(1))
# model.add(Activation('sigmoid'))
#
# tensorboard = TensorBoard(log_dir="logs/{}".format(NAME))
#
# model.compile(loss='binary_crossentropy',
# optimizer='adam',
# metrics=['accuracy'],
# )
#
# model.fit(X, y,
# batch_size=32,
# epochs=10,
# validation_split=0.3,
# callbacks=[tensorboard])
#
#
# def train2():
#
# X,y = open_data()
#
# dense_layers = [0, 1, 2]
# layer_sizes = [32, 64, 128]
# conv_layers = [1, 2, 3]
#
# for dense_layer in dense_layers:
# for layer_size in layer_sizes:
# for conv_layer in conv_layers:
# import time
# NAME = "{}-conv-{}-nodes-{}-dense-{}".format(conv_layer, layer_size, dense_layer, int(time.time()))
# print(NAME)
#
# model = Sequential()
#
# model.add(Conv2D(layer_size, (3, 3), input_shape=X.shape[1:]))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# for l in range(conv_layer-1):
# model.add(Conv2D(layer_size, (3, 3)))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# model.add(Flatten())
#
# for _ in range(dense_layer):
# model.add(Dense(layer_size))
# model.add(Activation('relu'))
#
# model.add(Dense(1))
# model.add(Activation('sigmoid'))
#
# tensorboard = TensorBoard(log_dir="logs/{}".format(NAME))
#
# model.compile(loss='binary_crossentropy',
# optimizer='adam',
# metrics=['accuracy'],
# )
#
# model.fit(X, y,
# batch_size=32,
# epochs=10,
# validation_split=0.3,
# callbacks=[tensorboard])
#
#
# def train3():
#
# X, y = open_data()
#
# dense_layers = [0]
# layer_sizes = [64]
# conv_layers = [3]
#
# for dense_layer in dense_layers:
# for layer_size in layer_sizes:
# for conv_layer in conv_layers:
# import time
# NAME = "{}-conv-{}-nodes-{}-dense-{}".format(conv_layer, layer_size, dense_layer, int(time.time()))
# print(NAME)
#
# model = Sequential()
#
# model.add(Conv2D(layer_size, (3, 3), input_shape=X.shape[1:]))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# for l in range(conv_layer-1):
# model.add(Conv2D(layer_size, (3, 3)))
# model.add(Activation('relu'))
# model.add(MaxPooling2D(pool_size=(2, 2)))
#
# model.add(Flatten())
#
# for _ in range(dense_layer):
# model.add(Dense(layer_size))
# model.add(Activation('relu'))
#
# model.add(Dense(1))
# model.add(Activation('sigmoid'))
#
# tensorboard = TensorBoard(log_dir="logs/{}".format(NAME))
#
# model.compile(loss='binary_crossentropy',
# optimizer='adam',
# metrics=['accuracy'],
# )
#
# model.fit(X, y,
# batch_size=32,
# epochs=10,
# validation_split=0.3,
# callbacks=[tensorboard])
#
# model.save('64x3-CNN.model')
#
def open_model_64x3(path):
    """Classify the image at *path* with the pre-trained 64x3 CNN.

    Returns the predicted category name from CATEGORIES.
    """
    model = tf.keras.models.load_model("source/core/machine/64x3-CNN.model")
    prediction = model.predict([prepare(path)])
    label = CATEGORIES[int(prediction[0][0])]
    logging.info(time.time())
    logging.info(label)
    return label
| [
"logging.config.dictConfig",
"pickle.load",
"tensorflow.keras.models.load_model",
"time.time",
"cv2.resize",
"cv2.imread"
] | [((687, 721), 'logging.config.dictConfig', 'logging.config.dictConfig', (['LOGGING'], {}), '(LOGGING)\n', (712, 721), False, 'import logging\n'), ((882, 924), 'cv2.imread', 'cv2.imread', (['filepath', 'cv2.IMREAD_GRAYSCALE'], {}), '(filepath, cv2.IMREAD_GRAYSCALE)\n', (892, 924), False, 'import cv2\n'), ((984, 1027), 'cv2.resize', 'cv2.resize', (['img_array', '(IMG_SIZE, IMG_SIZE)'], {}), '(img_array, (IMG_SIZE, IMG_SIZE))\n', (994, 1027), False, 'import cv2\n'), ((3403, 3425), 'pickle.load', 'pickle.load', (['pickle_in'], {}), '(pickle_in)\n', (3414, 3425), False, 'import pickle\n'), ((3474, 3496), 'pickle.load', 'pickle.load', (['pickle_in'], {}), '(pickle_in)\n', (3485, 3496), False, 'import pickle\n'), ((8650, 8714), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['"""source/core/machine/64x3-CNN.model"""'], {}), "('source/core/machine/64x3-CNN.model')\n", (8676, 8714), True, 'import tensorflow as tf\n'), ((8780, 8791), 'time.time', 'time.time', ([], {}), '()\n', (8789, 8791), False, 'import time\n')] |
# MIT License
#
# (C) Copyright [2022] Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
"""CANU commands that validate the shcd."""
from collections import defaultdict
import datetime
import json
import logging
from os import path
from pathlib import Path
import re
import sys
import click
from click_help_colors import HelpColorsCommand
import natsort
from network_modeling.NetworkNodeFactory import NetworkNodeFactory
from network_modeling.NetworkPort import NetworkPort
from network_modeling.NodeLocation import NodeLocation
from openpyxl import load_workbook
# Get project root directory
# When running as a PyInstaller bundle ("frozen"), bundled resources live in
# the temporary extraction directory sys._MEIPASS; otherwise walk four levels
# up from this module to reach the repository root.
if getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS"):  # pragma: no cover
    project_root = sys._MEIPASS
else:
    prog = __file__
    project_root = Path(__file__).resolve().parent.parent.parent.parent
# The CANU release string lives in canu/.version; it is read once at import
# time and later embedded into the JSON Paddle output by json_output().
canu_version_file = path.join(project_root, "canu", ".version")
with open(canu_version_file, "r") as version_file:
    version = version_file.read().replace("\n", "")
# Module-level logger; its level is configured later from the --log CLI option.
log = logging.getLogger("validate_shcd")
# Click wiring: each option below maps 1:1 onto a parameter of shcd().
@click.command(
    cls=HelpColorsCommand,
    help_headers_color="yellow",
    help_options_color="blue",
)
@click.option(
    "--architecture",
    "-a",
    type=click.Choice(["Full", "TDS", "V1"], case_sensitive=False),
    help="CSM architecture",
    required=True,
    prompt="Architecture type",
)
@click.option(
    "--shcd",
    help="SHCD file",
    type=click.File("rb"),
    required=True,
)
@click.option(
    "--tabs",
    help="The tabs on the SHCD file to check, e.g. 10G_25G_40G_100G,NMN,HMN.",
)
@click.option(
    "--corners",
    help="The corners on each tab, comma separated e.g. 'J37,U227,J15,T47,J20,U167'.",
)
@click.option(
    "--out",
    help="Output results to a file",
    type=click.File("w"),
    default="-",
)
@click.option("--json", "json_", is_flag=True, help="Output JSON model to a file")
@click.option(
    "--log",
    "log_",
    help="Level of logging.",
    type=click.Choice(["DEBUG", "INFO", "WARNING", "ERROR"]),
    default="ERROR",
)
@click.pass_context
def shcd(ctx, architecture, shcd, tabs, corners, out, json_, log_):
    """Validate a SHCD file.
    CANU can be used to validate that an SHCD (SHasta Cabling Diagram) passes basic validation checks.
    - Use the '--tabs' flag to select which tabs on the spreadsheet will be included.
    - The '--corners' flag is used to input the upper left and lower right corners of the table on each tab of the worksheet. If the corners are not specified, you will be prompted to enter them for each tab.
    - The table should contain the 11 headers: Source, Rack, Location, Slot, (Blank), Port, Destination, Rack, Location, (Blank), Port.
    --------
    \f
    # noqa: D301, B950
    Args:
        ctx: CANU context settings
        architecture: CSM architecture
        shcd: SHCD file
        tabs: The tabs on the SHCD file to check, e.g. 10G_25G_40G_100G,NMN,HMN.
        corners: The corners on each tab, comma separated e.g. 'J37,U227,J15,T47,J20,U167'.
        out: Filename for the JSON Topology if requested.
        json_: Bool indicating json output
        log_: Level of logging.
    """
    # Configure the root logger with the level requested via --log.
    logging.basicConfig(format="%(name)s - %(levelname)s: %(message)s", level=log_)
    # Translate the user-facing architecture choice into the internal model name.
    if architecture.lower() == "full":
        architecture = "network_v2"
    elif architecture.lower() == "tds":
        architecture = "network_v2_tds"
    elif architecture.lower() == "v1":
        architecture = "network_v1"
    # SHCD Parsing
    # shcd_to_sheets raises (after printing its own errors) when the corner
    # list does not match the tab list; the command simply stops in that case.
    try:
        sheets = shcd_to_sheets(shcd, tabs, corners)
    except Exception:
        return
    # Create Node factory
    factory = NetworkNodeFactory(architecture_version=architecture)
    node_list, warnings = node_model_from_shcd(
        factory=factory,
        spreadsheet=shcd,
        sheets=sheets,
    )
    # --json emits the machine-readable Paddle file; otherwise print the
    # human-readable connection report plus any validation warnings.
    if json_:
        json_output(node_list, factory, architecture, shcd, out)
    else:
        print_node_list(node_list, "SHCD", out)
        node_list_warnings(node_list, warnings, out)
    # NOTE(review): the dict returned here is discarded and the function has
    # no side effects on node_list — confirm this trailing call is still needed.
    switch_unused_ports(node_list)
def shcd_to_sheets(shcd, tabs, corners):
    """Parse SHCD tabs and corners into sheets.
    Args:
        shcd: SHCD file
        tabs: The tabs on the SHCD file to check, e.g. 10G_25G_40G_100G,NMN,HMN.
        corners: The corners on each tab, comma separated e.g. 'J37,U227,J15,T47,J20,U167'.
    Returns:
        sheets: List of (tab_name, upper_left_cell, lower_right_cell) tuples
    Raises:
        Exception: If there are not pairs of corners
    """
    sheets = []
    if not tabs:
        # No --tabs given: show the workbook's sheet names and ask the user.
        wb = load_workbook(shcd, read_only=True, data_only=True)
        click.secho("What tabs would you like to check in the SHCD?")
        tab_options = wb.sheetnames
        for x in tab_options:
            click.secho(f"{x}", fg="green")
        tabs = click.prompt(
            "Please enter the tabs to check separated by a comma, e.g. 10G_25G_40G_100G,NMN,HMN.",
            type=str,
        )
    if corners:
        # Every tab needs exactly two corners (upper-left and lower-right).
        if len(tabs.split(",")) * 2 != len(corners.split(",")):
            log.error("")
            click.secho("Not enough corners.\n", fg="red")
            click.secho(
                f"Make sure each tab: {tabs.split(',')} has 2 corners.\n",
                fg="red",
            )
            click.secho(
                f"There were {len(corners.split(','))} corners entered, but there should be {len(tabs.split(',')) * 2}.",
                fg="red",
            )
            click.secho(
                f"{corners}\n",
                fg="red",
            )
            raise Exception
        # Each tab should have 2 corners entered in comma separated
        # Tab i consumes corner positions 2i (upper-left) and 2i+1 (lower-right).
        for i in range(len(tabs.split(","))):
            # 0 -> 0,1
            # 1 -> 2,3
            # 2 -> 4,5
            sheets.append(
                (
                    tabs.split(",")[i],
                    corners.split(",")[i * 2].strip(),
                    corners.split(",")[i * 2 + 1].strip(),
                ),
            )
    else:
        # No --corners given: interactively prompt for both corners per tab.
        for tab in tabs.split(","):
            click.secho(f"\nFor the Sheet {tab}", fg="green")
            range_start = click.prompt(
                "Enter the cell of the upper left corner (Labeled 'Source')",
                type=str,
            )
            range_end = click.prompt(
                "Enter the cell of the lower right corner",
                type=str,
            )
            sheets.append((tab, range_start, range_end))
    return sheets
def get_node_common_name(name, rack_number, rack_elevation, mapper):
    """Map SHCD device names to hostname.
    Args:
        name: A string from SHCD representing the device name
        rack_number: A string for rack the device is in (e.g. x1001)
        rack_elevation: A string for the position of the device in the rack (e.g. u19)
        mapper: An array of tuples (SHCD name, hostname, device type)
    Returns:
        common_name: A string of the hostname, or None when no lookup matches
    """
    for entry in mapper:
        hostname = entry[1]
        for lookup_name in entry[0]:
            if re.match("^{}".format(lookup_name.strip()), name) is None:
                continue
            # Build the hostname prefix.  Order matters: "sw-hsn" also
            # contains "sw-", so it must be tested first.  hsn switches,
            # cmm, cec and pdu devices embed the rack number; other
            # switches only get a trailing dash; everything else keeps
            # the bare hostname.
            if "sw-hsn" in hostname:
                prefix = hostname + "-" + rack_number + "-"
            elif "sw-" in hostname:
                prefix = hostname + "-"
            elif "cmm" in hostname:
                prefix = hostname + "-" + rack_number + "-"
            elif "cec" in hostname:
                prefix = hostname + "-" + rack_number + "-"
            elif "pdu" in hostname:
                prefix = hostname + "-" + rack_number + "-"
            else:
                prefix = hostname
            if prefix == "sw-cdu-" and not name.startswith("sw-cdu"):
                # cdu0sw1 --> sw-cdu-001, cdu0sw2 --> sw-cdu-002,
                # cdu1sw1 --> sw-cdu-003 ... ordinal = cdu*2 + sw
                cdu_digits = re.findall(r"\d+", name)
                ordinal = int(cdu_digits[0]) * 2 + int(cdu_digits[1])
                return f"{prefix}{ordinal:0>3}"
            if prefix.startswith("pdu"):
                # pdu1 / x3113pdu1 / x3113p1 in x3113 --> pdu-x3113-001
                last_digits = re.findall(r"\d+", name)[-1]
                return f"{prefix}{last_digits:0>3}"
            # Strip the (unstripped) lookup prefix and any leading zeros to
            # recover the device ordinal, then zero-pad it to three digits.
            ordinal = re.sub(
                "^({})0*([1-9]*)".format(lookup_name),
                r"\2",
                name,
            ).strip("-")
            return f"{prefix}{ordinal:0>3}"
    return None
def get_node_type(name, mapper):
    """Map SHCD device name to device type.
    Args:
        name: A string from SHCD representing the device name
        mapper: An array of tuples (SHCD name, hostname, device type)
    Returns:
        node_type: A string with the device type, or None when nothing matches
    """
    # First lookup prefix that matches the start of the name wins.
    for entry in mapper:
        for lookup_name in entry[0]:
            if re.match("^{}".format(lookup_name.strip()), name) is not None:
                return entry[2]
    return None
def validate_shcd_slot_data(cell, sheet, warnings, is_src_slot=False):
    """Ensure that a slot from the SHCD is a proper string.
    Args:
        cell: Cell object of a port from the SHCD spreadsheet
        sheet: SHCD spreadsheet sheet/tab name
        warnings: Existing list of warnings to post to
        is_src_slot: Boolean hack to work around SHCD inconsistencies.
    Returns:
        slot: A cleaned up string value from the cell
    """
    # Slot names the tooling accepts; "s" and "pci" are legacy spellings
    # that are rewritten below before the membership check.
    valid_slot_names = ["ocp", "pcie-slot1", "bmc", "mgmt", "onboard", "s", "pci", None]
    location = None
    if cell.value is not None:
        location = cell.coordinate
    slot = cell.value
    if isinstance(slot, str):
        slot = slot.strip()
        # Legacy "sN" spelling: rewrite to "pcie-slotN" but record a warning.
        if slot and slot[0] == "s":
            warnings["shcd_slot_data"].append(sheet + ":" + location)
            log.warning(
                'Prepending the character "s" to a slot will not be allowed in the future. '
                + f"Please correct cell {sheet}:{location} in the SHCD with value {slot} and prefer pcie-slot1.",
            )
            slot = "pcie-slot" + slot[1:]
        # Legacy bare "pci" spelling: rewrite to "pcie-slot1" with a warning.
        if slot and slot == "pci":
            warnings["shcd_slot_data"].append(sheet + ":" + location)
            log.warning(
                "The name pcie alone as a slot will not be allowed in the future"
                + f"Please correct cell {sheet}:{location} in the SHCD with value {slot} and prefer pcie-slot1.",
            )
            slot = "pcie-slot1"
        if slot not in valid_slot_names:
            warnings["shcd_slot_data"].append(sheet + ":" + location)
            log.warning(
                f"Slots must be named from the following list {valid_slot_names}."
                + f"Please correct cell {sheet}:{location} in the SHCD with value {slot}.",
            )
        # An empty string normalizes to None (no slot specified).
        if not slot:
            slot = None
    # Awful hack around the convention that src slot can be blank and a bmc
    # is noted by port 3 when there is physically one port.
    # NOTE: This is required for the port to get fixed.
    if is_src_slot:
        # Blank source slots default per-tab: HMN implies "bmc", NMN "onboard".
        if sheet == "HMN" and slot is None:
            warnings["shcd_slot_data"].append(f"{sheet}:{cell.coordinate}")
            log.warning(
                'A source slot of type "bmc" for servers or "mgmt" for switches must be specified in the HMN tab. '
                + f"Please correct the SHCD for {sheet}:{cell.coordinate} with an empty value.",
            )
            slot = "bmc"
        elif sheet == "NMN" and slot is None:
            warnings["shcd_slot_data"].append(f"{sheet}:{cell.coordinate}")
            log.warning(
                'A source slot of type "onboard" for servers must be specified in the NMN tab. '
                + f"Please correct the SHCD for {sheet}:{cell.coordinate} with an empty value.",
            )
            slot = "onboard"
    return slot
def validate_shcd_port_data(cell, sheet, warnings, is_src_port=False, node_type=None):
    """Ensure that a port from the SHCD is a proper integer.
    Args:
        cell: Cell object of a port from the SHCD spreadsheet
        sheet: SHCD spreadsheet sheet/tab name
        warnings: Existing list of warnings to post to
        is_src_port: (optional) Boolean triggers a hack to work around SHCD inconsistencies
        node_type: (optional) if node_type is 'subrack' returns a None instead of an Exception
    Returns:
        port: A cleaned up integer value from the cell, or None for a blank
            SubRack port
    """
    location = None
    if cell.value is not None:
        location = cell.coordinate
    port = cell.value
    # Numeric cells arrive as ints and skip this normalization entirely.
    if isinstance(port, str):
        port = port.strip()
        if not port:
            # A blank port cell is only tolerated for SubRack rows;
            # anything else is a hard error.
            if node_type == "subrack":
                return None
            log.fatal(
                "A port number must be specified. "
                + f"Please correct the SHCD for {sheet}:{location} with an empty value",
            )
            sys.exit(1)
        # Deprecated "j" prefix: strip it but record a warning.
        if port[0].lower() == "j":
            warnings["shcd_port_data"].append(f"{sheet}:{location}")
            log.warning(
                'Prepending the character "j" to a port will not be allowed in the future. '
                + f"Please correct cell {sheet}:{location} in the SHCD with value {port}",
            )
            port = port[1:]
        # For SubRacks
        if port.upper() == "CMC" or port.upper() == "RCM":
            port = "1"
        # After normalization only digits may remain.
        if re.search(r"\D", port) is not None:
            log.fatal(
                "Port numbers must be integers. "
                + f'Please correct in the SHCD for cell {sheet}:{location} with value "{port}"',
            )
            sys.exit(1)
        if int(port) < 1:
            log.fatal(
                "Ports numbers must be greater than 1. Port numbering must begin at 1. "
                + f'Please correct in the SHCD for cell {sheet}:{location} with value "{port}"',
            )
            sys.exit(1)
    if is_src_port:
        # Awful hack around the convention that src slot can be blank and a bmc
        # is noted by port 3 when there is physically one port.
        # NOTE: This assumes that the slot has already been corrected to "bmc"
        if sheet == "HMN" and int(port) == 3:
            warnings["shcd_port_conventions"].append(f"{sheet}:{location}")
            log.warning(
                f'Bad slot/port convention for port "j{port}" in location {sheet}:{location}.'
                + 'This should be slot "bmc" for servers and "mgmt" for switches, and port "1".',
            )
            port = 1
    # port is None only when the cell itself was empty (cell.value is None).
    if port is None:
        if node_type == "subrack":
            return None
        else:
            log.fatal(
                "A port number must be specified. "
                + f"Please correct the SHCD for {sheet}:{cell.coordinate} with an empty value",
            )
            sys.exit(1)
    return int(port)
def node_model_from_shcd(factory, spreadsheet, sheets):
    """Create a list of nodes from SHCD.
    Args:
        factory: Node factory object
        spreadsheet: The SHCD spreadsheet
        sheets: An array of tabs and their corners on the spreadsheet
    Returns:
        node_list: A list of created nodes
        warnings: A list of warnings
    """
    # Generated nodes
    node_list = []
    node_name_list = []
    warnings = defaultdict(list)
    # data_only=True reads cached formula results instead of formula strings.
    wb = load_workbook(spreadsheet, read_only=True, data_only=True)
    for tab in sheets:
        sheet = tab[0]
        range_start = tab[1]
        range_end = tab[2]
        log.info("---------------------------------------------")
        log.info(f"Working on tab/worksheet {sheet}")
        log.info("---------------------------------------------")
        log.info("")
        if sheet not in wb.sheetnames:
            log.fatal("")
            click.secho(f"Tab {sheet} not found in {spreadsheet.name}\n", fg="red")
            click.secho(f"Available tabs: {wb.sheetnames}", fg="red")
            sys.exit(1)
        ws = wb[sheet]
        try:
            block = ws[range_start:range_end]
        except ValueError:
            log.error("")
            click.secho(f"Bad range of cells entered for tab {sheet}.", fg="red")
            click.secho(f"{range_start}:{range_end}\n", fg="red")
            click.secho(
                "Ensure that the upper left corner (Labeled 'Source'), and the lower right corner of the table is entered.",
                fg="red",
            )
            sys.exit(1)
        # Process Headers
        # "Rack", "Location" and "Port" appear twice (source and destination),
        # so matching below is positional, scanning left to right.
        required_header = [
            "Source",
            "Rack",
            "Location",
            "Slot",
            "Port",
            "Destination",
            "Rack",
            "Location",
            "Port",
        ]
        header = block[0]
        if len(header) == 0 or len(header) < len(required_header):
            log.fatal("")
            click.secho(
                f"Bad range of cells entered for tab {sheet}:{range_start}:{range_end}.",
                fg="red",
            )
            click.secho(
                "Not enough columns exist.\n"
                "Columns must exist in the following order, but may have other columns in between:\n"
                f"{required_header}\n"
                "Ensure that the upper left corner (Labeled 'Source'), and the lower right corner of the table is entered.",
                fg="red",
            )
            sys.exit(1)
        log.info(
            f"Expecting header with {len(required_header):>2} columns: {required_header}",
        )
        log.info(
            f"Found header with {len(header):>2} columns: {[x.value for x in header]}",
        )
        log.info("Mapping required columns to actual.")
        # An optional "Parent" column (tracked via `subrack`) names the
        # SubRack that a source device lives in.
        subrack = None
        start_index = 0
        for required_index in range(len(required_header)):
            found = None
            for current_index in range(start_index, len(header)):
                if header[current_index].value == "Parent":
                    subrack = current_index
                elif header[current_index].value == required_header[required_index]:
                    found = current_index
                    break
                else:
                    found = None
                    continue
            if found is not None:
                log.info(
                    f"Required header column {required_header[required_index]} "
                    f"found in spreadsheet cell {header[current_index].coordinate}",
                )
                # Replace the header NAME with its column INDEX for row access.
                required_header[required_index] = found
                start_index = current_index + 1
                found = None
                continue
            else:
                log.error("")
                click.secho(
                    f"On tab {sheet}, header column {required_header[required_index]} not found.",
                    fg="red",
                )
                log.fatal("")
                click.secho(
                    f"On tab {sheet}, the header is formatted incorrectly.\n"
                    "Columns must exist in the following order, but may have other columns in between:\n"
                    f"{required_header}",
                    fg="red",
                )
                sys.exit(1)
        # Process Data
        block = block[1:]
        for row in block:
            # Cable source
            try:
                current_row = row[required_header[0]].row
                log.debug(f"---- Working in sheet {sheet} on row {current_row} ----")
                src_name = row[required_header[0]].value.strip()
                tmp_slot = row[required_header[3]]
                tmp_port = row[required_header[4]]
                src_rack = None
                if row[required_header[1]].value:
                    src_rack = row[required_header[1]].value.strip()
                src_elevation = None
                if row[required_header[2]].value:
                    src_elevation = row[required_header[2]].value.strip()
                src_location = NodeLocation(src_rack, src_elevation)
            except AttributeError:
                log.fatal("")
                click.secho(
                    f"Bad cell data or range of cells entered for sheet {sheet} in row {current_row} for source data.",
                    fg="red",
                )
                click.secho(
                    "Ensure the range entered does not contain empty cells.",
                    fg="red",
                )
                sys.exit(1)
            src_slot = validate_shcd_slot_data(
                tmp_slot,
                sheet,
                warnings,
                is_src_slot=True,
            )
            node_name = get_node_common_name(
                src_name,
                src_rack,
                src_elevation,
                factory.lookup_mapper(),
            )
            log.debug(f"Source Name Lookup: {node_name}")
            log.debug(f"Source rack {src_rack} in location {src_elevation}")
            node_type = get_node_type(src_name, factory.lookup_mapper())
            log.debug(f"Source Node Type Lookup: {node_type}")
            # Create src_node if it does not exist
            src_node = None
            src_index = None
            parent = None
            if node_type is not None and node_name is not None:
                if node_name not in node_name_list:
                    log.info(f"Creating new node {node_name} of type {node_type}")
                    try:
                        src_node = factory.generate_node(node_type)
                    except Exception as e:
                        print(e)
                        sys.exit(1)
                    src_node.common_name(node_name)
                    src_node.location(src_location)
                    node_list.append(src_node)
                    node_name_list.append(node_name)
                else:
                    # If the src is a subrack, add location info
                    if node_type == "subrack":
                        node_list[node_name_list.index(node_name)].location(
                            src_location,
                        )
                    log.debug(
                        f"Node {node_name} already exists, skipping node creation.",
                    )
                src_index = node_name_list.index(node_name)
                # update the node with parent location if it exists
                if subrack is not None:
                    parent = row[subrack].value
                    if parent is not None:
                        src_node = node_list[src_index]
                        src_location.parent(parent)
                        src_node.location(src_location)
            else:
                # Unrecognized device: record it ("name@@sheet@@cell") so the
                # warning report can point at the exact spreadsheet cell.
                warnings["node_type"].append(
                    f"{src_name}@@{sheet}@@{row[required_header[0]].coordinate}",
                )
                log.warning(
                    f"Node type for {src_name} cannot be determined by node type ({node_type}) or node name ({node_name})",
                )
            # If a parent is specified, need to make a connection between the src and the parent
            if parent:
                src_parent_port = 1
                src_parent_slot = "cmc"
                src_node_port = NetworkPort(
                    number=src_parent_port,
                    slot=src_parent_slot,
                )
                parent_slot = "cmc"
                parent_rack = None
                parent_elevation = None
                node_name_parent = get_node_common_name(
                    parent,
                    parent_rack,
                    parent_elevation,
                    factory.lookup_mapper(),
                )
                node_type_parent = get_node_type(parent, factory.lookup_mapper())
                parent_location = None
                parent_index = create_dst_node(
                    node_type_parent,
                    node_name_parent,
                    node_name_list,
                    parent_location,
                    node_list,
                    factory,
                )
                # We do not know what port numbers to connect to on the SubRack,
                # Get the next_free_port, and use that as the port number
                parent_port = node_list[parent_index].available_ports(
                    speed=1,
                    slot="cmc",
                    next_free_port=True,
                )
                parent_node_port = NetworkPort(number=parent_port, slot=parent_slot)
                src_node = node_list[src_index]
                parent_node = node_list[parent_index]
                try:
                    connect_src_dst(
                        src_node,
                        parent_node,
                        src_node_port,
                        parent_node_port,
                    )
                except Exception as err:
                    log.fatal(err)
                    log.fatal(
                        click.secho(
                            f"Failed to connect {src_node.common_name()} "
                            + f"to parent {parent_node.common_name()} bi-directionally "
                            + f"while working on sheet {sheet}, row {current_row}.",
                            fg="red",
                        ),
                    )
                    sys.exit(1)
            # If the tmp_port is None, make one connection:
            # src ==> parent
            # Continue to connect the rest of the nodes
            if tmp_port.value is None:
                continue
            src_port = validate_shcd_port_data(
                tmp_port,
                sheet,
                warnings,
                is_src_port=True,
                node_type=node_type,
            )
            # Sometimes a CMC is in the SHCD with no destination
            # If this is the case, continue to connect the other nodes
            if node_type == "subrack" and src_port is None:
                continue
            log.debug(f"Source Data: {src_name} {src_slot} {src_port}")
            # Create the source port for the node
            src_node_port = NetworkPort(number=src_port, slot=src_slot)
            # Cable destination
            try:
                dst_name = row[required_header[5]].value.strip()
                dst_slot = None  # dst is always a switch
                dst_port = validate_shcd_port_data(
                    row[required_header[8]],
                    sheet,
                    warnings,
                )
                dst_rack = None
                if row[required_header[6]].value:
                    dst_rack = row[required_header[6]].value.strip()
                dst_elevation = None
                if row[required_header[7]].value:
                    dst_elevation = row[required_header[7]].value.strip()
                dst_location = NodeLocation(dst_rack, dst_elevation)
            except AttributeError:
                log.fatal("")
                click.secho(
                    f"Bad cell data or range of cells entered for sheet {sheet} in row {current_row} for destination data.",
                    fg="red",
                )
                click.secho(
                    "Ensure the range entered does not contain empty cells.",
                    fg="red",
                )
                sys.exit(1)
            log.debug(f"Destination Data: {dst_name} {dst_slot} {dst_port}")
            node_name = get_node_common_name(
                dst_name,
                dst_rack,
                dst_elevation,
                factory.lookup_mapper(),
            )
            log.debug(f"Destination Name Lookup: {node_name}")
            node_type = get_node_type(dst_name, factory.lookup_mapper())
            log.debug(f"Destination Node Type Lookup: {node_type}")
            # Create dst_node if it does not exist
            try:
                dst_index = create_dst_node(
                    node_type,
                    node_name,
                    node_name_list,
                    dst_location,
                    node_list,
                    factory,
                )
            except Exception:
                # create_dst_node raises when type/name are unresolvable;
                # record the warning and skip connecting this row.
                warnings["node_type"].append(
                    f"{dst_name}@@{sheet}@@{row[required_header[5]].coordinate}",
                )
                log.warning(
                    f"Node type for {dst_name} cannot be determined by node type ({node_type}) or node name ({node_name})",
                )
                dst_index = None
            # Create the destination port
            dst_node_port = NetworkPort(number=dst_port, slot=dst_slot)
            if src_index is not None and dst_index is not None:
                src_node = node_list[src_index]
                dst_node = node_list[dst_index]
                try:
                    connect_src_dst(
                        src_node,
                        dst_node,
                        src_node_port,
                        dst_node_port,
                    )
                except Exception as err:
                    log.fatal(err)
                    log.fatal(
                        click.secho(
                            f"Failed to connect {src_node.common_name()} "
                            + f"to {dst_node.common_name()} bi-directionally "
                            + f"while working on sheet {sheet}, row {current_row}.",
                            fg="red",
                        ),
                    )
                    sys.exit(1)
    wb.close()
    return node_list, warnings
def create_dst_node(
    node_type,
    node_name,
    node_name_list,
    dst_location,
    node_list,
    factory,
):
    """Find or create the node a cable terminates on.
    Args:
        node_type: Type of node
        node_name: Node name
        node_name_list: List of node names
        dst_location: Location object for the destination node
        node_list: A list of nodes
        factory: Node factory object
    Returns:
        dst_index: Index in the node_name_list of the destination
    Raises:
        Exception: If a node cannot be determined
    """
    # Without both a resolvable type and name there is nothing to build.
    if node_type is None or node_name is None:
        raise Exception
    if node_name in node_name_list:
        # A previous row already created this node; reuse it.
        log.debug(
            f"Node {node_name} already exists, skipping node creation",
        )
        return node_name_list.index(node_name)
    log.info(f"Creating new node {node_name} of type {node_type}")
    try:
        fresh_node = factory.generate_node(node_type)
    except Exception as e:
        print(e)
        sys.exit(1)
    fresh_node.common_name(node_name)
    fresh_node.location(dst_location)
    node_list.append(fresh_node)
    node_name_list.append(node_name)
    return node_name_list.index(node_name)
def connect_src_dst(
    src_node,
    dst_node,
    src_node_port,
    dst_node_port,
):
    """Connect a source node to a destination node.
    Args:
        src_node: Source node
        dst_node: Destination node
        src_node_port: Source node port
        dst_node_port: Destination node port
    Raises:
        Exception: If nodes cannot be connected
    """
    # connect() raises on invalid cabling.  The previous try/except here only
    # re-raised the same exception unchanged, so let it propagate directly.
    connected = src_node.connect(
        dst_node,
        src_port=src_node_port,
        dst_port=dst_node_port,
        strict=True,
    )
    if connected:
        log.info(
            f"Connected {src_node.common_name()} to {dst_node.common_name()} bi-directionally",
        )
    else:
        log.error(
            click.secho(
                f"Failed to connect {src_node.common_name()}"
                + f" to {dst_node.common_name()} bi-directionally",
                fg="red",
            ),
        )
        raise Exception
def node_list_warnings(node_list, warnings, out="-"):
    """Print the warnings found while validating the SHCD.
    Args:
        node_list: A list of nodes
        warnings: A dictionary of warnings
        out: Defaults to stdout, but will print to the file name passed in
    """
    dash = "-" * 80
    # Generate warnings
    # Additional warnings about the data will be entered here
    for node in node_list:
        if len(node.edges()) == 0:
            warnings["zero_connections"].append(node.common_name())
    # Print Warnings
    if warnings:
        click.secho("\nWarnings", fg="red", file=out)
        if warnings["node_type"]:
            click.secho(
                "\nNode type could not be determined for the following."
                + "\nThese nodes are not currently included in the model."
                + "\n(This may be a missing architectural definition/lookup or a spelling error)",
                fg="red",
                file=out,
            )
            click.secho(dash, file=out)
            # De-duplicate, then natural-sort so x2 comes before x10.
            nodes = set(warnings["node_type"])
            nodes = natsort.natsorted(nodes)
            has_mac = False
            node_type_warnings = defaultdict(list)
            for node in nodes:
                # Missing nodes with '@@' are from the SHCD
                # (encoded earlier as "name@@sheet@@cell").
                if "@@" in node:
                    name = node.split("@@")[0]
                    sheet = node.split("@@")[1]
                    cell = node.split("@@")[2]
                    node_type_warnings[sheet].append(f"Cell: {cell:<8s} Name: {name}")
                else:
                    # If the string has a mac address in it, set to True
                    if bool(re.search(r"(?:[0-9a-fA-F]:?){12}", str(node))):
                        has_mac = True
                    click.secho(node, fg="bright_white", file=out)
            if len(node_type_warnings) > 0:
                for sheet, cell_list in node_type_warnings.items():
                    click.secho(f"Sheet: {sheet}", fg="bright_white", file=out)
                    for cell in cell_list:
                        click.secho(cell, file=out)
                    click.secho("", file=out)
            if has_mac is True:
                click.secho(
                    "Nodes that show up as MAC addresses might need to have LLDP enabled.",
                    file=out,
                )
        if warnings["zero_connections"]:
            click.secho(
                "\nThe following nodes have zero connections"
                + "\n(The node type may not have been found or no connections are present)",
                fg="red",
                file=out,
            )
            click.secho(dash, file=out)
            nodes = set(warnings["zero_connections"])
            nodes = natsort.natsorted(nodes)
            for node in nodes:
                click.secho(node, fg="bright_white", file=out)
        if warnings["rename"]:
            click.secho(
                "\nThe following nodes should be renamed",
                fg="red",
                file=out,
            )
            click.secho(dash, file=out)
            nodes = set()
            for x in warnings["rename"]:
                new_name = x[1]
                if new_name == "":  # pragma: no cover
                    new_name = "(could not identify node)"
                nodes.add((x[0], new_name))
            nodes = natsort.natsorted(nodes)
            for node in nodes:
                click.secho(
                    f"{node[0]} should be renamed {node[1]}",
                    fg="bright_white",
                    file=out,
                )
        if warnings["shcd_port_data"]:
            click.secho(
                '\nSHCD port definitions are using a deprecated "j" prefix'
                + '\n(Ports should be an integer, remove the "j" in each cell)',
                fg="red",
                file=out,
            )
            click.secho(dash, file=out)
            # Warnings were recorded as "sheet:cell"; group them per sheet.
            port_warnings = defaultdict(list)
            for x in warnings["shcd_port_data"]:
                sheet = x.split(":")[0]
                cell = x.split(":")[1]
                port_warnings[sheet].append(cell)
            for sheet, cell_list in port_warnings.items():
                click.secho(f"Sheet: {sheet}", fg="bright_white", file=out)
                click.secho(f"{', '.join(cell_list)}\n", file=out)
        if warnings["shcd_port_conventions"]:
            click.secho(
                "\nSHCD port convention in the HMN tab is to use port 3 to represent BMCs."
                + '\n(For servers, correct the following cells to use a Slot of "bmc" and a port of "1")'
                + '\n(For Switches, correct the following cells to use a Slot of "mgmt" and a port of "1")',
                fg="red",
                file=out,
            )
            click.secho(dash, file=out)
            slot_warnings = defaultdict(list)
            for x in warnings["shcd_port_conventions"]:
                sheet = x.split(":")[0]
                cell = x.split(":")[1]
                slot_warnings[sheet].append(cell)
            for sheet, cell_list in slot_warnings.items():
                click.secho(f"Sheet: {sheet}", fg="bright_white", file=out)
                click.secho(f"{', '.join(cell_list)}\n", file=out)
        if warnings["shcd_slot_data"]:
            click.secho(
                "\nSHCD slot definitions used are either deprecated, missing or incorrect."
                + '\n(The cells below should only be one of the following ["bmc", "ocp", "pcie-slot1, "mgmt", None])',
                fg="red",
                file=out,
            )
            click.secho(dash, file=out)
            def_warnings = defaultdict(list)
            for x in warnings["shcd_slot_data"]:
                sheet = x.split(":")[0]
                cell = x.split(":")[1]
                def_warnings[sheet].append(cell)
            for sheet, cell_list in def_warnings.items():
                click.secho(f"Sheet: {sheet}", fg="bright_white", file=out)
                click.secho(f"{', '.join(cell_list)}\n", file=out)
def switch_unused_ports(node_list):
    """Create a dictionary of unused ports per switch.
    Only nodes whose name contains "sw" — excluding "sw-hsn" switches — are
    inspected.  A port is unused when its entry in node.ports() is None;
    logical port numbering starts at 1.  The last recorded unused port of
    each switch is dropped, matching the previous behavior, but a switch
    with every port in use now yields an empty list instead of raising
    IndexError from popping an empty list.
    Args:
        node_list: A list of nodes
    Returns:
        unused_ports: Dictionary of switches and their unused ports
    """
    unused_ports = {}
    for node in node_list:
        name = node.common_name()
        if "sw" not in name or "sw-hsn" in name:
            continue
        # Logical port numbers are 1-based positions in the ports() list.
        unused = [
            index
            for index, port in enumerate(node.ports(), start=1)
            if port is None
        ]
        if unused:
            # Drop the trailing entry (historical behavior); the guard
            # prevents a crash for a fully-populated switch.
            unused.pop()
        unused_ports[name] = unused
    return unused_ports
def print_node_list(node_list, title, out="-"):
    """Print the nodes found in the SHCD.
    Prints two sections: a per-node connection summary, then a per-node port
    usage report where consecutive unused ports are compressed into ranges.
    Args:
        node_list: A list of nodes
        title: Title to be printed
        out: Defaults to stdout, but will print to the file name passed in
    """
    dash = "-" * 60
    click.echo("\n", file=out)
    click.secho(f"{title} Node Connections", fg="bright_white", file=out)
    click.echo(dash, file=out)
    for node in node_list:
        click.echo(
            f"{node.id()}: {node.common_name()} connects to {len(node.edges())} nodes: {node.edges()}",
            file=out,
        )
    dash = "-" * 60
    click.echo("\n", file=out)
    click.secho(f"{title} Port Usage", fg="bright_white", file=out)
    click.echo(dash, file=out)
    for node in node_list:
        click.echo(
            f"{node.id()}: {node.common_name()} has the following port usage:",
            file=out,
        )
        # Collect runs of consecutive unused (None) ports so they can be
        # printed as a single "NN-MM==>UNUSED" range before the next used port.
        unused_block = []
        logical_index = 1
        for port in node.ports():
            if port is None:
                unused_block.append(logical_index)
                logical_index += 1
                continue
            if unused_block:
                if len(unused_block) == 1:
                    port_string = f"{unused_block[0]:02}==>UNUSED"
                else:
                    port_string = f"{unused_block[0]:02}-{unused_block[len(unused_block)-1]:02}==>UNUSED"
                unused_block = []  # reset
                click.secho(f"        {port_string}", fg="green", file=out)
            # Resolve the peer's name from its node id for display.
            destination_node_name = [
                x.common_name()
                for x in node_list
                if x.id() == port["destination_node_id"]
            ]
            destination_node_name = destination_node_name[0]
            destination_port_slot = None
            if port["destination_slot"] is None:
                destination_port_slot = f'{port["destination_port"]}'
            else:
                destination_port_slot = (
                    f'{port["destination_slot"]}:{port["destination_port"]}'
                )
            if port["slot"] is None:
                port_string = f'{port["port"]:>02}==>{destination_node_name}:{destination_port_slot}'
            else:
                port_string = f'{port["slot"]}:{port["port"]}==>{destination_node_name}:{destination_port_slot}'
            click.echo(f"        {port_string}", file=out)
            logical_index += 1
def json_output(node_list, factory, architecture, shcd, out):
    """Create a schema-validated JSON Topology file from the model.
    Serializes every node, wraps the result in a Paddle document together
    with the CANU version, architecture, source SHCD filename and a
    timestamp, validates it via the factory, and writes it to `out`.
    Args:
        node_list: A list of nodes
        factory: Node factory object (provides validate_paddle schema check)
        architecture: CSM architecture
        shcd: SHCD file
        out: Output file handle for the JSON model
    """
    topology = []
    for node in node_list:
        topology.append(node.serialize())
    paddle = {
        "canu_version": version,
        "architecture": architecture,
        "shcd_file": path.basename(shcd.name),
        "updated_at": datetime.datetime.now().strftime(
            "%Y-%m-%d %H:%M:%S",
        ),
        "topology": topology,
    }
    # Raises if the document does not match the Paddle schema.
    factory.validate_paddle(paddle)
    json_model = json.dumps(paddle, indent=2)
    click.echo(json_model, file=out)
| [
"logging.getLogger",
"click.Choice",
"click.File",
"click.echo",
"sys.exit",
"re.search",
"click.secho",
"click.option",
"pathlib.Path",
"json.dumps",
"click.command",
"network_modeling.NetworkPort.NetworkPort",
"click.prompt",
"network_modeling.NetworkNodeFactory.NetworkNodeFactory",
"r... | [((1879, 1922), 'os.path.join', 'path.join', (['project_root', '"""canu"""', '""".version"""'], {}), "(project_root, 'canu', '.version')\n", (1888, 1922), False, 'from os import path\n'), ((2034, 2068), 'logging.getLogger', 'logging.getLogger', (['"""validate_shcd"""'], {}), "('validate_shcd')\n", (2051, 2068), False, 'import logging\n'), ((2072, 2168), 'click.command', 'click.command', ([], {'cls': 'HelpColorsCommand', 'help_headers_color': '"""yellow"""', 'help_options_color': '"""blue"""'}), "(cls=HelpColorsCommand, help_headers_color='yellow',\n help_options_color='blue')\n", (2085, 2168), False, 'import click\n'), ((2477, 2579), 'click.option', 'click.option', (['"""--tabs"""'], {'help': '"""The tabs on the SHCD file to check, e.g. 10G_25G_40G_100G,NMN,HMN."""'}), "('--tabs', help=\n 'The tabs on the SHCD file to check, e.g. 10G_25G_40G_100G,NMN,HMN.')\n", (2489, 2579), False, 'import click\n'), ((2587, 2705), 'click.option', 'click.option', (['"""--corners"""'], {'help': '"""The corners on each tab, comma separated e.g. \'J37,U227,J15,T47,J20,U167\'."""'}), '(\'--corners\', help=\n "The corners on each tab, comma separated e.g. 
\'J37,U227,J15,T47,J20,U167\'."\n )\n', (2599, 2705), False, 'import click\n'), ((2818, 2904), 'click.option', 'click.option', (['"""--json"""', '"""json_"""'], {'is_flag': '(True)', 'help': '"""Output JSON model to a file"""'}), "('--json', 'json_', is_flag=True, help=\n 'Output JSON model to a file')\n", (2830, 2904), False, 'import click\n'), ((4181, 4260), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(name)s - %(levelname)s: %(message)s"""', 'level': 'log_'}), "(format='%(name)s - %(levelname)s: %(message)s', level=log_)\n", (4200, 4260), False, 'import logging\n'), ((4652, 4705), 'network_modeling.NetworkNodeFactory.NetworkNodeFactory', 'NetworkNodeFactory', ([], {'architecture_version': 'architecture'}), '(architecture_version=architecture)\n', (4670, 4705), False, 'from network_modeling.NetworkNodeFactory import NetworkNodeFactory\n'), ((16909, 16926), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (16920, 16926), False, 'from collections import defaultdict\n'), ((16937, 16995), 'openpyxl.load_workbook', 'load_workbook', (['spreadsheet'], {'read_only': '(True)', 'data_only': '(True)'}), '(spreadsheet, read_only=True, data_only=True)\n', (16950, 16995), False, 'from openpyxl import load_workbook\n'), ((40871, 40897), 'click.echo', 'click.echo', (['"""\n"""'], {'file': 'out'}), "('\\n', file=out)\n", (40881, 40897), False, 'import click\n'), ((40902, 40971), 'click.secho', 'click.secho', (['f"""{title} Node Connections"""'], {'fg': '"""bright_white"""', 'file': 'out'}), "(f'{title} Node Connections', fg='bright_white', file=out)\n", (40913, 40971), False, 'import click\n'), ((40976, 41002), 'click.echo', 'click.echo', (['dash'], {'file': 'out'}), '(dash, file=out)\n', (40986, 41002), False, 'import click\n'), ((41212, 41238), 'click.echo', 'click.echo', (['"""\n"""'], {'file': 'out'}), "('\\n', file=out)\n", (41222, 41238), False, 'import click\n'), ((41243, 41306), 'click.secho', 'click.secho', (['f"""{title} Port 
Usage"""'], {'fg': '"""bright_white"""', 'file': 'out'}), "(f'{title} Port Usage', fg='bright_white', file=out)\n", (41254, 41306), False, 'import click\n'), ((41311, 41337), 'click.echo', 'click.echo', (['dash'], {'file': 'out'}), '(dash, file=out)\n', (41321, 41337), False, 'import click\n'), ((43571, 43599), 'json.dumps', 'json.dumps', (['paddle'], {'indent': '(2)'}), '(paddle, indent=2)\n', (43581, 43599), False, 'import json\n'), ((43604, 43636), 'click.echo', 'click.echo', (['json_model'], {'file': 'out'}), '(json_model, file=out)\n', (43614, 43636), False, 'import click\n'), ((2236, 2293), 'click.Choice', 'click.Choice', (["['Full', 'TDS', 'V1']"], {'case_sensitive': '(False)'}), "(['Full', 'TDS', 'V1'], case_sensitive=False)\n", (2248, 2293), False, 'import click\n'), ((2437, 2453), 'click.File', 'click.File', (['"""rb"""'], {}), "('rb')\n", (2447, 2453), False, 'import click\n'), ((2781, 2796), 'click.File', 'click.File', (['"""w"""'], {}), "('w')\n", (2791, 2796), False, 'import click\n'), ((2979, 3030), 'click.Choice', 'click.Choice', (["['DEBUG', 'INFO', 'WARNING', 'ERROR']"], {}), "(['DEBUG', 'INFO', 'WARNING', 'ERROR'])\n", (2991, 3030), False, 'import click\n'), ((5516, 5567), 'openpyxl.load_workbook', 'load_workbook', (['shcd'], {'read_only': '(True)', 'data_only': '(True)'}), '(shcd, read_only=True, data_only=True)\n', (5529, 5567), False, 'from openpyxl import load_workbook\n'), ((5576, 5637), 'click.secho', 'click.secho', (['"""What tabs would you like to check in the SHCD?"""'], {}), "('What tabs would you like to check in the SHCD?')\n", (5587, 5637), False, 'import click\n'), ((5764, 5883), 'click.prompt', 'click.prompt', (['"""Please enter the tabs to check separated by a comma, e.g. 10G_25G_40G_100G,NMN,HMN."""'], {'type': 'str'}), "(\n 'Please enter the tabs to check separated by a comma, e.g. 
10G_25G_40G_100G,NMN,HMN.'\n , type=str)\n", (5776, 5883), False, 'import click\n'), ((34150, 34195), 'click.secho', 'click.secho', (['"""\nWarnings"""'], {'fg': '"""red"""', 'file': 'out'}), "('\\nWarnings', fg='red', file=out)\n", (34161, 34195), False, 'import click\n'), ((43354, 43378), 'os.path.basename', 'path.basename', (['shcd.name'], {}), '(shcd.name)\n', (43367, 43378), False, 'from os import path\n'), ((5716, 5747), 'click.secho', 'click.secho', (['f"""{x}"""'], {'fg': '"""green"""'}), "(f'{x}', fg='green')\n", (5727, 5747), False, 'import click\n'), ((6028, 6074), 'click.secho', 'click.secho', (['"""Not enough corners.\n"""'], {'fg': '"""red"""'}), "('Not enough corners.\\n', fg='red')\n", (6039, 6074), False, 'import click\n'), ((6414, 6451), 'click.secho', 'click.secho', (['f"""{corners}\n"""'], {'fg': '"""red"""'}), "(f'{corners}\\n', fg='red')\n", (6425, 6451), False, 'import click\n'), ((7002, 7054), 'click.secho', 'click.secho', (['f"""\nFor the Sheet {tab}"""'], {'fg': '"""green"""'}), '(f"""\nFor the Sheet {tab}""", fg=\'green\')\n', (7013, 7054), False, 'import click\n'), ((7078, 7166), 'click.prompt', 'click.prompt', (['"""Enter the cell of the upper left corner (Labeled \'Source\')"""'], {'type': 'str'}), '("Enter the cell of the upper left corner (Labeled \'Source\')",\n type=str)\n', (7090, 7166), False, 'import click\n'), ((7234, 7300), 'click.prompt', 'click.prompt', (['"""Enter the cell of the lower right corner"""'], {'type': 'str'}), "('Enter the cell of the lower right corner', type=str)\n", (7246, 7300), False, 'import click\n'), ((14480, 14491), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (14488, 14491), False, 'import sys\n'), ((14963, 14985), 're.search', 're.search', (['"""\\\\D"""', 'port'], {}), "('\\\\D', port)\n", (14972, 14985), False, 'import re\n'), ((15195, 15206), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15203, 15206), False, 'import sys\n'), ((15468, 15479), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', 
(15476, 15479), False, 'import sys\n'), ((16434, 16445), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (16442, 16445), False, 'import sys\n'), ((17386, 17457), 'click.secho', 'click.secho', (['f"""Tab {sheet} not found in {spreadsheet.name}\n"""'], {'fg': '"""red"""'}), "(f'Tab {sheet} not found in {spreadsheet.name}\\n', fg='red')\n", (17397, 17457), False, 'import click\n'), ((17470, 17527), 'click.secho', 'click.secho', (['f"""Available tabs: {wb.sheetnames}"""'], {'fg': '"""red"""'}), "(f'Available tabs: {wb.sheetnames}', fg='red')\n", (17481, 17527), False, 'import click\n'), ((17540, 17551), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (17548, 17551), False, 'import sys\n'), ((18444, 18548), 'click.secho', 'click.secho', (['f"""Bad range of cells entered for tab {sheet}:{range_start}:{range_end}."""'], {'fg': '"""red"""'}), "(\n f'Bad range of cells entered for tab {sheet}:{range_start}:{range_end}.',\n fg='red')\n", (18455, 18548), False, 'import click\n'), ((18599, 18870), 'click.secho', 'click.secho', (['f"""Not enough columns exist.\nColumns must exist in the following order, but may have other columns in between:\n{required_header}\nEnsure that the upper left corner (Labeled \'Source\'), and the lower right corner of the table is entered."""'], {'fg': '"""red"""'}), '(\n f"""Not enough columns exist.\nColumns must exist in the following order, but may have other columns in between:\n{required_header}\nEnsure that the upper left corner (Labeled \'Source\'), and the lower right corner of the table is entered."""\n , fg=\'red\')\n', (18610, 18870), False, 'import click\n'), ((18976, 18987), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (18984, 18987), False, 'import sys\n'), ((27787, 27830), 'network_modeling.NetworkPort.NetworkPort', 'NetworkPort', ([], {'number': 'src_port', 'slot': 'src_slot'}), '(number=src_port, slot=src_slot)\n', (27798, 27830), False, 'from network_modeling.NetworkPort import NetworkPort\n'), ((30252, 30295), 
'network_modeling.NetworkPort.NetworkPort', 'NetworkPort', ([], {'number': 'dst_port', 'slot': 'dst_slot'}), '(number=dst_port, slot=dst_slot)\n', (30263, 30295), False, 'from network_modeling.NetworkPort import NetworkPort\n'), ((34242, 34494), 'click.secho', 'click.secho', (['("""\nNode type could not be determined for the following.""" +\n """\nThese nodes are not currently included in the model.""" +\n """\n(This may be a missing architectural definition/lookup or a spelling error)"""\n )'], {'fg': '"""red"""', 'file': 'out'}), '("""\nNode type could not be determined for the following.""" +\n """\nThese nodes are not currently included in the model.""" +\n """\n(This may be a missing architectural definition/lookup or a spelling error)"""\n , fg=\'red\', file=out)\n', (34253, 34494), False, 'import click\n'), ((34580, 34607), 'click.secho', 'click.secho', (['dash'], {'file': 'out'}), '(dash, file=out)\n', (34591, 34607), False, 'import click\n'), ((34675, 34699), 'natsort.natsorted', 'natsort.natsorted', (['nodes'], {}), '(nodes)\n', (34692, 34699), False, 'import natsort\n'), ((34761, 34778), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (34772, 34778), False, 'from collections import defaultdict\n'), ((35998, 36167), 'click.secho', 'click.secho', (['("""\nThe following nodes have zero connections""" +\n """\n(The node type may not have been found or no connections are present)"""\n )'], {'fg': '"""red"""', 'file': 'out'}), '("""\nThe following nodes have zero connections""" +\n """\n(The node type may not have been found or no connections are present)"""\n , fg=\'red\', file=out)\n', (36009, 36167), False, 'import click\n'), ((36244, 36271), 'click.secho', 'click.secho', (['dash'], {'file': 'out'}), '(dash, file=out)\n', (36255, 36271), False, 'import click\n'), ((36346, 36370), 'natsort.natsorted', 'natsort.natsorted', (['nodes'], {}), '(nodes)\n', (36363, 36370), False, 'import natsort\n'), ((36508, 36585), 'click.secho', 
'click.secho', (['"""\nThe following nodes should be renamed"""'], {'fg': '"""red"""', 'file': 'out'}), '("""\nThe following nodes should be renamed""", fg=\'red\', file=out)\n', (36519, 36585), False, 'import click\n'), ((36658, 36685), 'click.secho', 'click.secho', (['dash'], {'file': 'out'}), '(dash, file=out)\n', (36669, 36685), False, 'import click\n'), ((36963, 36987), 'natsort.natsorted', 'natsort.natsorted', (['nodes'], {}), '(nodes)\n', (36980, 36987), False, 'import natsort\n'), ((37248, 37419), 'click.secho', 'click.secho', (['("""\nSHCD port definitions are using a deprecated "j" prefix""" +\n """\n(Ports should be an integer, remove the "j" in each cell)""")'], {'fg': '"""red"""', 'file': 'out'}), '("""\nSHCD port definitions are using a deprecated "j" prefix""" +\n """\n(Ports should be an integer, remove the "j" in each cell)""", fg=\n \'red\', file=out)\n', (37259, 37419), False, 'import click\n'), ((37496, 37523), 'click.secho', 'click.secho', (['dash'], {'file': 'out'}), '(dash, file=out)\n', (37507, 37523), False, 'import click\n'), ((37552, 37569), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (37563, 37569), False, 'from collections import defaultdict\n'), ((38010, 38337), 'click.secho', 'click.secho', (['(\n """\nSHCD port convention in the HMN tab is to use port 3 to represent BMCs."""\n +\n """\n(For servers, correct the following cells to use a Slot of "bmc" and a port of "1")"""\n +\n """\n(For Switches, correct the following cells to use a Slot of "mgmt" and a port of "1")"""\n )'], {'fg': '"""red"""', 'file': 'out'}), '(\n """\nSHCD port convention in the HMN tab is to use port 3 to represent BMCs."""\n +\n """\n(For servers, correct the following cells to use a Slot of "bmc" and a port of "1")"""\n +\n """\n(For Switches, correct the following cells to use a Slot of "mgmt" and a port of "1")"""\n , fg=\'red\', file=out)\n', (38021, 38337), False, 'import click\n'), ((38408, 38435), 'click.secho', 'click.secho', 
(['dash'], {'file': 'out'}), '(dash, file=out)\n', (38419, 38435), False, 'import click\n'), ((38464, 38481), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (38475, 38481), False, 'from collections import defaultdict\n'), ((38922, 39157), 'click.secho', 'click.secho', (['("""\nSHCD slot definitions used are either deprecated, missing or incorrect."""\n +\n """\n(The cells below should only be one of the following ["bmc", "ocp", "pcie-slot1, "mgmt", None])"""\n )'], {'fg': '"""red"""', 'file': 'out'}), '(\n """\nSHCD slot definitions used are either deprecated, missing or incorrect."""\n +\n """\n(The cells below should only be one of the following ["bmc", "ocp", "pcie-slot1, "mgmt", None])"""\n , fg=\'red\', file=out)\n', (38933, 39157), False, 'import click\n'), ((39224, 39251), 'click.secho', 'click.secho', (['dash'], {'file': 'out'}), '(dash, file=out)\n', (39235, 39251), False, 'import click\n'), ((39279, 39296), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (39290, 39296), False, 'from collections import defaultdict\n'), ((42946, 42992), 'click.echo', 'click.echo', (['f""" {port_string}"""'], {'file': 'out'}), "(f' {port_string}', file=out)\n", (42956, 42992), False, 'import click\n'), ((17700, 17769), 'click.secho', 'click.secho', (['f"""Bad range of cells entered for tab {sheet}."""'], {'fg': '"""red"""'}), "(f'Bad range of cells entered for tab {sheet}.', fg='red')\n", (17711, 17769), False, 'import click\n'), ((17782, 17835), 'click.secho', 'click.secho', (['f"""{range_start}:{range_end}\n"""'], {'fg': '"""red"""'}), "(f'{range_start}:{range_end}\\n', fg='red')\n", (17793, 17835), False, 'import click\n'), ((17848, 17988), 'click.secho', 'click.secho', (['"""Ensure that the upper left corner (Labeled \'Source\'), and the lower right corner of the table is entered."""'], {'fg': '"""red"""'}), '(\n "Ensure that the upper left corner (Labeled \'Source\'), and the lower right corner of the table is entered."\n , 
fg=\'red\')\n', (17859, 17988), False, 'import click\n'), ((18038, 18049), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (18046, 18049), False, 'import sys\n'), ((20289, 20399), 'click.secho', 'click.secho', (['f"""On tab {sheet}, header column {required_header[required_index]} not found."""'], {'fg': '"""red"""'}), "(\n f'On tab {sheet}, header column {required_header[required_index]} not found.'\n , fg='red')\n", (20300, 20399), False, 'import click\n'), ((20495, 20687), 'click.secho', 'click.secho', (['f"""On tab {sheet}, the header is formatted incorrectly.\nColumns must exist in the following order, but may have other columns in between:\n{required_header}"""'], {'fg': '"""red"""'}), '(\n f"""On tab {sheet}, the header is formatted incorrectly.\nColumns must exist in the following order, but may have other columns in between:\n{required_header}"""\n , fg=\'red\')\n', (20506, 20687), False, 'import click\n'), ((20798, 20809), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (20806, 20809), False, 'import sys\n'), ((21584, 21621), 'network_modeling.NodeLocation.NodeLocation', 'NodeLocation', (['src_rack', 'src_elevation'], {}), '(src_rack, src_elevation)\n', (21596, 21621), False, 'from network_modeling.NodeLocation import NodeLocation\n'), ((24848, 24905), 'network_modeling.NetworkPort.NetworkPort', 'NetworkPort', ([], {'number': 'src_parent_port', 'slot': 'src_parent_slot'}), '(number=src_parent_port, slot=src_parent_slot)\n', (24859, 24905), False, 'from network_modeling.NetworkPort import NetworkPort\n'), ((26077, 26126), 'network_modeling.NetworkPort.NetworkPort', 'NetworkPort', ([], {'number': 'parent_port', 'slot': 'parent_slot'}), '(number=parent_port, slot=parent_slot)\n', (26088, 26126), False, 'from network_modeling.NetworkPort import NetworkPort\n'), ((28519, 28556), 'network_modeling.NodeLocation.NodeLocation', 'NodeLocation', (['dst_rack', 'dst_elevation'], {}), '(dst_rack, dst_elevation)\n', (28531, 28556), False, 'from 
network_modeling.NodeLocation import NodeLocation\n'), ((35792, 35894), 'click.secho', 'click.secho', (['"""Nodes that show up as MAC addresses might need to have LLDP enabled."""'], {'file': 'out'}), "(\n 'Nodes that show up as MAC addresses might need to have LLDP enabled.',\n file=out)\n", (35803, 35894), False, 'import click\n'), ((36418, 36464), 'click.secho', 'click.secho', (['node'], {'fg': '"""bright_white"""', 'file': 'out'}), "(node, fg='bright_white', file=out)\n", (36429, 36464), False, 'import click\n'), ((37035, 37121), 'click.secho', 'click.secho', (['f"""{node[0]} should be renamed {node[1]}"""'], {'fg': '"""bright_white"""', 'file': 'out'}), "(f'{node[0]} should be renamed {node[1]}', fg='bright_white',\n file=out)\n", (37046, 37121), False, 'import click\n'), ((37824, 37883), 'click.secho', 'click.secho', (['f"""Sheet: {sheet}"""'], {'fg': '"""bright_white"""', 'file': 'out'}), "(f'Sheet: {sheet}', fg='bright_white', file=out)\n", (37835, 37883), False, 'import click\n'), ((38743, 38802), 'click.secho', 'click.secho', (['f"""Sheet: {sheet}"""'], {'fg': '"""bright_white"""', 'file': 'out'}), "(f'Sheet: {sheet}', fg='bright_white', file=out)\n", (38754, 38802), False, 'import click\n'), ((39549, 39608), 'click.secho', 'click.secho', (['f"""Sheet: {sheet}"""'], {'fg': '"""bright_white"""', 'file': 'out'}), "(f'Sheet: {sheet}', fg='bright_white', file=out)\n", (39560, 39608), False, 'import click\n'), ((42051, 42110), 'click.secho', 'click.secho', (['f""" {port_string}"""'], {'fg': '"""green"""', 'file': 'out'}), "(f' {port_string}', fg='green', file=out)\n", (42062, 42110), False, 'import click\n'), ((43402, 43425), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (43423, 43425), False, 'import datetime\n'), ((9140, 9164), 're.findall', 're.findall', (['"""\\\\d+"""', 'name'], {}), "('\\\\d+', name)\n", (9150, 9164), False, 'import re\n'), ((21703, 21834), 'click.secho', 'click.secho', (['f"""Bad cell data or range of cells entered 
for sheet {sheet} in row {current_row} for source data."""'], {'fg': '"""red"""'}), "(\n f'Bad cell data or range of cells entered for sheet {sheet} in row {current_row} for source data.'\n , fg='red')\n", (21714, 21834), False, 'import click\n'), ((21900, 21979), 'click.secho', 'click.secho', (['"""Ensure the range entered does not contain empty cells."""'], {'fg': '"""red"""'}), "('Ensure the range entered does not contain empty cells.', fg='red')\n", (21911, 21979), False, 'import click\n'), ((22055, 22066), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (22063, 22066), False, 'import sys\n'), ((28638, 28774), 'click.secho', 'click.secho', (['f"""Bad cell data or range of cells entered for sheet {sheet} in row {current_row} for destination data."""'], {'fg': '"""red"""'}), "(\n f'Bad cell data or range of cells entered for sheet {sheet} in row {current_row} for destination data.'\n , fg='red')\n", (28649, 28774), False, 'import click\n'), ((28840, 28919), 'click.secho', 'click.secho', (['"""Ensure the range entered does not contain empty cells."""'], {'fg': '"""red"""'}), "('Ensure the range entered does not contain empty cells.', fg='red')\n", (28851, 28919), False, 'import click\n'), ((28995, 29006), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (29003, 29006), False, 'import sys\n'), ((32158, 32169), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (32166, 32169), False, 'import sys\n'), ((35364, 35410), 'click.secho', 'click.secho', (['node'], {'fg': '"""bright_white"""', 'file': 'out'}), "(node, fg='bright_white', file=out)\n", (35375, 35410), False, 'import click\n'), ((35543, 35602), 'click.secho', 'click.secho', (['f"""Sheet: {sheet}"""'], {'fg': '"""bright_white"""', 'file': 'out'}), "(f'Sheet: {sheet}', fg='bright_white', file=out)\n", (35554, 35602), False, 'import click\n'), ((35718, 35743), 'click.secho', 'click.secho', (['""""""'], {'file': 'out'}), "('', file=out)\n", (35729, 35743), False, 'import click\n'), ((9368, 9392), 're.findall', 
're.findall', (['"""\\\\d+"""', 'name'], {}), "('\\\\d+', name)\n", (9378, 9392), False, 'import re\n'), ((26961, 26972), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (26969, 26972), False, 'import sys\n'), ((31173, 31184), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (31181, 31184), False, 'import sys\n'), ((35670, 35697), 'click.secho', 'click.secho', (['cell'], {'file': 'out'}), '(cell, file=out)\n', (35681, 35697), False, 'import click\n'), ((1805, 1819), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1809, 1819), False, 'from pathlib import Path\n'), ((23224, 23235), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (23232, 23235), False, 'import sys\n')] |
"""
Calculation of the superfluid neutron and superconducting proton gap
in the neutron star core following the parametrisation introduced in
Andersson et al. (2005) with the parameters given in Ho et al. (2015)
Authors:
<NAME> (<EMAIL>)
Copyright (c) <NAME>
"""
from functools import lru_cache

import numpy as np
from scipy.optimize import newton
# Fit parameters for the two superfluid gap parametrisations: the CCDK
# gap for the protons (singlet pairing) and the TToa gap for the
# neutrons (triplet pairing).  Delta_0 sets the amplitude and k_1..k_4
# shape the curve as a function of the Fermi wave number -- presumably
# in MeV and 1/fm, matching the gap functions below (see Ho et al. 2015).
proton_singlet = {"Delta_0": 102, "k_1": 0, "k_2": 9, "k_3": 1.3, "k_4": 1.5}
neutron_triplet = {"Delta_0": 2.1, "k_1": 1.1, "k_2": 0.6, "k_3": 3.2, "k_4": 2.4}
def gap_neutrons_full(k_F_n: float, params=None) -> float:
    """
    Calculates the parametrised neutron energy gap in MeV for the full
    range (no cut-off) of the neutron Fermi number in 1/fm.

    :param k_F_n: neutron Fermi number in 1/fm
    :param params: optional fit-parameter dictionary with the keys
        ``Delta_0`` and ``k_1`` .. ``k_4``; defaults to the module-level
        ``neutron_triplet`` (TToa) parametrisation, so existing callers
        are unaffected
    :return: the (non-negative) energy gap in MeV
    """
    if params is None:
        params = neutron_triplet
    # The gap vanishes quadratically at k_1 and k_3 and is damped by the
    # k_2 / k_4 terms in between; compute each square only once.
    left = (k_F_n - params["k_1"]) ** 2
    right = (k_F_n - params["k_3"]) ** 2
    return (
        params["Delta_0"]
        * left / (left + params["k_2"])
        * right / (right + params["k_4"])
    )
@lru_cache(maxsize=1)
def _neutron_gap_limits():
    """
    Finds the two zeros of the full neutron gap curve once.

    The roots bound the interval on which the parametrisation is valid;
    caching them avoids re-running the Newton solver on every single
    call to :func:`gap_neutrons` (the limits never change).
    """
    return newton(gap_neutrons_full, 1.1), newton(gap_neutrons_full, 3.5)


def gap_neutrons(k_F_n: float) -> float:
    """
    Determines the neutron energy gap in MeV as a function of the
    neutron Fermi number in 1/fm.

    :param k_F_n: neutron Fermi number in 1/fm
    :return: the gap in MeV, or ``numpy.nan`` outside the valid interval
    """
    limit_left, limit_right = _neutron_gap_limits()
    # Outside the interval bracketed by the two zeros the fit is invalid.
    if k_F_n < limit_left or k_F_n > limit_right:
        return np.nan
    return gap_neutrons_full(k_F_n)
def gap_protons_full(k_F_p: float, params=None) -> float:
    """
    Calculates the parametrised proton energy gap in MeV for the full
    range (no cut-off) of the proton Fermi number in 1/fm.

    (Docstring fix: the original said "neutron Fermi number".)

    :param k_F_p: proton Fermi number in 1/fm
    :param params: optional fit-parameter dictionary with the keys
        ``Delta_0`` and ``k_1`` .. ``k_4``; defaults to the module-level
        ``proton_singlet`` (CCDK) parametrisation, so existing callers
        are unaffected
    :return: the (non-negative) energy gap in MeV
    """
    if params is None:
        params = proton_singlet
    # The gap vanishes quadratically at k_1 and k_3 and is damped by the
    # k_2 / k_4 terms in between; compute each square only once.
    left = (k_F_p - params["k_1"]) ** 2
    right = (k_F_p - params["k_3"]) ** 2
    return (
        params["Delta_0"]
        * left / (left + params["k_2"])
        * right / (right + params["k_4"])
    )
@lru_cache(maxsize=1)
def _proton_gap_limits():
    """
    Finds the two zeros of the full proton gap curve once.

    The roots bound the interval on which the parametrisation is valid;
    caching them avoids re-running the Newton solver on every single
    call to :func:`gap_protons` (the limits never change).
    """
    return newton(gap_protons_full, 0.01), newton(gap_protons_full, 1.5)


def gap_protons(k_F_p: float) -> float:
    """
    Determines the proton energy gap in MeV as a function of the proton
    Fermi number in 1/fm.

    :param k_F_p: proton Fermi number in 1/fm
    :return: the gap in MeV, or ``numpy.nan`` outside the valid interval
    """
    limit_left, limit_right = _proton_gap_limits()
    # Outside the interval bracketed by the two zeros the fit is invalid.
    if k_F_p < limit_left or k_F_p > limit_right:
        return np.nan
    return gap_protons_full(k_F_p)
| [
"scipy.optimize.newton"
] | [((1311, 1341), 'scipy.optimize.newton', 'newton', (['gap_neutrons_full', '(1.1)'], {}), '(gap_neutrons_full, 1.1)\n', (1317, 1341), False, 'from scipy.optimize import newton\n'), ((1360, 1390), 'scipy.optimize.newton', 'newton', (['gap_neutrons_full', '(3.5)'], {}), '(gap_neutrons_full, 3.5)\n', (1366, 1390), False, 'from scipy.optimize import newton\n'), ((2242, 2272), 'scipy.optimize.newton', 'newton', (['gap_protons_full', '(0.01)'], {}), '(gap_protons_full, 0.01)\n', (2248, 2272), False, 'from scipy.optimize import newton\n'), ((2291, 2320), 'scipy.optimize.newton', 'newton', (['gap_protons_full', '(1.5)'], {}), '(gap_protons_full, 1.5)\n', (2297, 2320), False, 'from scipy.optimize import newton\n')] |
import requests
import json
from auth import (
user,
password
)
class Scrape:
    """Scrapes aircraft state vectors from the OpenSky Network REST API.

    The query is limited to a fixed bounding box (roughly the Berlin
    area) and the received flights are sorted into currently-landing,
    currently-taking-off and already-landed buckets.
    """

    def __init__(self) -> None:
        # NOTE(review): credentials are embedded in the URL; consider
        # passing them via requests' ``auth=`` parameter instead.
        self.url = 'https://' + user + ':' + password + \
            '@opensky-network.org/api/states/all?lamin=52.311624&lomin=13.426828&lamax=52.416285&lomax=13.636693'
        self.all_flights = []
        self.currantly_landing = []
        self.currantly_take_off = []
        self.already_landed = []

    def scrape_data(self) -> bool:
        """Queries the API once and processes the returned state vectors.

        :return: ``True`` when states were received and processed,
            ``False`` when the API reported no aircraft.
        """
        result = requests.get(url=self.url)
        result_json = json.loads(result.text)
        states = result_json['states']
        if states is None:
            print("No aircraft found")
            return False
        self.set_allflight(states)
        # Bug fix: the original implicitly returned None (falsy) on the
        # success path despite the ``-> bool`` annotation.
        return True

    def set_allflight(self, result) -> None:
        """Converts raw state vectors into dictionaries and filters them.

        :param result: list of OpenSky state vectors; index 0 is icao24,
            1 callsign, 2 origin country, 4 a time stamp, 8 on_ground,
            11 vertical rate and 13 the altitude field -- presumably the
            geometric altitude, verify against the OpenSky API docs.
        """
        for state in result:
            callsign = state[1].replace(" ", "")
            # Ignore empty or very short callsigns (incomplete data).
            if len(callsign) > 3:
                self.all_flights.append({
                    "callsign": callsign,
                    "icao24": state[0],
                    "origin_country": state[2],
                    "altitude": state[13],
                    "on_ground": state[8],
                    "time_stamp": state[4],
                    "vertical_rate": state[11],
                })
        self.filter_allfligths()

    def filter_allfligths(self) -> None:
        """Sorts ``all_flights`` into the three phase buckets.

        The (misspelled) method name is kept for backward compatibility
        with existing callers.
        """
        for flight in self.all_flights:
            rate = flight["vertical_rate"]
            if flight["on_ground"] or rate is None:
                # Grounded flights and flights without a climb/descent
                # rate are not assigned to any bucket (as before).
                continue
            altitude = flight["altitude"]
            if altitude is None:
                # Robustness fix: the original raised TypeError when the
                # API returned no altitude; such flights cannot be
                # classified, so they are skipped instead.
                continue
            if altitude >= 700:
                # Airborne well above the threshold: no longer in an
                # approach/departure phase near the field.
                self.already_landed.append(flight)
            elif rate < 0:
                self.currantly_landing.append(flight)
            else:
                # Climbing or level below the threshold counts as take-off.
                self.currantly_take_off.append(flight)

    def get_allfligth_len(self) -> int:
        """Returns the number of flights kept after the callsign filter."""
        return len(self.all_flights)
"json.loads",
"requests.get"
] | [((489, 515), 'requests.get', 'requests.get', ([], {'url': 'self.url'}), '(url=self.url)\n', (501, 515), False, 'import requests\n'), ((538, 561), 'json.loads', 'json.loads', (['result.text'], {}), '(result.text)\n', (548, 561), False, 'import json\n')] |
# -*- coding: utf-8 -*-
from odoo import http
from odoo.addons.website_sale_delivery.controllers.main import WebsiteSaleDelivery
from odoo.http import request
class WebsiteSaleCouponDelivery(WebsiteSaleDelivery):
    """Extends the website-sale delivery controllers so coupon/reward
    lines (in particular free-shipping rewards) stay consistent with the
    carrier selected at checkout."""

    @http.route()
    def update_eshop_carrier(self, **post):
        """Recompute coupon lines after a carrier change and, when a
        free-shipping reward applies, rewrite the rendered amounts so the
        delivery price is shown as zero.

        :param post: forwarded unchanged to the parent route.
        :return: the parent's result dict, possibly updated with the
            re-rendered monetary HTML snippets.
        """
        Monetary = request.env['ir.qweb.field.monetary']
        result = super(WebsiteSaleCouponDelivery, self).update_eshop_carrier(**post)
        order = request.website.sale_get_order()
        free_shipping_lines = None
        if order:
            # Changing the carrier can change which rewards apply, so
            # recompute coupon lines and taxes before reading the totals.
            order.recompute_coupon_lines()
            order.validate_taxes_on_sales_order()
            free_shipping_lines = order._get_free_shipping_lines()
        if free_shipping_lines:
            currency = order.currency_id
            amount_free_shipping = sum(free_shipping_lines.mapped('price_subtotal'))
            # Re-render the amounts: delivery becomes free and the shown
            # discount excludes the free-shipping part of the reward.
            result.update({
                'new_amount_delivery': Monetary.value_to_html(0.0, {'display_currency': currency}),
                'new_amount_untaxed': Monetary.value_to_html(order.amount_untaxed, {'display_currency': currency}),
                'new_amount_tax': Monetary.value_to_html(order.amount_tax, {'display_currency': currency}),
                'new_amount_total': Monetary.value_to_html(order.amount_total, {'display_currency': currency}),
                'new_amount_order_discounted': Monetary.value_to_html(order.reward_amount - amount_free_shipping, {'display_currency': currency}),
            })
        return result

    @http.route()
    def cart_carrier_rate_shipment(self, carrier_id, **kw):
        """Short-circuit the carrier rate computation when a free-shipping
        reward is active.

        Skipping the parent avoids surfacing carrier errors (e.g.
        "delivery only for Belgium"): the rate is shown as free until the
        user actually clicks the carrier.

        :param carrier_id: id of the delivery carrier being rated.
        :return: a rate dict flagged as free delivery, or the parent's
            computation when no free-shipping reward applies.
        """
        Monetary = request.env['ir.qweb.field.monetary']
        order = request.website.sale_get_order(force_create=True)
        free_shipping_lines = order._get_free_shipping_lines()
        if free_shipping_lines:
            return {
                'carrier_id': carrier_id,
                'status': True,
                'is_free_delivery': True,
                'new_amount_delivery': Monetary.value_to_html(0.0, {'display_currency': order.currency_id}),
                'error_message': None,
            }
        return super(WebsiteSaleCouponDelivery, self).cart_carrier_rate_shipment(carrier_id, **kw)
| [
"odoo.http.request.website.sale_get_order",
"odoo.http.route"
] | [((221, 233), 'odoo.http.route', 'http.route', ([], {}), '()\n', (231, 233), False, 'from odoo import http\n'), ((1496, 1508), 'odoo.http.route', 'http.route', ([], {}), '()\n', (1506, 1508), False, 'from odoo import http\n'), ((436, 468), 'odoo.http.request.website.sale_get_order', 'request.website.sale_get_order', ([], {}), '()\n', (466, 468), False, 'from odoo.http import request\n'), ((1642, 1691), 'odoo.http.request.website.sale_get_order', 'request.website.sale_get_order', ([], {'force_create': '(True)'}), '(force_create=True)\n', (1672, 1691), False, 'from odoo.http import request\n')] |
# Copyright 2021 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests the NeuralLog dataset.
"""
import logging
import os
import unittest
from typing import List
from neurallog.knowledge.program import NeuralLogProgram, NO_EXAMPLE_SET
from neurallog.language.language import HornClause, Predicate, Constant
from neurallog.language.parser.ply.neural_log_parser import NeuralLogLexer, \
NeuralLogParser
from neurallog.network.dataset import LanguageDataset
from neurallog.network.trainer import Trainer
from neurallog.run import configure_log
# Directory of this test file; the word-piece vocabulary used by the
# language dataset lives next to it.
RESOURCE_PATH = os.path.dirname(os.path.realpath(__file__))
VOCABULARY_FILE = os.path.join(RESOURCE_PATH, "vocab.txt")
# NER tag set: BIO labels, the sub-token marker "X" and the BERT-style
# control tokens.
POSSIBLE_LABELS = ["[PAD]", "B", "I", "O", "X", "[CLS]", "[SEP]"]
# One example sentence, already split into word-level tokens.
# noinspection SpellCheckingInspection
TOKENS = [
    "Clustering",
    "of",
    "missense",
    "mutations",
    "in",
    "the",
    "ataxia",
    "-",
    "telangiectasia",
    "gene",
    "in",
    "a",
    "sporadic",
    "T",
    "-",
    "cell",
    "leukaemia",
    ".",
]
# The same sentence after word-piece tokenisation, paired with the label
# expected for each sub-token: the first piece of a word keeps the word
# label, continuation pieces ("##...") are tagged with the sub-token
# label "X", and the sequence is framed by "[CLS]"/"[SEP]".  The
# "Included in N" notes mark the last sub-token kept when the maximum
# sentence length is truncated to N (see the truncation tests below).
EXPANDED_TOKENS = [
    ("[CLS]", "[CLS]"),
    ("C", "O"),
    ("##luster", "X"),
    ("##ing", "X"),
    ("of", "O"),
    ("miss", "O"),
    ("##ense", "X"),
    ("mutations", "O"),
    ("in", "O"),
    ("the", "O"),
    ("at", "O"),
    ("##ax", "X"),
    ("##ia", "X"),
    ("-", "O"),
    ("te", "O"),
    ("##lang", "X"),
    ("##ie", "X"),
    ("##ct", "X"),  # Included in 18
    ("##asi", "X"),
    ("##a", "X"),  # Included in 20
    ("gene", "O"),
    ("in", "O"),  # Included in 22
    ("a", "O"),
    ("s", "O"),
    ("##poradic", "X"),
    ("T", "O"),
    ("-", "O"),
    ("cell", "O"),
    ("le", "O"),
    ("##uka", "X"),
    ("##emia", "X"),
    (".", "O"),
    ("[SEP]", "[SEP]"),
]
# One mega_example/4 fact per token, every token labelled "O".
EXAMPLE = \
    "\n".join(map(lambda x: f"mega_example(0, ner, \"{x}\", \"O\").", TOKENS))
# One label/1 fact per possible tag; spliced into PROGRAM below.
possible_labels = "\n".join(map(lambda x: f'label("{x}").', POSSIBLE_LABELS))
# The NeuralLog knowledge base: the word vocabulary, the label facts and
# the ner/2 rule under test.
# noinspection SpellCheckingInspection
PROGRAM = f"""
word("<OOV>").
word("Clustering").
word("of").
word("missense").
word("mutations").
word("in").
word("the").
word("ataxia").
word("-").
word("telangiectasia").
word("gene").
word("a").
word("sporadic").
word("T").
word("cell").
word("leukaemia").
word(".").
{possible_labels}
type_ner(X, Y):- ner(X, Y), word(X), label(Y).
ner(X, Y) :- true.
set_parameter(inverse_relations, false).
""".strip()
# Configuration for the plain (word-level) sequence dataset.
SEQUENCE_DATASET = """
set_parameter(dataset_class, class_name, sequence_dataset).
set_parameter(dataset_class, config, oov_word, "<OOV>").
set_parameter(dataset_class, config, expand_one_hot, "False").
set_parameter(dataset_class, config, empty_word_index, -1).
""".strip()
# Configuration template for the language (word-piece) dataset;
# maximum_len and pad_to_maximum are filled in by each test via format().
LANGUAGE_DATASET = f"""
set_parameter(dataset_class, class_name, language_dataset).
set_parameter(dataset_class, config, inverse_relations, false).
set_parameter(dataset_class, config, vocabulary_file, "{VOCABULARY_FILE}").
set_parameter(dataset_class, config, initial_token, "[CLS]").
set_parameter(dataset_class, config, final_token, "[SEP]").
set_parameter(dataset_class, config, pad_token, "[PAD]").
set_parameter(dataset_class, config, sub_token_label, "X").
set_parameter(dataset_class, config, maximum_sentence_length, {{maximum_len}}).
set_parameter(dataset_class, config, pad_to_maximum_length, {{pad_to_maximum}}).
set_parameter(dataset_class, config, do_lower_case, false).
""".strip()
def _read_program(program):
    """
    Parses a NeuralLog program from its textual representation.

    :param program: the program text
    :type program: str
    :return: the list of parsed clauses
    :rtype: List[HornClause]
    """
    tokenizer = NeuralLogLexer()
    clause_parser = NeuralLogParser(tokenizer)
    clause_parser.parser.parse(input=program, lexer=tokenizer)
    clause_parser.expand_placeholders()
    clauses = clause_parser.get_clauses()
    # noinspection PyTypeChecker
    return list(clauses)
# noinspection DuplicatedCode
class TestSequenceDataset(unittest.TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        """Enables verbose logging once for the whole test case."""
        configure_log(level=logging.DEBUG)
def test_sequence_dataset(self):
program = NeuralLogProgram()
program.add_clauses(_read_program(PROGRAM))
program.add_clauses(_read_program(SEQUENCE_DATASET))
program.add_clauses(_read_program(EXAMPLE))
program.build_program()
trainer = Trainer(program, output_path=None)
trainer.init_model()
trainer.read_parameters()
neural_dataset = trainer.build_dataset(override_targets=False)
result = list(neural_dataset.build(NO_EXAMPLE_SET))
# dataset = neural_dataset.get_dataset(NO_EXAMPLE_SET)
self.assertEqual(len(TOKENS), len(result[0][0][0]))
self.assertEqual((len(TOKENS), len(POSSIBLE_LABELS)),
result[0][1][0].numpy().shape)
target = Predicate("ner", 2)
label_index = program.get_index_of_constant(target, 1, Constant("O"))
for i, token_id in enumerate(result[0][0][0]):
self.assertEqual(
program.get_index_of_constant(target, 0, Constant(TOKENS[i])),
token_id)
self.assertEqual(label_index, result[0][1][0][i].numpy().argmax())
def test_language_dataset(self):
program = NeuralLogProgram()
program.add_clauses(_read_program(PROGRAM))
program.add_clauses(_read_program(
LANGUAGE_DATASET.format(maximum_len=128, pad_to_maximum="false")))
program.add_clauses(_read_program(EXAMPLE))
program.build_program()
trainer = Trainer(program, output_path=None)
trainer.init_model()
trainer.read_parameters()
# noinspection PyTypeChecker
neural_dataset: LanguageDataset = \
trainer.build_dataset(override_targets=False)
result = list(neural_dataset.build(NO_EXAMPLE_SET))
# dataset = neural_dataset.get_dataset(NO_EXAMPLE_SET)
self.assertEqual(len(EXPANDED_TOKENS), len(result[0][0][0]))
self.assertEqual((len(EXPANDED_TOKENS), len(POSSIBLE_LABELS)),
result[0][1][0].numpy().shape)
target = Predicate("ner", 2)
expected_ids = neural_dataset.tokenizer.convert_tokens_to_ids(
map(lambda x: x[0], EXPANDED_TOKENS))
for i, token_id in enumerate(result[0][0][0]):
self.assertEqual(expected_ids[i], token_id)
label_index = program.get_index_of_constant(
target, -1, Constant(EXPANDED_TOKENS[i][1]))
self.assertEqual(label_index, result[0][1][0][i].numpy().argmax())
    def test_language_dataset_maximum_length(self):
        """Checks truncation with a maximum sentence length of 22."""
        self.language_dataset_maximum_length(22)
    def test_language_dataset_maximum_length_end_sub_token(self):
        """Checks truncation (length 20) where the cut ends on a sub-token."""
        self.language_dataset_maximum_length(20)
def test_language_dataset_maximum_length_middle_sub_token(self):
    # Length 18 — per the name, the cut lands in the middle of a sub-token
    # sequence (depends on EXPANDED_TOKENS; TODO confirm).
    self.language_dataset_maximum_length(18)
def language_dataset_maximum_length(self, maximum_length):
    """
    Tests the language dataset with a maximum length.

    Builds the same program as `test_language_dataset` but with a truncating
    `maximum_len`, then checks that the sequence is cut to that length while
    the final position keeps the example's last token/label.

    :param maximum_length: the maximum length
    :type maximum_length: int
    """
    program = NeuralLogProgram()
    program.add_clauses(_read_program(PROGRAM))
    program.add_clauses(_read_program(
        LANGUAGE_DATASET.format(maximum_len=maximum_length,
                                pad_to_maximum="false")))
    program.add_clauses(_read_program(EXAMPLE), )
    program.build_program()
    trainer = Trainer(program, output_path=None)
    trainer.init_model()
    trainer.read_parameters()
    # noinspection PyTypeChecker
    neural_dataset: LanguageDataset = \
        trainer.build_dataset(override_targets=False)
    result = list(neural_dataset.build(NO_EXAMPLE_SET))
    # dataset = neural_dataset.get_dataset(NO_EXAMPLE_SET)
    # The built sequence is truncated to exactly `maximum_length`.
    self.assertEqual(maximum_length, len(result[0][0][0]))
    self.assertEqual(
        (maximum_length, len(POSSIBLE_LABELS)),
        result[0][1][0].numpy().shape)
    target = Predicate("ner", 2)
    expected_ids = neural_dataset.tokenizer.convert_tokens_to_ids(
        map(lambda x: x[0], EXPANDED_TOKENS))
    # All positions but the last hold the leading tokens in order...
    for i, token_id in enumerate(result[0][0][0][:maximum_length - 1]):
        self.assertEqual(expected_ids[i], token_id)
        label_index = program.get_index_of_constant(
            target, -1, Constant(EXPANDED_TOKENS[i][1]))
        self.assertEqual(label_index, result[0][1][0][i].numpy().argmax())
    # ...while the final position keeps the sequence's last token and label.
    self.assertEqual(expected_ids[-1], result[0][0][0][-1])
    label_index = program.get_index_of_constant(
        target, -1, Constant(EXPANDED_TOKENS[-1][1]))
    self.assertEqual(label_index, result[0][1][0][-1].numpy().argmax())
def test_language_dataset_padding(self):
    """Checks that pad_to_maximum fills the sequence with [PAD] tokens/labels."""
    program = NeuralLogProgram()
    program.add_clauses(_read_program(PROGRAM))
    maximum_length = 40
    program.add_clauses(_read_program(
        LANGUAGE_DATASET.format(maximum_len=maximum_length,
                                pad_to_maximum="true")))
    program.add_clauses(_read_program(EXAMPLE))
    program.build_program()
    trainer = Trainer(program, output_path=None)
    trainer.init_model()
    trainer.read_parameters()
    # noinspection PyTypeChecker
    neural_dataset: LanguageDataset = \
        trainer.build_dataset(override_targets=False)
    result = list(neural_dataset.build(NO_EXAMPLE_SET))
    # dataset = neural_dataset.get_dataset(NO_EXAMPLE_SET)
    # Padding stretches the sequence to exactly `maximum_length`.
    self.assertEqual(maximum_length, len(result[0][0][0]))
    self.assertEqual((maximum_length, len(POSSIBLE_LABELS)),
                     result[0][1][0].numpy().shape)
    target = Predicate("ner", 2)
    expected_ids = neural_dataset.tokenizer.convert_tokens_to_ids(
        map(lambda x: x[0], EXPANDED_TOKENS))
    # The real tokens come first, each with its own label.
    for i, token_id in enumerate(result[0][0][0][:len(EXPANDED_TOKENS)]):
        self.assertEqual(expected_ids[i], token_id)
        # Consistency fix: the sibling language tests (and the pad_label
        # lookup below) address the label term with index -1, not 1.
        label_index = program.get_index_of_constant(
            target, -1, Constant(EXPANDED_TOKENS[i][1]))
        self.assertEqual(label_index, result[0][1][0][i].numpy().argmax())
    # The remainder is filled with the tokenizer's [PAD] id and label.
    pad_feature = neural_dataset.tokenizer.vocab["[PAD]"]
    pad_label = program.get_index_of_constant(target, -1, Constant("[PAD]"))
    for i, token_id in enumerate(result[0][0][0][len(EXPANDED_TOKENS):]):
        i += len(EXPANDED_TOKENS)
        self.assertEqual(pad_feature, token_id)
        self.assertEqual(pad_label, result[0][1][0][i].numpy().argmax())
| [
"neurallog.language.language.Predicate",
"neurallog.network.trainer.Trainer",
"neurallog.language.parser.ply.neural_log_parser.NeuralLogParser",
"os.path.join",
"neurallog.language.language.Constant",
"os.path.realpath",
"neurallog.language.parser.ply.neural_log_parser.NeuralLogLexer",
"neurallog.run.... | [((1146, 1186), 'os.path.join', 'os.path.join', (['RESOURCE_PATH', '"""vocab.txt"""'], {}), "(RESOURCE_PATH, 'vocab.txt')\n", (1158, 1186), False, 'import os\n'), ((1100, 1126), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1116, 1126), False, 'import os\n'), ((3991, 4007), 'neurallog.language.parser.ply.neural_log_parser.NeuralLogLexer', 'NeuralLogLexer', ([], {}), '()\n', (4005, 4007), False, 'from neurallog.language.parser.ply.neural_log_parser import NeuralLogLexer, NeuralLogParser\n'), ((4021, 4043), 'neurallog.language.parser.ply.neural_log_parser.NeuralLogParser', 'NeuralLogParser', (['lexer'], {}), '(lexer)\n', (4036, 4043), False, 'from neurallog.language.parser.ply.neural_log_parser import NeuralLogLexer, NeuralLogParser\n'), ((4382, 4416), 'neurallog.run.configure_log', 'configure_log', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (4395, 4416), False, 'from neurallog.run import configure_log\n'), ((4473, 4491), 'neurallog.knowledge.program.NeuralLogProgram', 'NeuralLogProgram', ([], {}), '()\n', (4489, 4491), False, 'from neurallog.knowledge.program import NeuralLogProgram, NO_EXAMPLE_SET\n'), ((4708, 4742), 'neurallog.network.trainer.Trainer', 'Trainer', (['program'], {'output_path': 'None'}), '(program, output_path=None)\n', (4715, 4742), False, 'from neurallog.network.trainer import Trainer\n'), ((5196, 5215), 'neurallog.language.language.Predicate', 'Predicate', (['"""ner"""', '(2)'], {}), "('ner', 2)\n", (5205, 5215), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((5619, 5637), 'neurallog.knowledge.program.NeuralLogProgram', 'NeuralLogProgram', ([], {}), '()\n', (5635, 5637), False, 'from neurallog.knowledge.program import NeuralLogProgram, NO_EXAMPLE_SET\n'), ((5915, 5949), 'neurallog.network.trainer.Trainer', 'Trainer', (['program'], {'output_path': 'None'}), '(program, output_path=None)\n', (5922, 5949), False, 'from 
neurallog.network.trainer import Trainer\n'), ((6489, 6508), 'neurallog.language.language.Predicate', 'Predicate', (['"""ner"""', '(2)'], {}), "('ner', 2)\n", (6498, 6508), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((7524, 7542), 'neurallog.knowledge.program.NeuralLogProgram', 'NeuralLogProgram', ([], {}), '()\n', (7540, 7542), False, 'from neurallog.knowledge.program import NeuralLogProgram, NO_EXAMPLE_SET\n'), ((7868, 7902), 'neurallog.network.trainer.Trainer', 'Trainer', (['program'], {'output_path': 'None'}), '(program, output_path=None)\n', (7875, 7902), False, 'from neurallog.network.trainer import Trainer\n'), ((8429, 8448), 'neurallog.language.language.Predicate', 'Predicate', (['"""ner"""', '(2)'], {}), "('ner', 2)\n", (8438, 8448), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((9214, 9232), 'neurallog.knowledge.program.NeuralLogProgram', 'NeuralLogProgram', ([], {}), '()\n', (9230, 9232), False, 'from neurallog.knowledge.program import NeuralLogProgram, NO_EXAMPLE_SET\n'), ((9585, 9619), 'neurallog.network.trainer.Trainer', 'Trainer', (['program'], {'output_path': 'None'}), '(program, output_path=None)\n', (9592, 9619), False, 'from neurallog.network.trainer import Trainer\n'), ((10146, 10165), 'neurallog.language.language.Predicate', 'Predicate', (['"""ner"""', '(2)'], {}), "('ner', 2)\n", (10155, 10165), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((5279, 5292), 'neurallog.language.language.Constant', 'Constant', (['"""O"""'], {}), "('O')\n", (5287, 5292), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((9040, 9072), 'neurallog.language.language.Constant', 'Constant', (['EXPANDED_TOKENS[-1][1]'], {}), '(EXPANDED_TOKENS[-1][1])\n', (9048, 9072), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((10742, 10759), 'neurallog.language.language.Constant', 
'Constant', (['"""[PAD]"""'], {}), "('[PAD]')\n", (10750, 10759), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((6826, 6857), 'neurallog.language.language.Constant', 'Constant', (['EXPANDED_TOKENS[i][1]'], {}), '(EXPANDED_TOKENS[i][1])\n', (6834, 6857), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((8787, 8818), 'neurallog.language.language.Constant', 'Constant', (['EXPANDED_TOKENS[i][1]'], {}), '(EXPANDED_TOKENS[i][1])\n', (8795, 8818), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((10505, 10536), 'neurallog.language.language.Constant', 'Constant', (['EXPANDED_TOKENS[i][1]'], {}), '(EXPANDED_TOKENS[i][1])\n', (10513, 10536), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n'), ((5436, 5455), 'neurallog.language.language.Constant', 'Constant', (['TOKENS[i]'], {}), '(TOKENS[i])\n', (5444, 5455), False, 'from neurallog.language.language import HornClause, Predicate, Constant\n')] |
"""
This tests ensures that there is no memory leakage
when params.cpp:ExecuteMulti function does conversion of Unicode to Bytes.
In ExecuteMulti function after DoExecute label
SQLExecute returns
One scenario where SQLParamData function will be used is when there is a varchar(max),
a parameter with an unknown size in the INSERT INTO query.
In this case, a unicode string is being added to a varchar(max) field.
In order to execute the INSERT INTO query, SQLExecute is used. SQLExecute will return
SQL_NEED_DATA (SQL_NEED_DATA = 99). Then SQLParamData will be used to create a SQL
parameter and will return SQL_NEED_DATA too. When PyUnicode_Check(pInfo->cell) is true,
a conversion of Unicode to Bytes is required before it can be used by SQLPutData.
During this conversion a new PyObject, called bytes, is created and assigned to objCell.
This object never gets Py_XDECREF, and the data will stay stuck in the memory without a
reference.
This memory leak is only visible when using varchar(max) because varchar(max) requires
an additional allocation of memory that corresponds to the size of the input, while
varchar(100), for example, will not cause another SQL_NEED_DATA status.
To see how to reproduce the memory leak,
look at https://github.com/mkleehammer/pyodbc/issues/802
"""
import os
import unittest
import psutil
from tests3.testutils import add_to_path, load_setup_connection_string
add_to_path()
import pyodbc
# Size units used by the memory-growth assertion below.
KB = 1024
MB = KB * 1024

# Overridden by setUpClass from tmp\setup.cfg; None means "not configured".
CONNECTION_STRING = None
CONNECTION_STRING_ERROR_MESSAGE = (
    r"Please create tmp\setup.cfg file or set a valid value to CONNECTION_STRING."
)

# Handle on the current process, used to sample virtual memory size.
process = psutil.Process()
def memory():
    """Return the current process's virtual memory size (VMS), in bytes."""
    return process.memory_info().vms
class SQLPutDataUnicodeToBytesMemoryLeakTestCase(unittest.TestCase):
    """Regression test: fast_executemany inserts into varchar(max) must not grow memory."""

    driver = pyodbc

    @classmethod
    def setUpClass(cls):
        # The connection string comes from tmp\setup.cfg (keyed by this
        # file's name) or falls back to the module-level CONNECTION_STRING.
        filename = os.path.splitext(os.path.basename(__file__))[0]
        cls.connection_string = (
            load_setup_connection_string(filename) or CONNECTION_STRING
        )
        if not cls.connection_string:
            # Bug fix: the original `return`ed the ValueError instead of
            # raising it, so the tests silently ran without a connection
            # string and failed later with a confusing connect error.
            raise ValueError(CONNECTION_STRING_ERROR_MESSAGE)

    def test__varchar_max__inserting_many_rows__same_memory_usage(self):
        varchar_limit = "max"
        num_rows = 50_000
        data = [(i, f"col{i:06}", 3.14159265 * (i + 1)) for i in range(num_rows)]
        table_name = "pd_test"
        col_names = ["id", "txt_col", "float_col"]
        ins_sql = f"INSERT INTO {table_name} ({','.join(col_names)}) VALUES ({','.join('?' * len(col_names))})"
        with pyodbc.connect(self.connection_string, autocommit=True) as cnxn:
            # First insert warms caches/allocators; its growth is not part
            # of what we are measuring.
            self.action(cnxn, data, ins_sql, table_name, varchar_limit)
            for _ in range(3):
                start_memory = memory()
                self.action(cnxn, data, ins_sql, table_name, varchar_limit)
                end_memory = memory()
                memory_diff = end_memory - start_memory
                # A leaking build grows every pass; allow 100 KB of noise.
                self.assertLess(memory_diff, 100 * KB)

    def action(self, cnxn, data, ins_sql, table_name, varchar_limit):
        """(Re)create the test table and bulk-insert `data` with fast_executemany."""
        crsr = cnxn.cursor()
        crsr.execute(f"DROP TABLE IF EXISTS {table_name}")
        crsr.execute(
            f"CREATE TABLE {table_name} (id int, txt_col varchar({varchar_limit}), float_col float(53))"
        )
        crsr.fast_executemany = True
        crsr.executemany(ins_sql, data)
        crsr.close()
def main():
    """Entry point: discover and run the tests in this module."""
    unittest.main()
# Allow running this file directly as a script.
if __name__ == "__main__":
    main()
| [
"pyodbc.connect",
"tests3.testutils.add_to_path",
"psutil.Process",
"tests3.testutils.load_setup_connection_string",
"os.path.basename",
"unittest.main"
] | [((1397, 1410), 'tests3.testutils.add_to_path', 'add_to_path', ([], {}), '()\n', (1408, 1410), False, 'from tests3.testutils import add_to_path, load_setup_connection_string\n'), ((1610, 1626), 'psutil.Process', 'psutil.Process', ([], {}), '()\n', (1624, 1626), False, 'import psutil\n'), ((3435, 3450), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3448, 3450), False, 'import unittest\n'), ((1927, 1965), 'tests3.testutils.load_setup_connection_string', 'load_setup_connection_string', (['filename'], {}), '(filename)\n', (1955, 1965), False, 'from tests3.testutils import add_to_path, load_setup_connection_string\n'), ((2519, 2574), 'pyodbc.connect', 'pyodbc.connect', (['self.connection_string'], {'autocommit': '(True)'}), '(self.connection_string, autocommit=True)\n', (2533, 2574), False, 'import pyodbc\n'), ((1850, 1876), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1866, 1876), False, 'import os\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.distributions import Bernoulli
from src.models.nns import Decoder
class ConvDecoder(nn.Module):
    """Convolutional decoder mapping latent codes to Bernoulli pixel probabilities.

    Wraps the project ``Decoder`` network (128 channels, 28x28 output, 1
    channel) and scores data under an independent-Bernoulli likelihood.
    """

    def __init__(self, z_dim):
        super().__init__()
        self.z_dim = z_dim
        self.decoder = Decoder(128, z_dim, 28, 28, 1)

    def decode(self, z: torch.tensor) -> torch.tensor:
        """Map latent points ``z`` of shape [B, Z] to Bernoulli means [B, D]."""
        logits = self.decoder(z)
        return torch.sigmoid(logits)

    def forward(self, x: torch.tensor, z: torch.tensor) -> torch.tensor:
        """Return log p(x|z), the reconstruction term, with shape [B].

        ``x`` is flattened to [B, 784]; each pixel is scored under a
        Bernoulli whose mean comes from ``decode(z)`` (equivalent to the
        negative binary cross-entropy summed over pixels).
        """
        flat_x = x.reshape(-1, 784)
        pixel_probs = self.decode(z)
        log_px_z = Bernoulli(probs=pixel_probs).log_prob(flat_x).sum(-1)
        return log_px_z
class MNISTDecoder(nn.Module):
    """Two-layer MLP decoder for binarized 28x28 images (784 Bernoulli outputs)."""

    def __init__(self, z_dim, hidden_dim=400):
        super().__init__()
        self.z_dim = z_dim
        self.fc1 = nn.Linear(z_dim, hidden_dim)
        self.fc2 = nn.Linear(hidden_dim, 784)

    def decode(self, z: torch.tensor) -> torch.tensor:
        """Map latent points ``z`` of shape [B, Z] to Bernoulli means [B, 784]."""
        hidden = F.relu(self.fc1(z))
        return torch.sigmoid(self.fc2(hidden))

    def forward(self, x: torch.tensor, z: torch.tensor) -> torch.tensor:
        """Return log p(x|z), the reconstruction term, with shape [B].

        ``x`` is flattened to [B, 784]; each pixel is scored under a
        Bernoulli whose mean comes from ``decode(z)`` (equivalent to the
        negative binary cross-entropy summed over pixels).
        """
        flat_x = x.reshape(-1, 784)
        pixel_probs = self.decode(z)
        log_px_z = Bernoulli(probs=pixel_probs).log_prob(flat_x).sum(-1)
        return log_px_z
| [
"torch.distributions.Bernoulli",
"torch.nn.Linear",
"src.models.nns.Decoder"
] | [((285, 315), 'src.models.nns.Decoder', 'Decoder', (['(128)', 'z_dim', '(28)', '(28)', '(1)'], {}), '(128, z_dim, 28, 28, 1)\n', (292, 315), False, 'from src.models.nns import Decoder\n'), ((1282, 1306), 'torch.distributions.Bernoulli', 'Bernoulli', ([], {'probs': 'loc_img'}), '(probs=loc_img)\n', (1291, 1306), False, 'from torch.distributions import Bernoulli\n'), ((1615, 1643), 'torch.nn.Linear', 'nn.Linear', (['z_dim', 'hidden_dim'], {}), '(z_dim, hidden_dim)\n', (1624, 1643), True, 'import torch.nn as nn\n'), ((1663, 1689), 'torch.nn.Linear', 'nn.Linear', (['hidden_dim', '(784)'], {}), '(hidden_dim, 784)\n', (1672, 1689), True, 'import torch.nn as nn\n'), ((2625, 2649), 'torch.distributions.Bernoulli', 'Bernoulli', ([], {'probs': 'loc_img'}), '(probs=loc_img)\n', (2634, 2649), False, 'from torch.distributions import Bernoulli\n')] |
#!/usr/bin/env python
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
# Publish-only overrides on top of the shared settings star-imported from
# pelicanconf above: absolute URLs for the production site.
SITEURL = 'https://getudata.org'
RELATIVE_URLS = False

FEED_DOMAIN = SITEURL
FEED_ALL_ATOM = 'feeds/all.atom'
CATEGORY_FEED_ATOM = 'feeds/%s.atom'

# Date-based permalinks for articles; slug-based URLs for taxonomy and pages.
ARTICLE_URL = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
CATEGORY_URL = 'blog/category/{slug}/'
TAG_URL = 'blog/tag/{slug}/'
PAGE_URL = '{slug}/'

# Start from a clean output directory on every publish build.
DELETE_OUTPUT_DIRECTORY = True

# Following items are often useful when publishing
#DISQUS_SITENAME = ""
#GOOGLE_ANALYTICS = ""

# Production-only plugins, appended to the PLUGINS list from pelicanconf.
PLUGINS += (
    'image_optimizer',
    'gzip_cache',
)
| [
"sys.path.append"
] | [((44, 70), 'sys.path.append', 'sys.path.append', (['os.curdir'], {}), '(os.curdir)\n', (59, 70), False, 'import sys\n')] |
import json
from channels.generic.websocket import AsyncWebsocketConsumer
#from asgiref.sync import await_to_sync
from chat.services import chat_save_message
class ChatConsumer(AsyncWebsocketConsumer):
    """WebSocket consumer that relays chat messages within a room group."""

    room_name = None
    room_group_name = None

    async def connect(self):
        # Derive the channel-layer group name from the room in the URL route,
        # join it, then accept the WebSocket handshake.
        self.room_name = self.scope['url_route']['kwargs']['room_name']
        self.room_group_name = 'chat_%s' % self.room_name
        await self.channel_layer.group_add(
            self.room_group_name,
            self.channel_name
        )
        await self.accept()

    async def disconnect(self, code):
        # Leave the room group, if one was ever joined.
        if self.room_group_name and self.channel_name:
            await self.channel_layer.group_discard(
                self.room_group_name,
                self.channel_name
            )

    async def receive(self, text_data=None, bytes_data=None):
        """Fan an incoming client message out to the room group and persist it."""
        payload = json.loads(text_data)
        message = payload['message']
        image_caption = payload['image_caption']
        message_type = payload['message_type']
        sender = self.scope['user'].username.title()
        # `type: chat_message` routes the event to the handler below on every
        # consumer in the group.
        event = {
            'type': 'chat_message',
            'username': sender,
            'message': message,
            'message_type': message_type,
            'image_caption': image_caption
        }
        await self.channel_layer.group_send(self.room_group_name, event)
        await chat_save_message(
            username=sender,
            room_id=self.room_name,
            message=message,
            message_type=message_type,
            image_caption=image_caption
        )

    async def chat_message(self, event):
        """Deliver a group event back down this consumer's WebSocket."""
        await self.send(text_data=json.dumps({
            'message': event['message'],
            'username': event['username'],
            'image_caption': event['image_caption'],
            'message_type': event['message_type']
        }))
| [
"json.loads",
"json.dumps"
] | [((1015, 1036), 'json.loads', 'json.loads', (['text_data'], {}), '(text_data)\n', (1025, 1036), False, 'import json\n'), ((2179, 2299), 'json.dumps', 'json.dumps', (["{'message': message, 'username': username, 'image_caption': image_caption,\n 'message_type': message_type}"], {}), "({'message': message, 'username': username, 'image_caption':\n image_caption, 'message_type': message_type})\n", (2189, 2299), False, 'import json\n')] |
"""Increase length of artist name column
Revision ID: <KEY>
Revises: <KEY>8
Create Date: 2020-12-27 00:44:46.286228
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
# NOTE(review): these values look like scrubbed placeholders ('f<PASSWORD>',
# '<KEY>') — restore the real revision hashes before running this migration.
revision = 'f<PASSWORD>'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
    """Widen artist.name from VARCHAR(32) to String(160)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('artist', 'name',
                    existing_type=sa.VARCHAR(length=32),
                    type_=sa.String(length=160),
                    existing_nullable=True)
    # ### end Alembic commands ###
def downgrade():
    """Revert artist.name back to VARCHAR(32) (may truncate longer names)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.alter_column('artist', 'name',
                    existing_type=sa.String(length=160),
                    type_=sa.VARCHAR(length=32),
                    existing_nullable=True)
    # ### end Alembic commands ###
| [
"sqlalchemy.VARCHAR",
"sqlalchemy.String"
] | [((450, 471), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {'length': '(32)'}), '(length=32)\n', (460, 471), True, 'import sqlalchemy as sa\n'), ((494, 515), 'sqlalchemy.String', 'sa.String', ([], {'length': '(160)'}), '(length=160)\n', (503, 515), True, 'import sqlalchemy as sa\n'), ((743, 764), 'sqlalchemy.String', 'sa.String', ([], {'length': '(160)'}), '(length=160)\n', (752, 764), True, 'import sqlalchemy as sa\n'), ((787, 808), 'sqlalchemy.VARCHAR', 'sa.VARCHAR', ([], {'length': '(32)'}), '(length=32)\n', (797, 808), True, 'import sqlalchemy as sa\n')] |
from dao.squad_dao import SquadDao
from model.squad import Squad
from controller.squad_controller import SquadController
class FrameworkFrontEndController:
    """CRUD controller for FrameworkFrontEnd entities, delegating persistence to the DAO layer."""

    # NOTE(review): FrameworkFrontEndDao (and the FrameworkFrontEnd type used
    # in the annotations below) are not imported in this module — only
    # SquadDao is. Confirm/add the missing imports at the top of the file.
    dao = FrameworkFrontEndDao()
    squad_controller = SquadController()

    def listar_todos(self):
        """Return every FrameworkFrontEnd record."""
        return self.dao.listar_todos()

    def buscar_por_id(self, id):
        """Return the record with the given id."""
        return self.dao.buscar_por_id(id)

    def salvar(self, frameworkfrontend: FrameworkFrontEnd):
        """Persist a new record, saving its associated squad first.

        Returns the id assigned by the DAO.
        """
        # Bug fix: the original passed `squad.frameworkfrontend`, but `squad`
        # is undefined in this scope (NameError at runtime); the entity to
        # save first is the record's own `frameworkfrontend.squad`.
        frameworkfrontend.squad.id = self.squad_controller.salvar(
            frameworkfrontend.squad)
        return self.dao.salvar(frameworkfrontend)

    def alterar(self, frameworkfrontend: FrameworkFrontEnd):
        """Update an existing record."""
        self.dao.alterar(frameworkfrontend)

    def deletar(self, id):
        """Delete the record with the given id."""
        self.dao.deletar(id)
"controller.squad_controller.SquadController"
] | [((209, 226), 'controller.squad_controller.SquadController', 'SquadController', ([], {}), '()\n', (224, 226), False, 'from controller.squad_controller import SquadController\n')] |
from django.contrib import admin
from thing.models import APIKey, BlueprintInstance, Campaign, Character, CharacterConfig, Corporation, \
Alliance, APIKeyFailure, Asset, AssetSummary, BlueprintComponent, Blueprint, CorpWallet, \
TaskState, CharacterDetails, Contract, UserProfile, Transaction, JournalEntry, Colony, Pin, BlueprintProduct, \
IndustryJob, SkillPlan
class APIKeyAdmin(admin.ModelAdmin):
    """Key validity at a glance; raw-id widgets for the heavy relation pickers."""
    list_display = ('id', 'user', 'name', 'key_type', 'corporation', 'valid')
    raw_id_fields = ('characters', 'corp_character', 'corporation')
    search_fields = ['characters__name', 'corporation__name']
class BlueprintInstanceAdmin(admin.ModelAdmin):
    """List view shows the blueprint plus its research levels."""
    list_display = ('blueprint', 'original', 'material_level', 'productivity_level')
class CharacterAdmin(admin.ModelAdmin):
    """Single edit-form section; corporation picked via raw-id widget."""
    fieldsets = [
        ('Character information', {
            'fields': ['name', 'corporation']
        }),
    ]
    list_display = ('id', 'name', 'corporation')
    raw_id_fields = ('corporation',)
    search_fields = ['name']
class CharacterDetailsAdmin(admin.ModelAdmin):
    """Raw-id widget avoids loading every character into a dropdown."""
    raw_id_fields = ('character',)
class CharacterConfigAdmin(admin.ModelAdmin):
    """Raw-id widget avoids loading every character into a dropdown."""
    raw_id_fields = ('character',)
class CampaignAdmin(admin.ModelAdmin):
    """Slug is auto-filled from the title while typing."""
    prepopulated_fields = {'slug': ('title',)}
class AllianceAdmin(admin.ModelAdmin):
    """List view shows id plus both name forms."""
    list_display = ('id', 'name', 'short_name')
class APIKeyFailureAdmin(admin.ModelAdmin):
    """List view shows who/which key failed and when."""
    list_display = ('user', 'keyid', 'fail_time')
class AssetAdmin(admin.ModelAdmin):
    """Location and quantity in the list; raw-id picker for character."""
    list_display = ('character', 'system', 'station', 'item', 'quantity')
    raw_id_fields = ('character',)
class AssetSummaryAdmin(admin.ModelAdmin):
    """Per-location totals in the list; raw-id picker for character."""
    list_display = ('character', 'system', 'station', 'total_items', 'total_value')
    raw_id_fields = ('character',)
class BlueprintComponentAdmin(admin.ModelAdmin):
    """Components filterable by activity."""
    list_display = ('blueprint', 'activity', 'item', 'count', 'consumed')
    list_filter = ('activity',)
class BlueprintProductAdmin(admin.ModelAdmin):
    """Products filterable by activity."""
    list_display = ('blueprint', 'activity', 'item', 'count')
    list_filter = ('activity',)
class BlueprintAdmin(admin.ModelAdmin):
    """Name-only list view."""
    list_display = ('name',)
class CorpWalletAdmin(admin.ModelAdmin):
    """Wallet balances in the list; raw-id picker for corporation."""
    list_display = ('corporation', 'description', 'balance')
    raw_id_fields = ('corporation',)
class TaskStateAdmin(admin.ModelAdmin):
    """Task scheduling state with its timestamps in the list view."""
    list_display = ('keyid', 'url', 'state', 'mod_time', 'next_time', 'parameter')
class ContractAdmin(admin.ModelAdmin):
    """Contract lifecycle dates in the list; raw-id pickers for all parties."""
    list_display = ('contract_id', 'date_issued', 'date_expired', 'date_completed')
    raw_id_fields = ('character', 'corporation', 'issuer_char', 'issuer_corp')
class UserProfileAdmin(admin.ModelAdmin):
    """Profile flags and last activity in the list view."""
    list_display = ('user', 'last_seen', 'can_add_keys')
class TransactionAdmin(admin.ModelAdmin):
    """Both sides of each market transaction in the list view."""
    list_display = ('transaction_id', 'date', 'character', 'corp_wallet', 'other_char', 'other_corp', 'item')
class JournalEntryAdmin(admin.ModelAdmin):
    """Wallet journal entries; raw-id pickers for the related rows."""
    list_display = ('date', 'character', 'corp_wallet', 'ref_type', 'amount', 'owner1_id', 'owner2_id', 'reason')
    raw_id_fields = ('character', 'corp_wallet', 'tax_corp')
class ColonyAdmin(admin.ModelAdmin):
    """Planetary colonies filterable by level and planet type."""
    list_display = ('character', 'system', 'planet', 'planet_type', 'last_update', 'level', 'pins')
    list_filter = ('level', 'planet_type')
    raw_id_fields = ('character',)
class PinAdmin(admin.ModelAdmin):
    """Colony pins with their expiry times in the list view."""
    list_display = ('pin_id', 'colony', 'type', 'expires')
class IndustryJobAdmin(admin.ModelAdmin):
    """Industry jobs filterable by activity and status."""
    list_display = ('character', 'activity', 'blueprint', 'product', 'status')
    list_filter = ('activity', 'status')
    raw_id_fields = ('character', 'corporation')
class SkillPlanAdmin(admin.ModelAdmin):
    """Skill plans filterable by visibility."""
    list_display = ('name', 'user', 'visibility')
    list_filter = ('visibility',)
# Wire every model to its ModelAdmin above
# (Corporation is registered with the default admin).
admin.site.register(APIKey, APIKeyAdmin)
admin.site.register(Character, CharacterAdmin)
admin.site.register(CharacterConfig, CharacterConfigAdmin)
admin.site.register(BlueprintInstance, BlueprintInstanceAdmin)
admin.site.register(Campaign, CampaignAdmin)
admin.site.register(Corporation)
admin.site.register(Alliance, AllianceAdmin)
admin.site.register(APIKeyFailure, APIKeyFailureAdmin)
admin.site.register(Asset, AssetAdmin)
admin.site.register(AssetSummary, AssetSummaryAdmin)
admin.site.register(BlueprintComponent, BlueprintComponentAdmin)
admin.site.register(BlueprintProduct, BlueprintProductAdmin)
admin.site.register(Blueprint, BlueprintAdmin)
admin.site.register(CorpWallet, CorpWalletAdmin)
admin.site.register(TaskState, TaskStateAdmin)
admin.site.register(CharacterDetails, CharacterDetailsAdmin)
admin.site.register(Contract, ContractAdmin)
admin.site.register(UserProfile, UserProfileAdmin)
admin.site.register(Transaction, TransactionAdmin)
admin.site.register(JournalEntry, JournalEntryAdmin)
admin.site.register(Colony, ColonyAdmin)
admin.site.register(Pin, PinAdmin)
admin.site.register(IndustryJob, IndustryJobAdmin)
admin.site.register(SkillPlan, SkillPlanAdmin)
| [
"django.contrib.admin.site.register"
] | [((3751, 3791), 'django.contrib.admin.site.register', 'admin.site.register', (['APIKey', 'APIKeyAdmin'], {}), '(APIKey, APIKeyAdmin)\n', (3770, 3791), False, 'from django.contrib import admin\n'), ((3792, 3838), 'django.contrib.admin.site.register', 'admin.site.register', (['Character', 'CharacterAdmin'], {}), '(Character, CharacterAdmin)\n', (3811, 3838), False, 'from django.contrib import admin\n'), ((3839, 3897), 'django.contrib.admin.site.register', 'admin.site.register', (['CharacterConfig', 'CharacterConfigAdmin'], {}), '(CharacterConfig, CharacterConfigAdmin)\n', (3858, 3897), False, 'from django.contrib import admin\n'), ((3898, 3960), 'django.contrib.admin.site.register', 'admin.site.register', (['BlueprintInstance', 'BlueprintInstanceAdmin'], {}), '(BlueprintInstance, BlueprintInstanceAdmin)\n', (3917, 3960), False, 'from django.contrib import admin\n'), ((3961, 4005), 'django.contrib.admin.site.register', 'admin.site.register', (['Campaign', 'CampaignAdmin'], {}), '(Campaign, CampaignAdmin)\n', (3980, 4005), False, 'from django.contrib import admin\n'), ((4006, 4038), 'django.contrib.admin.site.register', 'admin.site.register', (['Corporation'], {}), '(Corporation)\n', (4025, 4038), False, 'from django.contrib import admin\n'), ((4039, 4083), 'django.contrib.admin.site.register', 'admin.site.register', (['Alliance', 'AllianceAdmin'], {}), '(Alliance, AllianceAdmin)\n', (4058, 4083), False, 'from django.contrib import admin\n'), ((4084, 4138), 'django.contrib.admin.site.register', 'admin.site.register', (['APIKeyFailure', 'APIKeyFailureAdmin'], {}), '(APIKeyFailure, APIKeyFailureAdmin)\n', (4103, 4138), False, 'from django.contrib import admin\n'), ((4139, 4177), 'django.contrib.admin.site.register', 'admin.site.register', (['Asset', 'AssetAdmin'], {}), '(Asset, AssetAdmin)\n', (4158, 4177), False, 'from django.contrib import admin\n'), ((4178, 4230), 'django.contrib.admin.site.register', 'admin.site.register', (['AssetSummary', 'AssetSummaryAdmin'], 
{}), '(AssetSummary, AssetSummaryAdmin)\n', (4197, 4230), False, 'from django.contrib import admin\n'), ((4231, 4295), 'django.contrib.admin.site.register', 'admin.site.register', (['BlueprintComponent', 'BlueprintComponentAdmin'], {}), '(BlueprintComponent, BlueprintComponentAdmin)\n', (4250, 4295), False, 'from django.contrib import admin\n'), ((4296, 4356), 'django.contrib.admin.site.register', 'admin.site.register', (['BlueprintProduct', 'BlueprintProductAdmin'], {}), '(BlueprintProduct, BlueprintProductAdmin)\n', (4315, 4356), False, 'from django.contrib import admin\n'), ((4357, 4403), 'django.contrib.admin.site.register', 'admin.site.register', (['Blueprint', 'BlueprintAdmin'], {}), '(Blueprint, BlueprintAdmin)\n', (4376, 4403), False, 'from django.contrib import admin\n'), ((4404, 4452), 'django.contrib.admin.site.register', 'admin.site.register', (['CorpWallet', 'CorpWalletAdmin'], {}), '(CorpWallet, CorpWalletAdmin)\n', (4423, 4452), False, 'from django.contrib import admin\n'), ((4453, 4499), 'django.contrib.admin.site.register', 'admin.site.register', (['TaskState', 'TaskStateAdmin'], {}), '(TaskState, TaskStateAdmin)\n', (4472, 4499), False, 'from django.contrib import admin\n'), ((4500, 4560), 'django.contrib.admin.site.register', 'admin.site.register', (['CharacterDetails', 'CharacterDetailsAdmin'], {}), '(CharacterDetails, CharacterDetailsAdmin)\n', (4519, 4560), False, 'from django.contrib import admin\n'), ((4561, 4605), 'django.contrib.admin.site.register', 'admin.site.register', (['Contract', 'ContractAdmin'], {}), '(Contract, ContractAdmin)\n', (4580, 4605), False, 'from django.contrib import admin\n'), ((4606, 4656), 'django.contrib.admin.site.register', 'admin.site.register', (['UserProfile', 'UserProfileAdmin'], {}), '(UserProfile, UserProfileAdmin)\n', (4625, 4656), False, 'from django.contrib import admin\n'), ((4657, 4707), 'django.contrib.admin.site.register', 'admin.site.register', (['Transaction', 'TransactionAdmin'], {}), 
'(Transaction, TransactionAdmin)\n', (4676, 4707), False, 'from django.contrib import admin\n'), ((4708, 4760), 'django.contrib.admin.site.register', 'admin.site.register', (['JournalEntry', 'JournalEntryAdmin'], {}), '(JournalEntry, JournalEntryAdmin)\n', (4727, 4760), False, 'from django.contrib import admin\n'), ((4761, 4801), 'django.contrib.admin.site.register', 'admin.site.register', (['Colony', 'ColonyAdmin'], {}), '(Colony, ColonyAdmin)\n', (4780, 4801), False, 'from django.contrib import admin\n'), ((4802, 4836), 'django.contrib.admin.site.register', 'admin.site.register', (['Pin', 'PinAdmin'], {}), '(Pin, PinAdmin)\n', (4821, 4836), False, 'from django.contrib import admin\n'), ((4837, 4887), 'django.contrib.admin.site.register', 'admin.site.register', (['IndustryJob', 'IndustryJobAdmin'], {}), '(IndustryJob, IndustryJobAdmin)\n', (4856, 4887), False, 'from django.contrib import admin\n'), ((4888, 4934), 'django.contrib.admin.site.register', 'admin.site.register', (['SkillPlan', 'SkillPlanAdmin'], {}), '(SkillPlan, SkillPlanAdmin)\n', (4907, 4934), False, 'from django.contrib import admin\n')] |
"""
Copyright (c) 2021, NVIDIA CORPORATION.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
These APIs are used along with TensorFlow 1.x.
They are similar to tf.get_variable()
"""
from tensorflow.python.framework import ops
import threading
_SparseOperationKitEmbeddingLayerStoreKey = "SparseOperationKitEmbeddingLayerStore"
class _EmbeddingLayerStore(threading.local):
    """Thread-local registry that caches embedding layers by unique name."""

    def __init__(self):
        super(_EmbeddingLayerStore, self).__init__()
        self._embedding_layer_container = dict()

    def _create_embedding(self, name, constructor, **kwargs):
        """Create, cache and return a new embedding layer for `name`."""
        if constructor is None:
            raise ValueError("embedding_layer: '{}' does not exist and "
                             "cannot create it with constructor: "
                             "{}".format(name, constructor))
        embedding_layer = constructor(**kwargs)
        self._embedding_layer_container[name] = embedding_layer
        return embedding_layer

    def get_embedding(self, name, constructor=None, **kwargs):
        """Return the cached layer for `name`, creating it on first use."""
        emb = self._embedding_layer_container.get(name, None)
        # Bug fix: the original used `emb or self._create_embedding(...)`,
        # which would silently re-create (and overwrite) a cached layer
        # whose truth value happens to be False; test for None explicitly.
        if emb is None:
            emb = self._create_embedding(name, constructor=constructor, **kwargs)
        return emb
def _get_embedding_store():
    """Fetch the embedding store registered in the TF graph collection,
    creating and registering a fresh one on first access."""
    stores = ops.get_collection(_SparseOperationKitEmbeddingLayerStoreKey)
    if stores:
        return stores[0]
    store = _EmbeddingLayerStore()
    ops.add_to_collection(_SparseOperationKitEmbeddingLayerStoreKey, store)
    return store
def get_embedding(name, constructor=None, **kwargs):
    """Get or create an embedding layer by name.

    Parameters
    ----------
    name: string
        unique name used to identify the embedding layer.
    constructor: SOK.embedding_layer
        construction function used when the layer does not exist yet;
        it is invoked as ``constructor(**kwargs)``.
    kwargs: keyword arguments forwarded to the constructor.

    Returns
    -------
    embedding_layer: embedding layer
        the cached or newly created embedding layer.

    Examples
    --------
    .. code-block:: python

        # here to create a new embedding layer.
        emb_layer = sok.get_embedding(name="Emb", sok.All2AllDenseEmbedding,
                                      max_vocabulary_size_per_gpu=1024,
                                      embedding_vec_size=16,
                                      slot_num=10,
                                      nnz_per_slot=1,
                                      dynamic_input=False)
        outputs = emb_layer(inputs)
        ...
        # here to reuse already created embedding layer.
        emb_layer = sok.get_embedding(name="Emb")
        outputs_1 = emb_layer(inputs)
        ...
    """
    store = _get_embedding_store()
    return store.get_embedding(name=name, constructor=constructor, **kwargs)
"tensorflow.python.framework.ops.add_to_collection",
"tensorflow.python.framework.ops.get_collection"
] | [((1719, 1780), 'tensorflow.python.framework.ops.get_collection', 'ops.get_collection', (['_SparseOperationKitEmbeddingLayerStoreKey'], {}), '(_SparseOperationKitEmbeddingLayerStoreKey)\n', (1737, 1780), False, 'from tensorflow.python.framework import ops\n'), ((1854, 1929), 'tensorflow.python.framework.ops.add_to_collection', 'ops.add_to_collection', (['_SparseOperationKitEmbeddingLayerStoreKey', 'emb_store'], {}), '(_SparseOperationKitEmbeddingLayerStoreKey, emb_store)\n', (1875, 1929), False, 'from tensorflow.python.framework import ops\n')] |
from asgard.app import app
# NOTE(review): imported but not referenced below -- presumably the import
# registers HTTP handlers as a side effect; confirm before removing.
from asgard.handlers import http

# Start the application.
app.run()
| [
"asgard.app.app.run"
] | [((61, 70), 'asgard.app.app.run', 'app.run', ([], {}), '()\n', (68, 70), False, 'from asgard.app import app\n')] |
import numpy as np
from numpy.testing import assert_equal
import scipy.sparse as sp
import tensorflow as tf
from .math import (sparse_scalar_multiply, sparse_tensor_diag_matmul,
_diag_matmul_py, _diag_matmul_transpose_py)
from .convert import sparse_to_tensor
class MathTest(tf.test.TestCase):
    """Unit tests for the sparse math helpers."""

    def test_sparse_scalar_multiply(self):
        mat = sparse_to_tensor(sp.coo_matrix([[0, 2, 3], [0, 1, 0]]))
        dense = tf.sparse_tensor_to_dense(sparse_scalar_multiply(mat, 2))
        with self.test_session():
            self.assertAllEqual(dense.eval(), [[0, 4, 6], [0, 2, 0]])

    def test_sparse_tensor_diag_matmul(self):
        mat = sparse_to_tensor(sp.coo_matrix([[2, 3, 0], [1, 0, 2], [0, 3, 0]]))
        scale = tf.constant([2, 0.5, 3])

        product = tf.sparse_tensor_to_dense(sparse_tensor_diag_matmul(mat, scale))
        with self.test_session():
            self.assertAllEqual(product.eval(),
                                [[4, 6, 0], [0.5, 0, 1], [0, 9, 0]])

        transposed = tf.sparse_tensor_to_dense(
            sparse_tensor_diag_matmul(mat, scale, transpose=True))
        with self.test_session():
            self.assertAllEqual(transposed.eval(),
                                [[4, 1.5, 0], [2, 0, 6], [0, 1.5, 0]])

    def test_diag_matmul_py(self):
        idx = np.array([[0, 0], [0, 1], [1, 0], [1, 2], [2, 1]])
        vals = np.array([2, 3, 1, 2, 3])
        diag = np.array([2, 0.5, 3])
        assert_equal(_diag_matmul_py(idx, vals, diag), [4, 6, 0.5, 1, 9])

    def test_diag_matmul_transpose_py(self):
        idx = np.array([[1, 0], [0, 0], [0, 1], [1, 2], [2, 1]])
        vals = np.array([1, 2, 3, 2, 3])
        diag = np.array([2, 0.5, 3])
        assert_equal(_diag_matmul_transpose_py(idx, vals, diag),
                     [2, 4, 1.5, 6, 1.5])
| [
"numpy.testing.assert_equal",
"numpy.array",
"tensorflow.constant",
"scipy.sparse.coo_matrix",
"tensorflow.sparse_tensor_to_dense"
] | [((406, 422), 'scipy.sparse.coo_matrix', 'sp.coo_matrix', (['a'], {}), '(a)\n', (419, 422), True, 'import scipy.sparse as sp\n'), ((508, 536), 'tensorflow.sparse_tensor_to_dense', 'tf.sparse_tensor_to_dense', (['a'], {}), '(a)\n', (533, 536), True, 'import tensorflow as tf\n'), ((771, 787), 'scipy.sparse.coo_matrix', 'sp.coo_matrix', (['a'], {}), '(a)\n', (784, 787), True, 'import scipy.sparse as sp\n'), ((863, 880), 'tensorflow.constant', 'tf.constant', (['diag'], {}), '(diag)\n', (874, 880), True, 'import tensorflow as tf\n'), ((941, 969), 'tensorflow.sparse_tensor_to_dense', 'tf.sparse_tensor_to_dense', (['b'], {}), '(b)\n', (966, 969), True, 'import tensorflow as tf\n'), ((1188, 1216), 'tensorflow.sparse_tensor_to_dense', 'tf.sparse_tensor_to_dense', (['b'], {}), '(b)\n', (1213, 1216), True, 'import tensorflow as tf\n'), ((1415, 1465), 'numpy.array', 'np.array', (['[[0, 0], [0, 1], [1, 0], [1, 2], [2, 1]]'], {}), '([[0, 0], [0, 1], [1, 0], [1, 2], [2, 1]])\n', (1423, 1465), True, 'import numpy as np\n'), ((1483, 1508), 'numpy.array', 'np.array', (['[2, 3, 1, 2, 3]'], {}), '([2, 3, 1, 2, 3])\n', (1491, 1508), True, 'import numpy as np\n'), ((1524, 1545), 'numpy.array', 'np.array', (['[2, 0.5, 3]'], {}), '([2, 0.5, 3])\n', (1532, 1545), True, 'import numpy as np\n'), ((1648, 1678), 'numpy.testing.assert_equal', 'assert_equal', (['result', 'expected'], {}), '(result, expected)\n', (1660, 1678), False, 'from numpy.testing import assert_equal\n'), ((1743, 1793), 'numpy.array', 'np.array', (['[[1, 0], [0, 0], [0, 1], [1, 2], [2, 1]]'], {}), '([[1, 0], [0, 0], [0, 1], [1, 2], [2, 1]])\n', (1751, 1793), True, 'import numpy as np\n'), ((1811, 1836), 'numpy.array', 'np.array', (['[1, 2, 3, 2, 3]'], {}), '([1, 2, 3, 2, 3])\n', (1819, 1836), True, 'import numpy as np\n'), ((1852, 1873), 'numpy.array', 'np.array', (['[2, 0.5, 3]'], {}), '([2, 0.5, 3])\n', (1860, 1873), True, 'import numpy as np\n'), ((1988, 2018), 'numpy.testing.assert_equal', 'assert_equal', 
(['result', 'expected'], {}), '(result, expected)\n', (2000, 2018), False, 'from numpy.testing import assert_equal\n')] |
#!/usr/bin/env python
import os
import pprint
import sys
# pretty printer settings (used for debug output)
pp = pprint.PrettyPrinter(indent=4)
# the label (class grouping) to process
label = 'all'
# the train/validation split ratio used in the prepared data layout
ratio = '90_10'
# specify the classifier path
classifier_path = 'food/binary/unbalanced'
# source path (prepared data)
source = 'data/prepared/{}/{}/{}'.format(classifier_path, ratio, label)
# target path (processed environment)
target = 'data/processed/{}/{}/elements/{}'.format(classifier_path, ratio, label)
# specifies whether different models are to be generated (one per class)
multi_model = True
# force prepare environment (even if already exists)
force = True
# folders created within the environment
folders = ['charts', 'csv', 'log', 'models']
def collect_all_classes(path):
    """Collects all class names from the given folder. The class names must be folders in the given folder."""
    # Directory-listing order is preserved, as before.
    return list(os.listdir(path))
# check if target environment already exists; abort unless forced
if os.path.exists(target) and not force:
    sys.exit('Environment folder "{}" already exists. Cancel.'.format(target))

# collect all class names from the prepared source folder
class_names = collect_all_classes(source)

# create environment folder
if not os.path.exists(target):
    print('Create environment folder "{}"'.format(target))
    os.makedirs(target)

# create the standard sub-folders (charts, csv, log, models)
for folder in folders:
    folder_path = os.path.join(target, folder)

    if not os.path.exists(folder_path):
        print('Create folder "{}"'.format(folder_path))
        os.makedirs(folder_path)

if multi_model:
    folder_path = os.path.join(target, 'data')

    # create data folder
    if not os.path.exists(folder_path):
        print('Create data folder {}'.format(folder_path))
        os.makedirs(folder_path)

    # create data symlinks, one per class, pointing back at the source
    for class_name in class_names:
        # NOTE(review): the '../' depth is hard-coded to 9 levels and must
        # match the depth of `folder_path` -- verify if the layout changes.
        symlink_source = '../../../../../../../../../' + os.path.join(source, class_name)
        symlink_target = os.path.join(folder_path, class_name)

        if not os.path.exists(symlink_target):
            print('Create symlink {}'.format(symlink_target))
            os.symlink(symlink_source, symlink_target)

        # create per-class folders inside each standard sub-folder
        for folder in folders:
            type_path = os.path.join(target, folder, class_name)

            if not os.path.exists(type_path):
                print('Create folder {}'.format(type_path))
                os.makedirs(type_path)
| [
"os.path.exists",
"os.listdir",
"os.makedirs",
"os.path.join",
"os.symlink",
"pprint.PrettyPrinter"
] | [((90, 120), 'pprint.PrettyPrinter', 'pprint.PrettyPrinter', ([], {'indent': '(4)'}), '(indent=4)\n', (110, 120), False, 'import pprint\n'), ((867, 883), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (877, 883), False, 'import os\n'), ((984, 1006), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (998, 1006), False, 'import os\n'), ((1206, 1228), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (1220, 1228), False, 'import os\n'), ((1293, 1312), 'os.makedirs', 'os.makedirs', (['target'], {}), '(target)\n', (1304, 1312), False, 'import os\n'), ((1372, 1400), 'os.path.join', 'os.path.join', (['target', 'folder'], {}), '(target, folder)\n', (1384, 1400), False, 'import os\n'), ((1566, 1594), 'os.path.join', 'os.path.join', (['target', '"""data"""'], {}), "(target, 'data')\n", (1578, 1594), False, 'import os\n'), ((1413, 1440), 'os.path.exists', 'os.path.exists', (['folder_path'], {}), '(folder_path)\n', (1427, 1440), False, 'import os\n'), ((1506, 1530), 'os.makedirs', 'os.makedirs', (['folder_path'], {}), '(folder_path)\n', (1517, 1530), False, 'import os\n'), ((1632, 1659), 'os.path.exists', 'os.path.exists', (['folder_path'], {}), '(folder_path)\n', (1646, 1659), False, 'import os\n'), ((1728, 1752), 'os.makedirs', 'os.makedirs', (['folder_path'], {}), '(folder_path)\n', (1739, 1752), False, 'import os\n'), ((1931, 1968), 'os.path.join', 'os.path.join', (['folder_path', 'class_name'], {}), '(folder_path, class_name)\n', (1943, 1968), False, 'import os\n'), ((1873, 1905), 'os.path.join', 'os.path.join', (['source', 'class_name'], {}), '(source, class_name)\n', (1885, 1905), False, 'import os\n'), ((1985, 2015), 'os.path.exists', 'os.path.exists', (['symlink_target'], {}), '(symlink_target)\n', (1999, 2015), False, 'import os\n'), ((2091, 2133), 'os.symlink', 'os.symlink', (['symlink_source', 'symlink_target'], {}), '(symlink_source, symlink_target)\n', (2101, 2133), False, 'import os\n'), ((2227, 2267), 
'os.path.join', 'os.path.join', (['target', 'folder', 'class_name'], {}), '(target, folder, class_name)\n', (2239, 2267), False, 'import os\n'), ((2288, 2313), 'os.path.exists', 'os.path.exists', (['type_path'], {}), '(type_path)\n', (2302, 2313), False, 'import os\n'), ((2391, 2413), 'os.makedirs', 'os.makedirs', (['type_path'], {}), '(type_path)\n', (2402, 2413), False, 'import os\n')] |
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
import os
import pytest
from Automated.atom_utils import hydra_test_utils as hydra
from Automated.atom_utils.automated_test_base import TestAutomationBase
TEST_DIRECTORY = os.path.dirname(__file__)
LOG_MONITOR_TIMEOUT = 60
@pytest.mark.parametrize("project", ["AtomTest"])
@pytest.mark.parametrize("launcher_platform", ["windows_generic"])
class TestOpeningFileSourceLocation(TestAutomationBase):
    """Validates that a material asset can be opened at its source location
    from the Asset Browser (log-monitored hydra test)."""

    @pytest.fixture(autouse=True)
    def setup_teardown(self, request):
        # Registered as a finalizer so cleanup runs even if the test fails.
        def teardown():
            # Close the beach_parking_1k_iblglobalcm.exr material opened in WindowsExplorer
            os.system('TASKKILL /F /FI "WINDOWTITLE eq LightingPresets" /IM explorer.exe')

        request.addfinalizer(teardown)

    @pytest.mark.parametrize("exe_file_name", ["MaterialEditor"])
    def test_OpeningMaterial_InAssetBrowser_OpenedSuccessfully(
        self, request, launcher_platform, generic_launcher, exe_file_name
    ):
        """
        Please review the hydra script run by this test for more specific test info.
        Tests for Opening File Source Location: the run passes when the
        expected log line appears and no assert/error/traceback is seen.
        """
        expected_lines = [
            "beach_parking_1k_iblglobalcm.exr opened with relative path: \dev\Gems\Atom\TestData\TestData\LightingPresets in WindowsExplorer",
        ]
        unexpected_lines = [
            "Trace::Assert",
            "Trace::Error",
            "Traceback (most recent call last):",
        ]
        hydra.launch_and_validate_results(
            request,
            TEST_DIRECTORY,
            generic_launcher,
            "OpeningFileSourceLocation_test_case.py",
            timeout=LOG_MONITOR_TIMEOUT,
            expected_lines=expected_lines,
            unexpected_lines=unexpected_lines,
            halt_on_unexpected=True,
            log_file_name="MaterialEditor.log",
        )
| [
"os.path.dirname",
"Automated.atom_utils.hydra_test_utils.launch_and_validate_results",
"pytest.mark.parametrize",
"pytest.fixture",
"os.system"
] | [((383, 408), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (398, 408), False, 'import os\n'), ((437, 485), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""project"""', "['AtomTest']"], {}), "('project', ['AtomTest'])\n", (460, 485), False, 'import pytest\n'), ((487, 552), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""launcher_platform"""', "['windows_generic']"], {}), "('launcher_platform', ['windows_generic'])\n", (510, 552), False, 'import pytest\n'), ((615, 643), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (629, 643), False, 'import pytest\n'), ((936, 996), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""exe_file_name"""', "['MaterialEditor']"], {}), "('exe_file_name', ['MaterialEditor'])\n", (959, 996), False, 'import pytest\n'), ((1634, 1920), 'Automated.atom_utils.hydra_test_utils.launch_and_validate_results', 'hydra.launch_and_validate_results', (['request', 'TEST_DIRECTORY', 'generic_launcher', '"""OpeningFileSourceLocation_test_case.py"""'], {'timeout': 'LOG_MONITOR_TIMEOUT', 'expected_lines': 'expected_lines', 'unexpected_lines': 'unexpected_lines', 'halt_on_unexpected': '(True)', 'log_file_name': '"""MaterialEditor.log"""'}), "(request, TEST_DIRECTORY, generic_launcher,\n 'OpeningFileSourceLocation_test_case.py', timeout=LOG_MONITOR_TIMEOUT,\n expected_lines=expected_lines, unexpected_lines=unexpected_lines,\n halt_on_unexpected=True, log_file_name='MaterialEditor.log')\n", (1667, 1920), True, 'from Automated.atom_utils import hydra_test_utils as hydra\n'), ((811, 889), 'os.system', 'os.system', (['"""TASKKILL /F /FI "WINDOWTITLE eq LightingPresets" /IM explorer.exe"""'], {}), '(\'TASKKILL /F /FI "WINDOWTITLE eq LightingPresets" /IM explorer.exe\')\n', (820, 889), False, 'import os\n')] |
import pdfcutter
import helper
import json #For writing PDF Link JSON File
import os #To check if PDF Link JSON File exists
#get_session is main method for parsing session to Senats/Bundesrats Texts dict
class MainExtractorMethod:
    """Drives parsing of one session into Senats/Bundesrats text dicts."""

    def __init__(self, textExtractorHolderSubclass):
        # Concrete TextExtractorHolder class; instantiated lazily in
        # get_beschluesse_text to avoid cyclic dependencies.
        self.textExtractorHolderSubclass = textExtractorHolderSubclass

    def get_session(self, session):
        """Map a session to {TOP: {'senat': senatsText, 'bundesrat': BRText}}."""
        pdf_urls = dict(self._get_pdf_urls())
        url_filename = "session_urls.json"
        # Cache the PDF links once; the path is relative to the county folder
        # because subclasses override this method's surroundings.
        if not os.path.exists(url_filename):
            with open(url_filename, 'w') as handle:
                json.dump(pdf_urls, handle)
        try:
            filename = helper.get_session_pdf_filename(session, pdf_urls)
        except KeyError:
            # No PDF known for this session -> nothing to extract.
            return
        return self.get_beschluesse_text(session, filename)

    def _get_pdf_urls(self):
        """Yield (BR session number, PDF web link) pairs; county-specific,
        so every subclass must implement it."""
        raise NotImplementedError()

    def get_beschluesse_text(self, session, filename):
        """Extract all TOP texts via the configured TextExtractorHolder."""
        extractor = self.textExtractorHolderSubclass(filename, session)
        return dict(extractor.getSenatsAndBRTextsForAllSessionTOPs())
| [
"os.path.exists",
"helper.get_session_pdf_filename",
"json.dump"
] | [((775, 802), 'os.path.exists', 'os.path.exists', (['URLFILENAME'], {}), '(URLFILENAME)\n', (789, 802), False, 'import os\n'), ((1053, 1103), 'helper.get_session_pdf_filename', 'helper.get_session_pdf_filename', (['session', 'PDF_URLS'], {}), '(session, PDF_URLS)\n', (1084, 1103), False, 'import helper\n'), ((992, 1014), 'json.dump', 'json.dump', (['PDF_URLS', 'f'], {}), '(PDF_URLS, f)\n', (1001, 1014), False, 'import json\n')] |
import neptune
from tensorflow.keras.callbacks import BaseLogger
class NeptuneMonitor(BaseLogger):
    """Keras callback that forwards the per-epoch loss to a Neptune experiment."""

    def __init__(self, name, api_token, prj_name, params: tuple = None):
        assert api_token is not None
        assert prj_name is not None
        # NOTE(review): super(BaseLogger, ...) skips BaseLogger.__init__ and
        # calls Callback.__init__ directly -- confirm this is intentional.
        super(BaseLogger, self).__init__()
        self.my_name = name
        # NOTE(review): ['loss'] or [] always evaluates to ['loss'].
        self.stateful_metrics = set(['loss'] or [])
        neptune.init(
            api_token=api_token,
            project_qualified_name=prj_name)
        self.experiment = neptune.create_experiment(name=self.my_name, params=params)
        self.log_neptune = True

    def on_epoch_end(self, epoch, logs={}):
        # Send the epoch loss (accuracy reporting is currently disabled).
        #acc = logs['acc']
        loss = logs['loss']
        if self.log_neptune:
            self.experiment.append_tag(self.my_name)
            #self.experiment.send_metric('acc', acc)
            self.experiment.send_metric('loss', loss)
            #self.experiment.send_metric('epoch', epoch)

    def on_train_end(self, logs={}):
        # Finalize the Neptune experiment when training completes.
        if self.log_neptune:
            self.experiment.stop()
"neptune.create_experiment",
"neptune.init"
] | [((379, 445), 'neptune.init', 'neptune.init', ([], {'api_token': 'api_token', 'project_qualified_name': 'prj_name'}), '(api_token=api_token, project_qualified_name=prj_name)\n', (391, 445), False, 'import neptune\n'), ((497, 556), 'neptune.create_experiment', 'neptune.create_experiment', ([], {'name': 'self.my_name', 'params': 'params'}), '(name=self.my_name, params=params)\n', (522, 556), False, 'import neptune\n')] |
from merc import errors
from merc import feature
from merc import message
class KillFeature(feature.Feature):
  # Feature name is the module path; used by merc's feature registry.
  NAME = __name__


# Module-level hook merc looks up when loading this feature.
install = KillFeature.install
@KillFeature.register_user_command
class Kill(message.Command):
  """KILL <nickname> <reason>: operator command that forcibly disconnects a user."""

  NAME = "KILL"
  MIN_ARITY = 2

  def __init__(self, nickname, reason, *args):
    self.nickname = nickname
    self.reason = reason

  @message.Command.requires_registration
  def handle_for(self, app, user, prefix):
    # Only IRC operators may use KILL.
    user.check_is_irc_operator()
    victim = app.users.get(self.nickname)

    quit_message = "Killed by {}: {}".format(user.nickname, self.reason)
    victim.send(None, errors.Error(quit_message))
    victim.close(quit_message)
| [
"merc.errors.Error"
] | [((624, 655), 'merc.errors.Error', 'errors.Error', (['disconnect_reason'], {}), '(disconnect_reason)\n', (636, 655), False, 'from merc import errors\n')] |
"""Operator to delete an Export Collection by name."""
from bpy.props import StringProperty
from bpy.types import Operator
from ..functions import get_export_collection_by_name
class EmbarkDeleteExportCollection(Operator):  # pylint: disable=too-few-public-methods
    """Deletes the named Export Collection, but leaves all contained objects in the scene."""

    bl_idname = "object.embark_delete_export_collection"
    bl_label = "Delete Export Collection"
    bl_description = "Deletes this Export Collection, but leaves all contained objects in the scene"
    bl_options = {'REGISTER', 'UNDO'}

    collection_name: StringProperty(options={'HIDDEN'})

    def execute(self, context):
        """Looks up the named collection, deletes it, and reports the outcome."""
        collection = get_export_collection_by_name(self.collection_name)
        if collection:
            collection.delete()
            self.report({'INFO'}, f"Deleted Export Collection '{self.collection_name}'")
            return {'FINISHED'}
        self.report({'ERROR'}, f"Failed to find an Export Collection named '{self.collection_name}'")
        return {'CANCELLED'}
| [
"bpy.props.StringProperty"
] | [((624, 658), 'bpy.props.StringProperty', 'StringProperty', ([], {'options': "{'HIDDEN'}"}), "(options={'HIDDEN'})\n", (638, 658), False, 'from bpy.props import StringProperty\n')] |
# coding: utf8
import clinica.pipelines.engine as cpe
class DwiNoddi(cpe.Pipeline):
    """NODDI model for corrected DWI datasets.

    Fits the NODDI microstructure model (via the MATLAB NODDI toolbox) on
    preprocessed DWI data in a CAPS directory, producing ICVF, ISOVF and
    OD maps.
    """

    def check_custom_dependencies(self):
        """Check dependencies that can not be listed in the `info.json` file.
        """
        pass

    def get_input_fields(self):
        """Specify the list of possible inputs of this pipeline.
        Returns:
            A list of (string) input fields name.
        """
        return ['subject_id_list',
                'noddi_preprocessed_dwi',
                'noddi_preprocessed_bvec',
                'noddi_preprocessed_bval',
                'noddi_preprocessed_mask',
                'n_procs',
                'noddi_toolbox_dir',
                'nifti_matlib_dir']

    def get_output_fields(self):
        """Specify the list of possible outputs of this pipeline.
        Returns:
            A list of (string) output fields name.
        """
        return ['fit_icvf', 'fit_isovf', 'fit_od']

    def build_input_node(self):
        """Build and connect an input node to the pipeline.
        """
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        import clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils as utils

        # Collect the preprocessed DWI files (plus gradients and mask) from CAPS.
        subject_id_list, noddi_preprocessed_dwi, noddi_preprocessed_bvec, noddi_preprocessed_bval, noddi_preprocessed_mask = utils.grab_noddi_preprocessed_files(
            self.caps_directory, self.tsv_file)

        read_parameters_node = npe.Node(name="LoadingCLIArguments",
                                         interface=nutil.IdentityInterface(
                                             fields=self.get_input_fields(),
                                             mandatory_inputs=True))
        # NOTE(review): this writes the process count into an input named
        # 'bvalue_str', and 'n_procs' itself is never set on the node even
        # though it is connected below -- looks like a copy/paste slip;
        # confirm against the pipeline parameters.
        read_parameters_node.inputs.bvalue_str = self.parameters['n_procs']['n_procs']
        read_parameters_node.inputs.subject_id_list = subject_id_list
        read_parameters_node.inputs.noddi_preprocessed_dwi = noddi_preprocessed_dwi
        read_parameters_node.inputs.noddi_preprocessed_bvec = noddi_preprocessed_bvec
        read_parameters_node.inputs.noddi_preprocessed_bval = noddi_preprocessed_bval
        read_parameters_node.inputs.noddi_preprocessed_mask = noddi_preprocessed_mask
        read_parameters_node.inputs.noddi_toolbox_dir = self.parameters['noddi_toolbox_dir']['noddi_toolbox_dir']
        read_parameters_node.inputs.nifti_matlib_dir = self.parameters['nifti_matlib_dir']['nifti_matlib_dir']

        self.connect([
            (read_parameters_node, self.input_node, [('subject_id_list', 'subject_id_list')]),
            (read_parameters_node, self.input_node, [('noddi_preprocessed_dwi', 'noddi_preprocessed_dwi')]),
            (read_parameters_node, self.input_node, [('noddi_preprocessed_bvec', 'noddi_preprocessed_bvec')]),
            (read_parameters_node, self.input_node, [('noddi_preprocessed_bval', 'noddi_preprocessed_bval')]),
            (read_parameters_node, self.input_node, [('noddi_preprocessed_mask', 'noddi_preprocessed_mask')]),
            (read_parameters_node, self.input_node, [('n_procs', 'n_procs')]),
            (read_parameters_node, self.input_node, [('noddi_toolbox_dir', 'noddi_toolbox_dir')]),
            (read_parameters_node, self.input_node, [('nifti_matlib_dir', 'nifti_matlib_dir')]),
        ])

    def build_output_node(self):
        """Build and connect an output node to the pipeline.
        """
        import nipype.interfaces.utility as nutil
        import nipype.pipeline.engine as npe
        import nipype.interfaces.io as nio
        import clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils as utils

        # Find container path from DWI filename
        # =====================================
        get_identifiers = npe.MapNode(nutil.Function(
            input_names=['in_file', 'caps_directory'], output_names=['base_directory', 'subst_tuple_list'],
            function=utils.get_subid_sesid), name='get_subid_sesid', iterfield=['in_file'])
        get_identifiers.inputs.caps_directory = self.caps_directory

        # datasink: one sink per subject/session, writing the three NODDI maps
        datasink = npe.MapNode(nio.DataSink(infields=['@fit_icvf', '@fit_isovf', '@fit_od']),
                                name='datasinker',
                                iterfield=['base_directory', 'substitutions', '@fit_icvf', '@fit_isovf', '@fit_od'])

        self.connect([
            # datasink
            (self.output_node, get_identifiers, [('fit_icvf', 'in_file')]),
            (get_identifiers, datasink, [('base_directory', 'base_directory')]),
            (get_identifiers, datasink, [('subst_tuple_list', 'substitutions')]),
            # original files
            (self.output_node, datasink, [('fit_icvf', '@fit_icvf')]),
            (self.output_node, datasink, [('fit_isovf', '@fit_isovf')]),
            (self.output_node, datasink, [('fit_od', '@fit_od')]),
        ])

    def build_core_nodes(self):
        """Build and connect the core nodes of the pipeline.
        """
        import clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils as utils

        # MATLAB NODDI fitting sub-workflow (runs the toolbox per subject).
        processing_pipeline = utils.matlab_noddi_processing(self.caps_directory,
                                                      num_cores=self.parameters['n_procs']['n_procs'],
                                                      bStep=self.parameters['bvalue_str']['bvalue_str'])

        # Connection
        # ==========
        self.connect([
            (self.input_node, processing_pipeline, [('subject_id_list', 'inputnode.subject_id_list')]),
            (self.input_node, processing_pipeline, [('noddi_preprocessed_dwi', 'inputnode.noddi_preprocessed_dwi')]),
            (self.input_node, processing_pipeline, [('noddi_preprocessed_bvec', 'inputnode.noddi_preprocessed_bvec')]),
            (self.input_node, processing_pipeline, [('noddi_preprocessed_bval', 'inputnode.noddi_preprocessed_bval')]),
            (self.input_node, processing_pipeline, [('noddi_preprocessed_mask', 'inputnode.noddi_preprocessed_mask')]),
            (self.input_node, processing_pipeline, [('noddi_toolbox_dir', 'inputnode.noddi_toolbox_dir')]),
            (self.input_node, processing_pipeline, [('nifti_matlib_dir', 'inputnode.nifti_matlib_dir')]),
            # output
            (processing_pipeline, self.output_node, [('outputnode.fit_icvf', 'fit_icvf')]),  # noqa
            (processing_pipeline, self.output_node, [('outputnode.fit_isovf', 'fit_isovf')]),  # noqa
            (processing_pipeline, self.output_node, [('outputnode.fit_od', 'fit_od')])  # noqa
        ])
| [
"clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils.matlab_noddi_processing",
"nipype.interfaces.utility.Function",
"clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils.grab_noddi_preprocessed_files",
"nipype.interfaces.io.DataSink"
] | [((1417, 1488), 'clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils.grab_noddi_preprocessed_files', 'utils.grab_noddi_preprocessed_files', (['self.caps_directory', 'self.tsv_file'], {}), '(self.caps_directory, self.tsv_file)\n', (1452, 1488), True, 'import clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils as utils\n'), ((5248, 5408), 'clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils.matlab_noddi_processing', 'utils.matlab_noddi_processing', (['self.caps_directory'], {'num_cores': "self.parameters['n_procs']['n_procs']", 'bStep': "self.parameters['bvalue_str']['bvalue_str']"}), "(self.caps_directory, num_cores=self.\n parameters['n_procs']['n_procs'], bStep=self.parameters['bvalue_str'][\n 'bvalue_str'])\n", (5277, 5408), True, 'import clinica.pipelines.dwi_processing_noddi.dwi_processing_noddi_utils as utils\n'), ((3919, 4066), 'nipype.interfaces.utility.Function', 'nutil.Function', ([], {'input_names': "['in_file', 'caps_directory']", 'output_names': "['base_directory', 'subst_tuple_list']", 'function': 'utils.get_subid_sesid'}), "(input_names=['in_file', 'caps_directory'], output_names=[\n 'base_directory', 'subst_tuple_list'], function=utils.get_subid_sesid)\n", (3933, 4066), True, 'import nipype.interfaces.utility as nutil\n'), ((4254, 4315), 'nipype.interfaces.io.DataSink', 'nio.DataSink', ([], {'infields': "['@fit_icvf', '@fit_isovf', '@fit_od']"}), "(infields=['@fit_icvf', '@fit_isovf', '@fit_od'])\n", (4266, 4315), True, 'import nipype.interfaces.io as nio\n')] |
import sys
from mattermostdriver import Driver
from jira import JIRA
from datetime import datetime
import urllib3
import json
import re
import webhook
import requests
def getAdjustDict():
    """Return the ordered mapping of Jira wiki-markup regex patterns to
    their Mattermost/Markdown replacements.

    adjustSyntax() applies these substitutions in insertion order, so the
    order of entries is significant.
    """
    adjust_dict = {'h1\. ': '## ',
                   'h2\. ': '## ',
                   'h3\. ': '### ',
                   'h4\. ': '### ',
                   'h5\. ': '#### ',
                   'h6\. ': '#### ',
                   '\s+\*(\w+)\*\s+': '**\\1**',
                   '\s+-(\w+)-\s+': '~~\\1~~',
                   '\?\?(\w+)\?\?': '--\\1',
                   '{{(\w+)}}': '`\\1`',
                   '\[([\w \-\&":,;_\.\+]+)\|([\w\/\.:\-\?=&#\+]+)\]': '[\\1](\\2)',
                   ':\)': u'\u200b:smiley:\u200b',
                   ':\(': u'\u200b:worried:\u200b',
                   ':P': u'\u200b:stuck_out_tongue:\u200b',
                   ':D': u'\u200b:smile:\u200b',
                   '\;\)': u'\u200b:smirk:\u200b',
                   '\(y\)': u'\u200b:thumbsup:\u200b',
                   '\(n\)': u'\u200b:thumbsdown:\u200b',
                   '\(i\)': u'\u200b:information_source:\u200b',
                   '\(\/\)': u'\u200b:white_check_mark:\u200b',
                   '\(x\)': u'\u200b:x:\u200b',
                   '\(\!\)': u'\u200b:warning:\u200b',
                   '\(\?\)': u'\u200b:question:\u200b',
                   '\(\+\)': u'\u200b:heavy_plus_sign:\u200b',
                   '\(\-\)': u'\u200b:heavy_minus_sign:\u200b',
                   '\(on\)': u'\u200b:bulb:\u200b',
                   '\(off\)': u'\u200b:bulb: -off\u200b',
                   '\(\*\)': u'\u200b:star:\u200b',
                   '\(\*y\)': u'\u200b:star:\u200b',
                   '\n # ': '\n 1. ',
                   '(\![\w\-_\.äüö]+\.\w+\!)': '\\1',
                   '!([\w\s\-_\+]*\.\w+)\|thumbnail!': '\\1',
                   '!([\w\s\-_\+]*\.\w+)!': '\\1'}
    return adjust_dict
def getCommentString():
    """Template for one rendered comment: avatar, author, body, created/updated dates."""
    template = ' \n{} **@{}**: \n {} \n\n Erstellt: {} \n Updated: {} \n\n'
    return template
def getCommentHeadline():
    """Markdown headline placed above the comment list."""
    headline = ' \n #### Kommentare:'
    return headline
def getAttachmentString():
    """Markdown headline placed above the attachment list."""
    headline = '\n #### Folgende Anhänge wurden hinzugefügt: \n'
    return headline
def getStatusString(status):
    """Render a status line for the given status name."""
    return f'\n #### Status: {status} \n'
def getAssigneeString(current_avatar_link, current_username):
    """Render the assignee line (avatar image markup + @mention)."""
    return f'\n #### Bearbeiter: {current_avatar_link} @{current_username} \n'
def getAssigneeChangedString(old_avatar_link, old_username, new_avatar_link, new_username):
    """Render an assignee-changed line: old avatar/@user -> new avatar/@user."""
    return (f'\n #### Bearbeiter: von {old_avatar_link} @{old_username}'
            f' zu {new_avatar_link} @{new_username} \n')
def getDescriptionString(description, changed):
    """Render the issue description block, flagged '(verändert)' when changed."""
    suffix = '(verändert)' if changed else ''
    return f'\n #### Beschreibung {suffix}: \n {description} \n'
def getUXDesignChangedString(link):
    """Render the 'UX design added' line linking to the design."""
    return f'\n #### Folgendes UX Design wurde hinzugefügt: [click here]({link})'
def changeString(field, old, new):
    """Render a generic 'field changed from old to new' line."""
    return f'\n #### {field}: von ~~{old}~~ zu {new} \n'
def getTitleString(picture, issue_as_string, link_to_issue, summary):
    """Render the message title: reporter picture, linked issue key, summary."""
    return f'### {picture} [{issue_as_string}]({link_to_issue}): {summary} \n'
def getPictureString(link_to_pic, hover_text):
    # NOTE(review): the format string is empty, so this always returns ''
    # and both arguments are ignored -- the image markdown was probably
    # lost at some point; verify against version history before relying
    # on this.
    return ''.format(link_to_pic, hover_text)
def getPictureStringWithLink(link_to_pic, hover_text):
    # NOTE(review): only one placeholder survives ('[]({})'), so the output
    # is '\n [](<hover_text>) \n' and the remaining three format arguments
    # are ignored -- the image/link markdown appears to have been stripped;
    # confirm intent before changing.
    return '\n []({}) \n'.format(hover_text, link_to_pic, hover_text, link_to_pic)
def formatDate(date):
    """Parse a Jira timestamp, dropping the trailing millisecond/offset suffix
    (the last 9 characters, e.g. '.000+0100')."""
    trimmed = date[:-9]
    return datetime.strptime(trimmed, '%Y-%m-%dT%H:%M:%S')
def getTimestamp():
    """Current local time rebuilt as '<date>T<time>00' from str(datetime.now())."""
    date_part = str(datetime.now())[:-16]
    time_part = str(datetime.now())[11:]
    return date_part + 'T' + time_part + '00'
def sendMattermost(driver, text):
    """Post *text* to the configured Mattermost channel via the incoming webhook
    (token, channel and bot name come from the webhook config module)."""
    return driver.webhooks.call_webhook(webhook.token, {"channel": webhook.channel, "username": webhook.botName, "text": text}) if False else driver.webhooks.call_webhook(webhook.token, {"channel": webhook.channel, "username": webhook.botName, "text": text})
def adjustSyntax(text):
    """Translate Jira wiki markup in *text* to Mattermost Markdown using the
    pattern table from getAdjustDict(), applied in insertion order."""
    for pattern, replacement in getAdjustDict().items():
        text = re.sub(pattern, replacement, text)
    return text
def getStatusAndAssignee(issue):
    """Return (assignee key or 'unlocated', status name) for a Jira issue."""
    status = issue.fields.status.name
    if issue.fields.assignee is None:
        assignee = 'unlocated'
    else:
        assignee = issue.fields.assignee.key
    return assignee, status
def setTimestampJSONFile():
    """Build the JSON-serializable payload holding the current timestamp."""
    return {"timestamp": getTimestamp()}
def readJSON():
    """Read the last-viewed timestamp from the webhook state file.

    Returns the stored timestamp string, or None when reading/parsing
    fails (the error is only printed, not re-raised).
    """
    try:
        with open(webhook.pathToFile, "r") as read_file:
            lastViewed = json.load(read_file)
            lastViewed = lastViewed["timestamp"]
            return lastViewed
    except Exception as inst:
        print(inst.args)
def writeJSON():
    """Persist the module-level `file` payload to the webhook state file.

    NOTE(review): relies on a module-level global named `file` (set
    elsewhere, presumably via setTimestampJSONFile(); it also shadows the
    Python 2 builtin) -- confirm it is assigned before this is called.
    Errors are only printed, not re-raised.
    """
    try:
        with open(webhook.pathToFile, "w") as write_file:
            print("file = {}".format(str(file)))
            json.dump(file, write_file)
    except Exception as inst:
        print(inst.args)
def print_exception_details(entry, inst, issue_as_string):
    """Print full context for a changelog-processing failure to stdout."""
    details = ("Problems with {}! Details: Exception: {} {} Issue: {}; changelog:\n"
               " fromString: {} \n toString: {} ").format(
        'checkAttachmentChanges', type(inst), inst.args, issue_as_string,
        entry.fromString, entry.toString)
    print(details)
def checkNewComments(issue, last_viewed_formatted):
    """Collect comments updated after *last_viewed_formatted* as one string.

    NOTE(review): writes/returns the module-level global `comments_string`;
    when the issue has no (new) comments, the previously stored global is
    returned -- and if it was never set, this raises NameError. Confirm
    the callers reset it between issues.
    """
    global comments_string
    if len(issue.fields.comment.comments) > 0:
        # Keep only comments newer than the last processed timestamp.
        gen = (comment for comment in issue.fields.comment.comments if
               formatDate(comment.updated) > last_viewed_formatted)
        all_comments = [
            (comment.author.name, comment.body, comment.created, comment.updated)
            for comment in gen]
        comments_selected = [
            getCommentString().format(getPictureString(default_avatar_url + author, author), author, adjustSyntax(body),
                                      formatDate(created),
                                      formatDate(updated))
            for (author, body, created, updated) in all_comments]
        if len(comments_selected) > 0:
            comments_string = seperator.join(comments_selected)
    return comments_string
def checkStatusChanges(entry, new_status, issue_as_string):
    """Update the global ``status_string`` from one changelog entry.

    Returns (status_string, status_mentioned). ``status_mentioned`` tells the
    caller (checkForUpdates) that an explicit status transition was rendered,
    so later entries for the same issue are skipped.
    NOTE(review): ``status_mentioned`` starts False on every call, so the
    ``and not status_mentioned`` / ``elif not status_mentioned`` guards are
    always-true here; the flag only becomes meaningful via the return value.
    """
    global status_string
    status_mentioned = False
    if entry.field == 'status' and not status_mentioned:
        try:
            if entry.fromString is not None:
                old_status = entry.fromString
                # Render "old -> new" transition text.
                status_string = changeString('Status', old_status, new_status)
                status_mentioned = True
        except Exception as inst:
            print_exception_details(entry, inst, issue_as_string)
    elif not status_mentioned:
        # Non-status entry: fall back to showing the current status only.
        status_string = getStatusString(new_status)
    return status_string, status_mentioned
def checkAssigneeChanges(entry, assignee, issue_as_string):
    """Update the global ``assignee_string`` from one changelog entry.

    Returns (assignee_string, assignee_mentioned); the flag mirrors the
    pattern in checkStatusChanges and tells checkForUpdates to stop looking
    at assignee entries for this issue. The previous assignee's display name
    is resolved back to a user key via jira.search_users so the avatar URL
    can be built for both old and new assignee.
    """
    global assignee_string
    assignee_mentioned = False
    if entry.field == 'assignee' and not assignee_mentioned:
        try:
            if entry.fromString is not None:
                # fromString holds a display name; resolve it to a user key.
                oldAssignee = jira.search_users(entry.fromString)[0].key
                assignee_string = getAssigneeChangedString(
                    getPictureString(default_avatar_url + oldAssignee, oldAssignee),
                    oldAssignee,
                    getPictureString(default_avatar_url + assignee, assignee),
                    assignee)
                assignee_mentioned = True
        except Exception as inst:
            print_exception_details(entry, inst, issue_as_string)
    elif not assignee_mentioned:
        assignee_string = create_default_assignee_string(assignee)
    return assignee_string, assignee_mentioned
def checkAttachmentChanges(entry, issue_as_string):
    """Accumulate attachment links from changelog entries into globals.

    Each 'Attachment' entry is resolved to a (name, url) pair, rendered as a
    linked picture string and appended to the global ``attachments`` list;
    ``attachment_string`` is then the ``seperator``-joined list and
    ``attachment_included`` flags that there is something to post.
    Failures (e.g. deleted attachments) are logged and leave the globals
    unchanged. Returns (attachment_string, attachment_included).
    """
    global attachment_string, attachments, attachment_included
    if entry.field == 'Attachment':
        try:
            pictureString, pictureURL = get_attachment_data(entry)
            attachment_string = getPictureStringWithLink(pictureURL, pictureString)
            attachments.append(attachment_string)
            # Drop empty/None entries before joining.
            attachments = list(filter(None, attachments))
            if len(attachments) > 0:
                attachment_string = seperator.join(attachments)
                attachment_included = True
        except Exception as inst:
            print_exception_details(entry, inst, issue_as_string)
    return attachment_string, attachment_included
def get_attachment_data(entry):
    """Resolve a changelog attachment entry to (filename, content_url).

    Raises ValueError when the entry carries no attachment id
    (``entry.to is None``). Previously that path crashed with an
    UnboundLocalError because ``pictureURL`` was never assigned; every
    caller already wraps this in try/except and logs the exception, so
    raising a descriptive error keeps the control flow identical while
    making the failure understandable.
    """
    pictureString = entry.toString
    pictureID = entry.to
    if pictureID is None:
        raise ValueError("changelog entry has no attachment id (entry.to is None)")
    attachmentObject = jira.attachment(pictureID)
    pictureURL = attachmentObject.content
    return pictureString, pictureURL
def checkDescriptionChanges(entry, issue_as_string, issue):
    """Refresh the global ``description_string`` on a 'description' entry.

    Uses the issue's CURRENT description (not the changelog diff), pushed
    through adjustSyntax() for Mattermost-compatible markup. Errors are
    logged and leave the global unchanged. Returns description_string.
    """
    global description_string
    if entry.field == 'description':
        try:
            newDescription = adjustSyntax(issue.fields.description)
            description_string = getDescriptionString(newDescription, True)
        except Exception as inst:
            print_exception_details(entry, inst, issue_as_string)
    return description_string
def checkUXDesignChanges(entry, issue_as_string):
    """Refresh the global ``ux_string`` on a 'UX Design' custom-field entry.

    The entry's new value is treated as a link and rendered via
    getUXDesignChangedString(). Errors are logged; returns ux_string.
    """
    global ux_string
    if entry.field == 'UX Design':
        try:
            newUXDesign_link = entry.toString
            ux_string = getUXDesignChangedString(newUXDesign_link)
        except Exception as inst:
            print_exception_details(entry, inst, issue_as_string)
    return ux_string
def checkIssueTypeChanges(entry, issue_as_string):
    """Refresh the global ``issueType_string`` on an 'issuetype' entry.

    Renders an "old -> new" transition line. Errors are logged and leave
    the global unchanged. Returns issueType_string.
    """
    global issueType_string
    if entry.field == 'issuetype':
        try:
            old_type, new_type = get_changes(entry)
            issueType_string = changeString('Ticket-Typ', old_type, new_type)
        except Exception as inst:
            print_exception_details(entry, inst, issue_as_string)
    return issueType_string
def checkPriorityChanges(entry, issue_as_string):
    """Refresh the global ``priority_string`` on a 'priority' entry.

    Renders an "old -> new" transition line. Errors are logged. Returns
    priority_string, though checkForUpdates reads the global instead of the
    return value.
    """
    global priority_string
    if entry.field == 'priority':
        try:
            old_priority, new_priority = get_changes(entry)
            priority_string = changeString('Priorität', old_priority, new_priority)
        except Exception as inst:
            print_exception_details(entry, inst, issue_as_string)
    return priority_string
def get_changes(entry):
    """Return the (old, new) display values of a changelog entry."""
    old_value = entry.fromString
    new_value = entry.toString
    return old_value, new_value
def checkForUpdates(historyItem, assignee, status, issue_as_string, issue):
    """Dispatch one changelog item to every enabled check* handler.

    Each webhook.post* flag gates its checker; the *_included flags ensure
    status/assignee transitions are rendered at most once per issue. All
    state flows through module globals that reset_variables() must have
    cleared before the first call for an issue. Returns the full tuple of
    rendered fragments plus attachment_included.
    """
    global status_included, assignee_included, attachment_included, attachments, status_string, assignee_string, attachment_string, description_string, ux_string, issueType_string, priority_string
    if webhook.postStatus and not status_included:
        status_string, status_included = checkStatusChanges(historyItem, status, issue_as_string)
    if webhook.postAssignee and not assignee_included:
        assignee_string, assignee_included = checkAssigneeChanges(historyItem, assignee, issue_as_string)
    if webhook.postAttachment:
        attachment_string, attachment_included = checkAttachmentChanges(historyItem, issue_as_string)
    if webhook.postDescription:
        description_string = checkDescriptionChanges(historyItem, issue_as_string, issue)
    if webhook.postUXDesign:
        ux_string = checkUXDesignChanges(historyItem, issue_as_string)
    if webhook.postIssueType:
        issueType_string = checkIssueTypeChanges(historyItem, issue_as_string)
    if webhook.postPriority:
        # Result is read back via the module global priority_string.
        checkPriorityChanges(historyItem, issue_as_string)
    return status_string, assignee_string, attachment_string, description_string, ux_string, issueType_string, priority_string, attachment_included
def init_global_vars():
    """Initialise every module-level state variable used by the checkers.

    The original declared 15 globals but only assigned ``seperator``,
    leaving the other 14 undefined until reset_variables() first ran — a
    latent NameError if any rendering path executed before that. All of
    them now start from the same safe defaults reset_variables() uses.
    """
    global seperator, message, status_included, assignee_included, attachment_included, attachments, all_attachments, status_string, assignee_string, attachment_string, description_string, comments_string, ux_string, issueType_string, priority_string
    seperator = '___'
    message = ''
    status_string = assignee_string = attachment_string = ''
    description_string = comments_string = ''
    ux_string = issueType_string = priority_string = ''
    status_included = assignee_included = attachment_included = False
    attachments = []
    all_attachments = []
def init_jira():
    """Create an authenticated JIRA client from the webhook settings."""
    credentials = (webhook.username, webhook.password)
    return JIRA(webhook.jiraUrl, auth=credentials)
def init_mattermost_driver():
    """Create a Mattermost Driver from the webhook connection settings."""
    options = {
        'url': webhook.url,
        'scheme': webhook.scheme,
        'port': webhook.port,
        'verify': webhook.verify,
    }
    return Driver(options)
def iterate_through_issues():
    """Main loop: post one Mattermost message per recently-updated issue.

    Searches Jira for issues updated since the stored ``lastViewed``
    timestamp. Brand-new issues (created after the cutoff) get a summary
    message; existing issues get a message assembled from their changelog.
    NOTE(review): ``message`` accumulates across issues (``message +
    append_string``), so each send repeats all previous issues' text —
    looks intentional given sendMattermost is called inside the loop, but
    verify against the desired channel output.
    """
    global message, status_string, assignee_string, attachment_string, description_string, ux_string, issueType_string, priority_string, attachment_included, comments_string, status_included, assignee_included, attachments, all_attachments
    last_viewed_formatted = formatDate(lastViewed)
    # Jira JQL wants the timestamp without seconds' fractional part.
    jira_timestamp = str(last_viewed_formatted)[:-3]
    for selectedIssue in jira.search_issues(webhook.projectSearchString.format(jira_timestamp), maxResults=webhook.maxResults,
                                           expand=webhook.expand):
        issue = jira.issue(str(selectedIssue))
        assignee, status = getStatusAndAssignee(issue)
        issue_as_string = str(issue)
        # Clear all per-issue accumulator globals before rendering.
        reset_variables()
        priority_picture = getPictureString(issue.fields.priority.iconUrl, issue.fields.priority.name)
        issue_link = webhook.jqlQueryString + issue_as_string
        all_attachments = collect_all_attachments(issue_as_string, selectedIssue)
        if formatDate(issue.fields.created) >= last_viewed_formatted:
            # Newly created issue: summary message only.
            append_string = collect_main_information(all_attachments, assignee, issue, issue_as_string, issue_link,
                                                     priority_picture, status)
        else:
            # Existing issue: walk the changelog entries since the cutoff.
            init_message_with_title(issue, issue_as_string, issue_link, priority_picture)
            iterate_through_changelog(assignee, issue_as_string, last_viewed_formatted, selectedIssue, status, issue)
            description_string = link_attachments(all_attachments, description_string)
            append_string = construct_append_string(assignee_string, attachment_included, attachment_string,
                                                    description_string, issue, issueType_string, last_viewed_formatted,
                                                    priority_string, status_string, ux_string, status_included, assignee_included, assignee, status)
        message = message + append_string
        sendMattermost(driver, message)
def collect_main_information(all_attachments, assignee, issue, issue_as_string, issue_link, priority_picture, status):
    """Build the summary message fragment for a freshly created issue.

    Initialises the global ``message`` with a ':new:'-prefixed title, then
    appends (flag-gated) description, status and assignee fragments via the
    shared globals. Returns the concatenated fragment to append to message.
    """
    global description_string, status_string, assignee_string
    init_message_with_title(issue, issue_as_string, issue_link, ':new: ' + priority_picture)
    if webhook.postDescription and issue.fields.description != '':
        # Rewrite attachment filenames inside the description into links.
        description_string = link_attachments(all_attachments, issue.fields.description)
        description_string = getDescriptionString(description_string, False)
    if webhook.postStatus:
        status_string = getStatusString(status)
    if webhook.postAssignee:
        assignee_string = create_default_assignee_string(assignee)
    append_string = status_string + assignee_string + description_string
    return append_string
def link_attachments(all_attachments, text):
    """Replace each attachment filename in *text* with a Markdown link.

    Entries of *all_attachments* have the form "<filename>|<url>"
    (built by collect_all_attachments). Two fixes over the original:

    * the filename is regex-escaped before compiling — filenames contain
      '.', which used to match any character;
    * the replacement is a real Markdown link "[name](url)". The original
      template "[]({})" silently ignored its extra format arguments and
      produced an empty-text link pointing at the filename, not the URL.

    Malformed entries are still logged and skipped (best effort), and the
    (possibly rewritten) text is returned.
    """
    for attachment in all_attachments:
        try:
            picture_string, picture_url = attachment.split("|", 1)
            pattern = re.compile(re.escape(picture_string))
            text = pattern.sub("[{}]({})".format(picture_string, picture_url), text)
        except Exception as inst:
            print(inst.args)
    return text
def collect_all_attachments(issue_as_string, selectedIssue):
    """Collect "<filename>|<url>" strings for every attachment in the changelog.

    Rebuilds the global ``all_attachments`` from scratch by walking ALL
    changelog histories (not just recent ones) so link_attachments() can
    rewrite any filename mentioned in the description. Entries without an
    attachment id are skipped; resolution failures are logged.
    """
    global all_attachments
    all_attachments = []
    for item in (history for history in selectedIssue.changelog.histories):
        for entry in item.items:
            if entry.field == 'Attachment':
                try:
                    if entry.to is not None:
                        pictureString, pictureURL = get_attachment_data(entry)
                        all_attachments.append("{}|{}".format(pictureString, pictureURL))
                except Exception as inst:
                    print_exception_details(entry, inst, issue_as_string)
    return all_attachments
def construct_append_string(assignee_string, attachment_included, attachment_string, description_string, issue,
                            issueType_string, last_viewed_formatted, priority_string, status_string, ux_string, status_included, assignee_included, assignee, status):
    """Assemble the final message fragment for an existing (changed) issue.

    Adds new comments (flag-gated), an attachment section when attachments
    changed, and fills in default status/assignee lines when the changelog
    produced no explicit transition. The fragments are concatenated in a
    fixed display order and returned.
    """
    global comments_string
    if webhook.postComments:
        comments_string = checkNewComments(issue, last_viewed_formatted)
        if comments_string != '':
            comments_string = getCommentHeadline() + comments_string
    if attachment_included:
        append_attachment = getAttachmentString() + attachment_string
    else:
        append_attachment = ''
    if not status_included:
        # No status transition rendered: show the current status instead.
        status_string = getStatusString(status)
    if not assignee_included:
        assignee_string = create_default_assignee_string(assignee)
    append_string = priority_string + issueType_string + status_string + assignee_string + ux_string + description_string + comments_string + append_attachment
    return append_string
def create_default_assignee_string(assignee):
    """Render the standard assignee line (avatar + name) for *assignee*."""
    avatar = getPictureString(default_avatar_url + assignee, assignee)
    return getAssigneeString(avatar, assignee)
def iterate_through_changelog(assignee, issue_as_string, last_viewed_formatted, selectedIssue, status, issue):
    """Feed every changelog item newer than the cutoff into checkForUpdates.

    Each handled item rewrites the shared fragment globals; only histories
    created strictly after *last_viewed_formatted* are considered.
    """
    global status_string, assignee_string, attachment_string, description_string, ux_string, issueType_string, priority_string, attachment_included
    for item in (history for history in selectedIssue.changelog.histories if
                 formatDate(history.created) > last_viewed_formatted):
        for selectedItem in item.items:
            status_string, assignee_string, attachment_string, description_string, ux_string, issueType_string, priority_string, attachment_included = checkForUpdates(
                selectedItem, assignee, status, issue_as_string, issue)
def init_message_with_title(issue, issue_as_string, issue_link, priority_picture):
    """Reset the global message buffer to the rendered ticket title line."""
    global message
    summary = str(issue.fields.summary)
    message = getTitleString(priority_picture, issue_as_string, issue_link, summary)
def reset_variables():
    """Clear all per-issue accumulator globals before processing an issue."""
    global status_string, assignee_string, attachment_string, description_string, comments_string, ux_string, issueType_string, priority_string, status_included, assignee_included, attachment_included, attachments, all_attachments
    status_string = ''
    assignee_string = ''
    attachment_string = ''
    description_string = ''
    comments_string = ''
    ux_string = ''
    issueType_string = ''
    priority_string = ''
    status_included = False
    assignee_included = False
    attachment_included = False
    attachments = []
    all_attachments = []
if __name__ == '__main__':
    # Entry point: initialise shared state, connect to Jira and Mattermost,
    # post everything that changed since the stored timestamp, then persist
    # the new timestamp for the next run.
    init_global_vars()
    # Jira serves user avatars by user key via this endpoint.
    default_avatar_url = webhook.jiraUrl + 'secure/useravatar?ownerId='
    jira = init_jira()
    driver = init_mattermost_driver()
    lastViewed = readJSON()
    iterate_through_issues()
    # Persist the run timestamp only after a successful pass, so a crash
    # makes the next run re-process the same window instead of losing it.
    file = setTimestampJSONFile()
    writeJSON()
| [
"webhook.projectSearchString.format",
"re.compile",
"datetime.datetime.strptime",
"jira.JIRA",
"datetime.datetime.now",
"json.load",
"json.dump",
"mattermostdriver.Driver"
] | [((3606, 3655), 'datetime.datetime.strptime', 'datetime.strptime', (['date[:-9]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(date[:-9], '%Y-%m-%dT%H:%M:%S')\n", (3623, 3655), False, 'from datetime import datetime\n'), ((11781, 11845), 'jira.JIRA', 'JIRA', (['webhook.jiraUrl'], {'auth': '(webhook.username, webhook.password)'}), '(webhook.jiraUrl, auth=(webhook.username, webhook.password))\n', (11785, 11845), False, 'from jira import JIRA\n'), ((11907, 12013), 'mattermostdriver.Driver', 'Driver', (["{'url': webhook.url, 'scheme': webhook.scheme, 'port': webhook.port,\n 'verify': webhook.verify}"], {}), "({'url': webhook.url, 'scheme': webhook.scheme, 'port': webhook.port,\n 'verify': webhook.verify})\n", (11913, 12013), False, 'from mattermostdriver import Driver\n'), ((12448, 12498), 'webhook.projectSearchString.format', 'webhook.projectSearchString.format', (['jira_timestamp'], {}), '(jira_timestamp)\n', (12482, 12498), False, 'import webhook\n'), ((4500, 4520), 'json.load', 'json.load', (['read_file'], {}), '(read_file)\n', (4509, 4520), False, 'import json\n'), ((4794, 4821), 'json.dump', 'json.dump', (['file', 'write_file'], {}), '(file, write_file)\n', (4803, 4821), False, 'import json\n'), ((4005, 4020), 're.compile', 're.compile', (['key'], {}), '(key)\n', (4015, 4020), False, 'import re\n'), ((3727, 3741), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3739, 3741), False, 'from datetime import datetime\n'), ((15089, 15115), 're.compile', 're.compile', (['picture_string'], {}), '(picture_string)\n', (15099, 15115), False, 'import re\n'), ((3693, 3707), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3705, 3707), False, 'from datetime import datetime\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Cerealizer
# Copyright (C) 2005-2006 <NAME>
#
# This program is free software.
# It is available under the Python licence.
import imp
import unittest
import cerealizer
class TestBasicType(unittest.TestCase):
  """Round-trip tests for cerealizer: dumps() then loads() must reproduce
  the original value, covering built-in types, custom classes, the
  pickle-style protocols (__getstate__/__setstate__, __getinitargs__,
  __getnewargs__), shared references, cycles, slots and class aliases.

  Fix: test_alias used imp.reload(); the imp module is deprecated since
  Python 3.4 and removed in 3.12, so importlib.reload() is used instead.
  """

  def setUp(self):
    self.obj = [1, 2, "jiba"]

  def loads_dumps_and_compare(self, obj1):
    """Serialise obj1, deserialise it, and assert equality."""
    obj2 = cerealizer.loads(cerealizer.dumps(obj1))
    assert obj1 == obj2

  # --- atomic and built-in container types -------------------------------
  def test_int1(self): self.loads_dumps_and_compare(7828)
  def test_int2(self): self.loads_dumps_and_compare(-579)
  def test_float1(self): self.loads_dumps_and_compare(4.9)
  def test_float2(self): self.loads_dumps_and_compare(-0.0043)
  def test_float3(self): self.loads_dumps_and_compare(4.0)
  def test_complex(self): self.loads_dumps_and_compare(1 + 2j)
  def test_string1(self): self.loads_dumps_and_compare("jiba")
  def test_string2(self): self.loads_dumps_and_compare("jibé")
  def test_unicode1(self): self.loads_dumps_and_compare(u"jiba")
  def test_unicode2(self): self.loads_dumps_and_compare(u"jibé")
  def test_tuple1(self): self.loads_dumps_and_compare(())
  def test_tuple2(self): self.loads_dumps_and_compare((1, 2.2, "jiba"))
  def test_tuple3(self): self.loads_dumps_and_compare((1, (2.2, "jiba")))
  def test_frozenset(self): self.loads_dumps_and_compare(frozenset([1, (2.2, "jiba")]))
  def test_list1(self): self.loads_dumps_and_compare([])
  def test_list2(self): self.loads_dumps_and_compare([1, 2.2, "jiba"])
  def test_list3(self): self.loads_dumps_and_compare([1, [2.2, "jiba"]])
  def test_set1(self): self.loads_dumps_and_compare(set())
  def test_set2(self): self.loads_dumps_and_compare(set([1, 2.2, "jiba"]))
  def test_dict1(self): self.loads_dumps_and_compare({})
  def test_dict2(self): self.loads_dumps_and_compare({"jiba": 100, "other": 0 })
  def test_dict3(self): self.loads_dumps_and_compare({"jiba": range(100), "other": {1: 2}})
  def test_None(self): self.loads_dumps_and_compare(None)

  # --- custom classes and state protocols --------------------------------
  def test_obj_oldstyle(self):
    class Obj1:
      def __init__(self):
        self.x = 1
        self.name = "jiba"
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.__dict__ == b.__dict__)
    cerealizer.register(Obj1)
    self.loads_dumps_and_compare(Obj1())

  def test_obj_setstate_priority1(self):
    # The contained list must be fully restored BEFORE __setstate__ runs.
    LIST = [1, 2, 3]
    class ObjList(object):
      def __init__(self):
        self.x = 1
        self.name = "jiba"
        self.list = LIST[:]
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.__dict__ == b.__dict__)
      def __setstate__(self, state):
        assert state["list"] == LIST  # Test that list is initialized BEFORE the object
        self.__dict__ = state
    cerealizer.register(ObjList)
    self.loads_dumps_and_compare(ObjList())

  def test_obj_setstate_priority2(self):
    # Same as above, with a nested tuple instead of a list.
    TUPLE = (1, 2, 3, (4, (5, 6, (7,))))
    class ObjTuple(object):
      def __init__(self):
        self.x = 1
        self.name = "jiba"
        self.list = TUPLE[:]
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.__dict__ == b.__dict__)
      def __setstate__(self, state):
        assert state["list"] == TUPLE  # Test that list is initialized BEFORE the object
        self.__dict__ = state
    cerealizer.register(ObjTuple)
    self.loads_dumps_and_compare(ObjTuple())

  def test_obj_getstate_setstate(self):
    # __getstate__'s return value must be handed verbatim to __setstate__.
    STATE = (1, 2, "jiba")
    class ObjState(object):
      def __init__(self):
        self.x = 1
        self.name = "jiba"
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.__dict__ == b.__dict__)
      def __getstate__(self): return STATE
      def __setstate__(self, state):
        assert state == STATE
        self.x = 1
        self.name = "jiba"
    cerealizer.register(ObjState)
    self.loads_dumps_and_compare(ObjState())

  def test_obj_new_and_init(self):
    # Deserialisation must call __new__ (again) but never __init__.
    nbs = [0, 0]
    class ObjInitNew(object):
      def __new__(Class):
        nbs[0] += 1
        return object.__new__(Class)
      def __init__(self):
        nbs[1] += 1
        self.x = 1
        self.name = "jiba"
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.__dict__ == b.__dict__)
    cerealizer.register(ObjInitNew)
    o = ObjInitNew()
    self.loads_dumps_and_compare(o)
    assert nbs == [2, 1]

  # --- shared references and cycles --------------------------------------
  def test_identity(self):
    # Two references to one object stay one object after the round trip.
    o = {}
    l1 = [o, o]
    l2 = cerealizer.loads(cerealizer.dumps(l1))
    assert l2[0] is l2[1]

  def test_cycle1(self):
    obj1 = [1, [2.2, "jiba"]]
    obj1[1].append(obj1)
    obj2 = cerealizer.loads(cerealizer.dumps(obj1))
    assert repr(obj1) == repr(obj2)  # Cannot use == on cyclic list!

  def test_cycle2(self):
    class ObjEmpty(object):
      pass
    cerealizer.register(ObjEmpty)
    o = ObjEmpty()
    o.o = o
    o2 = cerealizer.loads(cerealizer.dumps(o))
    assert o2.o is o2

  def test_cycle3(self):
    # Parent <-> child cycle going through a custom get/setstate pair.
    class Parent: pass
    class Child:
      def __init__(self, parent): self.parent = parent
      def __getstate__(self): return (self.parent,)
      def __setstate__(self, state): self.parent = state[0]
    cerealizer.register(Parent)
    cerealizer.register(Child)
    p = Parent()
    p.c = Child(p)
    p2 = cerealizer.loads(cerealizer.dumps(p))
    assert p2.c.parent is not None

  # --- slots and constructor-argument protocols --------------------------
  def test_obj_slot(self):
    class ObjSlot(object):
      __slots__ = ["x", "name"]
      def __init__(self):
        self.x = 11.1
        self.name = "jiba"
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.x == b.x) and (a.name == b.name)
    cerealizer.register(ObjSlot)
    o = ObjSlot()
    self.loads_dumps_and_compare(o)

  def test_obj_initargs1(self):
    class ObjInitargs:
      def __init__(self, x, name):
        self.x = x
        self.name = name
      def __getinitargs__(self): return self.x, self.name
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.x == b.x) and (a.name == b.name)
    cerealizer.register(ObjInitargs)
    o = ObjInitargs(45, u"uioef")
    self.loads_dumps_and_compare(o)

  def test_obj_newargs1(self):
    class ObjNewargs1(object):
      def __new__(Class, x, name):
        self = object.__new__(Class)
        self.x = x
        self.name = name
        return self
      def __getnewargs__(self): return self.x, self.name
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.x == b.x) and (a.name == b.name)
    cerealizer.register(ObjNewargs1)
    o = ObjNewargs1(45, u"uioef")
    self.loads_dumps_and_compare(o)

  def test_obj_newargs2(self):
    # A newargs object nested inside another newargs object.
    class ObjNewargs2(object):
      def __new__(Class, x):
        self = object.__new__(Class)
        self.x = x
        return self
      def __getnewargs__(self): return (self.x,)
      def __eq__(a, b): return (a.__class__ is b.__class__) and (a.x == b.x)
    cerealizer.register(ObjNewargs2)
    o = ObjNewargs2(45)
    o2 = ObjNewargs2(o)
    self.loads_dumps_and_compare(o2)

  def test_alias(self):
    # Dump under one class name, reload the module, and load the payload
    # back through an alias registered for the old name.
    import importlib
    class OldObj(object):
      def __init__(self):
        self.x = 1
        self.name = "jiba"
    cerealizer.register(OldObj)
    o = OldObj()
    s = cerealizer.dumps(o)
    importlib.reload(cerealizer)
    class NewObj(object):
      def __init__(self):
        self.x = 2
        self.name = "jiba2"
    cerealizer.register(NewObj)
    cerealizer.register_alias(NewObj, "__main__.OldObj")
    o = cerealizer.loads(s)
    assert o.__class__ is NewObj
    assert o.x == 1
    assert o.name == "jiba"
| [
"cerealizer.register_alias",
"cerealizer.loads",
"cerealizer.register",
"imp.reload",
"cerealizer.dumps"
] | [((2336, 2361), 'cerealizer.register', 'cerealizer.register', (['Obj1'], {}), '(Obj1)\n', (2355, 2361), False, 'import cerealizer\n'), ((2921, 2949), 'cerealizer.register', 'cerealizer.register', (['ObjList'], {}), '(ObjList)\n', (2940, 2949), False, 'import cerealizer\n'), ((3535, 3564), 'cerealizer.register', 'cerealizer.register', (['ObjTuple'], {}), '(ObjTuple)\n', (3554, 3564), False, 'import cerealizer\n'), ((4113, 4142), 'cerealizer.register', 'cerealizer.register', (['ObjState'], {}), '(ObjState)\n', (4132, 4142), False, 'import cerealizer\n'), ((4619, 4650), 'cerealizer.register', 'cerealizer.register', (['ObjInitNew'], {}), '(ObjInitNew)\n', (4638, 4650), False, 'import cerealizer\n'), ((5197, 5226), 'cerealizer.register', 'cerealizer.register', (['ObjEmpty'], {}), '(ObjEmpty)\n', (5216, 5226), False, 'import cerealizer\n'), ((5613, 5640), 'cerealizer.register', 'cerealizer.register', (['Parent'], {}), '(Parent)\n', (5632, 5640), False, 'import cerealizer\n'), ((5649, 5675), 'cerealizer.register', 'cerealizer.register', (['Child'], {}), '(Child)\n', (5668, 5675), False, 'import cerealizer\n'), ((6121, 6149), 'cerealizer.register', 'cerealizer.register', (['ObjSlot'], {}), '(ObjSlot)\n', (6140, 6149), False, 'import cerealizer\n'), ((6553, 6585), 'cerealizer.register', 'cerealizer.register', (['ObjInitargs'], {}), '(ObjInitargs)\n', (6572, 6585), False, 'import cerealizer\n'), ((7084, 7116), 'cerealizer.register', 'cerealizer.register', (['ObjNewargs1'], {}), '(ObjNewargs1)\n', (7103, 7116), False, 'import cerealizer\n'), ((7546, 7578), 'cerealizer.register', 'cerealizer.register', (['ObjNewargs2'], {}), '(ObjNewargs2)\n', (7565, 7578), False, 'import cerealizer\n'), ((7835, 7862), 'cerealizer.register', 'cerealizer.register', (['OldObj'], {}), '(OldObj)\n', (7854, 7862), False, 'import cerealizer\n'), ((7896, 7915), 'cerealizer.dumps', 'cerealizer.dumps', (['o'], {}), '(o)\n', (7912, 7915), False, 'import cerealizer\n'), ((7925, 7947), 'imp.reload', 
'imp.reload', (['cerealizer'], {}), '(cerealizer)\n', (7935, 7947), False, 'import imp\n'), ((8081, 8108), 'cerealizer.register', 'cerealizer.register', (['NewObj'], {}), '(NewObj)\n', (8100, 8108), False, 'import cerealizer\n'), ((8117, 8169), 'cerealizer.register_alias', 'cerealizer.register_alias', (['NewObj', '"""__main__.OldObj"""'], {}), "(NewObj, '__main__.OldObj')\n", (8142, 8169), False, 'import cerealizer\n'), ((8182, 8201), 'cerealizer.loads', 'cerealizer.loads', (['s'], {}), '(s)\n', (8198, 8201), False, 'import cerealizer\n'), ((395, 417), 'cerealizer.dumps', 'cerealizer.dumps', (['obj1'], {}), '(obj1)\n', (411, 417), False, 'import cerealizer\n'), ((4840, 4860), 'cerealizer.dumps', 'cerealizer.dumps', (['l1'], {}), '(l1)\n', (4856, 4860), False, 'import cerealizer\n'), ((5015, 5037), 'cerealizer.dumps', 'cerealizer.dumps', (['obj1'], {}), '(obj1)\n', (5031, 5037), False, 'import cerealizer\n'), ((5296, 5315), 'cerealizer.dumps', 'cerealizer.dumps', (['o'], {}), '(o)\n', (5312, 5315), False, 'import cerealizer\n'), ((5751, 5770), 'cerealizer.dumps', 'cerealizer.dumps', (['p'], {}), '(p)\n', (5767, 5770), False, 'import cerealizer\n')] |
#!/usr/bin/python
import RPi.GPIO as GPIO
import time
# Sweep a servo connected to header pin 11 between its two extremes
# until the user presses Ctrl-C.
GPIO.setmode(GPIO.BOARD)
GPIO.setup(11, GPIO.OUT)
p = GPIO.PWM(11, 50)  # 50 Hz PWM — the usual hobby-servo frame rate
p.start(7.5)
try:
    while True:
        # MID
        #p.ChangeDutyCycle(7.5)
        #time.sleep(1)
        # LEFT
        p.ChangeDutyCycle(20)
        time.sleep(1)
        # RIGHT
        p.ChangeDutyCycle(1)
        time.sleep(1)
except KeyboardInterrupt:
    pass
finally:
    # Always stop the PWM and release the GPIO pins. The original only
    # cleaned up on KeyboardInterrupt, so any other exception left the
    # pin configured and the PWM thread running.
    p.stop()
    GPIO.cleanup()
| [
"RPi.GPIO.cleanup",
"RPi.GPIO.setup",
"time.sleep",
"RPi.GPIO.PWM",
"RPi.GPIO.setmode"
] | [((56, 80), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BOARD'], {}), '(GPIO.BOARD)\n', (68, 80), True, 'import RPi.GPIO as GPIO\n'), ((81, 105), 'RPi.GPIO.setup', 'GPIO.setup', (['(11)', 'GPIO.OUT'], {}), '(11, GPIO.OUT)\n', (91, 105), True, 'import RPi.GPIO as GPIO\n'), ((110, 126), 'RPi.GPIO.PWM', 'GPIO.PWM', (['(11)', '(50)'], {}), '(11, 50)\n', (118, 126), True, 'import RPi.GPIO as GPIO\n'), ((283, 296), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (293, 296), False, 'import time\n'), ((350, 363), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (360, 363), False, 'import time\n'), ((407, 421), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (419, 421), True, 'import RPi.GPIO as GPIO\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 22 13:13:06 2021
@author: hossam
"""
from base import BaseTrain
import numpy as np
import matplotlib.pylab as plt
from matplotlib.pyplot import savefig
from scipy.stats import multivariate_normal
class vaeTrainer(BaseTrain):
  """Trainer for the VAE phase: per-epoch training, validation,
  checkpointing and diagnostic plotting."""

  def __init__(self, sess, model, data, config):
    """Forward all constructor arguments to the BaseTrain base class."""
    super().__init__(sess, model, data, config)
  def train_epoch(self):
    """Run one full VAE epoch: train, validate, checkpoint and plot.

    Reads the current epoch index from the graph, iterates the training
    set (evaluating the test set once on the last training batch), then
    the validation set, saves the model, logs losses, and produces the
    per-epoch diagnostic plots.
    """
    self.cur_epoch = self.model.cur_epoch_tensor.eval(self.sess)
    # training
    # Re-seed the dataset iterator with the epoch so shuffling differs
    # per epoch but stays reproducible.
    self.sess.run(self.model.iterator.initializer,
                  feed_dict={self.model.original_signal: self.data.train_set_vae['data'],
                             self.model.seed: self.cur_epoch})
    self.n_train_iter = self.data.n_train_vae // self.config['batch_size']
    idx_check_point = (self.n_train_iter - 1)
    train_loss_cur_epoch = 0.0
    for i in range(self.n_train_iter):
      loss = self.train_step()
      self.sess.run(self.model.increment_global_step_tensor)
      self.train_loss.append(np.squeeze(loss))
      train_loss_cur_epoch = train_loss_cur_epoch + loss
      if i == idx_check_point:
        # Evaluate the held-out test set once per epoch, on the last batch.
        test_loss, test_recons_loss_weighted, test_kl, test_sigma_regularisor, test_code_std_norm, test_cur_sigma2, test_recons_loss_ls = self.test_step()
    self.train_loss_ave_epoch.append(train_loss_cur_epoch / self.n_train_iter)
    # validation
    self.iter_epochs_list.append(self.n_train_iter * (self.cur_epoch + 1))
    self.sess.run(self.model.iterator.initializer,
                  feed_dict={self.model.original_signal: self.data.val_set_vae['data'],
                             self.model.seed: self.cur_epoch})
    self.n_val_iter = self.data.n_val_vae // self.config['batch_size']
    val_loss_cur_epoch = 0.0
    for i in range(self.n_val_iter):
      val_loss = self.val_step()
      val_loss_cur_epoch = val_loss_cur_epoch + val_loss
    self.val_loss_ave_epoch.append(val_loss_cur_epoch / self.n_val_iter)
    # save the model parameters at the end of this epoch
    self.model.save(self.sess)
    print(
      "{}/{}, test loss: -elbo: {:.4f}, recons_loss_weighted: {:.4f}, recons_loss_ls: {:.4f}, KL_loss: {:.4f}, sigma_regularisor: {:.4f}, code_std_dev: {}".format(
        self.cur_epoch,
        self.config['num_epochs_vae'] - 1,
        test_loss,
        test_recons_loss_weighted,
        np.squeeze(np.mean(test_recons_loss_ls)),
        test_kl,
        test_sigma_regularisor,
        np.squeeze(test_code_std_norm)))
    print("Loss on training and val sets:\ntrain: {:.4f}, val: {:.4f}".format(
      self.train_loss_ave_epoch[self.cur_epoch],
      self.val_loss_ave_epoch[self.cur_epoch]))
    print("Current sigma2: {:.7f}".format(test_cur_sigma2))
    # save the current variables
    self.save_variables_VAE()
    # reconstruction plot
    self.plot_reconstructed_signal()
    # generate samples from prior
    self.generate_samples_from_prior()
    # plot the training and validation loss over iterations/epochs
    self.plot_train_and_val_loss()
  def train_step(self):
    """Run one gradient step on the next training batch; return the ELBO loss.

    The learning rate decays geometrically (factor 0.98) per epoch.
    """
    batch_image = self.sess.run(self.model.input_image)
    feed_dict = {self.model.original_signal: batch_image,
                 self.model.is_code_input: False,
                 # Dummy code input; unused because is_code_input is False.
                 self.model.code_input: np.zeros((1, self.config['code_size'])),
                 self.model.lr: self.config['learning_rate_vae'] * (0.98 ** self.cur_epoch)}
    train_loss, _ = self.sess.run([self.model.elbo_loss, self.model.train_step_gradient],
                                  feed_dict=feed_dict)
    return train_loss
  def val_step(self):
    """Evaluate one validation batch (no gradient step); return the ELBO loss.

    Also appends the validation loss, reconstruction loss and KL loss to
    the corresponding tracking lists.
    """
    input_image_val = self.sess.run(self.model.input_image)
    val_cost, recon_loss_val, kl_loss_val, std_dev_loss_val = self.sess.run([self.model.elbo_loss,
                                                                            self.model.ls_reconstruction_error,
                                                                            self.model.KL_loss,
                                                                            self.model.std_dev_norm],
                                                                           feed_dict={
                                                                             self.model.original_signal: input_image_val,
                                                                             self.model.is_code_input: False,
                                                                             self.model.code_input: np.zeros(
                                                                               (1, self.config['code_size']))})
    self.val_loss.append(np.squeeze(val_cost))
    self.recons_loss_val.append(np.squeeze(np.mean(recon_loss_val)))
    self.KL_loss_val.append(kl_loss_val)
    return val_cost
  def test_step(self):
    """Evaluate the whole test set; cache decodings and track sigma2.

    Stores the decoded reconstructions in self.output_test (used by
    plot_reconstructed_signal) and appends the current sigma2 estimate to
    self.test_sigma2. Returns the loss components for logging.
    """
    feed_dict = {self.model.original_signal: self.data.test_set_vae['data'],
                 self.model.is_code_input: False,
                 self.model.code_input: np.zeros((1, self.config['code_size']))}
    self.output_test, test_loss, test_recons_loss_weighted, test_kl, test_sigma_regularisor, test_code_std_norm, test_cur_sigma2, test_recons_loss_ls = self.sess.run(
      [self.model.decoded,
       self.model.elbo_loss,
       self.model.weighted_reconstruction_error_dataset,
       self.model.KL_loss,
       self.model.sigma_regularisor_dataset,
       self.model.std_dev_norm,
       self.model.sigma2,
       self.model.ls_reconstruction_error],
      feed_dict=feed_dict)
    self.test_sigma2.append(np.squeeze(test_cur_sigma2))
    return test_loss, test_recons_loss_weighted, test_kl, test_sigma_regularisor, test_code_std_norm, np.squeeze(
      test_cur_sigma2), test_recons_loss_ls
  def plot_reconstructed_signal(self):
    """Save per-channel PDF plots of original vs. reconstructed test signals.

    Uses the decodings cached by test_step() (self.output_test). For each
    channel, a 4x5 grid of the first 20 test windows is written to
    result_dir as 'test_reconstructed_<epoch>_<channel>.pdf'.
    """
    input_images = np.squeeze(self.data.test_set_vae['data'])
    decoded_images = np.squeeze(self.output_test)
    n_images = 20
    # plot the reconstructed image for a shape
    for j in range(self.config['n_channel']):
      fig, axs = plt.subplots(4, 5, figsize=(18, 10), edgecolor='k')
      fig.subplots_adjust(hspace=.4, wspace=.4)
      axs = axs.ravel()
      for i in range(n_images):
        # With a single channel np.squeeze dropped the channel axis,
        # so the arrays are indexed differently.
        if self.config['n_channel'] == 1:
          axs[i].plot(input_images[i])
          axs[i].plot(decoded_images[i])
        else:
          axs[i].plot(input_images[i, :, j])
          axs[i].plot(decoded_images[i, :, j])
        axs[i].grid(True)
        axs[i].set_xlim(0, self.config['l_win'])
        axs[i].set_ylim(-5, 5)
        if i == 19:
          # Legend only on the last subplot to avoid clutter.
          axs[i].legend(('original', 'reconstructed'))
      plt.suptitle('Channel {}'.format(j))
      savefig(self.config['result_dir'] + 'test_reconstructed_{}_{}.pdf'.format(self.cur_epoch, j))
      fig.clf()
      plt.close()
  def generate_samples_from_prior(self):
    """Decode 20 latent codes drawn from the standard-normal prior and plot them.

    Samples codes from N(0, I) of dimension config['code_size'], decodes
    them (is_code_input=True bypasses the encoder; the zero signal is a
    dummy placeholder), and saves per-channel 4x5 grids to result_dir as
    'generated_samples_<epoch>_<channel>.pdf'.
    """
    rv = multivariate_normal(np.zeros(self.config['code_size']), np.diag(np.ones(self.config['code_size'])))
    # Generate a batch size of samples from the prior samples
    n_images = 20
    samples_code_prior = rv.rvs(n_images)
    sampled_images = self.sess.run(self.model.decoded,
                                   feed_dict={self.model.original_signal: np.zeros(
                                     (n_images, self.config['l_win'], self.config['n_channel'])),
                                              self.model.is_code_input: True,
                                              self.model.code_input: samples_code_prior})
    sampled_images = np.squeeze(sampled_images)
    for j in range(self.config['n_channel']):
      fig, axs = plt.subplots(4, 5, figsize=(18, 10), edgecolor='k')
      fig.subplots_adjust(hspace=.4, wspace=.4)
      axs = axs.ravel()
      for i in range(n_images):
        # Single-channel data lost its channel axis to np.squeeze above.
        if self.config['n_channel'] == 1:
          axs[i].plot(sampled_images[i])
        else:
          axs[i].plot(sampled_images[i, :, j])
        axs[i].grid(True)
        axs[i].set_xlim(0, self.config['l_win'])
        axs[i].set_ylim(-5, 5)
      plt.suptitle('Channel {}'.format(j))
      savefig(self.config['result_dir'] + 'generated_samples_{}_{}.pdf'.format(self.cur_epoch, j))
      fig.clf()
plt.close() | [
"numpy.mean",
"matplotlib.pylab.subplots",
"numpy.ones",
"numpy.squeeze",
"numpy.zeros",
"matplotlib.pylab.close"
] | [((5772, 5814), 'numpy.squeeze', 'np.squeeze', (["self.data.test_set_vae['data']"], {}), "(self.data.test_set_vae['data'])\n", (5782, 5814), True, 'import numpy as np\n'), ((5836, 5864), 'numpy.squeeze', 'np.squeeze', (['self.output_test'], {}), '(self.output_test)\n', (5846, 5864), True, 'import numpy as np\n'), ((7434, 7460), 'numpy.squeeze', 'np.squeeze', (['sampled_images'], {}), '(sampled_images)\n', (7444, 7460), True, 'import numpy as np\n'), ((3266, 3305), 'numpy.zeros', 'np.zeros', (["(1, self.config['code_size'])"], {}), "((1, self.config['code_size']))\n", (3274, 3305), True, 'import numpy as np\n'), ((4633, 4653), 'numpy.squeeze', 'np.squeeze', (['val_cost'], {}), '(val_cost)\n', (4643, 4653), True, 'import numpy as np\n'), ((4976, 5015), 'numpy.zeros', 'np.zeros', (["(1, self.config['code_size'])"], {}), "((1, self.config['code_size']))\n", (4984, 5015), True, 'import numpy as np\n'), ((5526, 5553), 'numpy.squeeze', 'np.squeeze', (['test_cur_sigma2'], {}), '(test_cur_sigma2)\n', (5536, 5553), True, 'import numpy as np\n'), ((5657, 5684), 'numpy.squeeze', 'np.squeeze', (['test_cur_sigma2'], {}), '(test_cur_sigma2)\n', (5667, 5684), True, 'import numpy as np\n'), ((5993, 6044), 'matplotlib.pylab.subplots', 'plt.subplots', (['(4)', '(5)'], {'figsize': '(18, 10)', 'edgecolor': '"""k"""'}), "(4, 5, figsize=(18, 10), edgecolor='k')\n", (6005, 6044), True, 'import matplotlib.pylab as plt\n'), ((6723, 6734), 'matplotlib.pylab.close', 'plt.close', ([], {}), '()\n', (6732, 6734), True, 'import matplotlib.pylab as plt\n'), ((6806, 6840), 'numpy.zeros', 'np.zeros', (["self.config['code_size']"], {}), "(self.config['code_size'])\n", (6814, 6840), True, 'import numpy as np\n'), ((7524, 7575), 'matplotlib.pylab.subplots', 'plt.subplots', (['(4)', '(5)'], {'figsize': '(18, 10)', 'edgecolor': '"""k"""'}), "(4, 5, figsize=(18, 10), edgecolor='k')\n", (7536, 7575), True, 'import matplotlib.pylab as plt\n'), ((8094, 8105), 'matplotlib.pylab.close', 'plt.close', ([], 
{}), '()\n', (8103, 8105), True, 'import matplotlib.pylab as plt\n'), ((1035, 1051), 'numpy.squeeze', 'np.squeeze', (['loss'], {}), '(loss)\n', (1045, 1051), True, 'import numpy as np\n'), ((2463, 2493), 'numpy.squeeze', 'np.squeeze', (['test_code_std_norm'], {}), '(test_code_std_norm)\n', (2473, 2493), True, 'import numpy as np\n'), ((4698, 4721), 'numpy.mean', 'np.mean', (['recon_loss_val'], {}), '(recon_loss_val)\n', (4705, 4721), True, 'import numpy as np\n'), ((6850, 6883), 'numpy.ones', 'np.ones', (["self.config['code_size']"], {}), "(self.config['code_size'])\n", (6857, 6883), True, 'import numpy as np\n'), ((2375, 2403), 'numpy.mean', 'np.mean', (['test_recons_loss_ls'], {}), '(test_recons_loss_ls)\n', (2382, 2403), True, 'import numpy as np\n'), ((4485, 4524), 'numpy.zeros', 'np.zeros', (["(1, self.config['code_size'])"], {}), "((1, self.config['code_size']))\n", (4493, 4524), True, 'import numpy as np\n'), ((7137, 7205), 'numpy.zeros', 'np.zeros', (["(n_images, self.config['l_win'], self.config['n_channel'])"], {}), "((n_images, self.config['l_win'], self.config['n_channel']))\n", (7145, 7205), True, 'import numpy as np\n')] |
import astropy.io.fits as pft
import numpy as np
from astropy.time import Time
import itertools
from astropy.stats import sigma_clipped_stats
from scipy.signal import convolve
def group_image_pairs(file_list, by_next=False, by_exptime=False):
    """Group image filenames into pairs for difference-image analysis.

    Parameters
    ----------
    file_list : sequence of str
        Image filenames.
    by_next : bool
        Pair consecutive files: (0, 1), (2, 3), ...
    by_exptime : bool
        Pair every combination of files that share the same FITS EXPTIME.

    When neither flag is set, all pairwise combinations are returned.

    Returns
    -------
    list
        A list of 2-element pairs of filenames.
    """
    if by_next:
        # BUG FIX: the original used range(int(len/2) - 1), which always
        # dropped the final consecutive pair.
        return [[file_list[2 * i], file_list[2 * i + 1]]
                for i in range(len(file_list) // 2)]
    if by_exptime:
        # BUG FIX: the original referenced an undefined name `flist` here
        # (NameError) and used the removed alias np.float.
        files = np.asarray(file_list)
        exp_times = np.array([float(pft.open(f)[0].header['EXPTIME'])
                              for f in files])
        print(list(set(exp_times)))
        image_pair_lists = []
        # Iterate each *unique* exposure time once; looping over every raw
        # entry repeated each group of combinations once per member file.
        for e in np.unique(exp_times):
            idx = np.where(exp_times == e)[0]
            image_pair_lists += list(itertools.combinations(files[idx], 2))
        return image_pair_lists
    return list(itertools.combinations(file_list, 2))
def diff_image_stats(img1, img2, sigma_clip=True):
    """Return (mean, median, variance/2) of the pixel-wise difference image.

    Parameters
    ----------
    img1, img2 : ndarray
        Images of identical shape; statistics are computed on img1 - img2.
    sigma_clip : bool
        When True, use astropy's sigma-clipped statistics to reject outliers.

    Returns
    -------
    tuple
        (mean, median, var); var is halved because differencing two equally
        noisy images doubles the variance of a single image.
    """
    diff_img = (img1 - img2).flatten()
    if sigma_clip:
        mean, med, stddev = sigma_clipped_stats(diff_img)
        var = stddev**2. / 2.
    else:
        mean = np.mean(diff_img)
        med = np.median(diff_img)
        # BUG FIX: np.stddev does not exist (NameError at runtime); the
        # intended function is np.std.
        var = np.std(diff_img)**2. / 2.
    return mean, med, var
def rebin_image(img, bin_row, bin_col):
    """Rebin a 2-D image by summing over non-overlapping bin_row x bin_col blocks.

    Parameters
    ----------
    img : 2-D ndarray
        Input image.
    bin_row, bin_col : int
        Block size along the row and column axes.

    Returns
    -------
    ndarray
        Binned image; each output pixel is the sum of one block.
    """
    # BUG FIX: np.int was removed in NumPy 1.24; use the builtin int dtype.
    kernel = np.ones((bin_row, bin_col), dtype=int)
    summed = convolve(img, kernel, mode='valid')
    # Sample every bin-th position so the summed windows do not overlap.
    return summed[::bin_row, ::bin_col]
| [
"numpy.mean",
"scipy.signal.convolve",
"numpy.median",
"numpy.ones",
"numpy.where",
"itertools.combinations",
"numpy.stddev",
"astropy.io.fits.open",
"astropy.stats.sigma_clipped_stats"
] | [((1312, 1347), 'scipy.signal.convolve', 'convolve', (['img', 'kernel'], {'mode': '"""valid"""'}), "(img, kernel, mode='valid')\n", (1320, 1347), False, 'from scipy.signal import convolve\n'), ((1004, 1033), 'astropy.stats.sigma_clipped_stats', 'sigma_clipped_stats', (['diff_img'], {}), '(diff_img)\n', (1023, 1033), False, 'from astropy.stats import sigma_clipped_stats\n'), ((1087, 1104), 'numpy.mean', 'np.mean', (['diff_img'], {}), '(diff_img)\n', (1094, 1104), True, 'import numpy as np\n'), ((1119, 1138), 'numpy.median', 'np.median', (['diff_img'], {}), '(diff_img)\n', (1128, 1138), True, 'import numpy as np\n'), ((1261, 1288), 'numpy.ones', 'np.ones', (['(bin_row, bin_col)'], {}), '((bin_row, bin_col))\n', (1268, 1288), True, 'import numpy as np\n'), ((801, 837), 'itertools.combinations', 'itertools.combinations', (['file_list', '(2)'], {}), '(file_list, 2)\n', (823, 837), False, 'import itertools\n'), ((1153, 1172), 'numpy.stddev', 'np.stddev', (['diff_img'], {}), '(diff_img)\n', (1162, 1172), True, 'import numpy as np\n'), ((586, 610), 'numpy.where', 'np.where', (['(exp_times == e)'], {}), '(exp_times == e)\n', (594, 610), True, 'import numpy as np\n'), ((680, 713), 'itertools.combinations', 'itertools.combinations', (['imlist', '(2)'], {}), '(imlist, 2)\n', (702, 713), False, 'import itertools\n'), ((425, 436), 'astropy.io.fits.open', 'pft.open', (['f'], {}), '(f)\n', (433, 436), True, 'import astropy.io.fits as pft\n')] |
import unittest
import pytexcount.parser as P
from pytexcount.count import WordCounter
class LexerTestCase(unittest.TestCase):
    """Unit tests for the tokenizer in ``pytexcount.parser``."""
    def test_lexer_base(self):
        """Tokenizing a small TeX snippet yields the expected token stream."""
        expected_tokens = [
            P.Token(P.TokenType.CHAR, 'a'),
            P.Token(P.TokenType.SPACE, ' '),
            P.Token(P.TokenType.BACKSLASH, '\\'),
            P.Token(P.TokenType.CHAR, 't'),
            P.Token(P.TokenType.LCBRACE, '{'),
            P.Token(P.TokenType.CHAR, 'x'),
            P.Token(P.TokenType.RCBRACE, '}'),
            P.Token(P.TokenType.EOS, '\0'),
        ]
        actual_tokens = P.Lexer('a \\t{x}').tokenize()
        for position, actual in enumerate(actual_tokens):
            reference = expected_tokens[position]
            self.assertEqual(actual.type, reference.type)
            self.assertEqual(actual.value, reference.value)
class ParserTestCase(unittest.TestCase):
    """Unit tests for the pytexcount recursive-descent TeX parser."""
    def parse(self, text):
        """Parse ``text`` and return the root node of the resulting tree."""
        return P.Parser(text).parse()
    def test_parser_text(self):
        """Plain text becomes a single P.Text child holding the raw string."""
        text = 'xy'
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 1)
        self.assertIsInstance(tree.children[0], P.Text)
        self.assertEqual(tree.children[0].text, text)
    def test_parser_text_with_comment(self):
        """Everything from '%' onwards is excluded from the parsed text."""
        text = 'xy %a'
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 1)
        self.assertIsInstance(tree.children[0], P.Text)
        self.assertEqual(tree.children[0].text, text[:text.find('%')])
    def test_parser_enclosed(self):
        """'[...]' yields a P.Enclosed node between the surrounding text nodes."""
        bef_text = 'x'
        enclosed_text = 'a'
        aft_text = 'y'
        text = '{}[{}]{}'.format(bef_text, enclosed_text, aft_text)
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 3)
        self.assertIsInstance(tree.children[0], P.Text)
        self.assertEqual(tree.children[0].text, bef_text)
        self.assertIsInstance(tree.children[1], P.Enclosed)
        self.assertEqual(tree.children[1].opening, P.TokenType.LSBRACE)
        self.assertEqual(tree.children[1].closing(), P.TokenType.RSBRACE)
        self.assertEqual(tree.children[1].children[0].text, enclosed_text)
        self.assertIsInstance(tree.children[2], P.Text)
        self.assertEqual(tree.children[2].text, aft_text)
    def test_parser_macro(self):
        """A macro with one optional and one mandatory argument is captured."""
        name = 'test'
        optarg = 'x'
        mandarg = 'y'
        text = '\\{}[{}]{{{}}}'.format(name, optarg, mandarg)
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 1)
        self.assertIsInstance(tree.children[0], P.Macro)
        macro: P.Macro = tree.children[0]
        self.assertEqual(macro.name, name)
        self.assertEqual(len(macro.arguments), 2)
        self.assertEqual(macro.arguments[0].children[0].text, optarg)
        self.assertTrue(macro.arguments[0].optional)
        self.assertEqual(macro.arguments[1].children[0].text, mandarg)
        self.assertFalse(macro.arguments[1].optional)
    def test_macro_name(self):
        """Macro names may contain digits, '*' and '@' but stop at '_' or '^'."""
        def mc(t) -> P.Macro:
            return P.Parser(t).escape_or_macro()
        self.assertEqual(mc('\\test').name, 'test')
        self.assertEqual(mc('\\test2').name, 'test2')
        self.assertEqual(mc('\\test*').name, 'test*')
        self.assertEqual(mc('\\x@test').name, 'x@test')
        self.assertEqual(mc('\\test_2').name, 'test')
        self.assertEqual(mc('\\test^2').name, 'test')
    def test_parser_math(self):
        """'$...$' produces a P.MathDollarEnv node after the leading text."""
        textpart = 'a'
        mathpart = 'x'
        text = '{}${}$'.format(textpart, mathpart)
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 2)
        self.assertIsInstance(tree.children[0], P.Text)
        self.assertEqual(tree.children[0].text, textpart)
        self.assertIsInstance(tree.children[1], P.MathDollarEnv)
        mathenv = tree.children[1]
        self.assertEqual(mathenv.children[0].text, mathpart)
    def test_parser_env(self):
        """A \\begin/\\end pair becomes a named P.Environment node."""
        name = 'test'
        content = 'tmp'
        text = '\\begin{{{name}}}{ctn}\\end{{{name}}}'.format(name=name, ctn=content)
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 1)
        self.assertIsInstance(tree.children[0], P.Environment)
        self.assertEqual(tree.children[0].name, name)
        self.assertEqual(tree.children[0].children[0].text, content)
    def test_parser_env_in_env(self):
        """Environment in environment (with the same name)
        """
        text = 'a\\begin{test}b\\begin{test}c\\end{test}d\\end{test}e'
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 3)
        self.assertIsInstance(tree.children[0], P.Text)
        self.assertEqual(tree.children[0].text, 'a')
        self.assertIsInstance(tree.children[1], P.Environment)
        self.assertIsInstance(tree.children[2], P.Text)
        self.assertEqual(tree.children[2].text, 'e')
        env: P.Environment = tree.children[1]
        self.assertEqual(env.name, 'test')
        self.assertEqual(len(env.children), 3)
        self.assertIsInstance(env.children[0], P.Text)
        self.assertEqual(env.children[0].text, 'b')
        self.assertIsInstance(env.children[1], P.Environment)
        self.assertEqual(env.children[1].children[0].text, 'c')
        self.assertIsInstance(env.children[2], P.Text)
        self.assertEqual(env.children[2].text, 'd')
    def test_unary(self):
        """'_' attaches its braced argument as a P.UnaryOperator node."""
        content = 'xy'
        text = '\\alpha_{{{}}}'.format(content)
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 2)
        self.assertIsInstance(tree.children[0], P.Macro)
        self.assertIsInstance(tree.children[1], P.UnaryOperator)
        self.assertEqual(tree.children[1].children[0].text, content)
    def test_separator(self):
        """'&' becomes a P.Separator node between the two text chunks."""
        bef = 'a'
        aft = 'b'
        text = '{}&{}'.format(bef, aft)
        tree = self.parse(text)
        self.assertEqual(len(tree.children), 3)
        self.assertIsInstance(tree.children[0], P.Text)
        self.assertEqual(tree.children[0].text, bef)
        self.assertIsInstance(tree.children[1], P.Separator)
        self.assertIsInstance(tree.children[2], P.Text)
        self.assertEqual(tree.children[2].text, aft)
class WordCountTestCase(unittest.TestCase):
    """Unit tests for WordCounter applied to parsed TeX trees."""
    def count(self, text, exclude_env=None, include_macro=None, macro_as_word=None):
        """Parse ``text`` and count its words under the given options."""
        syntax_tree = P.Parser(text).parse()
        counter = WordCounter(exclude_env, include_macro, macro_as_word)
        return counter(syntax_tree)
    def test_text(self):
        """Whitespace-separated chunks of plain prose each count as one word."""
        self.assertEqual(self.count('this is a test'), 4)
        self.assertEqual(self.count('this is the 2nd test'), 5)
    def test_enclosed(self):
        """Bracketed text still contributes its words."""
        self.assertEqual(self.count('a [b] c'), 3)
    def test_macro(self):
        """Macro arguments are skipped unless the macro is explicitly included."""
        self.assertEqual(self.count('\\test{x}{y}'), 0)
        self.assertEqual(self.count('\\test{x}{y}', include_macro=['test']), 2)
    def test_macro_as_word(self):
        """A macro listed in macro_as_word counts as one word itself."""
        self.assertEqual(self.count('a \\test'), 1)
        self.assertEqual(self.count('a \\test', macro_as_word=['test']), 2)
    def test_environment(self):
        """Environment contents count unless the environment is excluded."""
        self.assertEqual(self.count('\\begin{test}two words\\end{test}'), 2)
        self.assertEqual(self.count('\\begin{test}two words\\end{test}', exclude_env=['test']), 0)
    def test_tabular(self):
        """Cell separators ('&') do not merge the adjacent words."""
        self.assertEqual(self.count('\\begin{tabular}a & b\\end{tabular}'), 2)
    def test_unary(self):
        """A subscripted macro listed in macro_as_word counts once."""
        self.assertEqual(self.count('\\alpha_{xx}', macro_as_word=['alpha']), 1)
| [
"pytexcount.parser.Lexer",
"pytexcount.parser.Parser",
"pytexcount.count.WordCounter",
"pytexcount.parser.Token"
] | [((195, 225), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.CHAR', '"""a"""'], {}), "(P.TokenType.CHAR, 'a')\n", (202, 225), True, 'import pytexcount.parser as P\n'), ((239, 270), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.SPACE', '""" """'], {}), "(P.TokenType.SPACE, ' ')\n", (246, 270), True, 'import pytexcount.parser as P\n'), ((284, 320), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.BACKSLASH', '"""\\\\"""'], {}), "(P.TokenType.BACKSLASH, '\\\\')\n", (291, 320), True, 'import pytexcount.parser as P\n'), ((334, 364), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.CHAR', '"""t"""'], {}), "(P.TokenType.CHAR, 't')\n", (341, 364), True, 'import pytexcount.parser as P\n'), ((378, 411), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.LCBRACE', '"""{"""'], {}), "(P.TokenType.LCBRACE, '{')\n", (385, 411), True, 'import pytexcount.parser as P\n'), ((425, 455), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.CHAR', '"""x"""'], {}), "(P.TokenType.CHAR, 'x')\n", (432, 455), True, 'import pytexcount.parser as P\n'), ((469, 502), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.RCBRACE', '"""}"""'], {}), "(P.TokenType.RCBRACE, '}')\n", (476, 502), True, 'import pytexcount.parser as P\n'), ((516, 548), 'pytexcount.parser.Token', 'P.Token', (['P.TokenType.EOS', "'\\x00'"], {}), "(P.TokenType.EOS, '\\x00')\n", (523, 548), True, 'import pytexcount.parser as P\n'), ((6229, 6283), 'pytexcount.count.WordCounter', 'WordCounter', (['exclude_env', 'include_macro', 'macro_as_word'], {}), '(exclude_env, include_macro, macro_as_word)\n', (6240, 6283), False, 'from pytexcount.count import WordCounter\n'), ((831, 845), 'pytexcount.parser.Parser', 'P.Parser', (['text'], {}), '(text)\n', (839, 845), True, 'import pytexcount.parser as P\n'), ((6191, 6205), 'pytexcount.parser.Parser', 'P.Parser', (['text'], {}), '(text)\n', (6199, 6205), True, 'import pytexcount.parser as P\n'), ((592, 611), 'pytexcount.parser.Lexer', 'P.Lexer', (['"""a 
\\\\t{x}"""'], {}), "('a \\\\t{x}')\n", (599, 611), True, 'import pytexcount.parser as P\n'), ((2911, 2922), 'pytexcount.parser.Parser', 'P.Parser', (['t'], {}), '(t)\n', (2919, 2922), True, 'import pytexcount.parser as P\n')] |
import os
class Config:
SQLALCHEMY_TRACK_MODIFICATIONS = False
CSRF_ENABLED = True
FLASK_DEBUG = True
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL', 'mysql+mysqlconnector://root:root@localhost:8889/lak_db')
DATABASE_URL = os.environ.get('CLEARDB_DATABASE_URL', 'mysql+mysqlconnector://root:root@localhost:8889/hnuvs_db')
PORT = int(os.environ.get('PORT', '5000'))
SECRET_KEY = os.environ.get('SECRET_KEY', 'SITE_BOMBS')
| [
"os.environ.get"
] | [((146, 238), 'os.environ.get', 'os.environ.get', (['"""DATABASE_URL"""', '"""mysql+mysqlconnector://root:root@localhost:8889/lak_db"""'], {}), "('DATABASE_URL',\n 'mysql+mysqlconnector://root:root@localhost:8889/lak_db')\n", (160, 238), False, 'import os\n'), ((254, 356), 'os.environ.get', 'os.environ.get', (['"""CLEARDB_DATABASE_URL"""', '"""mysql+mysqlconnector://root:root@localhost:8889/hnuvs_db"""'], {}), "('CLEARDB_DATABASE_URL',\n 'mysql+mysqlconnector://root:root@localhost:8889/hnuvs_db')\n", (268, 356), False, 'import os\n'), ((417, 459), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""', '"""SITE_BOMBS"""'], {}), "('SECRET_KEY', 'SITE_BOMBS')\n", (431, 459), False, 'import os\n'), ((368, 398), 'os.environ.get', 'os.environ.get', (['"""PORT"""', '"""5000"""'], {}), "('PORT', '5000')\n", (382, 398), False, 'import os\n')] |
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
class ChatstateProtocolEntity(ProtocolEntity):
    '''
    Represents a "chatstate" stanza used to signal typing activity.

    Incoming stanzas carry a "from" attribute and outgoing ones a "to"
    attribute; in both directions the single child node is either
    <composing> or <paused>.
    '''
    STATE_TYPING = "composing"
    STATE_PAUSED = "paused"
    STATES = (STATE_TYPING, STATE_PAUSED)

    def __init__(self, _state):
        super(ChatstateProtocolEntity, self).__init__("chatstate")
        assert _state in self.__class__.STATES, "Expected chat state to be in %s, got %s" % (self.__class__.STATES, _state)
        self._state = _state

    def getState(self):
        # Accessor for the current chat state string.
        return self._state

    def toProtocolTreeNode(self):
        # The state is encoded as the tag of the single child node.
        tree_node = self._createProtocolTreeNode({}, None, data=None)
        tree_node.addChild(ProtocolTreeNode(self._state))
        return tree_node

    def __str__(self):
        return "CHATSTATE:\n" + "State: %s\n" % self._state

    @staticmethod
    def fromProtocolTreeNode(node):
        # Reverse of toProtocolTreeNode: the first child's tag is the state.
        state_node = node.getAllChildren()[0]
        return ChatstateProtocolEntity(state_node.tag)
| [
"yowsup.structs.ProtocolTreeNode"
] | [((878, 907), 'yowsup.structs.ProtocolTreeNode', 'ProtocolTreeNode', (['self._state'], {}), '(self._state)\n', (894, 907), False, 'from yowsup.structs import ProtocolEntity, ProtocolTreeNode\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 12 09:36:00 2019
@author: minjie
"""
import SimpleITK as sitk
import numpy as np
from pathlib import Path
import os
import cv2
import pydicom
import h5py
from scipy.ndimage import binary_dilation
# Input paths: two .mha segmentation volumes and the DICOM folder of case 01.
fn1 = 'resources/CT/01/data/A_1.mha'
fn2 = 'resources/CT/01/data/PV_1.mha'
dicom_fd = './resources/CT/01/data/'
# One DICOM sub-folder per contrast phase; the first ('V') is used below to
# enumerate the slice files.
name_chs = ['V','A','PV']
# Destination HDF5 file for labels, raw volumes and per-class weights.
out_h5py = './resources/ct01m.h5'
# Segmentation classes: background plus three vessel labels.
n_class = 4
# Per-class loss weights outside / inside the dilated vessel ROI.
w_class = [0.1, 0.2, 0.2 ,0.2]
w_class_in_roi = [1.0, 2.0, 2.0 ,2.0]
# Load the label volumes; the z-axis is reversed ([::-1]) — presumably to
# match the slice order of the DICOM stack read below (TODO confirm).
itkimage1 = sitk.ReadImage(fn1)
labels1 = (sitk.GetArrayFromImage(itkimage1)[::-1,:,:]==1).astype('int') # "dongmai" = artery (label 1 of the A volume)
itkimage2 = sitk.ReadImage(fn2)
labels2 = (sitk.GetArrayFromImage(itkimage2)[::-1,:,:]==1).astype('int') # "mengjinmai" = portal vein (label 1 of the PV volume)
labels3 = (sitk.GetArrayFromImage(itkimage2)[::-1,:,:]==2).astype('int') # label 2 of the PV volume (original comment also said "mengjinmai"; verify which vessel)
# Background mask: voxels covered by none of the vessel classes.
labels0 = (labels1+labels2+labels3 ==0).astype('int')
# Dilate each vessel mask with a 5x5x5 structuring element so the ROI is
# slightly larger than the vessels themselves.
labels1_dilate = binary_dilation(labels1,structure = np.ones((5,5,5))).astype(labels1.dtype)
labels2_dilate = binary_dilation(labels2,structure = np.ones((5,5,5))).astype(labels2.dtype)
labels3_dilate = binary_dilation(labels3,structure = np.ones((5,5,5))).astype(labels3.dtype)
# One-hot label tensor of shape (n_class, z, y, x).
labels = np.stack((labels0,labels1,labels2,labels3),axis = 0)
# Binary union of the dilated vessel masks (the region of interest).
labels_dilate = ((labels1_dilate + labels2_dilate + labels3_dilate)>0).astype('int')
#%%
# Slice files of the first phase folder; the slice count plus the fixed
# 512x512 in-plane size define the raw volume dimensions.
flist = [str(fn) for fn in (Path(dicom_fd)/name_chs[0]).glob('*')]
n_slice = len(flist)
row,col = 512,512
# Tight bounding box of the dilated-vessel ROI, per axis (z, y, x).
labels_pos = np.where(labels_dilate==1)
z_min,z_max = labels_pos[0].min(),labels_pos[0].max()
y_min,y_max = labels_pos[1].min(),labels_pos[1].max()
x_min,x_max = labels_pos[2].min(),labels_pos[2].max()
# Pad the box (8 slices in z, 16 pixels in-plane), clamped to the volume.
# NOTE(review): the upper clamps use shape rather than shape-1; the later
# slicing tolerates this because numpy clips out-of-range stop indices.
z_min = max(0,z_min - 8)
z_max = min(labels.shape[1],z_max + 8)
y_min = max(0,y_min - 16)
y_max = min(labels.shape[2],y_max + 16)
x_min = max(0,x_min - 16)
x_max = min(labels.shape[3],x_max + 16)
# Record the crop box so the original coordinates can be recovered later.
xyz = [x_min,x_max,y_min,y_max,z_min,z_max]
# Crop labels and the ROI mask to the padded bounding box (stop+1 to make
# the max indices inclusive).
labels = labels[:,z_min:z_max+1,y_min:y_max+1,x_min:x_max+1]
labels_dilate = labels_dilate[z_min:z_max+1,y_min:y_max+1,x_min:x_max+1]
#%%
# Per-voxel loss-weight volume, same shape as the one-hot labels: each class
# channel gets its base weight everywhere and the boosted weight inside the
# dilated vessel ROI.
weights = np.zeros_like(labels,dtype = 'float32')
for i in range(n_class):
    weights[i] = w_class[i]
    weights[i][labels_dilate==1] = w_class_in_roi[i]
weights = weights.astype('float32')
labels = labels.astype('uint8')
#%%
# Read the three phase volumes slice by slice into (channel, z, 512, 512).
raw_im = np.zeros((3,n_slice,row,col),dtype = 'float32')
for fn in flist:
    with pydicom.dcmread(fn) as dc:
        img_dicom1 = (dc.pixel_array).copy()
    # Derive the matching slice paths in the other two phase folders by
    # swapping the phase directory name.
    fn2 = fn.replace('/'+name_chs[0] + '/','/'+name_chs[1] + '/')
    fn3 = fn.replace('/'+name_chs[0] + '/','/'+name_chs[2] + '/')
    with pydicom.dcmread(fn2) as dc:
        img_dicom2 = (dc.pixel_array).copy()
    with pydicom.dcmread(fn3) as dc:
        img_dicom3 = (dc.pixel_array).copy()
    fname = Path(fn).stem
    # Slice index is encoded in the file name, e.g. 'IM12' -> 12.
    idx = int(Path(fn).stem.replace('IM',''))
    raw_im[0,idx,:,:] = img_dicom1
    raw_im[1,idx,:,:] = img_dicom2
    raw_im[2,idx,:,:] = img_dicom3
# Rescale parameters come from the last DICOM read in the loop above; this
# assumes they are identical for every slice and phase — TODO confirm.
rs = int(dc.RescaleSlope)
ri = int(dc.RescaleIntercept)
# Window centre/width hard-coded (the tag-based values are commented out).
wc = 80#int(dc.WindowCenter)
wh = 240#int(dc.WindowWidth)
# Apply the DICOM rescale, then map the window [wc - wh/2, wc + wh/2] to
# [0, 1]...
raw_im = rs * raw_im + ri
raw_im = (raw_im.astype('float') - (wc - wh/2.0))/wh
#img_dicom = (img_dicom/2500.0).astype('float32')
# ...clip, shift to [-0.5, 0.5], and crop to the padded ROI box.
raw_im = np.clip(raw_im,0.0,1.0)-0.5
raw_im = raw_im[:,z_min:z_max+1,y_min:y_max+1,x_min:x_max+1].astype('float32')
#%%
#%%
#write data
# Earlier variants using 'lzf' / 'gzip' compression are kept commented out
# for reference.
#with h5py.File(out_h5py, 'w') as f:
#    f.create_dataset('label', data=labels,compression='lzf')
#    f.create_dataset('raw', data=raw_im,compression='lzf')
#    f.create_dataset('weight', data=weights,compression='lzf')
#    f.create_dataset('xyz', data=xyz)
#
##%%
#with h5py.File(out_h5py, 'w') as f:
#    f.create_dataset('label', data=labels,compression='gzip')
#    f.create_dataset('raw', data=raw_im,compression='gzip')
#    f.create_dataset('weight', data=weights,compression='gzip')
#    f.create_dataset('xyz', data=xyz)
#%%
# Write the cropped labels, raw volumes, weights and the crop box (xyz)
# uncompressed to the output HDF5 file.
with h5py.File(out_h5py, 'w') as f:
    f.create_dataset('label', data=labels)
    f.create_dataset('raw', data=raw_im)
    f.create_dataset('weight', data=weights)
    f.create_dataset('xyz', data=xyz)
| [
"numpy.clip",
"pydicom.dcmread",
"numpy.ones",
"pathlib.Path",
"numpy.where",
"SimpleITK.GetArrayFromImage",
"h5py.File",
"numpy.stack",
"numpy.zeros",
"SimpleITK.ReadImage",
"numpy.zeros_like"
] | [((541, 560), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['fn1'], {}), '(fn1)\n', (555, 560), True, 'import SimpleITK as sitk\n'), ((657, 676), 'SimpleITK.ReadImage', 'sitk.ReadImage', (['fn2'], {}), '(fn2)\n', (671, 676), True, 'import SimpleITK as sitk\n'), ((1198, 1252), 'numpy.stack', 'np.stack', (['(labels0, labels1, labels2, labels3)'], {'axis': '(0)'}), '((labels0, labels1, labels2, labels3), axis=0)\n', (1206, 1252), True, 'import numpy as np\n'), ((1463, 1491), 'numpy.where', 'np.where', (['(labels_dilate == 1)'], {}), '(labels_dilate == 1)\n', (1471, 1491), True, 'import numpy as np\n'), ((2050, 2088), 'numpy.zeros_like', 'np.zeros_like', (['labels'], {'dtype': '"""float32"""'}), "(labels, dtype='float32')\n", (2063, 2088), True, 'import numpy as np\n'), ((2279, 2328), 'numpy.zeros', 'np.zeros', (['(3, n_slice, row, col)'], {'dtype': '"""float32"""'}), "((3, n_slice, row, col), dtype='float32')\n", (2287, 2328), True, 'import numpy as np\n'), ((3218, 3243), 'numpy.clip', 'np.clip', (['raw_im', '(0.0)', '(1.0)'], {}), '(raw_im, 0.0, 1.0)\n', (3225, 3243), True, 'import numpy as np\n'), ((3914, 3938), 'h5py.File', 'h5py.File', (['out_h5py', '"""w"""'], {}), "(out_h5py, 'w')\n", (3923, 3938), False, 'import h5py\n'), ((2358, 2377), 'pydicom.dcmread', 'pydicom.dcmread', (['fn'], {}), '(fn)\n', (2373, 2377), False, 'import pydicom\n'), ((2591, 2611), 'pydicom.dcmread', 'pydicom.dcmread', (['fn2'], {}), '(fn2)\n', (2606, 2611), False, 'import pydicom\n'), ((2673, 2693), 'pydicom.dcmread', 'pydicom.dcmread', (['fn3'], {}), '(fn3)\n', (2688, 2693), False, 'import pydicom\n'), ((2776, 2784), 'pathlib.Path', 'Path', (['fn'], {}), '(fn)\n', (2780, 2784), False, 'from pathlib import Path\n'), ((572, 605), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['itkimage1'], {}), '(itkimage1)\n', (594, 605), True, 'import SimpleITK as sitk\n'), ((688, 721), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['itkimage2'], {}), '(itkimage2)\n', (710, 721), 
True, 'import SimpleITK as sitk\n'), ((774, 807), 'SimpleITK.GetArrayFromImage', 'sitk.GetArrayFromImage', (['itkimage2'], {}), '(itkimage2)\n', (796, 807), True, 'import SimpleITK as sitk\n'), ((960, 978), 'numpy.ones', 'np.ones', (['(5, 5, 5)'], {}), '((5, 5, 5))\n', (967, 978), True, 'import numpy as np\n'), ((1053, 1071), 'numpy.ones', 'np.ones', (['(5, 5, 5)'], {}), '((5, 5, 5))\n', (1060, 1071), True, 'import numpy as np\n'), ((1146, 1164), 'numpy.ones', 'np.ones', (['(5, 5, 5)'], {}), '((5, 5, 5))\n', (1153, 1164), True, 'import numpy as np\n'), ((1369, 1383), 'pathlib.Path', 'Path', (['dicom_fd'], {}), '(dicom_fd)\n', (1373, 1383), False, 'from pathlib import Path\n'), ((2804, 2812), 'pathlib.Path', 'Path', (['fn'], {}), '(fn)\n', (2808, 2812), False, 'from pathlib import Path\n')] |
import asyncio
import argparse
import http
import json
import os
import time
import subprocess
import sys
import websockets
def check(rm_hostname):
    """Return the tablet's model string read over SSH, or exit on failure."""
    ssh_command = [
        "ssh",
        "-o",
        "ConnectTimeout=2",
        rm_hostname,
        "cat",
        "/proc/device-tree/model",
    ]
    try:
        completed = subprocess.run(ssh_command, check=True, capture_output=True)
    except subprocess.CalledProcessError:
        print(f"Error: Can't connect to reMarkable tablet on hostname : {rm_hostname}")
        os._exit(1)
    # The model file starts with e.g. "reMarkable 2.0"; keep that prefix.
    return completed.stdout[:14].decode("utf-8")
async def refresh_ss(refresh):
    """Re-capture resnap.jpg unless the cached copy is newer than *refresh* seconds."""
    deadline = time.time() - refresh
    try:
        fresh_enough = os.stat("resnap.jpg").st_mtime >= deadline
    except FileNotFoundError:
        fresh_enough = False
    if fresh_enough:
        return
    try:
        print("running resnap")
        # Capture to a temporary name, then rename atomically over the old file.
        subprocess.run(['../scripts/host/resnap.sh', 'resnap.new.jpg'], check=True)
        os.rename('resnap.new.jpg', 'resnap.jpg')
        print("ok")
    except subprocess.CalledProcessError:
        print("Could not resnap, continuing anyway")
    except FileNotFoundError:
        print("resnap ran but file not created? continuing anyway...")
class WebsocketHandler():
    """Bridges the reMarkable pen-event stream (read over SSH) to browser
    websocket clients.

    The first connecting client starts the background SSH subprocess tasks;
    when the last client disconnects the subprocesses are killed again.
    """
    def __init__(self, rm_host, rm_model, args):
        """Pick the evdev input device for the tablet model.

        Raises NotImplementedError for unknown model strings.
        """
        if rm_model == "reMarkable 1.0":
            self.device = "/dev/input/event0"
        elif rm_model == "reMarkable 2.0":
            self.device = "/dev/input/event1"
        else:
            raise NotImplementedError(f"Unsupported reMarkable Device : {rm_model}")
        self.rm_host = rm_host
        self.args = args
        self.websockets = []   # currently connected clients
        self.running = False   # True while the background SSH tasks are alive

    async def websocket_broadcast(self, msg):
        """Send msg to every connected client, dropping clients that fail.

        BUG FIX: iterate over a *copy* of the client list. websocket_remove()
        mutates self.websockets, and removing from the list being iterated
        silently skipped the client after the failed one.
        """
        for websocket in list(self.websockets):
            try:
                await websocket.send(msg)
            except Exception as e:
                print("send error: %s ; removing client" % repr(e))
                await self.websocket_remove(websocket)

    async def websocket_handler(self, websocket, path):
        """Per-connection entry point registered with websockets.serve().

        The first client additionally starts the SSH reader tasks and waits
        on all of them; later clients only read their own socket.
        """
        self.websockets.append(websocket)
        if not self.running:
            self.running = True
            tasks = []
            tasks.append(asyncio.create_task(self.ssh_stream()))
            if self.args.autorefresh:
                tasks.append(asyncio.create_task(self.ssh_pagechange()))
            tasks.append(asyncio.create_task(self.read_websocket(websocket)))
            await asyncio.gather(*tasks)
        else:
            await self.read_websocket(websocket)

    async def websocket_remove(self, websocket):
        """Forget a client; when the last one leaves, stop the SSH tasks.

        BUG FIX: the subprocess handles are created lazily by ssh_stream()
        and ssh_pagechange(), so they may not exist yet (autorefresh
        disabled, or a client leaving before startup finished). Guard with
        getattr instead of assuming the attributes are set.
        """
        if websocket in self.websockets:
            self.websockets.remove(websocket)
        if not self.websockets:
            pagechange_proc = getattr(self, "pagechange_proc", None)
            if pagechange_proc is not None and pagechange_proc.returncode is None:
                pagechange_proc.kill()
            stream_proc = getattr(self, "stream_proc", None)
            if stream_proc is not None and stream_proc.returncode is None:
                stream_proc.kill()
            self.running = False

    async def read_websocket(self, websocket):
        """Drain messages from one client until its connection closes."""
        try:
            while True:
                msg = await websocket.recv()
                print(f"got {msg}")
        except (websockets.exceptions.ConnectionClosedOK,
                websockets.exceptions.ConnectionClosedError):
            print("Disconnecting client")
            await self.websocket_remove(websocket)

    async def ssh_pagechange(self):
        """Watch xochitl's document directory over SSH and broadcast a
        "redraw" message (at most every 2 s) when metadata files change.
        """
        command = f"ssh -o ConnectTimeout=2 {self.rm_host} /opt/bin/inotifywait -m -e CLOSE .local/share/remarkable/xochitl/"
        self.pagechange_proc = await asyncio.create_subprocess_shell(
            command, stdout=asyncio.subprocess.PIPE
        )
        print("Started pagechange watcher process")
        try:
            last = time.time()
            while self.pagechange_proc.returncode is None:
                buf = await self.pagechange_proc.stdout.read(1000)
                if not buf:
                    # EOF or not ready: poll for process exit for 1 s.
                    # BUG FIX: asyncio.wait_for raises asyncio.TimeoutError,
                    # which is not the builtin TimeoutError before 3.11.
                    try:
                        await asyncio.wait_for(self.pagechange_proc.wait(), 1)
                    except asyncio.TimeoutError:
                        continue
                    print(f"pagechange watcher return code {self.pagechange_proc.returncode}")
                    break
                if b'metadata' in buf:
                    now = time.time()
                    if now - last > 2:
                        print("page change")
                        # Give xochitl a moment to finish writing the page.
                        time.sleep(0.5)
                        await self.websocket_broadcast(json.dumps(("redraw",)))
                        last = now
        finally:
            print("pagechange watcher stopped.")
            if self.pagechange_proc.returncode is None:
                self.pagechange_proc.kill()

    async def ssh_stream(self):
        """Stream raw evdev pen events over SSH and broadcast decoded
        (x, y, pressure) tuples (throttled) plus eraser-driven redraws.
        """
        # The async subprocess library only accepts a string command, not a list.
        command = f"ssh -o ConnectTimeout=2 {self.rm_host} cat {self.device}"
        x = 0
        y = 0
        pressure = 0
        throttle = 0
        eraser = False
        self.stream_proc = await asyncio.create_subprocess_shell(
            command, stdout=asyncio.subprocess.PIPE
        )
        print("Started event stream process")
        try:
            # Keep looping as long as the process is alive.
            # Terminated websocket connection is handled with a throw.
            while self.stream_proc.returncode is None:
                buf = await self.stream_proc.stdout.read(16)
                if not buf:
                    # BUG FIX: catch asyncio.TimeoutError (see ssh_pagechange).
                    try:
                        await asyncio.wait_for(self.stream_proc.wait(), 1)
                    except asyncio.TimeoutError:
                        continue
                    break
                # TODO expect 16-bit chunks, or no data.
                # There are synchronisation signals in the data stream, maybe use those
                # if we drift somehow.
                if len(buf) == 16:
                    timestamp = buf[0:4]
                    a = buf[4:8]
                    b = buf[8:12]
                    c = buf[12:16]
                    # Using notes from https://github.com/ichaozi/RemarkableFramebuffer
                    # or https://github.com/canselcik/libremarkable/wiki
                    typ = b[0]
                    code = b[2] + b[3] * 0x100
                    val = c[0] + c[1] * 0x100 + c[2] * 0x10000 + c[3] * 0x1000000
                    # Pen side
                    if typ == 1:
                        # code = 320 = normal, 321 = erase
                        # val = 0 = off, 1 = closes in
                        # resend picture on 321 off?
                        if code == 321:
                            if val == 1:
                                eraser = True
                                print("eraser on")
                            else:
                                eraser = False
                                print("eraser off")
                                await self.websocket_broadcast(
                                    json.dumps(("redraw",)))
                    # 0= 20966, 1 = 15725
                    # Absolute position.
                    if typ == 3:
                        if code == 0:
                            y = val
                        elif code == 1:
                            x = val
                        elif code == 24:
                            pressure = val
                        # Only forward every 6th position event to the clients.
                        throttle = throttle + 1
                        if not eraser and throttle % 6 == 0:
                            await self.websocket_broadcast(
                                json.dumps((x, y, pressure)))
        finally:
            print("stream event task stopped.")
            self.running = False
            if self.stream_proc.returncode is None:
                self.stream_proc.kill()
async def screenshot(path, request):
    """Serve the current tablet screenshot as an HTTP JPEG response.

    The initial page load requests ``...id=0`` and may use a screenshot up
    to 30 s old; later refreshes require one at most 2 s old.
    """
    cachetime = 30 if path.endswith("id=0") else 2
    await refresh_ss(cachetime)
    # BUG FIX: close the file deterministically instead of leaking the
    # handle until garbage collection.
    with open("resnap.jpg", "rb") as f:
        body = f.read()
    headers = [
        ("Content-Type", "image/jpeg"),
        ("Content-Length", str(len(body))),
        ("Connection", "close"),
    ]
    return (http.HTTPStatus.OK, headers, body)
async def http_handler(path, request):
    """Route plain-HTTP requests hitting the websocket server.

    Returns None for the websocket endpoint (so the websockets library
    performs the upgrade), a JPEG response for /screenshot*, the index page
    for "/", and a 404 for everything else.
    """
    # only serve index file or defer to websocket handler.
    if path == "/websocket":
        return None
    if path.startswith("/screenshot"):
        return await screenshot(path, request)
    if path != "/":
        # BUG FIX: the body must be bytes like every other branch (was "").
        return (http.HTTPStatus.NOT_FOUND, [], b"")
    # BUG FIX: close the file deterministically instead of leaking the
    # handle until garbage collection.
    with open("index.html", "rb") as f:
        body = f.read()
    headers = [
        ("Content-Type", "text/html"),
        ("Content-Length", str(len(body))),
        ("Connection", "close"),
    ]
    return (http.HTTPStatus.OK, headers, body)
def run():
    """Parse command-line options and serve the stream until interrupted."""
    cli = argparse.ArgumentParser(description='stream remarkable')
    cli.add_argument('-r', '--rm_host', default='remarkable',
                     help='remarkable host')
    cli.add_argument('-s', '--server_host', default='localhost',
                     help='websocket server listen host')
    # NOTE(review): --port has no type=int, so an explicitly supplied port
    # arrives as a string; preserved as-is to avoid changing behaviour.
    cli.add_argument('-p', '--port', default=6789,
                     help='websocket server port')
    cli.add_argument('-n', '--no-autorefresh', dest='autorefresh', default=True,
                     action="store_false",
                     help='trigger refresh on page change etc')
    cli.add_argument('--auth', type=str, help='basic user:pass auth')
    options = cli.parse_args()

    # Model detection over SSH is currently disabled; an rm2 is assumed.
    # rm_model = check(rm_host)
    rm_model = "reMarkable 2.0"
    handler = WebsocketHandler(options.rm_host, rm_model, options)

    create_protocol = None
    if options.auth:
        credentials = options.auth.split(':')
        if len(credentials) != 2:
            print('--auth arg must be of form user:pass')
            sys.exit(1)
        create_protocol = websockets.basic_auth_protocol_factory(
            realm="remarkable stream",
            credentials=tuple(credentials),
        )

    server = websockets.serve(
        handler.websocket_handler, options.server_host, options.port,
        ping_interval=1000, process_request=http_handler,
        create_protocol=create_protocol)
    print(f"Visit http://{options.server_host}:{options.port}/")
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(server)
    event_loop.run_forever()
# Script entry point: start the CLI-configured websocket server.
if __name__ == "__main__":
    run()
| [
"asyncio.gather",
"argparse.ArgumentParser",
"os.rename",
"asyncio.get_event_loop",
"subprocess.run",
"json.dumps",
"time.sleep",
"websockets.serve",
"os._exit",
"sys.exit",
"os.stat",
"asyncio.create_subprocess_shell",
"time.time"
] | [((706, 717), 'time.time', 'time.time', ([], {}), '()\n', (715, 717), False, 'import time\n'), ((8797, 8853), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""stream remarkable"""'}), "(description='stream remarkable')\n", (8820, 8853), False, 'import argparse\n'), ((10012, 10176), 'websockets.serve', 'websockets.serve', (['handler.websocket_handler', 'args.server_host', 'args.port'], {'ping_interval': '(1000)', 'process_request': 'http_handler', 'create_protocol': 'create_protocol'}), '(handler.websocket_handler, args.server_host, args.port,\n ping_interval=1000, process_request=http_handler, create_protocol=\n create_protocol)\n', (10028, 10176), False, 'import websockets\n'), ((176, 309), 'subprocess.run', 'subprocess.run', (["['ssh', '-o', 'ConnectTimeout=2', rm_hostname, 'cat', '/proc/device-tree/model'\n ]"], {'check': '(True)', 'capture_output': '(True)'}), "(['ssh', '-o', 'ConnectTimeout=2', rm_hostname, 'cat',\n '/proc/device-tree/model'], check=True, capture_output=True)\n", (190, 309), False, 'import subprocess\n'), ((898, 973), 'subprocess.run', 'subprocess.run', (["['../scripts/host/resnap.sh', 'resnap.new.jpg']"], {'check': '(True)'}), "(['../scripts/host/resnap.sh', 'resnap.new.jpg'], check=True)\n", (912, 973), False, 'import subprocess\n'), ((1005, 1046), 'os.rename', 'os.rename', (['"""resnap.new.jpg"""', '"""resnap.jpg"""'], {}), "('resnap.new.jpg', 'resnap.jpg')\n", (1014, 1046), False, 'import os\n'), ((651, 662), 'os._exit', 'os._exit', (['(1)'], {}), '(1)\n', (659, 662), False, 'import os\n'), ((3574, 3646), 'asyncio.create_subprocess_shell', 'asyncio.create_subprocess_shell', (['command'], {'stdout': 'asyncio.subprocess.PIPE'}), '(command, stdout=asyncio.subprocess.PIPE)\n', (3605, 3646), False, 'import asyncio\n'), ((3753, 3764), 'time.time', 'time.time', ([], {}), '()\n', (3762, 3764), False, 'import time\n'), ((5023, 5095), 'asyncio.create_subprocess_shell', 'asyncio.create_subprocess_shell', 
(['command'], {'stdout': 'asyncio.subprocess.PIPE'}), '(command, stdout=asyncio.subprocess.PIPE)\n', (5054, 5095), False, 'import asyncio\n'), ((9828, 9839), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (9836, 9839), False, 'import sys\n'), ((10263, 10287), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (10285, 10287), False, 'import asyncio\n'), ((10325, 10349), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (10347, 10349), False, 'import asyncio\n'), ((738, 759), 'os.stat', 'os.stat', (['"""resnap.jpg"""'], {}), "('resnap.jpg')\n", (745, 759), False, 'import os\n'), ((2491, 2513), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (2505, 2513), False, 'import asyncio\n'), ((4284, 4295), 'time.time', 'time.time', ([], {}), '()\n', (4293, 4295), False, 'import time\n'), ((4404, 4419), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (4414, 4419), False, 'import time\n'), ((4475, 4498), 'json.dumps', 'json.dumps', (["('redraw',)"], {}), "(('redraw',))\n", (4485, 4498), False, 'import json\n'), ((7643, 7671), 'json.dumps', 'json.dumps', (['(x, y, pressure)'], {}), '((x, y, pressure))\n', (7653, 7671), False, 'import json\n'), ((7061, 7084), 'json.dumps', 'json.dumps', (["('redraw',)"], {}), "(('redraw',))\n", (7071, 7084), False, 'import json\n')] |
from pynput import keyboard
from pycreate2 import Create2
import time
#Robot initialization________________________________________________________________
if __name__ == "__main__":
    # Serial port of the iRobot Create 2 and the supported baud rates.
    port = '/dev/ttyUSB0'
    baud = {
        'default': 115200,
        'alt': 19200 # shouldn't need this unless you accidentally set it to this
    }
    # Open the serial connection and cycle the OI modes:
    # start -> safe -> full -> safe, so the robot ends in SAFE mode.
    bot = Create2(port=port, baud=baud['default'])
    bot.start()
    bot.safe()
    bot.full()
    print('Starting ...')
    bot.safe()
    # Counters/flags initialised here but not used in this part of the script.
    cnt = 0
    init_flag=0
    coll_flag=0
    lightmax=0
#Key board controls__________________________________________________________
def on_press(key):
if key == keyboard.Key.up:
bot.drive_direct(100,100)
if key == keyboard.Key.down:
bot.drive_direct(-100,-100)
if key == keyboard.Key.left:
bot.drive_direct(100,-100)
if key == keyboard.Key.right:
bot.drive_direct(-100,100)
def on_release(key):
print('{0} released'.format(key))
if key == keyboard.Key.up or key == keyboard.Key.down or key == keyboard.Key.left or key == keyboard.Key.right:
bot.drive_direct(0,0)
# Collect events until released
with keyboard.Listener(on_press=on_press,on_release=on_release) as listener:
listener.join()
# ...or, in a non-blocking fashion:
listener = keyboard.Listener(on_press=on_press,on_release=on_release)
listener.start()
| [
"pycreate2.Create2",
"pynput.keyboard.Listener"
] | [((351, 391), 'pycreate2.Create2', 'Create2', ([], {'port': 'port', 'baud': "baud['default']"}), "(port=port, baud=baud['default'])\n", (358, 391), False, 'from pycreate2 import Create2\n'), ((1409, 1468), 'pynput.keyboard.Listener', 'keyboard.Listener', ([], {'on_press': 'on_press', 'on_release': 'on_release'}), '(on_press=on_press, on_release=on_release)\n', (1426, 1468), False, 'from pynput import keyboard\n'), ((1257, 1316), 'pynput.keyboard.Listener', 'keyboard.Listener', ([], {'on_press': 'on_press', 'on_release': 'on_release'}), '(on_press=on_press, on_release=on_release)\n', (1274, 1316), False, 'from pynput import keyboard\n')] |
# Copyright 2019 The Regents of the University of California.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Original version written by <NAME> <<EMAIL>>
from swift.common.utils import get_logger, split_path, list_from_csv
from swift.common.swob import Request, Response, wsgify
from swift.common.constraints import valid_api_version
from swift.common.request_helpers import get_param
from swift.proxy.controllers.base import get_container_info, get_object_info
from swift.common.swob import wsgify
from avro_streamer.avro_streamer import GenericStrippingAvroParser
class AvroFilterMiddleware(object):
    """
    Swift middleware for removing certain fields from downloaded Avro
    objects, depending on a user's role.

    Essentially, this allows the Avro objects to be selectively censored
    for different classes of user -- for instance, there may be sensitive
    data that is being collected that should only be made available to
    privileged users.

    See attached README.md file for instructions on how to configure this
    middleware appropriately.

    Stripping is only applied to objects that have a Content-Type of
    'application/vnd.caida.<datatype>.avro'.

    Requires: python-avro-streamer (https://github.com/CAIDA/python-avro-streamer)
    """

    def __init__(self, app, conf, logger=None):
        """
        :param app: the next WSGI application/filter in the pipeline.
        :param conf: middleware configuration dictionary (paste.deploy).
        :param logger: optional logger; a default one is created otherwise.
        """
        self.app = app
        if logger:
            self.logger = logger
        else:
            self.logger = get_logger(conf, log_route='avrofilter')

        # Any roles specified as "nostrip_roles" will always receive the
        # full uncensored Avro data.
        if 'nostrip_roles' in conf:
            self.nostrip_roles = set(
                x.strip() for x in conf['nostrip_roles'].split(','))
        else:
            self.nostrip_roles = set()

        # admin should always be a nostrip role
        self.nostrip_roles.add('admin')

        # defaultstrip: datatype -> set of fields stripped by default.
        # dontstrip: role -> datatype -> set of fields retained for that role.
        self.defaultstrip = {}
        self.dontstrip = {}

        # Any field mentioned in a "retain_keys" option will be stripped
        # by default, unless the user matches a role where that field is
        # explicitly listed as being retained.
        # In other words: defaultstrip is the union of all of the fields that
        # are explicitly configured as retainable. Any "public" fields should
        # NOT be listed as a retained field for any role.
        # FIX: iterate with items() -- iteritems() exists only on Python 2
        # and would raise AttributeError under Python 3.
        for k, v in conf.items():
            # The role that this option applies to is specified in the
            # prefix of the configuration option name,
            # e.g. "swiftro_retain_keys" -> role = "swiftro"
            if not k.endswith("_retain_keys"):
                continue

            role = k[:-len("_retain_keys")]
            if role in self.dontstrip:
                self.logger.info("Warning: role '%s' appears multiple times in AvroFilterMiddleware configuration" % (role))
                # TODO only warn once per duplicate role
                continue

            self.dontstrip[role] = {}
            for ts in list_from_csv(v):
                ts = ts.strip()
                if len(ts) == 0:
                    continue

                # fields are listed using <datatype>:<fieldname> format, e.g.
                # "flowtuple:netacq_country"
                ts = ts.split(':')
                if len(ts) != 2:
                    self.logger.info("Invalid 'retain_keys' parameter format, should be <data type>:<field name> (not %s)" % (ts))
                    continue

                if ts[0] not in self.dontstrip[role]:
                    self.dontstrip[role][ts[0]] = set()
                if ts[0] not in self.defaultstrip:
                    self.defaultstrip[ts[0]] = set()

                self.dontstrip[role][ts[0]].add(ts[1])
                self.defaultstrip[ts[0]].add(ts[1])

    @wsgify
    def __call__(self, req):
        """Intercept GET/HEAD object fetches and strip configured Avro fields."""
        try:
            (version, account, container, obj) = \
                split_path(req.path_info, 4, 4, True)
        except ValueError:
            # Not an object path -- pass straight through.
            return req.get_response(self.app)

        # Only worry about data fetches, not uploads.
        if not valid_api_version(version) or req.method not in ('GET', 'HEAD'):
            return req.get_response(self.app)

        # Get all roles that apply to the user making the request.
        roles = set()
        if (req.environ.get('HTTP_X_IDENTITY_STATUS') == 'Confirmed' or
                req.environ.get('HTTP_X_SERVICE_IDENTITY_STATUS') in
                (None, "Confirmed")):
            roles = set(list_from_csv(req.environ.get('HTTP_X_ROLES', '')))

        # If we have one of the "nostrip" roles, then don't do any stripping.
        if roles.intersection(self.nostrip_roles):
            return req.get_response(self.app)

        # Perform the request and grab a response object that we can work with.
        resp = req.get_response(self.app)

        # Check that the requested object is actually a CAIDA avro file.
        conttype = resp.headers.get("Content-Type", None)
        if conttype is None:
            return resp
        if not conttype.startswith("application/vnd.caida."):
            return resp
        if not conttype.endswith(".avro"):
            return resp

        dtype = conttype.replace("application/vnd.caida.", "", 1)[:-5]
        if dtype not in self.defaultstrip:
            # No configured restrictions for this datatype.
            return resp

        # Start by planning to strip all fields for this datatype that have
        # explicitly appeared in the config file. Then for each role that
        # the user has, remove any fields from the strip set that should be
        # retained for that role.
        tostrip = self.defaultstrip[dtype]
        for r in roles:
            if r not in self.dontstrip:
                # No specified config for this role, so leave strip set as is.
                continue
            if dtype not in self.dontstrip[r]:
                continue
            tostrip = tostrip - self.dontstrip[r][dtype]

        # Remove the Etag because otherwise swift clients get very upset
        # about the md5sum of the response body not matching the md5sum
        # in the Etag header :/
        if 'Etag' in resp.headers:
            del resp.headers['Etag']

        # If we are going to be stripping fields, replace our response
        # iterable with one that will parse the received Avro and remove
        # the desired fields. The swift proxy should handle the rest.
        x = GenericStrippingAvroParser(resp.app_iter, resp.body, tostrip)
        resp.app_iter = x
        return resp
def filter_factory(global_conf, **local_conf):
    """Returns a WSGI filter app for use with paste.deploy."""
    # Merge the global paste.deploy options with any filter-local overrides.
    conf = dict(global_conf)
    conf.update(local_conf)

    def avro_strip(app):
        return AvroFilterMiddleware(app, conf)

    return avro_strip
# vim: set sw=4 tabstop=4 softtabstop=4 expandtab :
| [
"swift.common.utils.split_path",
"swift.common.utils.get_logger",
"swift.common.constraints.valid_api_version",
"swift.common.utils.list_from_csv",
"avro_streamer.avro_streamer.GenericStrippingAvroParser"
] | [((6924, 6985), 'avro_streamer.avro_streamer.GenericStrippingAvroParser', 'GenericStrippingAvroParser', (['resp.app_iter', 'resp.body', 'tostrip'], {}), '(resp.app_iter, resp.body, tostrip)\n', (6950, 6985), False, 'from avro_streamer.avro_streamer import GenericStrippingAvroParser\n'), ((1946, 1986), 'swift.common.utils.get_logger', 'get_logger', (['conf'], {'log_route': '"""avrofilter"""'}), "(conf, log_route='avrofilter')\n", (1956, 1986), False, 'from swift.common.utils import get_logger, split_path, list_from_csv\n'), ((3489, 3505), 'swift.common.utils.list_from_csv', 'list_from_csv', (['v'], {}), '(v)\n', (3502, 3505), False, 'from swift.common.utils import get_logger, split_path, list_from_csv\n'), ((4403, 4440), 'swift.common.utils.split_path', 'split_path', (['req.path_info', '(4)', '(4)', '(True)'], {}), '(req.path_info, 4, 4, True)\n', (4413, 4440), False, 'from swift.common.utils import get_logger, split_path, list_from_csv\n'), ((4584, 4610), 'swift.common.constraints.valid_api_version', 'valid_api_version', (['version'], {}), '(version)\n', (4601, 4610), False, 'from swift.common.constraints import valid_api_version\n')] |
"""
Nozomi
Character Module
author: <EMAIL>
"""
from nozomi.temporal.time import NozomiTime
from nozomi.security.ip_address import IpAddress
from nozomi.http.user_agent import UserAgent
from nozomi.http.accept_language import AcceptLanguage
from nozomi.http.headers import Headers
from nozomi.ancillary.configuration import Configuration
from typing import Optional
from nozomi.ancillary.immutable import Immutable
class Character:
    """Lazily derives client identity details (IP, user agent, language)
    from a set of request headers and an application configuration."""

    def __init__(
        self,
        headers: Headers,
        configuration: Configuration
    ) -> None:

        self._headers = headers
        self._configuration = configuration

        # Caches for the lazily computed properties below.
        self._ip_address: Optional[IpAddress] = None
        self._user_agent: Optional[UserAgent] = None
        self._language: Optional[AcceptLanguage] = None

    # Read-only views over the lazily parsed values.
    ip_address = Immutable(lambda s: s._parse_ip_address())
    user_agent = Immutable(lambda s: s._parse_user_agent())
    accept_language = Immutable(lambda s: s._parse_accept_language())

    def _parse_ip_address(self) -> IpAddress:
        """Derive and cache the client IP address from the headers."""
        if self._ip_address is not None:
            return self._ip_address
        self._ip_address = IpAddress.from_headers(
            headers=self._headers,
            boundary_ip_header=self._configuration.boundary_ip_header,
            debug=self._configuration.debug,
            debug_address='127.0.0.1'
        )
        return self._ip_address

    def _parse_user_agent(self) -> UserAgent:
        """Derive and cache the client user agent from the headers."""
        if self._user_agent is None:
            self._user_agent = UserAgent.from_headers(self._headers)
        return self._user_agent

    def _parse_accept_language(self) -> Optional[AcceptLanguage]:
        """Derive and cache the Accept-Language value, if present."""
        if self._language is None:
            self._language = AcceptLanguage.from_headers(self._headers)
        return self._language
| [
"nozomi.http.accept_language.AcceptLanguage.from_headers",
"nozomi.security.ip_address.IpAddress.from_headers",
"nozomi.http.user_agent.UserAgent.from_headers"
] | [((1108, 1281), 'nozomi.security.ip_address.IpAddress.from_headers', 'IpAddress.from_headers', ([], {'headers': 'self._headers', 'boundary_ip_header': 'self._configuration.boundary_ip_header', 'debug': 'self._configuration.debug', 'debug_address': '"""127.0.0.1"""'}), "(headers=self._headers, boundary_ip_header=self.\n _configuration.boundary_ip_header, debug=self._configuration.debug,\n debug_address='127.0.0.1')\n", (1130, 1281), False, 'from nozomi.security.ip_address import IpAddress\n'), ((1500, 1537), 'nozomi.http.user_agent.UserAgent.from_headers', 'UserAgent.from_headers', (['self._headers'], {}), '(self._headers)\n', (1522, 1537), False, 'from nozomi.http.user_agent import UserAgent\n'), ((1703, 1745), 'nozomi.http.accept_language.AcceptLanguage.from_headers', 'AcceptLanguage.from_headers', (['self._headers'], {}), '(self._headers)\n', (1730, 1745), False, 'from nozomi.http.accept_language import AcceptLanguage\n')] |
# Fix for #17 (and ulauncher's #703): explicitly defining Gdk version
import gi
gi.require_version('GLib', '2.0')
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
gi.require_version('GObject', '2.0')
gi.require_version('Gio', '2.0')
gi.require_version('GdkX11', '3.0')
gi.require_version('GdkPixbuf', '2.0')
from ulauncher.api.client.Extension import Extension # noqa
from ulauncher.api.shared.event import KeywordQueryEvent, ItemEnterEvent, SystemExitEvent # noqa
from ulauncher.api.shared.event import PreferencesEvent, PreferencesUpdateEvent # noqa
from ulauncher.api.client.EventListener import EventListener # noqa
from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem # noqa
from ulauncher.api.shared.item.ExtensionSmallResultItem import ExtensionSmallResultItem # noqa
from ulauncher.api.shared.action.ExtensionCustomAction import ExtensionCustomAction # noqa
from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction # noqa
from ulauncher.api.shared.action.BaseAction import BaseAction # noqa
from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction # noqa
from ulauncher.api.shared.action.DoNothingAction import DoNothingAction # noqa
from ulauncher.api.shared.action.HideWindowAction import HideWindowAction # noqa
from ulauncher.api.shared.action.OpenUrlAction import OpenUrlAction # noqa
import gettext
import time
import os
import logging
import random
import shutil
from urllib.parse import urlparse, quote_plus
from typing import Union
import math
try:
    import spotipy
    from spotipy.oauth2 import SpotifyPKCE
    import requests
except ImportError:
    # If import failed, try to automatically install the dependencies
    # via "pip install --user" from the bundled requirements.txt.
    import subprocess
    import sys
    subprocess.call([sys.executable, '-m', 'pip', 'install', '--user', '-r',
                     os.path.join(os.path.dirname(__file__), 'requirements.txt')])
    # And try to re-import
    import spotipy
    from spotipy.oauth2 import SpotifyPKCE
    import requests

# Module-level logger used throughout the extension.
logger = logging.getLogger(__name__)

# Default translation function; rebound by gettext inside on_event
# when a non-English language is configured.
_ = gettext.gettext
class UlauncherSpotifyAPIExtension(Extension, EventListener):
CLIENT_ID = '1f3a663c5fdd4056b4c0e122ea55a3af'
SCOPES = 'user-modify-playback-state user-read-playback-state user-read-recently-played user-library-modify'
CACHE_FOLDER = os.path.join(os.path.dirname(__file__), 'cache')
ACCESS_TOKEN_CACHE = os.path.join(os.path.dirname(__file__), 'cache.json')
POSSIBLE_PORTS = [8080, 5000, 5050, 6666] # spotify API redirect uris
ICONS = {
'main': os.path.join(os.path.dirname(__file__), 'images/icon.png'),
'play': os.path.join(os.path.dirname(__file__), 'images/play.png'),
'pause': os.path.join(os.path.dirname(__file__), 'images/pause.png'),
'next': os.path.join(os.path.dirname(__file__), 'images/next.png'),
'prev': os.path.join(os.path.dirname(__file__), 'images/prev.png'),
'repeat_off': os.path.join(os.path.dirname(__file__), 'images/repeat_off.png'),
'repeat_context': os.path.join(os.path.dirname(__file__), 'images/repeat_context.png'),
'repeat_track': os.path.join(os.path.dirname(__file__), 'images/repeat_track.png'),
'shuffle': os.path.join(os.path.dirname(__file__), 'images/shuffle_on.png'),
'no_shuffle': os.path.join(os.path.dirname(__file__), 'images/shuffle_off.png'),
'question': os.path.join(os.path.dirname(__file__), 'images/question.png'),
'track': os.path.join(os.path.dirname(__file__), 'images/track.png'),
'album': os.path.join(os.path.dirname(__file__), 'images/album.png'),
'playlist': os.path.join(os.path.dirname(__file__), 'images/playlist.png'),
'artist': os.path.join(os.path.dirname(__file__), 'images/artist.png'),
'search': os.path.join(os.path.dirname(__file__), 'images/search.png'),
'devices': os.path.join(os.path.dirname(__file__), 'images/devices.png'),
'volume': os.path.join(os.path.dirname(__file__), 'images/volume.png'),
'save': os.path.join(os.path.dirname(__file__), 'images/save.png')
}
LANGUAGES = [
'de',
'en',
]
    def __init__(self):
        """Subscribe to Ulauncher events, create the image cache folder and
        initialize default preferences/aliases placeholders."""
        super(UlauncherSpotifyAPIExtension, self).__init__()

        # even though it might be a good idea to separate extension logic thoroughly
        # there's no added benefit for such a small project
        # so instead of using proper listener, handle the events here
        self.subscribe(KeywordQueryEvent, self)
        self.subscribe(ItemEnterEvent, self)
        self.subscribe(SystemExitEvent, self)
        self.subscribe(PreferencesEvent, self)
        self.subscribe(PreferencesUpdateEvent, self)

        # create image cache folder if it doesn't exist
        if not os.path.exists(self.CACHE_FOLDER):
            os.mkdir(self.CACHE_FOLDER)

        # api placeholder (spotipy client, built in _generate_api)
        self.api = None

        # preferences placeholder with default settings
        # in case existing user upgrades and initial preferences are empty
        self.preferences = {
            'main_keyword': 'sp',
            'main_language': 'en',
            'auth_port': '8080',
            'clear_cache': 'No',
            'show_help': 'Yes',
            'aliases': 's: search; song: track; vol: volume; like: save; ?: help',
            'search_results_limit': '8',
            'request_timeout': '0.5'
        }

        # aliases placeholder (parsed in _generate_aliases)
        self.aliases = {}
    def _generate_api(self):
        """(Re)create the spotipy client using the PKCE authorization flow.

        The redirect URI is built from the 'auth_port' preference; it must be
        one of POSSIBLE_PORTS registered for the Spotify application.
        """
        logger.debug('Generating Spotipy object')
        redirect_uri = 'http://127.0.0.1:' + str(self.preferences['auth_port'])
        if int(self.preferences['auth_port']) not in self.POSSIBLE_PORTS:
            # Misconfigured port: only logged here; authorization would fail later.
            logger.debug(_('Port set in the preferences is not one of the supported ports.'))
            logger.debug(_('Something went very wrong, please report this issue on github.'))
        auth = SpotifyPKCE(client_id=self.CLIENT_ID,
                           redirect_uri=redirect_uri,
                           scope=self.SCOPES,
                           cache_path=self.ACCESS_TOKEN_CACHE)
        self.api = spotipy.Spotify(auth_manager=auth)
        return
# generate aliases
def _generate_aliases(self):
logger.debug(f'Generating aliases')
self.aliases = {k: v for k, v in [p.split(': ') for p in self.preferences['aliases'].split('; ')]}
return
    def _clear_cache(self) -> None:
        """Remove the downloaded-image cache folder, ignoring any errors."""
        shutil.rmtree(self.CACHE_FOLDER, ignore_errors=True)
        return
# download image to cache and return path to the cached image
def _dl_image(self, url: str) -> str:
filename = os.path.basename(urlparse(url).path)
cache_path = os.path.join(self.CACHE_FOLDER, filename)
if os.path.exists(cache_path):
return cache_path
else:
img = requests.get(url, stream=True)
with open(cache_path, 'wb') as f:
shutil.copyfileobj(img.raw, f)
return cache_path
# helper for humanizing duration in ms
def _parse_duration(self, ms: int, short: bool = False) -> str:
hours, ms = divmod(ms, 3600000)
minutes, ms = divmod(ms, 60000)
seconds = float(ms) / 1000
if short:
if hours:
return f'{hours:.0f}:{minutes:02.0f}:{seconds:02.0f}'
return f'{minutes:.0f}:{seconds:02.0f}'
else:
duration = ''
if hours:
duration += f'{hours:.0f}h'
if minutes:
duration += f'{minutes:.0f}m'
if seconds:
duration += f' {seconds:.0f}s'
return duration
# a wrapper helper to generate an item
def _generate_item(self, title: str = '', desc: str = '', icon: str = '', small: bool = False,
action: Union[dict, BaseAction] = DoNothingAction(),
alt_action: Union[dict, BaseAction] = DoNothingAction(),
keep_open: bool = False) -> Union[ExtensionResultItem, ExtensionSmallResultItem]:
if isinstance(action, dict):
action['_keep_app_open'] = keep_open
action = ExtensionCustomAction(action, keep_app_open=keep_open)
if isinstance(alt_action, dict):
alt_action['_keep_app_open'] = keep_open
alt_action = ExtensionCustomAction(alt_action, keep_app_open=keep_open)
if small:
item = ExtensionSmallResultItem
else:
item = ExtensionResultItem
return item(name=title.replace('&', '&') if title else '',
description=desc.replace('&', '&') if desc else '',
icon=icon if icon else self.ICONS['main'],
on_enter=action if action else DoNothingAction(),
on_alt_enter=alt_action if alt_action else DoNothingAction())
# helper for the currently playing entries
    def _generate_now_playing_menu(self, currently_playing: dict = None,
                                   next: bool = True, prev: bool = True, help: bool = True):
        """Build result items describing the current playback state.

        :param currently_playing: playback dict as returned by
            ``self.api.current_playback``; fetched on demand when omitted.
        :param next: append a 'Next track' control entry.
        :param prev: append a 'Previous track' control entry.
        :param help: append the cheatsheet entry (if enabled in preferences).
        :returns: a list of items, a single informational item when nothing
            is playing, or None for unrecognized playback types (e.g. ads).
        """
        if not currently_playing:
            currently_playing = self.api.current_playback(additional_types='episode')
        if not currently_playing or not currently_playing['item']:
            return self._generate_item(_('Nothing is playing at this moment'),
                                       _('Start playing first'),
                                       action=HideWindowAction())

        if currently_playing['currently_playing_type'] == 'track':
            # Regular music track: show artist/song, album, device and progress.
            artists = ', '.join([artist['name'] for artist in currently_playing['item']['artists']])
            song_name = currently_playing['item']['name']
            album_name = currently_playing['item']['album']['name']
            device_playing_on_type = currently_playing['device']['type'].lower()
            device_playing_on_name = currently_playing['device']['name']
            is_playing = currently_playing['is_playing']
            status = _('Playing') if is_playing else _('Paused')
            track_progress = self._parse_duration(currently_playing['progress_ms'], short=True)
            track_duration = self._parse_duration(currently_playing['item']['duration_ms'], short=True)

            items = [self._generate_item(f'{artists} -- {song_name}',
                                         f'{_("Album")}: {album_name} | '
                                         f'{status} {_("on")}: {device_playing_on_type} {device_playing_on_name} | '
                                         f'{track_progress}/{track_duration}',
                                         self.ICONS['pause'] if is_playing else self.ICONS['play'],
                                         action={'command': 'pause' if is_playing else 'play'},
                                         keep_open=True if not is_playing else False)]
        elif currently_playing['currently_playing_type'] == 'episode':
            # Podcast episode: show episode/show name, device and progress.
            show = currently_playing['item']['show']['name']
            episode = currently_playing['item']['name']
            device_playing_on_type = currently_playing['device']['type'].lower()
            device_playing_on_name = currently_playing['device']['name']
            is_playing = currently_playing['is_playing']
            status = _('Playing') if is_playing else _('Paused')
            episode_progress = self._parse_duration(currently_playing['progress_ms'], short=True)
            episode_duration = self._parse_duration(currently_playing['item']['duration_ms'], short=True)

            items = [self._generate_item(f'{episode}',
                                         f'{show} | '
                                         f'{status} {_("on")}: {device_playing_on_type} {device_playing_on_name} | '
                                         f'{episode_progress}/{episode_duration}',
                                         self.ICONS['pause'] if is_playing else self.ICONS['play'],
                                         action={'command': 'pause' if is_playing else 'play'},
                                         keep_open=True if not is_playing else False)]
        else:
            # Unknown playback type (e.g. an ad) -- nothing sensible to render.
            return

        if next:
            items.append(self._generate_item(_('Next track'),
                                             _('Skip playback to next track'),
                                             self.ICONS['next'],
                                             action={'command': 'next'},
                                             keep_open=True))
        if prev:
            items.append(self._generate_item(_('Previous track'),
                                             _('Skip playback to previous track'),
                                             self.ICONS['prev'],
                                             action={'command': 'prev'},
                                             keep_open=True))
        if help and self.preferences['show_help'] == 'Yes':
            items.append(self._generate_item(_('Extension cheatsheet'),
                                             _('List of all available commands'),
                                             self.ICONS['question'],
                                             action=SetUserQueryAction(f'sp help')))
        return items
# another helper to render items or a single item
def _render(self, i: Union[list, ExtensionResultItem]) -> RenderResultListAction:
if isinstance(i, list):
return RenderResultListAction(i)
elif isinstance(i, ExtensionResultItem):
return RenderResultListAction([i])
    def on_event(self, event, extension):
        """Dispatch a Ulauncher event to the matching handler method.

        Before dispatching, (re)installs the gettext translator so that
        subsequent ``_()`` calls use the language from the preferences.
        """
        # Set language
        language = self.preferences['main_language']
        domain = 'base'
        local_path = os.path.join(os.path.dirname(__file__), 'locales')
        logger.debug(f'Extension language is: {language}. Searching translation files in {local_path}.')
        # Only translate if need to
        if language != 'en':
            if language in self.LANGUAGES:
                translation_file_path = gettext.find(domain, local_path, [language])
                logger.debug(f'Translation file path: {translation_file_path}')
                try:
                    translator = gettext.translation(domain=domain,
                                                        localedir=local_path,
                                                        languages=[language])
                    translator.install()
                except FileNotFoundError:
                    logger.debug('Translation file not found. Go with default.')
                else:
                    # Rebind the module-level translation function.
                    global _
                    _ = translator.gettext

        # distribute events to proper listeners
        if extension is not self:
            raise RuntimeError('Something is very wrong.')
        if isinstance(event, KeywordQueryEvent):
            return self.on_keyword_query(event.get_keyword(), event.get_argument())
        if isinstance(event, ItemEnterEvent):
            return self.on_item_enter(event.get_data())
        if isinstance(event, SystemExitEvent):
            return self.on_system_exit()
        if isinstance(event, PreferencesEvent):
            return self.on_preferences(event.preferences)
        if isinstance(event, PreferencesUpdateEvent):
            return self.on_preferences_update(event.id, event.old_value, event.new_value)
def on_system_exit(self):
logger.debug('Received system exit event')
if self.preferences['clear_cache'] == 'Yes':
logger.debug('Clearing downloaded image cache')
return self._clear_cache()
def on_preferences(self, preferences: dict):
logger.debug(f'Received preferences event: {preferences}')
for p in preferences:
self.on_preferences_update(p, self.preferences[p], preferences[p], False)
self._generate_api()
self._generate_aliases()
def on_preferences_update(self, key: str, old_value: str, new_value: str, regenerate: bool = True):
if old_value == new_value or not new_value:
return
logger.debug(f'Received preferences update event for {key}, changing from {old_value} to {new_value}')
self.preferences[key] = new_value
if regenerate:
self._generate_api()
self._generate_aliases()
def on_keyword_query(self, keyword: str, argument: str):
# if user is not authorized or no cached token => go through authorization flow and get the tokens
if self.api.auth_manager.get_cached_token() is None:
return self._render(self._generate_item(title=_('Authorization'),
desc=_('Authorize the extension with your Spotify account'),
action={'command': 'auth'}))
# if user has a query => process the query
if argument:
# Parse arguments
command, *components = argument.split()
logger.debug(f'Recognized query "{argument}", split into command "{command}" and components "{components}"')
if command in self.aliases:
logger.debug(f'Command {command} is an alias for {self.aliases[command]}')
command = self.aliases[command]
if command == 'switch':
logger.debug(f'Playback transfer')
user_devices = self.api.devices()
if user_devices.get('devices', None):
items = []
for device in user_devices['devices']:
device_name = device.get('name', 'device_name')
device_id = device.get('id', 'device_id')
device_type = device.get('type', 'device_type').lower()
current = _('Current device') + ' | ' if device.get('is_active') else ''
items.append(self._generate_item(title=_('Switch playback to') + f' {device_type} {device_name}',
desc=f'{current}' + _('Device id') + f': {device_id}',
icon=self.ICONS['play'], # TODO switch icon
action={'command': 'switch',
'device_id': device_id}))
return self._render(items)
else:
return self._render(self._generate_item(title=_('No active devices running Spotify found'),
desc=_('Open Spotify on one of your devices first'),
action=SetUserQueryAction('Spotify')))
elif command in ['album', 'track', 'artist', 'playlist', 'search']:
logger.debug(f'Searching')
if len(components) == 0:
examples = {
'album': ['sp album mick gordon doom', 'sp album beach house bloom',
'sp album foals holy fire'],
'artist': ['sp artist spice girls', 'sp artist britney spears', 'sp artist jakey'],
'track': ['sp track led zep no quarter', 'sp track post malone congratulations',
'sp track post malone wow'],
'playlist': ['sp playlist brain food', 'sp playlist russian hardbass',
'sp playlist spanish flamenco'],
'search': ['sp search bad guy', 'sp search gojira', 'sp search bonobo']
}
if command != 'search':
search_for = _('Search for') + f' {command}s'
else:
search_for = f'Enter your search query'
return self._render(self._generate_item(f'{search_for}',
f'{_("For example")}: {random.choice(examples[command])}',
icon=self.ICONS['main'],
action=DoNothingAction()))
if command == 'search':
type_search = 'album,track,artist,playlist'
limit = math.ceil(int(self.preferences['search_results_limit']) / 4)
else:
type_search = command
limit = int(self.preferences['search_results_limit'])
query = ' '.join(components)
search_results = self.api.search(query, limit=limit, type=type_search)
if not search_results:
return self._render(self._generate_item(f'{_("Nothing found for")} {query}',
_('Try again with different query?'),
action=DoNothingAction()))
items = []
results = [item for i in search_results for item in search_results[i]['items']]
for res in results:
category = res['type']
context_or_track_uri = 'uris' if category == 'track' else 'context_uri'
uri = res['uri']
alt_action = DoNothingAction()
if category == 'album':
artists = ', '.join([artist['name'] for artist in res['artists']])
name = res['name']
n_tracks = res['total_tracks']
released = res['release_date']
if 'images' in res and res['images']:
smallest_img = min(res['images'], key=lambda x: x['height'])
img = self._dl_image(smallest_img['url'])
else:
img = self.ICONS['main']
title = f'{artists} -- {name}'
desc = f'{_("Album")} | {n_tracks} {_("tracks")} | Released {released}'
elif category == 'artist':
name = res['name']
popularity = res['popularity']
genres = ', '.join(res['genres']).capitalize()
genres_output = f' | {genres}' if genres else ''
if 'images' in res and res['images']:
smallest_img = min(res['images'], key=lambda x: x['height'])
img = self._dl_image(smallest_img['url'])
else:
img = self.ICONS['main']
title = f'{name}'
desc = f'{_("Artist")}{genres_output} | {_("Popularity")} {popularity}%'
elif category == 'track':
artists = ', '.join([artist['name'] for artist in res['artists']])
name = res['name']
album_name = res['album']['name']
popularity = res['popularity']
duration = self._parse_duration(res['duration_ms'])
if 'images' in res['album'] and res['album']['images']:
smallest_img = min(res['album']['images'], key=lambda x: x['height'])
img = self._dl_image(smallest_img['url'])
else:
img = self.ICONS['main']
title = f'{artists} -- {name}'
desc = f'{_("Track")} | {duration} | {_("Popularity")} {popularity}% | {album_name}'
alt_action = {'command': 'queue', 'uri': uri}
uri = [uri]
elif category == 'playlist':
name = res['name']
description = f' | {res["description"]}' if res['description'] else ''
owner = res['owner']['display_name']
n_tracks = res['tracks']['total']
if 'images' in res and res['images']:
img = self._dl_image(res['images'][0]['url'])
else:
img = self.ICONS['main']
title = f'{name}'
desc = f'{_("Playlist by")} {owner} | {n_tracks} {_("tracks")}{description}'
else:
raise RuntimeError('Wrong category received from Spotify api?')
items.append(self._generate_item(title, desc, img,
action={'command': 'play',
context_or_track_uri: uri},
alt_action=alt_action,
keep_open=False))
return self._render(items)
elif command == 'repeat':
logger.debug(f'Playback repeat status')
currently_playing = self.api.current_playback()
if not currently_playing or not currently_playing['item']:
return self._render(self._generate_item(_('Nothing is playing at this moment'),
_('Start playing first'),
action=HideWindowAction()))
states = ['off', 'context', 'track']
state_names = [_('do not repeat'), _('repeat context'), _('repeat track')]
current_repeat_state: str = currently_playing.get('repeat_state')
current_repeat_state_index = states.index(current_repeat_state)
items = [self._generate_item(f'{_("Current state")}: {state_names[current_repeat_state_index]}',
small=True, icon=self.ICONS[f'repeat_{current_repeat_state}'],
action=DoNothingAction())]
for i in range(len(states)):
if i == current_repeat_state_index:
continue
items.append(self._generate_item(f'{_("Set to")} {state_names[i]}',
small=True, icon=self.ICONS[f'repeat_{states[i]}'],
action={'command': 'repeat',
'state': states[i]},
keep_open=False))
return self._render(items)
elif command == 'shuffle':
logger.debug(f'Playback shuffle status')
currently_playing = self.api.current_playback()
if not currently_playing or not currently_playing['item']:
return self._render(self._generate_item(_('Nothing is playing at this moment'),
_('Start playing first'),
action=HideWindowAction()))
current_shuffle_state = currently_playing.get('shuffle_state')
states = [True, False]
state_names = [_('shuffle'), _('do not shuffle')]
state_icons = ['shuffle', 'no_shuffle']
current_shuffle_state_index = states.index(current_shuffle_state)
items = [self._generate_item(f'{_("Current state")}: {state_names[current_shuffle_state_index]}',
small=True, icon=self.ICONS[state_icons[current_shuffle_state_index]],
action=DoNothingAction())]
for i in range(len(states)):
if i == current_shuffle_state_index:
continue
items.append(self._generate_item(f'{_("Set to")} {state_names[i]}',
small=True, icon=self.ICONS[state_icons[i]],
action={'command': 'shuffle',
'state': states[i]},
keep_open=False))
return self._render(items)
elif command == 'history':
logger.debug(f'History')
history = self.api.current_user_recently_played(limit=int(self.preferences['search_results_limit']))
if not history['items']:
return self._render(self._generate_item(_('No previously played songs found'),
_('Maybe an API bug?'), icon=self.ICONS['question'],
action=HideWindowAction()))
items = []
for res in history['items']:
track = res['track']
uri = track['uri']
track_name = track['name']
album_name = track['album']['name']
artists = ', '.join([artist['name'] for artist in track['artists']])
popularity = track['popularity']
duration = self._parse_duration(track['duration_ms'])
if 'images' in track['album'] and track['album']['images']:
smallest_img = min(track['album']['images'], key=lambda x: x['height'])
img = self._dl_image(smallest_img['url'])
else:
img = self.ICONS['main']
title = f'{artists} -- {track_name}'
desc = _('Track') + f' | {duration} | ' + _('Popularity') + f' {popularity}% | {album_name}'
alt_action = {'command': 'queue', 'uri': uri}
uri = [uri]
items.append(self._generate_item(title, desc, img,
action={'command': 'play',
'uris': uri},
alt_action=alt_action,
keep_open=False))
return self._render(items)
elif command == 'volume':
logger.debug(f'Volume controls')
current_volume = self.api.current_playback(additional_types='episode')
if not current_volume:
return self._render(self._generate_item(_('Can not set volume when nothing is playing'),
icon=self.ICONS['volume'],
action=HideWindowAction()))
current_volume = current_volume['device']['volume_percent']
if len(components) == 0:
items = [self._generate_item(f'{_("Current volume")}: {current_volume}%',
small=True, icon=self.ICONS['volume'],
action=DoNothingAction()),
self._generate_item(_('Mute: 0% volume'),
small=True,
icon=self.ICONS['volume'], # TODO mute icon
action={'command': 'volume',
'state': 0}),
self._generate_item(_('Full volume: 100% volume'),
small=True, icon=self.ICONS['volume'],
action={'command': 'volume',
'state': 100})]
return self._render(items)
else:
try:
requested_volume = int(components[0])
except ValueError:
return self._render(self._generate_item(_('The volume must be from 0 to 100'),
_('0 = mute; 100 = full volume'),
icon=self.ICONS['volume'],
action=SetUserQueryAction(f'{keyword} volume ')))
logger.debug(f'Interpreting "{components}" input as {requested_volume}')
if (requested_volume < 0) or (requested_volume > 100):
return self._render(self._generate_item(_('The volume must be from 0 to 100'),
_('0 = mute; 100 = full volume'),
icon=self.ICONS['volume'],
action=SetUserQueryAction(f'{keyword} volume ')))
return self._render(self._generate_item(_('Set volume to') + f' {requested_volume}%',
icon=self.ICONS['volume'],
action={'command': 'volume',
'state': requested_volume}))
elif command == 'save':
logger.debug(f'Saving track')
current_track = self.api.current_playback(additional_types='episode')
if not current_track:
return self._render(self._generate_item(_('Can not save a song when nothing is playing'),
icon=self.ICONS['save'],
action=HideWindowAction()))
if current_track['currently_playing_type'] != 'track':
return self._render(self._generate_item(_('You can save only tracks'),
icon=self.ICONS['save'],
action=HideWindowAction()))
artists = ', '.join([artist['name'] for artist in current_track['item']['artists']])
song_name = current_track['item']['name']
song_uri = current_track['item']['uri']
return self._render(self._generate_item(f'{artists} -- {song_name}',
desc=_('Add to your Liked Songs'),
icon=self.ICONS['save'],
action={'command': 'save_tracks',
'state': [song_uri]}))
elif command == 'lyrics':
logger.debug('Lyrics search request')
current_track = self.api.current_playback(additional_types='episode')
if not current_track:
return self._render(self._generate_item(_('Nothing is playing'),
icon=self.ICONS['search'],
action=HideWindowAction()))
if current_track['currently_playing_type'] != 'track':
return self._render(self._generate_item(_('You can search only tracks'),
icon=self.ICONS['search'],
action=HideWindowAction()))
artist = current_track['item']['artists'][0]['name']
song_name = current_track['item']['name']
query = quote_plus(f'{artist} - {song_name}')
genius_link = 'https://genius.com/search?q=' + query
azlyrics_link = 'https://search.azlyrics.com/search.php?q=' + query
# TODO: any other popular lyrics provider?
return self._render([
self._generate_item(_('Search genius.com'),
desc=f'{genius_link}',
icon=self.ICONS['search'],
action=OpenUrlAction(genius_link)),
self._generate_item(_('Search azlyrics.com'),
desc=f'{azlyrics_link}',
icon=self.ICONS['search'],
action=OpenUrlAction(azlyrics_link))
])
elif command == 'help':
items = [
self._generate_item(f'{_("This help menu")}: {keyword} help',
icon=self.ICONS['question'], small=True),
self._generate_item(_('Add selected track to queue: Alt + Enter'),
icon=self.ICONS['play'], small=True,
action=HideWindowAction()),
self._generate_item(f'{_("Switch playback between devices")}: {keyword} switch',
icon=self.ICONS['devices'], small=True,
action=SetUserQueryAction(f'{keyword} switch')),
self._generate_item(f'{_("Change playback volume")}: {keyword} volume',
icon=self.ICONS['volume'], small=True,
action=SetUserQueryAction(f'{keyword} volume')),
self._generate_item(f'{_("Save currently playing song to your Liked Songs")}: {keyword} save',
icon=self.ICONS['save'], small=True,
action=SetUserQueryAction(f'{keyword} save')),
self._generate_item(f'{_("Change repeat state")}: {keyword} repeat',
icon=self.ICONS['repeat_context'], small=True,
action=SetUserQueryAction(f'{keyword} repeat')),
self._generate_item(f'{_("Change shuffle state")}: {keyword} shuffle',
icon=self.ICONS['shuffle'], small=True,
action=SetUserQueryAction(f'{keyword} shuffle')),
self._generate_item(f'{_("Search for a track")}: {keyword} track {_("-your-query-")}',
icon=self.ICONS['track'], small=True,
action=SetUserQueryAction(f'{keyword} track ')),
self._generate_item(f'{_("Search for an album")}: {keyword} album {_("-your-query-")}',
icon=self.ICONS['album'], small=True,
action=SetUserQueryAction(f'{keyword} album ')),
self._generate_item(f'{_("Search for an artist")}: {keyword} artist {_("-your-query-")}',
icon=self.ICONS['artist'], small=True,
action=SetUserQueryAction(f'{keyword} artist ')),
self._generate_item(f'{_("Search for a playlist")}: {keyword} playlist {_("-your-query-")}',
icon=self.ICONS['playlist'], small=True,
action=SetUserQueryAction(f'{keyword} playlist ')),
self._generate_item(f'{_("General search")}: {keyword} search {_("-your-query-")}',
icon=self.ICONS['search'], small=True,
action=SetUserQueryAction(f'{keyword} search ')),
self._generate_item(f'{_("Recently played tracks")}: {keyword} history',
icon=self.ICONS['play'], small=True,
action=SetUserQueryAction(f'{keyword} history')),
self._generate_item(f'{_("Lyrics of the currently playing track")}: {keyword} lyrics',
icon=self.ICONS['search'], small=True,
action=SetUserQueryAction(f'{keyword} lyrics'))
]
return self._render(items)
# no query, but something is playing currently => show now playing menu
current_playback = self.api.current_playback(additional_types='episode')
if current_playback:
return self._render(self._generate_now_playing_menu(current_playback))
# no query, nothing is playing, but there are devices online => offer user to start playback on one of them
user_devices = self.api.devices()
if user_devices.get('devices', None):
items = []
for device in user_devices['devices']:
device_name = device.get('name', 'device_name')
device_id = device.get('id', 'device_id')
device_type = device.get('type', 'device_type').lower()
items.append(self._generate_item(_('Start playback on') + f' {device_type} {device_name}',
_('Device id') + f': {device_id}',
self.ICONS['play'],
action={'command': 'play',
'device_id': device_id},
keep_open=True))
return self._render(items)
# no query, nothing is playing, no devices online => prompt to open Spotify anywhere first
return self._render(self._generate_item(_('No active devices running Spotify found'),
_('Open Spotify on one of your devices first'),
action=SetUserQueryAction('Spotify')))
def on_item_enter(self, data: dict):
    """Dispatch the 'command' attached to a selected result item to the Spotify API.

    :param data: payload stored on the item; ``command`` selects the action and
        the remaining keys (``device_id``, ``uri``, ``uris``, ``state``, ...)
        are the action's arguments. ``_keep_app_open`` requests re-rendering
        the now-playing menu instead of closing the window.
    :returns: a render action when there is something to show, otherwise ``None``.
    """
    command = data.get('command', '')
    keep_open = data.get('_keep_app_open', False)
    logger.debug(f'Received command {command} ({data})')
    try:
        if command == 'auth':
            # Trigger the OAuth flow; failures are only logged, not surfaced.
            try:
                self.api.auth_manager.get_access_token()
                return
            except spotipy.SpotifyOauthError as e:
                logger.debug(f'Could not authenticate', e)
                return
        elif command == 'pause':
            logger.debug(f'Pausing...')
            self.api.pause_playback()
        elif command == 'play':
            # Three mutually exclusive ways to start playback:
            # explicit track uris > a context uri (album/playlist) > just resume.
            device_id = data.get('device_id', None)
            context_uri = data.get('context_uri', None)
            uris = data.get('uris', [])
            if uris:
                logger.debug(f'Playing (device_id: {device_id}, uris: {uris})...')
                self.api.start_playback(device_id=device_id, uris=uris)
            elif context_uri:
                logger.debug(f'Playing (device_id: {device_id}, context_uri: {context_uri}...')
                self.api.start_playback(device_id=device_id, context_uri=context_uri)
            else:
                logger.debug(f'Playing (device_id: {device_id})...')
                self.api.start_playback(device_id=device_id)
        elif command == 'queue':
            uri = data.get('uri', None)
            logger.debug(f'Adding {uri} to queue...')
            self.api.add_to_queue(uri)
        elif command == 'next':
            logger.debug(f'Skipping to next...')
            self.api.next_track()
        elif command == 'prev':
            logger.debug(f'Skipping to previous...')
            self.api.previous_track()
        elif command == 'switch':
            logger.debug(f'Switching device...')
            self.api.transfer_playback(device_id=data.get('device_id', None))
        elif command == 'shuffle':
            state = data.get('state', False)
            logger.debug(f'Setting shuffle to {state}')
            self.api.shuffle(state)
        elif command == 'repeat':
            state = data.get('state', 'off')
            logger.debug(f'Setting repeat to {state}')
            self.api.repeat(state)
        elif command == 'volume':
            state = data.get('state', 0)
            logger.debug(f'Setting volume to {state}')
            self.api.volume(state)
        elif command == 'save_tracks':
            state = data.get('state', [])
            logger.debug(f'Saving tracks {state}')
            self.api.current_user_saved_tracks_add(state)
        else:
            logger.debug('No handler for this command...')
            return self._render(self._generate_item('Empty or unknown command',
                                                    'Please investigate or open a github issue!',
                                                    action=HideWindowAction()))
        if keep_open:
            # very ugly hack :(
            time.sleep(float(self.preferences['request_timeout']))
            # Spotify api is asynchronous and without this,
            # there might be a discrepancy in what's currently playing.
            # For example, you press next, the request to skip is sent and successfully acknowledged (http 204)
            # but what's currently playing depends on client and it still hasn't changed.
            # This ugly hack will minimize the possibility of that happening, but probably not completely.
            # There must be a better way.
            return self._render(self._generate_now_playing_menu())
        else:
            return
    except spotipy.SpotifyException as e:
        # Map the most common HTTP statuses to a human-readable item;
        # retryable statuses (per the client's default_retry_codes) get their own hint.
        logger.debug(f'Received an exception, {e}')
        if e.http_status == 403:
            return self._render(self._generate_item(_('Spotify: 403 Forbidden'),
                                                    _('Forbidden to access this endpoint or state has changed'),
                                                    action=HideWindowAction()))
        elif e.http_status == 401:
            return self._render(self._generate_item(_('Spotify: 401 Unauthorized'),
                                                    _('Probably, scope of the request is not authorized'),
                                                    action=HideWindowAction()))
        elif e.http_status == 404:
            return self._render(self._generate_item(_('Spotify: 404 Not Found'),
                                                    _('Probably, request is not complete and missing something'),
                                                    action=HideWindowAction()))
        elif e.http_status in self.api.default_retry_codes:
            return self._render(self._generate_item(f'Spotify: {e.http_status}',
                                                    _('Spotify asks to try again later'),
                                                    action=HideWindowAction()))
        else:
            logger.debug('Unknown SpotifyException', e)
            return self._render(self._generate_item(_('Spotify exception') + f': {e.http_status}',
                                                    _('Code:') + f' {e.code}, ' + _('msg:') + f' {e.msg}',
                                                    action=HideWindowAction()))
if __name__ == '__main__':
    # Script entry point: instantiate the extension and start its event loop.
    UlauncherSpotifyAPIExtension().run()
| [
"logging.getLogger",
"spotipy.Spotify",
"ulauncher.api.shared.action.RenderResultListAction.RenderResultListAction",
"ulauncher.api.shared.action.DoNothingAction.DoNothingAction",
"urllib.parse.quote_plus",
"ulauncher.api.shared.action.HideWindowAction.HideWindowAction",
"os.path.exists",
"gettext.tra... | [((80, 113), 'gi.require_version', 'gi.require_version', (['"""GLib"""', '"""2.0"""'], {}), "('GLib', '2.0')\n", (98, 113), False, 'import gi\n'), ((114, 146), 'gi.require_version', 'gi.require_version', (['"""Gtk"""', '"""3.0"""'], {}), "('Gtk', '3.0')\n", (132, 146), False, 'import gi\n'), ((147, 179), 'gi.require_version', 'gi.require_version', (['"""Gdk"""', '"""3.0"""'], {}), "('Gdk', '3.0')\n", (165, 179), False, 'import gi\n'), ((180, 216), 'gi.require_version', 'gi.require_version', (['"""GObject"""', '"""2.0"""'], {}), "('GObject', '2.0')\n", (198, 216), False, 'import gi\n'), ((217, 249), 'gi.require_version', 'gi.require_version', (['"""Gio"""', '"""2.0"""'], {}), "('Gio', '2.0')\n", (235, 249), False, 'import gi\n'), ((250, 285), 'gi.require_version', 'gi.require_version', (['"""GdkX11"""', '"""3.0"""'], {}), "('GdkX11', '3.0')\n", (268, 285), False, 'import gi\n'), ((286, 324), 'gi.require_version', 'gi.require_version', (['"""GdkPixbuf"""', '"""2.0"""'], {}), "('GdkPixbuf', '2.0')\n", (304, 324), False, 'import gi\n'), ((2051, 2078), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2068, 2078), False, 'import logging\n'), ((2360, 2385), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2375, 2385), False, 'import os\n'), ((2434, 2459), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2449, 2459), False, 'import os\n'), ((5984, 6108), 'spotipy.oauth2.SpotifyPKCE', 'SpotifyPKCE', ([], {'client_id': 'self.CLIENT_ID', 'redirect_uri': 'redirect_uri', 'scope': 'self.SCOPES', 'cache_path': 'self.ACCESS_TOKEN_CACHE'}), '(client_id=self.CLIENT_ID, redirect_uri=redirect_uri, scope=self\n .SCOPES, cache_path=self.ACCESS_TOKEN_CACHE)\n', (5995, 6108), False, 'from spotipy.oauth2 import SpotifyPKCE\n'), ((6204, 6238), 'spotipy.Spotify', 'spotipy.Spotify', ([], {'auth_manager': 'auth'}), '(auth_manager=auth)\n', (6219, 6238), False, 'import spotipy\n'), ((6522, 6574), 
'shutil.rmtree', 'shutil.rmtree', (['self.CACHE_FOLDER'], {'ignore_errors': '(True)'}), '(self.CACHE_FOLDER, ignore_errors=True)\n', (6535, 6574), False, 'import shutil\n'), ((6776, 6817), 'os.path.join', 'os.path.join', (['self.CACHE_FOLDER', 'filename'], {}), '(self.CACHE_FOLDER, filename)\n', (6788, 6817), False, 'import os\n'), ((6830, 6856), 'os.path.exists', 'os.path.exists', (['cache_path'], {}), '(cache_path)\n', (6844, 6856), False, 'import os\n'), ((7936, 7953), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (7951, 7953), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((8016, 8033), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (8031, 8033), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((2597, 2622), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2612, 2622), False, 'import os\n'), ((2677, 2702), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2692, 2702), False, 'import os\n'), ((2758, 2783), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2773, 2783), False, 'import os\n'), ((2839, 2864), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2854, 2864), False, 'import os\n'), ((2919, 2944), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2934, 2944), False, 'import os\n'), ((3005, 3030), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3020, 3030), False, 'import os\n'), ((3101, 3126), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3116, 3126), False, 'import os\n'), ((3199, 3224), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3214, 3224), False, 'import os\n'), ((3290, 3315), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3305, 
3315), False, 'import os\n'), ((3382, 3407), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3397, 3407), False, 'import os\n'), ((3473, 3498), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3488, 3498), False, 'import os\n'), ((3558, 3583), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3573, 3583), False, 'import os\n'), ((3640, 3665), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3655, 3665), False, 'import os\n'), ((3725, 3750), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3740, 3750), False, 'import os\n'), ((3811, 3836), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3826, 3836), False, 'import os\n'), ((3895, 3920), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3910, 3920), False, 'import os\n'), ((3980, 4005), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3995, 4005), False, 'import os\n'), ((4065, 4090), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4080, 4090), False, 'import os\n'), ((4147, 4172), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4162, 4172), False, 'import os\n'), ((4864, 4897), 'os.path.exists', 'os.path.exists', (['self.CACHE_FOLDER'], {}), '(self.CACHE_FOLDER)\n', (4878, 4897), False, 'import os\n'), ((4911, 4938), 'os.mkdir', 'os.mkdir', (['self.CACHE_FOLDER'], {}), '(self.CACHE_FOLDER)\n', (4919, 4938), False, 'import os\n'), ((6920, 6950), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (6932, 6950), False, 'import requests\n'), ((8248, 8302), 'ulauncher.api.shared.action.ExtensionCustomAction.ExtensionCustomAction', 'ExtensionCustomAction', (['action'], {'keep_app_open': 'keep_open'}), '(action, keep_app_open=keep_open)\n', (8269, 8302), False, 'from ulauncher.api.shared.action.ExtensionCustomAction import 
ExtensionCustomAction\n'), ((8422, 8480), 'ulauncher.api.shared.action.ExtensionCustomAction.ExtensionCustomAction', 'ExtensionCustomAction', (['alt_action'], {'keep_app_open': 'keep_open'}), '(alt_action, keep_app_open=keep_open)\n', (8443, 8480), False, 'from ulauncher.api.shared.action.ExtensionCustomAction import ExtensionCustomAction\n'), ((13571, 13596), 'ulauncher.api.shared.action.RenderResultListAction.RenderResultListAction', 'RenderResultListAction', (['i'], {}), '(i)\n', (13593, 13596), False, 'from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction\n'), ((13870, 13895), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (13885, 13895), False, 'import os\n'), ((6735, 6748), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (6743, 6748), False, 'from urllib.parse import urlparse, quote_plus\n'), ((7013, 7043), 'shutil.copyfileobj', 'shutil.copyfileobj', (['img.raw', 'f'], {}), '(img.raw, f)\n', (7031, 7043), False, 'import shutil\n'), ((13665, 13692), 'ulauncher.api.shared.action.RenderResultListAction.RenderResultListAction', 'RenderResultListAction', (['[i]'], {}), '([i])\n', (13687, 13692), False, 'from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction\n'), ((14161, 14205), 'gettext.find', 'gettext.find', (['domain', 'local_path', '[language]'], {}), '(domain, local_path, [language])\n', (14173, 14205), False, 'import gettext\n'), ((1881, 1906), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1896, 1906), False, 'import os\n'), ((8859, 8876), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (8874, 8876), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((8941, 8958), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (8956, 8958), False, 'from ulauncher.api.shared.action.DoNothingAction import 
DoNothingAction\n'), ((9553, 9571), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (9569, 9571), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((14340, 14418), 'gettext.translation', 'gettext.translation', ([], {'domain': 'domain', 'localedir': 'local_path', 'languages': '[language]'}), '(domain=domain, localedir=local_path, languages=[language])\n', (14359, 14418), False, 'import gettext\n'), ((42530, 42559), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['"""Spotify"""'], {}), "('Spotify')\n", (42548, 42559), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((13325, 13355), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""sp help"""'], {}), "(f'sp help')\n", (13343, 13355), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((21555, 21572), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (21570, 21572), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((18885, 18914), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['"""Spotify"""'], {}), "('Spotify')\n", (18903, 18914), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((46852, 46870), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (46868, 46870), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((20383, 20400), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (20398, 20400), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((21169, 21186), 
'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (21184, 21186), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((26330, 26347), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (26345, 26347), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((47175, 47193), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (47191, 47193), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((20195, 20227), 'random.choice', 'random.choice', (['examples[command]'], {}), '(examples[command])\n', (20208, 20227), False, 'import random\n'), ((25728, 25746), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (25744, 25746), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((28073, 28090), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (28088, 28090), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((47502, 47520), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (47518, 47520), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((27446, 27464), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (27462, 27464), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((36283, 36320), 'urllib.parse.quote_plus', 'quote_plus', (['f"""{artist} - {song_name}"""'], {}), "(f'{artist} - {song_name}')\n", (36293, 36320), False, 'from urllib.parse import urlparse, quote_plus\n'), ((47830, 47848), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), 
'()\n', (47846, 47848), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((48206, 48224), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (48222, 48224), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((29213, 29231), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (29229, 29231), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((31189, 31207), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (31205, 31207), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((31566, 31583), 'ulauncher.api.shared.action.DoNothingAction.DoNothingAction', 'DoNothingAction', ([], {}), '()\n', (31581, 31583), False, 'from ulauncher.api.shared.action.DoNothingAction import DoNothingAction\n'), ((33414, 33454), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} volume """'], {}), "(f'{keyword} volume ')\n", (33432, 33454), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((34307, 34325), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (34323, 34325), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((34642, 34660), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (34658, 34660), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((32839, 32879), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} volume """'], {}), "(f'{keyword} volume ')\n", (32857, 32879), False, 'from ulauncher.api.shared.action.SetUserQueryAction import 
SetUserQueryAction\n'), ((35770, 35788), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (35786, 35788), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((36109, 36127), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (36125, 36127), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((36814, 36840), 'ulauncher.api.shared.action.OpenUrlAction.OpenUrlAction', 'OpenUrlAction', (['genius_link'], {}), '(genius_link)\n', (36827, 36840), False, 'from ulauncher.api.shared.action.OpenUrlAction import OpenUrlAction\n'), ((37088, 37116), 'ulauncher.api.shared.action.OpenUrlAction.OpenUrlAction', 'OpenUrlAction', (['azlyrics_link'], {}), '(azlyrics_link)\n', (37101, 37116), False, 'from ulauncher.api.shared.action.OpenUrlAction import OpenUrlAction\n'), ((37575, 37593), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (37591, 37593), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n'), ((37824, 37863), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} switch"""'], {}), "(f'{keyword} switch')\n", (37842, 37863), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((38084, 38123), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} volume"""'], {}), "(f'{keyword} volume')\n", (38102, 38123), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((38365, 38402), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} save"""'], {}), "(f'{keyword} save')\n", (38383, 38402), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((38628, 
38667), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} repeat"""'], {}), "(f'{keyword} repeat')\n", (38646, 38667), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((38888, 38928), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} shuffle"""'], {}), "(f'{keyword} shuffle')\n", (38906, 38928), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((39163, 39202), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} track """'], {}), "(f'{keyword} track ')\n", (39181, 39202), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((39438, 39477), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} album """'], {}), "(f'{keyword} album ')\n", (39456, 39477), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((39716, 39756), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} artist """'], {}), "(f'{keyword} artist ')\n", (39734, 39756), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((40000, 40042), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} playlist """'], {}), "(f'{keyword} playlist ')\n", (40018, 40042), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((40275, 40315), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} search """'], {}), "(f'{keyword} search ')\n", (40293, 40315), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((40535, 40575), 
'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} history"""'], {}), "(f'{keyword} history')\n", (40553, 40575), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((40811, 40850), 'ulauncher.api.shared.action.SetUserQueryAction.SetUserQueryAction', 'SetUserQueryAction', (['f"""{keyword} lyrics"""'], {}), "(f'{keyword} lyrics')\n", (40829, 40850), False, 'from ulauncher.api.shared.action.SetUserQueryAction import SetUserQueryAction\n'), ((45672, 45690), 'ulauncher.api.shared.action.HideWindowAction.HideWindowAction', 'HideWindowAction', ([], {}), '()\n', (45688, 45690), False, 'from ulauncher.api.shared.action.HideWindowAction import HideWindowAction\n')] |
"""
AIOHappyBase connection module.
"""
import os
import logging
from typing import AnyStr, List, Dict, Any
from thriftpy2.contrib.aio.transport import (
TAsyncBufferedTransportFactory,
TAsyncFramedTransportFactory,
)
from thriftpy2.contrib.aio.protocol import (
TAsyncBinaryProtocolFactory,
TAsyncCompactProtocolFactory,
)
from thriftpy2.contrib.aio.rpc import make_client
from Hbase_thrift import Hbase, ColumnDescriptor
from .table import Table
from ._util import (
ensure_bytes,
snake_to_camel_case,
check_invalid_items,
run_coro,
)
try:
    from thriftpy2_httpx_client import make_aio_client as make_http_client
except ImportError:  # pragma: no cover
    # Fallback stub: keeps the module importable when the optional HTTP client
    # package is absent, but fails loudly if the HTTP protocol is actually used.
    async def make_http_client(*_, **__):
        raise RuntimeError("thriftpy2_httpx_client is required to"
                           " use the HTTP client protocol.")
logger = logging.getLogger(__name__)

# HBase Thrift compatibility levels understood by this library.
COMPAT_MODES = ('0.90', '0.92', '0.94', '0.96', '0.98')

# Connection defaults; each can be overridden via an AIOHAPPYBASE_* env var.
DEFAULT_HOST = os.environ.get('AIOHAPPYBASE_HOST', 'localhost')
DEFAULT_PORT = int(os.environ.get('AIOHAPPYBASE_PORT', '9090'))
DEFAULT_COMPAT = os.environ.get('AIOHAPPYBASE_COMPAT', '0.98')
DEFAULT_TRANSPORT = os.environ.get('AIOHAPPYBASE_TRANSPORT', 'buffered')
DEFAULT_PROTOCOL = os.environ.get('AIOHAPPYBASE_PROTOCOL', 'binary')
DEFAULT_CLIENT = os.environ.get('AIOHAPPYBASE_CLIENT', 'socket')
class Connection:
    """
    Connection to an HBase Thrift server.
    The `host` and `port` arguments specify the host name and TCP port
    of the HBase Thrift server to connect to. If omitted or ``None``,
    a connection to the default port on ``localhost`` is made. If
    specifed, the `timeout` argument specifies the socket timeout in
    milliseconds.
    If `autoconnect` is `True` the connection is made directly during
    initialization. Otherwise a context manager should be used (with
    Connection...) or :py:meth:`Connection.open` must be called explicitly
    before first use. Note that due to limitations in the Python async
    framework, a RuntimeError will be raised if it is used inside of a running
    asyncio event loop.
    The optional `table_prefix` and `table_prefix_separator` arguments
    specify a prefix and a separator string to be prepended to all table
    names, e.g. when :py:meth:`Connection.table` is invoked. For
    example, if `table_prefix` is ``myproject``, all tables will
    have names like ``myproject_XYZ``.
    The optional `compat` argument sets the compatibility level for
    this connection. Older HBase versions have slightly different Thrift
    interfaces, and using the wrong protocol can lead to crashes caused
    by communication errors, so make sure to use the correct one. This
    value can be either the string ``0.90``, ``0.92``, ``0.94``, or
    ``0.96`` (the default).
    The optional `transport` argument specifies the Thrift transport
    mode to use. Supported values for this argument are ``buffered``
    (the default) and ``framed``. Make sure to choose the right one,
    since otherwise you might see non-obvious connection errors or
    program hangs when making a connection. HBase versions before 0.94
    always use the buffered transport. Starting with HBase 0.94, the
    Thrift server optionally uses a framed transport, depending on the
    argument passed to the ``hbase-daemon.sh start thrift`` command.
    The default ``-threadpool`` mode uses the buffered transport; the
    ``-hsha``, ``-nonblocking``, and ``-threadedselector`` modes use the
    framed transport.
    The optional `protocol` argument specifies the Thrift transport
    protocol to use. Supported values for this argument are ``binary``
    (the default) and ``compact``. Make sure to choose the right one,
    since otherwise you might see non-obvious connection errors or
    program hangs when making a connection. ``TCompactProtocol`` is
    a more compact binary format that is typically more efficient to
    process as well. ``TBinaryProtocol`` is the default protocol that
    AIOHappyBase uses.
    The optional `client` argument specifies the type of Thrift client
    to use. Supported values for this argument are ``socket``
    (the default) and ``http``. Make sure to choose the right one,
    since otherwise you might see non-obvious connection errors or
    program hangs when making a connection. To check which client
    you should use, refer to the ``hbase.regionserver.thrift.http``
    setting. If it is ``true`` use ``http``, otherwise use ``socket``.
    .. versionadded:: v1.4.0
        `client` argument
    .. versionadded:: 0.9
        `protocol` argument
    .. versionadded:: 0.5
        `timeout` argument
    .. versionadded:: 0.4
        `table_prefix_separator` argument
    .. versionadded:: 0.4
        support for framed Thrift transports
    """
    # TODO: Auto generate these?
    # Lookup tables mapping the string values accepted by __init__ to the
    # concrete ThriftPy2 factory/constructor objects.
    THRIFT_TRANSPORTS = dict(
        buffered=TAsyncBufferedTransportFactory(),
        framed=TAsyncFramedTransportFactory(),
    )
    THRIFT_PROTOCOLS = dict(
        binary=TAsyncBinaryProtocolFactory(decode_response=False),
        compact=TAsyncCompactProtocolFactory(decode_response=False),
    )
    THRIFT_CLIENTS = dict(
        socket=make_client,
        http=make_http_client,
    )
    def __init__(self,
                 host: str = DEFAULT_HOST,
                 port: int = DEFAULT_PORT,
                 timeout: int = None,
                 autoconnect: bool = False,
                 table_prefix: AnyStr = None,
                 table_prefix_separator: AnyStr = b'_',
                 compat: str = DEFAULT_COMPAT,
                 transport: str = DEFAULT_TRANSPORT,
                 protocol: str = DEFAULT_PROTOCOL,
                 client: str = DEFAULT_CLIENT,
                 **client_kwargs: Any):
        """
        :param host: The host to connect to
        :param port: The port to connect to
        :param timeout: The socket timeout in milliseconds (optional)
        :param autoconnect: Whether the connection should be opened directly
        :param table_prefix: Prefix used to construct table names (optional)
        :param table_prefix_separator: Separator used for `table_prefix`
        :param compat: Compatibility mode (optional)
        :param transport: Thrift transport mode (optional)
        :param protocol: Thrift protocol mode (optional)
        :param client: Thrift client mode (optional)
        :param client_kwargs:
            Extra keyword arguments for `make_client()`. See the ThriftPy2
            documentation for more information.
        """
        if table_prefix is not None:
            if not isinstance(table_prefix, (str, bytes)):
                raise TypeError("'table_prefix' must be a string")
            table_prefix = ensure_bytes(table_prefix)
        if not isinstance(table_prefix_separator, (str, bytes)):
            raise TypeError("'table_prefix_separator' must be a string")
        table_prefix_separator = ensure_bytes(table_prefix_separator)
        # Fail fast on unknown mode strings before any network activity.
        check_invalid_items(
            compat=(compat, COMPAT_MODES),
            transport=(transport, self.THRIFT_TRANSPORTS),
            protocol=(protocol, self.THRIFT_PROTOCOLS),
            client=(client, self.THRIFT_CLIENTS),
        )
        # Allow host and port to be None, which may be easier for
        # applications wrapping a Connection instance.
        self.host = host or DEFAULT_HOST
        self.port = port or DEFAULT_PORT
        self.timeout = timeout
        self.table_prefix = table_prefix
        self.table_prefix_separator = table_prefix_separator
        self.compat = compat
        self._transport_factory = self.THRIFT_TRANSPORTS[transport]
        self._protocol_factory = self.THRIFT_PROTOCOLS[protocol]
        self._client_factory = self.THRIFT_CLIENTS[client]
        # Keyword arguments forwarded verbatim to the client factory when
        # open() is called; user-supplied client_kwargs may override any key.
        self.client_kwargs = {
            'service': Hbase,
            'host': self.host,
            'port': self.port,
            'timeout': self.timeout,
            'trans_factory': self._transport_factory,
            'proto_factory': self._protocol_factory,
            **client_kwargs,
        }
        self.client = None  # created lazily by open()
        if autoconnect:
            self._autoconnect()
    def _autoconnect(self):
        # Synchronously drive the async open(); run_coro raises the given
        # message as an error when called from inside a running event loop.
        run_coro(self.open(), "Cannot autoconnect in a running event loop!")
    def _table_name(self, name: AnyStr) -> bytes:
        """Construct a table name by optionally adding a table name prefix."""
        name = ensure_bytes(name)
        if self.table_prefix is None:
            return name
        return self.table_prefix + self.table_prefix_separator + name
    async def open(self) -> None:
        """
        Create and open the underlying client to the HBase instance. This
        method can safely be called more than once.
        """
        if self.client is not None:
            return  # _refresh_thrift_client opened the transport
        logger.debug(f"Opening Thrift transport to {self.host}:{self.port}")
        self.client = await self._client_factory(**self.client_kwargs)
    def close(self) -> None:
        """
        Close the underlying client to the HBase instance. This method
        can be safely called more than once. Note that the client is
        destroyed after it is closed which will cause errors to occur
        if it is used again before reopening. The :py:class:`Connection`
        can be reopened by calling :py:meth:`open` again.
        """
        if self.client is None:
            return
        if logger is not None:
            # If called from __del__(), module variables may no longer exist.
            logger.debug(f"Closing Thrift transport to {self.host}:{self.port}")
        self.client.close()
        self.client = None
    def table(self, name: AnyStr, use_prefix: bool = True) -> Table:
        """
        Return a table object.
        Returns a :py:class:`happybase.Table` instance for the table
        named `name`. This does not result in a round-trip to the
        server, and the table is not checked for existence.
        The optional `use_prefix` argument specifies whether the table
        prefix (if any) is prepended to the specified `name`. Set this
        to `False` if you want to use a table that resides in another
        ‘prefix namespace’, e.g. a table from a ‘friendly’ application
        co-hosted on the same HBase instance. See the `table_prefix`
        argument to the :py:class:`Connection` constructor for more
        information.
        :param name: the name of the table
        :param use_prefix: whether to use the table prefix (if any)
        :return: Table instance
        """
        name = ensure_bytes(name)
        if use_prefix:
            name = self._table_name(name)
        return Table(name, self)
    # Table administration and maintenance
    async def tables(self) -> List[bytes]:
        """
        Return a list of table names available in this HBase instance.
        If a `table_prefix` was set for this :py:class:`Connection`, only
        tables that have the specified prefix will be listed.
        :return: The table names
        """
        names = await self.client.getTableNames()
        # Filter using prefix, and strip prefix from names
        if self.table_prefix is not None:
            prefix = self._table_name(b'')
            offset = len(prefix)
            names = [n[offset:] for n in names if n.startswith(prefix)]
        return names
    async def create_table(self,
                           name: AnyStr,
                           families: Dict[str, Dict[str, Any]]) -> Table:
        """
        Create a table.
        :param name: The table name
        :param families: The name and options for each column family
        :return: The created table instance
        The `families` argument is a dictionary mapping column family
        names to a dictionary containing the options for this column
        family, e.g.
        ::
            families = {
                'cf1': dict(max_versions=10),
                'cf2': dict(max_versions=1, block_cache_enabled=False),
                'cf3': dict(),  # use defaults
            }
            connection.create_table('mytable', families)
        These options correspond to the ColumnDescriptor structure in
        the Thrift API, but note that the names should be provided in
        Python style, not in camel case notation, e.g. `time_to_live`,
        not `timeToLive`. The following options are supported:
        * ``max_versions`` (`int`)
        * ``compression`` (`str`)
        * ``in_memory`` (`bool`)
        * ``bloom_filter_type`` (`str`)
        * ``bloom_filter_vector_size`` (`int`)
        * ``bloom_filter_nb_hashes`` (`int`)
        * ``block_cache_enabled`` (`bool`)
        * ``time_to_live`` (`int`)
        """
        name = self._table_name(name)
        if not isinstance(families, dict):
            raise TypeError("'families' arg must be a dictionary")
        if not families:
            raise ValueError(f"No column families given for table: {name!r}")
        column_descriptors = []
        for cf_name, options in families.items():
            # Translate the snake_case option names to the camelCase names
            # expected by the Thrift ColumnDescriptor struct.
            kwargs = {
                snake_to_camel_case(option_name): value
                for option_name, value in (options or {}).items()
            }
            if not cf_name.endswith(':'):
                cf_name += ':'  # Thrift requires the trailing colon
            kwargs['name'] = cf_name
            column_descriptors.append(ColumnDescriptor(**kwargs))
        await self.client.createTable(name, column_descriptors)
        # The name is already prefixed here, so skip prefixing again.
        return self.table(name, use_prefix=False)
    async def delete_table(self, name: AnyStr, disable: bool = False) -> None:
        """
        Delete the specified table.
        .. versionadded:: 0.5
            `disable` argument
        In HBase, a table always needs to be disabled before it can be
        deleted. If the `disable` argument is `True`, this method first
        disables the table if it wasn't already and then deletes it.
        :param name: The table name
        :param disable: Whether to first disable the table if needed
        """
        if disable and await self.is_table_enabled(name):
            await self.disable_table(name)
        await self.client.deleteTable(self._table_name(name))
    async def enable_table(self, name: AnyStr) -> None:
        """
        Enable the specified table.
        :param name: The table name
        """
        await self.client.enableTable(self._table_name(name))
    async def disable_table(self, name: AnyStr) -> None:
        """
        Disable the specified table.
        :param name: The table name
        """
        await self.client.disableTable(self._table_name(name))
    async def is_table_enabled(self, name: AnyStr) -> None:
        """
        Return whether the specified table is enabled.
        :param str name: The table name
        :return: whether the table is enabled
        :rtype: bool
        """
        return await self.client.isTableEnabled(self._table_name(name))
    async def compact_table(self, name: AnyStr, major: bool = False) -> None:
        """Compact the specified table.
        :param str name: The table name
        :param bool major: Whether to perform a major compaction.
        """
        name = self._table_name(name)
        if major:
            await self.client.majorCompact(name)
        else:
            await self.client.compact(name)
    # Support async context usage
    async def __aenter__(self) -> 'Connection':
        await self.open()
        return self
    async def __aexit__(self, *_exc) -> None:
        self.close()
    # Support context usage
    def __enter__(self) -> 'Connection':
        # Synchronous entry; refuses to run inside a live event loop.
        run_coro(self.open(), error="Use 'async with' in a running event loop!")
        return self
    def __exit__(self, *_exc) -> None:
        self.close()
    def __del__(self) -> None:
        # Best-effort leak warning only; reaches into the Thrift client's
        # private transport (client._iprot.trans) — TODO confirm this stays
        # valid across ThriftPy2 versions.
        try:
            if self.client._iprot.trans.is_open():  # noqa
                logger.warning(f"{self} was not closed!")
        except:  # noqa
            pass
| [
"logging.getLogger",
"thriftpy2.contrib.aio.transport.TAsyncFramedTransportFactory",
"thriftpy2.contrib.aio.transport.TAsyncBufferedTransportFactory",
"Hbase_thrift.ColumnDescriptor",
"thriftpy2.contrib.aio.protocol.TAsyncCompactProtocolFactory",
"os.environ.get",
"thriftpy2.contrib.aio.protocol.TAsyncB... | [((874, 901), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (891, 901), False, 'import logging\n'), ((975, 1023), 'os.environ.get', 'os.environ.get', (['"""AIOHAPPYBASE_HOST"""', '"""localhost"""'], {}), "('AIOHAPPYBASE_HOST', 'localhost')\n", (989, 1023), False, 'import os\n'), ((1105, 1150), 'os.environ.get', 'os.environ.get', (['"""AIOHAPPYBASE_COMPAT"""', '"""0.98"""'], {}), "('AIOHAPPYBASE_COMPAT', '0.98')\n", (1119, 1150), False, 'import os\n'), ((1171, 1223), 'os.environ.get', 'os.environ.get', (['"""AIOHAPPYBASE_TRANSPORT"""', '"""buffered"""'], {}), "('AIOHAPPYBASE_TRANSPORT', 'buffered')\n", (1185, 1223), False, 'import os\n'), ((1243, 1292), 'os.environ.get', 'os.environ.get', (['"""AIOHAPPYBASE_PROTOCOL"""', '"""binary"""'], {}), "('AIOHAPPYBASE_PROTOCOL', 'binary')\n", (1257, 1292), False, 'import os\n'), ((1310, 1357), 'os.environ.get', 'os.environ.get', (['"""AIOHAPPYBASE_CLIENT"""', '"""socket"""'], {}), "('AIOHAPPYBASE_CLIENT', 'socket')\n", (1324, 1357), False, 'import os\n'), ((1043, 1086), 'os.environ.get', 'os.environ.get', (['"""AIOHAPPYBASE_PORT"""', '"""9090"""'], {}), "('AIOHAPPYBASE_PORT', '9090')\n", (1057, 1086), False, 'import os\n'), ((4898, 4930), 'thriftpy2.contrib.aio.transport.TAsyncBufferedTransportFactory', 'TAsyncBufferedTransportFactory', ([], {}), '()\n', (4928, 4930), False, 'from thriftpy2.contrib.aio.transport import TAsyncBufferedTransportFactory, TAsyncFramedTransportFactory\n'), ((4947, 4977), 'thriftpy2.contrib.aio.transport.TAsyncFramedTransportFactory', 'TAsyncFramedTransportFactory', ([], {}), '()\n', (4975, 4977), False, 'from thriftpy2.contrib.aio.transport import TAsyncBufferedTransportFactory, TAsyncFramedTransportFactory\n'), ((5029, 5079), 'thriftpy2.contrib.aio.protocol.TAsyncBinaryProtocolFactory', 'TAsyncBinaryProtocolFactory', ([], {'decode_response': '(False)'}), '(decode_response=False)\n', (5056, 5079), False, 'from 
thriftpy2.contrib.aio.protocol import TAsyncBinaryProtocolFactory, TAsyncCompactProtocolFactory\n'), ((5097, 5148), 'thriftpy2.contrib.aio.protocol.TAsyncCompactProtocolFactory', 'TAsyncCompactProtocolFactory', ([], {'decode_response': '(False)'}), '(decode_response=False)\n', (5125, 5148), False, 'from thriftpy2.contrib.aio.protocol import TAsyncBinaryProtocolFactory, TAsyncCompactProtocolFactory\n'), ((13448, 13474), 'Hbase_thrift.ColumnDescriptor', 'ColumnDescriptor', ([], {}), '(**kwargs)\n', (13464, 13474), False, 'from Hbase_thrift import Hbase, ColumnDescriptor\n')] |
# Generated by Django 3.1.7 on 2021-03-09 12:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional ``spotify_id`` integer column to the Song model."""

    dependencies = [("songs", "0005_auto_20210309_1203")]

    operations = [
        migrations.AddField(
            model_name="song",
            name="spotify_id",
            # Nullable and blank so existing rows need no backfill.
            field=models.IntegerField(
                blank=True,
                default=None,
                null=True,
                verbose_name="Spotify Id",
            ),
        ),
    ]
| [
"django.db.models.IntegerField"
] | [((334, 422), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'default': 'None', 'null': '(True)', 'verbose_name': '"""Spotify Id"""'}), "(blank=True, default=None, null=True, verbose_name=\n 'Spotify Id')\n", (353, 422), False, 'from django.db import migrations, models\n')] |
import sys
sys.path.append("../modules/")
import numpy as np
import matplotlib.pyplot as plt
from skimage import feature
from utils import conf
if __name__ == "__main__":
    # Load the image slice and the two segmentation masks from disk.
    img = np.load(conf.dir_data_temp + "slice.npy")
    endocardial_mask = np.load(conf.dir_data_mask + "endocardial_mask.npy")
    epicardial_mask = np.load(conf.dir_data_mask + "epicardial_mask.npy")
    # Edge-detect both masks to obtain thin border contours.
    endocardial_border = feature.canny(endocardial_mask, sigma=3)  # computed but not drawn below
    epicardial_border = feature.canny(epicardial_mask, sigma=3)
    # Three panels: (image shown, array providing the axis limits, optional contour).
    panels = (
        (img, img, None),
        (epicardial_mask, endocardial_mask, None),
        (img, endocardial_mask, epicardial_border),
    )
    figure = plt.figure()
    for position, (image, frame, border) in enumerate(panels, start=1):
        axis = figure.add_subplot(1, 3, position)
        axis.imshow(image, cmap=plt.cm.gray)
        # Flip the y axis so the image origin stays at the top-left.
        axis.set_xlim([0., frame.shape[1]])
        axis.set_ylim([frame.shape[0], 0.])
        if border is not None:
            axis.contour(border, colors='r')
    plt.show()
| [
"matplotlib.pyplot.figure",
"skimage.feature.canny",
"numpy.load",
"sys.path.append",
"matplotlib.pyplot.show"
] | [((11, 41), 'sys.path.append', 'sys.path.append', (['"""../modules/"""'], {}), "('../modules/')\n", (26, 41), False, 'import sys\n'), ((233, 250), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (240, 250), True, 'import numpy as np\n'), ((333, 350), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (340, 350), True, 'import numpy as np\n'), ((431, 448), 'numpy.load', 'np.load', (['filename'], {}), '(filename)\n', (438, 448), True, 'import numpy as np\n'), ((503, 543), 'skimage.feature.canny', 'feature.canny', (['endocardial_mask'], {'sigma': '(3)'}), '(endocardial_mask, sigma=3)\n', (516, 543), False, 'from skimage import feature\n'), ((681, 720), 'skimage.feature.canny', 'feature.canny', (['epicardial_mask'], {'sigma': '(3)'}), '(epicardial_mask, sigma=3)\n', (694, 720), False, 'from skimage import feature\n'), ((733, 745), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (743, 745), True, 'import matplotlib.pyplot as plt\n'), ((1323, 1333), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1331, 1333), True, 'import matplotlib.pyplot as plt\n')] |
'''This module extends PTPDevice for Sony devices.
Use it in a master module that determines the vendor and automatically uses its
extension. This is why inheritance is not explicit.
'''
from contextlib import contextmanager
from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter
from ..ptp import PTPError
import logging
logger = logging.getLogger(__name__)
__all__ = ('Sony',)
class SonyError(PTPError):
    """Raised when a Sony-specific PTP operation fails (e.g. authentication)."""
    pass
class Sony(object):
    '''This class implements Sony's PTP operations.'''
    def __init__(self, *args, **kwargs):
        logger.debug('Init Sony')
        super(Sony, self).__init__(*args, **kwargs)
        # TODO: expose the choice to disable automatic Sony extension
        # Name-mangled to _Sony__raw; True means "behave as a plain PTP device".
        self.__raw = False
    @contextmanager
    def session(self):
        '''
        Manage Sony session with context manager.

        Opens a PTP session and performs Sony's SDIOConnect authentication
        handshake before yielding control.
        '''
        # When raw device, do not perform
        if self.__raw:
            with super(Sony, self).session():
                yield
            return
        with super(Sony, self).session():
            logger.debug('Authentication')
            # Three-step SDIOConnect handshake with a device-info query in
            # between; every response must report 'OK'.
            r = []
            r.append(self.sdio_connect(1))
            r.append(self.sdio_connect(2))
            r.append(self.sdio_get_ext_device_info())
            r.append(self.sdio_connect(3))
            if not all(map(lambda r: r.ResponseCode == 'OK', r)):
                raise SonyError('Could not authenticate')
            else:
                logger.debug('Authentication done')
            yield
    def _shutdown(self):
        logger.debug('Shutdown Sony')
        super(Sony, self)._shutdown()
    def _PropertyCode(self, **product_properties):
        # Extend the base PTP property codes with Sony vendor codes
        # (0xD2xx range); subclasses can add more via product_properties.
        return super(Sony, self)._PropertyCode(
            DPCCompensation=0xD200,
            DRangeOptimize=0xD201,
            SonyImageSize=0xD203,
            ShutterSpeed=0xD20D,
            ColorTemp=0xD20F,
            CCFilter=0xD210,
            AspectRatio=0xD211,
            FocusFound=0xD213,
            ObjectInMemory=0xD215,
            ExposeIndex=0xD216,
            SonyBatteryLevel=0xD218,
            PictureEffect=0xD21B,
            ABFilter=0xD21C,
            ISO=0xD21E,
            AutoFocus=0xD2C1,
            Capture=0xD2C2,
            Movie=0xD2C8,
            StillImage=0xD2C7,
            **product_properties
        )
    def _OperationCode(self, **product_operations):
        # Sony vendor operation codes (0x92xx range).
        return super(Sony, self)._OperationCode(
            SDIOConnect=0x9201,
            SDIOGetExtDeviceInfo=0x9202,
            SonyGetDevicePropDesc=0x9203,
            SonyGetDevicePropValue=0x9204,
            SetControlDeviceA=0x9205,
            GetControlDeviceDesc=0x9206,
            SetControlDeviceB=0x9207,
            GetAllDevicePropData=0x9209,
            **product_operations
        )
    def _ObjectFormatCode(self, **product_object_formats):
        return super(Sony, self)._ObjectFormatCode(
            RAW=0xb101,
            **product_object_formats
        )
    def _ResponseCode(self, **product_responses):
        return super(Sony, self)._ResponseCode(
            Sony1=0xa101,
            **product_responses
        )
    def _EventCode(self, **product_events):
        return super(Sony, self)._EventCode(
            SonyObjectAdded=0xc201,
            SonyObjectRemoved=0xc202,
            SonyPropertyChanged=0xc203,
            **product_events
        )
    def _FilesystemType(self, **product_filesystem_types):
        # No Sony-specific filesystem types; pass through to the base class.
        return super(Sony, self)._FilesystemType(
            **product_filesystem_types
        )
    def _SonyDeviceInfo(self):
        # Up to two arrays of supported property codes.
        return Range(0, 2, self._PTPArray(self._PropertyCode))
    def _Visibility(self):
        return Enum(
            self._UInt8,
            Disabled=0x00,
            Enabled=0x01,
            DisplayOnly=0x02,
        )
    def _SonyPropDesc(self):
        # Sony variant of a PTP DevicePropDesc record.
        return Struct(
            'PropertyCode' / self._PropertyCode,
            'DataTypeCode' / self._DataTypeCode,
            'SonyGetSet' / self._UInt8,
            # Bit 0 of SonyGetSet marks the property as writable.
            'GetSet' / Computed(
                lambda x: 'GetSet' if x.SonyGetSet & 0x01 else 'Get'
            ),
            'Visibility' / self._Visibility,
            'FactoryDefaultValue' / self._DataType,
            'CurrentValue' / self._DataType,
            'FormFlag' / self._FormFlag,
            'Form' / self._Form(self._DataType)
        )
    def _SonyAllPropDesc(self):
        # 64-bit count followed by that many property descriptors.
        return PrefixedArray(self._UInt64, self._SonyPropDesc)
    def _ExposureProgramMode(self):
        return Enum(
            self._UInt16,
            default=Pass,
            IntelligentAuto=0x8000,
            SuperiorAuto=0x8001,
            P=0x2,
            A=0x3,
            S=0x4,
            M=0x1,
            MovieP=0x8050,
            MovieA=0x8051,
            MovieS=0x8052,
            MovieM=0x8053,
            # Mode=0x8054,  # TODO: ??
            Panoramic=0x8041,
            Portrait=0x7,
            SportsAction=0x8011,
            Macro=0x8015,
            Landscape=0x8014,
            Sunset=0x8012,
            NightScene=0x8013,
            HandheldTwilight=0x8016,
            NightPortrait=0x8017,
            AntiMotionBlur=0x8018,
        )
    def _AutoFocus(self):
        # No named values known yet; unknown codes pass through unchanged.
        return Enum(
            self._UInt16,
            default=Pass,
        )
    def _PictureEffect(self):
        # NOTE(review): Retro and SoftHighKey share 0x8030 — one of them is
        # probably a typo; confirm against the Sony remote SDK before relying
        # on the SoftHighKey name.
        return Enum(
            self._UInt16,
            default=Pass,
            Off=0x8000,
            ToyCameraNormal=0x8001,
            ToyCameraCool=0x8002,
            ToyCameraWarm=0x8003,
            ToyCameraGreen=0x8004,
            ToyCameraMagenta=0x8005,
            Pop=0x8010,
            PosterizationBW=0x8020,
            PosterizationColor=0x8021,
            Retro=0x8030,
            SoftHighKey=0x8030,
            PartialColorRed=0x8050,
            PartialColorGreen=0x8051,
            PartialColorBlue=0x8052,
            PartialColorYellow=0x8053,
            HighContrastMono=0x8060,
            SoftFocusLow=0x8070,
            SoftFocusMid=0x8071,
            SoftFocusHigh=0x8072,
            HDRPaintingLow=0x8080,
            HDRPaintingMid=0x8081,
            HDRPaintingHigh=0x8082,
            RichToneMono=0x8090,
            MiniatureAuto=0x80a0,
            MiniatureTop=0x80a1,
            MiniatureMiddleHorizontal=0x80a2,
            MiniatureBottom=0x80a3,
            MiniatureRight=0x80a4,
            MiniatureMiddleVertical=0x80a5,
            MiniatureLeft=0x80a6,
            Watercolor=0x80b0,
            IllustrationLow=0x80c0,
            IllustrationMid=0x80c1,
            IllustrationHigh=0x80c2,
        )
    def _StillCaptureMode(self):
        '''DriveMode in Sony terminology'''
        return Enum(
            self._UInt16,
            default=Pass,
            Single=0x0001,
            SelfTimer10s=0x8004,
            SelfTimer2s=0x8005,
            SelfTimer10sContinuous3Images=0x8008,
            SelfTimer10sContinuous5Images=0x8009,
            Continuous=0x8013,
            ContinuousSpeedPriority=0x8014,
            WhiteBalanceBracketLow=0x8018,
            WhiteBalanceBracketHigh=0x8028,
            DRangeOptimizerBracketLow=0x8019,
            DRangoOptimizerBracketHigh=0x8029,
            ContinuousBracket1_0EV3Image=0x8311,
            ContinuousBracket2_0EV3Image=0x8321,
            ContinuousBracket3_0EV3Image=0x8331,
            ContinuousBracket0_3EV3Image=0x8337,
            ContinuousBracket0_5EV3Image=0x8357,
            ContinuousBracket0_7EV3Image=0x8377,
            ContinuousBracket0_3EV5Image=0x8537,
            ContinuousBracket0_5EV5Image=0x8557,
            ContinuousBracket0_7EV5Image=0x8577,
            SingleBracket1_0EV3Image=0x8310,
            SingleBracket2_0EV3Image=0x8320,
            SingleBracket3_0EV3Image=0x8330,
            SingleBracket0_3EV3Image=0x8336,
            SingleBracket0_5EV3Image=0x8356,
            SingleBracket0_7EV3Image=0x8376,
            SingleBracket0_3EV5Image=0x8536,
            SingleBracket0_5EV5Image=0x8556,
            SingleBracket0_7EV5Image=0x8576,
        )
    def _ExposureBiasCompensation(self):
        # Wire value is the bias times 1000 — presumably millistops/EV*1000;
        # TODO confirm the sign handling, since the subcon is an UInt16.
        return ExprAdapter(
            self._UInt16,
            encode=lambda x: x*1000,
            decode=lambda x: x/1000.,
        )
    def _set_endian(self, endian):
        # Once the endianness is known, instantiate the concrete construct
        # objects from the factory methods above.
        logger.debug('Set Sony endianness')
        super(Sony, self)._set_endian(endian)
        self._ExposureProgramMode = self._ExposureProgramMode()
        self._AutoFocus = self._AutoFocus()
        self._PictureEffect = self._PictureEffect()
        self._StillCaptureMode = self._StillCaptureMode()
        self._Visibility = self._Visibility()
        self._SonyPropDesc = self._SonyPropDesc()
        self._SonyDeviceInfo = self._SonyDeviceInfo()
        self._SonyAllPropDesc = self._SonyAllPropDesc()
    def event(self, wait=False):
        '''Check Sony or PTP events
        If `wait` this function is blocking. Otherwise it may return None.
        '''
        evt = super(Sony, self).event(wait=wait)
        return evt
    def sdio_connect(self, step, key1=0, key2=0):
        '''Authentication handshake'''
        ptp = Container(
            OperationCode='SDIOConnect',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[step, key1, key2]
        )
        return self.recv(ptp)
    def sdio_get_ext_device_info(self, version=0xc8):
        '''Sony DeviceInfo'''
        ptp = Container(
            OperationCode='SDIOGetExtDeviceInfo',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[version]
        )
        return self.recv(ptp)
    def get_all_device_prop_data(self):
        # Fetch every Sony property descriptor in one round trip and parse
        # the payload with the SonyAllPropDesc construct.
        ptp = Container(
            OperationCode='GetAllDevicePropData',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[]
        )
        response = self.recv(ptp)
        return self._parse_if_data(response, self._SonyAllPropDesc)
    def set_control_device_A(self, device_property, value_payload):
        # NOTE(review): the distinction between control device "A" and "B"
        # is not documented here; it mirrors Sony's SetControlDeviceA/B
        # operations — confirm semantics against the Sony SDK.
        code = self._code(device_property, self._PropertyCode)
        ptp = Container(
            OperationCode='SetControlDeviceA',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[code]
        )
        response = self.send(ptp, value_payload)
        return response
    def set_control_device_B(self, device_property, value_payload):
        code = self._code(device_property, self._PropertyCode)
        ptp = Container(
            OperationCode='SetControlDeviceB',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[code]
        )
        response = self.send(ptp, value_payload)
        return response
    def get_control_device_desc(self, device_property):
        code = self._code(device_property, self._PropertyCode)
        ptp = Container(
            OperationCode='GetControlDeviceDesc',
            SessionID=self._session,
            TransactionID=self._transaction,
            Parameter=[code]
        )
        response = self.recv(ptp)
        return response
| [
"logging.getLogger",
"construct.Enum",
"construct.PrefixedArray",
"construct.Computed",
"construct.Container",
"construct.ExprAdapter"
] | [((381, 408), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (398, 408), False, 'import logging\n'), ((3703, 3758), 'construct.Enum', 'Enum', (['self._UInt8'], {'Disabled': '(0)', 'Enabled': '(1)', 'DisplayOnly': '(2)'}), '(self._UInt8, Disabled=0, Enabled=1, DisplayOnly=2)\n', (3707, 3758), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((4424, 4471), 'construct.PrefixedArray', 'PrefixedArray', (['self._UInt64', 'self._SonyPropDesc'], {}), '(self._UInt64, self._SonyPropDesc)\n', (4437, 4471), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((4524, 4870), 'construct.Enum', 'Enum', (['self._UInt16'], {'default': 'Pass', 'IntelligentAuto': '(32768)', 'SuperiorAuto': '(32769)', 'P': '(2)', 'A': '(3)', 'S': '(4)', 'M': '(1)', 'MovieP': '(32848)', 'MovieA': '(32849)', 'MovieS': '(32850)', 'MovieM': '(32851)', 'Panoramic': '(32833)', 'Portrait': '(7)', 'SportsAction': '(32785)', 'Macro': '(32789)', 'Landscape': '(32788)', 'Sunset': '(32786)', 'NightScene': '(32787)', 'HandheldTwilight': '(32790)', 'NightPortrait': '(32791)', 'AntiMotionBlur': '(32792)'}), '(self._UInt16, default=Pass, IntelligentAuto=32768, SuperiorAuto=32769,\n P=2, A=3, S=4, M=1, MovieP=32848, MovieA=32849, MovieS=32850, MovieM=\n 32851, Panoramic=32833, Portrait=7, SportsAction=32785, Macro=32789,\n Landscape=32788, Sunset=32786, NightScene=32787, HandheldTwilight=32790,\n NightPortrait=32791, AntiMotionBlur=32792)\n', (4528, 4870), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((5234, 5266), 'construct.Enum', 'Enum', (['self._UInt16'], {'default': 'Pass'}), '(self._UInt16, default=Pass)\n', (5238, 5266), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((5348, 6168), 'construct.Enum', 'Enum', 
(['self._UInt16'], {'default': 'Pass', 'Off': '(32768)', 'ToyCameraNormal': '(32769)', 'ToyCameraCool': '(32770)', 'ToyCameraWarm': '(32771)', 'ToyCameraGreen': '(32772)', 'ToyCameraMagenta': '(32773)', 'Pop': '(32784)', 'PosterizationBW': '(32800)', 'PosterizationColor': '(32801)', 'Retro': '(32816)', 'SoftHighKey': '(32816)', 'PartialColorRed': '(32848)', 'PartialColorGreen': '(32849)', 'PartialColorBlue': '(32850)', 'PartialColorYellow': '(32851)', 'HighContrastMono': '(32864)', 'SoftFocusLow': '(32880)', 'SoftFocusMid': '(32881)', 'SoftFocusHigh': '(32882)', 'HDRPaintingLow': '(32896)', 'HDRPaintingMid': '(32897)', 'HDRPaintingHigh': '(32898)', 'RichToneMono': '(32912)', 'MiniatureAuto': '(32928)', 'MiniatureTop': '(32929)', 'MiniatureMiddleHorizontal': '(32930)', 'MiniatureBottom': '(32931)', 'MiniatureRight': '(32932)', 'MiniatureMiddleVertical': '(32933)', 'MiniatureLeft': '(32934)', 'Watercolor': '(32944)', 'IllustrationLow': '(32960)', 'IllustrationMid': '(32961)', 'IllustrationHigh': '(32962)'}), '(self._UInt16, default=Pass, Off=32768, ToyCameraNormal=32769,\n ToyCameraCool=32770, ToyCameraWarm=32771, ToyCameraGreen=32772,\n ToyCameraMagenta=32773, Pop=32784, PosterizationBW=32800,\n PosterizationColor=32801, Retro=32816, SoftHighKey=32816,\n PartialColorRed=32848, PartialColorGreen=32849, PartialColorBlue=32850,\n PartialColorYellow=32851, HighContrastMono=32864, SoftFocusLow=32880,\n SoftFocusMid=32881, SoftFocusHigh=32882, HDRPaintingLow=32896,\n HDRPaintingMid=32897, HDRPaintingHigh=32898, RichToneMono=32912,\n MiniatureAuto=32928, MiniatureTop=32929, MiniatureMiddleHorizontal=\n 32930, MiniatureBottom=32931, MiniatureRight=32932,\n MiniatureMiddleVertical=32933, MiniatureLeft=32934, Watercolor=32944,\n IllustrationLow=32960, IllustrationMid=32961, IllustrationHigh=32962)\n', (5352, 6168), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((6694, 7695), 'construct.Enum', 'Enum', 
(['self._UInt16'], {'default': 'Pass', 'Single': '(1)', 'SelfTimer10s': '(32772)', 'SelfTimer2s': '(32773)', 'SelfTimer10sContinuous3Images': '(32776)', 'SelfTimer10sContinuous5Images': '(32777)', 'Continuous': '(32787)', 'ContinuousSpeedPriority': '(32788)', 'WhiteBalanceBracketLow': '(32792)', 'WhiteBalanceBracketHigh': '(32808)', 'DRangeOptimizerBracketLow': '(32793)', 'DRangoOptimizerBracketHigh': '(32809)', 'ContinuousBracket1_0EV3Image': '(33553)', 'ContinuousBracket2_0EV3Image': '(33569)', 'ContinuousBracket3_0EV3Image': '(33585)', 'ContinuousBracket0_3EV3Image': '(33591)', 'ContinuousBracket0_5EV3Image': '(33623)', 'ContinuousBracket0_7EV3Image': '(33655)', 'ContinuousBracket0_3EV5Image': '(34103)', 'ContinuousBracket0_5EV5Image': '(34135)', 'ContinuousBracket0_7EV5Image': '(34167)', 'SingleBracket1_0EV3Image': '(33552)', 'SingleBracket2_0EV3Image': '(33568)', 'SingleBracket3_0EV3Image': '(33584)', 'SingleBracket0_3EV3Image': '(33590)', 'SingleBracket0_5EV3Image': '(33622)', 'SingleBracket0_7EV3Image': '(33654)', 'SingleBracket0_3EV5Image': '(34102)', 'SingleBracket0_5EV5Image': '(34134)', 'SingleBracket0_7EV5Image': '(34166)'}), '(self._UInt16, default=Pass, Single=1, SelfTimer10s=32772, SelfTimer2s=\n 32773, SelfTimer10sContinuous3Images=32776,\n SelfTimer10sContinuous5Images=32777, Continuous=32787,\n ContinuousSpeedPriority=32788, WhiteBalanceBracketLow=32792,\n WhiteBalanceBracketHigh=32808, DRangeOptimizerBracketLow=32793,\n DRangoOptimizerBracketHigh=32809, ContinuousBracket1_0EV3Image=33553,\n ContinuousBracket2_0EV3Image=33569, ContinuousBracket3_0EV3Image=33585,\n ContinuousBracket0_3EV3Image=33591, ContinuousBracket0_5EV3Image=33623,\n ContinuousBracket0_7EV3Image=33655, ContinuousBracket0_3EV5Image=34103,\n ContinuousBracket0_5EV5Image=34135, ContinuousBracket0_7EV5Image=34167,\n SingleBracket1_0EV3Image=33552, SingleBracket2_0EV3Image=33568,\n SingleBracket3_0EV3Image=33584, SingleBracket0_3EV3Image=33590,\n SingleBracket0_5EV3Image=33622, 
SingleBracket0_7EV3Image=33654,\n SingleBracket0_3EV5Image=34102, SingleBracket0_5EV5Image=34134,\n SingleBracket0_7EV5Image=34166)\n', (6698, 7695), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((8112, 8198), 'construct.ExprAdapter', 'ExprAdapter', (['self._UInt16'], {'encode': '(lambda x: x * 1000)', 'decode': '(lambda x: x / 1000.0)'}), '(self._UInt16, encode=lambda x: x * 1000, decode=lambda x: x / \n 1000.0)\n', (8123, 8198), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((9116, 9246), 'construct.Container', 'Container', ([], {'OperationCode': '"""SDIOConnect"""', 'SessionID': 'self._session', 'TransactionID': 'self._transaction', 'Parameter': '[step, key1, key2]'}), "(OperationCode='SDIOConnect', SessionID=self._session,\n TransactionID=self._transaction, Parameter=[step, key1, key2])\n", (9125, 9246), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((9430, 9560), 'construct.Container', 'Container', ([], {'OperationCode': '"""SDIOGetExtDeviceInfo"""', 'SessionID': 'self._session', 'TransactionID': 'self._transaction', 'Parameter': '[version]'}), "(OperationCode='SDIOGetExtDeviceInfo', SessionID=self._session,\n TransactionID=self._transaction, Parameter=[version])\n", (9439, 9560), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((9700, 9823), 'construct.Container', 'Container', ([], {'OperationCode': '"""GetAllDevicePropData"""', 'SessionID': 'self._session', 'TransactionID': 'self._transaction', 'Parameter': '[]'}), "(OperationCode='GetAllDevicePropData', SessionID=self._session,\n TransactionID=self._transaction, Parameter=[])\n", (9709, 9823), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((10126, 10250), 
'construct.Container', 'Container', ([], {'OperationCode': '"""SetControlDeviceA"""', 'SessionID': 'self._session', 'TransactionID': 'self._transaction', 'Parameter': '[code]'}), "(OperationCode='SetControlDeviceA', SessionID=self._session,\n TransactionID=self._transaction, Parameter=[code])\n", (10135, 10250), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((10524, 10648), 'construct.Container', 'Container', ([], {'OperationCode': '"""SetControlDeviceB"""', 'SessionID': 'self._session', 'TransactionID': 'self._transaction', 'Parameter': '[code]'}), "(OperationCode='SetControlDeviceB', SessionID=self._session,\n TransactionID=self._transaction, Parameter=[code])\n", (10533, 10648), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((10910, 11037), 'construct.Container', 'Container', ([], {'OperationCode': '"""GetControlDeviceDesc"""', 'SessionID': 'self._session', 'TransactionID': 'self._transaction', 'Parameter': '[code]'}), "(OperationCode='GetControlDeviceDesc', SessionID=self._session,\n TransactionID=self._transaction, Parameter=[code])\n", (10919, 11037), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n'), ((4041, 4100), 'construct.Computed', 'Computed', (["(lambda x: 'GetSet' if x.SonyGetSet & 1 else 'Get')"], {}), "(lambda x: 'GetSet' if x.SonyGetSet & 1 else 'Get')\n", (4049, 4100), False, 'from construct import Container, Struct, Range, Computed, Enum, Array, PrefixedArray, Pass, ExprAdapter\n')] |
import functools
import itertools
from typing import Callable, Dict, Iterable, List, Union
from omegaconf import DictConfig, MissingMandatoryValue, OmegaConf
class InvalidArgumentError(Exception):
    """Raised when an unrecognized or malformed command-line argument is seen."""
class OptionHandler:
    """Describes one command-line option: its spellings, handler and help text."""

    arg: List[str]
    handler: Callable
    help: str

    def __init__(self, arg: Union[str, List[str]], handler: Callable, help='', help_arg='') -> None:
        """Store the option spelling(s), the handler callable and the help texts."""
        if isinstance(arg, list):
            self.arg = arg
        else:
            self.arg = [arg]
        self.handler = handler
        self.help = help or ''
        self._help_arg = help_arg or ''

    @property
    def help_arg(self):
        """Argument text shown in the help listing (defaults to the spellings)."""
        return self._help_arg if self._help_arg else ', '.join(self.arg)

    @help_arg.setter
    def help_arg(self, value):
        self._help_arg = value

    def __repr__(self) -> str:
        return "%s: %s" % (self.arg, self.handler)
class ArgumentParser:
    """Simple argument parsers for alphaconf"""
    # Application metadata used by print_help()
    app_properties: Dict
    # Outcome of the last parse: '', 'ok', 'exit' or a custom "result:*" value
    parse_result: str
    # Parsed configuration fragments: dotlist strings and/or DictConfig objects
    configuration_list: List[Union[str, DictConfig]]
    # Arguments left after a "--" separator
    other_arguments: List[str]
    # Option spelling (e.g. '-h') -> its OptionHandler
    _option_handler_dict: Dict[str, OptionHandler]
    # All registered handlers, in registration order (used for help output)
    _option_handler_list: List[OptionHandler]
    def __init__(self, app_properties: Dict) -> None:
        """Initialze the parser with an application"""
        self.app_properties = app_properties
        self._option_handler_list = []
        self._option_handler_dict = {}
        self.reset()
    # wraps() copies OptionHandler.__init__'s signature/docstring onto this method
    @functools.wraps(OptionHandler.__init__)
    def add_option_handler(self, *a, **kw):
        """Add an option to the parser (passed to OptionHandler)"""
        opt = OptionHandler(*a, **kw)
        self._option_handler_list.append(opt)
        # Register every spelling of the option (e.g. '-h' and '--help')
        for arg in opt.arg:
            self._option_handler_dict[arg] = opt
        return opt
    def reset(self):
        """Reset the parser"""
        self.parse_result = ''
        self.configuration_list = []
        self.other_arguments = []
    def configurations(self) -> Iterable[DictConfig]:
        """List parsed configuration dicts"""
        # Group consecutive entries by type so that runs of dotlist strings are
        # merged into one DictConfig while the overall order is preserved
        for typ, conf in itertools.groupby(self.configuration_list, type):
            if issubclass(typ, DictConfig):
                yield from conf
            else:
                yield OmegaConf.from_dotlist(list(conf))
    def parse_arguments(self, args: List[str]) -> None:
        """Parse arguments
        Parsing rules:
        - "--" indicates end of arguments, rest is put into other arguments
        - "ABC=DEF" is considered as argument ABC and value DEF to load into configuration
        - If the argument starts with a "-", it is handled as an option
        - If the option handling misses an argument, it may raise MissingMandatoryValue
        - The handling is either a special code or a list of "key=value"; see handle_option()
        :param args: List of arguments to parse
        """
        parse_result = ''
        skip = 0  # number of upcoming arguments already consumed by a handler
        for i, arg in enumerate(args):
            if skip:
                skip -= 1
                continue
            # Split argument on '=' sign
            arg_split = arg.split('=', 1)
            value = None
            if len(arg_split) > 1:
                arg, value = arg_split
            # Handle option
            try:
                if value is not None and arg[:1] != '-':
                    # Plain "key=value": goes straight into the configuration
                    result = ["%s=%s" % (arg, value)]
                else:
                    result = self.handle_option(arg, value)
            except MissingMandatoryValue:
                # The handler requires a value: retry with the next argument
                if len(args) <= i + 1 and value is None:
                    raise
                value = args[i + 1]
                skip = 1
                result = self.handle_option(arg, value)
            # Handle result option
            if isinstance(result, DictConfig):
                self.configuration_list.append(result)
            elif isinstance(result, list):
                self.configuration_list.extend(result)
            elif result == "stop":
                # "--" was seen: everything from here on is an "other" argument
                self.other_arguments += args[i:]
                break
            elif result == 'skip':
                pass
            elif result == 'skip_next':
                skip += 1
            elif result == 'exit':
                parse_result = 'exit'
            elif result.startswith('result:') and not parse_result:
                parse_result = result[7:]
            else:
                raise RuntimeError('Invalid result of handling option')
        try:
            self.handle_other_arguments()
        except InvalidArgumentError:
            if not bool(parse_result):
                raise
            # otherwise skip argument errors since we have a result
        self.parse_result = parse_result or 'ok'
    def handle_option(self, arg: str, value: str) -> Union[str, List[str], DictConfig]:
        """Handle an option found in the arguments
        May rise InvalidArgumentError if the argument is unrecognized.
        Special string values that may be returned:
        - stop: Stop parsing the rest of the arguments and send them to other (default for '--')
        - skip: No-op
        - skip_next: Skip parsing the next argument
        - "result:*": Set the parse result value
        :param arg: The argument
        :param value: The name of the argument (or None)
        """
        if arg == '--':
            return 'stop'
        handler = self._option_handler_dict.get(arg)
        if handler:
            return handler.handler(value)
        raise InvalidArgumentError('Unexpected argument: %s' % arg)
    def handle_other_arguments(self):
        """Handle other unparsed or skipped arguments"""
        other = self.other_arguments
        if other and other[0] != '--':
            raise InvalidArgumentError('Unparsed arguments: %s' % other)
    def print_help(self, brief: str = None):
        """Print the help with options
        :param brief: Whether to print a brief summary ("version" for version only)
        """
        app = self.app_properties
        if brief == 'version':
            print(f"{app.get('name')} {app.get('version', '(no-version)')}")
            return
        # Header
        first_line = f"Usage: {app.get('name')} [ options ]"
        description = app.get('description', '')
        short_description = app.get(
            'short_description',
            description if 0 < len(description) < 50 else "",
        )
        if short_description:
            # Put the summary on the same line if it fits, otherwise below it
            first_line += (
                " - " if len(first_line) + len(short_description) < 75 else "\n"
            ) + short_description
        print(first_line)
        if brief:
            return
        # Description
        if description and description != short_description:
            print()
            print(description)
        # Options
        print()
        line_format = '  {arg:32s}  {description}'
        for handler in self._option_handler_list:
            print(line_format.format(arg=handler.help_arg, description=handler.help))
        print(line_format.format(arg='key=value', description="Load a configuration key-value"))
def add_default_option_handlers(parser: ArgumentParser, *, add_help_version=True) -> None:
    """Add default options to the parser
    - help
    - version
    - configuration: show it
    - config-file: load the file
    - select: shortcut
    :param parser: Parser on which to register the default options
    :param add_help_version: Whether to register the help/version options
    """

    # Renamed from "help" to avoid shadowing the builtin of the same name.
    def show_help(brief=None):
        # Print the (possibly brief) help and request program exit.
        parser.print_help(brief)
        return 'exit'

    def load_configuration(value):
        # Load a configuration file whose path is the option value.
        if value is None:
            raise MissingMandatoryValue('Missing filename for configuration file')
        return OmegaConf.load(value)

    def select_option(value):
        # Expand "key=template" into an oc.select interpolation dotlist entry.
        if value is None or '=' not in value:
            raise MissingMandatoryValue('--select requires an argument with an equal sign')
        value_split = value.split('=', 1)
        value = value_split[1] if len(value_split) > 1 else 'default'
        return ["{key}=${{oc.select:base.{key}.{value}}}".format(key=value_split[0], value=value)]

    if add_help_version:
        parser.add_option_handler(
            ['-h', '-?', '--help'],
            lambda _: show_help(),
            help="Print help message",
            help_arg='-h, --help',
        )
        parser.add_option_handler(
            ['-V', '--version'],
            lambda _: show_help('version'),
            help="Print the version",
        )
    parser.add_option_handler(
        ['-C', '--configuration'],
        lambda _: "result:show_configuration",
        help="Show the configuration",
    )
    parser.add_option_handler(
        ['-f', '--config', '--config-file'],
        load_configuration,
        help="Load configuration from file",
        help_arg='-f, --config[-file] path',
    )
    parser.add_option_handler(
        ['--select'],
        select_option,
        help="Shortcut to select a base configuration",
        help_arg="--select key=base_template",
    )
| [
"omegaconf.OmegaConf.load",
"omegaconf.MissingMandatoryValue",
"itertools.groupby",
"functools.wraps"
] | [((1564, 1603), 'functools.wraps', 'functools.wraps', (['OptionHandler.__init__'], {}), '(OptionHandler.__init__)\n', (1579, 1603), False, 'import functools\n'), ((2177, 2225), 'itertools.groupby', 'itertools.groupby', (['self.configuration_list', 'type'], {}), '(self.configuration_list, type)\n', (2194, 2225), False, 'import itertools\n'), ((7650, 7671), 'omegaconf.OmegaConf.load', 'OmegaConf.load', (['value'], {}), '(value)\n', (7664, 7671), False, 'from omegaconf import DictConfig, MissingMandatoryValue, OmegaConf\n'), ((7570, 7634), 'omegaconf.MissingMandatoryValue', 'MissingMandatoryValue', (['"""Missing filename for configuration file"""'], {}), "('Missing filename for configuration file')\n", (7591, 7634), False, 'from omegaconf import DictConfig, MissingMandatoryValue, OmegaConf\n'), ((7767, 7840), 'omegaconf.MissingMandatoryValue', 'MissingMandatoryValue', (['"""--select requires an argument with an equal sign"""'], {}), "('--select requires an argument with an equal sign')\n", (7788, 7840), False, 'from omegaconf import DictConfig, MissingMandatoryValue, OmegaConf\n')] |
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class PropertiesConfig(AppConfig):
    """Django application configuration for the properties app."""
    name = 'apartacho.properties'  # dotted path of the app package
    verbose_name = _("Properties")  # human-readable, translatable app name
| [
"django.utils.translation.gettext_lazy"
] | [((179, 194), 'django.utils.translation.gettext_lazy', '_', (['"""Properties"""'], {}), "('Properties')\n", (180, 194), True, 'from django.utils.translation import gettext_lazy as _\n')] |
from analyzer import matcher, common_pb2
from tests import *
# Build the analyzer field-type filter used by all tests below:
# only the UK NHS number matcher is enabled.
fieldType = common_pb2.FieldTypes()
fieldType.name = common_pb2.FieldTypesEnum.Name(common_pb2.UK_NHS)
types = [fieldType]
def test_valid_uk_nhs():
    """Each valid NHS number must yield exactly one match covering the full text."""
    for num in ('401-023-2137', '221 395 1837', '0032698674'):
        results = match.analyze_text(num, types)
        assert len(results) == 1
        assert results[0].text == num
def test_invalid_uk_nhs():
    """A number that fails the NHS checksum must produce no matches."""
    results = match.analyze_text('401-023-2138', types)
    assert len(results) == 0
"analyzer.common_pb2.FieldTypesEnum.Name",
"analyzer.common_pb2.FieldTypes"
] | [((74, 97), 'analyzer.common_pb2.FieldTypes', 'common_pb2.FieldTypes', ([], {}), '()\n', (95, 97), False, 'from analyzer import matcher, common_pb2\n'), ((115, 164), 'analyzer.common_pb2.FieldTypesEnum.Name', 'common_pb2.FieldTypesEnum.Name', (['common_pb2.UK_NHS'], {}), '(common_pb2.UK_NHS)\n', (145, 164), False, 'from analyzer import matcher, common_pb2\n')] |
import unittest
import imp
import sys
import shapy
class TestSettings(unittest.TestCase):
    """Verify that a registered settings module both overrides and extends defaults."""

    def setUp(self):
        # Create a throwaway module and expose it via sys.modules so it can
        # be imported by name.
        self.settings = imp.new_module('test_settings')
        sys.modules.update(test_settings=self.settings)
        self.settings.UNITS = 'override'
        self.settings.NEW_OPTION = 'new'

    def test_settings_override(self):
        shapy.register_settings('test_settings')
        from shapy import settings
        self.assertEqual(settings.UNITS, 'override')
        self.assertEqual(getattr(settings, 'NEW_OPTION', None), 'new')
"shapy.register_settings",
"sys.modules.update",
"imp.new_module"
] | [((137, 168), 'imp.new_module', 'imp.new_module', (['"""test_settings"""'], {}), "('test_settings')\n", (151, 168), False, 'import imp\n'), ((177, 224), 'sys.modules.update', 'sys.modules.update', ([], {'test_settings': 'self.settings'}), '(test_settings=self.settings)\n', (195, 224), False, 'import sys\n'), ((376, 416), 'shapy.register_settings', 'shapy.register_settings', (['"""test_settings"""'], {}), "('test_settings')\n", (399, 416), False, 'import shapy\n')] |
#This program displays a simple math quiz
import random
def main():
    """Ask one random addition question and grade the user's answer."""
    addend_a = random.randint(1, 350)
    addend_b = random.randint(1, 350)
    correct_answer = calculate_correct_answer(addend_a, addend_b)
    print('\t', addend_a, '\n+\t', addend_b)
    question = int(input('Enter the answer : '))
    check_answer(question, correct_answer)
def calculate_correct_answer(num1, num2):
    """Return the sum of the two operands."""
    return num1 + num2
def check_answer(question, correct_answer):
    """Compare the user's answer with the correct one and print the verdict.

    :param question: The answer the user entered
    :param correct_answer: The expected sum
    """
    if question == correct_answer:
        print('\nCongratulations!', correct_answer, 'is correct')
    else:
        # Fixed typo in the user-facing message: "answwer" -> "answer".
        print('\nWrong! the correct answer =', correct_answer)
main()
| [
"random.randint"
] | [((83, 105), 'random.randint', 'random.randint', (['(1)', '(350)'], {}), '(1, 350)\n', (97, 105), False, 'import random\n'), ((119, 141), 'random.randint', 'random.randint', (['(1)', '(350)'], {}), '(1, 350)\n', (133, 141), False, 'import random\n')] |
from urllib import request
from urllib import parse
# Fetch a WeChat article page; the long query string carries session/auth keys.
head = request.urlopen("https://mp.weixin.qq.com/s?__biz=MzU3NTc0NzE0Mw==&mid=2247483739&idx=1&sn"
                       "=e62a61120c73ebb93029a2897ca60ccd&chksm"
                       "=fd1f2473ca68ad658b9b0388e6fc122951812ba08ada771f3acc4f1f25953de92df837f21795&mpshare=1&scene"
                       "=1&srcid=&sharer_sharetime=1584410683814&sharer_shareid=b14cc42c8dff2f85ec3b42ef9ff07140&key"
                       "=e5d129685831bfd623675c86aca8c5d71f7039e1528f4d357e1ee960fa45ff25c931951e9e391cc21362eb047c4be8befc73a3c7bdb580dcd9b09cf0e793e3fa3f628004c4516333e0ad741f5b771893&ascene=1&uin=MzkzMDAwOTYz&devicetype=Windows+10&version=62080085&lang=zh_CN&exportkey=AVLGCY0spN1AMCYHOT1pGLc%3D&pass_ticket=JeeLLBclQmIly5Ufx2oeHDjvEaxGUIAkQ3OplCJz9sTRvWul2fEP5%2FfN6FvDrxlX")
print("读取数据", head.read())
# NOTE: read() above consumed the whole response body, so the following
# readline()/readlines() calls will return empty results.
print("按行读取数据", head.readline())
print("多行读取数据", head.readlines())
print("读取返回的信息:", head.getcode())
request.urlretrieve(
    "https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1581932221441&di"
    "=09a3b2baa29cfe07c21337a30e5c4080&imgtype=jpg&src=http%3A%2F%2Fwww.rfuchina.com%2Fuploadfile%2F2019%2F0417"
    "%2F20190417091923688.jpg",
    "01/01.jpg")
# Save the image at the given URL to the local file 01/01.jpg
params = {"name": "张三", "age": "18", "greet": "hello world"}
result = parse.urlencode(params)
print(result)
# URL-encode the parameters (percent-encodes the non-ASCII values)
| [
"urllib.parse.urlencode",
"urllib.request.urlopen",
"urllib.request.urlretrieve"
] | [((60, 739), 'urllib.request.urlopen', 'request.urlopen', (['"""https://mp.weixin.qq.com/s?__biz=MzU3NTc0NzE0Mw==&mid=2247483739&idx=1&sn=e62a61120c73ebb93029a2897ca60ccd&chksm=fd1f2473ca68ad658b9b0388e6fc122951812ba08ada771f3acc4f1f25953de92df837f21795&mpshare=1&scene=1&srcid=&sharer_sharetime=1584410683814&sharer_shareid=b14cc42c8dff2f85ec3b42ef9ff07140&key=e5d129685831bfd623675c86aca8c5d71f7039e1528f4d357e1ee960fa45ff25c931951e9e391cc21362eb047c4be8befc73a3c7bdb580dcd9b09cf0e793e3fa3f628004c4516333e0ad741f5b771893&ascene=1&uin=MzkzMDAwOTYz&devicetype=Windows+10&version=62080085&lang=zh_CN&exportkey=AVLGCY0spN1AMCYHOT1pGLc%3D&pass_ticket=JeeLLBclQmIly5Ufx2oeHDjvEaxGUIAkQ3OplCJz9sTRvWul2fEP5%2FfN6FvDrxlX"""'], {}), "(\n 'https://mp.weixin.qq.com/s?__biz=MzU3NTc0NzE0Mw==&mid=2247483739&idx=1&sn=e62a61120c73ebb93029a2897ca60ccd&chksm=fd1f2473ca68ad658b9b0388e6fc122951812ba08ada771f3acc4f1f25953de92df837f21795&mpshare=1&scene=1&srcid=&sharer_sharetime=1584410683814&sharer_shareid=b14cc42c8dff2f85ec3b42ef9ff07140&key=e5d129685831bfd623675c86aca8c5d71f7039e1528f4d357e1ee960fa45ff25c931951e9e391cc21362eb047c4be8befc73a3c7bdb580dcd9b09cf0e793e3fa3f628004c4516333e0ad741f5b771893&ascene=1&uin=MzkzMDAwOTYz&devicetype=Windows+10&version=62080085&lang=zh_CN&exportkey=AVLGCY0spN1AMCYHOT1pGLc%3D&pass_ticket=JeeLLBclQmIly5Ufx2oeHDjvEaxGUIAkQ3OplCJz9sTRvWul2fEP5%2FfN6FvDrxlX'\n )\n", (75, 739), False, 'from urllib import request\n'), ((962, 1222), 'urllib.request.urlretrieve', 'request.urlretrieve', (['"""https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1581932221441&di=09a3b2baa29cfe07c21337a30e5c4080&imgtype=jpg&src=http%3A%2F%2Fwww.rfuchina.com%2Fuploadfile%2F2019%2F0417%2F20190417091923688.jpg"""', '"""01/01.jpg"""'], {}), "(\n 'https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1581932221441&di=09a3b2baa29cfe07c21337a30e5c4080&imgtype=jpg&src=http%3A%2F%2Fwww.rfuchina.com%2Fuploadfile%2F2019%2F0417%2F20190417091923688.jpg'\n , 
'01/01.jpg')\n", (981, 1222), False, 'from urllib import request\n'), ((1320, 1343), 'urllib.parse.urlencode', 'parse.urlencode', (['params'], {}), '(params)\n', (1335, 1343), False, 'from urllib import parse\n')] |
import unittest
if __name__ == "__main__":
    # Finds all tests in submodules ending in *tests.py and runs them
    # (discovery starts from the current working directory).
    suite = unittest.TestLoader().discover('.', pattern = "*tests.py")
    unittest.TextTestRunner().run(suite)
| [
"unittest.TextTestRunner",
"unittest.TestLoader"
] | [((126, 147), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (145, 147), False, 'import unittest\n'), ((189, 214), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (212, 214), False, 'import unittest\n')] |
"""
Pattern matching utilities.
"""
import re
from functools import wraps
from typing import Iterable
def match_subgroup(sequence, pattern):
    """Yield the named-group dict for each element matching *pattern* from its start."""
    for item in sequence:
        hit = re.match(pattern, item)
        if hit is None:
            continue
        yield hit.groupdict()
def match_group(choices: Iterable) -> str:
    """Join the alternatives into a single regex alternation pattern."""
    return "|".join(choices)
def add_regex_start_end(pattern_function):
    """Decorator that anchors the wrapped function's pattern with '^' and '$'."""
    @wraps(pattern_function)
    def func_wrapper(*args, **kwargs):
        return '^' + pattern_function(*args, **kwargs) + '$'
    return func_wrapper
def tokenize_group_pattern(name, choices):
    """Build a named-group regex token whose pattern is an alternation of *choices*."""
    return tokenize_pattern(name, match_group(choices))
def tokenize_pattern(name, pattern):
    """Wrap *pattern* in a named capture group called *name*."""
    return '(?P<{}>{})'.format(name, pattern)
| [
"re.match",
"functools.wraps"
] | [((545, 568), 'functools.wraps', 'wraps', (['pattern_function'], {}), '(pattern_function)\n', (550, 568), False, 'from functools import wraps\n'), ((267, 293), 're.match', 're.match', (['pattern', 'element'], {}), '(pattern, element)\n', (275, 293), False, 'import re\n')] |
'''
qc command tests
'''
import mock
import unittest
from cirrus.quality_control import main, run_linters, build_parser
class QCCommandTests(unittest.TestCase):
    """
    test coverage for qc command module
    """
    def test_build_parser(self):
        """test build_parser call"""
        # CLI values are used as-is when the qc configuration is empty...
        args = [
            '--include-files', 'file1', 'file2',
            '--exclude-files', 'file3', 'file4',
            '--exclude-dirs', 'dir1',
            '--linters', 'Pep8', 'Pyflakes'
        ]
        qc_conf = {}
        opts = build_parser(args, qc_conf)
        self.assertEqual(opts.include_files, ['file1', 'file2'])
        self.assertEqual(opts.exclude_files, ['file3', 'file4'])
        self.assertEqual(opts.exclude_dirs, ['dir1'])
        self.assertEqual(opts.linters, ['Pep8', 'Pyflakes'])
        # ...and the qc configuration supplies defaults for options not on the CLI.
        qc_conf = {'exclude_dirs': ['dir3', 'dir4']}
        args = [
            '--include-files', 'file1', 'file2',
            '--exclude-files', 'file3', 'file4',
            '--linters', 'Pep8', 'Pyflakes'
        ]
        opts = build_parser(args, qc_conf)
        self.assertEqual(opts.exclude_dirs, ['dir3', 'dir4'])

    # NOTE: the bottom-most @mock.patch supplies the first mock argument.
    @mock.patch("cirrus.quality_control.load_configuration")
    @mock.patch("cirrus.quality_control.FACTORY")
    def test_run_linters(self, mock_factory, mock_conf):
        """test pass case"""
        # A linter reporting no errors must not raise; check() runs once per linter.
        mock_linter = mock.Mock()
        mock_linter.test_mode = False
        mock_linter.check = mock.Mock()
        mock_linter.errors = None
        mock_factory.return_value = mock_linter
        mock_factory.registry = {
            'Pep8': None,
            'Pylint': None
        }
        opts = mock.Mock()
        opts.test_only = False
        opts.linters = ['Pep8', 'Pylint']
        run_linters(opts, mock_conf, {})
        self.assertTrue(mock_linter.check.called)
        self.assertEqual(mock_linter.check.call_count, 2)

    @mock.patch("cirrus.quality_control.load_configuration")
    @mock.patch("cirrus.quality_control.FACTORY")
    def test_run_linters_fail(self, mock_factory, mock_conf):
        """test fail case"""
        # A linter reporting errors must make run_linters raise RuntimeError.
        mock_linter = mock.Mock()
        mock_linter.test_mode = False
        mock_linter.check = mock.Mock()
        mock_linter.errors = 100
        mock_factory.return_value = mock_linter
        mock_factory.registry = {
            'Pep8': None,
            'Pylint': None
        }
        opts = mock.Mock()
        opts.test_only = False
        opts.linters = ['Pep8', 'Pylint']
        self.assertRaises(RuntimeError, run_linters, opts, mock_conf, {})

    @mock.patch("cirrus.quality_control.load_configuration")
    @mock.patch("cirrus.quality_control.FACTORY")
    def test_bad_linter_name(self, mock_factory, mock_conf):
        """test fail case"""
        # A linter name missing from the factory registry must raise RuntimeError.
        mock_linter = mock.Mock()
        mock_linter.test_mode = False
        mock_linter.check = mock.Mock()
        mock_linter.errors = 100
        mock_factory.return_value = mock_linter
        mock_factory.registry = {
            'Pep8': None,
            'Pylint': None
        }
        opts = mock.Mock()
        opts.test_only = False
        opts.linters = ['WOMP']
        self.assertRaises(RuntimeError, run_linters, opts, mock_conf, {})

    @mock.patch("cirrus.quality_control.load_configuration")
    @mock.patch("cirrus.quality_control.build_parser")
    @mock.patch("cirrus.quality_control.run_linters")
    @mock.patch("cirrus.quality_control.get_diff_files")
    def test_main(self, mock_diffs, mock_rl, mock_bp, mock_conf):
        # Without --only-changes, main() runs the linters unconditionally.
        mock_qc = {}
        mock_conf.quality_control = mock.Mock(return_value=mock_qc)
        mock_opts = mock.Mock()
        mock_opts.only_changes = False
        mock_bp.return_value = mock_opts
        main()
        self.assertTrue(mock_rl.called)

    @mock.patch("cirrus.quality_control.load_configuration")
    @mock.patch("cirrus.quality_control.build_parser")
    @mock.patch("cirrus.quality_control.run_linters")
    @mock.patch("cirrus.quality_control.get_diff_files")
    def test_main_diffs(self, mock_diffs, mock_rl, mock_bp, mock_conf):
        # With --only-changes, main() restricts include_files to the git diff,
        # and exits if there are no changed files at all.
        mock_qc = {}
        mock_conf.quality_control = mock.Mock(return_value=mock_qc)
        mock_opts = mock.Mock()
        mock_opts.only_changes = True
        mock_opts.incude_files = None
        mock_bp.return_value = mock_opts
        mock_diffs.return_value = ['DIFF1.py', 'DIFF2.py']
        main()
        self.assertTrue(mock_rl.called)
        self.assertEqual(mock_opts.include_files, mock_diffs.return_value)
        mock_diffs.return_value = []
        self.assertRaises(SystemExit, main)
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| [
"mock.patch",
"cirrus.quality_control.main",
"cirrus.quality_control.build_parser",
"mock.Mock",
"cirrus.quality_control.run_linters",
"unittest.main"
] | [((1141, 1196), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.load_configuration"""'], {}), "('cirrus.quality_control.load_configuration')\n", (1151, 1196), False, 'import mock\n'), ((1202, 1246), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.FACTORY"""'], {}), "('cirrus.quality_control.FACTORY')\n", (1212, 1246), False, 'import mock\n'), ((1880, 1935), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.load_configuration"""'], {}), "('cirrus.quality_control.load_configuration')\n", (1890, 1935), False, 'import mock\n'), ((1941, 1985), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.FACTORY"""'], {}), "('cirrus.quality_control.FACTORY')\n", (1951, 1985), False, 'import mock\n'), ((2548, 2603), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.load_configuration"""'], {}), "('cirrus.quality_control.load_configuration')\n", (2558, 2603), False, 'import mock\n'), ((2609, 2653), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.FACTORY"""'], {}), "('cirrus.quality_control.FACTORY')\n", (2619, 2653), False, 'import mock\n'), ((3205, 3260), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.load_configuration"""'], {}), "('cirrus.quality_control.load_configuration')\n", (3215, 3260), False, 'import mock\n'), ((3266, 3315), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.build_parser"""'], {}), "('cirrus.quality_control.build_parser')\n", (3276, 3315), False, 'import mock\n'), ((3321, 3369), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.run_linters"""'], {}), "('cirrus.quality_control.run_linters')\n", (3331, 3369), False, 'import mock\n'), ((3375, 3426), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.get_diff_files"""'], {}), "('cirrus.quality_control.get_diff_files')\n", (3385, 3426), False, 'import mock\n'), ((3756, 3811), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.load_configuration"""'], {}), "('cirrus.quality_control.load_configuration')\n", (3766, 3811), False, 
'import mock\n'), ((3817, 3866), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.build_parser"""'], {}), "('cirrus.quality_control.build_parser')\n", (3827, 3866), False, 'import mock\n'), ((3872, 3920), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.run_linters"""'], {}), "('cirrus.quality_control.run_linters')\n", (3882, 3920), False, 'import mock\n'), ((3926, 3977), 'mock.patch', 'mock.patch', (['"""cirrus.quality_control.get_diff_files"""'], {}), "('cirrus.quality_control.get_diff_files')\n", (3936, 3977), False, 'import mock\n'), ((4593, 4608), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4606, 4608), False, 'import unittest\n'), ((534, 561), 'cirrus.quality_control.build_parser', 'build_parser', (['args', 'qc_conf'], {}), '(args, qc_conf)\n', (546, 561), False, 'from cirrus.quality_control import main, run_linters, build_parser\n'), ((1045, 1072), 'cirrus.quality_control.build_parser', 'build_parser', (['args', 'qc_conf'], {}), '(args, qc_conf)\n', (1057, 1072), False, 'from cirrus.quality_control import main, run_linters, build_parser\n'), ((1355, 1366), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1364, 1366), False, 'import mock\n'), ((1433, 1444), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1442, 1444), False, 'import mock\n'), ((1640, 1651), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1649, 1651), False, 'import mock\n'), ((1733, 1765), 'cirrus.quality_control.run_linters', 'run_linters', (['opts', 'mock_conf', '{}'], {}), '(opts, mock_conf, {})\n', (1744, 1765), False, 'from cirrus.quality_control import main, run_linters, build_parser\n'), ((2099, 2110), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2108, 2110), False, 'import mock\n'), ((2177, 2188), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2186, 2188), False, 'import mock\n'), ((2383, 2394), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2392, 2394), False, 'import mock\n'), ((2766, 2777), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2775, 2777), False, 'import mock\n'), 
((2844, 2855), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2853, 2855), False, 'import mock\n'), ((3050, 3061), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3059, 3061), False, 'import mock\n'), ((3550, 3581), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'mock_qc'}), '(return_value=mock_qc)\n', (3559, 3581), False, 'import mock\n'), ((3602, 3613), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3611, 3613), False, 'import mock\n'), ((3703, 3709), 'cirrus.quality_control.main', 'main', ([], {}), '()\n', (3707, 3709), False, 'from cirrus.quality_control import main, run_linters, build_parser\n'), ((4107, 4138), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'mock_qc'}), '(return_value=mock_qc)\n', (4116, 4138), False, 'import mock\n'), ((4159, 4170), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4168, 4170), False, 'import mock\n'), ((4356, 4362), 'cirrus.quality_control.main', 'main', ([], {}), '()\n', (4360, 4362), False, 'from cirrus.quality_control import main, run_linters, build_parser\n')] |
# -*- coding: utf-8 -*-
import requests
import json
import tensorflow as tf
import sys,os
father_path = os.path.join(os.getcwd())
print(father_path, "==father path==")
def find_bert(father_path):
    """Recursively search *father_path* for a directory named "BERT".

    :param father_path: Root directory of the search (POSIX-style path)
    :return: Path of the first "BERT" directory found, or "" if none exists
    """
    # The root itself may already be the BERT package.
    if father_path.split("/")[-1] == "BERT":
        return father_path
    entries = os.listdir(father_path)
    # A direct child named "BERT" wins over deeper matches (matches the
    # original's "break on direct child" behavior).
    if "BERT" in entries:
        return os.path.join(father_path, "BERT")
    for fi in entries:
        candidate = os.path.join(father_path, fi)
        if os.path.isdir(candidate):
            # Bug fix: the recursive result was previously discarded, so a
            # nested BERT directory could never be reported.
            found = find_bert(candidate)
            if found:
                return found
    return ""
# Make the BERT package importable: locate it and put it (plus its
# t2t_bert subpackage) on sys.path before the project imports below.
bert_path = find_bert(father_path)
t2t_bert_path = os.path.join(bert_path, "t2t_bert")
sys.path.extend([bert_path, t2t_bert_path])
print(sys.path)
from distributed_single_sentence_classification import tf_serving_data_prepare as single_sent_data_prepare
from distributed_pair_sentence_classification import tf_serving_data_prepare as pair_sent_data_prepare
flags = tf.flags
FLAGS = flags.FLAGS

# Command-line flags.  The original definitions all carried the same
# copy-pasted help text ("The config json file corresponding to the
# pre-trained BERT model."), which was wrong for every flag; the help
# strings below describe how each flag is actually used by this script.

## Required parameters
flags.DEFINE_string(
    "buckets", None,
    "Base directory (e.g. OSS bucket); vocab, input and output paths are joined onto it.")

flags.DEFINE_string(
    "vocab", None,
    "Vocabulary file path, relative to --buckets.")

flags.DEFINE_bool(
    "do_lower_case", True,
    "Whether the tokenizer should lower-case the input text.")

flags.DEFINE_string(
    "url", None,
    "Host of the TensorFlow Serving endpoint used to build the REST prediction URL.")

flags.DEFINE_string(
    "port", None,
    "Port of the TensorFlow Serving endpoint.")

flags.DEFINE_string(
    "model_name", None,
    "Served model name used in the prediction URL.")

flags.DEFINE_string(
    "signature_name", None,
    "Signature name of the served model.")

flags.DEFINE_string(
    "keyword_path", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "background_label", "正常",
    "Label used for the background/negative class.")

flags.DEFINE_string(
    "label_dict", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "segment_id_type", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "add_word_path", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "delete_word_path", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "input_data", "",
    "Input corpus path, relative to --buckets.")

flags.DEFINE_string(
    "output_path", "",
    "Output file path, relative to --buckets; the serving response is written here.")

flags.DEFINE_string(
    "task_type", "",
    "Task type: 'single_sentence_classification' or 'pair_sentence_classification'.")

flags.DEFINE_string(
    "model_type", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "tokenizer", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "with_char", "",
    "Passed through to the serving data preparation (tf_serving_data_prepare).")

flags.DEFINE_string(
    "versions", "",
    "Model version used in the prediction URL.")

flags.DEFINE_integer(
    "max_seq_length", 64,
    "Maximum sequence length used when preparing model inputs.")
def main(_):
    """Prepare a feed dict for the configured task, call TF Serving and save the response.

    :param _: unused (tf.app.run passes argv)
    :raises ValueError: if --task_type is not a supported task
    """
    # Both branches differ only in which data-preparation module is used.
    if FLAGS.task_type == "pair_sentence_classification":
        data_prepare = pair_sent_data_prepare
    elif FLAGS.task_type == "single_sentence_classification":
        data_prepare = single_sent_data_prepare
    else:
        # Previously an unknown task_type crashed later with a NameError on
        # feed_dict/output_path; fail early with a clear message instead.
        raise ValueError("unsupported task_type: %s" % FLAGS.task_type)
    vocab_path = os.path.join(FLAGS.buckets, FLAGS.vocab)
    corpus_path = os.path.join(FLAGS.buckets, FLAGS.input_data)
    print(corpus_path, vocab_path)
    feed_dict = data_prepare.get_feeddict(FLAGS, vocab_path, corpus_path)
    output_path = os.path.join(FLAGS.buckets, FLAGS.output_path)
    results = requests.post("http://%s:%s/v1/models/%s/versions/%s:predict" % (FLAGS.url,
                                            FLAGS.port, FLAGS.model_name,
                                            FLAGS.versions),
                json=feed_dict)
    # Decode once and always show the serving response.
    response_text = results.content.decode()
    print(response_text)
    try:
        with tf.gfile.Open(output_path, "w") as fwobj:
            # Store the raw response body as a JSON string.
            json.dump(response_text, fwobj)
    except Exception:
        # Best effort: keep going if the output file cannot be written; the
        # response was already printed above.  (Was a bare "except:" with a
        # dead "predict_info" local.)
        pass
if __name__ == "__main__":
    # tf.app.run parses the flags and then invokes main().
    tf.app.run()
| [
"tensorflow.gfile.Open",
"requests.post",
"os.listdir",
"distributed_pair_sentence_classification.tf_serving_data_prepare.get_feeddict",
"distributed_single_sentence_classification.tf_serving_data_prepare.get_feeddict",
"os.path.join",
"os.getcwd",
"sys.path.extend",
"json.dump",
"tensorflow.app.r... | [((591, 626), 'os.path.join', 'os.path.join', (['bert_path', '"""t2t_bert"""'], {}), "(bert_path, 't2t_bert')\n", (603, 626), False, 'import sys, os\n'), ((627, 670), 'sys.path.extend', 'sys.path.extend', (['[bert_path, t2t_bert_path]'], {}), '([bert_path, t2t_bert_path])\n', (642, 670), False, 'import sys, os\n'), ((119, 130), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (128, 130), False, 'import sys, os\n'), ((292, 315), 'os.listdir', 'os.listdir', (['father_path'], {}), '(father_path)\n', (302, 315), False, 'import sys, os\n'), ((4953, 5095), 'requests.post', 'requests.post', (["('http://%s:%s/v1/models/%s/versions/%s:predict' % (FLAGS.url, FLAGS.port,\n FLAGS.model_name, FLAGS.versions))"], {'json': 'feed_dict'}), "('http://%s:%s/v1/models/%s/versions/%s:predict' % (FLAGS.url,\n FLAGS.port, FLAGS.model_name, FLAGS.versions), json=feed_dict)\n", (4966, 5095), False, 'import requests\n'), ((5390, 5402), 'tensorflow.app.run', 'tf.app.run', ([], {}), '()\n', (5400, 5402), True, 'import tensorflow as tf\n'), ((4303, 4343), 'os.path.join', 'os.path.join', (['FLAGS.buckets', 'FLAGS.vocab'], {}), '(FLAGS.buckets, FLAGS.vocab)\n', (4315, 4343), False, 'import sys, os\n'), ((4360, 4405), 'os.path.join', 'os.path.join', (['FLAGS.buckets', 'FLAGS.input_data'], {}), '(FLAGS.buckets, FLAGS.input_data)\n', (4372, 4405), False, 'import sys, os\n'), ((4453, 4520), 'distributed_pair_sentence_classification.tf_serving_data_prepare.get_feeddict', 'pair_sent_data_prepare.get_feeddict', (['FLAGS', 'vocab_path', 'corpus_path'], {}), '(FLAGS, vocab_path, corpus_path)\n', (4488, 4520), True, 'from distributed_pair_sentence_classification import tf_serving_data_prepare as pair_sent_data_prepare\n'), ((4537, 4583), 'os.path.join', 'os.path.join', (['FLAGS.buckets', 'FLAGS.output_path'], {}), '(FLAGS.buckets, FLAGS.output_path)\n', (4549, 4583), False, 'import sys, os\n'), ((353, 382), 'os.path.join', 'os.path.join', (['father_path', 'fi'], {}), '(father_path, 
fi)\n', (365, 382), False, 'import sys, os\n'), ((4658, 4698), 'os.path.join', 'os.path.join', (['FLAGS.buckets', 'FLAGS.vocab'], {}), '(FLAGS.buckets, FLAGS.vocab)\n', (4670, 4698), False, 'import sys, os\n'), ((4715, 4760), 'os.path.join', 'os.path.join', (['FLAGS.buckets', 'FLAGS.input_data'], {}), '(FLAGS.buckets, FLAGS.input_data)\n', (4727, 4760), False, 'import sys, os\n'), ((4808, 4877), 'distributed_single_sentence_classification.tf_serving_data_prepare.get_feeddict', 'single_sent_data_prepare.get_feeddict', (['FLAGS', 'vocab_path', 'corpus_path'], {}), '(FLAGS, vocab_path, corpus_path)\n', (4845, 4877), True, 'from distributed_single_sentence_classification import tf_serving_data_prepare as single_sent_data_prepare\n'), ((4894, 4940), 'os.path.join', 'os.path.join', (['FLAGS.buckets', 'FLAGS.output_path'], {}), '(FLAGS.buckets, FLAGS.output_path)\n', (4906, 4940), False, 'import sys, os\n'), ((5153, 5184), 'tensorflow.gfile.Open', 'tf.gfile.Open', (['output_path', '"""w"""'], {}), "(output_path, 'w')\n", (5166, 5184), True, 'import tensorflow as tf\n'), ((5237, 5263), 'json.dump', 'json.dump', (['pred_lst', 'fwobj'], {}), '(pred_lst, fwobj)\n', (5246, 5263), False, 'import json\n'), ((420, 449), 'os.path.join', 'os.path.join', (['father_path', 'fi'], {}), '(father_path, fi)\n', (432, 449), False, 'import sys, os\n'), ((466, 495), 'os.path.join', 'os.path.join', (['father_path', 'fi'], {}), '(father_path, fi)\n', (478, 495), False, 'import sys, os\n')] |
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
import torch
class Generator(nn.Module):
    """Conditional GAN generator.

    Maps a (noise, class label) pair to a flat vector of
    ``int(np.prod(shape))`` features in [-1, 1] (Tanh output).
    """

    def __init__(self, configs, shape):
        super(Generator, self).__init__()
        # learned embedding of the class label, one row per class
        self.label_emb = nn.Embedding(configs.n_classes, configs.n_classes)
        self.shape = shape

        def block(in_feat, out_feat, normalize=True):
            # Linear -> (optional BatchNorm) -> LeakyReLU
            stage = [nn.Linear(in_feat, out_feat)]
            if normalize:
                stage.append(nn.BatchNorm1d(out_feat, 0.8))
            stage.append(nn.LeakyReLU(0.2, inplace=True))
            return stage

        layers = []
        layers.extend(block(configs.latent_dim + configs.n_classes, 128, normalize=False))
        layers.extend(block(128, 256))
        layers.extend(block(256, 512))
        layers.extend(block(512, 1024))
        layers.append(nn.Linear(1024, int(np.prod(shape))))
        layers.append(nn.Tanh())
        self.model = nn.Sequential(*layers)

    def forward(self, noise, labels):
        """Concatenate the label embedding with the noise and run the MLP."""
        conditioned = torch.cat((self.label_emb(labels), noise), -1)
        generated = self.model(conditioned)
        # flatten to (batch, features); output is already flat, so a no-op
        return generated.view(generated.size(0), -1)
class Discriminator(nn.Module):
    """Conditional GAN discriminator.

    Scores a (sample, class label) pair with a single unbounded logit.
    """

    def __init__(self, configs, shape):
        super(Discriminator, self).__init__()
        self.label_embedding = nn.Embedding(configs.n_classes, configs.n_classes)
        # flattened sample size plus the width of the label embedding
        in_features = configs.n_classes + int(np.prod(shape))
        self.model = nn.Sequential(
            nn.Linear(in_features, 512),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(512, 512),
            nn.Dropout(0.4),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(512, 512),
            nn.Dropout(0.4),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Linear(512, 1),
        )

    def forward(self, input, labels):
        """Flatten the sample, append the label embedding and score it."""
        flat = input.view(input.size(0), -1)
        d_in = torch.cat((flat, self.label_embedding(labels)), -1)
        return self.model(d_in)
| [
"numpy.prod",
"torch.nn.Dropout",
"torch.nn.Tanh",
"torch.nn.LeakyReLU",
"torch.nn.BatchNorm1d",
"torch.nn.Linear",
"torch.nn.Embedding"
] | [((223, 273), 'torch.nn.Embedding', 'nn.Embedding', (['configs.n_classes', 'configs.n_classes'], {}), '(configs.n_classes, configs.n_classes)\n', (235, 273), True, 'import torch.nn as nn\n'), ((1308, 1358), 'torch.nn.Embedding', 'nn.Embedding', (['configs.n_classes', 'configs.n_classes'], {}), '(configs.n_classes, configs.n_classes)\n', (1320, 1358), True, 'import torch.nn as nn\n'), ((852, 861), 'torch.nn.Tanh', 'nn.Tanh', ([], {}), '()\n', (859, 861), True, 'import torch.nn as nn\n'), ((1477, 1508), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (1489, 1508), True, 'import torch.nn as nn\n'), ((1522, 1541), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(512)'], {}), '(512, 512)\n', (1531, 1541), True, 'import torch.nn as nn\n'), ((1555, 1570), 'torch.nn.Dropout', 'nn.Dropout', (['(0.4)'], {}), '(0.4)\n', (1565, 1570), True, 'import torch.nn as nn\n'), ((1584, 1615), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (1596, 1615), True, 'import torch.nn as nn\n'), ((1629, 1648), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(512)'], {}), '(512, 512)\n', (1638, 1648), True, 'import torch.nn as nn\n'), ((1662, 1677), 'torch.nn.Dropout', 'nn.Dropout', (['(0.4)'], {}), '(0.4)\n', (1672, 1677), True, 'import torch.nn as nn\n'), ((1691, 1722), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (1703, 1722), True, 'import torch.nn as nn\n'), ((1736, 1753), 'torch.nn.Linear', 'nn.Linear', (['(512)', '(1)'], {}), '(512, 1)\n', (1745, 1753), True, 'import torch.nn as nn\n'), ((378, 406), 'torch.nn.Linear', 'nn.Linear', (['in_feat', 'out_feat'], {}), '(in_feat, out_feat)\n', (387, 406), True, 'import torch.nn as nn\n'), ((521, 552), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', (['(0.2)'], {'inplace': '(True)'}), '(0.2, inplace=True)\n', (533, 552), True, 'import torch.nn as nn\n'), ((464, 493), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', 
(['out_feat', '(0.8)'], {}), '(out_feat, 0.8)\n', (478, 493), True, 'import torch.nn as nn\n'), ((822, 836), 'numpy.prod', 'np.prod', (['shape'], {}), '(shape)\n', (829, 836), True, 'import numpy as np\n'), ((1442, 1456), 'numpy.prod', 'np.prod', (['shape'], {}), '(shape)\n', (1449, 1456), True, 'import numpy as np\n')] |
import BotDecidesPos
import numpy as np
class Collision_check:
    """Predicts whether two bots, each moving in a straight line toward
    its own target, will arrive near their path intersection at about
    the same time (i.e. collide).

    NOTE(review): ``Engine`` is not imported in this file — presumably
    provided by ``BotDecidesPos``; confirm where it comes from.
    """
    def __init__(self):
        # (m, n): most recently computed path-intersection point
        self.m=0.0
        self.n=0.0
    def load_data(self,bot):
        """Return (speed, pos_x, pos_y, target_x, target_y) for a bot."""
        tgx,tgy=bot.getTarget()
        mpx,mpy=bot.getPos()
        spd=bot.getSpeed()
        return spd,mpx,mpy,tgx,tgy
    def checkCollision(self,bot1,bot2):
        """Return True when both bots reach the intersection of their
        straight-line paths within ~1 time unit of each other.

        Side effect: stores the intersection point in (self.m, self.n).
        """
        eg = Engine()
        sp1,x,y,x1,y1=self.load_data(bot1)
        sp2,a,b,a1,b1=self.load_data(bot2)
        # full path lengths from current position to target
        p=eg.findDist(x,y,x1,y1)
        q=eg.findDist(a,b,a1,b1)
        #v1=[sp1*(x-x1)/p,sp1*(y-y1)/p]
        #v2 = [sp2 * (a - a1) / q, sp2 * (b - b1) / q]
        #Ax=C, which is the matrix from of the equation on the path of the vehicle
        # line coefficients for both paths, solved for the intersection
        s=[[x-x1,y-y1],[a-a1,b-b1]]
        t=[y*x1-x*y1,b*a1-a*b1]
        self.m,self.n=eg.eq_StraightLine(s,t)
        # distance of each bot to the intersection point
        p1=eg.findDist(x,y,self.m,self.n)
        q1=eg.findDist(a,b,self.m,self.n)
        # time for each bot to reach the intersection at its speed
        eta1=p1/sp1;
        eta2=q1/sp2;
        # collide when the arrival times differ by less than 1 unit
        if np.absolute(eta1-eta2)<1 :
            return True
        else:
            return False
    def getCollisionIndex(self):
        """Return the last computed intersection point (m, n)."""
        return self.m,self.n
    def setCollisionIndex(self,a,b):
        """Overwrite the stored intersection point."""
        self.m=a
        self.n=b
| [
"numpy.absolute"
] | [((958, 982), 'numpy.absolute', 'np.absolute', (['(eta1 - eta2)'], {}), '(eta1 - eta2)\n', (969, 982), True, 'import numpy as np\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Dataset conversion script
Author: <NAME>
"""
import json
import argparse
import clevercsv
# Month abbreviation -> zero-padded month number. Built once at import
# time instead of rebuilding the dict on every call.
_MONTH_INDEX = {
    "Jan": "01", "Feb": "02", "Mar": "03", "Apr": "04",
    "May": "05", "Jun": "06", "Jul": "07", "Aug": "08",
    "Sep": "09", "Oct": "10", "Nov": "11", "Dec": "12",
}


def month2index(month):
    """Return the two-digit month number ("01".."12") for an English
    three-letter month abbreviation such as "Jan".

    Raises KeyError for unknown abbreviations (same as before).
    """
    return _MONTH_INDEX[month]
def parse_args(argv=None):
    """Parse command-line arguments for the conversion script.

    Parameters
    ----------
    argv : list of str, optional
        Argument list to parse. Defaults to ``sys.argv[1:]`` exactly as
        before; accepting an explicit list makes the function testable.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("input_file", help="File to convert")
    parser.add_argument("output_file", help="File to write to")
    return parser.parse_args(argv)
def main():
    """Convert the raw air-passenger CSV into a JSON time-series file.

    Reads the CSV named on the command line, keeps only JFK rows, sorts
    them chronologically and writes a single-series JSON document.
    """
    args = parse_args()
    with open(args.input_file, "r", newline="", encoding="ascii") as fp:
        # empty quote/escape characters: treat the file as plain
        # comma-separated text with no quoting
        reader = clevercsv.DictReader(
            fp, delimiter=",", quotechar="", escapechar=""
        )
        items = list(reader)
    for it in items:
        # normalise each row to a "YYYY-MM" timestamp and an int value
        it["time"] = f"{it['Year']}-{month2index(it['Month'])}"
        it["value"] = int(it["Total Passengers"])
    jfks = [it for it in items if it["Airport Code"] == "JFK"]
    pairs = [(it["time"], it["value"]) for it in jfks]
    # with this date format string sort is date sort
    pairs.sort()
    name = "jfk_passengers"
    longname = "JFK Passengers"
    time_fmt = "%Y-%m"
    time = [p[0] for p in pairs]
    values = [p[1] for p in pairs]
    series = [{"label": "Number of Passengers", "type": "int", "raw": values}]
    data = {
        "name": name,
        "longname": longname,
        "n_obs": len(time),
        "n_dim": len(series),
        "time": {
            "type": "string",
            "format": time_fmt,
            "index": list(range(len(time))),
            "raw": time,
        },
        "series": series,
    }
    with open(args.output_file, "w") as fp:
        json.dump(data, fp, indent="\t")
if __name__ == "__main__":
    main()
| [
"json.dump",
"clevercsv.DictReader",
"argparse.ArgumentParser"
] | [((481, 506), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (504, 506), False, 'import argparse\n'), ((793, 861), 'clevercsv.DictReader', 'clevercsv.DictReader', (['fp'], {'delimiter': '""","""', 'quotechar': '""""""', 'escapechar': '""""""'}), "(fp, delimiter=',', quotechar='', escapechar='')\n", (813, 861), False, 'import clevercsv\n'), ((1841, 1873), 'json.dump', 'json.dump', (['data', 'fp'], {'indent': '"""\t"""'}), "(data, fp, indent='\\t')\n", (1850, 1873), False, 'import json\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
# Data directories expected by the CTP market-data recorders.
dirs = ['/tmp/CTP_L2data/', '/tmp/CTP_tradedata/']
for d in dirs:
    # exist_ok avoids the check-then-create race (TOCTOU) of the
    # original isdir()/makedirs() pair and is a no-op when the
    # directory already exists.
    os.makedirs(d, exist_ok=True)
| [
"os.path.isdir",
"os.makedirs"
] | [((135, 151), 'os.path.isdir', 'os.path.isdir', (['d'], {}), '(d)\n', (148, 151), False, 'import os\n'), ((161, 175), 'os.makedirs', 'os.makedirs', (['d'], {}), '(d)\n', (172, 175), False, 'import os\n')] |
# Generated by Django 2.2.13 on 2020-08-15 07:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (2020-08-15): adds census income
    and low-income-status columns to the CensusSubdivision model.
    Every new column is nullable, so no data backfill is required."""

    dependencies = [
        ('pipeline', '0053_auto_20200813_0521'),
    ]

    operations = [
        # All fields below are added to censussubdivision and allow NULL.
        migrations.AddField(
            model_name='censussubdivision',
            name='low_income_status_0_to_17',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='low_income_status_18_to_64',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='low_income_status_65_and_over',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='median_total_household_income',
            field=models.FloatField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='median_total_household_income_one_person',
            field=models.FloatField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='median_total_household_income_two_or_more_person',
            field=models.FloatField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_100000_to_149999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_10000_to_19999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_150000_and_over',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_20000_to_29999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_30000_to_39999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_40000_to_49999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_50000_to_59999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_60000_to_69999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_70000_to_79999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_80000_to_89999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_90000_to_99999',
            field=models.IntegerField(null=True),
        ),
        migrations.AddField(
            model_name='censussubdivision',
            name='total_income_under_10000',
            field=models.IntegerField(null=True),
        ),
    ]
| [
"django.db.models.FloatField",
"django.db.models.IntegerField"
] | [((366, 396), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (385, 396), False, 'from django.db import migrations, models\n'), ((547, 577), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (566, 577), False, 'from django.db import migrations, models\n'), ((731, 761), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (750, 761), False, 'from django.db import migrations, models\n'), ((915, 943), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (932, 943), False, 'from django.db import migrations, models\n'), ((1108, 1136), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (1125, 1136), False, 'from django.db import migrations, models\n'), ((1309, 1337), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)'}), '(null=True)\n', (1326, 1337), False, 'from django.db import migrations, models\n'), ((1491, 1521), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (1510, 1521), False, 'from django.db import migrations, models\n'), ((1673, 1703), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (1692, 1703), False, 'from django.db import migrations, models\n'), ((1856, 1886), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (1875, 1886), False, 'from django.db import migrations, models\n'), ((2038, 2068), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2057, 2068), False, 'from django.db import migrations, models\n'), ((2220, 2250), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2239, 2250), False, 'from django.db import migrations, models\n'), ((2402, 2432), 
'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2421, 2432), False, 'from django.db import migrations, models\n'), ((2584, 2614), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2603, 2614), False, 'from django.db import migrations, models\n'), ((2766, 2796), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2785, 2796), False, 'from django.db import migrations, models\n'), ((2948, 2978), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (2967, 2978), False, 'from django.db import migrations, models\n'), ((3130, 3160), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (3149, 3160), False, 'from django.db import migrations, models\n'), ((3312, 3342), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (3331, 3342), False, 'from django.db import migrations, models\n'), ((3491, 3521), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (3510, 3521), False, 'from django.db import migrations, models\n')] |
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(x, w, b):
    """Logistic function 1 / (1 + e^(b - w*x)).

    Note ``b`` enters with the opposite sign of the usual ``w*x + b``
    convention; with b = 0 (as used below) this makes no difference.
    Works element-wise on numpy arrays.
    """
    return 1.0 / (1.0 + np.exp(b - w * x))
# Plot the sigmoid for three slopes to show how w steepens the curve.
x = np.arange(-5.0, 5.0, 0.1)
y1 = sigmoid(x, 0.5, 0)  # shallow slope (w = 0.5)
y2 = sigmoid(x, 1, 0)    # reference slope (w = 1)
y3 = sigmoid(x, 2, 0)    # steep slope (w = 2)
plt.plot(x, y1, "r", linestyle='--')
plt.plot(x, y2, 'g')
plt.plot(x, y3, 'b', linestyle='--')
# dotted vertical marker at x = 0, where every curve crosses 0.5
plt.plot([0, 0], [1.0, 0.0], ":")
plt.title("sigmoid function")
plt.show()
| [
"matplotlib.pyplot.plot",
"numpy.exp",
"matplotlib.pyplot.title",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((114, 139), 'numpy.arange', 'np.arange', (['(-5.0)', '(5.0)', '(0.1)'], {}), '(-5.0, 5.0, 0.1)\n', (123, 139), True, 'import numpy as np\n'), ((209, 245), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y1', '"""r"""'], {'linestyle': '"""--"""'}), "(x, y1, 'r', linestyle='--')\n", (217, 245), True, 'import matplotlib.pyplot as plt\n'), ((246, 266), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y2', '"""g"""'], {}), "(x, y2, 'g')\n", (254, 266), True, 'import matplotlib.pyplot as plt\n'), ((267, 303), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y3', '"""b"""'], {'linestyle': '"""--"""'}), "(x, y3, 'b', linestyle='--')\n", (275, 303), True, 'import matplotlib.pyplot as plt\n'), ((304, 337), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 0]', '[1.0, 0.0]', '""":"""'], {}), "([0, 0], [1.0, 0.0], ':')\n", (312, 337), True, 'import matplotlib.pyplot as plt\n'), ((338, 367), 'matplotlib.pyplot.title', 'plt.title', (['"""sigmoid function"""'], {}), "('sigmoid function')\n", (347, 367), True, 'import matplotlib.pyplot as plt\n'), ((368, 378), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (376, 378), True, 'import matplotlib.pyplot as plt\n'), ((92, 110), 'numpy.exp', 'np.exp', (['(-x * w + b)'], {}), '(-x * w + b)\n', (98, 110), True, 'import numpy as np\n')] |
import torch
from torch import nn
from torch.utils.data import DataLoader
from nn_model import NeuralNetwork, FrankWolfeDataset
# Wrap the prebuilt datasets in shuffling loaders (batch size 128).
train_dataset = FrankWolfeDataset("train_dataset")
test_dataset = FrankWolfeDataset("test_dataset")
train_dataloader = DataLoader(train_dataset, batch_size=128, shuffle=True)
test_dataloader = DataLoader(test_dataset, batch_size=128, shuffle=True)
# Prefer the GPU when one is available.
device = "cuda" if torch.cuda.is_available() else "cpu"
print("Using {} device".format(device))
# Model, MSE loss and Adam optimizer for the regression task.
model = NeuralNetwork().to(device)
loss_fn = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
def train(dataloader, model, loss_fn, optimizer):
    """Run one training epoch over ``dataloader``.

    For every batch: forward pass, loss, backprop, optimizer step.
    Progress is printed every 100 batches. Uses the module-level
    ``device`` for tensor placement.
    """
    n_samples = len(dataloader.dataset)
    model.train()
    for batch_idx, (features, targets) in enumerate(dataloader):
        features = features.to(device)
        targets = targets.to(device)
        # forward pass and loss
        predictions = model(features.float())
        batch_loss = loss_fn(predictions, targets.float())
        # backward pass and parameter update
        optimizer.zero_grad()
        batch_loss.backward()
        optimizer.step()
        if batch_idx % 100 == 0:
            loss, current = batch_loss.item(), batch_idx * len(features)
            print(f"Loss: {loss:>7f} [{current:>5f}/{n_samples:>5d}]")
def test(dataloader, model, loss_fn):
    """Evaluate ``model`` on ``dataloader`` and print the mean batch loss.

    Runs with gradients disabled and the model in eval mode. Uses the
    module-level ``device`` for tensor placement.
    """
    num_batches = len(dataloader)
    model.eval()
    accumulated = 0
    with torch.no_grad():
        for features, targets in dataloader:
            features, targets = features.to(device), targets.to(device)
            outputs = model(features.float())
            accumulated += loss_fn(outputs, targets.float()).item()
    test_loss = accumulated / num_batches
    print(f"Test Avg loss: {test_loss:>8f} \n")
# Alternate training and evaluation for a fixed number of epochs, then
# persist the learned weights to disk.
epochs = 150
for t in range(epochs):
    print(f"Epoch {t+1}\n-------------------------------")
    train(train_dataloader, model, loss_fn, optimizer)
    test(test_dataloader, model, loss_fn)
print("Done!")
torch.save(model.state_dict(), "frank_wolfe_nn")
| [
"nn_model.FrankWolfeDataset",
"torch.nn.MSELoss",
"torch.cuda.is_available",
"torch.utils.data.DataLoader",
"torch.no_grad",
"nn_model.NeuralNetwork"
] | [((146, 180), 'nn_model.FrankWolfeDataset', 'FrankWolfeDataset', (['"""train_dataset"""'], {}), "('train_dataset')\n", (163, 180), False, 'from nn_model import NeuralNetwork, FrankWolfeDataset\n'), ((196, 229), 'nn_model.FrankWolfeDataset', 'FrankWolfeDataset', (['"""test_dataset"""'], {}), "('test_dataset')\n", (213, 229), False, 'from nn_model import NeuralNetwork, FrankWolfeDataset\n'), ((250, 305), 'torch.utils.data.DataLoader', 'DataLoader', (['train_dataset'], {'batch_size': '(128)', 'shuffle': '(True)'}), '(train_dataset, batch_size=128, shuffle=True)\n', (260, 305), False, 'from torch.utils.data import DataLoader\n'), ((324, 378), 'torch.utils.data.DataLoader', 'DataLoader', (['test_dataset'], {'batch_size': '(128)', 'shuffle': '(True)'}), '(test_dataset, batch_size=128, shuffle=True)\n', (334, 378), False, 'from torch.utils.data import DataLoader\n'), ((524, 536), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (534, 536), False, 'from torch import nn\n'), ((399, 424), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (422, 424), False, 'import torch\n'), ((486, 501), 'nn_model.NeuralNetwork', 'NeuralNetwork', ([], {}), '()\n', (499, 501), False, 'from nn_model import NeuralNetwork, FrankWolfeDataset\n'), ((1216, 1231), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1229, 1231), False, 'import torch\n')] |
from setuptools import setup
# Read the README.md file
# The README is shipped verbatim as the PyPI long description.
with open('README.md') as file_handle:
    file_content = file_handle.read()
# Package metadata consumed by pip / PyPI.
setup(
    name='kamalsql',
    packages=['kamalsql'],
    version='1.0.0',
    license='MIT',
    description='A simple Python wrapper for your MySQL needs.',
    long_description=file_content,
    long_description_content_type="text/markdown",
    author='<NAME>',
    author_email='<EMAIL>',
    url='https://github.com/KamalDGRT/kamalsql',
    keywords=['Simple', 'Functional', 'Dependable'],
    install_requires=[
        'mysql-connector',
        'tabulate',
    ],
    classifiers=[
        'Development Status :: 1 - Planning',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Database',
        'Topic :: Software Development :: Libraries',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
    ],
)
| [
"setuptools.setup"
] | [((134, 912), 'setuptools.setup', 'setup', ([], {'name': '"""kamalsql"""', 'packages': "['kamalsql']", 'version': '"""1.0.0"""', 'license': '"""MIT"""', 'description': '"""A simple Python wrapper for your MySQL needs."""', 'long_description': 'file_content', 'long_description_content_type': '"""text/markdown"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'url': '"""https://github.com/KamalDGRT/kamalsql"""', 'keywords': "['Simple', 'Functional', 'Dependable']", 'install_requires': "['mysql-connector', 'tabulate']", 'classifiers': "['Development Status :: 1 - Planning', 'Intended Audience :: Developers',\n 'Natural Language :: English',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Database', 'Topic :: Software Development :: Libraries',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3']"}), "(name='kamalsql', packages=['kamalsql'], version='1.0.0', license=\n 'MIT', description='A simple Python wrapper for your MySQL needs.',\n long_description=file_content, long_description_content_type=\n 'text/markdown', author='<NAME>', author_email='<EMAIL>', url=\n 'https://github.com/KamalDGRT/kamalsql', keywords=['Simple',\n 'Functional', 'Dependable'], install_requires=['mysql-connector',\n 'tabulate'], classifiers=['Development Status :: 1 - Planning',\n 'Intended Audience :: Developers', 'Natural Language :: English',\n 'Topic :: Software Development :: Libraries :: Python Modules',\n 'Topic :: Database', 'Topic :: Software Development :: Libraries',\n 'License :: OSI Approved :: MIT License',\n 'Programming Language :: Python :: 3'])\n", (139, 912), False, 'from setuptools import setup\n')] |
import pickle
import tempfile
import os
from .XGBoostParser import parse_model as parse_model_xgb
def load_model(filename, feature_names=None):
    """Unpickle an XGBoost model from ``filename`` and parse its dumped
    text form into tree structures.

    Parameters
    ----------
    filename : str
        Path to a pickled xgboost (sklearn-API) model.
    feature_names : unused; kept for backward compatibility — the names
        are taken from the booster itself.

    Returns the trees parsed by ``parse_model_xgb``.
    """
    with open(filename, 'rb') as f:
        model = pickle.load(f)
    # mkstemp returns an OPEN file descriptor; the original discarded it,
    # leaking one fd per call. Close it right away — only the path is
    # needed for dump_model.
    fd, tmp = tempfile.mkstemp(text=True)
    os.close(fd)
    try:
        model.get_booster().dump_model(tmp)
        trees = parse_model_xgb(tmp, model.get_booster().feature_names)
    finally:
        # remove the temp file even if dumping or parsing fails
        os.remove(tmp)
    return trees
| [
"pickle.load",
"tempfile.mkstemp",
"os.remove"
] | [((229, 256), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'text': '(True)'}), '(text=True)\n', (245, 256), False, 'import tempfile\n'), ((372, 386), 'os.remove', 'os.remove', (['tmp'], {}), '(tmp)\n', (381, 386), False, 'import os\n'), ((200, 214), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (211, 214), False, 'import pickle\n')] |
import random
# If the input list is empty or contains just one element, it is already sorted. Return it.
# If not, divide the list of numbers into two roughly equal parts.
# Sort each part recursively using the merge sort algorithm. You'll get back two sorted lists.
# Merge the two sorted lists to get a single sorted list
# Hand-written fixtures: each maps an input list to its expected sorted
# output (mixed signs, duplicates, already/reverse sorted, empty,
# singleton, and a list whose single element is itself a list).
test0 = {'input_list': [0, 2, 3, 6, -5, 8],
         'output_list': [-5, 0, 2, 3, 6, 8]}
test1 = {'input_list': [3, 5, 6, 8, 9, 10, 99],
         'output_list': [3, 5, 6, 8, 9, 10, 99]}
test2 = {'input_list': [99, 10, 9, 8, 6, 5, 3],
         'output_list': [3, 5, 6, 8, 9, 10, 99]}
test3 = {'input_list': [5, -12, 2, 6, 1, 23, 7, 7, -12, 6, 12, 1, -243, 1, 0],
         'output_list': [-243, -12, -12, 0, 1, 1, 1, 2, 5, 6, 6, 7, 7, 12, 23]}
test4 = {'input_list': [],
         'output_list': []}
test5 = {'input_list': [14],
         'output_list': [14]}
test6 = {'input_list': [[42, 42, 42, 42, 42, 42, 42]],
         'output_list': [[42, 42, 42, 42, 42, 42, 42]]}
# Randomized fixture: a shuffled permutation of 0..99 must sort back to
# the identity ordering.
in_list = list(range(100))
out_list = list(range(100))
random.shuffle(in_list)
test7 = {'input_list': in_list,
         'output_list': out_list}
tests = [test0, test1, test2, test3, test4, test5, test6, test7]
def merge_sort(nums):
    """Sort ``nums`` in place in ascending order using merge sort.

    Splits the list in two, recursively sorts each half, then merges the
    two sorted halves back into ``nums``. O(n log n) time, O(n) extra
    space. Returns ``nums`` as a convenience.

    Fixes over the original: removes the per-call debug prints, drops
    the unused ``left_sorted``/``right_sorted`` bindings (the recursion
    sorts in place and returned None), and uses ``<=`` in the merge so
    the sort is stable.
    """
    if len(nums) > 1:
        mid = len(nums) // 2
        left_nums = nums[:mid]
        right_nums = nums[mid:]
        # sort each half recursively (in place)
        merge_sort(left_nums)
        merge_sort(right_nums)
        # merge the two sorted halves back into nums
        i = 0  # cursor into left_nums
        j = 0  # cursor into right_nums
        k = 0  # cursor into nums
        while i < len(left_nums) and j < len(right_nums):
            # <= keeps equal elements in their original order (stable)
            if left_nums[i] <= right_nums[j]:
                nums[k] = left_nums[i]
                i += 1
            else:
                nums[k] = right_nums[j]
                j += 1
            k += 1
        # copy any leftover elements from whichever half remains
        while i < len(left_nums):
            nums[k] = left_nums[i]
            i += 1
            k += 1
        while j < len(right_nums):
            nums[k] = right_nums[j]
            j += 1
            k += 1
    return nums
merge_sort(test0['input_list']) | [
"random.shuffle"
] | [((1049, 1072), 'random.shuffle', 'random.shuffle', (['in_list'], {}), '(in_list)\n', (1063, 1072), False, 'import random\n')] |
from typing import Union
from estruturas.excecoes import EstruturaException
from estruturas.no import Node
class ListaEncadeada:
    """Singly linked list with 1-based positions.

    Nodes come from ``estruturas.no.Node``; every failure is reported by
    raising ``EstruturaException`` (messages are in Portuguese and are
    part of the runtime behaviour, so they are left untouched).
    """
    def __init__(self) -> None:
        # __head: first Node of the list (None when empty)
        self.__head = None
        # __tamanho: number of stored elements
        self.__tamanho = 0
    @property
    def head(self) -> object:
        """First node of the list (None when the list is empty)."""
        return self.__head
    @head.setter
    def head(self, novoHead: object = None) -> None:
        # NOTE(review): replacing the head directly does not update
        # __tamanho — confirm callers keep the size consistent.
        self.__head = novoHead
    @property
    def tamanho(self) -> int:
        """Number of elements currently stored."""
        return self.__tamanho
    def vazia(self) -> bool:
        """Return True when the list is empty.

        NOTE(review): returns None (not False) for a non-empty list;
        callers must rely on truthiness only.
        """
        if self.__tamanho == 0:
            return True
    def inserir(self, posicao: int, dado: object) -> None:
        """Insert ``dado`` at 1-based position ``posicao``.

        Raises EstruturaException for an out-of-range, non-integer or
        non-positive position.
        """
        try:
            assert posicao > 0
            if self.vazia():
                # empty list: only position 1 is a valid insertion point
                if posicao != 1:
                    raise EstruturaException(
                        'lista vazia, insira apenas na posição 1')
                self.__head = Node(dado)
                self.__tamanho += 1
                return
            if posicao == 1:
                # front insertion: the new node becomes the head
                no = Node(dado)
                no.prox = self.__head
                self.__head = no
                self.__tamanho += 1
                return
            # walk to the node just before the requested position
            cursor = self.__head
            contador = 1
            while (contador < posicao - 1) and (cursor != None):
                cursor = cursor.prox
                contador += 1
            if cursor == None:
                raise EstruturaException('A posição é inválida para inserção')
            # splice the new node in after the cursor
            no = Node(dado)
            no.prox = cursor.prox
            cursor.prox = no
            self.__tamanho += 1
        except TypeError:
            raise EstruturaException('A posição deve ser um valor inteiro')
        except AssertionError:
            raise EstruturaException('Posição negativa não é valida')
        except:
            raise
    def remover(self, posicao: int) -> None:
        """Remove the element at 1-based position ``posicao``.

        Raises EstruturaException when the list is empty or the position
        is out of range, non-integer or non-positive.
        """
        try:
            assert posicao > 0
            if self.vazia():
                raise EstruturaException('A lista está vazia')
            # walk to the target node, remembering the previous node so
            # it can be re-linked around the removed one
            cursor: object = self.__head
            contador: int = 1
            while (contador <= posicao - 1) and (cursor != None):
                anterior = cursor
                cursor = cursor.prox
                contador += 1
            if cursor == None:
                raise EstruturaException('Posição inválida para remoção')
            if posicao == 1:
                self.__head = cursor.prox
            else:
                anterior.prox = cursor.prox
            self.__tamanho -= 1
        except TypeError:
            raise EstruturaException('A posição deve ser um valor inteiro')
        except AssertionError:
            raise EstruturaException('Posição negativa não é valida')
        except:
            raise
    def elemento(self, posicao: int) -> object:
        """Return the payload stored at 1-based position ``posicao``.

        NOTE(review): for posicao > 1 the loop only returns when
        ``contador == posicao - 1`` before incrementing — confirm this
        matches the intended position semantics.
        """
        try:
            assert posicao > 0
            if self.vazia():
                raise EstruturaException('A lista está vazia')
            cursor = self.__head
            contador = 1
            while (contador <= posicao - 1) and (cursor != None):
                cursor = cursor.prox
                if contador == posicao - 1:
                    return cursor.dado
                else:
                    contador += 1
            if cursor == None:
                raise EstruturaException('Posição inválida para busca')
            if posicao == 1:
                return self.__head.dado
        except TypeError:
            raise EstruturaException('A posição deve ser um valor inteiro')
        except AssertionError:
            raise EstruturaException('Posição negativa não é valida')
        except:
            raise
    def busca_por(self, chave: str, dado: Union[str, int]) -> list:
        """Return every stored payload whose attribute ``chave`` equals
        ``dado``.

        Raises EstruturaException when the list is empty or when no
        element matches.
        """
        try:
            if self.vazia():
                raise EstruturaException('A lista está vazia')
            lista: list = []
            cursor: object = self.__head
            contador: int = 1
            # ocorre counts how many matches were collected
            ocorre: int = 0
            while cursor != None:
                if getattr(cursor.dado, chave) == dado:
                    lista.append(cursor.dado)
                    ocorre += 1
                else:
                    contador += 1
                cursor = cursor.prox
            if ocorre == 0:
                raise EstruturaException('Pokemon não registrado na pokedex')
            else:
                return lista
        except TypeError:
            raise EstruturaException('A posição deve ser um valor inteiro')
        except AssertionError:
            raise EstruturaException('Posição negativa não é valida')
        except:
            raise
    def ordenar_por(self, chave: str) -> None:
        """Sort the list in place with bubble sort, comparing node
        payloads by the attribute named ``chave`` (ascending).

        NOTE(review): the front-swap branch assigns ``self.start``,
        which is not read anywhere else in this class — it likely was
        meant to update the head reference; confirm against callers.
        """
        for i in range(self.__tamanho-1):
            atual: object = self.__head
            seguinte: object = atual.prox
            anterior: object = None
            while seguinte:
                if getattr(atual.dado, chave) > getattr(seguinte.dado, chave):
                    if anterior == None:
                        # swap at the front of the list
                        anterior = atual.prox
                        seguinte = seguinte.prox
                        anterior.prox = atual
                        atual.prox = seguinte
                        self.start = anterior
                    else:
                        # swap two interior nodes
                        temp: object = seguinte
                        seguinte = seguinte.prox
                        anterior.prox = atual.prox
                        anterior = temp
                        temp.prox = atual
                        atual.prox = seguinte
                else:
                    # already ordered: advance all three cursors
                    anterior = atual
                    atual = seguinte
                    seguinte = seguinte.prox
            i = i+1
    def __str__(self) -> str:
        """Human-readable form: ``Lista: [a, b, c]``."""
        saida: str = 'Lista: ['
        cursor: object = self.__head
        while cursor != None:
            saida += f'{str(cursor.dado)}'
            cursor = cursor.prox
            if cursor != None:
                saida += ', '
        saida += ']'
        return saida
"estruturas.no.Node",
"estruturas.excecoes.EstruturaException"
] | [((1445, 1455), 'estruturas.no.Node', 'Node', (['dado'], {}), '(dado)\n', (1449, 1455), False, 'from estruturas.no import Node\n'), ((862, 872), 'estruturas.no.Node', 'Node', (['dado'], {}), '(dado)\n', (866, 872), False, 'from estruturas.no import Node\n'), ((983, 993), 'estruturas.no.Node', 'Node', (['dado'], {}), '(dado)\n', (987, 993), False, 'from estruturas.no import Node\n'), ((1370, 1426), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A posição é inválida para inserção"""'], {}), "('A posição é inválida para inserção')\n", (1388, 1426), False, 'from estruturas.excecoes import EstruturaException\n'), ((1596, 1653), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A posição deve ser um valor inteiro"""'], {}), "('A posição deve ser um valor inteiro')\n", (1614, 1653), False, 'from estruturas.excecoes import EstruturaException\n'), ((1703, 1754), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""Posição negativa não é valida"""'], {}), "('Posição negativa não é valida')\n", (1721, 1754), False, 'from estruturas.excecoes import EstruturaException\n'), ((1931, 1971), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A lista está vazia"""'], {}), "('A lista está vazia')\n", (1949, 1971), False, 'from estruturas.excecoes import EstruturaException\n'), ((2266, 2317), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""Posição inválida para remoção"""'], {}), "('Posição inválida para remoção')\n", (2284, 2317), False, 'from estruturas.excecoes import EstruturaException\n'), ((2531, 2588), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A posição deve ser um valor inteiro"""'], {}), "('A posição deve ser um valor inteiro')\n", (2549, 2588), False, 'from estruturas.excecoes import EstruturaException\n'), ((2638, 2689), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""Posição negativa não é valida"""'], {}), "('Posição 
negativa não é valida')\n", (2656, 2689), False, 'from estruturas.excecoes import EstruturaException\n'), ((2869, 2909), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A lista está vazia"""'], {}), "('A lista está vazia')\n", (2887, 2909), False, 'from estruturas.excecoes import EstruturaException\n'), ((3265, 3314), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""Posição inválida para busca"""'], {}), "('Posição inválida para busca')\n", (3283, 3314), False, 'from estruturas.excecoes import EstruturaException\n'), ((3430, 3487), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A posição deve ser um valor inteiro"""'], {}), "('A posição deve ser um valor inteiro')\n", (3448, 3487), False, 'from estruturas.excecoes import EstruturaException\n'), ((3537, 3588), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""Posição negativa não é valida"""'], {}), "('Posição negativa não é valida')\n", (3555, 3588), False, 'from estruturas.excecoes import EstruturaException\n'), ((3756, 3796), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A lista está vazia"""'], {}), "('A lista está vazia')\n", (3774, 3796), False, 'from estruturas.excecoes import EstruturaException\n'), ((4239, 4294), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""Pokemon não registrado na pokedex"""'], {}), "('Pokemon não registrado na pokedex')\n", (4257, 4294), False, 'from estruturas.excecoes import EstruturaException\n'), ((4388, 4445), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""A posição deve ser um valor inteiro"""'], {}), "('A posição deve ser um valor inteiro')\n", (4406, 4445), False, 'from estruturas.excecoes import EstruturaException\n'), ((4495, 4546), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""Posição negativa não é valida"""'], {}), "('Posição negativa não é valida')\n", (4513, 4546), False, 'from 
estruturas.excecoes import EstruturaException\n'), ((745, 806), 'estruturas.excecoes.EstruturaException', 'EstruturaException', (['"""lista vazia, insira apenas na posição 1"""'], {}), "('lista vazia, insira apenas na posição 1')\n", (763, 806), False, 'from estruturas.excecoes import EstruturaException\n')] |
import numpy as np
from scipy.optimize import fsolve
from scipy.linalg import expm
import matplotlib.pyplot as plt
# Some utilities
def skew(x):
    """Return the 3x3 skew-symmetric ("hat") matrix of the 3-vector x."""
    a, b, c = x[0], x[1], x[2]
    return np.array([[0, -c, b],
                     [c, 0, -a],
                     [-b, a, 0]])
def adjoint(x):
    """Return the 6x6 adjoint matrix of the twist vector x = [omega; v]."""
    omega_hat = skew(x[:3])
    v_hat = skew(x[3:])
    top = np.concatenate([omega_hat, np.zeros((3, 3))], 1)
    bottom = np.concatenate([v_hat, omega_hat], 1)
    return np.concatenate([top, bottom])
def flatten(g):
    """Flatten a 4x4 homogeneous transform into a 12-vector:
    row-major rotation entries followed by the translation."""
    rotation = g[:3, :3].reshape(9)
    translation = g[:3, 3]
    return np.concatenate([rotation, translation])
def unflatten(g):
    """Rebuild a 4x4 homogeneous transform from its 12-vector flattening
    (inverse of flatten)."""
    R = np.reshape(g[:9], (3, 3))
    top = np.column_stack((R, g[9:]))
    # np.vstack is the modern spelling of the row_stack alias.
    return np.vstack((top, np.array([0, 0, 0, 1])))
def se(x):
    """Return the 4x4 se(3) matrix form of the twist vector x = [omega; v]."""
    top = np.column_stack((skew(x[:3]), x[3:]))
    return np.vstack((top, np.array([0, 0, 0, 0])))
def initRod(N):
    """Build the initial rod state discretized at N points.

    Returns:
        g:   (N, 12) flattened poses along the rod.
        xi:  (N, 6) body twists — a constant curvature of pi/4 over the length.
        eta: (N, 6) body velocities, all zero (rod starts at rest).
    """
    L = 10e-2  # length of the rod
    ds = L / (N - 1)
    xi = np.repeat(np.array([[0, np.pi/4/L, 0, 0, 0, 1]]), N, 0)
    eta = np.zeros((N, 6))
    # Integrate the poses along arclength with an explicit-Euler RKMK step.
    g = np.zeros((N, 12))
    G = np.eye(4)
    g[0, :] = flatten(G)
    for i in range(1, N):
        G = G @ expm(se(ds * xi[i - 1, :]))
        g[i, :] = flatten(G)
    return g, xi, eta
def step(g, xi, eta):
    """Advance the rod state by one time step.

    A shooting method: solve for the base twist xi0 such that the tip
    boundary condition (see condition()) is satisfied, then integrate
    the rod with that xi0.
    """
    xi0_guess = xi[0, :]
    xi0 = fsolve(lambda x: condition(g, xi, eta, x), xi0_guess)
    return integrate(g, xi, eta, xi0)
def condition(g, xi, eta, xi0):
    """Residual of the free-tip boundary condition: the twist at the distal
    end must equal the reference (unstrained) twist [0,0,0,0,0,1]."""
    free_tip = np.array([0, 0, 0, 0, 0, 1])
    _, xi_next, _ = integrate(g, xi, eta, xi0)
    return xi_next[-1, :] - free_tip
def integrate(g, xi, eta, xi0):
    """March the rod equations along arclength for one implicit-midpoint
    time step, starting from the guessed base twist xi0.

    Args:
        g:   (N, 12) flattened poses at the current time step.
        xi:  (N, 6) body twists at the current time step.
        eta: (N, 6) body velocities at the current time step.
        xi0: (6,) guessed base twist at the next time step.

    Returns:
        (g_next, xi_next, eta_next): same-shaped arrays advanced by dt.
    """
    # initialize empty matrices for storage
    g_next = np.zeros_like(g)
    xi_next = np.zeros_like(xi)
    eta_next = np.zeros_like(eta)
    # determine number of spatial points, just believe everything is the right size
    (N, _) = xi.shape
    # seed the spatial march with the guessed base twist
    xi_next[0, :] = xi0
    # material and geometric properties (must stay in sync with energy())
    xi_ref = np.array([0, 0, 0, 0, 0, 1])  # unstrained reference twist
    L = 10e-2   # rod length
    D = 1e-2    # rod diameter
    E = 1e6     # Young's modulus
    rho = 1e3   # density
    ds = L / (N - 1)  # spatial step
    dt = 0.01         # time step
    A = np.pi / 4 * D ** 2   # cross-section area
    I = np.pi / 64 * D ** 4  # second moment of area
    J = 2 * I                # polar moment
    G = E / 3                # shear modulus (E/3 corresponds to nu = 0.5)
    K = np.diag([E * I, E * I, G * J, G * A, G * A, E * A])  # stiffness matrix
    M = rho * np.diag([I, I, J, A, A, A])                    # inertia matrix
    # integration over the body (don't need the initial point as the initial values are determined already)
    for i in range(N - 1):
        # averaging over steps to get half step values
        xi_half = (xi_next[i, :] + xi[i, :]) / 2
        eta_half = (eta_next[i, :] + eta[i, :]) / 2
        # implicit midpoint approximation of the time derivatives
        xi_dot = (xi_next[i, :] - xi[i, :]) / dt
        eta_dot = (eta_next[i, :] - eta[i, :]) / dt
        # spatial derivatives from the rod balance laws
        xi_der = np.linalg.inv(K) @ (
            (M @ eta_dot) - (adjoint(eta_half).T @ M @ eta_half) + (adjoint(xi_half).T @ K @ (xi_half - xi_ref)))
        eta_der = xi_dot - (adjoint(xi_half) @ eta_half)
        # explicit Euler step in arclength on the half-step values
        xi_half_next = xi_half + ds * xi_der
        eta_half_next = eta_half + ds * eta_der
        # recover the next full-step value from the half-step value
        xi_next[i + 1, :] = 2 * xi_half_next - xi[i+1, :]
        eta_next[i + 1, :] = 2 * eta_half_next - eta[i+1, :]
    # midpoint RKMK to step the g values (poses) forward in time
    for i in range(N):
        g_next[i, :] = flatten(unflatten(g[i,:]) @ expm(se(dt * (eta_next[i,:] + eta[i,:])/2)))
    return g_next, xi_next, eta_next
# Testing functions
def plotDynamics(N, steps):
    """Animate the rod centerline (x vs z components of g) over `steps`
    time steps, updating the figure after each step."""
    fig, ax = plt.subplots()
    g, xi, eta = initRod(N)
    ax.plot(g[:, 9], g[:, 11])
    ax.set_aspect('equal')
    plt.pause(0.01)  # force an immediate redraw
    for _ in range(steps):
        g, xi, eta = step(g, xi, eta)
        ax.plot(g[:, 9], g[:, 11])
        plt.pause(0.01)  # force an immediate redraw
    # keep the window open for inspection/saving
    plt.show()
def energy(xi, eta):
    """Total (kinetic + potential) energy of the discretized rod.

    Uses the same material/geometric constants as integrate(); keep the
    two in sync.
    """
    (N, _) = xi.shape
    xi_ref = np.array([0, 0, 0, 0, 0, 1])
    # material and geometric properties
    L = 10e-2
    D = 1e-2
    E = 1e6
    rho = 1e3
    ds = L / (N - 1)
    A = np.pi / 4 * D ** 2
    I = np.pi / 64 * D ** 4
    J = 2 * I
    G = E / 3
    K = np.diag([E * I, E * I, G * J, G * A, G * A, E * A])
    M = rho * np.diag([I, I, J, A, A, A])
    total = 0
    for i in range(N):
        kinetic = eta[i, :] @ M @ eta[i, :]
        strain = xi[i, :] - xi_ref
        potential = strain @ K @ strain
        total += 0.5 * (kinetic + potential)
    # scale the nodal sum by the discrete step size
    return ds * total
def plotEnergy(N, steps):
    """Run the simulation for `steps` time steps and plot the total energy
    after each step (a conservation check for the integrator)."""
    fig, ax = plt.subplots()
    g, xi, eta = initRod(N)
    energies = []
    for _ in range(steps):
        g, xi, eta = step(g, xi, eta)
        energies.append(energy(xi, eta))
    ax.plot(energies)
    plt.show()
# Call the script as `python conservative.py`
if __name__ == "__main__":
    # plotDynamics(100, 20)  # optional: animate the rod shape instead
    plotEnergy(100,100)
"numpy.eye",
"numpy.reshape",
"numpy.diag",
"numpy.array",
"numpy.zeros",
"numpy.linalg.inv",
"matplotlib.pyplot.pause",
"numpy.zeros_like",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((199, 263), 'numpy.array', 'np.array', (['[[0, -x[2], x[1]], [x[2], 0, -x[0]], [-x[1], x[0], 0]]'], {}), '([[0, -x[2], x[1]], [x[2], 0, -x[0]], [-x[1], x[0], 0]])\n', (207, 263), True, 'import numpy as np\n'), ((981, 998), 'numpy.zeros', 'np.zeros', (['(N, 12)'], {}), '((N, 12))\n', (989, 998), True, 'import numpy as np\n'), ((1074, 1090), 'numpy.zeros', 'np.zeros', (['(N, 6)'], {}), '((N, 6))\n', (1082, 1090), True, 'import numpy as np\n'), ((1125, 1134), 'numpy.eye', 'np.eye', (['(4)'], {}), '(4)\n', (1131, 1134), True, 'import numpy as np\n'), ((1771, 1787), 'numpy.zeros_like', 'np.zeros_like', (['g'], {}), '(g)\n', (1784, 1787), True, 'import numpy as np\n'), ((1802, 1819), 'numpy.zeros_like', 'np.zeros_like', (['xi'], {}), '(xi)\n', (1815, 1819), True, 'import numpy as np\n'), ((1835, 1853), 'numpy.zeros_like', 'np.zeros_like', (['eta'], {}), '(eta)\n', (1848, 1853), True, 'import numpy as np\n'), ((2068, 2096), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 0, 1]'], {}), '([0, 0, 0, 0, 0, 1])\n', (2076, 2096), True, 'import numpy as np\n'), ((2276, 2327), 'numpy.diag', 'np.diag', (['[E * I, E * I, G * J, G * A, G * A, E * A]'], {}), '([E * I, E * I, G * J, G * A, G * A, E * A])\n', (2283, 2327), True, 'import numpy as np\n'), ((3627, 3641), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3639, 3641), True, 'import matplotlib.pyplot as plt\n'), ((3730, 3745), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.01)'], {}), '(0.01)\n', (3739, 3745), True, 'import matplotlib.pyplot as plt\n'), ((4017, 4027), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4025, 4027), True, 'import matplotlib.pyplot as plt\n'), ((4130, 4158), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 0, 1]'], {}), '([0, 0, 0, 0, 0, 1])\n', (4138, 4158), True, 'import numpy as np\n'), ((4338, 4389), 'numpy.diag', 'np.diag', (['[E * I, E * I, G * J, G * A, G * A, E * A]'], {}), '([E * I, E * I, G * J, G * A, G * A, E * A])\n', (4345, 4389), True, 'import numpy as np\n'), 
((4726, 4740), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (4738, 4740), True, 'import matplotlib.pyplot as plt\n'), ((4899, 4909), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4907, 4909), True, 'import matplotlib.pyplot as plt\n'), ((1018, 1060), 'numpy.array', 'np.array', (['[[0, np.pi / 4 / L, 0, 0, 0, 1]]'], {}), '([[0, np.pi / 4 / L, 0, 0, 0, 1]])\n', (1026, 1060), True, 'import numpy as np\n'), ((1652, 1680), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 0, 1]'], {}), '([0, 0, 0, 0, 0, 1])\n', (1660, 1680), True, 'import numpy as np\n'), ((2342, 2369), 'numpy.diag', 'np.diag', (['[I, I, J, A, A, A]'], {}), '([I, I, J, A, A, A])\n', (2349, 2369), True, 'import numpy as np\n'), ((3897, 3912), 'matplotlib.pyplot.pause', 'plt.pause', (['(0.01)'], {}), '(0.01)\n', (3906, 3912), True, 'import matplotlib.pyplot as plt\n'), ((4404, 4431), 'numpy.diag', 'np.diag', (['[I, I, J, A, A, A]'], {}), '([I, I, J, A, A, A])\n', (4411, 4431), True, 'import numpy as np\n'), ((553, 580), 'numpy.reshape', 'np.reshape', (['g[:3, :3]', '(9,)'], {}), '(g[:3, :3], (9,))\n', (563, 580), True, 'import numpy as np\n'), ((731, 753), 'numpy.array', 'np.array', (['[0, 0, 0, 1]'], {}), '([0, 0, 0, 1])\n', (739, 753), True, 'import numpy as np\n'), ((878, 900), 'numpy.array', 'np.array', (['[0, 0, 0, 0]'], {}), '([0, 0, 0, 0])\n', (886, 900), True, 'import numpy as np\n'), ((2854, 2870), 'numpy.linalg.inv', 'np.linalg.inv', (['K'], {}), '(K)\n', (2867, 2870), True, 'import numpy as np\n'), ((380, 396), 'numpy.zeros', 'np.zeros', (['(3, 3)'], {}), '((3, 3))\n', (388, 396), True, 'import numpy as np\n'), ((695, 720), 'numpy.reshape', 'np.reshape', (['g[:9]', '(3, 3)'], {}), '(g[:9], (3, 3))\n', (705, 720), True, 'import numpy as np\n')] |
# !/usr/bin/env python
# -*-coding:utf-8-*-
from flask import jsonify,request,g,url_for,current_app
from .. import db
from ..models import ShareCategory,Topic
from . import api
from .errors import forbidden
@api.route('/getCategory')
def get_category():
    u"""
    Fetch share-category information.

    Query params:
        type: "list" (default) returns a JSON array of every category;
              "single" returns one category looked up by the "name" param.
    Returns an error payload (status 1000) for missing/unknown parameters.
    """
    query_type = request.args.get('type', "list")
    if query_type == "list":
        categorys = ShareCategory.query.all()
        return jsonify([category.to_json for category in categorys])
    payload = None
    if query_type == "single":
        # request.args.get never returns None here (default is ""), so a
        # simple truthiness check replaces the old dead `is not None` test.
        name = request.args.get('name', "")
        if name:
            category = ShareCategory.query.filter_by(name=name).first()
            # Guard: an unknown name used to raise AttributeError on None.
            if category is not None:
                # NOTE(review): to_json is accessed as a property elsewhere in
                # this module (list branch above, add_category below); the old
                # `to_json()` call here was inconsistent -- confirm on the model.
                payload = category.to_json
    if payload is None:
        payload = {
            "status": 1000,
            "errorMsg": u"请求参数异常!"
        }
    return jsonify(payload)
@api.route('/addCategory', methods=['POST'])
def add_category():
    u"""Create a share category from the JSON POST body, or update the
    existing one with the same name."""
    body = request.json  # renamed from `json` to avoid shadowing the module name
    name = body.get("name", None)
    if name is None:
        return jsonify(
            {
                "status": 1000,
                "errorMsg": u"请求参数异常!"
            }
        )
    category = ShareCategory.query.filter_by(name=name).first()
    if category is None:
        category = ShareCategory(name=name)
    category.author = body.get("author", "adm")
    category.summary = body.get('summary', 'Empty Content.')
    category.icon = body.get('icon', "")
    db.session.add(category)
    db.session.commit()
    return jsonify({
        "status": 200,
        "info": category.to_json
    })
| [
"flask.request.args.get",
"flask.jsonify"
] | [((421, 453), 'flask.request.args.get', 'request.args.get', (['"""type"""', '"""list"""'], {}), "('type', 'list')\n", (437, 453), False, 'from flask import jsonify, request, g, url_for, current_app\n'), ((953, 966), 'flask.jsonify', 'jsonify', (['json'], {}), '(json)\n', (960, 966), False, 'from flask import jsonify, request, g, url_for, current_app\n'), ((1597, 1647), 'flask.jsonify', 'jsonify', (["{'status': 200, 'info': category.to_json}"], {}), "({'status': 200, 'info': category.to_json})\n", (1604, 1647), False, 'from flask import jsonify, request, g, url_for, current_app\n'), ((554, 607), 'flask.jsonify', 'jsonify', (['[category.to_json for category in categorys]'], {}), '([category.to_json for category in categorys])\n', (561, 607), False, 'from flask import jsonify, request, g, url_for, current_app\n'), ((1128, 1177), 'flask.jsonify', 'jsonify', (["{'status': 1000, 'errorMsg': u'请求参数异常!'}"], {}), "({'status': 1000, 'errorMsg': u'请求参数异常!'})\n", (1135, 1177), False, 'from flask import jsonify, request, g, url_for, current_app\n'), ((650, 678), 'flask.request.args.get', 'request.args.get', (['"""name"""', '""""""'], {}), "('name', '')\n", (666, 678), False, 'from flask import jsonify, request, g, url_for, current_app\n')] |
import GradientBasedOptimization as gbopt
import openpyxl as pyxl
import time
from QAnsatz import *
from QSubspaceEigensolver import *
import k_nearest_data as k_data
from QMeasure import HadamardTest_Analytical
# Problem data expanded in the Pauli basis over 3 qubits.
# W and D come from k_nearest_data (presumably the weight and degree
# matrices of a k-nearest-neighbour graph -- verify against that module);
# the Laplacian L = D - W is assembled from them below.
W_Hamiltonian = Hamiltonian_in_Pauli_String(qubits=3,
                                            unitary=['X0', 'X1', 'X1X0', 'X1Z0', 'Z1X0', 'X2', 'X2X0',
                                                     'X2Z0', 'X2X1', 'X2X1X0', 'X2X1Z0', 'X2Y1Y0', 'X2Z1',
                                                     'X2Z1X0', 'X2Z1Z0', 'Y2Y0', 'Y2Y1', 'Y2Y1Z0', 'Y2Z1Y0',
                                                     'Z2X0', 'Z2X1', 'Z2X1Z0', 'Z2Y1Y0', 'Z2Z1X0'],
                                            coefficient=[6 / 8, 6 / 8, 4 / 8, 2 / 8, -2 / 8, 6 / 8, 2 / 8, -2 / 8,
                                                         2 / 8, 4 / 8, 2 / 8, -4 / 8, -2 / 8, 2 / 8, -2 / 8, 2 / 8,
                                                         -2 / 8, -2 / 8, 2 / 8, -2 / 8, -2 / 8, 2 / 8, -4 / 8, -2 / 8],
                                            hamiltonian_mat=k_data.W)
# D is diagonal, hence only identity/Z Pauli strings appear.
D_Hamiltonian = Hamiltonian_in_Pauli_String(qubits=3,
                                            unitary=['I2I1I0', 'Z0', 'Z1', 'Z1Z0', 'Z2', 'Z2Z1Z0'],
                                            coefficient=[30 / 8, 2 / 8, -2 / 8, 2 / 8, -4 / 8, 4 / 8, ],
                                            hamiltonian_mat=k_data.D)
# Identity operator (useful as a reference/shift term).
I_Hamiltonian = Hamiltonian_in_Pauli_String(qubits=3,
                                            unitary=['I2I1I0'],
                                            coefficient=[1],
                                            hamiltonian_mat=np.eye(2 ** 3))
# Graph Laplacian in the same Pauli representation.
L_Hamiltonian = D_Hamiltonian - W_Hamiltonian
# Entry point: iteratively solve L v = sigma D v with a variational
# subspace eigensolver, refining sigma each outer iteration.
if __name__ == '__main__':
    eig = 0          # index of the targeted eigenpair
    iterations = 5   # number of outer (sigma-update) iterations
    state_scale = 3  # number of qubits
    layer = 5        # ansatz depth
    # np.math was removed in NumPy 2.0; floor(state_scale / 2) for a
    # non-negative int is exactly integer floor division.
    parameter_num = layer * (state_scale // 2) * 12
    timestamp = time.strftime("_%Y%m%d_%H%M%S", time.localtime())
    filename = 'xlsxdata\\data' + timestamp + '.xlsx'
    sigma = 1
    # Precomputed analytical sigma values per eigenpair, used below to form
    # the reference Hamiltonian for the residual-norm check.
    sigma_analytical = [0,
                        0.4495,
                        0.9015,
                        1.0000,
                        1.1949,
                        1.3027,
                        1.5000,
                        1.6514]
    tracers = []

    print('----------------------------------------------')
    print('eig = ' + str(eig))
    init_parameter = [0 for i in range(parameter_num)]
    res_parameter = init_parameter
    tracer_parameter = []
    for it in range(iterations):
        # Shifted operator for this outer iteration: H = L - sigma * D.
        H = L_Hamiltonian - sigma * D_Hamiltonian
        print('eig = ' + str(eig) + ' it = ' + str(it))
        Solver = SubspaceEigSolver_ClassicalEfficientSimulator(Hamiltonian=H,
                                                              ansatze=HardwareEfficientAnsatze_halflayer(state_scale,
                                                                                                       layer,
                                                                                                       res_parameter),
                                                              weight_list=[i + 1 for i in range(eig + 1)])
        if it == 0:
            # Record the initial residual norm before any optimization.
            initvec = np.zeros(2 ** state_scale)
            initvec[0] = 1
            check_circuit = QuantumCircuit(state_scale)
            check_circuit.initialize(initvec, [i for i in range(state_scale)])
            check_circuit.compose(Solver.ansatze.circuit(), [i for i in range(state_scale)], inplace=True)
            job = execute(check_circuit, state_backend)
            result = job.result()
            eigvec = result.get_statevector(check_circuit, decimals=3)
            delta_vec = np.dot(H.hamiltonian_mat, eigvec)
            norm = np.real(np.dot(delta_vec, delta_vec.conj()))
            tracers.append((sigma, norm))
        # Optimize the ansatz parameters by steepest descent on the
        # analytical loss/Jacobian provided by the solver.
        res_parameter = gbopt.steepest(Solver.getLossFunctionAnalytical,
                                       Solver.GetJacobianAnalytical,
                                       res_parameter,
                                       alpha=0.5,
                                       iters=100,
                                       direct='-',
                                       tol=1e-7)
        tracer_parameter.append(res_parameter)
        ''' get the ith eigenvector '''
        # Evaluate the Rayleigh quotient for each basis state j <= eig.
        for j in range(eig + 1):
            initvec = np.zeros(2 ** state_scale)
            initvec[j] = 1
            check_circuit = QuantumCircuit(state_scale)
            check_circuit.initialize(initvec, [i for i in range(state_scale)])
            check_circuit.compose(Solver.ansatze.circuit(), [i for i in range(state_scale)], inplace=True)
            job = execute(check_circuit, state_backend)
            result = job.result()
            state = result.get_statevector(check_circuit, decimals=3)
            lamb = np.real(np.dot(np.dot(state.conj(), H.hamiltonian_mat), state))
            print('eig_' + str(j) + ' = ' + str(lamb))
        # Rebuild the circuit on basis state 0 for the sigma update.
        initvec = np.zeros(2 ** state_scale)
        initvec[0] = 1
        check_circuit = QuantumCircuit(state_scale)
        check_circuit.initialize(initvec, [i for i in range(state_scale)])
        check_circuit.compose(Solver.ansatze.circuit(), [i for i in range(state_scale)], inplace=True)
        ''' update sigma '''
        # Generalized Rayleigh quotient sigma = <L> / <D>, both expectations
        # measured with the analytical Hadamard test.
        AE = L_Hamiltonian.ExpectationMeasurement(MeasurementMethod=HadamardTest_Analytical,
                                                  test_circuit=check_circuit,
                                                  active_qubits=[i for i in range(state_scale)])
        BE = D_Hamiltonian.ExpectationMeasurement(MeasurementMethod=HadamardTest_Analytical,
                                                  test_circuit=check_circuit,
                                                  active_qubits=[i for i in range(state_scale)])
        sigma = AE / BE
        # Residual norm against the reference sigma_analytical[eig].
        job = execute(check_circuit, state_backend)
        result = job.result()
        eigvec = result.get_statevector(check_circuit, decimals=3)
        H_ana = L_Hamiltonian - sigma_analytical[eig] * D_Hamiltonian
        delta_vec = np.dot(H_ana.hamiltonian_mat, eigvec)
        norm = np.real(np.dot(delta_vec, delta_vec.conj()))
        tracers.append((sigma, norm))
        print('sigma = ' + str(sigma))
        print('----------------------------------------------')
# Create xlsx file
wb = pyxl.Workbook()
wb.save(filename)
wb = pyxl.load_workbook(filename)
# Record experiment data per experiment
trace_sheet = wb.create_sheet(title='Experiment Data')
trace_sheet.cell(1, 1).value = 'timestep'
trace_sheet.cell(1, 2).value = 'eigval_' + str(eig)
trace_sheet.cell(1, 3).value = 'norm_' + str(eig)
for piece in range(len(tracers)):
trace_sheet.cell(2 + piece, 2).value = tracers[piece][0]
trace_sheet.cell(2 + piece, 3).value = tracers[piece][1]
wb.save(filename)
wb = pyxl.load_workbook(filename)
sh = wb.create_sheet(title='parameters')
for i in range(len(tracer_parameter)):
for j in range(len(tracer_parameter[i])):
sh.cell(i + 1, 1 + j).value = tracer_parameter[i][j]
wb.save(filename)
| [
"openpyxl.load_workbook",
"GradientBasedOptimization.steepest",
"time.localtime",
"openpyxl.Workbook"
] | [((6498, 6513), 'openpyxl.Workbook', 'pyxl.Workbook', ([], {}), '()\n', (6511, 6513), True, 'import openpyxl as pyxl\n'), ((6547, 6575), 'openpyxl.load_workbook', 'pyxl.load_workbook', (['filename'], {}), '(filename)\n', (6565, 6575), True, 'import openpyxl as pyxl\n'), ((7046, 7074), 'openpyxl.load_workbook', 'pyxl.load_workbook', (['filename'], {}), '(filename)\n', (7064, 7074), True, 'import openpyxl as pyxl\n'), ((1981, 1997), 'time.localtime', 'time.localtime', ([], {}), '()\n', (1995, 1997), False, 'import time\n'), ((3952, 4099), 'GradientBasedOptimization.steepest', 'gbopt.steepest', (['Solver.getLossFunctionAnalytical', 'Solver.GetJacobianAnalytical', 'res_parameter'], {'alpha': '(0.5)', 'iters': '(100)', 'direct': '"""-"""', 'tol': '(1e-07)'}), "(Solver.getLossFunctionAnalytical, Solver.\n GetJacobianAnalytical, res_parameter, alpha=0.5, iters=100, direct='-',\n tol=1e-07)\n", (3966, 4099), True, 'import GradientBasedOptimization as gbopt\n')] |
import datetime
import json
from datetime import datetime
from django.core.exceptions import PermissionDenied
from django.http import Http404
from django.shortcuts import get_object_or_404, redirect, render
from django.template import loader
from django.utils import translation
from django.views.generic import TemplateView
from rest_framework import generics, mixins, status, viewsets
from rest_framework.authentication import (SessionAuthentication,
TokenAuthentication)
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity
from registry.serializers import (ContactSerializer, OperatorSerializer, PilotSerializer,
PrivilagedContactSerializer, PrivilagedPilotSerializer,
PrivilagedOperatorSerializer, AircraftSerializer, AircraftESNSerializer)
from django.http import JsonResponse
from rest_framework.decorators import api_view
from six.moves.urllib import request as req
from functools import wraps
class OperatorList(mixins.ListModelMixin,
                generics.GenericAPIView):
    """
    GET: list every registered Operator.

    NOTE(review): the original docstring promised "create a new operator",
    but no CreateModelMixin/post handler is wired up here.
    """
    queryset = Operator.objects.all()
    serializer_class = OperatorSerializer

    def get(self, request, *args, **kwargs):
        # Delegates to ListModelMixin.list (handles serialization/pagination).
        return self.list(request, *args, **kwargs)
class OperatorDetail(mixins.RetrieveModelMixin,
                     mixins.DestroyModelMixin,
                     generics.GenericAPIView):
    """
    GET: retrieve a single Operator.
    DELETE: remove a single Operator.
    """
    queryset = Operator.objects.all()
    serializer_class = OperatorSerializer

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)

    def delete(self, request, *args, **kwargs):
        # destroy() is supplied by DestroyModelMixin; without that mixin the
        # original class raised AttributeError on every DELETE request.
        return self.destroy(request, *args, **kwargs)
class OperatorDetailPrivilaged(mixins.RetrieveModelMixin,
                generics.GenericAPIView):
    """
    GET: retrieve a single Operator using the privileged serializer
    (presumably exposes extra fields -- see PrivilagedOperatorSerializer).
    """
    queryset = Operator.objects.all()
    serializer_class = PrivilagedOperatorSerializer

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)
class OperatorAircraft(mixins.RetrieveModelMixin,
                       generics.GenericAPIView):
    """
    GET: list every Aircraft registered to the Operator given by `pk`.
    """
    queryset = Aircraft.objects.all()
    serializer_class = AircraftSerializer

    def get_Aircraft(self, pk):
        # 404 when the operator id is unknown; otherwise the operator's fleet.
        try:
            operator = Operator.objects.get(id=pk)
        except Operator.DoesNotExist:
            raise Http404
        return Aircraft.objects.filter(operator=operator)

    def get(self, request, pk, format=None):
        fleet = self.get_Aircraft(pk)
        return Response(AircraftSerializer(fleet, many=True).data)
class AircraftESNDetails(mixins.RetrieveModelMixin,
                generics.GenericAPIView):
    """
    GET: retrieve a single Aircraft looked up by its ESN.
    """
    queryset = Aircraft.objects.all()
    serializer_class = AircraftESNSerializer
    # Look up by the 'esn' field instead of the default primary key.
    lookup_field = 'esn'

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)
class ContactList(mixins.ListModelMixin,
                generics.GenericAPIView):
    """
    GET: list all contacts in the database.
    """
    queryset = Contact.objects.all()
    serializer_class = ContactSerializer

    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)
class ContactDetail(mixins.RetrieveModelMixin,
                generics.GenericAPIView):
    """
    GET: retrieve a single Contact instance.

    NOTE(review): only retrieval is wired up; the original "update or
    delete" docstring did not match the handlers.
    """
    queryset = Contact.objects.all()
    serializer_class = ContactSerializer

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)
class ContactDetailPrivilaged(mixins.RetrieveModelMixin,
                generics.GenericAPIView):
    """
    GET: retrieve a single Contact using the privileged serializer
    (presumably exposes extra fields -- see PrivilagedContactSerializer).
    """
    queryset = Contact.objects.all()
    serializer_class = PrivilagedContactSerializer

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)
class PilotList(mixins.ListModelMixin,
                generics.GenericAPIView):
    """
    GET: list all pilots in the database.
    """
    queryset = Pilot.objects.all()
    serializer_class = PilotSerializer

    def get(self, request, *args, **kwargs):
        return self.list(request, *args, **kwargs)
class PilotDetail(mixins.RetrieveModelMixin,
                generics.GenericAPIView):
    """
    GET: retrieve a single Pilot instance.

    NOTE(review): only retrieval is wired up; the original "update or
    delete" docstring did not match the handlers.
    """
    queryset = Pilot.objects.all()
    serializer_class = PilotSerializer

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)
class PilotDetailPrivilaged(mixins.RetrieveModelMixin,
                generics.GenericAPIView):
    """
    GET: retrieve a single Pilot using the privileged serializer
    (presumably exposes extra fields -- see PrivilagedPilotSerializer).
    """
    queryset = Pilot.objects.all()
    serializer_class = PrivilagedPilotSerializer

    def get(self, request, *args, **kwargs):
        return self.retrieve(request, *args, **kwargs)
class HomeView(TemplateView):
    # Landing page of the registry UI.
    template_name ='registry/index.html'
class APIView(TemplateView):
    # API documentation page.
    # NOTE(review): this class shadows rest_framework.views.APIView, which is
    # imported at the top of this module -- any later reference to APIView in
    # this file silently resolves to this TemplateView. Consider renaming
    # (requires updating URL configuration that references it).
    template_name ='registry/api.html'
| [
"registry.models.Pilot.objects.all",
"registry.models.Operator.objects.all",
"registry.models.Aircraft.objects.all",
"registry.models.Aircraft.objects.filter",
"rest_framework.response.Response",
"registry.models.Operator.objects.get",
"registry.serializers.AircraftSerializer",
"registry.models.Contac... | [((1357, 1379), 'registry.models.Operator.objects.all', 'Operator.objects.all', ([], {}), '()\n', (1377, 1379), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((1678, 1700), 'registry.models.Operator.objects.all', 'Operator.objects.all', ([], {}), '()\n', (1698, 1700), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((2110, 2132), 'registry.models.Operator.objects.all', 'Operator.objects.all', ([], {}), '()\n', (2130, 2132), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((2448, 2470), 'registry.models.Aircraft.objects.all', 'Aircraft.objects.all', ([], {}), '()\n', (2468, 2470), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((2972, 2994), 'registry.models.Aircraft.objects.all', 'Aircraft.objects.all', ([], {}), '()\n', (2992, 2994), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((3300, 3321), 'registry.models.Contact.objects.all', 'Contact.objects.all', ([], {}), '()\n', (3319, 3321), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((3616, 3637), 'registry.models.Contact.objects.all', 'Contact.objects.all', ([], {}), '()\n', (3635, 3637), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((3947, 3968), 'registry.models.Contact.objects.all', 'Contact.objects.all', ([], {}), '()\n', (3966, 3968), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((4239, 4258), 'registry.models.Pilot.objects.all', 'Pilot.objects.all', ([], {}), '()\n', (4256, 4258), False, 'from 
registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((4547, 4566), 'registry.models.Pilot.objects.all', 'Pilot.objects.all', ([], {}), '()\n', (4564, 4566), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((4872, 4891), 'registry.models.Pilot.objects.all', 'Pilot.objects.all', ([], {}), '()\n', (4889, 4891), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((2781, 2820), 'registry.serializers.AircraftSerializer', 'AircraftSerializer', (['aircraft'], {'many': '(True)'}), '(aircraft, many=True)\n', (2799, 2820), False, 'from registry.serializers import ContactSerializer, OperatorSerializer, PilotSerializer, PrivilagedContactSerializer, PrivilagedPilotSerializer, PrivilagedOperatorSerializer, AircraftSerializer, AircraftESNSerializer\n'), ((2831, 2856), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (2839, 2856), False, 'from rest_framework.response import Response\n'), ((2555, 2582), 'registry.models.Operator.objects.get', 'Operator.objects.get', ([], {'id': 'pk'}), '(id=pk)\n', (2575, 2582), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n'), ((2651, 2686), 'registry.models.Aircraft.objects.filter', 'Aircraft.objects.filter', ([], {'operator': 'o'}), '(operator=o)\n', (2674, 2686), False, 'from registry.models import Activity, Authorization, Contact, Operator, Aircraft, Pilot, Test, TestValidity\n')] |
import json
import os
import time
import ray
from ray.train import Trainer
from ray.train.examples.horovod.horovod_example import (
train_func as horovod_torch_train_func,
)
if __name__ == "__main__":
ray.init(address=os.environ.get("RAY_ADDRESS", "auto"))
start_time = time.time()
num_workers = 8
num_epochs = 10
trainer = Trainer("horovod", num_workers)
trainer.start()
results = trainer.run(
horovod_torch_train_func, config={"num_epochs": num_epochs, "lr": 1e-3}
)
trainer.shutdown()
assert len(results) == num_workers
for worker_result in results:
assert len(worker_result) == num_epochs
assert worker_result[num_epochs - 1] < worker_result[0]
delta = time.time() - start_time
with open(os.environ["TEST_OUTPUT_JSON"], "w") as f:
f.write(json.dumps({"train_time": delta, "success": True}))
| [
"json.dumps",
"time.time",
"os.environ.get",
"ray.train.Trainer"
] | [((284, 295), 'time.time', 'time.time', ([], {}), '()\n', (293, 295), False, 'import time\n'), ((351, 382), 'ray.train.Trainer', 'Trainer', (['"""horovod"""', 'num_workers'], {}), "('horovod', num_workers)\n", (358, 382), False, 'from ray.train import Trainer\n'), ((738, 749), 'time.time', 'time.time', ([], {}), '()\n', (747, 749), False, 'import time\n'), ((228, 265), 'os.environ.get', 'os.environ.get', (['"""RAY_ADDRESS"""', '"""auto"""'], {}), "('RAY_ADDRESS', 'auto')\n", (242, 265), False, 'import os\n'), ((836, 886), 'json.dumps', 'json.dumps', (["{'train_time': delta, 'success': True}"], {}), "({'train_time': delta, 'success': True})\n", (846, 886), False, 'import json\n')] |
import numpy as np
from gtsam import SfmTrack
from gtsfm.common.image import Image
import gtsfm.utils.images as image_utils
def test_get_average_point_color():
    """3d point color should be the per-channel mean over its 2d measurements."""
    # Random 3d point; the 2d measurements below are arbitrary pixel
    # locations, not actual projections.
    track_3d = SfmTrack(np.array([1, 2, 1]))
    track_3d.add_measurement(idx=0, m=np.array([130, 80]))  # in camera 0
    track_3d.add_measurement(idx=1, m=np.array([10, 60]))   # in camera 1

    img0 = np.zeros((100, 200, 3), dtype=np.uint8)
    img0[80, 130] = np.array([40, 50, 60])
    img1 = np.zeros((100, 200, 3), dtype=np.uint8)
    img1[60, 10] = np.array([60, 70, 80])

    r, g, b = image_utils.get_average_point_color(track_3d, {0: Image(img0), 1: Image(img1)})
    assert (r, g, b) == (50, 60, 70)
def test_get_downsampling_factor_per_axis_leaveintact() -> None:
    """Ensure that image is left intact, when shorter side is smaller than max_resolution."""
    # (Removed an unused `img = Image(...)` local: the function under test
    # takes only the dimensions, never the image itself.)
    img_h = 700
    img_w = 1500
    max_resolution = 800
    scale_u, scale_v, new_h, new_w = image_utils.get_downsampling_factor_per_axis(img_h, img_w, max_resolution)
    assert scale_u == 1.0
    assert scale_v == 1.0
    assert new_h == 700
    assert new_w == 1500
def test_get_rescaling_factor_per_axis_upsample() -> None:
    """Ensure that max resolution constraint is met, when upsampling image.

    Resize a 700x1500 image, so that the shorter image side is EXACTLY 800 px.
    """
    img_h = 700
    img_w = 1500
    max_resolution = 800
    scale_u, scale_v, new_h, new_w = image_utils.get_rescaling_factor_per_axis(img_h, img_w, max_resolution)
    # 8/7 will not give a clean integer division; compare to ~4 decimal places.
    # Bug fix: atol=4 made np.isclose vacuously true; the intended tolerance
    # is 1e-4 (scale_u ~ 1714/1500, scale_v ~ 800/700). Also dropped an
    # unused `img` local.
    assert np.isclose(scale_u, 1.1427, atol=1e-4)
    assert np.isclose(scale_v, 1.1429, atol=1e-4)
    assert new_h == 800
    assert new_w == 1714
def test_get_downsampling_factor_per_axis() -> None:
    """Ensure that max resolution constraint is met, when downsampling image.

    Resize a 700x1500 image, so that the shorter image side is AT MOST 600 px.
    Image is in landscape mode.
    """
    img_h = 700
    img_w = 1500
    max_resolution = 600
    scale_u, scale_v, new_h, new_w = image_utils.get_downsampling_factor_per_axis(img_h, img_w, max_resolution)
    # Note that 600 / 700 = 0.85714
    # 1500 * 0.85714 = 1285.7, which we round up to 1286.
    # Bug fix: atol=4 made np.isclose vacuously true; intended tolerance is
    # 1e-4. Also dropped an unused `img` local.
    assert np.isclose(scale_u, 0.8573, atol=1e-4)
    assert np.isclose(scale_v, 0.8571, atol=1e-4)
    assert new_h == 600
    assert new_w == 1286
def test_get_rescaling_factor_per_axis_downsample() -> None:
    """Ensure that max resolution constraint is met, when downsampling image.

    Resize a 700x1500 image, so that the shorter image side is EXACTLY 600 px.
    Image is in landscape mode.
    """
    img_h = 700
    img_w = 1500
    max_resolution = 600
    scale_u, scale_v, new_h, new_w = image_utils.get_rescaling_factor_per_axis(img_h, img_w, max_resolution)
    # Note that 600 / 700 = 0.85714
    # 1500 * 0.85714 = 1285.7, which we round up to 1286.
    # Bug fix: atol=4 made np.isclose vacuously true; intended tolerance is
    # 1e-4. Also dropped an unused `img` local.
    assert np.isclose(scale_u, 0.8573, atol=1e-4)
    assert np.isclose(scale_v, 0.8571, atol=1e-4)
    assert new_h == 600
    assert new_w == 1286
def test_get_downsampling_factor_per_axis_portrait() -> None:
    """Ensure that max resolution constraint is met, when downsampling image.

    Resize a 1500x700 image, so that the shorter image side is AT MOST 600 px.
    Image is in portrait mode.
    """
    img_h = 1500
    img_w = 700
    max_resolution = 600
    scale_u, scale_v, new_h, new_w = image_utils.get_downsampling_factor_per_axis(img_h, img_w, max_resolution)
    # Note that 600 / 700 = 0.85714
    # 1500 * 0.85714 = 1285.7, which we round up to 1286.
    # Bug fix: atol=4 made np.isclose vacuously true; intended tolerance is
    # 1e-4. Also dropped an unused `img` local.
    assert np.isclose(scale_u, 0.8571, atol=1e-4)
    assert np.isclose(scale_v, 0.8573, atol=1e-4)
    assert new_h == 1286
    assert new_w == 600
def test_get_rescaling_factor_per_axis_downsample_portrait() -> None:
    """Ensure that max resolution constraint is met, when downsampling image.

    Resize a 1500x700 image, so that the shorter image side is EXACTLY 600 px.
    Image is in portrait mode.
    """
    img_h = 1500
    img_w = 700
    max_resolution = 600
    scale_u, scale_v, new_h, new_w = image_utils.get_rescaling_factor_per_axis(img_h, img_w, max_resolution)
    # Note that 600 / 700 = 0.85714
    # 1500 * 0.85714 = 1285.7, which we round up to 1286.
    # Bug fix: atol=4 made np.isclose vacuously true; intended tolerance is
    # 1e-4. Also dropped an unused `img` local.
    assert np.isclose(scale_v, 0.8571, atol=1e-4)
    assert np.isclose(scale_u, 0.8573, atol=1e-4)
    assert new_h == 1286
    assert new_w == 600
| [
"gtsfm.utils.images.get_rescaling_factor_per_axis",
"gtsfm.utils.images.get_downsampling_factor_per_axis",
"numpy.isclose",
"numpy.array",
"numpy.zeros",
"gtsfm.utils.images.get_average_point_color",
"gtsfm.common.image.Image",
"gtsam.SfmTrack"
] | [((351, 370), 'numpy.array', 'np.array', (['[1, 2, 1]'], {}), '([1, 2, 1])\n', (359, 370), True, 'import numpy as np\n'), ((386, 411), 'gtsam.SfmTrack', 'SfmTrack', (['triangulated_pt'], {}), '(triangulated_pt)\n', (394, 411), False, 'from gtsam import SfmTrack\n'), ((578, 617), 'numpy.zeros', 'np.zeros', (['(100, 200, 3)'], {'dtype': 'np.uint8'}), '((100, 200, 3), dtype=np.uint8)\n', (586, 617), True, 'import numpy as np\n'), ((638, 660), 'numpy.array', 'np.array', (['[40, 50, 60]'], {}), '([40, 50, 60])\n', (646, 660), True, 'import numpy as np\n'), ((673, 712), 'numpy.zeros', 'np.zeros', (['(100, 200, 3)'], {'dtype': 'np.uint8'}), '((100, 200, 3), dtype=np.uint8)\n', (681, 712), True, 'import numpy as np\n'), ((732, 754), 'numpy.array', 'np.array', (['[60, 70, 80]'], {}), '([60, 70, 80])\n', (740, 754), True, 'import numpy as np\n'), ((817, 870), 'gtsfm.utils.images.get_average_point_color', 'image_utils.get_average_point_color', (['track_3d', 'images'], {}), '(track_3d, images)\n', (852, 870), True, 'import gtsfm.utils.images as image_utils\n'), ((1245, 1319), 'gtsfm.utils.images.get_downsampling_factor_per_axis', 'image_utils.get_downsampling_factor_per_axis', (['img_h', 'img_w', 'max_resolution'], {}), '(img_h, img_w, max_resolution)\n', (1289, 1319), True, 'import gtsfm.utils.images as image_utils\n'), ((1803, 1874), 'gtsfm.utils.images.get_rescaling_factor_per_axis', 'image_utils.get_rescaling_factor_per_axis', (['img_h', 'img_w', 'max_resolution'], {}), '(img_h, img_w, max_resolution)\n', (1844, 1874), True, 'import gtsfm.utils.images as image_utils\n'), ((1940, 1975), 'numpy.isclose', 'np.isclose', (['scale_u', '(1.1427)'], {'atol': '(4)'}), '(scale_u, 1.1427, atol=4)\n', (1950, 1975), True, 'import numpy as np\n'), ((1987, 2022), 'numpy.isclose', 'np.isclose', (['scale_v', '(1.1429)'], {'atol': '(4)'}), '(scale_v, 1.1429, atol=4)\n', (1997, 2022), True, 'import numpy as np\n'), ((2481, 2555), 'gtsfm.utils.images.get_downsampling_factor_per_axis', 
'image_utils.get_downsampling_factor_per_axis', (['img_h', 'img_w', 'max_resolution'], {}), '(img_h, img_w, max_resolution)\n', (2525, 2555), True, 'import gtsfm.utils.images as image_utils\n'), ((2662, 2697), 'numpy.isclose', 'np.isclose', (['scale_u', '(0.8573)'], {'atol': '(4)'}), '(scale_u, 0.8573, atol=4)\n', (2672, 2697), True, 'import numpy as np\n'), ((2709, 2744), 'numpy.isclose', 'np.isclose', (['scale_v', '(0.8571)'], {'atol': '(4)'}), '(scale_v, 0.8571, atol=4)\n', (2719, 2744), True, 'import numpy as np\n'), ((3211, 3282), 'gtsfm.utils.images.get_rescaling_factor_per_axis', 'image_utils.get_rescaling_factor_per_axis', (['img_h', 'img_w', 'max_resolution'], {}), '(img_h, img_w, max_resolution)\n', (3252, 3282), True, 'import gtsfm.utils.images as image_utils\n'), ((3389, 3424), 'numpy.isclose', 'np.isclose', (['scale_u', '(0.8573)'], {'atol': '(4)'}), '(scale_u, 0.8573, atol=4)\n', (3399, 3424), True, 'import numpy as np\n'), ((3436, 3471), 'numpy.isclose', 'np.isclose', (['scale_v', '(0.8571)'], {'atol': '(4)'}), '(scale_v, 0.8571, atol=4)\n', (3446, 3471), True, 'import numpy as np\n'), ((3939, 4013), 'gtsfm.utils.images.get_downsampling_factor_per_axis', 'image_utils.get_downsampling_factor_per_axis', (['img_h', 'img_w', 'max_resolution'], {}), '(img_h, img_w, max_resolution)\n', (3983, 4013), True, 'import gtsfm.utils.images as image_utils\n'), ((4120, 4155), 'numpy.isclose', 'np.isclose', (['scale_u', '(0.8571)'], {'atol': '(4)'}), '(scale_u, 0.8571, atol=4)\n', (4130, 4155), True, 'import numpy as np\n'), ((4167, 4202), 'numpy.isclose', 'np.isclose', (['scale_v', '(0.8573)'], {'atol': '(4)'}), '(scale_v, 0.8573, atol=4)\n', (4177, 4202), True, 'import numpy as np\n'), ((4677, 4748), 'gtsfm.utils.images.get_rescaling_factor_per_axis', 'image_utils.get_rescaling_factor_per_axis', (['img_h', 'img_w', 'max_resolution'], {}), '(img_h, img_w, max_resolution)\n', (4718, 4748), True, 'import gtsfm.utils.images as image_utils\n'), ((4855, 4890), 
'numpy.isclose', 'np.isclose', (['scale_v', '(0.8571)'], {'atol': '(4)'}), '(scale_v, 0.8571, atol=4)\n', (4865, 4890), True, 'import numpy as np\n'), ((4902, 4937), 'numpy.isclose', 'np.isclose', (['scale_u', '(0.8573)'], {'atol': '(4)'}), '(scale_u, 0.8573, atol=4)\n', (4912, 4937), True, 'import numpy as np\n'), ((773, 784), 'gtsfm.common.image.Image', 'Image', (['img0'], {}), '(img0)\n', (778, 784), False, 'from gtsfm.common.image import Image\n'), ((789, 800), 'gtsfm.common.image.Image', 'Image', (['img1'], {}), '(img1)\n', (794, 800), False, 'from gtsfm.common.image import Image\n'), ((1138, 1181), 'numpy.zeros', 'np.zeros', (['(img_h, img_w, 3)'], {'dtype': 'np.uint8'}), '((img_h, img_w, 3), dtype=np.uint8)\n', (1146, 1181), True, 'import numpy as np\n'), ((1696, 1739), 'numpy.zeros', 'np.zeros', (['(img_h, img_w, 3)'], {'dtype': 'np.uint8'}), '((img_h, img_w, 3), dtype=np.uint8)\n', (1704, 1739), True, 'import numpy as np\n'), ((2374, 2417), 'numpy.zeros', 'np.zeros', (['(img_h, img_w, 3)'], {'dtype': 'np.uint8'}), '((img_h, img_w, 3), dtype=np.uint8)\n', (2382, 2417), True, 'import numpy as np\n'), ((3104, 3147), 'numpy.zeros', 'np.zeros', (['(img_h, img_w, 3)'], {'dtype': 'np.uint8'}), '((img_h, img_w, 3), dtype=np.uint8)\n', (3112, 3147), True, 'import numpy as np\n'), ((3832, 3875), 'numpy.zeros', 'np.zeros', (['(img_h, img_w, 3)'], {'dtype': 'np.uint8'}), '((img_h, img_w, 3), dtype=np.uint8)\n', (3840, 3875), True, 'import numpy as np\n'), ((4570, 4613), 'numpy.zeros', 'np.zeros', (['(img_h, img_w, 3)'], {'dtype': 'np.uint8'}), '((img_h, img_w, 3), dtype=np.uint8)\n', (4578, 4613), True, 'import numpy as np\n'), ((469, 488), 'numpy.array', 'np.array', (['[130, 80]'], {}), '([130, 80])\n', (477, 488), True, 'import numpy as np\n'), ((546, 564), 'numpy.array', 'np.array', (['[10, 60]'], {}), '([10, 60])\n', (554, 564), True, 'import numpy as np\n')] |
from setuptools import setup, find_packages
import os

version = '0.0'

requires = [
    "setuptools>=0.7",
    "pyramid",
]

tests_require = [
    "pytest",
    "testfixtures",
    "webtest",
]


def _read(filename):
    """Return the contents of *filename*, closing the handle promptly.

    The previous inline ``open(...).read()`` calls leaked file handles
    (never closed) and raised ResourceWarnings on modern Pythons.
    """
    with open(filename) as f:
        return f.read()


# Long description shown on PyPI: README followed by contributors and changelog.
long_description = (
    _read('README.rst')
    + '\n' +
    'Contributors\n'
    '============\n'
    + '\n' +
    _read('CONTRIBUTORS.txt')
    + '\n' +
    _read('CHANGES.txt')
    + '\n')

setup(name='rebecca.menu',
      version=version,
      description="",
      long_description=long_description,
      # Get more strings from
      # http://pypi.python.org/pypi?%3Aaction=list_classifiers
      classifiers=[
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "License :: OSI Approved :: MIT License",
        "Framework :: Pyramid",
        ],
      keywords='',
      author='<NAME>',
      author_email='<EMAIL>',
      url='https://github.com/rebeccaframework/rebecca.menu',
      license='MIT',
      packages=find_packages('src'),
      package_dir = {'': 'src'},
      namespace_packages=['rebecca'],
      include_package_data=True,
      zip_safe=False,
      install_requires=requires,
      tests_require=tests_require,
      extras_require={
          "testing": tests_require,
      },
      )
| [
"setuptools.find_packages"
] | [((1084, 1104), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {}), "('src')\n", (1097, 1104), False, 'from setuptools import setup, find_packages\n')] |
from collections import defaultdict
from advent_of_code.core import parse_input, mapt
# Sample vent-line input from the Advent of Code 2021 day 5 problem statement.
test_input = """0,9 -> 5,9
8,0 -> 0,8
9,4 -> 3,4
2,2 -> 2,1
7,0 -> 7,4
6,4 -> 2,0
0,9 -> 2,9
3,4 -> 1,4
0,0 -> 8,8
5,5 -> 8,2"""
def count(iterable, predicate=bool):
    """Count the items of *iterable* for which *predicate* is truthy."""
    total = 0
    for element in iterable:
        if predicate(element):
            total += 1
    return total
def str_to_tup(string):
    """Parse a comma-separated string of integers into a tuple.

    Example: str_to_tup('0, 0') => (0, 0)
    """
    fields = string.split(",")
    return mapt(int, fields)
def X(point):
    """Return the x-coordinate (first element) of a point tuple."""
    x_coord = point[0]
    return x_coord
def Y(point):
    """Return the y-coordinate (second element) of a point tuple."""
    y_coord = point[1]
    return y_coord
def count_points(p1: tuple, p2: tuple, part1=True):
    """List every grid point covered by the segment from p1 to p2.

    Horizontal and vertical segments are always expanded; diagonal
    segments are expanded only when part1 is False (part 1 of the
    puzzle ignores diagonal vents and gets an empty list).
    """
    lo_y, hi_y = min(Y(p1), Y(p2)), max(Y(p1), Y(p2))
    lo_x, hi_x = min(X(p1), X(p2)), max(X(p1), X(p2))
    if X(p1) == X(p2):  # vertical segment
        return [(X(p1), y) for y in range(lo_y, hi_y + 1)]
    if Y(p1) == Y(p2):  # horizontal segment
        return [(x, Y(p1)) for x in range(lo_x, hi_x + 1)]
    if part1:  # diagonals only count in part 2
        return []
    grad = (Y(p2) - Y(p1)) / (X(p2) - X(p1))
    return [(x, grad * (x - lo_x) + Y(p1)) for x in range(lo_x, hi_x + 1)]
def do(_input, istest, part1):
    """Count grid points where at least two vent lines overlap."""
    overlap = defaultdict(int)
    parsed = parse_input(
        _input, parser=lambda s: mapt(str_to_tup, s.split(" -> ")), test=istest
    )
    for p1, p2 in parsed:
        if X(p1) > X(p2):  # normalise segments to run left-to-right
            p1, p2 = p2, p1
        for point in count_points(p1, p2, part1):
            overlap[point] += 1
    return count(overlap.values(), lambda c: c > 1)
# Part 1 regression checks (kept for reference, disabled).
# assert do(test_input, True, part1=True) == 5
# assert do('data/input5.txt', False, part1=True) == 6461
## part2
# Part 2 also counts 45-degree diagonal vent lines; these run at import time
# and require data/input5.txt to be present.
assert do(test_input, istest=True, part1=False) == 12
assert do("data/input5.txt", istest=False, part1=False) == 18065
# %%
| [
"collections.defaultdict"
] | [((1087, 1103), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (1098, 1103), False, 'from collections import defaultdict\n')] |
import os
import pandas as pd
from sqlalchemy.sql import column
from igf_data.igfdb.baseadaptor import BaseAdaptor
from igf_data.igfdb.igfTables import File, File_attribute
class FileAdaptor(BaseAdaptor):
  '''
  An adaptor class for the File and File_attribute tables.

  Note: the combined store method defaults to autosave=True, while the
  individual store_* methods default to autosave=False so they can be
  composed inside a single transaction.
  '''
  def store_file_and_attribute_data(self,data,autosave=True):
    '''
    A method for dividing and storing data to file and attribute table

    :param data: A list of dictionary or a Pandas DataFrame
    :param autosave: A Toggle for automatically saving changes to db, default True
    :returns: None
    '''
    (file_data, file_attr_data)=\
      self.divide_data_to_table_and_attribute(data=data)
    try:
      self.store_file_data(data=file_data)
      if len(file_attr_data.index)>0: # check if any attribute exists
        self.store_file_attributes(data=file_attr_data)
      if autosave:
        self.commit_session()
    except Exception as e:
      if autosave:
        self.rollback_session()
      raise ValueError(
        'Failed to store file and attributes, error: {0}'.format(e))

  def divide_data_to_table_and_attribute(
        self,data,required_column='file_path',table_columns=None,
        attribute_name_column='attribute_name',attribute_value_column='attribute_value'):
    '''
    A method for separating data for File and File_attribute tables

    :param data: A list of dictionary or a Pandas DataFrame
    :param table_columns: List of table column names, default None
    :param required_column: A column name to add to the attribute data
    :param attribute_name_column: A label for attribute name column
    :param attribute_value_column: A label for attribute value column
    :returns: Two pandas dataframes, one for File and another for File_attribute table
    '''
    try:
      if not isinstance(data, pd.DataFrame):
        data = pd.DataFrame(data)
      file_columns = \
        self.get_table_columns(
          table_name=File,
          excluded_columns=['file_id']) # get required columns for file table
      (file_df, file_attr_df) = \
        BaseAdaptor.\
        divide_data_to_table_and_attribute(
          self,
          data=data,
          required_column=required_column,
          table_columns=file_columns,
          attribute_name_column=attribute_name_column,
          attribute_value_column=attribute_value_column) # divide dataframe
      return (file_df, file_attr_df)
    except Exception as e:
      raise ValueError(
        'Failed to divide file data, error: {0}'.format(e))

  def store_file_data(self,data,autosave=False):
    '''
    Load data to file table

    :param data: A list of dictionary or a Pandas DataFrame
    :param autosave: A Toggle for automatically saving changes to db, default False
    :returns: None
    '''
    if not isinstance(data, pd.DataFrame):
      data = pd.DataFrame(data)
    try:
      self.store_records(
        table=File,
        data=data) # store data without autocommit
      if autosave:
        self.commit_session()
    except Exception as e:
      if autosave:
        self.rollback_session()
      raise ValueError('Failed to store file data, error: {0}'.format(e))

  def store_file_attributes(self,data,file_id='',autosave=False):
    '''
    A method for storing data to File_attribute table

    :param data: A list of dictionary or a Pandas DataFrame
    :param file_id: A file_id for updating the attribute table, default empty string
    :param autosave: A Toggle for automatically saving changes to db, default False
    :returns: None
    '''
    try:
      if not isinstance(data, pd.DataFrame):
        data = pd.DataFrame(data)
      # If the incoming attributes carry a file_path, resolve it to the
      # matching File.file_id before storing, then drop the path column.
      if 'file_path' in data.columns:
        map_function = \
          lambda x: \
            self.map_foreign_table_and_store_attribute(
              data=x,
              lookup_table=File,
              lookup_column_name='file_path',
              target_column_name='file_id') # prepare the map function for File id
        data['file_id'] = ''
        data = \
          data.apply(
            map_function,
            axis=1,
            result_type=None) # map file id
        data.drop(
          'file_path',
          axis=1,
          inplace=True)
      self.store_attributes(
        attribute_table=File_attribute,
        linked_column='file_id',
        db_id=file_id,
        data=data) # store data without autocommit
      if autosave:
        self.commit_session()
    except Exception as e:
      if autosave:
        self.rollback_session()
      raise ValueError(
        'Failed to store file attributes, error: {0}'.format(e))

  def fetch_file_records_file_path(self,file_path):
    '''
    A method for fetching data for file table

    :param file_path: an absolute file path
    :returns: A file object
    :raises ValueError: if no single matching record exists (output_mode='one')
    '''
    try:
      file_obj = \
        self.fetch_records_by_column(
          table=File,
          column_name=File.file_path,
          column_id=file_path,
          output_mode='one')
      return file_obj
    except Exception as e:
      raise ValueError(
        'Failed to fetch file record, error: {0}'.format(e))

  def check_file_records_file_path(self,file_path):
    '''
    A method for checking file information in database

    :param file_path: A absolute filepath
    :returns: True if the file is present in db or False if its not
    '''
    try:
      file_check = False
      file_obj = \
        self.fetch_records_by_column(
          table=File,
          column_name=File.file_path,
          column_id=file_path,
          output_mode='one_or_none')
      if file_obj:
        file_check = True
      return file_check
    except Exception as e:
      raise ValueError(
        'Failed to check file records, error: {0}'.format(e))

  def remove_file_data_for_file_path(
        self,file_path,remove_file=False,autosave=True):
    '''
    A method for removing entry for a specific file.

    :param file_path: A complete file_path for checking database
    :param remove_file: A toggle for removing filepath, default False
    :param autosave: A toggle for automatically saving changes to database, default True
    :returns: None
    '''
    try:
      file_exists = \
        self.check_file_records_file_path(file_path=file_path)
      if not file_exists:
        raise ValueError(
          'File {0} not found in database'.format(file_path))
      self.session.\
        query(File).\
        filter(File.file_path==file_path).\
        delete(synchronize_session=False) # remove record from db
      # NOTE(review): the file is deleted from disk before the DB delete is
      # committed; a failed commit would leave the record without the file.
      if remove_file:
        os.remove(path=file_path) # removing file from disk
      if autosave:
        self.commit_session() # save changes to database
    except Exception as e:
      raise ValueError(
        'Failed to remove file, error: {0}'.format(e))

  def update_file_table_for_file_path(self,file_path,tag,value,autosave=False):
    '''
    A method for updating file table

    :param file_path: A file_path for database look up
    :param tag: A keyword for file column name; must be a File table column
                other than file_id
    :param value: A new value for the file column
    :param autosave: Toggle autosave, default off
    :returns: None
    '''
    try:
      file_columns = \
        self.get_table_columns(
          table_name=File,
          excluded_columns=['file_id'])
      if tag not in file_columns:
        raise ValueError(
          'column name {0} not allowed for table File'.\
          format(tag))
      _ = \
        self.session.\
          query(File).\
          filter(File.file_path==file_path).\
          update({tag:value},synchronize_session=False)
      if autosave:
        self.commit_session()
    except Exception as e:
      raise ValueError(
        'Failed to update file entry, error: {0}'.format(e))
| [
"igf_data.igfdb.baseadaptor.BaseAdaptor.divide_data_to_table_and_attribute",
"pandas.DataFrame",
"os.remove"
] | [((2153, 2382), 'igf_data.igfdb.baseadaptor.BaseAdaptor.divide_data_to_table_and_attribute', 'BaseAdaptor.divide_data_to_table_and_attribute', (['self'], {'data': 'data', 'required_column': 'required_column', 'table_columns': 'file_columns', 'attribute_name_column': 'attribute_name_column', 'attribute_value_column': 'attribute_value_column'}), '(self, data=data,\n required_column=required_column, table_columns=file_columns,\n attribute_name_column=attribute_name_column, attribute_value_column=\n attribute_value_column)\n', (2199, 2382), False, 'from igf_data.igfdb.baseadaptor import BaseAdaptor\n'), ((2967, 2985), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (2979, 2985), True, 'import pandas as pd\n'), ((1890, 1908), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (1902, 1908), True, 'import pandas as pd\n'), ((3819, 3837), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {}), '(data)\n', (3831, 3837), True, 'import pandas as pd\n'), ((7066, 7091), 'os.remove', 'os.remove', ([], {'path': 'file_path'}), '(path=file_path)\n', (7075, 7091), False, 'import os\n')] |
import os
import subprocess
from E2E.configuration_loader import Configuration
# Module-level configuration, resolved once at import time.
config = Configuration().get_configuration()
# Location of the external shot-extractor executable.
EXE_FOLDER, EXE_NAME = os.path.split(config['shotExtractor'])

SHOTS_OUTPUT_SUFFIX = '_shot_scene.txt'  # suffix of the shot/scene list file the exe writes
KEYFRAMES_DIR = 'Keyframes'  # NOTE(review): not referenced in this module -- confirm external use
class Keyframe:
    """A single keyframe image extracted from a shot.

    File names end in '..._<id>_<frame>.jpg'; the last two
    underscore-separated fields carry the keyframe id and frame number.
    """

    def __init__(self, keyframe_name):
        self.id, self.frame_number = self.parse_image_name(keyframe_name)
        self.file = keyframe_name

    @staticmethod
    def parse_image_name(keyframe_name):
        """Return (id, frame_number) parsed from a keyframe file name."""
        stem = keyframe_name.split('.jpg')[0]
        fields = stem.split('_')
        if len(fields) < 4:
            raise Exception('bad keyframe name')
        keyframe_id, frame_number = fields[-2], fields[-1]
        return int(keyframe_id), int(frame_number)

    def __repr__(self):
        return f'Keyframe(id:{self.id}, frame:{self.frame_number})'
class Shot:
    """One detected shot, parsed from a line of the extractor's output file."""

    def __init__(self, shot_line):
        fields = shot_line.split(',')
        # 'Shot #<id>' -> <id>
        self.id = int(fields[0].split('#')[-1])
        # '... <seconds>' -> <seconds>
        self.start = float(fields[2].split(' ')[-1])
        self.end = float(fields[3].split(' ')[-1])
        # 'Frame: <first> ~ <last>' -> first/last frame numbers
        frame_bounds = fields[4].split('Frame: ')[-1].split(' ~ ')
        self.frame_start = int(frame_bounds[0])
        self.frame_end = int(float(frame_bounds[1]))
        self.keyframes = []

    def __repr__(self):
        return f'Shot(id:{self.id}, start:{self.frame_start}, end:{self.frame_end})'
class ShotExtractor:
    """
    Wrapper around the external shot-extractor executable.

    Command-line flags passed by exe_cmd:
    [dump_shot_scene_list]: when specified 1, write detected shot and scene list into a txt file. Default: 0
    [scene_number]: Maximum number of scenes expected to output. Default: 30
    [dump_shot_features]: when specified 1, write extracted shot features into a txt file. Default: 0
    [dump_shot_keyframes]: when specified 1, write extracted shot keyframes to PPM image files. Default: 0
    [output_static_thumbnail_folder]:Output folder for static thumbnail images. Default: Executable location.
    [max_static_thumbnail_count]: How many static thumbnails should output? Negative number means no static thumbnail output, 0 means using default setting. Default: 0
    [max_motion_thumbnail_length]: How long the motion thumbnail shuold be? Negative number menas no motion thumbnail output, 0.0 means using default setting. Default: 0.0
    [output_audio]: when specified 1, output audio in the motion thumbnail; otherwise no. Default: 1
    [fade_in_fade_out]: when specified 1, add fade in/out effects to motion thumbnail, otherwise no. Default: 1
    """
    def __init__(self):
        # Quoted so paths with spaces survive the shell-style join below.
        self.exe_path = f'"{os.path.join(EXE_FOLDER, EXE_NAME)}"'

    @staticmethod
    def exe_cmd(input_video_path, output_path):
        # Argument string matching the flag list documented on the class.
        return f'"{input_video_path}" "{output_path}" 1 30 1 1 1 11 1'

    def extract_shots(self, input_video_path, output_folder):
        """Run the extractor on a video and return the parsed list of Shots."""
        if not os.path.isdir(output_folder):
            raise Exception('output_folder does not exist')
        shots_output_path = os.path.join(output_folder, f'{os.path.basename(input_video_path)}{SHOTS_OUTPUT_SUFFIX}')
        # lazy shot extraction: reuse a previously written output file
        if os.path.exists(shots_output_path):
            print('shots have already been extracted - skipping execution...')
            shots = self.parse(shots_output_path)
            return shots
        # execute
        print('start extracting shots')
        # NOTE(review): changes the process-wide working directory as a side effect
        os.chdir(output_folder)
        result = subprocess.run(' '.join([self.exe_path, self.exe_cmd(input_video_path, output_folder)]),
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
        output, err = result.stdout, result.stderr
        exit_code = result.returncode
        # validate
        if exit_code == 0:
            print("finished extracting shots")
        else:
            raise Exception(f'failed extracting shots with exit code {exit_code} and error: {err}')
        # parse shots
        if not os.path.exists(shots_output_path):
            raise Exception('failed extracting shots')
        shots = self.parse(shots_output_path)
        return shots

    @staticmethod
    def parse(txt_path):
        """Parse the extractor's output file into a list of Shot objects."""
        with open(txt_path, 'r') as out_file:
            lines = out_file.readlines()
        shots = []
        for line in lines:
            # Only 'Shot #...' lines describe shots; everything else is ignored.
            if not line.startswith('Shot #'):
                continue
            shots.append(Shot(line))
        return shots

    @staticmethod
    def __match_keyframes_to_shots(kfs, shots):
        """Assign each keyframe to the shot whose frame range contains it.

        Two-pointer merge over both lists sorted by frame number; mutates
        each shot's keyframes list and returns the sorted shots.
        Keyframes falling between shot ranges are skipped implicitly.
        """
        kfs = sorted(kfs, key=lambda kf: kf.frame_number)
        shots = sorted(shots, key=lambda sh: sh.frame_start)
        k = 0
        s = 0
        while k < len(kfs) and s < len(shots):
            if shots[s].frame_start <= kfs[k].frame_number <= shots[s].frame_end:
                shots[s].keyframes.append(kfs[k])
                k += 1
            else:
                s += 1
        return shots
| [
"os.path.exists",
"os.path.join",
"os.path.split",
"os.chdir",
"os.path.isdir",
"os.path.basename",
"E2E.configuration_loader.Configuration"
] | [((149, 187), 'os.path.split', 'os.path.split', (["config['shotExtractor']"], {}), "(config['shotExtractor'])\n", (162, 187), False, 'import os\n'), ((90, 105), 'E2E.configuration_loader.Configuration', 'Configuration', ([], {}), '()\n', (103, 105), False, 'from E2E.configuration_loader import Configuration\n'), ((3012, 3045), 'os.path.exists', 'os.path.exists', (['shots_output_path'], {}), '(shots_output_path)\n', (3026, 3045), False, 'import os\n'), ((3268, 3291), 'os.chdir', 'os.chdir', (['output_folder'], {}), '(output_folder)\n', (3276, 3291), False, 'import os\n'), ((2760, 2788), 'os.path.isdir', 'os.path.isdir', (['output_folder'], {}), '(output_folder)\n', (2773, 2788), False, 'import os\n'), ((3838, 3871), 'os.path.exists', 'os.path.exists', (['shots_output_path'], {}), '(shots_output_path)\n', (3852, 3871), False, 'import os\n'), ((2506, 2540), 'os.path.join', 'os.path.join', (['EXE_FOLDER', 'EXE_NAME'], {}), '(EXE_FOLDER, EXE_NAME)\n', (2518, 2540), False, 'import os\n'), ((2910, 2944), 'os.path.basename', 'os.path.basename', (['input_video_path'], {}), '(input_video_path)\n', (2926, 2944), False, 'import os\n')] |
from typing import Union
import httpx
from aos_sw_api.validate import validate_200
from ._model import TacacsProfileModel
class TacacsProfile:
    """Factory returning a sync or async TACACS profile client.

    Dispatches on the concrete httpx session type so callers can use a
    single entry point for both blocking and asyncio code paths.
    """

    def __new__(cls, session: Union[httpx.Client, httpx.AsyncClient], **kwargs):
        if isinstance(session, httpx.Client):
            return TacacsProfileSync(session=session)
        elif isinstance(session, httpx.AsyncClient):
            return TacacsProfileAsync(session=session)
        # Previously an unsupported session type made __new__ fall through and
        # return None silently; fail loudly at construction time instead.
        raise TypeError(
            f"session must be httpx.Client or httpx.AsyncClient, "
            f"got {type(session).__name__}"
        )
class TacacsProfileBase:
    """Shared state for the sync and async TACACS profile clients."""

    def __init__(self, session: Union[httpx.Client, httpx.AsyncClient]):
        # HTTP session supplied by the caller; the caller owns its lifecycle.
        self._session = session
        # Relative endpoint path. NOTE(review): assumes the session is
        # configured with a base_url -- confirm at the call site.
        self._tacacs_profile_base_url = "tacacs_profile"
class TacacsProfileSync(TacacsProfileBase):
    """Blocking client for the TACACS profile endpoint."""

    def __init__(self, session: httpx.Client):
        super().__init__(session=session)

    def get_tacacs_profile(self) -> TacacsProfileModel:
        """Fetch the TACACS profile and return it as a parsed model."""
        response = self._session.get(url=self._tacacs_profile_base_url)
        validate_200(response)
        payload = response.json()
        return TacacsProfileModel(**payload)
class TacacsProfileAsync(TacacsProfileBase):
    """Asyncio client for the TACACS profile endpoint."""

    def __init__(self, session: httpx.AsyncClient):
        super().__init__(session=session)

    async def get_tacacs_profile(self) -> TacacsProfileModel:
        """Fetch the TACACS profile and return it as a parsed model."""
        response = await self._session.get(url=self._tacacs_profile_base_url)
        validate_200(response)
        payload = response.json()
        return TacacsProfileModel(**payload)
| [
"aos_sw_api.validate.validate_200"
] | [((891, 906), 'aos_sw_api.validate.validate_200', 'validate_200', (['r'], {}), '(r)\n', (903, 906), False, 'from aos_sw_api.validate import validate_200\n'), ((1236, 1251), 'aos_sw_api.validate.validate_200', 'validate_200', (['r'], {}), '(r)\n', (1248, 1251), False, 'from aos_sw_api.validate import validate_200\n')] |
import os
import glob
import platform
import sys
sys.path = sys.path[1:]
from hops import __get_abspath__ as hops__get_abspath__
name = 'hops'

here = os.path.abspath(os.path.dirname(__file__))
# Shortcut template; '{{x}}' is replaced with the installed package location.
with open(os.path.join(here, 'shortcut.txt')) as template_file:
    shortcut = template_file.read()
shortcut = shortcut.replace('{{x}}', hops__get_abspath__())

# create shortcut

print('\n\n************\n')

executable = {'Darwin': 'command', 'Linux': 'sh', 'Windows': 'cmd'}[platform.system()]


def _write_shortcut(path):
    """Write the shortcut script to *path*, closing the handle promptly."""
    with open(path, 'w') as handle:
        handle.write(shortcut)


try:
    # Preferred location: the user's Desktop directory.
    # (Was a bare 'except:' before, which also swallowed KeyboardInterrupt;
    # only file-system errors should trigger the fallbacks.)
    shortcut_path = os.path.join(os.path.expanduser('~'), 'Desktop', name + '.' + executable)
    _write_shortcut(shortcut_path)
except OSError:
    try:
        # Desktop may be nested one level below the home directory.
        shortcut_path = os.path.join(glob.glob(os.path.join(os.path.expanduser('~'), '*', 'Desktop'))[0], name + '.' + executable)
        _write_shortcut(shortcut_path)
    except IndexError:
        # No Desktop found at all -- fall back to the home directory itself.
        shortcut_path = os.path.join(os.path.expanduser('~'), name + '.' + executable)
        _write_shortcut(shortcut_path)

if platform.system() == 'Darwin':
    os.system('chmod 755 ' + shortcut_path)
elif platform.system() == 'Linux':
    os.system('chmod +x ' + shortcut_path)

print('HOPS successfully installed.')
print('The shortcut has been saved here:\n\n{0}\n\n'
      'You can freely move this file to your preferred location.'.format(shortcut_path))
| [
"os.path.join",
"os.path.dirname",
"platform.system",
"os.system",
"hops.__get_abspath__",
"os.path.expanduser"
] | [((170, 195), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (185, 195), False, 'import os\n'), ((293, 314), 'hops.__get_abspath__', 'hops__get_abspath__', ([], {}), '()\n', (312, 314), True, 'from hops import __get_abspath__ as hops__get_abspath__\n'), ((432, 449), 'platform.system', 'platform.system', ([], {}), '()\n', (447, 449), False, 'import platform\n'), ((1044, 1061), 'platform.system', 'platform.system', ([], {}), '()\n', (1059, 1061), False, 'import platform\n'), ((1079, 1118), 'os.system', 'os.system', (["('chmod 755 ' + shortcut_path)"], {}), "('chmod 755 ' + shortcut_path)\n", (1088, 1118), False, 'import os\n'), ((490, 513), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (508, 513), False, 'import os\n'), ((1124, 1141), 'platform.system', 'platform.system', ([], {}), '()\n', (1139, 1141), False, 'import platform\n'), ((1158, 1196), 'os.system', 'os.system', (["('chmod +x ' + shortcut_path)"], {}), "('chmod +x ' + shortcut_path)\n", (1167, 1196), False, 'import os\n'), ((213, 247), 'os.path.join', 'os.path.join', (['here', '"""shortcut.txt"""'], {}), "(here, 'shortcut.txt')\n", (225, 247), False, 'import os\n'), ((909, 932), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (927, 932), False, 'import os\n'), ((697, 720), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (715, 720), False, 'import os\n')] |
"""Build outline module."""
# Official Libraries
# My Modules
from stobu.formats.outline import format_outlines_data
from stobu.syss import messages as msg
from stobu.tools.elmchecker import is_enable_the_elm
from stobu.tools.storydatareader import elm_outline_of, elm_title_of
from stobu.tools.translater import translate_tags_str, translate_tags_text_list
from stobu.types.element import ElmType
from stobu.types.outline import OutlineRecord, OutlinesData
from stobu.types.output import OutputsData
from stobu.types.story import StoryData, StoryRecord
from stobu.utils import assertion
from stobu.utils.log import logger
# Public API of this module.
__all__ = (
    'outlines_data_from',
    'outputs_data_from_outlines_data',
)
# Define Constants
PROC = 'BUILD OUTLINE'  # label used in log messages for this build step

# Element types that may carry an outline; the list order also defines the
# output ordering applied by _reorder_outlines_data (book first, then
# chapter, episode, scene).
ENABLE_ELMS = [
    ElmType.BOOK,
    ElmType.CHAPTER,
    ElmType.EPISODE,
    ElmType.SCENE,
]
# Main
def outlines_data_from(story_data: StoryData, tags: dict) -> OutlinesData:
    """Build OutlinesData from story data, reordered and tag-translated.

    Returns None (after logging an error) when any stage yields no usable data.
    """
    assert isinstance(story_data, StoryData)
    assert isinstance(tags, dict)

    logger.debug(msg.PROC_START.format(proc=PROC))

    converted = []
    for story_record in story_data.get_data():
        assert isinstance(story_record, StoryRecord)
        outline_record = _conv_outline_record(story_record)
        if outline_record:
            converted.append(outline_record)
    if not converted:
        logger.error(msg.ERR_FAIL_INVALID_DATA.format(data=f"outline data in {PROC}"))
        return None

    reordered = _reorder_outlines_data(OutlinesData(converted))
    if not (reordered and reordered.has_data()):
        logger.error(msg.ERR_FAILED_PROC.format(proc=f"reorder data in {PROC}"))
        return None

    translated = _translate_outlines_data(reordered, tags)
    if not (translated and translated.has_data()):
        logger.error(msg.ERR_FAILED_PROC.format(proc=f"translate data in {PROC}"))
        return None

    logger.debug(msg.PROC_SUCCESS.format(proc=PROC))
    return translated
def outputs_data_from_outlines_data(outlines_data: OutlinesData, tags: dict) -> OutputsData:
    """Format outlines data into output text lines with tags translated.

    Returns None (after logging an error) when formatting yields nothing.
    """
    assert isinstance(outlines_data, OutlinesData)
    assert isinstance(tags, dict)

    _PROC = f"{PROC}: convert outputs data"
    # BUGFIX: this previously logged PROC_SUCCESS before doing any work;
    # log the start marker here, matching outlines_data_from.
    logger.debug(msg.PROC_START.format(proc=_PROC))

    formatted = format_outlines_data(outlines_data)
    if not formatted:
        logger.error(msg.ERR_FAIL_INVALID_DATA.format(data=f"outputs data in {PROC}"))
        return None

    translated = translate_tags_text_list(formatted, tags)

    logger.debug(msg.PROC_SUCCESS.format(proc=_PROC))
    return OutputsData(translated)
# Private Functions
def _conv_outline_record(record: StoryRecord) -> OutlineRecord:
    """Convert one StoryRecord into an OutlineRecord.

    Returns None (with a warning) when the element type is not outline-enabled.
    """
    assert isinstance(record, StoryRecord)

    elm_type = assertion.is_instance(record.type, ElmType)
    if not is_enable_the_elm(elm_type, ENABLE_ELMS):
        logger.warning(msg.ERR_FAIL_INVALID_DATA.format(data=f"element type in {PROC}"))
        return None

    return OutlineRecord(
        elm_type,
        assertion.is_str(elm_title_of(record)),
        assertion.is_str(elm_outline_of(record)),
    )
def _reorder_outlines_data(outlines_data: OutlinesData) -> OutlinesData:
    """Group records in ENABLE_ELMS order (book, chapter, episode, scene).

    Relative order within one element type is preserved; records whose type
    is not in ENABLE_ELMS are dropped.
    """
    assert isinstance(outlines_data, OutlinesData)

    records = outlines_data.get_data()
    for record in records:
        assert isinstance(record, OutlineRecord)
    ordered = [
        record
        for elm in ENABLE_ELMS
        for record in records
        if record.type is elm
    ]
    return OutlinesData(ordered)
def _translate_outlines_data(outlines_data: OutlinesData, tags: dict) -> OutlinesData:
    """Return new OutlinesData with tag placeholders translated in every record."""
    assert isinstance(outlines_data, OutlinesData)
    assert isinstance(tags, dict)

    records = outlines_data.get_data()
    assert all(isinstance(record, OutlineRecord) for record in records)
    return OutlinesData([_translate_record(record, tags) for record in records])
def _translate_record(record: OutlineRecord, tags: dict) -> OutlineRecord:
    """Return a copy of *record* with tags translated in title and outline."""
    assert isinstance(record, OutlineRecord)
    assert isinstance(tags, dict)

    new_title = translate_tags_str(record.title, tags)
    new_outline = translate_tags_str(record.outline, tags)
    return OutlineRecord(record.type, new_title, new_outline)
| [
"stobu.syss.messages.PROC_START.format",
"stobu.syss.messages.PROC_SUCCESS.format",
"stobu.types.outline.OutlineRecord",
"stobu.types.output.OutputsData",
"stobu.tools.storydatareader.elm_title_of",
"stobu.utils.assertion.is_instance",
"stobu.tools.translater.translate_tags_text_list",
"stobu.tools.st... | [((2216, 2251), 'stobu.formats.outline.format_outlines_data', 'format_outlines_data', (['outlines_data'], {}), '(outlines_data)\n', (2236, 2251), False, 'from stobu.formats.outline import format_outlines_data\n'), ((2399, 2440), 'stobu.tools.translater.translate_tags_text_list', 'translate_tags_text_list', (['formatted', 'tags'], {}), '(formatted, tags)\n', (2423, 2440), False, 'from stobu.tools.translater import translate_tags_str, translate_tags_text_list\n'), ((2507, 2530), 'stobu.types.output.OutputsData', 'OutputsData', (['translated'], {}), '(translated)\n', (2518, 2530), False, 'from stobu.types.output import OutputsData\n'), ((2671, 2714), 'stobu.utils.assertion.is_instance', 'assertion.is_instance', (['record.type', 'ElmType'], {}), '(record.type, ElmType)\n', (2692, 2714), False, 'from stobu.utils import assertion\n'), ((2991, 3025), 'stobu.types.outline.OutlineRecord', 'OutlineRecord', (['elm', 'title', 'outline'], {}), '(elm, title, outline)\n', (3004, 3025), False, 'from stobu.types.outline import OutlineRecord, OutlinesData\n'), ((3378, 3395), 'stobu.types.outline.OutlinesData', 'OutlinesData', (['tmp'], {}), '(tmp)\n', (3390, 3395), False, 'from stobu.types.outline import OutlineRecord, OutlinesData\n'), ((3742, 3759), 'stobu.types.outline.OutlinesData', 'OutlinesData', (['tmp'], {}), '(tmp)\n', (3754, 3759), False, 'from stobu.types.outline import OutlineRecord, OutlinesData\n'), ((1071, 1103), 'stobu.syss.messages.PROC_START.format', 'msg.PROC_START.format', ([], {'proc': 'PROC'}), '(proc=PROC)\n', (1092, 1103), True, 'from stobu.syss import messages as msg\n'), ((1459, 1476), 'stobu.types.outline.OutlinesData', 'OutlinesData', (['tmp'], {}), '(tmp)\n', (1471, 1476), False, 'from stobu.types.outline import OutlineRecord, OutlinesData\n'), ((1862, 1896), 'stobu.syss.messages.PROC_SUCCESS.format', 'msg.PROC_SUCCESS.format', ([], {'proc': 'PROC'}), '(proc=PROC)\n', (1885, 1896), True, 'from stobu.syss import messages as msg\n'), 
((2162, 2197), 'stobu.syss.messages.PROC_SUCCESS.format', 'msg.PROC_SUCCESS.format', ([], {'proc': '_PROC'}), '(proc=_PROC)\n', (2185, 2197), True, 'from stobu.syss import messages as msg\n'), ((2459, 2494), 'stobu.syss.messages.PROC_SUCCESS.format', 'msg.PROC_SUCCESS.format', ([], {'proc': '_PROC'}), '(proc=_PROC)\n', (2482, 2494), True, 'from stobu.syss import messages as msg\n'), ((2726, 2761), 'stobu.tools.elmchecker.is_enable_the_elm', 'is_enable_the_elm', (['elm', 'ENABLE_ELMS'], {}), '(elm, ENABLE_ELMS)\n', (2743, 2761), False, 'from stobu.tools.elmchecker import is_enable_the_elm\n'), ((2902, 2922), 'stobu.tools.storydatareader.elm_title_of', 'elm_title_of', (['record'], {}), '(record)\n', (2914, 2922), False, 'from stobu.tools.storydatareader import elm_outline_of, elm_title_of\n'), ((2955, 2977), 'stobu.tools.storydatareader.elm_outline_of', 'elm_outline_of', (['record'], {}), '(record)\n', (2969, 2977), False, 'from stobu.tools.storydatareader import elm_outline_of, elm_title_of\n'), ((3980, 4018), 'stobu.tools.translater.translate_tags_str', 'translate_tags_str', (['record.title', 'tags'], {}), '(record.title, tags)\n', (3998, 4018), False, 'from stobu.tools.translater import translate_tags_str, translate_tags_text_list\n'), ((4032, 4072), 'stobu.tools.translater.translate_tags_str', 'translate_tags_str', (['record.outline', 'tags'], {}), '(record.outline, tags)\n', (4050, 4072), False, 'from stobu.tools.translater import translate_tags_str, translate_tags_text_list\n'), ((1333, 1397), 'stobu.syss.messages.ERR_FAIL_INVALID_DATA.format', 'msg.ERR_FAIL_INVALID_DATA.format', ([], {'data': 'f"""outline data in {PROC}"""'}), "(data=f'outline data in {PROC}')\n", (1365, 1397), True, 'from stobu.syss import messages as msg\n'), ((1549, 1607), 'stobu.syss.messages.ERR_FAILED_PROC.format', 'msg.ERR_FAILED_PROC.format', ([], {'proc': 'f"""reorder data in {PROC}"""'}), "(proc=f'reorder data in {PROC}')\n", (1575, 1607), True, 'from stobu.syss import messages as 
msg\n'), ((1762, 1822), 'stobu.syss.messages.ERR_FAILED_PROC.format', 'msg.ERR_FAILED_PROC.format', ([], {'proc': 'f"""translate data in {PROC}"""'}), "(proc=f'translate data in {PROC}')\n", (1788, 1822), True, 'from stobu.syss import messages as msg\n'), ((2295, 2359), 'stobu.syss.messages.ERR_FAIL_INVALID_DATA.format', 'msg.ERR_FAIL_INVALID_DATA.format', ([], {'data': 'f"""outputs data in {PROC}"""'}), "(data=f'outputs data in {PROC}')\n", (2327, 2359), True, 'from stobu.syss import messages as msg\n'), ((2786, 2850), 'stobu.syss.messages.ERR_FAIL_INVALID_DATA.format', 'msg.ERR_FAIL_INVALID_DATA.format', ([], {'data': 'f"""element type in {PROC}"""'}), "(data=f'element type in {PROC}')\n", (2818, 2850), True, 'from stobu.syss import messages as msg\n')] |
r"""
Difference between magnetic dipole and loop sources
===================================================
In this example we look at the differences between an electric loop, which
results in a magnetic source, and a magnetic dipole source.
The derivation of the electromagnetic field in Hunziker et al. (2015) is for
electric and magnetic point-dipole sources and receivers. The magnetic field
due to a magnetic source (:math:`mm`) is obtained from the electric field due to
an electric source (:math:`ee`) using the duality principle, given in their
Equation (11),
.. math::
\hat{G}^{mm}_{pq}(\mathbf{x}, \mathbf{x'}, s, \eta_{kr}, \zeta_{ij}) =
-\hat{G}^{ee}_{pq}(\mathbf{x}, \mathbf{x'}, s, -\zeta_{kr}, -\eta_{ij}) \,
. \qquad (1)
Without going into the details of the different parameters, we can focus on the
difference between the :math:`mm` and :math:`ee` fields for a homogeneous,
isotropic fullspace by simplifying this further to
.. math::
\mathbf{G}^{mm}_\text{dip-dip} = \frac{\eta}{\zeta}\mathbf{G}^{ee} \quad
\xrightarrow{\text{diff. approx}} \quad \frac{\sigma}{\mathrm{i}\omega
\mu}\mathbf{G}^{ee}_\text{dip-dip} \, . \qquad (2)
Here, :math:`\sigma` is conductivity (S/m), :math:`\omega=2\pi f` is angular
frequency (Hz), and :math:`\mu` is the magnetic permeability (H/m). So from
Equation (2) we see that the :math:`mm` field differs from the :math:`ee`
field by a factor :math:`\sigma/(\mathrm{i}\omega\mu)`.
A magnetic dipole source has a moment of :math:`I^mds`; however, a magnetic
dipole source is basically never used in geophysics. Instead a loop of an
electric wire is used, which generates a magnetic field. The moment generated
by this loop is given by :math:`I^m = \mathrm{i}\omega\mu N A I^e`, where
:math:`A` is the area of the loop (m:math:`^2`), and :math:`N` the number of
turns of the loop. So the difference between a unit magnetic dipole and a unit
loop (:math:`A=1, N=1`) is the factor :math:`\mathrm{i}\omega\mu`, hence
Equation (2) becomes
.. math::
\mathbf{G}^{mm}_\text{loop-dip} =
\mathrm{i}\omega\mu\mathbf{G}^{mm}_\text{dip-dip} =
\sigma\,\mathbf{G}^{ee}_\text{dip-dip} \, . \qquad (3)
This notebook shows this relation in the frequency domain, as well as for
impulse, step-on, and step-off responses in the time domain.
We can actually model an **electric loop** instead of adjusting the magnetic
dipole solution to correspond to a loop source. This is shown in the second
part of the notebook.
**References**
- <NAME>., <NAME>, and <NAME>, 2015, The electromagnetic response
in a layered vertical transverse isotropic medium: A new look at an old
problem: Geophysics, 80(1), F1–F18; DOI: `10.1190/geo2013-0411.1
<https://doi.org/10.1190/geo2013-0411.1>`_.
"""
import empymod
import numpy as np
import matplotlib.pyplot as plt
plt.style.use('ggplot')
# sphinx_gallery_thumbnail_number = 3
###############################################################################
# 1. Using the magnetic dipole solution
# -------------------------------------
#
# Survey parameters
# ~~~~~~~~~~~~~~~~~
#
# - Homogeneous fullspace of :math:`\sigma` = 0.01 S/m.
# - Source at the origin, x-directed.
# - Inline receiver with offset of 100 m, x-directed.
freq = np.logspace(-1, 5, 301)  # Frequencies (Hz)
time = np.logspace(-6, 0, 301)  # Times (s)
src = [0, 0, 0, 0, 0]  # x-dir. source at the origin [x, y, z, azimuth, dip]
rec = [100, 0, 0, 0, 0]  # x-dir. receiver 100m away from source, inline
cond = 0.01  # Conductivity (S/m)
###############################################################################
# Computation using ``empymod``
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Collect parameters shared by all the calls below (res = 1/cond).
inp = {'src': src, 'rec': rec, 'depth': [], 'res': 1/cond, 'verb': 1}
# Frequency domain
inp['freqtime'] = freq
fee_dip_dip = empymod.bipole(**inp)  # electric source and receiver
fmm_dip_dip = empymod.bipole(msrc=True, mrec=True, **inp)  # magnetic src/rec
f_loo_dip = empymod.loop(**inp)  # electric-loop source
# Time domain.  `signal` selects the source waveform: -1 step-off, 0 impulse,
# 1 step-on (matching the 'Step-Off'/'Impulse'/'Step-On' plot labels below).
inp['freqtime'] = time
# ee
ee_dip_dip_of = empymod.bipole(signal=-1, **inp)
ee_dip_dip_im = empymod.bipole(signal=0, **inp)
ee_dip_dip_on = empymod.bipole(signal=1, **inp)
# mm dip-dip
dip_dip_of = empymod.bipole(signal=-1, msrc=True, mrec=True, **inp)
dip_dip_im = empymod.bipole(signal=0, msrc=True, mrec=True, **inp)
dip_dip_on = empymod.bipole(signal=1, msrc=True, mrec=True, **inp)
# mm loop-dip
loo_dip_of = empymod.loop(signal=-1, **inp)
loo_dip_im = empymod.loop(signal=0, **inp)
loo_dip_on = empymod.loop(signal=1, **inp)
###############################################################################
# Plot the result
# ~~~~~~~~~~~~~~~
fs = 16  # Fontsize
# Figure
fig = plt.figure(figsize=(12, 8))
# Frequency Domain.  The negated data is additionally plotted dashed so that
# negative values remain visible on the log-log axes.
plt.subplot(231)
plt.title(r'$G^{ee}_{\rm{dip-dip}}$', fontsize=fs)
plt.plot(freq, fee_dip_dip.real, 'C0-', label='Real')
plt.plot(freq, -fee_dip_dip.real, 'C0--')
plt.plot(freq, fee_dip_dip.imag, 'C1-', label='Imag')
plt.plot(freq, -fee_dip_dip.imag, 'C1--')
plt.xscale('log')
plt.yscale('log')
plt.ylim([5e-8, 2e-5])
ax1 = plt.subplot(232)  # NOTE(review): handle `ax1` is never used afterwards
plt.title(r'$G^{mm}_{\rm{dip-dip}}$', fontsize=fs)
plt.plot(freq, fmm_dip_dip.real, 'C0-', label='Real')
plt.plot(freq, -fmm_dip_dip.real, 'C0--')
plt.plot(freq, fmm_dip_dip.imag, 'C1-', label='Imag')
plt.plot(freq, -fmm_dip_dip.imag, 'C1--')
plt.xscale('log')
plt.yscale('log')
plt.xlabel('Frequency (Hz)', fontsize=fs-2)
plt.legend()
plt.subplot(233)
plt.title(r'$G^{mm}_{\rm{loop-dip}}$', fontsize=fs)
plt.plot(freq, f_loo_dip.real, 'C0-', label='Real')
plt.plot(freq, -f_loo_dip.real, 'C0--')
plt.plot(freq, f_loo_dip.imag, 'C1-', label='Imag')
plt.plot(freq, -f_loo_dip.imag, 'C1--')
plt.xscale('log')
plt.yscale('log')
plt.ylim([5e-10, 2e-7])
# Row label on the right-hand side of the frequency-domain row.
plt.text(1.05, 0.5, "Frequency Domain", {'fontsize': fs},
         horizontalalignment='left', verticalalignment='center',
         rotation=-90, clip_on=False, transform=plt.gca().transAxes)
# Time Domain: step-off, impulse and step-on responses of each quantity.
plt.subplot(234)
plt.plot(time, ee_dip_dip_of, 'C0-', label='Step-Off')
plt.plot(time, -ee_dip_dip_of, 'C0--')
plt.plot(time, ee_dip_dip_im, 'C1-', label='Impulse')
plt.plot(time, -ee_dip_dip_im, 'C1--')
plt.plot(time, ee_dip_dip_on, 'C2-', label='Step-On')
plt.plot(time, -ee_dip_dip_on, 'C2--')
plt.xscale('log')
plt.yscale('log')
plt.subplot(235)
plt.plot(time, dip_dip_of, 'C0-', label='Step-Off')
plt.plot(time, -dip_dip_of, 'C0--')
plt.plot(time, dip_dip_im, 'C1-', label='Impulse')
plt.plot(time, -dip_dip_im, 'C1--')
plt.plot(time, dip_dip_on, 'C2-', label='Step-On')
plt.plot(time, -dip_dip_on, 'C2--')
plt.xscale('log')
plt.yscale('log')
plt.xlabel('Time (s)', fontsize=fs-2)
plt.legend()
plt.subplot(236)
plt.plot(time, loo_dip_of, 'C0-', label='Step-Off')
plt.plot(time, -loo_dip_of, 'C0--')
plt.plot(time, loo_dip_im, 'C1-', label='Impulse')
plt.plot(time, -loo_dip_im, 'C1--')
plt.plot(time, loo_dip_on, 'C2-', label='Step-On')
plt.plot(time, -loo_dip_on, 'C2--')
plt.xscale('log')
plt.yscale('log')
# Row label on the right-hand side of the time-domain row.
plt.text(1.05, 0.5, "Time Domain", {'fontsize': fs},
         horizontalalignment='left', verticalalignment='center',
         rotation=-90, clip_on=False, transform=plt.gca().transAxes)
# Shared y-axis label for the whole figure.
fig.text(-0.01, 0.5, 'Amplitude; e-rec (V/m); m-rec (A/m)',
         va='center', rotation='vertical', fontsize=fs, color='.4')
plt.tight_layout()
plt.show()
###############################################################################
# The figure shows the main points of Equations (2) and (3):
#
# - The magnetic dipole-dipole response differs by a factor
# :math:`\sigma/(\mathrm{i}\omega\mu)` from the electric dipole-dipole
# response. That means for the time-domain that the magnetic response looks
# more like the time derivative of the electric response (e.g., the magnetic
# impulse responses resembles the electric step-on response).
# - The magnetic loop-dipole response differs only by :math:`\sigma` from the
# electric dipole-dipole response, hence a factor of 0.01.
#
# The units of the response only depend on the receiver, what the receiver
# actually measures. So if we change the source from a dipole to a loop it does
# not change the units of the received responses.
#
# 2. Using an electric loop
# -------------------------
#
# We can use ``empymod`` to model arbitrary shaped sources by simply adding
# point dipole sources together. This is what ``empymod`` does internally to
# model a finite length dipole (``empymod.bipole``), where it uses a Gaussian
# quadrature with a few points.
#
# Here, we are going to compare the result from ``loop``, as presented above,
# with two different simulations of an electric loop source, assuming a square
# loop which sides are 1 m long, so the area correspond to one square meter.
#
# Plotting routines
# ~~~~~~~~~~~~~~~~~
def plot_result(data1, data2, x, title, vmin=-15., vmax=-7., rx=0):
    """Compare two complex-valued fields on a 2x3 panel figure.

    Top row: log10-amplitude of the real parts of ``data1`` and ``data2``
    plus their relative error (in %); bottom row: the same for the
    imaginary parts.  ``rx`` holds the grid coordinates used for the mesh
    and the axis limits (the default ``0`` is a placeholder; callers pass
    the receiver-coordinate array).
    """
    fig = plt.figure(figsize=(18, 10))

    def apply_axes_style(name):
        """Common title/limit/aspect settings for one panel."""
        plt.title(name)
        plt.xlim(rx.min(), rx.max())
        plt.ylim(rx.min(), rx.max())
        plt.axis("equal")

    def amplitude_panel(pos, name, field):
        """Draw one log10-amplitude panel; return its mesh and axes."""
        ax = plt.subplot(pos)
        apply_axes_style(name)
        mesh = plt.pcolormesh(
            rx, rx, np.log10(np.abs(field)), linewidth=0, rasterized=True,
            cmap="viridis", vmin=vmin, vmax=vmax, shading='nearest')
        return mesh, ax

    def error_panel(pos, name, ref, other):
        """Draw one relative-error panel (log10 of %); return mesh/axes."""
        ax = plt.subplot(pos)
        apply_axes_style(name)
        rel_err = np.abs((ref - other)/ref)*100
        mesh = plt.pcolormesh(
            rx, rx, np.log10(rel_err), vmin=-2, vmax=2, linewidth=0,
            rasterized=True, cmap=plt.cm.get_cmap("RdBu_r", 8),
            shading='nearest')
        return mesh, ax

    # Real parts (top row).
    cf0, ax1 = amplitude_panel(231, r"(a) |Re(magn.dip*iwu)|", data1.real)
    _, ax2 = amplitude_panel(232, r"(b) |Re(el. square)|", data2.real)
    cf2, ax3 = error_panel(233, r"(c) Error real part", data1.real, data2.real)

    # Imaginary parts (bottom row).
    _, ax4 = amplitude_panel(234, r"(d) |Im(magn.dip*iwu)|", data1.imag)
    _, ax5 = amplitude_panel(235, r"(e) |Im(el. square)|", data2.imag)
    _, ax6 = error_panel(236, r"(f) Error imag part", data1.imag, data2.imag)

    # Colorbars: amplitude scale attached to the top row, error scale to the
    # bottom row (same layout as the original figure).
    fig.colorbar(cf0, ax=[ax1, ax2, ax3], label=r"$\log_{10}$ Amplitude (A/m)")
    cbar = fig.colorbar(cf2, ax=[ax4, ax5, ax6], label=r"Relative Error")
    cbar.set_ticks([-2, -1, 0, 1, 2])
    cbar.ax.set_yticklabels([r"$0.01\,\%$", r"$0.1\,\%$", r"$1\,\%$",
                             r"$10\,\%$", r"$100\,\%$"])

    # Shared axis labels and the figure title.
    fig.text(0.4, 0.05, "Inline Offset (m)", fontsize=14)
    fig.text(0.08, 0.5, 'Crossline Offset (m)', rotation=90, fontsize=14)
    fig.suptitle(title, y=.95, fontsize=20)
    plt.show()
###############################################################################
# Model parameters
# ~~~~~~~~~~~~~~~~
#
# - Resistivity: :math:`1 \Omega` m fullspace
#
# Survey
# ~~~~~~
#
# - Source at [0, 0, 0]
# - Receivers at [x, y, 10]
# - frequencies: 100 Hz.
# - Offsets: -250 m - 250 m
# Survey parameters: a 502 x 502 receiver grid from -250.5 m to +250.5 m.
x = np.arange(502) - 250.5
rx = np.tile(x, (x.size, 1))  # x-coordinate of every grid node (rows repeat x)
ry = rx.T                    # y-coordinate of every grid node
rxx = rx.ravel()             # flattened coordinates for the empymod calls
ryy = ry.ravel()
# Model: homogeneous fullspace, computed at a single frequency.
model = {
    'depth': [],      # no interfaces => fullspace
    'res': 1.,        # 1 Ohm.m
    'freqtime': 100,  # 100 Hz
    'htarg': {'pts_per_dec': -1},
    'verb': 1,
}
###############################################################################
# Compute ``empymod.loop`` result
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Loop source at the origin with dip=90 ([x, y, z, azimuth, dip] format),
# receivers on the flattened grid; reshape the result back to the 2D grid.
epm_loop = empymod.loop(src=[0, 0, 0, 0, 90], rec=[rxx, ryy, 10, 0, 0],
                        **model).reshape(np.shape(rx))
###############################################################################
# 2.1 Point dipoles at (x, y) using ``empymod.dipole``
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# - (0.5, 0), ab=42
# - (0, 0.5), ab=41
# - (-0.5, 0), ab=-42
# - (0, -0.5), ab=-41
#
rec_dip = [rxx, ryy, 10]
# Four point dipoles approximating the 1 m x 1 m square loop.  The +/- signs
# implement the current direction around the loop (the negative ab codes of
# the list above are realised via the subtractions); ``ab`` selects the
# source/receiver orientation pair.
square_pts = +empymod.dipole(src=[+0.5, +0.0, 0], rec=rec_dip, ab=42,
                             **model).reshape(np.shape(rx))
square_pts += empymod.dipole(src=[+0.0, +0.5, 0], rec=rec_dip, ab=41,
                             **model).reshape(np.shape(rx))
square_pts -= empymod.dipole(src=[-0.5, +0.0, 0], rec=rec_dip, ab=42,
                             **model).reshape(np.shape(rx))
square_pts -= empymod.dipole(src=[+0.0, -0.5, 0], rec=rec_dip, ab=41,
                             **model).reshape(np.shape(rx))
plot_result(epm_loop, square_pts, x, 'Loop made of four points',
            vmin=-13, vmax=-5, rx=x)
###############################################################################
# 2.2 Finite length dipoles using ``empymod.bipole``
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Each simulated with a 5pt Gaussian quadrature. The dipoles are:
#
# - (-0.5, -0.5) to (+0.5, -0.5)
# - (+0.5, -0.5) to (+0.5, +0.5)
# - (+0.5, +0.5) to (-0.5, +0.5)
# - (-0.5, +0.5) to (-0.5, -0.5)
inp_dip = {
    'rec': [rxx, ryy, 10, 0, 0],
    'mrec': True,  # magnetic receivers
    'srcpts': 5  # Gaussian quadr. with 5 pts to simulate a finite length dip.
}
# Four finite-length bipoles forming the same square loop; ``src`` lists the
# end points as [x0, x1, y0, y1, z0, z1] (empymod's bipole convention).
square_dip = +empymod.bipole(src=[+0.5, +0.5, -0.5, +0.5, 0, 0],
                             **inp_dip, **model)
square_dip += empymod.bipole(src=[+0.5, -0.5, +0.5, +0.5, 0, 0],
                             **inp_dip, **model)
square_dip += empymod.bipole(src=[-0.5, -0.5, +0.5, -0.5, 0, 0],
                             **inp_dip, **model)
square_dip += empymod.bipole(src=[-0.5, +0.5, -0.5, -0.5, 0, 0],
                             **inp_dip, **model)
square_dip = square_dip.reshape(np.shape(rx))
plot_result(epm_loop, square_dip, x, 'Loop made of four dipoles',
            vmin=-13, vmax=-5, rx=x)
###############################################################################
# Close to the source the results between
#
# - (1) a magnetic dipole,
# - (2) an electric loop consisting of four point sources, and
# - (3) an electric loop consisting of four finite length dipoles,
#
# differ, as expected. However, for the vast majority they are identical. Skin
# depth for our example with :math:`\rho=1\Omega` m and :math:`f=100` Hz is
# roughly 50 m, so the results are basically identical for 4-5 skin depths,
# after which the signal is very low.
# Print version and environment information for reproducibility.
empymod.Report()
| [
"numpy.log10",
"empymod.loop",
"numpy.arange",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.style.use",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.axis",
"matplotlib.pyplot.yscale",
"numpy.logspace",
"numpy.abs",
"matplotlib.pyplot.gca",
"numpy.size",
"empymod... | [((2839, 2862), 'matplotlib.pyplot.style.use', 'plt.style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (2852, 2862), True, 'import matplotlib.pyplot as plt\n'), ((3261, 3284), 'numpy.logspace', 'np.logspace', (['(-1)', '(5)', '(301)'], {}), '(-1, 5, 301)\n', (3272, 3284), True, 'import numpy as np\n'), ((3312, 3335), 'numpy.logspace', 'np.logspace', (['(-6)', '(0)', '(301)'], {}), '(-6, 0, 301)\n', (3323, 3335), True, 'import numpy as np\n'), ((3848, 3869), 'empymod.bipole', 'empymod.bipole', ([], {}), '(**inp)\n', (3862, 3869), False, 'import empymod\n'), ((3884, 3927), 'empymod.bipole', 'empymod.bipole', ([], {'msrc': '(True)', 'mrec': '(True)'}), '(msrc=True, mrec=True, **inp)\n', (3898, 3927), False, 'import empymod\n'), ((3940, 3959), 'empymod.loop', 'empymod.loop', ([], {}), '(**inp)\n', (3952, 3959), False, 'import empymod\n'), ((4020, 4052), 'empymod.bipole', 'empymod.bipole', ([], {'signal': '(-1)'}), '(signal=-1, **inp)\n', (4034, 4052), False, 'import empymod\n'), ((4069, 4100), 'empymod.bipole', 'empymod.bipole', ([], {'signal': '(0)'}), '(signal=0, **inp)\n', (4083, 4100), False, 'import empymod\n'), ((4117, 4148), 'empymod.bipole', 'empymod.bipole', ([], {'signal': '(1)'}), '(signal=1, **inp)\n', (4131, 4148), False, 'import empymod\n'), ((4176, 4230), 'empymod.bipole', 'empymod.bipole', ([], {'signal': '(-1)', 'msrc': '(True)', 'mrec': '(True)'}), '(signal=-1, msrc=True, mrec=True, **inp)\n', (4190, 4230), False, 'import empymod\n'), ((4244, 4297), 'empymod.bipole', 'empymod.bipole', ([], {'signal': '(0)', 'msrc': '(True)', 'mrec': '(True)'}), '(signal=0, msrc=True, mrec=True, **inp)\n', (4258, 4297), False, 'import empymod\n'), ((4311, 4364), 'empymod.bipole', 'empymod.bipole', ([], {'signal': '(1)', 'msrc': '(True)', 'mrec': '(True)'}), '(signal=1, msrc=True, mrec=True, **inp)\n', (4325, 4364), False, 'import empymod\n'), ((4393, 4423), 'empymod.loop', 'empymod.loop', ([], {'signal': '(-1)'}), '(signal=-1, **inp)\n', (4405, 4423), 
False, 'import empymod\n'), ((4437, 4466), 'empymod.loop', 'empymod.loop', ([], {'signal': '(0)'}), '(signal=0, **inp)\n', (4449, 4466), False, 'import empymod\n'), ((4480, 4509), 'empymod.loop', 'empymod.loop', ([], {'signal': '(1)'}), '(signal=1, **inp)\n', (4492, 4509), False, 'import empymod\n'), ((4664, 4691), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (4674, 4691), True, 'import matplotlib.pyplot as plt\n'), ((4712, 4728), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(231)'], {}), '(231)\n', (4723, 4728), True, 'import matplotlib.pyplot as plt\n'), ((4729, 4779), 'matplotlib.pyplot.title', 'plt.title', (['"""$G^{ee}_{\\\\rm{dip-dip}}$"""'], {'fontsize': 'fs'}), "('$G^{ee}_{\\\\rm{dip-dip}}$', fontsize=fs)\n", (4738, 4779), True, 'import matplotlib.pyplot as plt\n'), ((4780, 4833), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', 'fee_dip_dip.real', '"""C0-"""'], {'label': '"""Real"""'}), "(freq, fee_dip_dip.real, 'C0-', label='Real')\n", (4788, 4833), True, 'import matplotlib.pyplot as plt\n'), ((4834, 4875), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', '(-fee_dip_dip.real)', '"""C0--"""'], {}), "(freq, -fee_dip_dip.real, 'C0--')\n", (4842, 4875), True, 'import matplotlib.pyplot as plt\n'), ((4876, 4929), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', 'fee_dip_dip.imag', '"""C1-"""'], {'label': '"""Imag"""'}), "(freq, fee_dip_dip.imag, 'C1-', label='Imag')\n", (4884, 4929), True, 'import matplotlib.pyplot as plt\n'), ((4930, 4971), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', '(-fee_dip_dip.imag)', '"""C1--"""'], {}), "(freq, -fee_dip_dip.imag, 'C1--')\n", (4938, 4971), True, 'import matplotlib.pyplot as plt\n'), ((4972, 4989), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (4982, 4989), True, 'import matplotlib.pyplot as plt\n'), ((4990, 5007), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (5000, 5007), True, 'import matplotlib.pyplot as 
plt\n'), ((5008, 5032), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[5e-08, 2e-05]'], {}), '([5e-08, 2e-05])\n', (5016, 5032), True, 'import matplotlib.pyplot as plt\n'), ((5038, 5054), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(232)'], {}), '(232)\n', (5049, 5054), True, 'import matplotlib.pyplot as plt\n'), ((5055, 5105), 'matplotlib.pyplot.title', 'plt.title', (['"""$G^{mm}_{\\\\rm{dip-dip}}$"""'], {'fontsize': 'fs'}), "('$G^{mm}_{\\\\rm{dip-dip}}$', fontsize=fs)\n", (5064, 5105), True, 'import matplotlib.pyplot as plt\n'), ((5106, 5159), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', 'fmm_dip_dip.real', '"""C0-"""'], {'label': '"""Real"""'}), "(freq, fmm_dip_dip.real, 'C0-', label='Real')\n", (5114, 5159), True, 'import matplotlib.pyplot as plt\n'), ((5160, 5201), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', '(-fmm_dip_dip.real)', '"""C0--"""'], {}), "(freq, -fmm_dip_dip.real, 'C0--')\n", (5168, 5201), True, 'import matplotlib.pyplot as plt\n'), ((5202, 5255), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', 'fmm_dip_dip.imag', '"""C1-"""'], {'label': '"""Imag"""'}), "(freq, fmm_dip_dip.imag, 'C1-', label='Imag')\n", (5210, 5255), True, 'import matplotlib.pyplot as plt\n'), ((5256, 5297), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', '(-fmm_dip_dip.imag)', '"""C1--"""'], {}), "(freq, -fmm_dip_dip.imag, 'C1--')\n", (5264, 5297), True, 'import matplotlib.pyplot as plt\n'), ((5298, 5315), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (5308, 5315), True, 'import matplotlib.pyplot as plt\n'), ((5316, 5333), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (5326, 5333), True, 'import matplotlib.pyplot as plt\n'), ((5334, 5379), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Frequency (Hz)"""'], {'fontsize': '(fs - 2)'}), "('Frequency (Hz)', fontsize=fs - 2)\n", (5344, 5379), True, 'import matplotlib.pyplot as plt\n'), ((5378, 5390), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5388, 
5390), True, 'import matplotlib.pyplot as plt\n'), ((5392, 5408), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(233)'], {}), '(233)\n', (5403, 5408), True, 'import matplotlib.pyplot as plt\n'), ((5409, 5460), 'matplotlib.pyplot.title', 'plt.title', (['"""$G^{mm}_{\\\\rm{loop-dip}}$"""'], {'fontsize': 'fs'}), "('$G^{mm}_{\\\\rm{loop-dip}}$', fontsize=fs)\n", (5418, 5460), True, 'import matplotlib.pyplot as plt\n'), ((5461, 5512), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', 'f_loo_dip.real', '"""C0-"""'], {'label': '"""Real"""'}), "(freq, f_loo_dip.real, 'C0-', label='Real')\n", (5469, 5512), True, 'import matplotlib.pyplot as plt\n'), ((5513, 5552), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', '(-f_loo_dip.real)', '"""C0--"""'], {}), "(freq, -f_loo_dip.real, 'C0--')\n", (5521, 5552), True, 'import matplotlib.pyplot as plt\n'), ((5553, 5604), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', 'f_loo_dip.imag', '"""C1-"""'], {'label': '"""Imag"""'}), "(freq, f_loo_dip.imag, 'C1-', label='Imag')\n", (5561, 5604), True, 'import matplotlib.pyplot as plt\n'), ((5605, 5644), 'matplotlib.pyplot.plot', 'plt.plot', (['freq', '(-f_loo_dip.imag)', '"""C1--"""'], {}), "(freq, -f_loo_dip.imag, 'C1--')\n", (5613, 5644), True, 'import matplotlib.pyplot as plt\n'), ((5645, 5662), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (5655, 5662), True, 'import matplotlib.pyplot as plt\n'), ((5663, 5680), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (5673, 5680), True, 'import matplotlib.pyplot as plt\n'), ((5681, 5705), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[5e-10, 2e-07]'], {}), '([5e-10, 2e-07])\n', (5689, 5705), True, 'import matplotlib.pyplot as plt\n'), ((5913, 5929), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(234)'], {}), '(234)\n', (5924, 5929), True, 'import matplotlib.pyplot as plt\n'), ((5930, 5984), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'ee_dip_dip_of', '"""C0-"""'], {'label': '"""Step-Off"""'}), 
"(time, ee_dip_dip_of, 'C0-', label='Step-Off')\n", (5938, 5984), True, 'import matplotlib.pyplot as plt\n'), ((5985, 6023), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-ee_dip_dip_of)', '"""C0--"""'], {}), "(time, -ee_dip_dip_of, 'C0--')\n", (5993, 6023), True, 'import matplotlib.pyplot as plt\n'), ((6024, 6077), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'ee_dip_dip_im', '"""C1-"""'], {'label': '"""Impulse"""'}), "(time, ee_dip_dip_im, 'C1-', label='Impulse')\n", (6032, 6077), True, 'import matplotlib.pyplot as plt\n'), ((6078, 6116), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-ee_dip_dip_im)', '"""C1--"""'], {}), "(time, -ee_dip_dip_im, 'C1--')\n", (6086, 6116), True, 'import matplotlib.pyplot as plt\n'), ((6117, 6170), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'ee_dip_dip_on', '"""C2-"""'], {'label': '"""Step-On"""'}), "(time, ee_dip_dip_on, 'C2-', label='Step-On')\n", (6125, 6170), True, 'import matplotlib.pyplot as plt\n'), ((6171, 6209), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-ee_dip_dip_on)', '"""C2--"""'], {}), "(time, -ee_dip_dip_on, 'C2--')\n", (6179, 6209), True, 'import matplotlib.pyplot as plt\n'), ((6210, 6227), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (6220, 6227), True, 'import matplotlib.pyplot as plt\n'), ((6228, 6245), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (6238, 6245), True, 'import matplotlib.pyplot as plt\n'), ((6247, 6263), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(235)'], {}), '(235)\n', (6258, 6263), True, 'import matplotlib.pyplot as plt\n'), ((6264, 6315), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'dip_dip_of', '"""C0-"""'], {'label': '"""Step-Off"""'}), "(time, dip_dip_of, 'C0-', label='Step-Off')\n", (6272, 6315), True, 'import matplotlib.pyplot as plt\n'), ((6316, 6351), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-dip_dip_of)', '"""C0--"""'], {}), "(time, -dip_dip_of, 'C0--')\n", (6324, 6351), True, 'import 
matplotlib.pyplot as plt\n'), ((6352, 6402), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'dip_dip_im', '"""C1-"""'], {'label': '"""Impulse"""'}), "(time, dip_dip_im, 'C1-', label='Impulse')\n", (6360, 6402), True, 'import matplotlib.pyplot as plt\n'), ((6403, 6438), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-dip_dip_im)', '"""C1--"""'], {}), "(time, -dip_dip_im, 'C1--')\n", (6411, 6438), True, 'import matplotlib.pyplot as plt\n'), ((6439, 6489), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'dip_dip_on', '"""C2-"""'], {'label': '"""Step-On"""'}), "(time, dip_dip_on, 'C2-', label='Step-On')\n", (6447, 6489), True, 'import matplotlib.pyplot as plt\n'), ((6490, 6525), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-dip_dip_on)', '"""C2--"""'], {}), "(time, -dip_dip_on, 'C2--')\n", (6498, 6525), True, 'import matplotlib.pyplot as plt\n'), ((6526, 6543), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (6536, 6543), True, 'import matplotlib.pyplot as plt\n'), ((6544, 6561), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (6554, 6561), True, 'import matplotlib.pyplot as plt\n'), ((6562, 6601), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {'fontsize': '(fs - 2)'}), "('Time (s)', fontsize=fs - 2)\n", (6572, 6601), True, 'import matplotlib.pyplot as plt\n'), ((6600, 6612), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (6610, 6612), True, 'import matplotlib.pyplot as plt\n'), ((6614, 6630), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(236)'], {}), '(236)\n', (6625, 6630), True, 'import matplotlib.pyplot as plt\n'), ((6631, 6682), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'loo_dip_of', '"""C0-"""'], {'label': '"""Step-Off"""'}), "(time, loo_dip_of, 'C0-', label='Step-Off')\n", (6639, 6682), True, 'import matplotlib.pyplot as plt\n'), ((6683, 6718), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-loo_dip_of)', '"""C0--"""'], {}), "(time, -loo_dip_of, 'C0--')\n", 
(6691, 6718), True, 'import matplotlib.pyplot as plt\n'), ((6719, 6769), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'loo_dip_im', '"""C1-"""'], {'label': '"""Impulse"""'}), "(time, loo_dip_im, 'C1-', label='Impulse')\n", (6727, 6769), True, 'import matplotlib.pyplot as plt\n'), ((6770, 6805), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-loo_dip_im)', '"""C1--"""'], {}), "(time, -loo_dip_im, 'C1--')\n", (6778, 6805), True, 'import matplotlib.pyplot as plt\n'), ((6806, 6856), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'loo_dip_on', '"""C2-"""'], {'label': '"""Step-On"""'}), "(time, loo_dip_on, 'C2-', label='Step-On')\n", (6814, 6856), True, 'import matplotlib.pyplot as plt\n'), ((6857, 6892), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '(-loo_dip_on)', '"""C2--"""'], {}), "(time, -loo_dip_on, 'C2--')\n", (6865, 6892), True, 'import matplotlib.pyplot as plt\n'), ((6893, 6910), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (6903, 6910), True, 'import matplotlib.pyplot as plt\n'), ((6911, 6928), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (6921, 6928), True, 'import matplotlib.pyplot as plt\n'), ((7247, 7265), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (7263, 7265), True, 'import matplotlib.pyplot as plt\n'), ((7266, 7276), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7274, 7276), True, 'import matplotlib.pyplot as plt\n'), ((13819, 13889), 'empymod.bipole', 'empymod.bipole', ([], {'src': '[+0.5, -0.5, +0.5, +0.5, 0, 0]'}), '(src=[+0.5, -0.5, +0.5, +0.5, 0, 0], **inp_dip, **model)\n', (13833, 13889), False, 'import empymod\n'), ((13933, 14003), 'empymod.bipole', 'empymod.bipole', ([], {'src': '[-0.5, -0.5, +0.5, -0.5, 0, 0]'}), '(src=[-0.5, -0.5, +0.5, -0.5, 0, 0], **inp_dip, **model)\n', (13947, 14003), False, 'import empymod\n'), ((14047, 14117), 'empymod.bipole', 'empymod.bipole', ([], {'src': '[-0.5, +0.5, -0.5, -0.5, 0, 0]'}), '(src=[-0.5, 
+0.5, -0.5, -0.5, 0, 0], **inp_dip, **model)\n', (14061, 14117), False, 'import empymod\n'), ((14851, 14867), 'empymod.Report', 'empymod.Report', ([], {}), '()\n', (14865, 14867), False, 'import empymod\n'), ((8825, 8853), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(18, 10)'}), '(figsize=(18, 10))\n', (8835, 8853), True, 'import matplotlib.pyplot as plt\n'), ((9061, 9077), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(231)'], {}), '(231)\n', (9072, 9077), True, 'import matplotlib.pyplot as plt\n'), ((9328, 9344), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(232)'], {}), '(232)\n', (9339, 9344), True, 'import matplotlib.pyplot as plt\n'), ((9575, 9591), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(233)'], {}), '(233)\n', (9586, 9591), True, 'import matplotlib.pyplot as plt\n'), ((9948, 9964), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(234)'], {}), '(234)\n', (9959, 9964), True, 'import matplotlib.pyplot as plt\n'), ((10197, 10213), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(235)'], {}), '(235)\n', (10208, 10213), True, 'import matplotlib.pyplot as plt\n'), ((10444, 10460), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(236)'], {}), '(236)\n', (10455, 10460), True, 'import matplotlib.pyplot as plt\n'), ((11309, 11319), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11317, 11319), True, 'import matplotlib.pyplot as plt\n'), ((11642, 11656), 'numpy.arange', 'np.arange', (['(502)'], {}), '(502)\n', (11651, 11656), True, 'import numpy as np\n'), ((11687, 11697), 'numpy.size', 'np.size', (['x'], {}), '(x)\n', (11694, 11697), True, 'import numpy as np\n'), ((12197, 12209), 'numpy.shape', 'np.shape', (['rx'], {}), '(rx)\n', (12205, 12209), True, 'import numpy as np\n'), ((12771, 12783), 'numpy.shape', 'np.shape', (['rx'], {}), '(rx)\n', (12779, 12783), True, 'import numpy as np\n'), ((12901, 12913), 'numpy.shape', 'np.shape', (['rx'], {}), '(rx)\n', (12909, 12913), True, 'import numpy as np\n'), ((13031, 13043), 'numpy.shape', 
'np.shape', (['rx'], {}), '(rx)\n', (13039, 13043), True, 'import numpy as np\n'), ((13705, 13775), 'empymod.bipole', 'empymod.bipole', ([], {'src': '[+0.5, +0.5, -0.5, +0.5, 0, 0]'}), '(src=[+0.5, +0.5, -0.5, +0.5, 0, 0], **inp_dip, **model)\n', (13719, 13775), False, 'import empymod\n'), ((14179, 14191), 'numpy.shape', 'np.shape', (['rx'], {}), '(rx)\n', (14187, 14191), True, 'import numpy as np\n'), ((8914, 8929), 'matplotlib.pyplot.title', 'plt.title', (['name'], {}), '(name)\n', (8923, 8929), True, 'import matplotlib.pyplot as plt\n'), ((9012, 9029), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (9020, 9029), True, 'import matplotlib.pyplot as plt\n'), ((9642, 9688), 'numpy.abs', 'np.abs', (['((data1.real - data2.real) / data1.real)'], {}), '((data1.real - data2.real) / data1.real)\n', (9648, 9688), True, 'import numpy as np\n'), ((9722, 9739), 'numpy.log10', 'np.log10', (['error_r'], {}), '(error_r)\n', (9730, 9739), True, 'import numpy as np\n'), ((10511, 10557), 'numpy.abs', 'np.abs', (['((data1.imag - data2.imag) / data1.imag)'], {}), '((data1.imag - data2.imag) / data1.imag)\n', (10517, 10557), True, 'import numpy as np\n'), ((10585, 10602), 'numpy.log10', 'np.log10', (['error_i'], {}), '(error_i)\n', (10593, 10602), True, 'import numpy as np\n'), ((12095, 12164), 'empymod.loop', 'empymod.loop', ([], {'src': '[0, 0, 0, 0, 90]', 'rec': '[rxx, ryy, 10, 0, 0]'}), '(src=[0, 0, 0, 0, 90], rec=[rxx, ryy, 10, 0, 0], **model)\n', (12107, 12164), False, 'import empymod\n'), ((12641, 12653), 'numpy.shape', 'np.shape', (['rx'], {}), '(rx)\n', (12649, 12653), True, 'import numpy as np\n'), ((12669, 12733), 'empymod.dipole', 'empymod.dipole', ([], {'src': '[+0.0, +0.5, 0]', 'rec': 'rec_dip', 'ab': '(41)'}), '(src=[+0.0, +0.5, 0], rec=rec_dip, ab=41, **model)\n', (12683, 12733), False, 'import empymod\n'), ((12799, 12863), 'empymod.dipole', 'empymod.dipole', ([], {'src': '[-0.5, +0.0, 0]', 'rec': 'rec_dip', 'ab': '(42)'}), '(src=[-0.5, 
+0.0, 0], rec=rec_dip, ab=42, **model)\n', (12813, 12863), False, 'import empymod\n'), ((12929, 12993), 'empymod.dipole', 'empymod.dipole', ([], {'src': '[+0.0, -0.5, 0]', 'rec': 'rec_dip', 'ab': '(41)'}), '(src=[+0.0, -0.5, 0], rec=rec_dip, ab=41, **model)\n', (12943, 12993), False, 'import empymod\n'), ((5877, 5886), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5884, 5886), True, 'import matplotlib.pyplot as plt\n'), ((7096, 7105), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (7103, 7105), True, 'import matplotlib.pyplot as plt\n'), ((9159, 9177), 'numpy.abs', 'np.abs', (['data1.real'], {}), '(data1.real)\n', (9165, 9177), True, 'import numpy as np\n'), ((9418, 9436), 'numpy.abs', 'np.abs', (['data2.real'], {}), '(data2.real)\n', (9424, 9436), True, 'import numpy as np\n'), ((9843, 9871), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', (['"""RdBu_r"""', '(8)'], {}), "('RdBu_r', 8)\n", (9858, 9871), True, 'import matplotlib.pyplot as plt\n'), ((10040, 10058), 'numpy.abs', 'np.abs', (['data1.imag'], {}), '(data1.imag)\n', (10046, 10058), True, 'import numpy as np\n'), ((10287, 10305), 'numpy.abs', 'np.abs', (['data2.imag'], {}), '(data2.imag)\n', (10293, 10305), True, 'import numpy as np\n'), ((10694, 10722), 'matplotlib.pyplot.cm.get_cmap', 'plt.cm.get_cmap', (['"""RdBu_r"""', '(8)'], {}), "('RdBu_r', 8)\n", (10709, 10722), True, 'import matplotlib.pyplot as plt\n'), ((12539, 12603), 'empymod.dipole', 'empymod.dipole', ([], {'src': '[+0.5, +0.0, 0]', 'rec': 'rec_dip', 'ab': '(42)'}), '(src=[+0.5, +0.0, 0], rec=rec_dip, ab=42, **model)\n', (12553, 12603), False, 'import empymod\n')] |
import csv
from binance.client import Client
import pandas as pd
KEY ='<KEY>'
SECRET='<KEY>'
client = Client(KEY, SECRET)
candlesticks = client.get_historical_klines("BTCUSDT", Client.KLINE_INTERVAL_1DAY, "1 Jan, 2020", "12 Feb, 2020")
print(candlesticks)
time=[]
open=[]
high=[]
low=[]
close=[]
cols=['time', 'open', 'high', 'low','close']
for c in candlesticks:
time.append(c[0]/1000)
open.append(c[1])
high.append(c[2])
low.append(c[3])
close.append(c[4])
data_list=[time,open,high,low,close]
df=pd.DataFrame(columns=cols)
df.time=time
df.open=open
df.low=low
df.high=high
df.close=close
df.to_csv('data.csv',index=False)
| [
"pandas.DataFrame",
"binance.client.Client"
] | [((102, 121), 'binance.client.Client', 'Client', (['KEY', 'SECRET'], {}), '(KEY, SECRET)\n', (108, 121), False, 'from binance.client import Client\n'), ((510, 536), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'cols'}), '(columns=cols)\n', (522, 536), True, 'import pandas as pd\n')] |
from django.contrib.auth import authenticate
from rest_framework import exceptions, serializers
from .models import Projects, Users
class MerchSerializer(serializers.ModelSerializer):
class Meta:
model = Projects
fields = ('title', 'description', 'image','link','date_posted','user')
class MerchUser(serializers.ModelSerializer):
class Meta:
model = Users
fields = ('email', 'username', 'bio','profile_photo')
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = Users
fields = ['email', 'username', 'password']
extra_kwargs = {'password': {'write_only': True}}
def create(self, validated_data):
user = Users(
email=validated_data['email'],
username=validated_data['username']
)
user.set_password(validated_data['password'])
user.save()
return user
class LoginSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, data):
username = data.get("username", "")
password = data.get("password", "")
if username and password:
user = authenticate(username=username, password=password)
if user:
if user.is_active:
data["user"] = user
else:
msg = "User is deactivated."
raise exceptions.ValidationError(msg)
else:
msg = "Unable to login with given credentials."
raise exceptions.ValidationError(msg)
else:
msg = "Must provide username and password both."
raise exceptions.ValidationError(msg)
return data | [
"django.contrib.auth.authenticate",
"rest_framework.serializers.CharField",
"rest_framework.exceptions.ValidationError"
] | [((971, 994), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (992, 994), False, 'from rest_framework import exceptions, serializers\n'), ((1010, 1033), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {}), '()\n', (1031, 1033), False, 'from rest_framework import exceptions, serializers\n'), ((1207, 1257), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (1219, 1257), False, 'from django.contrib.auth import authenticate\n'), ((1712, 1743), 'rest_framework.exceptions.ValidationError', 'exceptions.ValidationError', (['msg'], {}), '(msg)\n', (1738, 1743), False, 'from rest_framework import exceptions, serializers\n'), ((1587, 1618), 'rest_framework.exceptions.ValidationError', 'exceptions.ValidationError', (['msg'], {}), '(msg)\n', (1613, 1618), False, 'from rest_framework import exceptions, serializers\n'), ((1451, 1482), 'rest_framework.exceptions.ValidationError', 'exceptions.ValidationError', (['msg'], {}), '(msg)\n', (1477, 1482), False, 'from rest_framework import exceptions, serializers\n')] |
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import APITestCase
from sentry.models import UserReport
class ProjectUserReportsTest(APITestCase):
def test_simple(self):
self.login_as(user=self.user)
project = self.create_project()
group = self.create_group(project=project)
report_1 = UserReport.objects.create(
project=project,
event_id='a' * 32,
name='Foo',
email='<EMAIL>',
comments='Hello world',
group=group,
)
# should not be included due to missing link
UserReport.objects.create(
project=project,
event_id='b' * 32,
name='Bar',
email='<EMAIL>',
comments='Hello world',
)
url = reverse('sentry-api-0-project-user-reports', kwargs={
'organization_slug': project.organization.slug,
'project_slug': project.slug,
})
response = self.client.get(url, format='json')
assert response.status_code == 200, response.content
assert len(response.data) == 1
assert sorted(map(lambda x: x['id'], response.data)) == sorted([
str(report_1.id),
])
| [
"sentry.models.UserReport.objects.create",
"django.core.urlresolvers.reverse"
] | [((385, 516), 'sentry.models.UserReport.objects.create', 'UserReport.objects.create', ([], {'project': 'project', 'event_id': "('a' * 32)", 'name': '"""Foo"""', 'email': '"""<EMAIL>"""', 'comments': '"""Hello world"""', 'group': 'group'}), "(project=project, event_id='a' * 32, name='Foo',\n email='<EMAIL>', comments='Hello world', group=group)\n", (410, 516), False, 'from sentry.models import UserReport\n'), ((658, 776), 'sentry.models.UserReport.objects.create', 'UserReport.objects.create', ([], {'project': 'project', 'event_id': "('b' * 32)", 'name': '"""Bar"""', 'email': '"""<EMAIL>"""', 'comments': '"""Hello world"""'}), "(project=project, event_id='b' * 32, name='Bar',\n email='<EMAIL>', comments='Hello world')\n", (683, 776), False, 'from sentry.models import UserReport\n'), ((859, 994), 'django.core.urlresolvers.reverse', 'reverse', (['"""sentry-api-0-project-user-reports"""'], {'kwargs': "{'organization_slug': project.organization.slug, 'project_slug': project.slug}"}), "('sentry-api-0-project-user-reports', kwargs={'organization_slug':\n project.organization.slug, 'project_slug': project.slug})\n", (866, 994), False, 'from django.core.urlresolvers import reverse\n')] |
"""
Script for visualizing a robot from a URDF.
Author: <NAME>
"""
import argparse
import urdfpy
if __name__ == '__main__':
# Parse Args
parser = argparse.ArgumentParser(
description='Visualize a robot from a URDF file'
)
parser.add_argument('urdf', type=str,
help='Path to URDF file that describes the robot')
parser.add_argument('-a', action='store_true',
help='Visualize robot articulation')
parser.add_argument('-c', action='store_true',
help='Use collision geometry')
args = parser.parse_args()
robot = urdfpy.URDF.load(args.urdf)
if args.a:
robot.animate(use_collision=args.c)
else:
robot.show(use_collision=args.c)
| [
"urdfpy.URDF.load",
"argparse.ArgumentParser"
] | [((157, 230), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Visualize a robot from a URDF file"""'}), "(description='Visualize a robot from a URDF file')\n", (180, 230), False, 'import argparse\n'), ((625, 652), 'urdfpy.URDF.load', 'urdfpy.URDF.load', (['args.urdf'], {}), '(args.urdf)\n', (641, 652), False, 'import urdfpy\n')] |
# -*- coding:utf-8 -*-
import os
import sys
sys.path.insert(0, '../')
import unittest
import membership_pb2
import utils
import models
class HeaderTest(utils.TestCase):
SERVICE_PLATFORM = models.service_platform()
PROTOCOL_VERSION = models.protocol_version()
def setUp(self):
app = models.app(app_id=1)
self.APP_NAME = app['app_name']
self.APP_KEY = app['app_key']
self.APP_SECRET = app['app_secret']
self.FACEBOOK_APP_ID = app['facebook_app_id']
self.FACEBOOK_APP_SECRET = app['facebook_app_secret']
self.FACEBOOK_API_VERSION = app['facebook_api_version']
def tearDown(self):
pass
# 잘못된 Accept 헤더
def test_headers_invalid_accept(self):
headers = self.make_header(accept=u'Invalid Accept')
response = self.get('/ping/access_token=access_token', headers=headers)
assert response.status_code == 404
# 잘못된 Content-Type 헤더
def test_headers_invalid_content_type(self):
headers = self.make_header(content_type=u'application/text')
response = self.get('/ping/access_token=access_token', headers=headers)
assert response.status_code == 404
# 잘못된 User-Agent 헤더
def test_headers_invalid_user_agent(self):
headers = self.make_header(user_agent=self.APP_NAME)
response = self.get('/ping/access_token=access_token', headers=headers)
assert response.status_code == 404
# 프로토콜 버전이 일치하지 않음
def test_headers_mismatch_protocol_version(self):
headers = self.make_header(user_agent=';'.join([self.APP_NAME, '10000', self.SERVICE_PLATFORM]))
request = self.protocol_members()
response = self.post('/members', headers=headers, data=self.make_request(request))
assert response.status_code == 406 # Not Acceptable
# 잘못된 digest 정보
def test_headers_invalid_digest(self):
headers = self.make_header(digest=u'digest')
response = self.get('/ping/access_token=access_token', headers=headers)
assert response.status_code == 404
| [
"models.service_platform",
"sys.path.insert",
"models.protocol_version",
"models.app"
] | [((45, 70), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../"""'], {}), "(0, '../')\n", (60, 70), False, 'import sys\n'), ((195, 220), 'models.service_platform', 'models.service_platform', ([], {}), '()\n', (218, 220), False, 'import models\n'), ((242, 267), 'models.protocol_version', 'models.protocol_version', ([], {}), '()\n', (265, 267), False, 'import models\n'), ((298, 318), 'models.app', 'models.app', ([], {'app_id': '(1)'}), '(app_id=1)\n', (308, 318), False, 'import models\n')] |
import requests
from json import loads
from termcolor import colored
from configparser import RawConfigParser
def init(domain):
PDCH = []
print(colored("[*]-Searching Project Discovery Chaos...", "yellow"))
parser = RawConfigParser()
parser.read("config.ini")
CHAOS_KEY = parser.get("PDChaos", "CHAOS_API_KEY")
if CHAOS_KEY == "":
print(" \__", colored("No Project Discovery Chaos API key configured", "red"))
return []
headers = {"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:88.0) Gecko/20100101 Firefox/88.0", "Authorization": CHAOS_KEY}
url = "https://dns.projectdiscovery.io/dns/{0}/subdomains".format(domain)
try:
response = requests.get(url, headers=headers).text
subdomains = loads(response)["subdomains"]
for subdomain in subdomains:
if subdomain:
PDCH.append("{0}.{1}".format(subdomain, domain))
PDCH = set(PDCH)
print(" \__ {0}: {1}".format(colored("Subdomains found", "cyan"), colored(len(PDCH), "yellow")))
return PDCH
except requests.exceptions.RequestException as err:
print(" \__", colored(err, "red"))
return []
except requests.exceptions.HTTPError as errh:
print(" \__", colored(errh, "red"))
return []
except requests.exceptions.ConnectionError as errc:
print(" \__", colored(errc, "red"))
return []
except requests.exceptions.Timeout as errt:
print(" \__", colored(errt, "red"))
return []
except Exception:
print(" \__", colored("Something went wrong!", "red"))
return []
| [
"json.loads",
"configparser.RawConfigParser",
"requests.get",
"termcolor.colored"
] | [((223, 240), 'configparser.RawConfigParser', 'RawConfigParser', ([], {}), '()\n', (238, 240), False, 'from configparser import RawConfigParser\n'), ((149, 210), 'termcolor.colored', 'colored', (['"""[*]-Searching Project Discovery Chaos..."""', '"""yellow"""'], {}), "('[*]-Searching Project Discovery Chaos...', 'yellow')\n", (156, 210), False, 'from termcolor import colored\n'), ((359, 422), 'termcolor.colored', 'colored', (['"""No Project Discovery Chaos API key configured"""', '"""red"""'], {}), "('No Project Discovery Chaos API key configured', 'red')\n", (366, 422), False, 'from termcolor import colored\n'), ((668, 702), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (680, 702), False, 'import requests\n'), ((723, 738), 'json.loads', 'loads', (['response'], {}), '(response)\n', (728, 738), False, 'from json import loads\n'), ((908, 943), 'termcolor.colored', 'colored', (['"""Subdomains found"""', '"""cyan"""'], {}), "('Subdomains found', 'cyan')\n", (915, 943), False, 'from termcolor import colored\n'), ((1061, 1080), 'termcolor.colored', 'colored', (['err', '"""red"""'], {}), "(err, 'red')\n", (1068, 1080), False, 'from termcolor import colored\n'), ((1159, 1179), 'termcolor.colored', 'colored', (['errh', '"""red"""'], {}), "(errh, 'red')\n", (1166, 1179), False, 'from termcolor import colored\n'), ((1264, 1284), 'termcolor.colored', 'colored', (['errc', '"""red"""'], {}), "(errc, 'red')\n", (1271, 1284), False, 'from termcolor import colored\n'), ((1361, 1381), 'termcolor.colored', 'colored', (['errt', '"""red"""'], {}), "(errt, 'red')\n", (1368, 1381), False, 'from termcolor import colored\n'), ((1432, 1471), 'termcolor.colored', 'colored', (['"""Something went wrong!"""', '"""red"""'], {}), "('Something went wrong!', 'red')\n", (1439, 1471), False, 'from termcolor import colored\n')] |
from pathlib import Path
def absolute_path(path):
src_path = str(Path(__file__).parent.resolve())
return src_path + "/" + path
# Paths
DATASET_DIRECTORY = absolute_path("../dataset")
CLEAN_DATA_PATH = absolute_path("clean_data")
TIME_SERIES_PATH = absolute_path("clean_data/series.npy")
TRAINED_MODELS_PATH = absolute_path("trained_models")
SCALER_PATH = absolute_path("trained_models/scaler.pkl")
GRU_PATH = absolute_path("trained_models/gru.hdf5")
LSTM_PATH = absolute_path("trained_models/lstm.hdf5")
XGB_MSE_REGRESSOR_PATH = absolute_path("trained_models/xgb-mse.json")
XGB_HUBBER_REGRESSOR_PATH = absolute_path("trained_models/xgb-hub.json")
XGB_QUANTILE_REGRESSOR_PREFIX = absolute_path("trained_models/xgb-")
ENSEMBLE_MODEL_PATH = absolute_path("trained_models/ensemble.hdf5")
SL_DATASET_TEMPLATE = absolute_path("clean_data/dataset-{}.npz")
# Create directories if not exist
Path(TRAINED_MODELS_PATH).mkdir(parents=False, exist_ok=True)
Path(CLEAN_DATA_PATH).mkdir(parents=False, exist_ok=True)
# Training set size.
LVL_0_TRAIN_SIZE = 450000
LVL_1_TRAIN_SIZE = 105000
# Default batch size for every model
DEFAULT_BATCH_SIZE = 1000
# Dataset repetitions (data augmentation).
# XGB regressors seem to work better without data augmentation.
DATA_REPETITIONS_XGB = 1
# NN models seem to work better with 3 dataset repetitions.
DATA_REPETITIONS_NN = 3
| [
"pathlib.Path"
] | [((897, 922), 'pathlib.Path', 'Path', (['TRAINED_MODELS_PATH'], {}), '(TRAINED_MODELS_PATH)\n', (901, 922), False, 'from pathlib import Path\n'), ((959, 980), 'pathlib.Path', 'Path', (['CLEAN_DATA_PATH'], {}), '(CLEAN_DATA_PATH)\n', (963, 980), False, 'from pathlib import Path\n'), ((71, 85), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (75, 85), False, 'from pathlib import Path\n')] |
from pykafka import KafkaClient, exceptions
import logging, time
from robot.api import logger
import re
logging.basicConfig()
# RobotFramework.logger: http://robot-framework.readthedocs.org/en/2.9.2/_modules/robot/api/logger.html
# Print message to RobotFramework console & python output
def msg_print(msg):
print('[Lib_Kafka] ' + msg)
logger.info('[Lib_Kafka] ' + msg)
def event_consumer(host_kafka, host_zookeeper, topic, assertion_event_content, match_partially=False, timeout_sec=20):
try:
client = KafkaClient(hosts=host_kafka, socket_timeout_ms=10000)
topic = client.topics[str(topic)]
consumer = topic.get_balanced_consumer(consumer_group='Group-Lib_Kafka',
auto_commit_enable=True,
zookeeper_connect=host_zookeeper,
consumer_timeout_ms=100)
msg_print('Consumer started!')
if not match_partially:
msg_print('Search with method: Match All')
else:
msg_print('Search with method: Match Partially')
timeout = time.time() + int(timeout_sec) # timeout seconds from now
got_msg = False
while True:
msg = consumer.consume()
if msg is not None:
msg_print('Got event:' + str(msg.offset) + ' - ' + msg.value)
if not match_partially:
if msg.value == assertion_event_content:
got_msg = True
break
else:
if re.search(assertion_event_content, msg.value):
got_msg = True
break
if time.time() > timeout:
break
consumer.stop()
if got_msg:
return_result = 'GotEvent'
else:
return_result = 'Timeout'
except exceptions.KafkaException:
return_result = 'KafkaConnectFailed'
msg_print('Consumer stopped! ' + return_result)
return return_result
def event_consumer_multiple_match_partially(host_kafka, host_zookeeper, topic, assertion_event_content_arr, timeout_sec=20):
try:
client = KafkaClient(hosts=host_kafka, socket_timeout_ms=10000)
topic = client.topics[str(topic)]
consumer = topic.get_balanced_consumer(consumer_group='Group-Lib_Kafka',
auto_commit_enable=True,
zookeeper_connect=host_zookeeper,
consumer_timeout_ms=100)
msg_print('Consumer started!')
timeout = time.time() + int(timeout_sec) # timeout seconds from now
got_msg = False
while True:
msg = consumer.consume()
if msg is not None:
msg_print('Got event:' + str(msg.offset) + ' - ' + msg.value)
expect_assertion_count = len(assertion_event_content_arr)
got_assertion_count = 0
for assertion in assertion_event_content_arr:
if re.search(assertion, msg.value):
msg_print('Got assertion:' + assertion)
got_assertion_count += 1
if got_assertion_count == expect_assertion_count:
got_msg = True
break
if time.time() > timeout:
break
consumer.stop()
if got_msg:
return_result = 'GotEvent'
else:
return_result = 'Timeout'
except exceptions.KafkaException:
return_result = 'KafkaConnectFailed'
msg_print('Consumer stopped! ' + return_result)
return return_result
def event_single_consumer(host_kafka, host_zookeeper, topic, timeout_sec=20):
try:
client = KafkaClient(hosts=host_kafka, socket_timeout_ms=10000)
topic = client.topics[str(topic)]
consumer = topic.get_balanced_consumer(consumer_group='Group-Lib_Kafka',
auto_commit_enable=True,
zookeeper_connect=host_zookeeper,
consumer_timeout_ms=100)
timeout = time.time() + int(timeout_sec) # timeout seconds from now
got_msg = False
event = ''
while True:
msg = consumer.consume()
if msg is not None:
msg_print('Got event:' + str(msg.offset) + ' - ' + msg.value)
event = msg.value
got_msg = True
break
if time.time() > timeout:
break
consumer.stop()
if got_msg:
return_result = 'GotEvent'
else:
return_result = 'Timeout'
except exceptions.KafkaException:
return_result = 'KafkaConnectFailed'
r = {'result': return_result, 'event': event}
return r
def event_consume_to_latest(host_kafka, host_zookeeper, topic):
try:
client = KafkaClient(hosts=host_kafka, socket_timeout_ms=10000)
topic = client.topics[str(topic)]
consumer = topic.get_balanced_consumer(consumer_group='Group-Lib_Kafka',
auto_commit_enable=True,
zookeeper_connect=host_zookeeper,
consumer_timeout_ms=100)
msg_print('Consume to latest offset started!')
consume_done = False
while True:
msg = consumer.consume()
if msg is not None:
msg_print('Got event:' + str(msg.offset) + ' - ' + msg.value)
else:
consume_done = True
break
consumer.stop()
if consume_done:
return_result = 'ConsumeDone'
else:
return_result = 'Error'
except exceptions.KafkaException:
return_result = 'KafkaConnectFailed'
msg_print('Consumer stopped! ' + return_result)
return return_result
def event_producer(host_kafka, topic, event_content_arr, wait_ack=False):
event_num = len(event_content_arr)
event_sent = 0
msg_print('Producer started!')
try:
client = KafkaClient(hosts=host_kafka, socket_timeout_ms=10000)
topic = client.topics[str(topic)]
if bool(wait_ack):
msg_print('Waiting for ack mode!')
with topic.get_sync_producer() as producer: # Waiting for ack
for event_content in event_content_arr:
producer.produce(str(event_content))
msg_print('Send event:' + event_content)
event_sent += 1
else:
msg_print('No waiting for ack mode!')
with topic.get_producer(delivery_reports=True) as producer: # No waiting for ack
for event_content in event_content_arr:
producer.produce(str(event_content))
msg_print('Send event in index ' + str(event_sent) + ' success')
event_sent += 1
if event_num == event_sent:
return_result = 'SendSuccess'
else:
return_result = 'SendFailed'
except exceptions.KafkaException:
return_result = 'KafkaConnectFailed'
msg_print('Producer stopped! ' + return_result)
return return_result
| [
"logging.basicConfig",
"pykafka.KafkaClient",
"robot.api.logger.info",
"time.time",
"re.search"
] | [((105, 126), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (124, 126), False, 'import logging, time\n'), ((347, 380), 'robot.api.logger.info', 'logger.info', (["('[Lib_Kafka] ' + msg)"], {}), "('[Lib_Kafka] ' + msg)\n", (358, 380), False, 'from robot.api import logger\n'), ((528, 582), 'pykafka.KafkaClient', 'KafkaClient', ([], {'hosts': 'host_kafka', 'socket_timeout_ms': '(10000)'}), '(hosts=host_kafka, socket_timeout_ms=10000)\n', (539, 582), False, 'from pykafka import KafkaClient, exceptions\n'), ((2244, 2298), 'pykafka.KafkaClient', 'KafkaClient', ([], {'hosts': 'host_kafka', 'socket_timeout_ms': '(10000)'}), '(hosts=host_kafka, socket_timeout_ms=10000)\n', (2255, 2298), False, 'from pykafka import KafkaClient, exceptions\n'), ((3892, 3946), 'pykafka.KafkaClient', 'KafkaClient', ([], {'hosts': 'host_kafka', 'socket_timeout_ms': '(10000)'}), '(hosts=host_kafka, socket_timeout_ms=10000)\n', (3903, 3946), False, 'from pykafka import KafkaClient, exceptions\n'), ((5106, 5160), 'pykafka.KafkaClient', 'KafkaClient', ([], {'hosts': 'host_kafka', 'socket_timeout_ms': '(10000)'}), '(hosts=host_kafka, socket_timeout_ms=10000)\n', (5117, 5160), False, 'from pykafka import KafkaClient, exceptions\n'), ((6339, 6393), 'pykafka.KafkaClient', 'KafkaClient', ([], {'hosts': 'host_kafka', 'socket_timeout_ms': '(10000)'}), '(hosts=host_kafka, socket_timeout_ms=10000)\n', (6350, 6393), False, 'from pykafka import KafkaClient, exceptions\n'), ((1153, 1164), 'time.time', 'time.time', ([], {}), '()\n', (1162, 1164), False, 'import logging, time\n'), ((2707, 2718), 'time.time', 'time.time', ([], {}), '()\n', (2716, 2718), False, 'import logging, time\n'), ((4315, 4326), 'time.time', 'time.time', ([], {}), '()\n', (4324, 4326), False, 'import logging, time\n'), ((1749, 1760), 'time.time', 'time.time', ([], {}), '()\n', (1758, 1760), False, 'import logging, time\n'), ((3444, 3455), 'time.time', 'time.time', ([], {}), '()\n', (3453, 3455), False, 'import logging, 
time\n'), ((4686, 4697), 'time.time', 'time.time', ([], {}), '()\n', (4695, 4697), False, 'import logging, time\n'), ((1618, 1663), 're.search', 're.search', (['assertion_event_content', 'msg.value'], {}), '(assertion_event_content, msg.value)\n', (1627, 1663), False, 'import re\n'), ((3156, 3187), 're.search', 're.search', (['assertion', 'msg.value'], {}), '(assertion, msg.value)\n', (3165, 3187), False, 'import re\n')] |
'''OpenGL extension ANGLE.program_binary
This module customises the behaviour of the
OpenGL.raw.GLES2.ANGLE.program_binary to provide a more
Python-friendly API
Overview (from the spec)
This extension makes available a program binary format,
PROGRAM_BINARY_ANGLE. It enables retrieving and loading of pre-linked
ANGLE program objects.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ANGLE/program_binary.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLES2 import _types, _glgets
from OpenGL.raw.GLES2.ANGLE.program_binary import *
from OpenGL.raw.GLES2.ANGLE.program_binary import _EXTENSION_NAME
def glInitProgramBinaryANGLE():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION | [
"OpenGL.extensions.hasGLExtension"
] | [((884, 926), 'OpenGL.extensions.hasGLExtension', 'extensions.hasGLExtension', (['_EXTENSION_NAME'], {}), '(_EXTENSION_NAME)\n', (909, 926), False, 'from OpenGL import extensions\n')] |
import subprocess
import os
def getBlame(f):
folder = os.path.split(f)[0]
cwd = os.getcwd()
os.chdir(folder)
cmd = "git blame --abbrev=0 -e \"" + f + "\""
try:
sub = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
response, err = sub.communicate()
response = response.decode()
err = err.decode()
except subprocess.CalledProcessError as e:
print("Error: " + e.output)
response = ''
except UnicodeDecodeError as e:
print("Error: UnicodeDecodeError")
response = ''
if len(err) > 0:
if "no such path" in err:
response = '' # Ignore new file.
else:
print("Error: " + err)
response = ''
if response == '':
data_by_line = None
else:
data_by_line = response.split('\n')
os.chdir(cwd)
return data_by_line
def getAuthor(f,line):
author = 'Not found'
line += 1 # no line zero.
folder = os.path.split(f)[0]
cwd = os.getcwd()
os.chdir(folder)
cmd = "git blame -p -L " + str(line) + "," + str(line) + "\"" + f + "\""
try:
sub = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
response, err = sub.communicate()
response = response.decode()
err = err.decode()
except subprocess.CalledProcessError as e:
print(e.output)
response = ''
os.chdir(cwd)
if 'fatal' in err:
return author
data_by_line = response.split('\n')
for row in data_by_line:
if row[:7] == 'author ':
author = row[7:]
break
return author
def getRepo(folder):
cmd = 'git config --get remote.origin.url'
cwd = os.getcwd()
os.chdir(folder)
try:
response = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read()
response = response.decode()
except subprocess.CalledProcessError as e:
print(e.output)
response = ''
os.chdir(cwd)
return response
def getBranch(folder):
cmd = 'git branch | grep \'*\''
cwd = os.getcwd()
os.chdir(folder)
try:
response = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read()
response = response.decode()
except subprocess.CalledProcessError as e:
print(e.output)
response = ''
os.chdir(cwd)
return response[2:]
def getDiff(folder):
cmd = 'git diff --name-status HEAD..HEAD~'
cwd = os.getcwd()
os.chdir(folder)
try:
response = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).stdout.read()
response = response.decode()
except subprocess.CalledProcessError as e:
print(e.output)
response = ''
os.chdir(cwd)
return response
def checkoutRevision(folder, prev):
cmd = 'git checkout HEAD~' + str(int(prev))
cwd = os.getcwd()
os.chdir(folder)
try:
sub = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
_, err = sub.communicate()
err = err.decode()
except subprocess.CalledProcessError as e:
print('exception')
print(e.output)
os.chdir(cwd)
return err
def resetHead(folder, branch):
    """Check out *branch* in *folder* (undoing a detached HEAD); return stdout."""
    cmd = 'git checkout ' + branch
    try:
        # cwd=folder: no process-wide chdir, so nothing to restore on error.
        response = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                    cwd=folder).stdout.read()
        response = response.decode()
    except subprocess.CalledProcessError as e:
        print(e.output)
        response = ''
    return response
def getFileCount(folder):
    """Return the number of git-tracked files in *folder*, as a string."""
    cmd = 'git ls-files | wc -l'
    try:
        # cwd=folder: no process-wide chdir, so nothing to restore on error.
        response = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                    cwd=folder).stdout.read()
        response = response.decode()
    except subprocess.CalledProcessError as e:
        print(e.output)
        response = ''
    return response.strip()
def getLineCount(folder):
    """Return the 'total' line of 'wc -l' over all git-tracked files in *folder*."""
    cmd = 'git ls-files | xargs wc -l'
    try:
        # cwd=folder: no process-wide chdir, so nothing to restore on error.
        response = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                    cwd=folder).stdout.read()
        response = response.decode()
    except subprocess.CalledProcessError as e:
        print(e.output)
        response = ''
    # drop the trailing newline, then return the last ('total') line
    response = response[:-1].split('\n')
    return response[-1]
def getLastCommit(folder):
    """Return the 'git log -1' entry (hash, author, date, message) for *folder*."""
    cmd = 'git log -1 --date=local'
    try:
        # cwd=folder: no process-wide chdir, so nothing to restore on error.
        response = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                    cwd=folder).stdout.read()
        response = response.decode()
    except subprocess.CalledProcessError as e:
        print(e.output)
        response = ''
    return response
def getLastCommitDate(folder):
    """Extract the 'Date:' value from the most recent commit's log entry."""
    log_lines = getLastCommit(folder).split('\n')
    date_line = next((row for row in log_lines if 'Date:' in row), None)
    if date_line is not None:
        # strip the 'Date:' prefix and surrounding whitespace
        return date_line[5:].strip()
    return 'Date not found.'
def getCommitNumber(folder):
    """Return the abbreviated (7-character) hash of the most recent commit."""
    first_line = getLastCommit(folder).split('\n')[0]
    # first log line looks like 'commit <hash>'; chars 7..13 are the short hash
    return first_line[7:14]
def parseRepo(url):
    """Derive the bare repository name from a remote URL.

    A URL ending in 'user/repo.git' (possibly newline-terminated) yields 'repo'.
    """
    #url = url.decode()
    tail = url.rsplit('/', 1)[-1]       # part after the last slash
    name = tail.partition('.')[0]       # strip the '.git' suffix
    return name.partition('\n')[0]      # strip any trailing newline
def getBaseRepoName(folder):
    """Return the repository name derived from the 'origin' remote URL of *folder*."""
    return parseRepo(getRepo(folder))
if __name__ == '__main__':
    # Manual smoke test of the git helpers, run against birdseye.py in the
    # current working directory.
    working_dir = os.getcwd()
    target_file = working_dir + '/birdseye.py'
    repo_dir = os.path.split(target_file)[0]
    target_line = 20
    print()
    print("Get author: ")
    print(getAuthor(target_file, target_line))
    print()
    print("Line count: " + getLineCount(repo_dir))
    file_count = getFileCount(repo_dir)
    print()
    print("File count: " + file_count)
    last_commit = getLastCommit(repo_dir)
    print()
    print("Last commit: ")
    print(last_commit)
    last_commit_date = getLastCommitDate(repo_dir)
    print()
    print("Last commit date: ")
    print(last_commit_date)
    commit_number = getCommitNumber(repo_dir)
    print()
    print("Last commit number: ")
    print(commit_number)
    repo_url = getRepo(repo_dir)
    print()
    print("Repo: " + repo_url)
    base_name = parseRepo(repo_url)
    print()
    print("Base: " + base_name)
    print("Base repo name: " + getBaseRepoName(repo_dir))
    print()
#print(resetHead(folder,branch)) | [
"os.chdir",
"os.path.split",
"subprocess.Popen",
"os.getcwd"
] | [((92, 103), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (101, 103), False, 'import os\n'), ((108, 124), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (116, 124), False, 'import os\n'), ((920, 933), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (928, 933), False, 'import os\n'), ((1080, 1091), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1089, 1091), False, 'import os\n'), ((1096, 1112), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (1104, 1112), False, 'import os\n'), ((1523, 1536), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (1531, 1536), False, 'import os\n'), ((1839, 1850), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1848, 1850), False, 'import os\n'), ((1855, 1871), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (1863, 1871), False, 'import os\n'), ((2126, 2139), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (2134, 2139), False, 'import os\n'), ((2234, 2245), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2243, 2245), False, 'import os\n'), ((2250, 2266), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (2258, 2266), False, 'import os\n'), ((2521, 2534), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (2529, 2534), False, 'import os\n'), ((2642, 2653), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2651, 2653), False, 'import os\n'), ((2658, 2674), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (2666, 2674), False, 'import os\n'), ((2921, 2934), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (2929, 2934), False, 'import os\n'), ((3055, 3066), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3064, 3066), False, 'import os\n'), ((3071, 3087), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (3079, 3087), False, 'import os\n'), ((3368, 3381), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (3376, 3381), False, 'import os\n'), ((3479, 3490), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3488, 3490), False, 'import os\n'), ((3495, 3511), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', 
(3503, 3511), False, 'import os\n'), ((3766, 3779), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (3774, 3779), False, 'import os\n'), ((3874, 3885), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3883, 3885), False, 'import os\n'), ((3890, 3906), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (3898, 3906), False, 'import os\n'), ((4153, 4166), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (4161, 4166), False, 'import os\n'), ((4275, 4286), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4284, 4286), False, 'import os\n'), ((4291, 4307), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (4299, 4307), False, 'import os\n'), ((4562, 4575), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (4570, 4575), False, 'import os\n'), ((4719, 4730), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (4728, 4730), False, 'import os\n'), ((4735, 4751), 'os.chdir', 'os.chdir', (['folder'], {}), '(folder)\n', (4743, 4751), False, 'import os\n'), ((5006, 5019), 'os.chdir', 'os.chdir', (['cwd'], {}), '(cwd)\n', (5014, 5019), False, 'import os\n'), ((5668, 5679), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (5677, 5679), False, 'import os\n'), ((62, 78), 'os.path.split', 'os.path.split', (['f'], {}), '(f)\n', (75, 78), False, 'import os\n'), ((206, 292), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess\n .PIPE)\n', (222, 292), False, 'import subprocess\n'), ((1050, 1066), 'os.path.split', 'os.path.split', (['f'], {}), '(f)\n', (1063, 1066), False, 'import os\n'), ((1222, 1308), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess\n .PIPE)\n', (1238, 1308), False, 'import subprocess\n'), ((3115, 3201), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 
'stderr': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess\n .PIPE)\n', (3131, 3201), False, 'import subprocess\n'), ((5722, 5738), 'os.path.split', 'os.path.split', (['f'], {}), '(f)\n', (5735, 5738), False, 'import os\n'), ((1904, 1961), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (1920, 1961), False, 'import subprocess\n'), ((2299, 2356), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (2315, 2356), False, 'import subprocess\n'), ((2707, 2764), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (2723, 2764), False, 'import subprocess\n'), ((3544, 3601), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (3560, 3601), False, 'import subprocess\n'), ((3939, 3996), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (3955, 3996), False, 'import subprocess\n'), ((4340, 4397), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (4356, 4397), False, 'import subprocess\n'), ((4784, 4841), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), '(cmd, shell=True, stdout=subprocess.PIPE)\n', (4800, 4841), False, 'import subprocess\n')] |
import numpy as np
import pandas as pd
from sklearn import preprocessing
import math
def load_datasets_feature(filename):
    """Read the per-dataset feature table from *filename*.

    The file is a headed CSV whose commas may be padded with whitespace,
    hence the regex separator.
    """
    return pd.read_csv(filename, delimiter='\\s*,\\s*', header=0)
def load_join_data3(features_df, result_file, histograms_path, num_rows, num_columns):
    """Load join results, attach per-dataset features, histograms and BOPS.

    Args:
        features_df: per-dataset feature table (see load_datasets_feature).
        result_file: headerless CSV of join results with columns
            (dataset1, dataset2, result_size, mbr_tests, duration),
            commas possibly padded with whitespace.
        histograms_path: root folder containing the '<rows>x<columns>'
            histogram directories.
        num_rows, num_columns: histogram grid dimensions.

    Returns:
        (result_df, ds1_histograms, ds2_histograms, ds_all_histogram,
        ds_bops_histogram). Numeric columns of result_df are min-max
        scaled to [0, 1], with related columns scaled together as groups.
    """
    cols = ['dataset1', 'dataset2', 'result_size', 'mbr_tests', 'duration']
    # Result DF contains dataset names, result cardinality, # of MBR tests, and duration in seconds
    result_df = pd.read_csv(result_file, delimiter='\\s*,\\s*', header=None, names=cols)
    # result_df = result_df.sample(frac=1)
    # Add dataset information of the first (left) dataset
    result_df = pd.merge(result_df, features_df, left_on='dataset1', right_on='dataset_name')
    # Add dataset information for the second (right) dataset
    result_df = pd.merge(result_df, features_df, left_on='dataset2', right_on='dataset_name')
    # Load histograms
    ds1_histograms, ds2_histograms, ds1_original_histograms, ds2_original_histograms, ds_all_histogram, ds_bops_histogram = load_histograms(
        result_df, histograms_path, num_rows, num_columns)
    #print(ds1_histograms.shape)
    #print(result_df.shape)
    #exit(0)
    # Compute BOPS
    # First, do an element-wise multiplication of the two histograms
    bops = np.multiply(ds1_original_histograms, ds2_original_histograms)
    # Reshape into a two dimensional array. First dimension represents the dataset number, e.g., first entry
    # represents the first dataset of each. Second dimension represents the values in the multiplied histograms
    bops = bops.reshape((bops.shape[0], num_rows * num_columns))
    # Sum the values in each row to compute the final BOPS value
    bops_values = np.sum(bops, axis=1)
    # The final reshape puts each BOPS value in an array with a single value. Thus it produces a 2D array.
    bops_values = bops_values.reshape((bops_values.shape[0], 1))
    result_df['bops'] = bops_values
    cardinality_x = result_df['cardinality_x']
    cardinality_y = result_df['cardinality_y']
    result_size = result_df['result_size']
    mbr_tests = result_df['mbr_tests']
    # Compute the join selectivity as result_cardinality/(cardinality x * cardinality y)
    result_df['join_selectivity'] = result_size / (cardinality_x * cardinality_y)
    # Compute the MBR selectivity in the same way
    result_df['mbr_tests_selectivity'] = mbr_tests / (cardinality_x * cardinality_y)
    # Apply MinMaxScaler to normalize numeric columns used in either training or testing to the range [0, 1]
    # The following transformation tries to adjust relevant columns to be scaled together
    column_groups = [
        ['duration'],
        ['AVG area_x', 'AVG area_y'],
        ['AVG x_x', 'AVG y_x', 'AVG x_y', 'AVG y_y'],
        ['E0_x', 'E2_x', 'E0_y', 'E2_y'],
        ['join_selectivity'],
        ['mbr_tests_selectivity'],
        ['cardinality_x', 'cardinality_y', 'result_size'],
        ['bops', 'mbr_tests']
    ]
    for column_group in column_groups:
        # Flatten the whole group into one column so its members share a
        # single [0, 1] scale, then restore the original shape.
        input_data = result_df[column_group].to_numpy()
        original_shape = input_data.shape
        reshaped = input_data.reshape(input_data.size, 1)
        reshaped = preprocessing.minmax_scale(reshaped)
        result_df[column_group] = reshaped.reshape(original_shape)
        #result_df[column_group] = scaler.fit_transform(result_df[column_group])
    return result_df, ds1_histograms, ds2_histograms, ds_all_histogram, ds_bops_histogram
def load_join_data(features_df, result_file, histograms_path, num_rows, num_columns):
    """Load join results, join them with dataset features, and build model inputs.

    Reads the headerless result CSV (dataset1, dataset2, result_size,
    mbr_tests, duration), merges in the per-dataset features for both sides,
    loads the corresponding histograms and computes BOPS (summed element-wise
    histogram products). Feature columns are min-max scaled; the selectivity
    targets are scaled by 1E5.

    Returns:
        (result_df, ds1_histograms, ds2_histograms, ds_all_histogram,
        ds_bops_histogram)
    """
    cols = ['dataset1', 'dataset2', 'result_size', 'mbr_tests', 'duration']
    # Result DF contains dataset names, result cardinality, # of MBR tests, and duration in seconds
    result_df = pd.read_csv(result_file, delimiter=',', header=None, names=cols)
    # result_df = result_df.sample(frac=1)
    # Add dataset information of the first (left) dataset
    result_df = pd.merge(result_df, features_df, left_on='dataset1', right_on='dataset_name')
    # Add dataset information for the second (right) dataset
    result_df = pd.merge(result_df, features_df, left_on='dataset2', right_on='dataset_name')
    # Load histograms
    ds1_histograms, ds2_histograms, ds1_original_histograms, ds2_original_histograms, ds_all_histogram, ds_bops_histogram = load_histograms(
        result_df, histograms_path, num_rows, num_columns)
    # Compute BOPS
    # First, do an element-wise multiplication of the two histograms
    bops = np.multiply(ds1_original_histograms, ds2_original_histograms)
    # Reshape into a two dimensional array. First dimension represents the dataset number, e.g., first entry
    # represents the first dataset of each. Second dimension represents the values in the multiplied histograms
    bops = bops.reshape((bops.shape[0], num_rows * num_columns))
    # Sum the values in each row to compute the final BOPS value
    bops_values = np.sum(bops, axis=1)
    # The final reshape puts each BOPS value in an array with a single value. Thus it produces a 2D array.
    bops_values = bops_values.reshape((bops_values.shape[0], 1))
    # result_df['bops'] = bops_values
    # NOTE: these feature-CSV column names carry a leading space (' cardinality_x')
    cardinality_x = result_df[' cardinality_x']
    cardinality_y = result_df[' cardinality_y']
    result_size = result_df['result_size']
    mbr_tests = result_df['mbr_tests']
    # Compute the join selectivity as result_cardinality/(cardinality x * cardinality y) * 10E+9
    join_selectivity = result_size / (cardinality_x * cardinality_y)
    join_selectivity = join_selectivity * 1E5
    # Compute the MBR selectivity in the same way
    mbr_tests_selectivity = mbr_tests / (cardinality_x * cardinality_y)
    mbr_tests_selectivity = mbr_tests_selectivity * 1E5
    duration = result_df['duration']
    dataset1 = result_df['dataset1']
    dataset2 = result_df['dataset2']
    # result_df = result_df.drop(columns=['result_size', 'dataset1', 'dataset2', 'dataset_name_x', 'dataset_name_y', ' cardinality_x', ' cardinality_y'])
    # result_df = result_df.drop(
    #     columns=['result_size', 'dataset1', 'dataset2', 'dataset_name_x', 'dataset_name_y'])
    # Drop identifiers and raw targets so only the feature columns get scaled
    result_df = result_df.drop(
        columns=['result_size', 'dataset1', 'dataset2', 'dataset_name_x', 'dataset_name_y', ' cardinality_x',
                 ' cardinality_y', 'mbr_tests', 'duration'])
    # Normalize all the values using MinMax scaler
    # These values are [AVG area_x, AVG x_x, AVG y_x, E0_x, E2_x, AVG area_y, AVG x_y, AVG y_y, E0_y, E2_y]
    x = result_df.values
    min_max_scaler = preprocessing.MinMaxScaler()
    x_scaled = min_max_scaler.fit_transform(x)
    result_df = pd.DataFrame(x_scaled, columns=result_df.columns)
    # Re-attach the unscaled identifying and target columns
    result_df['cardinality_x'] = cardinality_x
    result_df['cardinality_y'] = cardinality_y
    result_df['bops'] = bops_values
    result_df['dataset1'] = dataset1
    result_df['dataset2'] = dataset2
    result_df.insert(len(result_df.columns), 'result_size', result_size, True)
    result_df.insert(len(result_df.columns), 'join_selectivity', join_selectivity, True)
    result_df.insert(len(result_df.columns), 'mbr_tests', mbr_tests, True)
    result_df.insert(len(result_df.columns), 'mbr_tests_selectivity', mbr_tests_selectivity, True)
    result_df.insert(len(result_df.columns), 'duration', duration, True)
    return result_df, ds1_histograms, ds2_histograms, ds_all_histogram, ds_bops_histogram
def load_join_data2(features_df, result_file, histograms_path, num_rows, num_columns):
    """Variant of load_join_data for result CSVs with a leading 'count' column.

    The 'count' value selects a per-row histogram subfolder (see
    load_histograms2). Join selectivity is scaled by 10^9 here.

    Returns:
        (result_df, ds1_histograms, ds2_histograms, ds_all_histogram,
        ds_bops_histogram)
    """
    cols = ['count', 'dataset1', 'dataset2', 'result_size', 'mbr_tests', 'duration']
    result_df = pd.read_csv(result_file, delimiter=',', header=None, names=cols)
    # result_df = result_df.sample(frac=1)
    result_df = pd.merge(result_df, features_df, left_on='dataset1', right_on='dataset_name')
    result_df = pd.merge(result_df, features_df, left_on='dataset2', right_on='dataset_name')
    # Load histograms
    ds1_histograms, ds2_histograms, ds1_original_histograms, ds2_original_histograms, ds_all_histogram, ds_bops_histogram = load_histograms2(
        result_df, histograms_path, num_rows, num_columns)
    # Compute BOPS
    bops = np.multiply(ds1_original_histograms, ds2_original_histograms)
    # print (bops)
    bops = bops.reshape((bops.shape[0], num_rows * num_columns))
    bops_values = np.sum(bops, axis=1)
    bops_values = bops_values.reshape((bops_values.shape[0], 1))
    # result_df['bops'] = bops_values
    # NOTE: these feature-CSV column names carry a leading space (' cardinality_x')
    cardinality_x = result_df[' cardinality_x']
    cardinality_y = result_df[' cardinality_y']
    result_size = result_df['result_size']
    mbr_tests = result_df['mbr_tests']
    join_selectivity = result_size / (cardinality_x * cardinality_y)
    join_selectivity = join_selectivity * math.pow(10, 9)
    dataset1 = result_df['dataset1']
    dataset2 = result_df['dataset2']
    # result_df = result_df.drop(columns=['result_size', 'dataset1', 'dataset2', 'dataset_name_x', 'dataset_name_y', ' cardinality_x', ' cardinality_y'])
    # result_df = result_df.drop(
    #     columns=['result_size', 'dataset1', 'dataset2', 'dataset_name_x', 'dataset_name_y'])
    # Drop identifiers and raw targets so only the feature columns get scaled
    result_df = result_df.drop(
        columns=['count', 'result_size', 'dataset1', 'dataset2', 'dataset_name_x', 'dataset_name_y', ' cardinality_x',
                 ' cardinality_y', 'mbr_tests', 'duration'])
    x = result_df.values
    min_max_scaler = preprocessing.MinMaxScaler()
    x_scaled = min_max_scaler.fit_transform(x)
    result_df = pd.DataFrame(x_scaled)
    result_df['cardinality_x'] = cardinality_x
    result_df['cardinality_y'] = cardinality_y
    result_df['bops'] = bops_values
    result_df['dataset1'] = dataset1
    result_df['dataset2'] = dataset2
    result_df.insert(len(result_df.columns), 'result_size', result_size, True)
    result_df.insert(len(result_df.columns), 'join_selectivity', join_selectivity, True)
    # NOTE(review): this inserts the join_selectivity values under the name
    # 'mbr_tests' — looks like a copy/paste slip; confirm before relying on it.
    result_df.insert(len(result_df.columns), 'mbr_tests', join_selectivity, True)
    # print (len(result_df))
    # result_df.to_csv('result_df.csv')
    return result_df, ds1_histograms, ds2_histograms, ds_all_histogram, ds_bops_histogram
def load_histogram(histograms_path, num_rows, num_columns, dataset):
    """Read one CSV histogram and return (normalized, raw), each (rows, cols, 1).

    The file is looked up at '<histograms_path>/<rows>x<cols>/<dataset>'.
    The normalized copy is scaled by the peak cell value.
    """
    path = '{}/{}x{}/{}'.format(histograms_path, num_rows, num_columns, dataset)
    raw = np.genfromtxt(path, delimiter=',')
    scaled = raw / raw.max()
    channel_shape = (raw.shape[0], raw.shape[1], 1)
    return scaled.reshape(channel_shape), raw.reshape(channel_shape)
def load_histogram2(histograms_path, num_rows, num_columns, count, dataset):
    """Read one CSV histogram from a per-run subfolder; return (normalized, raw).

    The file is looked up at '<histograms_path>/<rows>x<cols>/<count>/<dataset>'.
    Both returned arrays have shape (rows, cols, 1); the normalized copy is
    scaled by the peak cell value.
    """
    path = '{}/{}x{}/{}/{}'.format(histograms_path, num_rows, num_columns, count, dataset)
    raw = np.genfromtxt(path, delimiter=',')
    scaled = raw / raw.max()
    channel_shape = (raw.shape[0], raw.shape[1], 1)
    return scaled.reshape(channel_shape), raw.reshape(channel_shape)
def load_histograms(result_df, histograms_path, num_rows, num_columns):
    """Load the left/right histograms for every join pair in *result_df*.

    Returns six numpy arrays: normalized left histograms, normalized right
    histograms, their raw counterparts, the two-channel stacked histograms
    (normalized jointly), and the normalized element-wise products (BOPS).
    """
    left_norm, left_raw = [], []
    right_norm, right_raw = [], []
    for name in result_df['dataset1']:
        norm, raw = load_histogram(histograms_path, num_rows, num_columns, name)
        left_norm.append(norm)
        left_raw.append(raw)
    for name in result_df['dataset2']:
        norm, raw = load_histogram(histograms_path, num_rows, num_columns, name)
        right_norm.append(norm)
        right_raw.append(raw)
    stacked = []
    bops_list = []
    for raw1, raw2 in zip(left_raw, right_raw):
        # two-channel histogram, jointly normalized by the overall peak
        both = np.dstack((raw1, raw2))
        stacked.append(both / both.max())
        # element-wise product; normalize only when it is non-zero
        product = np.multiply(raw1, raw2)
        if product.max() > 0:
            product = product / product.max()
        bops_list.append(product)
    return (np.array(left_norm), np.array(right_norm), np.array(left_raw),
            np.array(right_raw), np.array(stacked), np.array(bops_list))
def load_histograms2(result_df, histograms_path, num_rows, num_columns):
    """Like load_histograms, but reads each pair from its row's 'count' subfolder.

    Returns six numpy arrays: normalized left histograms, normalized right
    histograms, their raw counterparts, the two-channel stacked histograms
    (normalized jointly), and the normalized element-wise products (BOPS).
    """
    left_norm, left_raw = [], []
    right_norm, right_raw = [], []
    for _, row in result_df.iterrows():
        run_id = row['count']
        norm, raw = load_histogram2(histograms_path, num_rows, num_columns,
                                     run_id, row['dataset1'])
        left_norm.append(norm)
        left_raw.append(raw)
        norm, raw = load_histogram2(histograms_path, num_rows, num_columns,
                                     run_id, row['dataset2'])
        right_norm.append(norm)
        right_raw.append(raw)
    stacked = []
    bops_list = []
    for raw1, raw2 in zip(left_raw, right_raw):
        # two-channel histogram, jointly normalized by the overall peak
        both = np.dstack((raw1, raw2))
        stacked.append(both / both.max())
        # element-wise product; normalize only when it is non-zero
        product = np.multiply(raw1, raw2)
        if product.max() > 0:
            product = product / product.max()
        bops_list.append(product)
    return (np.array(left_norm), np.array(right_norm), np.array(left_raw),
            np.array(right_raw), np.array(stacked), np.array(bops_list))
def main():
    """Smoke test: load features and join results for the aligned small datasets."""
    print('Dataset utils')
    features_df = load_datasets_feature('data/data_aligned/aligned_small_datasets_features.csv')
    # load_join_data returns FIVE values; the original unpacked only four,
    # which raised ValueError at runtime.
    join_data, ds1_histograms, ds2_histograms, ds_all_histogram, ds_bops_histogram = load_join_data(
        features_df,
        'data/data_aligned/join_results_small_datasets.csv',
        'data/data_aligned/histograms/small_datasets', 32,
        32)
    print(join_data)


if __name__ == '__main__':
    main()
| [
"numpy.dstack",
"numpy.multiply",
"pandas.read_csv",
"math.pow",
"pandas.merge",
"numpy.sum",
"numpy.array",
"sklearn.preprocessing.minmax_scale",
"pandas.DataFrame",
"sklearn.preprocessing.MinMaxScaler"
] | [((142, 196), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'delimiter': '"""\\\\s*,\\\\s*"""', 'header': '(0)'}), "(filename, delimiter='\\\\s*,\\\\s*', header=0)\n", (153, 196), True, 'import pandas as pd\n'), ((501, 573), 'pandas.read_csv', 'pd.read_csv', (['result_file'], {'delimiter': '"""\\\\s*,\\\\s*"""', 'header': 'None', 'names': 'cols'}), "(result_file, delimiter='\\\\s*,\\\\s*', header=None, names=cols)\n", (512, 573), True, 'import pandas as pd\n'), ((691, 768), 'pandas.merge', 'pd.merge', (['result_df', 'features_df'], {'left_on': '"""dataset1"""', 'right_on': '"""dataset_name"""'}), "(result_df, features_df, left_on='dataset1', right_on='dataset_name')\n", (699, 768), True, 'import pandas as pd\n'), ((846, 923), 'pandas.merge', 'pd.merge', (['result_df', 'features_df'], {'left_on': '"""dataset2"""', 'right_on': '"""dataset_name"""'}), "(result_df, features_df, left_on='dataset2', right_on='dataset_name')\n", (854, 923), True, 'import pandas as pd\n'), ((1322, 1383), 'numpy.multiply', 'np.multiply', (['ds1_original_histograms', 'ds2_original_histograms'], {}), '(ds1_original_histograms, ds2_original_histograms)\n', (1333, 1383), True, 'import numpy as np\n'), ((1753, 1773), 'numpy.sum', 'np.sum', (['bops'], {'axis': '(1)'}), '(bops, axis=1)\n', (1759, 1773), True, 'import numpy as np\n'), ((3774, 3838), 'pandas.read_csv', 'pd.read_csv', (['result_file'], {'delimiter': '""","""', 'header': 'None', 'names': 'cols'}), "(result_file, delimiter=',', header=None, names=cols)\n", (3785, 3838), True, 'import pandas as pd\n'), ((3956, 4033), 'pandas.merge', 'pd.merge', (['result_df', 'features_df'], {'left_on': '"""dataset1"""', 'right_on': '"""dataset_name"""'}), "(result_df, features_df, left_on='dataset1', right_on='dataset_name')\n", (3964, 4033), True, 'import pandas as pd\n'), ((4111, 4188), 'pandas.merge', 'pd.merge', (['result_df', 'features_df'], {'left_on': '"""dataset2"""', 'right_on': '"""dataset_name"""'}), "(result_df, features_df, 
left_on='dataset2', right_on='dataset_name')\n", (4119, 4188), True, 'import pandas as pd\n'), ((4512, 4573), 'numpy.multiply', 'np.multiply', (['ds1_original_histograms', 'ds2_original_histograms'], {}), '(ds1_original_histograms, ds2_original_histograms)\n', (4523, 4573), True, 'import numpy as np\n'), ((4943, 4963), 'numpy.sum', 'np.sum', (['bops'], {'axis': '(1)'}), '(bops, axis=1)\n', (4949, 4963), True, 'import numpy as np\n'), ((6551, 6579), 'sklearn.preprocessing.MinMaxScaler', 'preprocessing.MinMaxScaler', ([], {}), '()\n', (6577, 6579), False, 'from sklearn import preprocessing\n'), ((6643, 6692), 'pandas.DataFrame', 'pd.DataFrame', (['x_scaled'], {'columns': 'result_df.columns'}), '(x_scaled, columns=result_df.columns)\n', (6655, 6692), True, 'import pandas as pd\n'), ((7594, 7658), 'pandas.read_csv', 'pd.read_csv', (['result_file'], {'delimiter': '""","""', 'header': 'None', 'names': 'cols'}), "(result_file, delimiter=',', header=None, names=cols)\n", (7605, 7658), True, 'import pandas as pd\n'), ((7719, 7796), 'pandas.merge', 'pd.merge', (['result_df', 'features_df'], {'left_on': '"""dataset1"""', 'right_on': '"""dataset_name"""'}), "(result_df, features_df, left_on='dataset1', right_on='dataset_name')\n", (7727, 7796), True, 'import pandas as pd\n'), ((7813, 7890), 'pandas.merge', 'pd.merge', (['result_df', 'features_df'], {'left_on': '"""dataset2"""', 'right_on': '"""dataset_name"""'}), "(result_df, features_df, left_on='dataset2', right_on='dataset_name')\n", (7821, 7890), True, 'import pandas as pd\n'), ((8146, 8207), 'numpy.multiply', 'np.multiply', (['ds1_original_histograms', 'ds2_original_histograms'], {}), '(ds1_original_histograms, ds2_original_histograms)\n', (8157, 8207), True, 'import numpy as np\n'), ((8310, 8330), 'numpy.sum', 'np.sum', (['bops'], {'axis': '(1)'}), '(bops, axis=1)\n', (8316, 8330), True, 'import numpy as np\n'), ((9358, 9386), 'sklearn.preprocessing.MinMaxScaler', 'preprocessing.MinMaxScaler', ([], {}), '()\n', (9384, 
9386), False, 'from sklearn import preprocessing\n'), ((9450, 9472), 'pandas.DataFrame', 'pd.DataFrame', (['x_scaled'], {}), '(x_scaled)\n', (9462, 9472), True, 'import pandas as pd\n'), ((3219, 3255), 'sklearn.preprocessing.minmax_scale', 'preprocessing.minmax_scale', (['reshaped'], {}), '(reshaped)\n', (3245, 3255), False, 'from sklearn import preprocessing\n'), ((8723, 8738), 'math.pow', 'math.pow', (['(10)', '(9)'], {}), '(10, 9)\n', (8731, 8738), False, 'import math\n'), ((11752, 11777), 'numpy.dstack', 'np.dstack', (['(hist1, hist2)'], {}), '((hist1, hist2))\n', (11761, 11777), True, 'import numpy as np\n'), ((12033, 12058), 'numpy.multiply', 'np.multiply', (['hist1', 'hist2'], {}), '(hist1, hist2)\n', (12044, 12058), True, 'import numpy as np\n'), ((12199, 12223), 'numpy.array', 'np.array', (['ds1_histograms'], {}), '(ds1_histograms)\n', (12207, 12223), True, 'import numpy as np\n'), ((12225, 12249), 'numpy.array', 'np.array', (['ds2_histograms'], {}), '(ds2_histograms)\n', (12233, 12249), True, 'import numpy as np\n'), ((12251, 12284), 'numpy.array', 'np.array', (['ds1_original_histograms'], {}), '(ds1_original_histograms)\n', (12259, 12284), True, 'import numpy as np\n'), ((12286, 12319), 'numpy.array', 'np.array', (['ds2_original_histograms'], {}), '(ds2_original_histograms)\n', (12294, 12319), True, 'import numpy as np\n'), ((12330, 12356), 'numpy.array', 'np.array', (['ds_all_histogram'], {}), '(ds_all_histogram)\n', (12338, 12356), True, 'import numpy as np\n'), ((12358, 12385), 'numpy.array', 'np.array', (['ds_bops_histogram'], {}), '(ds_bops_histogram)\n', (12366, 12385), True, 'import numpy as np\n'), ((13893, 13918), 'numpy.dstack', 'np.dstack', (['(hist1, hist2)'], {}), '((hist1, hist2))\n', (13902, 13918), True, 'import numpy as np\n'), ((14174, 14199), 'numpy.multiply', 'np.multiply', (['hist1', 'hist2'], {}), '(hist1, hist2)\n', (14185, 14199), True, 'import numpy as np\n'), ((14340, 14364), 'numpy.array', 'np.array', (['ds1_histograms'], {}), 
'(ds1_histograms)\n', (14348, 14364), True, 'import numpy as np\n'), ((14366, 14390), 'numpy.array', 'np.array', (['ds2_histograms'], {}), '(ds2_histograms)\n', (14374, 14390), True, 'import numpy as np\n'), ((14392, 14425), 'numpy.array', 'np.array', (['ds1_original_histograms'], {}), '(ds1_original_histograms)\n', (14400, 14425), True, 'import numpy as np\n'), ((14427, 14460), 'numpy.array', 'np.array', (['ds2_original_histograms'], {}), '(ds2_original_histograms)\n', (14435, 14460), True, 'import numpy as np\n'), ((14471, 14497), 'numpy.array', 'np.array', (['ds_all_histogram'], {}), '(ds_all_histogram)\n', (14479, 14497), True, 'import numpy as np\n'), ((14499, 14526), 'numpy.array', 'np.array', (['ds_bops_histogram'], {}), '(ds_bops_histogram)\n', (14507, 14526), True, 'import numpy as np\n')] |
from mycroft.messagebus.message import Message, dig_for_message
from mycroft.skills.core import FallbackSkill, intent_file_handler, intent_handler
from adapt.intent import IntentBuilder
from jarbas_hive_mind_red import get_listener
from jarbas_hive_mind.settings import CERTS_PATH
from jarbas_hive_mind.database import ClientDatabase
from jarbas_utils import create_daemon
import time
class NodeRedSkill(FallbackSkill):
def __init__(self):
super(NodeRedSkill, self).__init__(name='NodeRedSkill')
# can not reload, twisted reactor can not be restarted
self.reload_skill = False
if "timeout" not in self.settings:
self.settings["timeout"] = 15
if "secret" not in self.settings:
self.settings["secret"] = "unsafe"
if "priority" not in self.settings:
self.settings["priority"] = 50
# TODO pass these to hivemind / settingsmeta
if "host" not in self.settings:
self.settings["host"] = "127.0.0.1"
if "port" not in self.settings:
self.settings["port"] = 6789
if "ip_list" not in self.settings:
self.settings["ip_list"] = []
if "ip_blacklist" not in self.settings:
self.settings["ip_blacklist"] = True
if "safe_mode" not in self.settings:
self.settings["safe_mode"] = False
if "message_whitelist" not in self.settings:
self.settings["message_whitelist"] = []
if "cert" not in self.settings:
self.settings["cert"] = CERTS_PATH + '/red.crt'
if "key" not in self.settings:
self.settings["key"] = CERTS_PATH + '/red.key'
if "ssl" not in self.settings:
self.settings["ssl"] = False
self.waiting_for_node = False
self.conversing = False
self.old_key = self.settings["secret"]
self._error = None
self.settings_change_callback = self.on_web_settings_change
    def initialize(self):
        """Register the fallback handler and bus events, then start the listener."""
        self.register_fallback(self.handle_fallback,
                               int(self.settings["priority"]))
        # outcome events emitted by the node-red flows
        self.add_event("node_red.success", self.handle_node_success)
        self.add_event("node_red.intent_failure", self.handle_node_failure)
        self.add_event("node_red.converse.activate",
                       self.handle_converse_enable)
        self.add_event("node_red.converse.deactivate",
                       self.handle_converse_disable)
        self.add_event("hive.client.connection.error",
                       self.handle_wrong_key)
        # background thread running converse_keepalive
        self.converse_thread = create_daemon(self.converse_keepalive)
        self.node_setup()
    def on_web_settings_change(self):
        """Settings-change callback: sync a possibly rotated access key."""
        self.change_password()
    def change_password(self, force=False):
        """Sync the node-red access key with the HiveMind client database.

        Args:
            force: when True, (re)create the 'nodered' client entry with the
                current key instead of rotating from the previous one.
        """
        with ClientDatabase() as db:
            mail = "<EMAIL>"
            name = "nodered"
            key = self.settings["secret"]
            if not force:
                if self.old_key != key:
                    # key was changed in settings: update the DB entry and
                    # ask the user to reboot so the listener picks it up
                    db.change_key(self.old_key, key)
                    self.old_key = key
                    self.speak_dialog("change_key", wait=True)
                    self.speak_dialog("please_reboot")
                    self.set_context("KEY_CHANGED")
            else:
                db.add_client(name, mail, key, crypto_key=None)
    # only triggers after change_password set the KEY_CHANGED context
    @intent_handler(IntentBuilder("WhyRebootIntent")
                    .require("WhyKeyword").require("KEY_CHANGED"))
    def handle_why_reboot(self, message):
        """Explain why a reboot was requested after a key change."""
        self.speak_dialog("why", wait=True)
def handle_wrong_key(self, message):
error = message.data.get("error")
if self._error is None or error != self._error:
self.speak_dialog("bad_key")
self.speak(error)
self._error = error
def node_setup(self):
self.change_password(force=True)
self.node = get_listener(bus=self.bus)
config = {
"port": self.settings["port"],
"host": self.settings["host"],
"ssl":
{"use_ssl": self.settings["ssl"]}
}
self.node.load_config(config)
self.node._autorun = False
self.node.listen()
    def shutdown(self):
        """Stop the node-red listener and keepalive thread on skill unload."""
        self.node.stop_from_thread()
        # NOTE(review): threading.Thread has no `.running` attribute; if
        # create_daemon returns a plain Thread this line raises AttributeError
        # and `.is_alive()` may be intended -- confirm against jarbas_utils
        if self.converse_thread.running:
            # give the keepalive loop up to 2 seconds to exit
            self.converse_thread.join(2)
        super(NodeRedSkill, self).shutdown()
    def get_intro_message(self):
        # welcome dialog on skill install
        # NOTE(review): mycroft skills conventionally *return* the intro text
        # from get_intro_message(); this speaks it directly and returns None --
        # confirm this matches the mycroft-core version in use
        self.speak_dialog("intro")
# node red control intents
@intent_file_handler("pingnode.intent")
def handle_ping_node(self, message):
self.speak("ping")
def pong(message):
self.speak("pong")
self.bus.once("node_red.pong", pong)
message = message.forward("node_red.ping")
self.bus.emit(message)
@intent_file_handler("converse.enable.intent")
def handle_converse_enable(self, message):
if self.conversing:
self.speak_dialog("converse_on")
else:
self.speak_dialog("converse_enable")
self.conversing = True
@intent_file_handler("converse.disable.intent")
def handle_converse_disable(self, message):
if not self.conversing:
self.speak_dialog("converse_off")
else:
self.speak_dialog("converse_disable")
self.conversing = False
# node red event handlers
    def handle_node_success(self, message):
        # node-red answered: unblock wait_for_node() and report success
        self.waiting_for_node = False
        self.success = True
    def handle_node_failure(self, message):
        # node-red answered: unblock wait_for_node() and report failure
        self.waiting_for_node = False
        self.success = False
def wait_for_node(self):
start = time.time()
self.success = False
self.waiting_for_node = True
while self.waiting_for_node and \
time.time() - start < float(self.settings["timeout"]):
time.sleep(0.1)
if self.waiting_for_node:
message = dig_for_message()
if not message:
message = Message("node_red.timeout")
else:
message.reply("node_red.timeout")
self.bus.emit(message)
self.waiting_for_node = False
return self.success
# converse
    def converse_keepalive(self):
        """Run forever in a daemon thread (started from initialize).

        While converse mode is enabled, periodically re-activates the skill so
        the intent service keeps routing utterances to converse().
        """
        while True:
            if self.conversing:
                # avoid converse timed_out
                self.make_active()
            time.sleep(60)
def converse(self, utterances, lang="en-us"):
if self.conversing:
message = dig_for_message()
if message:
message = message.reply("node_red.converse",
{"utterance": utterances[0]})
else:
message = Message("node_red.converse",
{"utterance": utterances[0]})
if not message.context.get("platform", "").startswith("NodeRedMind"):
self.bus.emit(message)
return self.wait_for_node()
return False
# fallback
def handle_fallback(self, message):
message = message.reply("node_red.fallback", message.data)
self.bus.emit(message)
return self.wait_for_node()
def create_skill():
    """Construct the skill instance for the mycroft skill loader."""
    return NodeRedSkill()
| [
"jarbas_hive_mind_red.get_listener",
"mycroft.messagebus.message.dig_for_message",
"adapt.intent.IntentBuilder",
"time.sleep",
"mycroft.skills.core.intent_file_handler",
"mycroft.messagebus.message.Message",
"time.time",
"jarbas_utils.create_daemon",
"jarbas_hive_mind.database.ClientDatabase"
] | [((4545, 4583), 'mycroft.skills.core.intent_file_handler', 'intent_file_handler', (['"""pingnode.intent"""'], {}), "('pingnode.intent')\n", (4564, 4583), False, 'from mycroft.skills.core import FallbackSkill, intent_file_handler, intent_handler\n'), ((4846, 4891), 'mycroft.skills.core.intent_file_handler', 'intent_file_handler', (['"""converse.enable.intent"""'], {}), "('converse.enable.intent')\n", (4865, 4891), False, 'from mycroft.skills.core import FallbackSkill, intent_file_handler, intent_handler\n'), ((5116, 5162), 'mycroft.skills.core.intent_file_handler', 'intent_file_handler', (['"""converse.disable.intent"""'], {}), "('converse.disable.intent')\n", (5135, 5162), False, 'from mycroft.skills.core import FallbackSkill, intent_file_handler, intent_handler\n'), ((2620, 2658), 'jarbas_utils.create_daemon', 'create_daemon', (['self.converse_keepalive'], {}), '(self.converse_keepalive)\n', (2633, 2658), False, 'from jarbas_utils import create_daemon\n'), ((3888, 3914), 'jarbas_hive_mind_red.get_listener', 'get_listener', ([], {'bus': 'self.bus'}), '(bus=self.bus)\n', (3900, 3914), False, 'from jarbas_hive_mind_red import get_listener\n'), ((5692, 5703), 'time.time', 'time.time', ([], {}), '()\n', (5701, 5703), False, 'import time\n'), ((2814, 2830), 'jarbas_hive_mind.database.ClientDatabase', 'ClientDatabase', ([], {}), '()\n', (2828, 2830), False, 'from jarbas_hive_mind.database import ClientDatabase\n'), ((5895, 5910), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (5905, 5910), False, 'import time\n'), ((5967, 5984), 'mycroft.messagebus.message.dig_for_message', 'dig_for_message', ([], {}), '()\n', (5982, 5984), False, 'from mycroft.messagebus.message import Message, dig_for_message\n'), ((6432, 6446), 'time.sleep', 'time.sleep', (['(60)'], {}), '(60)\n', (6442, 6446), False, 'import time\n'), ((6548, 6565), 'mycroft.messagebus.message.dig_for_message', 'dig_for_message', ([], {}), '()\n', (6563, 6565), False, 'from mycroft.messagebus.message 
import Message, dig_for_message\n'), ((6039, 6066), 'mycroft.messagebus.message.Message', 'Message', (['"""node_red.timeout"""'], {}), "('node_red.timeout')\n", (6046, 6066), False, 'from mycroft.messagebus.message import Message, dig_for_message\n'), ((6765, 6823), 'mycroft.messagebus.message.Message', 'Message', (['"""node_red.converse"""', "{'utterance': utterances[0]}"], {}), "('node_red.converse', {'utterance': utterances[0]})\n", (6772, 6823), False, 'from mycroft.messagebus.message import Message, dig_for_message\n'), ((5828, 5839), 'time.time', 'time.time', ([], {}), '()\n', (5837, 5839), False, 'import time\n'), ((3369, 3401), 'adapt.intent.IntentBuilder', 'IntentBuilder', (['"""WhyRebootIntent"""'], {}), "('WhyRebootIntent')\n", (3382, 3401), False, 'from adapt.intent import IntentBuilder\n')] |
from setuptools import setup, find_packages

# Packaging metadata for the GitHub Actions workflow representation library.
setup(
    name='GitHub Actions Workflow Representation',
    version='0.9.2',
    description='Workflow representation for GitHub Actions.',
    long_description='See README.md',
    author='YOCKOW',
    url='https://github.com/YOCKOW/PythonGitHubActionsWorkflowRepresentation',
    license='MIT',
    # ship everything except the test suite and docs
    packages=find_packages(exclude=('workflow_tests', 'docs')),
    test_suite='workflow_tests'
)
"setuptools.find_packages"
] | [((357, 406), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('workflow_tests', 'docs')"}), "(exclude=('workflow_tests', 'docs'))\n", (370, 406), False, 'from setuptools import setup, find_packages\n')] |
import os
import numpy as np
import random
from math import isclose
import torch
import matplotlib.pyplot as plt
from modelZoo.DyanOF import OFModel, fista
from torch.autograd import Variable
import torch.nn
def gridRing(N):
    """Randomly sample complex poles from a grid over an annular sector.

    N/2 poles P are drawn (without replacement) from a dense grid covering
    radii [1 - epsilon_low, 1 + epsilon_high] and angles just inside
    (0, pi/2).  Pall extends P with -P and the conjugates of both, keeping
    the pole set closed under negation and conjugation (4 * (N // 2) poles).

    Returns:
        (P, Pall): the sampled poles and the symmetric full pole set, both
        1-D complex numpy arrays.
    """
    # Cleanup: removed commented-out duplicate constant definitions that
    # shadowed the live ones below.
    epsilon_low = 0.25   # how far inside the unit circle poles may reach
    epsilon_high = 0.15  # how far outside
    rmin = 1 - epsilon_low
    rmax = 1 + epsilon_high
    thetaMin = 0.001
    thetaMax = np.pi / 2 - 0.001
    delta = 0.001  # grid resolution
    Npole = int(N / 2)
    Pool = generateGridPoles(delta, rmin, rmax, thetaMin, thetaMax)
    idx = random.sample(range(len(Pool)), Npole)
    P = Pool[idx]
    Pall = np.concatenate((P, -P, np.conjugate(P), np.conjugate(-P)), axis=0)
    return P, Pall
## Generate the grid on poles
def generateGridPoles(delta, rmin, rmax, thetaMin, thetaMax):
rmin2 = pow(rmin, 2)
rmax2 = pow(rmax, 2)
xv = np.arange(-rmax, rmax, delta)
x, y = np.meshgrid(xv, xv, sparse=False)
mask = np.logical_and(np.logical_and(x ** 2 + y ** 2 >= rmin2, x ** 2 + y ** 2 <= rmax2),
np.logical_and(np.angle(x + 1j * y) >= thetaMin, np.angle(x + 1j * y) <= thetaMax))
px = x[mask]
py = y[mask]
P = px + 1j * py
return P
def getRowSparsity(inputDict):
    """Count the "sparse" rows of a 2-D dictionary tensor.

    A row counts as sparse when its number of non-zero entries is at most
    60% of the number of columns.

    Returns:
        int: how many rows satisfy the sparsity criterion.
    """
    num_rows = inputDict.shape[0]
    num_cols = inputDict.shape[1]
    threshold = round(0.6 * num_cols)  # loop-invariant, computed once
    sparse_rows = 0
    for r in range(num_rows):
        row = inputDict[r, :].unsqueeze(0)
        if len(row.nonzero()) <= threshold:
            sparse_rows += 1
    return sparse_rows
def get_recover_fista(D, y, key_set, param, gpu_id):
    """Recover the full signal from observations at ``key_set`` via FISTA.

    A sparse code is fitted on the reduced dictionary D[key_set] against the
    correspondingly reduced observations, then the full dictionary
    re-synthesizes the complete signal.

    Args:
        D: dictionary, torch.Tensor or numpy array.
        y: observations; indexed as y[:, key_set] when 3-D, y[key_set]
           otherwise (batch layout assumed -- confirm with callers).
        key_set: indices of the observed rows/frames.
        param: FISTA regularization parameter.
        gpu_id: CUDA device used when D is not already on the GPU.

    Returns:
        torch.Tensor: the recovered full sequence y_hat.
    """
    # IDIOM FIX: isinstance() instead of `type(D) is np.ndarray`; also
    # accepts ndarray subclasses.
    if isinstance(D, np.ndarray):
        D = torch.Tensor(D)
    D_r = D[key_set]
    y_r = y[:, key_set] if len(y.shape) == 3 else y[key_set]
    if D.is_cuda:
        c_r = fista(D_r, y_r, param, 100, gpu_id)
        y_hat = torch.matmul(D, c_r)
    else:
        # dictionary on CPU: move it to the GPU for coding and synthesis
        c_r = fista(D_r.cuda(gpu_id), y_r, param, 100, gpu_id)
        y_hat = torch.matmul(D.cuda(gpu_id), c_r)
    return y_hat
| [
"numpy.logical_and",
"modelZoo.DyanOF.fista",
"numpy.conjugate",
"torch.Tensor",
"numpy.angle",
"torch.matmul",
"numpy.meshgrid",
"numpy.arange"
] | [((967, 996), 'numpy.arange', 'np.arange', (['(-rmax)', 'rmax', 'delta'], {}), '(-rmax, rmax, delta)\n', (976, 996), True, 'import numpy as np\n'), ((1008, 1041), 'numpy.meshgrid', 'np.meshgrid', (['xv', 'xv'], {'sparse': '(False)'}), '(xv, xv, sparse=False)\n', (1019, 1041), True, 'import numpy as np\n'), ((1068, 1134), 'numpy.logical_and', 'np.logical_and', (['(x ** 2 + y ** 2 >= rmin2)', '(x ** 2 + y ** 2 <= rmax2)'], {}), '(x ** 2 + y ** 2 >= rmin2, x ** 2 + y ** 2 <= rmax2)\n', (1082, 1134), True, 'import numpy as np\n'), ((1748, 1763), 'torch.Tensor', 'torch.Tensor', (['D'], {}), '(D)\n', (1760, 1763), False, 'import torch\n'), ((1906, 1941), 'modelZoo.DyanOF.fista', 'fista', (['D_r', 'y_r', 'param', '(100)', 'gpu_id'], {}), '(D_r, y_r, param, 100, gpu_id)\n', (1911, 1941), False, 'from modelZoo.DyanOF import OFModel, fista\n'), ((1958, 1978), 'torch.matmul', 'torch.matmul', (['D', 'c_r'], {}), '(D, c_r)\n', (1970, 1978), False, 'import torch\n'), ((750, 765), 'numpy.conjugate', 'np.conjugate', (['P'], {}), '(P)\n', (762, 765), True, 'import numpy as np\n'), ((767, 783), 'numpy.conjugate', 'np.conjugate', (['(-P)'], {}), '(-P)\n', (779, 783), True, 'import numpy as np\n'), ((1177, 1199), 'numpy.angle', 'np.angle', (['(x + 1.0j * y)'], {}), '(x + 1.0j * y)\n', (1185, 1199), True, 'import numpy as np\n'), ((1211, 1233), 'numpy.angle', 'np.angle', (['(x + 1.0j * y)'], {}), '(x + 1.0j * y)\n', (1219, 1233), True, 'import numpy as np\n')] |