code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from setuptools import setup, find_packages
setup(
name = "renewables_ninja_client",
version = "0.1.0",
description = ("Client for Renewables Ninja API."),
author = ["<NAME>"],
packages = find_packages(exclude=[
"docs", "tests", "examples",
"sandbox", "scripts"]),
install_requires=[
"pandas",
"numpy",
"requests",
'typing;python_version<"3.7"'],
) | [
"setuptools.find_packages"
] | [((209, 283), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs', 'tests', 'examples', 'sandbox', 'scripts']"}), "(exclude=['docs', 'tests', 'examples', 'sandbox', 'scripts'])\n", (222, 283), False, 'from setuptools import setup, find_packages\n')] |
# -*- coding: utf-8 -*-
# © 2016 <NAME>, Trustcode
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
# #############################################################################
#
# <NAME> Sigep WEB
# Copyright (C) 2015 KMEE (http://www.kmee.com.br)
# @author: <NAME> <<EMAIL>>
# @author: <NAME> <<EMAIL>>
# Sponsored by Europestar www.europestar.com.br
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from pysigep import send, _url
from pysigep.utils import _valida
import base64
import io
from PIL import Image, ImageDraw, ImageFont
from io import StringIO
import textwrap
import os
BASE_DIR = os.path.dirname(__file__)
_TTF_ARIAL = os.path.join(BASE_DIR, 'data/fonts/arial.ttf')
_TTF_ARIAL_N = os.path.join(BASE_DIR, 'data/fonts/arial_negrito.ttf')
def calcular_preco_prazo(**kwargs):
"""
#>>> request = {'nCdEmpresa': '08082650', 'sDsSenha': '<PASSWORD>',\
# 'nCdServico': '40215', 'sCepOrigem': '05311900',\
# 'sCepDestino': '83010140', 'nVlPeso': 1, 'nCdFormato': 1,\
# 'nVlComprimento': 20, 'nVlAltura': 20, 'nVlLargura': 20,\
# 'nVlDiametro': 20, 'sCdMaoPropria': 'S',\
# 'nVlValorDeclarado': 0, 'sCdAvisoRecebimento': 'S'}
#>>> calcular_preco_prazo(**request).cServico.Codigo
#40215
#>>> (calcular_preco_prazo(**request).cServico.ValorMaoPropria) > 0
#True
#>>> request['nVlPeso'] = 99999999999999
#>>> calcular_preco_prazo(**request) #doctest: +ELLIPSIS
#<Element Servicos at 0x...>
#>>> request['nVlPeso'] = 0
#>>> calcular_preco_prazo(**request) #doctest: +ELLIPSIS
#<Element Servicos at 0x...>
#>>> request['sCepDestino'] = '12345678'
#>>> calcular_preco_prazo(**request).cServico.Erro
#8
"""
path = 'CalcularPrecoPrazo.xml'
api = 'CalcularFretePrazo'
_valida('calcular_preco_prazo', api, kwargs)
ambiente = kwargs['ambiente'] if 'ambiente' in kwargs else 1
url = _url(ambiente, api)
return send(path, 'CalcPrecoPrazoResponse', api, url,
soap_action='http://tempuri.org/CalcPrecoPrazo', **kwargs)
def get_eventos(**kwargs):
"""
# >>> user = {'usuario': 'ECT', '<PASSWORD>ha': '<PASSWORD>',\
# 'objetos': ['PL207893158BR']}
# >>> get_eventos(**user) #doctest: +ELLIPSIS
# <Element return at 0x...>
# >>> get_eventos(**user).objeto.evento.destino.cidade
# 'Rio De Janeiro'
"""
api = 'BuscaEventos'
_valida('get_eventos', api, kwargs)
ambiente = kwargs['ambiente'] if 'ambiente' in kwargs else 1
url = _url(ambiente, api)
path = 'BuscaEventos.xml'
return send(path, 'buscaEventosListaResponse', api, url,
soap_action='eventos', **kwargs)
def sign_chancela(chancela, usuario_correios):
"""
:params:
chancela: imagem da chancela, codificada em base64
usuario_correios: {'contrato': idContrato,
'nome': nome da empresa,
'ano_assinatura': ano de assinatura,
'origem': sigla do estado de origem,
'postagem': sigla de estado de destino,}
:return:
imagem em base64
"""
t = base64.decodestring(chancela)
img = Image.open(StringIO(t)).convert("RGB")
draw = ImageDraw.ImageDraw(img)
font = ImageFont.truetype(_TTF_ARIAL, int(img.size[0]*0.07))
draw.setfont(font)
texto = usuario_correios['contrato'] + '/' + usuario_correios['ano_assinatura']
texto += ' - DR/' + usuario_correios['origem']
if usuario_correios['postagem'] != usuario_correios['origem']:
texto += '/' + usuario_correios['postagem']
tamanho_texto = draw.textsize(texto)
h_position = (img.size[0] - tamanho_texto[0]) / 2
v_position = img.size[1] / 2
draw.text((h_position, v_position), texto, fill=(0, 0, 0))
list_name = textwrap.wrap(usuario_correios['nome'], width=20)
font = ImageFont.truetype(_TTF_ARIAL_N, int(img.size[0]*0.07))
draw.setfont(font)
v_position = img.size[1] / 2 + int(img.size[0]*0.07)
y_text = v_position
for line in list_name:
width, height = font.getsize(line)
h_position = (img.size[0] - width) / 2
draw.text((h_position, y_text), line, fill=(0, 0, 0))
y_text += height + 5
size = max(img.size[0], img.size[1])
bg = Image.new("RGBA", (size, size), (255, 255, 255))
h_position = (bg.size[0] - img.size[0]) / 2
v_position = (bg.size[1] - img.size[1]) / 2
bg.paste(img, box=(h_position, v_position))
tmp = io.BytesIO()
bg.save(tmp, 'png')
bg = base64.b64encode(tmp.getvalue())
return bg
| [
"pysigep.utils._valida",
"PIL.Image.new",
"pysigep.send",
"os.path.join",
"io.BytesIO",
"os.path.dirname",
"pysigep._url",
"textwrap.wrap",
"base64.decodestring",
"io.StringIO",
"PIL.ImageDraw.ImageDraw"
] | [((1357, 1382), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1372, 1382), False, 'import os\n'), ((1396, 1442), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data/fonts/arial.ttf"""'], {}), "(BASE_DIR, 'data/fonts/arial.ttf')\n", (1408, 1442), False, 'import os\n'), ((1458, 1512), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""data/fonts/arial_negrito.ttf"""'], {}), "(BASE_DIR, 'data/fonts/arial_negrito.ttf')\n", (1470, 1512), False, 'import os\n'), ((2541, 2585), 'pysigep.utils._valida', '_valida', (['"""calcular_preco_prazo"""', 'api', 'kwargs'], {}), "('calcular_preco_prazo', api, kwargs)\n", (2548, 2585), False, 'from pysigep.utils import _valida\n'), ((2661, 2680), 'pysigep._url', '_url', (['ambiente', 'api'], {}), '(ambiente, api)\n', (2665, 2680), False, 'from pysigep import send, _url\n'), ((2692, 2802), 'pysigep.send', 'send', (['path', '"""CalcPrecoPrazoResponse"""', 'api', 'url'], {'soap_action': '"""http://tempuri.org/CalcPrecoPrazo"""'}), "(path, 'CalcPrecoPrazoResponse', api, url, soap_action=\n 'http://tempuri.org/CalcPrecoPrazo', **kwargs)\n", (2696, 2802), False, 'from pysigep import send, _url\n'), ((3167, 3202), 'pysigep.utils._valida', '_valida', (['"""get_eventos"""', 'api', 'kwargs'], {}), "('get_eventos', api, kwargs)\n", (3174, 3202), False, 'from pysigep.utils import _valida\n'), ((3278, 3297), 'pysigep._url', '_url', (['ambiente', 'api'], {}), '(ambiente, api)\n', (3282, 3297), False, 'from pysigep import send, _url\n'), ((3339, 3426), 'pysigep.send', 'send', (['path', '"""buscaEventosListaResponse"""', 'api', 'url'], {'soap_action': '"""eventos"""'}), "(path, 'buscaEventosListaResponse', api, url, soap_action='eventos', **\n kwargs)\n", (3343, 3426), False, 'from pysigep import send, _url\n'), ((3920, 3949), 'base64.decodestring', 'base64.decodestring', (['chancela'], {}), '(chancela)\n', (3939, 3949), False, 'import base64\n'), ((4010, 4034), 'PIL.ImageDraw.ImageDraw', 'ImageDraw.ImageDraw', 
(['img'], {}), '(img)\n', (4029, 4034), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((4584, 4633), 'textwrap.wrap', 'textwrap.wrap', (["usuario_correios['nome']"], {'width': '(20)'}), "(usuario_correios['nome'], width=20)\n", (4597, 4633), False, 'import textwrap\n'), ((5063, 5111), 'PIL.Image.new', 'Image.new', (['"""RGBA"""', '(size, size)', '(255, 255, 255)'], {}), "('RGBA', (size, size), (255, 255, 255))\n", (5072, 5111), False, 'from PIL import Image, ImageDraw, ImageFont\n'), ((5266, 5278), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (5276, 5278), False, 'import io\n'), ((3971, 3982), 'io.StringIO', 'StringIO', (['t'], {}), '(t)\n', (3979, 3982), False, 'from io import StringIO\n')] |
#*
#* Copyright (C) 2017-2019 Alibaba Group Holding Limited
#*
#* Licensed under the Apache License, Version 2.0 (the "License");
#* you may not use this file except in compliance with the License.
#* You may obtain a copy of the License at
#*
#* http://www.apache.org/licenses/LICENSE-2.0
#*
#* Unless required by applicable law or agreed to in writing, software
#* distributed under the License is distributed on an "AS IS" BASIS,
#* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#* See the License for the specific language governing permissions and
#* limitations under the License.
import numpy as np
import onnx
import os
import glob
import caffe2.python.onnx.backend
from caffe2.python import core, workspace
from onnx import numpy_helper
import os
fail_sum = 0
dir_path = os.path.dirname(os.path.realpath(__file__))
test_dir = glob.glob(os.path.join(dir_path, 'test_*'))
model_paths = glob.glob(os.path.join(os.path.join(dir_path, 'test_*'), '*.onnx'))
m_len = len(model_paths)
for k in range(m_len):
model = onnx.load(model_paths[k])
test_data_dir = os.path.join(test_dir[k], 'test_data_set_0')
# Load inputs
inputs = []
inputs_num = len(glob.glob(os.path.join(test_data_dir, 'input_*.pb')))
for i in range(inputs_num):
input_file = os.path.join(test_data_dir, 'input_{}.pb'.format(i))
tensor = onnx.TensorProto()
with open(input_file, 'rb') as f:
tensor.ParseFromString(f.read())
inputs.append(numpy_helper.to_array(tensor))
# Load reference outputs
ref_outputs = []
ref_outputs_num = len(glob.glob(os.path.join(test_data_dir, 'output_*.pb')))
for j in range(ref_outputs_num):
output_file = os.path.join(test_data_dir, 'output_{}.pb'.format(j))
tensor = onnx.TensorProto()
with open(output_file, 'rb') as f:
tensor.ParseFromString(f.read())
ref_outputs.append(numpy_helper.to_array(tensor))
# Run the model on the backend
try:
outputs = list(caffe2.python.onnx.backend.run_model(model, inputs))
except RuntimeError:
print("!!Error: Model execution of " + test_dir[k] + " failed.")
fail_sum = fail_sum + 1
continue
idx = 0
# Results verification with golden data.
for ref_o, o in zip(ref_outputs, outputs):
try:
np.testing.assert_almost_equal(ref_o, o, decimal=5, err_msg="Failed test: " + test_dir[k])
except AssertionError:
print("!!Error: Output " + str(idx) + " of test: " + test_dir[k] + " failed")
fail_sum = fail_sum + 1
idx = idx + 1
print("============Summary:=============")
print(str(m_len) + " tests in total.")
print(str(m_len - fail_sum) + " tests passed.")
print(str(fail_sum) + " tests failed.")
print("=================================")
| [
"os.path.join",
"os.path.realpath",
"onnx.TensorProto",
"numpy.testing.assert_almost_equal",
"onnx.load",
"onnx.numpy_helper.to_array"
] | [((831, 857), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (847, 857), False, 'import os\n'), ((880, 912), 'os.path.join', 'os.path.join', (['dir_path', '"""test_*"""'], {}), "(dir_path, 'test_*')\n", (892, 912), False, 'import os\n'), ((1054, 1079), 'onnx.load', 'onnx.load', (['model_paths[k]'], {}), '(model_paths[k])\n', (1063, 1079), False, 'import onnx\n'), ((1098, 1142), 'os.path.join', 'os.path.join', (['test_dir[k]', '"""test_data_set_0"""'], {}), "(test_dir[k], 'test_data_set_0')\n", (1110, 1142), False, 'import os\n'), ((951, 983), 'os.path.join', 'os.path.join', (['dir_path', '"""test_*"""'], {}), "(dir_path, 'test_*')\n", (963, 983), False, 'import os\n'), ((1364, 1382), 'onnx.TensorProto', 'onnx.TensorProto', ([], {}), '()\n', (1380, 1382), False, 'import onnx\n'), ((1767, 1785), 'onnx.TensorProto', 'onnx.TensorProto', ([], {}), '()\n', (1783, 1785), False, 'import onnx\n'), ((1203, 1244), 'os.path.join', 'os.path.join', (['test_data_dir', '"""input_*.pb"""'], {}), "(test_data_dir, 'input_*.pb')\n", (1215, 1244), False, 'import os\n'), ((1486, 1515), 'onnx.numpy_helper.to_array', 'numpy_helper.to_array', (['tensor'], {}), '(tensor)\n', (1507, 1515), False, 'from onnx import numpy_helper\n'), ((1598, 1640), 'os.path.join', 'os.path.join', (['test_data_dir', '"""output_*.pb"""'], {}), "(test_data_dir, 'output_*.pb')\n", (1610, 1640), False, 'import os\n'), ((1895, 1924), 'onnx.numpy_helper.to_array', 'numpy_helper.to_array', (['tensor'], {}), '(tensor)\n', (1916, 1924), False, 'from onnx import numpy_helper\n'), ((2301, 2395), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['ref_o', 'o'], {'decimal': '(5)', 'err_msg': "('Failed test: ' + test_dir[k])"}), "(ref_o, o, decimal=5, err_msg='Failed test: ' +\n test_dir[k])\n", (2331, 2395), True, 'import numpy as np\n')] |
import sys, os
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.callbacks import Callback, ReduceLROnPlateau, TerminateOnNaN, ModelCheckpoint
import tensorflow.keras.backend as K
import tensorflow as tf
import time
###### Callbacks
# Create a callback to compute time spent between 10th and 110th epoch
class time_callback(Callback):
def __init__(self):
'''
Compute time spent between 10th and 110th epoch
'''
self.epoch = 1
self.t1 =0
self.t2 = 0
def on_epoch_end(self, epoch, t1):
if (self.epoch == 10):
self.t1 =time.time()
print('t1: '+str(self.t1))
elif (self.epoch == 110):
self.t2 = time.time()
print('t2: '+str(self.t2))
print('for 100 epochs from 10 to 110: '+str(self.t2 - self.t1))
self.epoch +=1 | [
"time.time"
] | [((621, 632), 'time.time', 'time.time', ([], {}), '()\n', (630, 632), False, 'import time\n'), ((728, 739), 'time.time', 'time.time', ([], {}), '()\n', (737, 739), False, 'import time\n')] |
"""Unit tests for the metrics module."""
import pytest
from forest import metrics
def test_counter():
"""Test counter."""
counter = metrics.Counter()
counter.increase()
assert counter.count == 1
counter.increase(10)
assert counter.count == 11
counter.decrease()
assert counter.count == 10
counter.decrease(11)
assert counter.count == -1
def test_histogram():
"""Test histogram."""
histogram = metrics.Histogram()
for value in range(0, 10):
histogram.update(value=value)
result = histogram.report()
assert result["min"] == 0
assert result["max"] == 9
assert result["medium"] == pytest.approx(4.5)
assert result["mean"] == pytest.approx(4.5)
assert result["stdDev"] == pytest.approx(2.8, rel=0.1) # relative tolerance of 0.1
assert result["percentile"]["75"] == pytest.approx(6.75)
assert result["percentile"]["95"] == pytest.approx(8.5, 0.1)
assert result["percentile"]["99"] == pytest.approx(9, 0.1)
def test_registry():
"""Test metrics registry."""
counter = metrics.Counter()
histogram = metrics.Histogram()
registry = metrics.MetricRegistry()
registry.register("counter", counter)
registry.register("histogram", histogram)
assert registry.get_metric("counter") is counter
assert registry.get_metric("histogram") is histogram
| [
"forest.metrics.Histogram",
"forest.metrics.MetricRegistry",
"pytest.approx",
"forest.metrics.Counter"
] | [((143, 160), 'forest.metrics.Counter', 'metrics.Counter', ([], {}), '()\n', (158, 160), False, 'from forest import metrics\n'), ((469, 488), 'forest.metrics.Histogram', 'metrics.Histogram', ([], {}), '()\n', (486, 488), False, 'from forest import metrics\n'), ((1099, 1116), 'forest.metrics.Counter', 'metrics.Counter', ([], {}), '()\n', (1114, 1116), False, 'from forest import metrics\n'), ((1133, 1152), 'forest.metrics.Histogram', 'metrics.Histogram', ([], {}), '()\n', (1150, 1152), False, 'from forest import metrics\n'), ((1168, 1192), 'forest.metrics.MetricRegistry', 'metrics.MetricRegistry', ([], {}), '()\n', (1190, 1192), False, 'from forest import metrics\n'), ((686, 704), 'pytest.approx', 'pytest.approx', (['(4.5)'], {}), '(4.5)\n', (699, 704), False, 'import pytest\n'), ((734, 752), 'pytest.approx', 'pytest.approx', (['(4.5)'], {}), '(4.5)\n', (747, 752), False, 'import pytest\n'), ((784, 811), 'pytest.approx', 'pytest.approx', (['(2.8)'], {'rel': '(0.1)'}), '(2.8, rel=0.1)\n', (797, 811), False, 'import pytest\n'), ((882, 901), 'pytest.approx', 'pytest.approx', (['(6.75)'], {}), '(6.75)\n', (895, 901), False, 'import pytest\n'), ((943, 966), 'pytest.approx', 'pytest.approx', (['(8.5)', '(0.1)'], {}), '(8.5, 0.1)\n', (956, 966), False, 'import pytest\n'), ((1008, 1029), 'pytest.approx', 'pytest.approx', (['(9)', '(0.1)'], {}), '(9, 0.1)\n', (1021, 1029), False, 'import pytest\n')] |
import importlib
import pytest
import yaml
import appmap._implementation
from appmap._implementation.env import Env
from appmap._implementation.recording import Recorder
def _data_dir(pytestconfig):
return pytestconfig.rootpath / 'appmap' / 'test' / 'data'
@pytest.fixture(name='data_dir')
def fixture_data_dir(pytestconfig):
return _data_dir(pytestconfig)
@pytest.fixture(name='with_data_dir')
def fixture_with_data_dir(data_dir, monkeypatch):
monkeypatch.syspath_prepend(data_dir)
return data_dir
@pytest.fixture
def events():
rec = Recorder()
rec.clear()
rec.enabled = True
yield rec.events
rec.enabled = False
rec.clear()
@pytest.hookimpl
def pytest_runtest_setup(item):
mark = item.get_closest_marker('appmap_enabled')
env = {}
if mark:
appmap_yml = mark.kwargs.get('config', 'appmap.yml')
d = _data_dir(item.config)
config = d / appmap_yml
Env.current.set('APPMAP_CONFIG', config)
env = {'APPMAP': 'true', 'APPMAP_CONFIG': config}
appmap._implementation.initialize(env=env) # pylint: disable=protected-access
# Some tests want yaml instrumented, others don't.
# Reload it to make sure it's instrumented, or not, as set in appmap.yml.
importlib.reload(yaml)
| [
"pytest.fixture",
"appmap._implementation.env.Env.current.set",
"appmap._implementation.recording.Recorder",
"importlib.reload"
] | [((265, 296), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""data_dir"""'}), "(name='data_dir')\n", (279, 296), False, 'import pytest\n'), ((370, 406), 'pytest.fixture', 'pytest.fixture', ([], {'name': '"""with_data_dir"""'}), "(name='with_data_dir')\n", (384, 406), False, 'import pytest\n'), ((560, 570), 'appmap._implementation.recording.Recorder', 'Recorder', ([], {}), '()\n', (568, 570), False, 'from appmap._implementation.recording import Recorder\n'), ((1257, 1279), 'importlib.reload', 'importlib.reload', (['yaml'], {}), '(yaml)\n', (1273, 1279), False, 'import importlib\n'), ((936, 976), 'appmap._implementation.env.Env.current.set', 'Env.current.set', (['"""APPMAP_CONFIG"""', 'config'], {}), "('APPMAP_CONFIG', config)\n", (951, 976), False, 'from appmap._implementation.env import Env\n')] |
"""
animation.py
This script is used to procduce animations of population behaviour
over a range of changing conditions. For example, if we wanted to
see how a population would change as light was elevated and wind
kept constant, we could produce the animation and watch the
general trend. This was mostly useful for visualisation, less for
formal analysis.
The script first constructs a series of Treatments which will be
simulated. The simulations are then run, each simulation produces
a plot which is stored in a target directory. These frames can
then be stitched together using a suitable tool, such as ffmpeg.
Note: the script is written for changing light elevations but
it should be reasonably straightforward to modify it for changing
other variables.
"""
from util.deserialiser import Deserialiser
from util.integration_models import *
from util.treatment import Treatment
from util.models import ReliabilityModel
from world.light import Light
from world.wind import Wind
import definitions as defn
import matplotlib.pyplot as plt
from scipy.special import i0
import numpy as np
import os
import shutil
def main():
#
# Simulator - can be anything in the util/integration_models module
#
simulator = CMLE()
rel_model = ReliabilityModel()
#
# Set the target output directory
#
os.chdir("frames/BWS")
print(os.getcwd())
start = 30 # Start elevation in degrees
end = 90 # End elevation
increment = 1 # Adjustment increment in degrees
iterations = 15 # Number of simulations to run at each elevation
treatnent_n = 30 # Number of individuals per treatment
elevation = np.radians(start)
filenumber = 0
wind_speed = 2.5 # Wind speed for each trial (this is assumed to be constant)
# While elevation still in range
while elevation < np.radians(end):
#
# Create the requisite treatment
#
treatment = Treatment()
treatment.set_reliability_model(rel_model)
treatment.set_n(treatnent_n)
treatment.set_id("Elevation {:.01f} degrees".format(np.degrees(elevation)))
init_light = Light(elevation, np.radians(0), treatment)
init_wind = Wind(wind_speed, np.radians(0), treatment)
initial = [init_wind, init_light]
conf_light = Light(elevation, np.radians(0), treatment)
conf_wind = Wind(wind_speed, np.radians(120), treatment)
conflict = [conf_wind, conf_light]
treatment.set_initial_cues(initial)
treatment.set_conflict_cues(conflict)
#
# Simulate the current treatment for some number of iterations.
#
for n in range(iterations):
#
# The filename format string is set to produce regular filenames
# which can easily be stitched into a video using ffmpeg. This can
# be modified.
#
filename = "{:05d}.png".format(filenumber)
simulator.simulate_treatment(treatment)
#
# Plot production
#
changes = treatment.get_changes_in_bearing()
avg_r, avg_t = treatment.get_avg_change()
plt.tight_layout()
ax = plt.subplot(121, projection='polar')
ax.plot(changes, np.ones(len(changes)), 'bo', color='magenta', alpha=0.2)
ax.plot(avg_t, avg_r, 'ro', markeredgecolor='k', label="R={:.02f},T={:.01f}".format(avg_r, np.degrees(avg_t)))
ax.set_title(treatment.get_id())
ax.set_rlim(0,1.1)
ax.set_theta_zero_location("N")
ax.set_theta_direction(-1)
ax.legend(loc='lower left')
params = treatment.get_cue_distribution_parameters()
initial_dist_ax = plt.subplot(222)
initial_light = params["initial"][0]
initial_wind = params["initial"][1]
light_mu = initial_light[0]
wind_mu = initial_wind[0]
light_kappa = initial_light[1]
wind_kappa = initial_wind[1]
light_x = np.linspace(-np.pi, np.pi, num=100)
light_y = np.exp(light_kappa*np.cos(light_x - light_mu))/(2*np.pi*i0(light_kappa))
wind_x = np.linspace(-np.pi, np.pi, num=100)
wind_y = np.exp(wind_kappa*np.cos(wind_x - wind_mu))/(2*np.pi*i0(wind_kappa))
initial_dist_ax.plot(np.degrees(light_x), light_y,
color='green',
label="Light: kappa={:.02f}".format(light_kappa)
)
initial_dist_ax.plot(np.degrees(wind_x),
wind_y,
color='blue',
label="Wind: kappa={:.02f}".format(wind_kappa))
initial_dist_ax.set_ylim([0,1])
initial_dist_ax.legend()
initial_dist_ax.set_title("Initial cue probability density")
initial_dist_ax.set_ylabel("Probability density")
conflict_dist_ax = plt.subplot(224)
conflict_light = params["conflict"][0]
conflict_wind = params["conflict"][1]
light_mu = conflict_light[0]
wind_mu = conflict_wind[0]
light_kappa = conflict_light[1]
wind_kappa = conflict_wind[1]
light_x = np.linspace(-np.pi, np.pi, num=100)
light_y = np.exp(light_kappa*np.cos(light_x - light_mu))/(2*np.pi*i0(light_kappa))
wind_x = np.linspace(-np.pi, np.pi, num=100)
wind_y = np.exp(wind_kappa*np.cos(wind_x - wind_mu))/(2*np.pi*i0(wind_kappa))
conflict_dist_ax.plot(np.degrees(light_x), light_y,
color='green',
label="Light: kappa={:.02f}".format(light_kappa)
)
conflict_dist_ax.plot(np.degrees(wind_x),
wind_y, color='blue',
label="Wind: kappa={:.02f}".format(wind_kappa))
conflict_dist_ax.set_ylim([0,1])
conflict_dist_ax.set_xlim([-180,180])
conflict_dist_ax.set_title("Conflict cue probability distributions")
conflict_dist_ax.set_xlabel("Degrees")
conflict_dist_ax.set_ylabel("Probability density")
# Bin data into 360/nbins degree bins to plot the population mass
nbins = 72
ch_hist = np.histogram(np.degrees(changes), np.linspace(-180, 180, nbins + 1))[0]
ch_hist_norm = ch_hist / sum(ch_hist)
# Plot population response alongside the cue distributions
plt.bar(np.linspace(-180, 180, nbins),
ch_hist_norm, width=360/nbins,
color='magenta',edgecolor='k', alpha=0.5,
label='Population response')
conflict_dist_ax.legend()
plt.gcf().set_size_inches(16,10)
plt.savefig(filename)
plt.clf()
# Loop admin
filenumber+=1
elevation+=np.radians(increment)
if __name__ == '__main__':
main()
| [
"numpy.radians",
"util.treatment.Treatment",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.clf",
"os.getcwd",
"os.chdir",
"matplotlib.pyplot.subplot",
"numpy.linspace",
"scipy.special.i0",
"numpy.cos",
"matplotlib.pyplot.tight_layout",
"numpy.degrees",
"util.mode... | [((1259, 1277), 'util.models.ReliabilityModel', 'ReliabilityModel', ([], {}), '()\n', (1275, 1277), False, 'from util.models import ReliabilityModel\n'), ((1333, 1355), 'os.chdir', 'os.chdir', (['"""frames/BWS"""'], {}), "('frames/BWS')\n", (1341, 1355), False, 'import os\n'), ((1651, 1668), 'numpy.radians', 'np.radians', (['start'], {}), '(start)\n', (1661, 1668), True, 'import numpy as np\n'), ((1366, 1377), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1375, 1377), False, 'import os\n'), ((1831, 1846), 'numpy.radians', 'np.radians', (['end'], {}), '(end)\n', (1841, 1846), True, 'import numpy as np\n'), ((1929, 1940), 'util.treatment.Treatment', 'Treatment', ([], {}), '()\n', (1938, 1940), False, 'from util.treatment import Treatment\n'), ((7032, 7053), 'numpy.radians', 'np.radians', (['increment'], {}), '(increment)\n', (7042, 7053), True, 'import numpy as np\n'), ((2152, 2165), 'numpy.radians', 'np.radians', (['(0)'], {}), '(0)\n', (2162, 2165), True, 'import numpy as np\n'), ((2215, 2228), 'numpy.radians', 'np.radians', (['(0)'], {}), '(0)\n', (2225, 2228), True, 'import numpy as np\n'), ((2322, 2335), 'numpy.radians', 'np.radians', (['(0)'], {}), '(0)\n', (2332, 2335), True, 'import numpy as np\n'), ((2385, 2400), 'numpy.radians', 'np.radians', (['(120)'], {}), '(120)\n', (2395, 2400), True, 'import numpy as np\n'), ((3177, 3195), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (3193, 3195), True, 'import matplotlib.pyplot as plt\n'), ((3213, 3249), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(121)'], {'projection': '"""polar"""'}), "(121, projection='polar')\n", (3224, 3249), True, 'import matplotlib.pyplot as plt\n'), ((3754, 3770), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(222)'], {}), '(222)\n', (3765, 3770), True, 'import matplotlib.pyplot as plt\n'), ((4053, 4088), 'numpy.linspace', 'np.linspace', (['(-np.pi)', 'np.pi'], {'num': '(100)'}), '(-np.pi, np.pi, num=100)\n', (4064, 4088), True, 'import numpy as 
np\n'), ((4205, 4240), 'numpy.linspace', 'np.linspace', (['(-np.pi)', 'np.pi'], {'num': '(100)'}), '(-np.pi, np.pi, num=100)\n', (4216, 4240), True, 'import numpy as np\n'), ((5008, 5024), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(224)'], {}), '(224)\n', (5019, 5024), True, 'import matplotlib.pyplot as plt\n'), ((5315, 5350), 'numpy.linspace', 'np.linspace', (['(-np.pi)', 'np.pi'], {'num': '(100)'}), '(-np.pi, np.pi, num=100)\n', (5326, 5350), True, 'import numpy as np\n'), ((5467, 5502), 'numpy.linspace', 'np.linspace', (['(-np.pi)', 'np.pi'], {'num': '(100)'}), '(-np.pi, np.pi, num=100)\n', (5478, 5502), True, 'import numpy as np\n'), ((6917, 6938), 'matplotlib.pyplot.savefig', 'plt.savefig', (['filename'], {}), '(filename)\n', (6928, 6938), True, 'import matplotlib.pyplot as plt\n'), ((6951, 6960), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (6958, 6960), True, 'import matplotlib.pyplot as plt\n'), ((2089, 2110), 'numpy.degrees', 'np.degrees', (['elevation'], {}), '(elevation)\n', (2099, 2110), True, 'import numpy as np\n'), ((4364, 4383), 'numpy.degrees', 'np.degrees', (['light_x'], {}), '(light_x)\n', (4374, 4383), True, 'import numpy as np\n'), ((4571, 4589), 'numpy.degrees', 'np.degrees', (['wind_x'], {}), '(wind_x)\n', (4581, 4589), True, 'import numpy as np\n'), ((5628, 5647), 'numpy.degrees', 'np.degrees', (['light_x'], {}), '(light_x)\n', (5638, 5647), True, 'import numpy as np\n'), ((5839, 5857), 'numpy.degrees', 'np.degrees', (['wind_x'], {}), '(wind_x)\n', (5849, 5857), True, 'import numpy as np\n'), ((6627, 6656), 'numpy.linspace', 'np.linspace', (['(-180)', '(180)', 'nbins'], {}), '(-180, 180, nbins)\n', (6638, 6656), True, 'import numpy as np\n'), ((4167, 4182), 'scipy.special.i0', 'i0', (['light_kappa'], {}), '(light_kappa)\n', (4169, 4182), False, 'from scipy.special import i0\n'), ((4315, 4329), 'scipy.special.i0', 'i0', (['wind_kappa'], {}), '(wind_kappa)\n', (4317, 4329), False, 'from scipy.special import i0\n'), ((5429, 5444), 
'scipy.special.i0', 'i0', (['light_kappa'], {}), '(light_kappa)\n', (5431, 5444), False, 'from scipy.special import i0\n'), ((5577, 5591), 'scipy.special.i0', 'i0', (['wind_kappa'], {}), '(wind_kappa)\n', (5579, 5591), False, 'from scipy.special import i0\n'), ((6426, 6445), 'numpy.degrees', 'np.degrees', (['changes'], {}), '(changes)\n', (6436, 6445), True, 'import numpy as np\n'), ((6447, 6480), 'numpy.linspace', 'np.linspace', (['(-180)', '(180)', '(nbins + 1)'], {}), '(-180, 180, nbins + 1)\n', (6458, 6480), True, 'import numpy as np\n'), ((6872, 6881), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (6879, 6881), True, 'import matplotlib.pyplot as plt\n'), ((3439, 3456), 'numpy.degrees', 'np.degrees', (['avg_t'], {}), '(avg_t)\n', (3449, 3456), True, 'import numpy as np\n'), ((4130, 4156), 'numpy.cos', 'np.cos', (['(light_x - light_mu)'], {}), '(light_x - light_mu)\n', (4136, 4156), True, 'import numpy as np\n'), ((4280, 4304), 'numpy.cos', 'np.cos', (['(wind_x - wind_mu)'], {}), '(wind_x - wind_mu)\n', (4286, 4304), True, 'import numpy as np\n'), ((5392, 5418), 'numpy.cos', 'np.cos', (['(light_x - light_mu)'], {}), '(light_x - light_mu)\n', (5398, 5418), True, 'import numpy as np\n'), ((5542, 5566), 'numpy.cos', 'np.cos', (['(wind_x - wind_mu)'], {}), '(wind_x - wind_mu)\n', (5548, 5566), True, 'import numpy as np\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = [
'GetResolverEndpointResult',
'AwaitableGetResolverEndpointResult',
'get_resolver_endpoint',
]
@pulumi.output_type
class GetResolverEndpointResult:
    """
    A collection of values returned by getResolverEndpoint.

    Generated by the Pulumi Terraform bridge: each constructor argument is
    validated and stored with ``pulumi.set`` so the ``@pulumi.getter``
    properties below can expose it.
    """
    def __init__(__self__, compartment_id=None, endpoint_type=None, forwarding_address=None, id=None, is_forwarding=None, is_listening=None, listening_address=None, name=None, nsg_ids=None, resolver_endpoint_name=None, resolver_id=None, scope=None, self=None, state=None, subnet_id=None, time_created=None, time_updated=None):
        # Generated constructor: each argument is type-checked, then stored via
        # pulumi.set() rather than plain attribute assignment, as required by
        # the @pulumi.output_type machinery.
        if compartment_id and not isinstance(compartment_id, str):
            raise TypeError("Expected argument 'compartment_id' to be a str")
        pulumi.set(__self__, "compartment_id", compartment_id)
        if endpoint_type and not isinstance(endpoint_type, str):
            raise TypeError("Expected argument 'endpoint_type' to be a str")
        pulumi.set(__self__, "endpoint_type", endpoint_type)
        if forwarding_address and not isinstance(forwarding_address, str):
            raise TypeError("Expected argument 'forwarding_address' to be a str")
        pulumi.set(__self__, "forwarding_address", forwarding_address)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if is_forwarding and not isinstance(is_forwarding, bool):
            raise TypeError("Expected argument 'is_forwarding' to be a bool")
        pulumi.set(__self__, "is_forwarding", is_forwarding)
        if is_listening and not isinstance(is_listening, bool):
            raise TypeError("Expected argument 'is_listening' to be a bool")
        pulumi.set(__self__, "is_listening", is_listening)
        if listening_address and not isinstance(listening_address, str):
            raise TypeError("Expected argument 'listening_address' to be a str")
        pulumi.set(__self__, "listening_address", listening_address)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if nsg_ids and not isinstance(nsg_ids, list):
            raise TypeError("Expected argument 'nsg_ids' to be a list")
        pulumi.set(__self__, "nsg_ids", nsg_ids)
        if resolver_endpoint_name and not isinstance(resolver_endpoint_name, str):
            raise TypeError("Expected argument 'resolver_endpoint_name' to be a str")
        pulumi.set(__self__, "resolver_endpoint_name", resolver_endpoint_name)
        if resolver_id and not isinstance(resolver_id, str):
            raise TypeError("Expected argument 'resolver_id' to be a str")
        pulumi.set(__self__, "resolver_id", resolver_id)
        if scope and not isinstance(scope, str):
            raise TypeError("Expected argument 'scope' to be a str")
        pulumi.set(__self__, "scope", scope)
        if self and not isinstance(self, str):
            raise TypeError("Expected argument 'self' to be a str")
        pulumi.set(__self__, "self", self)
        if state and not isinstance(state, str):
            raise TypeError("Expected argument 'state' to be a str")
        pulumi.set(__self__, "state", state)
        if subnet_id and not isinstance(subnet_id, str):
            raise TypeError("Expected argument 'subnet_id' to be a str")
        pulumi.set(__self__, "subnet_id", subnet_id)
        if time_created and not isinstance(time_created, str):
            raise TypeError("Expected argument 'time_created' to be a str")
        pulumi.set(__self__, "time_created", time_created)
        if time_updated and not isinstance(time_updated, str):
            raise TypeError("Expected argument 'time_updated' to be a str")
        pulumi.set(__self__, "time_updated", time_updated)
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The OCID of the owning compartment. This will match the resolver that the resolver endpoint is under and will be updated if the resolver's compartment is changed.
        """
        return pulumi.get(self, "compartment_id")
    @property
    @pulumi.getter(name="endpointType")
    def endpoint_type(self) -> str:
        """
        The type of resolver endpoint. VNIC is currently the only supported type.
        """
        return pulumi.get(self, "endpoint_type")
    @property
    @pulumi.getter(name="forwardingAddress")
    def forwarding_address(self) -> str:
        """
        An IP address from which forwarded queries may be sent. For VNIC endpoints, this IP address must be part of the subnet and will be assigned by the system if unspecified when isForwarding is true.
        """
        return pulumi.get(self, "forwarding_address")
    @property
    @pulumi.getter
    def id(self) -> str:
        # NOTE(review): undocumented in the generated schema; presumably the
        # OCID of the resolver endpoint resource — confirm with provider docs.
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="isForwarding")
    def is_forwarding(self) -> bool:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for forwarding.
        """
        return pulumi.get(self, "is_forwarding")
    @property
    @pulumi.getter(name="isListening")
    def is_listening(self) -> bool:
        """
        A Boolean flag indicating whether or not the resolver endpoint is for listening.
        """
        return pulumi.get(self, "is_listening")
    @property
    @pulumi.getter(name="listeningAddress")
    def listening_address(self) -> str:
        """
        An IP address to listen to queries on. For VNIC endpoints this IP address must be part of the subnet and will be assigned by the system if unspecified when isListening is true.
        """
        return pulumi.get(self, "listening_address")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resolver endpoint. Must be unique, case-insensitive, within the resolver.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="nsgIds")
    def nsg_ids(self) -> Sequence[str]:
        """
        An array of network security group OCIDs for the resolver endpoint. These must be part of the VCN that the resolver endpoint is a part of.
        """
        return pulumi.get(self, "nsg_ids")
    @property
    @pulumi.getter(name="resolverEndpointName")
    def resolver_endpoint_name(self) -> str:
        """
        The name of the target resolver endpoint (echoes the query argument).
        """
        return pulumi.get(self, "resolver_endpoint_name")
    @property
    @pulumi.getter(name="resolverId")
    def resolver_id(self) -> str:
        """
        The OCID of the target resolver (echoes the query argument).
        """
        return pulumi.get(self, "resolver_id")
    @property
    @pulumi.getter
    def scope(self) -> str:
        """
        The scope used for the lookup; must be `PRIVATE` for resolver endpoints (echoes the query argument).
        """
        return pulumi.get(self, "scope")
    @property
    @pulumi.getter
    def self(self) -> str:
        """
        The canonical absolute URL of the resource.
        """
        return pulumi.get(self, "self")
    @property
    @pulumi.getter
    def state(self) -> str:
        """
        The current state of the resource.
        """
        return pulumi.get(self, "state")
    @property
    @pulumi.getter(name="subnetId")
    def subnet_id(self) -> str:
        """
        The OCID of a subnet. Must be part of the VCN that the resolver is attached to.
        """
        return pulumi.get(self, "subnet_id")
    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> str:
        """
        The date and time the resource was created in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        return pulumi.get(self, "time_created")
    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> str:
        """
        The date and time the resource was last updated in "YYYY-MM-ddThh:mm:ssZ" format with a Z offset, as defined by RFC 3339.
        """
        return pulumi.get(self, "time_updated")
class AwaitableGetResolverEndpointResult(GetResolverEndpointResult):
    """Awaitable wrapper so the result can be used with ``await``."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes __await__ a generator function, which
        # is what the await protocol requires; awaiting this object therefore
        # completes immediately, returning a plain GetResolverEndpointResult
        # built from the already-resolved attribute values.
        if False:
            yield self
        return GetResolverEndpointResult(
            compartment_id=self.compartment_id,
            endpoint_type=self.endpoint_type,
            forwarding_address=self.forwarding_address,
            id=self.id,
            is_forwarding=self.is_forwarding,
            is_listening=self.is_listening,
            listening_address=self.listening_address,
            name=self.name,
            nsg_ids=self.nsg_ids,
            resolver_endpoint_name=self.resolver_endpoint_name,
            resolver_id=self.resolver_id,
            scope=self.scope,
            self=self.self,
            state=self.state,
            subnet_id=self.subnet_id,
            time_created=self.time_created,
            time_updated=self.time_updated)
def get_resolver_endpoint(resolver_endpoint_name: Optional[str] = None,
                          resolver_id: Optional[str] = None,
                          scope: Optional[str] = None,
                          opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetResolverEndpointResult:
    """
    This data source provides details about a specific Resolver Endpoint resource in Oracle Cloud Infrastructure DNS service.

    Gets information about a specific resolver endpoint. Note that attempting to get a resolver endpoint
    in the DELETED lifecycle state will result in a `404` response to be consistent with other operations of the
    API. Requires a `PRIVATE` scope query parameter.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_resolver_endpoint = oci.dns.get_resolver_endpoint(resolver_endpoint_name=oci_dns_resolver_endpoint["test_resolver_endpoint"]["name"],
        resolver_id=oci_dns_resolver["test_resolver"]["id"],
        scope="PRIVATE")
    ```


    :param str resolver_endpoint_name: The name of the target resolver endpoint.
    :param str resolver_id: The OCID of the target resolver.
    :param str scope: Value must be `PRIVATE` when listing private name resolver endpoints.
    """
    # Marshal the query arguments under the provider's camelCase keys.
    __args__ = {
        'resolverEndpointName': resolver_endpoint_name,
        'resolverId': resolver_id,
        'scope': scope,
    }
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Perform the provider invoke and unwrap the typed result.
    invoke_result = pulumi.runtime.invoke('oci:dns/getResolverEndpoint:getResolverEndpoint', __args__, opts=opts, typ=GetResolverEndpointResult).value

    return AwaitableGetResolverEndpointResult(
        compartment_id=invoke_result.compartment_id,
        endpoint_type=invoke_result.endpoint_type,
        forwarding_address=invoke_result.forwarding_address,
        id=invoke_result.id,
        is_forwarding=invoke_result.is_forwarding,
        is_listening=invoke_result.is_listening,
        listening_address=invoke_result.listening_address,
        name=invoke_result.name,
        nsg_ids=invoke_result.nsg_ids,
        resolver_endpoint_name=invoke_result.resolver_endpoint_name,
        resolver_id=invoke_result.resolver_id,
        scope=invoke_result.scope,
        self=invoke_result.self,
        state=invoke_result.state,
        subnet_id=invoke_result.subnet_id,
        time_created=invoke_result.time_created,
        time_updated=invoke_result.time_updated)
| [
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] | [((4180, 4215), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""compartmentId"""'}), "(name='compartmentId')\n", (4193, 4215), False, 'import pulumi\n'), ((4518, 4552), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""endpointType"""'}), "(name='endpointType')\n", (4531, 4552), False, 'import pulumi\n'), ((4764, 4803), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forwardingAddress"""'}), "(name='forwardingAddress')\n", (4777, 4803), False, 'import pulumi\n'), ((5244, 5278), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isForwarding"""'}), "(name='isForwarding')\n", (5257, 5278), False, 'import pulumi\n'), ((5499, 5532), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isListening"""'}), "(name='isListening')\n", (5512, 5532), False, 'import pulumi\n'), ((5750, 5788), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""listeningAddress"""'}), "(name='listeningAddress')\n", (5763, 5788), False, 'import pulumi\n'), ((6334, 6362), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""nsgIds"""'}), "(name='nsgIds')\n", (6347, 6362), False, 'import pulumi\n'), ((6637, 6679), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resolverEndpointName"""'}), "(name='resolverEndpointName')\n", (6650, 6679), False, 'import pulumi\n'), ((6803, 6835), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resolverId"""'}), "(name='resolverId')\n", (6816, 6835), False, 'import pulumi\n'), ((7387, 7417), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""subnetId"""'}), "(name='subnetId')\n", (7400, 7417), False, 'import pulumi\n'), ((7627, 7660), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""timeCreated"""'}), "(name='timeCreated')\n", (7640, 7660), False, 'import pulumi\n'), ((7913, 7946), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""timeUpdated"""'}), "(name='timeUpdated')\n", (7926, 7946), False, 'import pulumi\n'), ((1061, 1115), 'pulumi.set', 'pulumi.set', (['__self__', '"""compartment_id"""', 'compartment_id'], {}), "(__self__, 
'compartment_id', compartment_id)\n", (1071, 1115), False, 'import pulumi\n'), ((1266, 1318), 'pulumi.set', 'pulumi.set', (['__self__', '"""endpoint_type"""', 'endpoint_type'], {}), "(__self__, 'endpoint_type', endpoint_type)\n", (1276, 1318), False, 'import pulumi\n'), ((1484, 1546), 'pulumi.set', 'pulumi.set', (['__self__', '"""forwarding_address"""', 'forwarding_address'], {}), "(__self__, 'forwarding_address', forwarding_address)\n", (1494, 1546), False, 'import pulumi\n'), ((1664, 1694), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (1674, 1694), False, 'import pulumi\n'), ((1847, 1899), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_forwarding"""', 'is_forwarding'], {}), "(__self__, 'is_forwarding', is_forwarding)\n", (1857, 1899), False, 'import pulumi\n'), ((2049, 2099), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_listening"""', 'is_listening'], {}), "(__self__, 'is_listening', is_listening)\n", (2059, 2099), False, 'import pulumi\n'), ((2262, 2322), 'pulumi.set', 'pulumi.set', (['__self__', '"""listening_address"""', 'listening_address'], {}), "(__self__, 'listening_address', listening_address)\n", (2272, 2322), False, 'import pulumi\n'), ((2446, 2480), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (2456, 2480), False, 'import pulumi\n'), ((2615, 2655), 'pulumi.set', 'pulumi.set', (['__self__', '"""nsg_ids"""', 'nsg_ids'], {}), "(__self__, 'nsg_ids', nsg_ids)\n", (2625, 2655), False, 'import pulumi\n'), ((2833, 2903), 'pulumi.set', 'pulumi.set', (['__self__', '"""resolver_endpoint_name"""', 'resolver_endpoint_name'], {}), "(__self__, 'resolver_endpoint_name', resolver_endpoint_name)\n", (2843, 2903), False, 'import pulumi\n'), ((3048, 3096), 'pulumi.set', 'pulumi.set', (['__self__', '"""resolver_id"""', 'resolver_id'], {}), "(__self__, 'resolver_id', resolver_id)\n", (3058, 3096), False, 'import pulumi\n'), ((3223, 3259), 'pulumi.set', 'pulumi.set', 
(['__self__', '"""scope"""', 'scope'], {}), "(__self__, 'scope', scope)\n", (3233, 3259), False, 'import pulumi\n'), ((3383, 3417), 'pulumi.set', 'pulumi.set', (['__self__', '"""self"""', 'self'], {}), "(__self__, 'self', self)\n", (3393, 3417), False, 'import pulumi\n'), ((3544, 3580), 'pulumi.set', 'pulumi.set', (['__self__', '"""state"""', 'state'], {}), "(__self__, 'state', state)\n", (3554, 3580), False, 'import pulumi\n'), ((3719, 3763), 'pulumi.set', 'pulumi.set', (['__self__', '"""subnet_id"""', 'subnet_id'], {}), "(__self__, 'subnet_id', subnet_id)\n", (3729, 3763), False, 'import pulumi\n'), ((3911, 3961), 'pulumi.set', 'pulumi.set', (['__self__', '"""time_created"""', 'time_created'], {}), "(__self__, 'time_created', time_created)\n", (3921, 3961), False, 'import pulumi\n'), ((4109, 4159), 'pulumi.set', 'pulumi.set', (['__self__', '"""time_updated"""', 'time_updated'], {}), "(__self__, 'time_updated', time_updated)\n", (4119, 4159), False, 'import pulumi\n'), ((4463, 4497), 'pulumi.get', 'pulumi.get', (['self', '"""compartment_id"""'], {}), "(self, 'compartment_id')\n", (4473, 4497), False, 'import pulumi\n'), ((4710, 4743), 'pulumi.get', 'pulumi.get', (['self', '"""endpoint_type"""'], {}), "(self, 'endpoint_type')\n", (4720, 4743), False, 'import pulumi\n'), ((5088, 5126), 'pulumi.get', 'pulumi.get', (['self', '"""forwarding_address"""'], {}), "(self, 'forwarding_address')\n", (5098, 5126), False, 'import pulumi\n'), ((5201, 5223), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (5211, 5223), False, 'import pulumi\n'), ((5445, 5478), 'pulumi.get', 'pulumi.get', (['self', '"""is_forwarding"""'], {}), "(self, 'is_forwarding')\n", (5455, 5478), False, 'import pulumi\n'), ((5697, 5729), 'pulumi.get', 'pulumi.get', (['self', '"""is_listening"""'], {}), "(self, 'is_listening')\n", (5707, 5729), False, 'import pulumi\n'), ((6053, 6090), 'pulumi.get', 'pulumi.get', (['self', '"""listening_address"""'], {}), "(self, 
'listening_address')\n", (6063, 6090), False, 'import pulumi\n'), ((6289, 6313), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (6299, 6313), False, 'import pulumi\n'), ((6589, 6616), 'pulumi.get', 'pulumi.get', (['self', '"""nsg_ids"""'], {}), "(self, 'nsg_ids')\n", (6599, 6616), False, 'import pulumi\n'), ((6740, 6782), 'pulumi.get', 'pulumi.get', (['self', '"""resolver_endpoint_name"""'], {}), "(self, 'resolver_endpoint_name')\n", (6750, 6782), False, 'import pulumi\n'), ((6885, 6916), 'pulumi.get', 'pulumi.get', (['self', '"""resolver_id"""'], {}), "(self, 'resolver_id')\n", (6895, 6916), False, 'import pulumi\n'), ((6994, 7019), 'pulumi.get', 'pulumi.get', (['self', '"""scope"""'], {}), "(self, 'scope')\n", (7004, 7019), False, 'import pulumi\n'), ((7172, 7196), 'pulumi.get', 'pulumi.get', (['self', '"""self"""'], {}), "(self, 'self')\n", (7182, 7196), False, 'import pulumi\n'), ((7341, 7366), 'pulumi.get', 'pulumi.get', (['self', '"""state"""'], {}), "(self, 'state')\n", (7351, 7366), False, 'import pulumi\n'), ((7577, 7606), 'pulumi.get', 'pulumi.get', (['self', '"""subnet_id"""'], {}), "(self, 'subnet_id')\n", (7587, 7606), False, 'import pulumi\n'), ((7860, 7892), 'pulumi.get', 'pulumi.get', (['self', '"""time_created"""'], {}), "(self, 'time_created')\n", (7870, 7892), False, 'import pulumi\n'), ((8151, 8183), 'pulumi.get', 'pulumi.get', (['self', '"""time_updated"""'], {}), "(self, 'time_updated')\n", (8161, 8183), False, 'import pulumi\n'), ((10568, 10590), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (10588, 10590), False, 'import pulumi\n'), ((10682, 10810), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""oci:dns/getResolverEndpoint:getResolverEndpoint"""', '__args__'], {'opts': 'opts', 'typ': 'GetResolverEndpointResult'}), "('oci:dns/getResolverEndpoint:getResolverEndpoint',\n __args__, opts=opts, typ=GetResolverEndpointResult)\n", (10703, 10810), False, 'import pulumi\n')] |
import struct
import scipy.io.wavfile as wf
import numpy
import pydub
# for i in range(wave_file.getnframes()):
# # read a single frame and advance to next frame
# current_frame = wave_file.readframes(1)
#
# # check for silence
# silent = True
# # wave frame samples are stored in little endian**
# # this example works for a single channel 16-bit per sample encoding
# unpacked_signed_value = struct.unpack("<h", current_frame) # *
# if abs(unpacked_signed_value[0]) > 500:
# silent = False
#
# if silent:
# print("Frame %s is silent." % wave_file.tell())
# else:
# print("Frame %s is not silent." % wave_file.tell())
# rate, data = wf.read('testing.wav')
# # data0 is the data from channel 0.
# data0 = data[:, 0]
#
# print(data0)
# from pydub import AudioSegment
# from pydub.silence import detect_silence, detect_nonsilent
#
# song = AudioSegment.from_wav("soundaudio.wav")
# val = detect_silence(song)
# print(val)
from pyAudioAnalysis import audioSegmentation as aS
# Segment-classify the recording with the pre-trained SVM model and evaluate
# it against the ground-truth segment annotations.
flagsInd, classesAll, acc, CM = aS.mtFileClassification(
    "data/scottish.wav", "data/svmSM", "svm", True, 'data/scottish.segments'
)
"pyAudioAnalysis.audioSegmentation.mtFileClassification"
] | [((1079, 1180), 'pyAudioAnalysis.audioSegmentation.mtFileClassification', 'aS.mtFileClassification', (['"""data/scottish.wav"""', '"""data/svmSM"""', '"""svm"""', '(True)', '"""data/scottish.segments"""'], {}), "('data/scottish.wav', 'data/svmSM', 'svm', True,\n 'data/scottish.segments')\n", (1102, 1180), True, 'from pyAudioAnalysis import audioSegmentation as aS\n')] |
import os
"""
# If you have multi-gpu, designate the number of GPU to use.
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "6"
"""
import argparse
import logging
from tqdm import tqdm # progress bar
import numpy as np
import matplotlib.pyplot as plt
from keras import optimizers
from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau
import segmentation_models as sm
from segmentation_models.utils import set_trainable
from dataset import DataGenerator
def train_model(model, train_gen, valid_gen, epochs, batch_size, save_cp=True):
    """Run a custom Keras training loop with per-epoch validation.

    Relies on the module-level ``checkpoint_dir`` for checkpoint output and on
    ``validation_model`` (defined below in this file) for the validation pass.

    :param model: compiled Keras model exposing ``train_on_batch``
    :param train_gen: keras.utils.Sequence yielding (images, masks) batches
    :param valid_gen: keras.utils.Sequence used for validation
    :param epochs: number of epochs to train
    :param batch_size: batch size (only used to estimate image counts for tqdm)
    :param save_cp: when True, save model weights after every epoch
    """
    total_batch_count = 0
    train_num = len(train_gen) * batch_size   # approximate number of training images
    valid_num = len(valid_gen) * batch_size   # approximate number of validation images

    for epoch in range(epochs):  # iterate over epochs
        # Make the (possibly frozen) encoder trainable for fine-tuning.
        set_trainable(model)
        epoch_loss = 0  # cumulative loss over this epoch
        epoch_iou = 0   # cumulative IoU over this epoch
        count = 0       # number of batches seen this epoch
        with tqdm(total=train_num, desc=f'Epoch {epoch + 1}/{epochs}', position=0, leave=True, unit='img') as pbar:  # progress bar
            for batch in train_gen:
                imgs = batch[0]
                true_masks = batch[1]
                loss, iou = model.train_on_batch(imgs, true_masks)  # metrics for this batch

                epoch_loss += loss
                epoch_iou += iou

                pbar.set_postfix(**{'Batch loss': loss, 'Batch IoU': iou})  # show batch metrics in the bar
                pbar.update(imgs.shape[0])  # advance by number of images processed
                count += 1
                total_batch_count += 1

        print( "Epoch : loss: {}, IoU : {}".format(epoch_loss/count, epoch_iou/count))

        # Do validation
        validation_model(model, valid_gen, valid_num)

        # Let the Sequences reshuffle for the next epoch.
        train_gen.on_epoch_end()
        valid_gen.on_epoch_end()

        if save_cp:
            # makedirs(exist_ok=True) avoids the isdir()/mkdir() check-then-act
            # race of the original code and is idempotent across epochs.
            try:
                os.makedirs(checkpoint_dir, exist_ok=True)
            except OSError:
                # Don't fail training outright, but record why the directory
                # could not be created (save_weights will surface the real error).
                logging.exception('Could not create checkpoint directory %s', checkpoint_dir)
            model.save_weights(os.path.join(checkpoint_dir , f'CP_epoch{epoch + 1}.h5'))
            logging.info(f'Checkpoint {epoch + 1} saved !')
def validation_model(model, valid_gen, valid_num):
    """Evaluate the model on one full pass of valid_gen and display a sample.

    Prints the mean loss/IoU and pops up a matplotlib figure comparing the
    first image of the last batch, its ground-truth mask, and the prediction.
    """
    running_loss = 0  # cumulative loss over the validation pass
    running_iou = 0   # cumulative IoU over the validation pass
    n_batches = 0
    with tqdm(total=valid_num, desc='Validation round', position=0, leave=True, unit='img') as progress:  # progress bar
        for batch in valid_gen:
            imgs, true_masks = batch[0], batch[1]
            loss, iou = model.test_on_batch(imgs, true_masks)  # metrics for this batch
            running_loss += loss
            running_iou += iou
            progress.set_postfix(**{'Batch, loss': loss, 'Batch IoU': iou})  # show batch metrics in the bar
            progress.update(imgs.shape[0])  # advance by number of images processed
            n_batches += 1

    print("Validation loss: {}, IoU: {}".format(running_loss / n_batches, running_iou / n_batches))

    # Visualize the first sample of the last validation batch.
    sample_img = imgs[0]
    sample_mask = true_masks[0]
    pred_mask = model.predict(np.expand_dims(sample_img, 0))
    plt.subplot(131)
    plt.imshow(sample_img)
    plt.subplot(132)
    plt.imshow(sample_mask.squeeze(), cmap="gray")
    plt.subplot(133)
    plt.imshow(pred_mask.squeeze(), cmap="gray")
    plt.show()
    print()
def get_args():
    """Parse the training script's command-line options.

    Returns an argparse.Namespace with: epochs, batch_size, lr, backbone,
    load (weights file or False), resizing, and val (validation percentage).
    """
    arg_parser = argparse.ArgumentParser(description='Train the UNet on images and target masks',
                                         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # Training schedule
    arg_parser.add_argument('-e', '--epochs', metavar='E', type=int, default=100,
                            help='Number of epochs', dest='epochs')
    arg_parser.add_argument('-b', '--batch_size', metavar='B', type=int, nargs='?', default=4,
                            help='Batch size', dest='batch_size')
    arg_parser.add_argument('-l', '--learning-rate', metavar='LR', type=float, nargs='?', default=1e-4,
                            help='Learning rate', dest='lr')
    # Model configuration
    arg_parser.add_argument('-bb', '--backbone', default='resnet50', metavar='FILE',
                            help="backcone name")
    arg_parser.add_argument('-w', '--weight', dest='load', type=str, default=False,
                            help='Load model from a .h5 file')
    # Data handling
    arg_parser.add_argument('-s', '--resizing', dest='resizing', type=int, default=384,
                            help='Downscaling factor of the images')
    arg_parser.add_argument('-v', '--validation', dest='val', type=float, default=20.0,
                            help='Percent of the data that is used as validation (0-100)')

    return arg_parser.parse_args()
if __name__ == '__main__':
    # Data locations (relative to this script) and checkpoint output directory.
    img_dir = '../data/train/imgs/' # ./data/train/imgs/CVC_Original/'
    mask_dir = '../data/train/masks/' # ./data/train/masks/CVC_Ground Truth/'
    checkpoint_dir = './checkpoints'
    args = get_args()

    # train path
    train_ids = os.listdir(img_dir)
    # Validation Data Size
    n_val = int(len(train_ids) * args.val/100) # size of validation set

    # Split the image id list: first n_val ids for validation, rest for training.
    valid_ids = train_ids[:n_val]  # list of image ids used for validation of result 0 to 9
    train_ids = train_ids[n_val:]  # list of image ids used for training dataset
    # print(valid_ids, "\n\n")
    print("training_size: ", len(train_ids), "validation_size: ", len(valid_ids))

    train_gen = DataGenerator(train_ids, img_dir, mask_dir, img_size=args.resizing, batch_size=args.batch_size)
    valid_gen = DataGenerator(valid_ids, img_dir, mask_dir, img_size=args.resizing, batch_size=args.batch_size)

    print("total training batches: ", len(train_gen))
    print("total validaton batches: ", len(valid_gen))
    # NOTE(review): train_steps/valid_steps are computed but not used by the
    # custom loop in train_model — candidates for removal.
    train_steps = len(train_ids) // args.batch_size
    valid_steps = len(valid_ids) // args.batch_size

    # define model: U-Net with an ImageNet-pretrained encoder backbone.
    model = sm.Unet(args.backbone, encoder_weights='imagenet')

    optimizer = optimizers.Adam(lr=args.lr, decay=1e-4)
    model.compile(
        optimizer=optimizer,
        # "Adam",
        loss=sm.losses.bce_dice_loss, # sm.losses.bce_jaccard_loss, # sm.losses.binary_crossentropy,
        metrics=[sm.metrics.iou_score],
    )

    #model.summary()

    # NOTE(review): this callbacks list is never passed anywhere — train_model
    # has no callbacks parameter — so EarlyStopping / ReduceLROnPlateau /
    # ModelCheckpoint are currently inert. Also 'val_accuracy' is not a metric
    # produced above; confirm intent before wiring these in.
    callbacks = [
        EarlyStopping(patience=6, verbose=1),
        ReduceLROnPlateau(factor=0.1, patience=3, min_lr=1e-7, verbose=1),
        ModelCheckpoint('./weights.Epoch{epoch:02d}-Loss{loss:.3f}-VIou{val_iou_score:.3f}.h5', verbose=1,
                        monitor='val_accuracy', save_best_only=True, save_weights_only=True)
    ]

    train_model(model=model, train_gen=train_gen,
                valid_gen=valid_gen, epochs=args.epochs, batch_size=args.batch_size)
| [
"matplotlib.pyplot.imshow",
"keras.optimizers.Adam",
"os.listdir",
"segmentation_models.utils.set_trainable",
"argparse.ArgumentParser",
"keras.callbacks.ModelCheckpoint",
"keras.callbacks.ReduceLROnPlateau",
"tqdm.tqdm",
"os.path.join",
"logging.info",
"os.path.isdir",
"dataset.DataGenerator"... | [((3351, 3367), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(131)'], {}), '(131)\n', (3362, 3367), True, 'import matplotlib.pyplot as plt\n'), ((3372, 3391), 'matplotlib.pyplot.imshow', 'plt.imshow', (['imgs[0]'], {}), '(imgs[0])\n', (3382, 3391), True, 'import matplotlib.pyplot as plt\n'), ((3396, 3412), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(132)'], {}), '(132)\n', (3407, 3412), True, 'import matplotlib.pyplot as plt\n'), ((3470, 3486), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(133)'], {}), '(133)\n', (3481, 3486), True, 'import matplotlib.pyplot as plt\n'), ((3540, 3550), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3548, 3550), True, 'import matplotlib.pyplot as plt\n'), ((3594, 3740), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Train the UNet on images and target masks"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description=\n 'Train the UNet on images and target masks', formatter_class=argparse.\n ArgumentDefaultsHelpFormatter)\n", (3617, 3740), False, 'import argparse\n'), ((5111, 5130), 'os.listdir', 'os.listdir', (['img_dir'], {}), '(img_dir)\n', (5121, 5130), False, 'import os\n'), ((5536, 5635), 'dataset.DataGenerator', 'DataGenerator', (['train_ids', 'img_dir', 'mask_dir'], {'img_size': 'args.resizing', 'batch_size': 'args.batch_size'}), '(train_ids, img_dir, mask_dir, img_size=args.resizing,\n batch_size=args.batch_size)\n', (5549, 5635), False, 'from dataset import DataGenerator\n'), ((5648, 5747), 'dataset.DataGenerator', 'DataGenerator', (['valid_ids', 'img_dir', 'mask_dir'], {'img_size': 'args.resizing', 'batch_size': 'args.batch_size'}), '(valid_ids, img_dir, mask_dir, img_size=args.resizing,\n batch_size=args.batch_size)\n', (5661, 5747), False, 'from dataset import DataGenerator\n'), ((5990, 6040), 'segmentation_models.Unet', 'sm.Unet', (['args.backbone'], {'encoder_weights': '"""imagenet"""'}), "(args.backbone, 
encoder_weights='imagenet')\n", (5997, 6040), True, 'import segmentation_models as sm\n'), ((6058, 6099), 'keras.optimizers.Adam', 'optimizers.Adam', ([], {'lr': 'args.lr', 'decay': '(0.0001)'}), '(lr=args.lr, decay=0.0001)\n', (6073, 6099), False, 'from keras import optimizers\n'), ((964, 984), 'segmentation_models.utils.set_trainable', 'set_trainable', (['model'], {}), '(model)\n', (977, 984), False, 'from segmentation_models.utils import set_trainable\n'), ((2599, 2686), 'tqdm.tqdm', 'tqdm', ([], {'total': 'valid_num', 'desc': '"""Validation round"""', 'position': '(0)', 'leave': '(True)', 'unit': '"""img"""'}), "(total=valid_num, desc='Validation round', position=0, leave=True, unit\n ='img')\n", (2603, 2686), False, 'from tqdm import tqdm\n'), ((3320, 3346), 'numpy.expand_dims', 'np.expand_dims', (['imgs[0]', '(0)'], {}), '(imgs[0], 0)\n', (3334, 3346), True, 'import numpy as np\n'), ((6367, 6403), 'keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'patience': '(6)', 'verbose': '(1)'}), '(patience=6, verbose=1)\n', (6380, 6403), False, 'from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau\n'), ((6413, 6479), 'keras.callbacks.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {'factor': '(0.1)', 'patience': '(3)', 'min_lr': '(1e-07)', 'verbose': '(1)'}), '(factor=0.1, patience=3, min_lr=1e-07, verbose=1)\n', (6430, 6479), False, 'from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau\n'), ((6488, 6668), 'keras.callbacks.ModelCheckpoint', 'ModelCheckpoint', (['"""./weights.Epoch{epoch:02d}-Loss{loss:.3f}-VIou{val_iou_score:.3f}.h5"""'], {'verbose': '(1)', 'monitor': '"""val_accuracy"""', 'save_best_only': '(True)', 'save_weights_only': '(True)'}), "(\n './weights.Epoch{epoch:02d}-Loss{loss:.3f}-VIou{val_iou_score:.3f}.h5',\n verbose=1, monitor='val_accuracy', save_best_only=True,\n save_weights_only=True)\n", (6503, 6668), False, 'from keras.callbacks import EarlyStopping, ModelCheckpoint, ReduceLROnPlateau\n'), 
((1092, 1190), 'tqdm.tqdm', 'tqdm', ([], {'total': 'train_num', 'desc': 'f"""Epoch {epoch + 1}/{epochs}"""', 'position': '(0)', 'leave': '(True)', 'unit': '"""img"""'}), "(total=train_num, desc=f'Epoch {epoch + 1}/{epochs}', position=0, leave\n =True, unit='img')\n", (1096, 1190), False, 'from tqdm import tqdm\n'), ((2416, 2463), 'logging.info', 'logging.info', (['f"""Checkpoint {epoch + 1} saved !"""'], {}), "(f'Checkpoint {epoch + 1} saved !')\n", (2428, 2463), False, 'import logging\n'), ((2346, 2401), 'os.path.join', 'os.path.join', (['checkpoint_dir', 'f"""CP_epoch{epoch + 1}.h5"""'], {}), "(checkpoint_dir, f'CP_epoch{epoch + 1}.h5')\n", (2358, 2401), False, 'import os\n'), ((2078, 2107), 'os.path.isdir', 'os.path.isdir', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (2091, 2107), False, 'import os\n'), ((2129, 2153), 'os.mkdir', 'os.mkdir', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (2137, 2153), False, 'import os\n'), ((2174, 2218), 'logging.info', 'logging.info', (['"""Created checkpoint directory"""'], {}), "('Created checkpoint directory')\n", (2186, 2218), False, 'import logging\n')] |
from fastapi.security.api_key import APIKeyCookie, APIKeyHeader
# NOTE(review): API_KEY_NAME is not referenced by the schemes below, which
# hard-code their own parameter names — confirm it is used elsewhere in the app.
API_KEY_NAME = "api_key"

# auto_error=False on every scheme: a missing credential yields None instead of
# an automatic 403 response, leaving the route handler to decide how to react.
cookie_scheme = APIKeyCookie(name="bgm-tv-auto-tracker", auto_error=False)  # session cookie
API_KEY_HEADER = APIKeyHeader(name="api-key", auto_error=False)  # "api-key" request header
API_KEY_COOKIES = APIKeyCookie(name="api-key", auto_error=False)  # "api-key" cookie
| [
"fastapi.security.api_key.APIKeyCookie",
"fastapi.security.api_key.APIKeyHeader"
] | [((107, 165), 'fastapi.security.api_key.APIKeyCookie', 'APIKeyCookie', ([], {'name': '"""bgm-tv-auto-tracker"""', 'auto_error': '(False)'}), "(name='bgm-tv-auto-tracker', auto_error=False)\n", (119, 165), False, 'from fastapi.security.api_key import APIKeyCookie, APIKeyHeader\n'), ((183, 229), 'fastapi.security.api_key.APIKeyHeader', 'APIKeyHeader', ([], {'name': '"""api-key"""', 'auto_error': '(False)'}), "(name='api-key', auto_error=False)\n", (195, 229), False, 'from fastapi.security.api_key import APIKeyCookie, APIKeyHeader\n'), ((248, 294), 'fastapi.security.api_key.APIKeyCookie', 'APIKeyCookie', ([], {'name': '"""api-key"""', 'auto_error': '(False)'}), "(name='api-key', auto_error=False)\n", (260, 294), False, 'from fastapi.security.api_key import APIKeyCookie, APIKeyHeader\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.labour import labour
def test_labour():
  """Test module labour.py by downloading
  labour.csv and testing shape of
  extracted data has 569 rows and 4 columns
  """
  test_path = tempfile.mkdtemp()
  x_train, metadata = labour(test_path)
  try:
    assert x_train.shape == (569, 4)
  except Exception:
    # Clean up the temp download directory, then re-raise the original
    # failure. The original code did `raise()`, which raises an empty tuple
    # (a TypeError in Python 3) and masks the real assertion error.
    shutil.rmtree(test_path)
    raise
| [
"observations.r.labour.labour",
"tempfile.mkdtemp",
"shutil.rmtree"
] | [((356, 374), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (372, 374), False, 'import tempfile\n'), ((397, 414), 'observations.r.labour.labour', 'labour', (['test_path'], {}), '(test_path)\n', (403, 414), False, 'from observations.r.labour import labour\n'), ((473, 497), 'shutil.rmtree', 'shutil.rmtree', (['test_path'], {}), '(test_path)\n', (486, 497), False, 'import shutil\n')] |
## Data Loader: TE-CCA zT Dataset
# <NAME> (<EMAIL>) 2021-03-12
#
from citrination_client import CitrinationClient, PifSystemReturningQuery
from citrination_client import DataQuery, DatasetQuery, Filter
from matminer.featurizers.base import MultipleFeaturizer
from matminer.featurizers import composition as cf
from pymatgen import Composition
from sl_utils import pifs2df, setResDir
import pandas as pd
import numpy as np
import os
import time
prefix = "zT"
file_responses = prefix + "_responses.csv"
file_features = prefix + "_features.csv"
## Helper functions
def get_compostion(c):
"""Attempt to parse composition, return None if failed"""
try:
return Composition(c)
except:
return None
def load_data_zT():
results_dir = setResDir()
## Metadata
keys_response = [
'Seebeck coefficient; squared',
'Electrical resistivity',
'Thermal conductivity'
]
sign = np.array([
+1, # Seebeck
-1, # Electric resistivity
-1 # Thermal conductivity
])
## Load data, if possible
# --------------------------------------------------
try:
df_X_all = pd.read_csv(results_dir + file_features)
X_all = df_X_all.drop(df_X_all.columns[0], axis = 1).values
df_Y_all = pd.read_csv(results_dir + file_responses)
Y_all = df_Y_all.drop(df_Y_all.columns[0], axis = 1).values
print("Cached data loaded.")
except FileNotFoundError:
## Data Import
# --------------------------------------------------
# Initialize client
print("Accessing data from Citrination...")
site = 'https://citrination.com' # Citrination
client = CitrinationClient(api_key=os.environ['CITRINATION_API_KEY'], site=site)
search_client = client.search
# Aluminum dataset
dataset_id = 178480 # ucsb_te_roomtemp_seebeck
system_query = PifSystemReturningQuery(
size=1000,
query=DataQuery(
dataset=DatasetQuery(id=Filter(equal=str(dataset_id)))
)
)
query_result = search_client.pif_search(system_query)
print(" Found {} PIFs in dataset {}.".format(
query_result.total_num_hits,
dataset_id
))
## Wrangle
# --------------------------------------------------
pifs = [x.system for x in query_result.hits]
# Utility function will tabularize PIFs
df_response = pifs2df(pifs)
# Down-select columns to play well with to_numeric
df_response = df_response[
['Seebeck coefficient', 'Electrical resistivity', 'Thermal conductivity']
]
df_response = df_response.apply(pd.to_numeric)
# Parse chemical compositions
formulas = [pif.chemical_formula for pif in pifs]
df_comp = pd.DataFrame(
columns = ['chemical_formula'],
data = formulas
)
# Join
df_data = pd.concat([df_comp, df_response], axis = 1)
print(" Accessed data.")
# Featurize
print("Featurizing data...")
df_data['composition'] = df_data['chemical_formula'].apply(get_compostion)
f = MultipleFeaturizer([
cf.Stoichiometry(),
cf.ElementProperty.from_preset("magpie"),
cf.ValenceOrbital(props=['avg']),
cf.IonProperty(fast=True)
])
X = np.array(f.featurize_many(df_data['composition']))
# Find valid response values
keys_original = [
'Seebeck coefficient',
'Electrical resistivity',
'Thermal conductivity'
]
index_valid_response = {
key: df_data[key].dropna().index.values for key in keys_original
}
index_valid_all = df_data[keys_original].dropna().index.values
X_all = X[index_valid_all, :]
Y_all = df_data[keys_original].iloc[index_valid_all].values
# Manipulate columns for proper objective values
Y_all[:, 0] = Y_all[:, 0] ** 2 # Squared seebeck
print(" Data prepared; {0:} valid observations.".format(X_all.shape[0]))
# Cache data
pd.DataFrame(data = X_all).to_csv(results_dir + file_features)
pd.DataFrame(
data = Y_all,
columns = keys_response
).to_csv(results_dir + file_responses)
print("Data cached in results directory.")
return X_all, Y_all, sign, keys_response, prefix
if __name__ == "__main__":
X_all, Y_all, sign, keys_response, prefix = load_data_zT()
| [
"matminer.featurizers.composition.ValenceOrbital",
"pandas.read_csv",
"citrination_client.CitrinationClient",
"sl_utils.setResDir",
"numpy.array",
"pandas.DataFrame",
"matminer.featurizers.composition.IonProperty",
"matminer.featurizers.composition.Stoichiometry",
"pymatgen.Composition",
"pandas.c... | [((764, 775), 'sl_utils.setResDir', 'setResDir', ([], {}), '()\n', (773, 775), False, 'from sl_utils import pifs2df, setResDir\n'), ((940, 962), 'numpy.array', 'np.array', (['[+1, -1, -1]'], {}), '([+1, -1, -1])\n', (948, 962), True, 'import numpy as np\n'), ((678, 692), 'pymatgen.Composition', 'Composition', (['c'], {}), '(c)\n', (689, 692), False, 'from pymatgen import Composition\n'), ((1169, 1209), 'pandas.read_csv', 'pd.read_csv', (['(results_dir + file_features)'], {}), '(results_dir + file_features)\n', (1180, 1209), True, 'import pandas as pd\n'), ((1298, 1339), 'pandas.read_csv', 'pd.read_csv', (['(results_dir + file_responses)'], {}), '(results_dir + file_responses)\n', (1309, 1339), True, 'import pandas as pd\n'), ((1712, 1783), 'citrination_client.CitrinationClient', 'CitrinationClient', ([], {'api_key': "os.environ['CITRINATION_API_KEY']", 'site': 'site'}), "(api_key=os.environ['CITRINATION_API_KEY'], site=site)\n", (1729, 1783), False, 'from citrination_client import CitrinationClient, PifSystemReturningQuery\n'), ((2501, 2514), 'sl_utils.pifs2df', 'pifs2df', (['pifs'], {}), '(pifs)\n', (2508, 2514), False, 'from sl_utils import pifs2df, setResDir\n'), ((2879, 2936), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['chemical_formula']", 'data': 'formulas'}), "(columns=['chemical_formula'], data=formulas)\n", (2891, 2936), True, 'import pandas as pd\n'), ((3012, 3053), 'pandas.concat', 'pd.concat', (['[df_comp, df_response]'], {'axis': '(1)'}), '([df_comp, df_response], axis=1)\n', (3021, 3053), True, 'import pandas as pd\n'), ((3280, 3298), 'matminer.featurizers.composition.Stoichiometry', 'cf.Stoichiometry', ([], {}), '()\n', (3296, 3298), True, 'from matminer.featurizers import composition as cf\n'), ((3312, 3352), 'matminer.featurizers.composition.ElementProperty.from_preset', 'cf.ElementProperty.from_preset', (['"""magpie"""'], {}), "('magpie')\n", (3342, 3352), True, 'from matminer.featurizers import composition as cf\n'), 
((3366, 3398), 'matminer.featurizers.composition.ValenceOrbital', 'cf.ValenceOrbital', ([], {'props': "['avg']"}), "(props=['avg'])\n", (3383, 3398), True, 'from matminer.featurizers import composition as cf\n'), ((3412, 3437), 'matminer.featurizers.composition.IonProperty', 'cf.IonProperty', ([], {'fast': '(True)'}), '(fast=True)\n', (3426, 3437), True, 'from matminer.featurizers import composition as cf\n'), ((4252, 4276), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'X_all'}), '(data=X_all)\n', (4264, 4276), True, 'import pandas as pd\n'), ((4323, 4370), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'Y_all', 'columns': 'keys_response'}), '(data=Y_all, columns=keys_response)\n', (4335, 4370), True, 'import pandas as pd\n')] |
import requests
import json
from datetime import datetime
headers = {"Content-type": "application/json", "Accept": "text/plain"}
def addUser():
url = "http://10.194.223.134:5000/add_user"
data = {"username": "test_user"}
requests.post(url, data=json.dumps(data), headers=headers)
def addMessage():
url = "http://10.194.223.134:5000/phone_data/test_user"
data = {"message": "My Sample Message", "timestamp": datetime.timestamp(datetime.now())}
r = requests.post(url, data=json.dumps(data), headers=headers)
print(r.json())
addUser()
addMessage()
| [
"datetime.datetime.now",
"json.dumps"
] | [((259, 275), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (269, 275), False, 'import json\n'), ((449, 463), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (461, 463), False, 'from datetime import datetime\n'), ((498, 514), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (508, 514), False, 'import json\n')] |
import unittest
from context import html2md
from assertions import assertEq
__author__ = 'alex'
class SpecialListsTest(unittest.TestCase):
def test_text_and_paragraph(self):
in_html = '''<ul>
<li>item 1</li>
<li>item 2
<p>item 2 paragraph</p>
<p>item 2 item 2</p>
</li>
<li>item 3</li>
</ul>'''
out_md = '''* item 1
* item 2
item 2 paragraph
item 2 item 2
* item 3'''
assertEq(out_md, html2md.html2md(in_html))
def test_paragraph_mixed(self):
in_html = '''<ul>
<li>item 1</li>
<li>item 2</li>
<li><p>item 3</p>
<p>item 3 paragraph 2</p></li>
<li>item 4</li>
<li>item 5</li>
</ul>'''
out_md = '''* item 1
* item 2
* item 3
item 3 paragraph 2
* item 4
* item 5'''
assertEq(out_md, html2md.html2md(in_html))
def test_blockquote(self):
in_html = '''
<ul>
<li><blockquote>
<p>item 1</p>
</blockquote></li>
<li><blockquote>
<p>item 2 paragraph 1</p>
<p>item 2 paragraph 2</p>
</blockquote></li>
<li><p>item 3</p></li>
</ul>
'''
out_md = '''* > item 1
* > item 2 paragraph 1
> item 2 paragraph 2
* item 3'''
assertEq(out_md, html2md.html2md(in_html))
def test_blockquote_complex(self):
in_html = '''<ul>
<li>item 1</li>
<li><p>item 2</p>
<blockquote>
<p>item 2 paragraph 1</p>
<p>item 2 paragraph 2</p>
</blockquote></li>
<li><p>item 3</p>
<blockquote>
<p>item 3 blockquote</p>
</blockquote></li>
</ul>'''
out_md = '''* item 1
* item 2
> item 2 paragraph 1
> item 2 paragraph 2
* item 3
> item 3 blockquote'''
assertEq(out_md, html2md.html2md(in_html))
def test_cheatsheet(self):
in_html = '''
<ul>
<li><p>A list item.</p>
<p>With multiple paragraphs.</p>
<blockquote>
<p>And a blockquote</p>
</blockquote></li>
<li><p>Another List item with
a hard wrapped 2nd line.</p>
<pre><code>
project/
__init__.py
example1.py
test/
__init__.py
test_example1.py
</code></pre></li>
</ul>'''
out_md = '''* A list item.
With multiple paragraphs.
> And a blockquote
* Another List item with
a hard wrapped 2nd line.
project/
__init__.py
example1.py
test/
__init__.py
test_example1.py'''
assertEq(out_md, html2md.html2md(in_html))
def suite():
return unittest.TestLoader().loadTestsFromTestCase(SpecialListsTest)
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"context.html2md.html2md",
"unittest.TestLoader"
] | [((2488, 2503), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2501, 2503), False, 'import unittest\n'), ((435, 459), 'context.html2md.html2md', 'html2md.html2md', (['in_html'], {}), '(in_html)\n', (450, 459), False, 'from context import html2md\n'), ((775, 799), 'context.html2md.html2md', 'html2md.html2md', (['in_html'], {}), '(in_html)\n', (790, 799), False, 'from context import html2md\n'), ((1161, 1185), 'context.html2md.html2md', 'html2md.html2md', (['in_html'], {}), '(in_html)\n', (1176, 1185), False, 'from context import html2md\n'), ((1621, 1645), 'context.html2md.html2md', 'html2md.html2md', (['in_html'], {}), '(in_html)\n', (1636, 1645), False, 'from context import html2md\n'), ((2341, 2365), 'context.html2md.html2md', 'html2md.html2md', (['in_html'], {}), '(in_html)\n', (2356, 2365), False, 'from context import html2md\n'), ((2393, 2414), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (2412, 2414), False, 'import unittest\n')] |
import math
import numpy as np
from pymoo.model.algorithm import Algorithm
from pymoo.model.duplicate import DefaultDuplicateElimination
from pymoo.model.individual import Individual
from pymoo.model.population import Population
class GeneticAlgorithm(Algorithm):
def __init__(self,
pop_size,
sampling,
selection,
crossover,
mutation,
survival,
n_offsprings=None,
eliminate_duplicates=DefaultDuplicateElimination(),
repair=None,
individual=Individual(),
**kwargs
):
super().__init__(**kwargs)
# population size of the genetic algorithm
self.pop_size = pop_size
# initial sampling method: object, 2d array, or population (already evaluated)
self.sampling = sampling
# the method to be used to select parents for recombination
self.selection = selection
# method to do the crossover
self.crossover = crossover
# method for doing the mutation
self.mutation = mutation
# function to repair an offspring after mutation if necessary
self.repair = repair
# survival selection
self.survival = survival
# number of offsprings to generate through recombination
self.n_offsprings = n_offsprings
# set the duplicate detection class - a boolean value chooses the default duplicate detection
if isinstance(eliminate_duplicates, bool):
if eliminate_duplicates:
self.eliminate_duplicates = DefaultDuplicateElimination()
else:
self.eliminate_duplicates = None
else:
self.eliminate_duplicates = eliminate_duplicates
# the object to be used to represent an individual - either individual or derived class
self.individual = individual
# if the number of offspring is not set - equal to population size
if self.n_offsprings is None:
self.n_offsprings = pop_size
# other run specific data updated whenever solve is called - to share them in all algorithms
self.n_gen = None
self.pop = None
self.off = None
# this can be used to store additional data in submodules e.g. survival, recombination and so on
self.data = {}
def _initialize(self):
# ! get the initial population - different ways are possible
# provide a whole population object - (individuals might be already evaluated)
if isinstance(self.sampling, Population):
pop = self.sampling
else:
pop = Population(0, individual=self.individual)
if isinstance(self.sampling, np.ndarray):
pop = pop.new("X", self.sampling)
else:
pop = self.sampling.do(self.problem, self.pop_size, pop=pop, algorithm=self)
# repair all solutions that are not already evaluated
if self.repair:
I = [k for k in range(len(pop)) if pop[k].F is None]
pop = self.repair.do(self.problem, pop[I], algorithm=self)
# then evaluate using the objective function
self.evaluator.eval(self.problem, pop, algorithm=self)
# that call is a dummy survival to set attributes that are necessary for the mating selection
if self.survival:
pop = self.survival.do(self.problem, pop, len(pop), algorithm=self)
self.pop = pop
def _next(self):
# do the mating using the current population
self.off = self._mating(self.pop, n_max_iterations=100)
# if the mating could not generate any new offspring (duplicate elimination might make that happen)
if len(self.off) == 0:
self.termination.force_termination = True
return
# if not the desired number of offspring could be created
elif len(self.off) < self.n_offsprings:
if self.verbose:
print("WARNING: Mating could not produce the required number of (unique) offsprings!")
# evaluate the offspring
self.evaluator.eval(self.problem, self.off, algorithm=self)
# merge the offsprings with the current population
self.pop = self.pop.merge(self.off)
# the do survival selection
self.pop = self.survival.do(self.problem, self.pop, self.pop_size, algorithm=self)
def _mating(self, pop, n_max_iterations=100):
# the population object to be used
off = pop.new()
# mating counter - counts how often the mating needs to be done to fill up n_offsprings
n_matings = 0
# iterate until enough offsprings are created
while len(off) < self.n_offsprings:
# how many parents need to be select for the mating - depending on number of offsprings remaining
n_select = math.ceil((self.n_offsprings - len(off)) / self.crossover.n_offsprings)
# select the parents for the mating - just an index array
parents = self.selection.do(pop, n_select, self.crossover.n_parents, algorithm=self)
# do the crossover using the parents index and the population - additional data provided if necessary
_off = self.crossover.do(self.problem, pop, parents, algorithm=self)
# do the mutation on the offsprings created through crossover
_off = self.mutation.do(self.problem, _off, algorithm=self)
# repair the individuals if necessary
if self.repair:
_off = self.repair.do(self.problem, _off, algorithm=self)
if self.eliminate_duplicates is not None:
_off = self.eliminate_duplicates.do(_off, pop, off)
# if more offsprings than necessary - truncate them randomly
if len(off) + len(_off) > self.n_offsprings:
n_remaining = self.n_offsprings - len(off)
I = np.random.permutation(len(_off))[:n_remaining]
_off = _off[I]
# add to the offsprings and increase the mating counter
off = off.merge(_off)
n_matings += 1
# if no new offsprings can be generated within a pre-specified number of generations
if n_matings > n_max_iterations:
break
return off
def _finalize(self):
pass
| [
"pymoo.model.duplicate.DefaultDuplicateElimination",
"pymoo.model.individual.Individual",
"pymoo.model.population.Population"
] | [((530, 559), 'pymoo.model.duplicate.DefaultDuplicateElimination', 'DefaultDuplicateElimination', ([], {}), '()\n', (557, 559), False, 'from pymoo.model.duplicate import DefaultDuplicateElimination\n'), ((619, 631), 'pymoo.model.individual.Individual', 'Individual', ([], {}), '()\n', (629, 631), False, 'from pymoo.model.individual import Individual\n'), ((2742, 2783), 'pymoo.model.population.Population', 'Population', (['(0)'], {'individual': 'self.individual'}), '(0, individual=self.individual)\n', (2752, 2783), False, 'from pymoo.model.population import Population\n'), ((1677, 1706), 'pymoo.model.duplicate.DefaultDuplicateElimination', 'DefaultDuplicateElimination', ([], {}), '()\n', (1704, 1706), False, 'from pymoo.model.duplicate import DefaultDuplicateElimination\n')] |
#!/usr/local/bin/python3
print("send 3 non-overlapping ping fragments in all possible orders")
# |----|
# |----|
# |----|
import os
from addr import *
from scapy.all import *
permute=[]
permute.append([0,1,2])
permute.append([0,2,1])
permute.append([1,0,2])
permute.append([2,0,1])
permute.append([1,2,0])
permute.append([2,1,0])
pid=os.getpid()
payload=b"ABCDEFGHIJKLMNOP"
for p in permute:
pid += 1
eid=pid & 0xffff
packet=IP(src=LOCAL_ADDR, dst=REMOTE_ADDR)/ \
ICMP(type='echo-request', id=eid)/payload
frag=[]
fid=pid & 0xffff
frag.append(IP(src=LOCAL_ADDR, dst=REMOTE_ADDR, proto=1, id=fid,
flags='MF')/bytes(packet)[20:28])
frag.append(IP(src=LOCAL_ADDR, dst=REMOTE_ADDR, proto=1, id=fid,
frag=1, flags='MF')/bytes(packet)[28:36])
frag.append(IP(src=LOCAL_ADDR, dst=REMOTE_ADDR, proto=1, id=fid,
frag=2)/bytes(packet)[36:48])
eth=[]
for i in range(3):
eth.append(Ether(src=LOCAL_MAC, dst=REMOTE_MAC)/frag[p[i]])
if os.fork() == 0:
time.sleep(1)
sendp(eth, iface=LOCAL_IF)
os._exit(0)
ans=sniff(iface=LOCAL_IF, timeout=3, filter=
"ip and src "+REMOTE_ADDR+" and dst "+LOCAL_ADDR+" and icmp")
for a in ans:
if a and a.type == ETH_P_IP and \
a.payload.proto == 1 and \
a.payload.frag == 0 and a.payload.flags == 0 and \
icmptypes[a.payload.payload.type] == 'echo-reply':
id=a.payload.payload.id
print("id=%#x" % (id))
if id != eid:
print("WRONG ECHO REPLY ID")
exit(2)
data=a.payload.payload.payload.load
print("payload=%s" % (data))
if data == payload:
break
print("PAYLOAD!=%s" % (payload))
exit(1)
else:
print("NO ECHO REPLY")
exit(2)
| [
"os.fork",
"os._exit",
"os.getpid"
] | [((354, 365), 'os.getpid', 'os.getpid', ([], {}), '()\n', (363, 365), False, 'import os\n'), ((975, 984), 'os.fork', 'os.fork', ([], {}), '()\n', (982, 984), False, 'import os\n'), ((1038, 1049), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1046, 1049), False, 'import os\n')] |
from rimu import lineblocks, io, api
from typing import Dict
def test_render():
tests: Dict[str, str] = {
r'# foo': r'<h1>foo</h1>',
r'// foo': r'',
r'<image:foo|bar>': r'<img src="foo" alt="bar">',
r'<<#foo>>': r'<div id="foo"></div>',
r'.class #id "css"': r'',
r".safeMode='0'": r'',
r"|code|='<code>|</code>'": r'',
r"^='<sup>|</sup>'": r'',
r"/\.{3}/i = '…'": r'',
r"{foo}='bar'": r'',
}
api.init()
for k, v in tests.items():
reader = io.Reader(k)
writer = io.Writer()
lineblocks.render(reader, writer)
got = writer.toString()
assert got == v
| [
"rimu.io.Writer",
"rimu.io.Reader",
"rimu.api.init",
"rimu.lineblocks.render"
] | [((493, 503), 'rimu.api.init', 'api.init', ([], {}), '()\n', (501, 503), False, 'from rimu import lineblocks, io, api\n'), ((552, 564), 'rimu.io.Reader', 'io.Reader', (['k'], {}), '(k)\n', (561, 564), False, 'from rimu import lineblocks, io, api\n'), ((582, 593), 'rimu.io.Writer', 'io.Writer', ([], {}), '()\n', (591, 593), False, 'from rimu import lineblocks, io, api\n'), ((602, 635), 'rimu.lineblocks.render', 'lineblocks.render', (['reader', 'writer'], {}), '(reader, writer)\n', (619, 635), False, 'from rimu import lineblocks, io, api\n')] |
import vcr
tap_vcr = vcr.VCR(
serializer='yaml',
cassette_library_dir='tests/fixtures/vcr_cassettes',
record_mode='new_episodes',
match_on=['uri', 'method'],
)
| [
"vcr.VCR"
] | [((22, 167), 'vcr.VCR', 'vcr.VCR', ([], {'serializer': '"""yaml"""', 'cassette_library_dir': '"""tests/fixtures/vcr_cassettes"""', 'record_mode': '"""new_episodes"""', 'match_on': "['uri', 'method']"}), "(serializer='yaml', cassette_library_dir=\n 'tests/fixtures/vcr_cassettes', record_mode='new_episodes', match_on=[\n 'uri', 'method'])\n", (29, 167), False, 'import vcr\n')] |
import matplotlib.pyplot as plt
import numpy as np
import sys
sys.path.append("./../")
from swarm import Bird
class GraphMaker():
"""
"""
def __init__(self, env , birds, FIELD_SIZE ):
self.env= env
fig, ax = plt.subplots()
self.fig=fig
self.ax=ax
self.birds=birds
self.FIELD_SIZE=FIELD_SIZE
self.locations=Locations(self.birds)
locations, colors=self.locations.get_locations()
update_graph(self.fig, self.ax, birds=self.birds, locations=locations, colors=colors, title= "Time "+str(self.env.now), FIELD_SIZE=self.FIELD_SIZE)
for bird in self.birds:
bird.__class__=graphing_Bird
bird.tographing(self.locations, self.FIELD_SIZE)
def run(self):
yield self.env.timeout(.0001)
while True:
if self.locations.updated:
self.fig.clear()
locations, colors=self.locations.get_locations()
update_graph(self.fig, self.ax, birds=self.birds, locations=locations, colors=colors, title= "Time "+str(self.env.now), FIELD_SIZE=self.FIELD_SIZE)
self.locations.update()
yield self.env.timeout(.03)
class Locations():
"""
Keeps track of Locations for graphing
"""
def __init__(self, birds):
self.birds=[b.name for b in birds]
self.locations=[[[b.x, b.y, "blue"]] for b in birds]
self.updated=0
def update(self):
for l , location in enumerate(self.locations.copy()):
if len(location)>1:
self.locations[l].pop(0)
def set_location(self, name, location):
if name in self.birds:
self.locations[self.birds.index(name)].append(location)
self.updated=1
else:
raise ValueError("Could not find ", name , " in ", self.birds)
def get_locations(self):
return ([b[0][0] for b in self.locations], [b[0][1] for b in self.locations]) ,[b[0][2] for b in self.locations]
class graphing_Bird(Bird):
"""
A normal shower manager but it also updates the temperature graph everytime a temperature change is made.
"""
def __init__(self, name, context, bandwidth = 1.0, hard_limit_concurrency = 20, space_capacity = 10, verbose=True, id=0, max_temp=70, min_temp=-20):
super.__init__( name, context, bandwidth , hard_limit_concurrency , space_capacity , verbose, id, max_temp, min_temp)
self.tographing()
self.locations=None
def tographing(self, locations, FIELD_SIZE):
self.locations=locations
self.restricted_movement=[FIELD_SIZE, FIELD_SIZE]
def update_graph(fig, ax, birds, locations, colors, title="", FIELD_SIZE=1000):
"""
Updates the location graph.
"""
p1 = plt.scatter(*locations, color=colors , s=5)
plt.axhline(0, color='grey', linewidth=0.8)
ax.set_ylabel(' ')
ax.set_xlabel(' ')
#ax.set_xticks(ind)
#ax.set_xticklabels([s.name for s in shower_Managers])
colors=["blue", "red", "yellow", "green", "black", "indigo", "darkred", "lime", "seagreen", "pink"]
plt.scatter([],[], color="blue", label= "Independant")
plt.scatter([],[], color="red", label= "Calling")
plt.scatter([],[], color="seagreen", label= "Listening")
plt.legend(loc='center left', bbox_to_anchor=(1, 0.5), title="Birds:")
plt.ylim(0, FIELD_SIZE)
plt.xlim(0, FIELD_SIZE)
plt.title(title)
plt.tight_layout()
plt.draw()
plt.pause(0.000001)
| [
"matplotlib.pyplot.draw",
"matplotlib.pyplot.title",
"matplotlib.pyplot.axhline",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.tight_layout",
"matplotlib.pyplot.pause",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.xlim",
"sys.path.append",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.lege... | [((62, 86), 'sys.path.append', 'sys.path.append', (['"""./../"""'], {}), "('./../')\n", (77, 86), False, 'import sys\n'), ((2786, 2828), 'matplotlib.pyplot.scatter', 'plt.scatter', (['*locations'], {'color': 'colors', 's': '(5)'}), '(*locations, color=colors, s=5)\n', (2797, 2828), True, 'import matplotlib.pyplot as plt\n'), ((2834, 2877), 'matplotlib.pyplot.axhline', 'plt.axhline', (['(0)'], {'color': '"""grey"""', 'linewidth': '(0.8)'}), "(0, color='grey', linewidth=0.8)\n", (2845, 2877), True, 'import matplotlib.pyplot as plt\n'), ((3116, 3170), 'matplotlib.pyplot.scatter', 'plt.scatter', (['[]', '[]'], {'color': '"""blue"""', 'label': '"""Independant"""'}), "([], [], color='blue', label='Independant')\n", (3127, 3170), True, 'import matplotlib.pyplot as plt\n'), ((3175, 3224), 'matplotlib.pyplot.scatter', 'plt.scatter', (['[]', '[]'], {'color': '"""red"""', 'label': '"""Calling"""'}), "([], [], color='red', label='Calling')\n", (3186, 3224), True, 'import matplotlib.pyplot as plt\n'), ((3229, 3285), 'matplotlib.pyplot.scatter', 'plt.scatter', (['[]', '[]'], {'color': '"""seagreen"""', 'label': '"""Listening"""'}), "([], [], color='seagreen', label='Listening')\n", (3240, 3285), True, 'import matplotlib.pyplot as plt\n'), ((3290, 3360), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""center left"""', 'bbox_to_anchor': '(1, 0.5)', 'title': '"""Birds:"""'}), "(loc='center left', bbox_to_anchor=(1, 0.5), title='Birds:')\n", (3300, 3360), True, 'import matplotlib.pyplot as plt\n'), ((3365, 3388), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', 'FIELD_SIZE'], {}), '(0, FIELD_SIZE)\n', (3373, 3388), True, 'import matplotlib.pyplot as plt\n'), ((3393, 3416), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', 'FIELD_SIZE'], {}), '(0, FIELD_SIZE)\n', (3401, 3416), True, 'import matplotlib.pyplot as plt\n'), ((3421, 3437), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (3430, 3437), True, 'import matplotlib.pyplot 
as plt\n'), ((3442, 3460), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (3458, 3460), True, 'import matplotlib.pyplot as plt\n'), ((3465, 3475), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (3473, 3475), True, 'import matplotlib.pyplot as plt\n'), ((3480, 3496), 'matplotlib.pyplot.pause', 'plt.pause', (['(1e-06)'], {}), '(1e-06)\n', (3489, 3496), True, 'import matplotlib.pyplot as plt\n'), ((238, 252), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (250, 252), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/python
"""
FAOSTAT:
-------
Reads FAOSTAT JSON and creates datasets.
"""
import logging
from datetime import datetime, timedelta
from os import remove, rename
from os.path import basename, exists, getctime, join
from urllib.parse import urlsplit
from zipfile import ZipFile
from hdx.data.dataset import Dataset
from hdx.data.hdxobject import HDXError
from hdx.data.showcase import Showcase
from hdx.location.country import Country
from hdx.utilities.dateparse import parse_date_range
from hdx.utilities.dictandlist import dict_of_lists_add
from slugify import slugify
logger = logging.getLogger(__name__)
description = "FAO statistics collates and disseminates food and agricultural statistics globally. The division develops methodologies and standards for data collection, and holds regular meetings and workshops to support member countries develop statistical systems. We produce publications, working papers and statistical yearbooks that cover food security, prices, production and trade and agri-environmental statistics."
hxltags = {
"Iso3": "#country+code",
"StartDate": "#date+start",
"EndDate": "#date+end",
"Year": "#date+year",
"Area": "#country+name",
"Item Code": "#indicator+code",
"Item": "#indicator+name",
"Unit": "#indicator+type",
"Value": "#indicator+value+num",
}
def download_indicatorsets(filelist_url, indicatorsetnames, downloader, folder):
indicatorsets = dict()
response = downloader.download(filelist_url)
jsonresponse = response.json()
def add_row(row, filepath, indicatorsetname):
row["path"] = filepath
quickcharts = indicatorsetname.get("quickcharts")
if quickcharts and row["DatasetCode"] == quickcharts["code"]:
row["quickcharts"] = quickcharts["indicators"]
else:
row["quickcharts"] = None
dict_of_lists_add(indicatorsets, indicatorsetname["category"], row)
for row in jsonresponse["Datasets"]["Dataset"]:
for indicatorsetname in indicatorsetnames:
category = indicatorsetname["category"]
datasetname = row["DatasetName"]
if f"{category}:" not in datasetname or "archive" in datasetname.lower():
continue
filelocation = row["FileLocation"]
urlpath = urlsplit(filelocation).path
filename = basename(urlpath).replace("zip", "csv")
if "Archive" in filename:
continue
indicatorsetcode = row["DatasetCode"]
filepath = join(folder, f"{indicatorsetcode}.csv")
statusfile = join(folder, f"{indicatorsetcode}.txt")
if exists(filepath):
if exists(statusfile):
filedate = datetime.fromtimestamp(getctime(statusfile))
if filedate > (datetime.now() - timedelta(days=1)):
with open(statusfile) as f:
status = f.read()
if status == "OK":
add_row(row, filepath, indicatorsetname)
continue
remove(statusfile)
remove(filepath)
path = filepath.replace(".csv", ".zip")
if exists(path):
remove(path)
path = downloader.download_file(filelocation, path=path)
with ZipFile(path, "r") as zip:
path = zip.extract(filename, path=folder)
rename(path, filepath)
with open(statusfile, "w") as f:
f.write("OK")
add_row(row, filepath, indicatorsetname)
return indicatorsets
def get_countries(countries_url, downloader):
countrymapping = dict()
_, iterator = downloader.get_tabular_rows(
countries_url, headers=1, dict_form=True, format="csv"
)
for row in iterator:
countryiso = row["ISO3 Code"].strip()
if not countryiso:
continue
try:
int(countryiso)
continue
except ValueError:
pass
countrymapping[row["Country Code"].strip()] = (
countryiso,
row["Country"].strip(),
)
countries = list()
for countryiso, countryname in sorted(countrymapping.values()):
newcountryname = Country.get_country_name_from_iso3(countryiso)
if newcountryname:
countries.append(
{
"iso3": countryiso,
"countryname": newcountryname,
"origname": countryname,
}
)
return countries, countrymapping
def generate_dataset_and_showcase(
indicatorsetname,
indicatorsets,
country,
countrymapping,
showcase_base_url,
filelist_url,
downloader,
folder,
):
countryiso = country["iso3"]
countryname = country["countryname"]
indicatorset = indicatorsets[indicatorsetname]
if indicatorsetname == "Prices":
indicatorsetdisplayname = indicatorsetname
else:
indicatorsetdisplayname = f"{indicatorsetname} Indicators"
title = f"{countryname} - {indicatorsetdisplayname}"
name = f"FAOSTAT {indicatorsetdisplayname} for {countryname}"
slugified_name = slugify(name).lower()
logger.info(f"Creating dataset: {title}")
dataset = Dataset({"name": slugified_name, "title": title})
dataset.set_maintainer("196196be-6037-4488-8b71-d786adf4c081")
dataset.set_organization("ed727a5b-3e6e-4cd6-b97e-4a71532085e6")
dataset.set_expected_update_frequency("Every year")
dataset.set_subnational(False)
try:
dataset.add_country_location(countryiso)
except HDXError as e:
logger.exception(f"{countryname} has a problem! {e}")
return None, None, None, None
tags = ["hxl", "indicators"]
tag = indicatorsetname.lower()
if " - " in tag:
tags.extend(tag.split(" - "))
else:
tags.append(tag)
dataset.add_tags(tags)
def process_date(row):
countrycode = row.get("Area Code")
if countrycode is None:
return None
result = countrymapping.get(countrycode)
if result is None:
return None
isolookup, _ = result
if isolookup != countryiso:
return None
row["Iso3"] = countryiso
year = row["Year"]
month = row.get("Months")
if month is not None and month != "Annual value":
startdate, enddate = parse_date_range(f"{month} {year}")
else:
if "-" in year:
yearrange = year.split("-")
startdate, _ = parse_date_range(yearrange[0])
_, enddate = parse_date_range(yearrange[1])
row["Year"] = yearrange[1]
else:
startdate, enddate = parse_date_range(year)
row["StartDate"] = startdate.strftime("%Y-%m-%d")
row["EndDate"] = enddate.strftime("%Y-%m-%d")
return {"startdate": startdate, "enddate": enddate}
bites_disabled = [True, True, True]
qc_indicators = None
categories = list()
for row in indicatorset:
longname = row["DatasetName"]
url = row["path"]
category = longname.split(": ")[1]
filename = f"{category}_{countryiso}.csv"
description = f"*{category}:*\n{row['DatasetDescription']}"
if category[-10:] == "Indicators":
name = category
else:
name = f"{category} data"
resourcedata = {"name": f"{name} for {countryname}", "description": description}
header_insertions = [(0, "EndDate"), (0, "StartDate"), (0, "Iso3")]
indicators_for_qc = row.get("quickcharts")
if indicators_for_qc:
quickcharts = {
"hashtag": "#indicator+code",
"values": [x["code"] for x in indicators_for_qc],
"numeric_hashtag": "#indicator+value+num",
"cutdown": 2,
"cutdownhashtags": ["#indicator+code", "#country+code", "#date+year"],
}
qc_indicators = indicators_for_qc
else:
quickcharts = None
success, results = dataset.download_and_generate_resource(
downloader,
url,
hxltags,
folder,
filename,
resourcedata,
header_insertions=header_insertions,
date_function=process_date,
quickcharts=quickcharts,
encoding="WINDOWS-1252",
)
if success is False:
logger.warning(f"{category} for {countryname} has no data!")
continue
disabled_bites = results.get("bites_disabled")
if disabled_bites:
bites_disabled = disabled_bites
categories.append(category)
if dataset.number_of_resources() == 0:
logger.warning(f"{countryname} has no data!")
return None, None, None, None
dataset.quickcharts_resource_last()
notes = [
f"{indicatorsetdisplayname} for {countryname}.\n\n",
f"Contains data from the FAOSTAT [bulk data service]({filelist_url})",
]
if len(categories) == 1:
notes.append(".")
else:
notes.append(f" covering the following categories: {', '.join(categories)}")
dataset["notes"] = "".join(notes)
showcase = Showcase(
{
"name": f"{slugified_name}-showcase",
"title": title,
"notes": f"{indicatorsetname} Data Dashboard for {countryname}",
"url": f"{showcase_base_url}{countryiso}",
"image_url": "https://pbs.twimg.com/profile_images/1375385494167691269/Bc49-Yx8_400x400.jpg",
}
)
showcase.add_tags(tags)
return dataset, showcase, bites_disabled, qc_indicators
| [
"logging.getLogger",
"os.path.exists",
"zipfile.ZipFile",
"hdx.data.showcase.Showcase",
"urllib.parse.urlsplit",
"os.rename",
"os.path.getctime",
"os.path.join",
"hdx.location.country.Country.get_country_name_from_iso3",
"hdx.data.dataset.Dataset",
"os.remove",
"datetime.datetime.now",
"date... | [((593, 620), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (610, 620), False, 'import logging\n'), ((5366, 5415), 'hdx.data.dataset.Dataset', 'Dataset', (["{'name': slugified_name, 'title': title}"], {}), "({'name': slugified_name, 'title': title})\n", (5373, 5415), False, 'from hdx.data.dataset import Dataset\n'), ((9384, 9667), 'hdx.data.showcase.Showcase', 'Showcase', (["{'name': f'{slugified_name}-showcase', 'title': title, 'notes':\n f'{indicatorsetname} Data Dashboard for {countryname}', 'url':\n f'{showcase_base_url}{countryiso}', 'image_url':\n 'https://pbs.twimg.com/profile_images/1375385494167691269/Bc49-Yx8_400x400.jpg'\n }"], {}), "({'name': f'{slugified_name}-showcase', 'title': title, 'notes':\n f'{indicatorsetname} Data Dashboard for {countryname}', 'url':\n f'{showcase_base_url}{countryiso}', 'image_url':\n 'https://pbs.twimg.com/profile_images/1375385494167691269/Bc49-Yx8_400x400.jpg'\n })\n", (9392, 9667), False, 'from hdx.data.showcase import Showcase\n'), ((1863, 1930), 'hdx.utilities.dictandlist.dict_of_lists_add', 'dict_of_lists_add', (['indicatorsets', "indicatorsetname['category']", 'row'], {}), "(indicatorsets, indicatorsetname['category'], row)\n", (1880, 1930), False, 'from hdx.utilities.dictandlist import dict_of_lists_add\n'), ((4340, 4386), 'hdx.location.country.Country.get_country_name_from_iso3', 'Country.get_country_name_from_iso3', (['countryiso'], {}), '(countryiso)\n', (4374, 4386), False, 'from hdx.location.country import Country\n'), ((2539, 2578), 'os.path.join', 'join', (['folder', 'f"""{indicatorsetcode}.csv"""'], {}), "(folder, f'{indicatorsetcode}.csv')\n", (2543, 2578), False, 'from os.path import basename, exists, getctime, join\n'), ((2604, 2643), 'os.path.join', 'join', (['folder', 'f"""{indicatorsetcode}.txt"""'], {}), "(folder, f'{indicatorsetcode}.txt')\n", (2608, 2643), False, 'from os.path import basename, exists, getctime, join\n'), ((2659, 2675), 'os.path.exists', 
'exists', (['filepath'], {}), '(filepath)\n', (2665, 2675), False, 'from os.path import basename, exists, getctime, join\n'), ((3262, 3274), 'os.path.exists', 'exists', (['path'], {}), '(path)\n', (3268, 3274), False, 'from os.path import basename, exists, getctime, join\n'), ((5284, 5297), 'slugify.slugify', 'slugify', (['name'], {}), '(name)\n', (5291, 5297), False, 'from slugify import slugify\n'), ((6518, 6553), 'hdx.utilities.dateparse.parse_date_range', 'parse_date_range', (['f"""{month} {year}"""'], {}), "(f'{month} {year}')\n", (6534, 6553), False, 'from hdx.utilities.dateparse import parse_date_range\n'), ((2312, 2334), 'urllib.parse.urlsplit', 'urlsplit', (['filelocation'], {}), '(filelocation)\n', (2320, 2334), False, 'from urllib.parse import urlsplit\n'), ((2696, 2714), 'os.path.exists', 'exists', (['statusfile'], {}), '(statusfile)\n', (2702, 2714), False, 'from os.path import basename, exists, getctime, join\n'), ((3178, 3194), 'os.remove', 'remove', (['filepath'], {}), '(filepath)\n', (3184, 3194), False, 'from os import remove, rename\n'), ((3292, 3304), 'os.remove', 'remove', (['path'], {}), '(path)\n', (3298, 3304), False, 'from os import remove, rename\n'), ((3391, 3409), 'zipfile.ZipFile', 'ZipFile', (['path', '"""r"""'], {}), "(path, 'r')\n", (3398, 3409), False, 'from zipfile import ZipFile\n'), ((3492, 3514), 'os.rename', 'rename', (['path', 'filepath'], {}), '(path, filepath)\n', (3498, 3514), False, 'from os import remove, rename\n'), ((6671, 6701), 'hdx.utilities.dateparse.parse_date_range', 'parse_date_range', (['yearrange[0]'], {}), '(yearrange[0])\n', (6687, 6701), False, 'from hdx.utilities.dateparse import parse_date_range\n'), ((6731, 6761), 'hdx.utilities.dateparse.parse_date_range', 'parse_date_range', (['yearrange[1]'], {}), '(yearrange[1])\n', (6747, 6761), False, 'from hdx.utilities.dateparse import parse_date_range\n'), ((6860, 6882), 'hdx.utilities.dateparse.parse_date_range', 'parse_date_range', (['year'], {}), '(year)\n', 
(6876, 6882), False, 'from hdx.utilities.dateparse import parse_date_range\n'), ((2363, 2380), 'os.path.basename', 'basename', (['urlpath'], {}), '(urlpath)\n', (2371, 2380), False, 'from os.path import basename, exists, getctime, join\n'), ((3143, 3161), 'os.remove', 'remove', (['statusfile'], {}), '(statusfile)\n', (3149, 3161), False, 'from os import remove, rename\n'), ((2770, 2790), 'os.path.getctime', 'getctime', (['statusfile'], {}), '(statusfile)\n', (2778, 2790), False, 'from os.path import basename, exists, getctime, join\n'), ((2827, 2841), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2839, 2841), False, 'from datetime import datetime, timedelta\n'), ((2844, 2861), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (2853, 2861), False, 'from datetime import datetime, timedelta\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from time import timezone
from django.db import models
# Create your models here.
class UserInfo(models.Model):
user= models.CharField(max_length = 30)
pwd = models.CharField(max_length = 30)
# class Publisher(models.Model):
# name = models.CharField(max_length=30)
# address = models.CharField(max_length=50)
# website = models.URLField()
# class Author(models.Model):
# email = models.EmailField()
# first_name = models.CharField(max_length = 30)
# last_name = models.CharField(max_length = 30)
# class Book(models.Model):
# title = models.CharField(max_length = 150)
# authors = models.ManyToManyField(Author)
# publisher = models.ForeignKey(Publisher)
class DoubanMovie(models.Model):
name = models.CharField(max_length = 200)
info = models.CharField(max_length = 10000)
rating = models.CharField(max_length = 20)
num = models.CharField(max_length = 50)
quote = models.CharField(max_length = 150)
img_url = models.CharField(max_length = 300)
| [
"django.db.models.CharField"
] | [((191, 222), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (207, 222), False, 'from django.db import models\n'), ((233, 264), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)'}), '(max_length=30)\n', (249, 264), False, 'from django.db import models\n'), ((803, 835), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (819, 835), False, 'from django.db import models\n'), ((847, 881), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10000)'}), '(max_length=10000)\n', (863, 881), False, 'from django.db import models\n'), ((895, 926), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (911, 926), False, 'from django.db import models\n'), ((937, 968), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (953, 968), False, 'from django.db import models\n'), ((981, 1013), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)'}), '(max_length=150)\n', (997, 1013), False, 'from django.db import models\n'), ((1028, 1060), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (1044, 1060), False, 'from django.db import models\n')] |
from hwt.synthesizer.unit import Unit
from hwt.interfaces.std import VectSignal
from hwt.hdl.types.struct import HStruct
from hwt.interfaces.utils import addClkRstn
class PrivateSignalsOfStructType(Unit):
def _declr(self):
addClkRstn(self)
self.a = VectSignal(8)
self.b = VectSignal(8)._m()
self.c = VectSignal(8)
self.d = VectSignal(8)._m()
def _impl(self):
t = self.a._dtype
tmp_t = \
HStruct(
(t, "a0"),
(t, "a1"),
(t[2], "a2_3"),
(HStruct(
(t, "a4"),
(t[2], "a5_6"),
),
"a4_5_6"
),
)
tmp = self._sig("tmp", tmp_t)
self.connect_tmp_chain(tmp, self.a, self.b)
tmp_reg = self._reg("tmp_reg", tmp_t, def_val={
"a0": 0,
"a1": 1,
"a2_3": [2, 3],
"a4_5_6": {
"a4": 4,
"a5_6": [5, 6],
}
})
self.connect_tmp_chain(tmp_reg, self.c, self.d)
def connect_tmp_chain(self, tmp, a_in, a_out):
# a connected to b using chain of tmp signals from tmp sig
tmp.a0(a_in)
tmp.a1(tmp.a0)
tmp.a2_3[0](tmp.a1)
tmp.a2_3[1](tmp.a2_3[0])
tmp.a4_5_6.a4(tmp.a2_3[1])
tmp.a4_5_6.a5_6[0](tmp.a4_5_6.a4)
tmp.a4_5_6.a5_6[1](tmp.a4_5_6.a5_6[0])
a_out(tmp.a4_5_6.a5_6[1])
if __name__ == "__main__":
from hwt.synthesizer.utils import to_rtl_str
u = PrivateSignalsOfStructType()
print(to_rtl_str(u))
| [
"hwt.interfaces.std.VectSignal",
"hwt.synthesizer.utils.to_rtl_str",
"hwt.interfaces.utils.addClkRstn",
"hwt.hdl.types.struct.HStruct"
] | [((238, 254), 'hwt.interfaces.utils.addClkRstn', 'addClkRstn', (['self'], {}), '(self)\n', (248, 254), False, 'from hwt.interfaces.utils import addClkRstn\n'), ((272, 285), 'hwt.interfaces.std.VectSignal', 'VectSignal', (['(8)'], {}), '(8)\n', (282, 285), False, 'from hwt.interfaces.std import VectSignal\n'), ((340, 353), 'hwt.interfaces.std.VectSignal', 'VectSignal', (['(8)'], {}), '(8)\n', (350, 353), False, 'from hwt.interfaces.std import VectSignal\n'), ((1583, 1596), 'hwt.synthesizer.utils.to_rtl_str', 'to_rtl_str', (['u'], {}), '(u)\n', (1593, 1596), False, 'from hwt.synthesizer.utils import to_rtl_str\n'), ((303, 316), 'hwt.interfaces.std.VectSignal', 'VectSignal', (['(8)'], {}), '(8)\n', (313, 316), False, 'from hwt.interfaces.std import VectSignal\n'), ((371, 384), 'hwt.interfaces.std.VectSignal', 'VectSignal', (['(8)'], {}), '(8)\n', (381, 384), False, 'from hwt.interfaces.std import VectSignal\n'), ((560, 594), 'hwt.hdl.types.struct.HStruct', 'HStruct', (["(t, 'a4')", "(t[2], 'a5_6')"], {}), "((t, 'a4'), (t[2], 'a5_6'))\n", (567, 594), False, 'from hwt.hdl.types.struct import HStruct\n')] |
import json
import re
from urllib.parse import urljoin
import scrapy
from ponnobot.items import ProductItem
class DarazSpider(scrapy.Spider):
name = "daraz"
allowed_domains = ['daraz.com.bd']
BASE_URL = 'https://www.daraz.com.bd'
# HEADERS = {
# 'authority': 'my.daraz.com.bd',
# 'pragma': 'no-cache',
# 'cache-control': 'no-cache',
# 'dnt': '1',
# 'origin': 'https://www.daraz.com.bd',
# 'referer': 'https://www.daraz.com.bd/',
# 'upgrade-insecure-requests': '1',
# 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36',
# 'accept': 'application/json, text/javascript',
# 'accept-encoding': 'gzip, deflate, br',
# 'content-type': 'application/x-www-form-urlencoded',
# 'sec-ch-ua': '"Google Chrome";v="89", "Chromium";v="89", ";Not A Brand";v="99"',
# 'sec-ch-ua-mobile': '?0',
# 'sec-fetch-site': 'same-site',
# 'sec-fetch-mode': 'cors',
# 'sec-fetch-dest': 'empty',
# 'accept-language': 'en-US,en;q=0.9,bn;q=0.8,hi;q=0.7',
# }
# HEADERS = {
# 'authority': 'my.daraz.com.bd',
# 'pragma': 'no-cache',
# 'cache-control': 'no-cache',
# 'dnt': '1',
# 'upgrade-insecure-requests': '1',
# 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.90 Safari/537.36',
# 'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
# 'sec-fetch-site': 'none',
# 'sec-fetch-mode': 'navigate',
# 'sec-fetch-dest': 'document',
# 'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8',
# }
def start_requests(self):
yield scrapy.Request(url=self.BASE_URL, callback=self.begin_parse)
def begin_parse(self, response):
urls = response.css('ul.lzd-site-menu-sub li.lzd-site-menu-sub-item > a::attr("href")').getall()
for url in urls:
url = "https:" + str(url)
yield scrapy.Request(url=url, callback=self.parse)
def parse(self, response, **kwargs):
"""
:param response:
:return: products and pagination callback
"""
""" parse products """
raw_product_list = re.compile(r'window.pageData=(.*)</script>').search(response.text)
product_list = json.loads(raw_product_list.group(1).strip())['mods']['listItems']
product_page_links = [urljoin(self.BASE_URL, product["thumbs"][0]['productUrl']) for product in product_list]
yield from response.follow_all(product_page_links, self.parse_product)
""" pagination """
try:
pagination_links = response.css('link[rel="next"] ::attr("href")').get()
yield response.follow(pagination_links, self.parse)
except IndexError as ie:
# logging.info(ie, logging.WARN)
print(ie)
except TypeError as te:
# logging.info(te, logging.WARN)
print(te)
except ValueError as ve:
print(ve)
def parse_product(self, response):
item = ProductItem()
raw_product_data = re.compile(r'app.run\((.*)\);').search(response.text)
product_json = json.loads(raw_product_data.group(1).strip())['data']['root']['fields']['skuInfos']['0']
# print(product_json,type(product_json))
# print(raw_product_data.group(1))
try:
item['vendor'] = self.name
item['product_url'] = response.url
item['name'] = product_json["dataLayer"]["pdt_name"]
item['image_url'] = product_json["image"]
item['price'] = int(float(product_json["price"]["salePrice"]["value"]))
item['in_stock'] = True if product_json["stock"] > 0 else False
except Exception as e:
print(e, response.url)
if item['name'] is not None:
item.save()
| [
"scrapy.Request",
"ponnobot.items.ProductItem",
"urllib.parse.urljoin",
"re.compile"
] | [((3268, 3281), 'ponnobot.items.ProductItem', 'ProductItem', ([], {}), '()\n', (3279, 3281), False, 'from ponnobot.items import ProductItem\n'), ((1886, 1946), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'self.BASE_URL', 'callback': 'self.begin_parse'}), '(url=self.BASE_URL, callback=self.begin_parse)\n', (1900, 1946), False, 'import scrapy\n'), ((2603, 2661), 'urllib.parse.urljoin', 'urljoin', (['self.BASE_URL', "product['thumbs'][0]['productUrl']"], {}), "(self.BASE_URL, product['thumbs'][0]['productUrl'])\n", (2610, 2661), False, 'from urllib.parse import urljoin\n'), ((2171, 2215), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'url', 'callback': 'self.parse'}), '(url=url, callback=self.parse)\n', (2185, 2215), False, 'import scrapy\n'), ((2416, 2459), 're.compile', 're.compile', (['"""window.pageData=(.*)</script>"""'], {}), "('window.pageData=(.*)</script>')\n", (2426, 2459), False, 'import re\n'), ((3309, 3341), 're.compile', 're.compile', (['"""app.run\\\\((.*)\\\\);"""'], {}), "('app.run\\\\((.*)\\\\);')\n", (3319, 3341), False, 'import re\n')] |
from gkdtex.wrap import parse
from gkdtex.interpreter import Interpreter, CBVFunction
from gkdtex.developer_utilities import *
import sys
src = r"""
\newcommand{\GKDCreateId}{\input{|"gkdmgr --op uuid --rt A"}}
\makeatletter
\newcommand*\GKDNewTemp[2]{
\@ifundefined{GKDTemp#1}{
\expandafter\newcommand\csname GKDTemp#1\endcsname{#2}
}{
\expandafter\renewcommand\csname GKDTemp#1\endcsname{#2}
}
}
\makeatother
\GKDNewTemp{ConstID}{\GKDCreateId}
\newcommand{\GKDSet}[2]{\input{|"gkdmgr --op set --rt \GKDTempConstID #1 #2"}}
\newcommand{\GKDGet}[1]{\input{|"gkdmgr --op get --rt \GKDTempConstID #1"}}
\newcommand{\GKDPush}[2]{\input{|"gkdmgr --op push --rt \GKDTempConstID #1 #2"}}
\newcommand{\GKDPop}[1]{\input{|"gkdmgr --op pop --rt \GKDTempConstID #1"}}
\newcommand{\GKDPyCall}[2]{\input{|"gkdmgr --op call --rt \GKDTempConstID #1 #2"}}
\makeatletter
\newenvironment{GKDBNF}[1]
{\VerbatimEnvironment
\GKDNewTemp{A}{#1}
\input{|"gkdmgr --op createDirFor --rt any ./gkdbnf/#1.bnf"}
\VerbatimOut{./gkdbnf/#1.bnf}
}%
{%
\endVerbatimOut%
\toks0{\immediate\write18}%
\begin{bnf*}
\input{|"gkdmgr --op bnf --rt any ./gkdbnf/\GKDTempA.bnf"}%
\end{bnf*}
}
\verb{a}
\makeatother
"""
body = parse(r"""$ #\1^{ #\1#1 } $""")
interpreter = Interpreter()
interpreter.filename = "a.tex"
interpreter.src = src
interpreter.globals['mk'] = CBVFunction([""], [None], dict(d=0), body)
def verb(a: Group, *, self: Interpreter, tex_print):
tex_print('<<')
tex_print(get_raw_from_span_params(self.src, a.offs))
tex_print('>>')
interpreter.globals['verb'] = verb
interpreter.interp(sys.stdout.write, parse(src, "a.tex"))
| [
"gkdtex.interpreter.Interpreter",
"gkdtex.wrap.parse"
] | [((1260, 1288), 'gkdtex.wrap.parse', 'parse', (['"""$ #\\\\1^{ #\\\\1#1 } $"""'], {}), "('$ #\\\\1^{ #\\\\1#1 } $')\n", (1265, 1288), False, 'from gkdtex.wrap import parse\n'), ((1307, 1320), 'gkdtex.interpreter.Interpreter', 'Interpreter', ([], {}), '()\n', (1318, 1320), False, 'from gkdtex.interpreter import Interpreter, CBVFunction\n'), ((1671, 1690), 'gkdtex.wrap.parse', 'parse', (['src', '"""a.tex"""'], {}), "(src, 'a.tex')\n", (1676, 1690), False, 'from gkdtex.wrap import parse\n')] |
"""
Start local development server
"""
import argparse
import logging
import shlex
import subprocess
import webbrowser
from contextlib import suppress
from http.server import HTTPServer, SimpleHTTPRequestHandler
from pathlib import Path
from ssl import wrap_socket
from tempfile import NamedTemporaryFile
from threading import Thread
from livereload.server import LogFormatter, Server
from watchdog.observers import Observer
from watchdog.tricks import ShellCommandTrick
import build
PARCEL_CLI = "./node_modules/.bin/parcel"
BUNDLER_COMMAND = f"{PARCEL_CLI} watch --no-hmr src/*.html"
LIVERELOAD_DELAY = 0.1
ROOT_DIR = "dist/"
PATHS_TO_WATCH_FOR_THEMATIQUES = (
"build.py",
"mistune_toc.py",
"contenus/meta/*.md",
"contenus/thematiques/*.md",
"templates/thematique.html",
)
PATHS_TO_WATCH_FOR_INDEX = (
"build.py",
"contenus/conseils/*.md",
"contenus/meta/*.md",
"contenus/questions/*.md",
"contenus/réponses/*.md",
"contenus/statuts/*.md",
"contenus/suivi/*.md",
"templates/index.html",
)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--address", default="0.0.0.0")
parser.add_argument("--port", type=int, default=None)
parser.add_argument("--ssl", action="store_true")
parser.add_argument("--ssl-cert", default="cert.pem")
parser.add_argument("--ssl-key", default="key.pem")
parser.add_argument("--open", action="store_true")
parser.add_argument("--watch", action="store_true")
return parser.parse_args()
def serve(address, port, open_, watch, ssl, ssl_cert, ssl_key, bundler_watch_filename):
if ssl:
return serve_https(
address=args.address,
port=args.port or 8443,
open_=args.open,
watch=args.watch,
ssl_cert=args.ssl_cert,
ssl_key=args.ssl_key,
)
else:
return serve_http(
address=args.address,
port=args.port or 5500,
open_=args.open,
watch=args.watch,
bundler_watch_filename=bundler_watch_filename,
)
class CustomServer(Server):
"""
Custom server with logger that decodes bytes in logs
"""
def _setup_logging(self):
super()._setup_logging()
logger = logging.getLogger("livereload")
formatter = self.BytesFormatter()
for handler in logger.handlers:
handler.setFormatter(formatter)
class BytesFormatter(LogFormatter):
def format(self, record):
if isinstance(record.msg, bytes):
with suppress(UnicodeDecodeError):
record.msg = record.msg.decode("utf-8")
return super().format(record)
def serve_http(address, port, open_, watch, bundler_watch_filename):
server = CustomServer()
if watch:
for path in PATHS_TO_WATCH_FOR_THEMATIQUES:
server.watch(path, build.thematiques, delay="forever")
for path in PATHS_TO_WATCH_FOR_INDEX:
server.watch(path, build.index, delay="forever")
server.watch(bundler_watch_filename, delay=LIVERELOAD_DELAY)
server.serve(
host=address,
port=port,
root=ROOT_DIR,
open_url_delay=0.1 if open_ else None,
)
def serve_https(address, port, open_, watch, ssl_cert, ssl_key):
class MyHTTPRequestHandler(SimpleHTTPRequestHandler):
def __init__(self, *args, **kwargs):
super().__init__(*args, directory=ROOT_DIR, **kwargs)
def log_request(self, *args, **kwargs):
pass
class BuildThematiquesEventHandler(ShellCommandTrick):
def __init__(self):
super().__init__(
shell_command="python3 build.py thematiques",
wait_for_process=True,
drop_during_process=True,
)
def on_any_event(self, event):
if event.event_type == "modified" and not event.is_directory:
super().on_any_event(event)
class BuildIndexEventHandler(ShellCommandTrick):
def __init__(self):
super().__init__(
shell_command="python3 build.py index",
wait_for_process=True,
drop_during_process=True,
)
def on_any_event(self, event):
if event.event_type == "modified" and not event.is_directory:
super().on_any_event(event)
if watch:
observer = Observer()
thematiques_handler = BuildThematiquesEventHandler()
for pattern in PATHS_TO_WATCH_FOR_THEMATIQUES:
directory = Path(pattern).parts[0]
observer.schedule(thematiques_handler, directory, recursive=True)
index_handler = BuildIndexEventHandler()
for pattern in PATHS_TO_WATCH_FOR_THEMATIQUES:
directory = Path(pattern).parts[0]
observer.schedule(index_handler, directory, recursive=True)
observer.start()
url = f"https://{address}:{port}/"
print(f"Listening on {url}")
if open_:
webbrowser.open(url)
logging.getLogger()
httpd = HTTPServer((address, port), MyHTTPRequestHandler)
httpd.socket = wrap_socket(
httpd.socket, certfile=ssl_cert, keyfile=ssl_key, server_side=True
)
httpd.serve_forever()
class BundlerThread(Thread):
def __init__(self, watch_file):
super().__init__()
self.watch_file = watch_file
self.daemon = True
def run(self):
proc = subprocess.Popen(shlex.split(BUNDLER_COMMAND), stdout=subprocess.PIPE)
while True:
for line_bytes in proc.stdout:
line = line_bytes.decode("utf-8")
print(line)
if line.startswith("✨ Built in"):
self.trigger_livereload()
def trigger_livereload(self):
self.watch_file.truncate(0)
if __name__ == "__main__":
args = parse_args()
with NamedTemporaryFile(delete=True) as bundler_watch_file:
bundler_thread = BundlerThread(watch_file=bundler_watch_file)
bundler_thread.start()
serve(
address=args.address,
port=args.port,
open_=args.open,
watch=args.watch,
ssl=args.ssl,
ssl_cert=args.ssl_cert,
ssl_key=args.ssl_key,
bundler_watch_filename=bundler_watch_file.name,
)
| [
"logging.getLogger",
"argparse.ArgumentParser",
"pathlib.Path",
"shlex.split",
"ssl.wrap_socket",
"webbrowser.open",
"http.server.HTTPServer",
"contextlib.suppress",
"tempfile.NamedTemporaryFile",
"watchdog.observers.Observer"
] | [((1082, 1107), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1105, 1107), False, 'import argparse\n'), ((5076, 5095), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (5093, 5095), False, 'import logging\n'), ((5108, 5157), 'http.server.HTTPServer', 'HTTPServer', (['(address, port)', 'MyHTTPRequestHandler'], {}), '((address, port), MyHTTPRequestHandler)\n', (5118, 5157), False, 'from http.server import HTTPServer, SimpleHTTPRequestHandler\n'), ((5177, 5256), 'ssl.wrap_socket', 'wrap_socket', (['httpd.socket'], {'certfile': 'ssl_cert', 'keyfile': 'ssl_key', 'server_side': '(True)'}), '(httpd.socket, certfile=ssl_cert, keyfile=ssl_key, server_side=True)\n', (5188, 5256), False, 'from ssl import wrap_socket\n'), ((2290, 2321), 'logging.getLogger', 'logging.getLogger', (['"""livereload"""'], {}), "('livereload')\n", (2307, 2321), False, 'import logging\n'), ((4451, 4461), 'watchdog.observers.Observer', 'Observer', ([], {}), '()\n', (4459, 4461), False, 'from watchdog.observers import Observer\n'), ((5050, 5070), 'webbrowser.open', 'webbrowser.open', (['url'], {}), '(url)\n', (5065, 5070), False, 'import webbrowser\n'), ((5933, 5964), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'delete': '(True)'}), '(delete=True)\n', (5951, 5964), False, 'from tempfile import NamedTemporaryFile\n'), ((5507, 5535), 'shlex.split', 'shlex.split', (['BUNDLER_COMMAND'], {}), '(BUNDLER_COMMAND)\n', (5518, 5535), False, 'import shlex\n'), ((2590, 2618), 'contextlib.suppress', 'suppress', (['UnicodeDecodeError'], {}), '(UnicodeDecodeError)\n', (2598, 2618), False, 'from contextlib import suppress\n'), ((4603, 4616), 'pathlib.Path', 'Path', (['pattern'], {}), '(pattern)\n', (4607, 4616), False, 'from pathlib import Path\n'), ((4833, 4846), 'pathlib.Path', 'Path', (['pattern'], {}), '(pattern)\n', (4837, 4846), False, 'from pathlib import Path\n')] |
from django.conf.urls import url
from .views import tracon2022_afterparty_participants_view, tracon2022_afterparty_summary_view
urlpatterns = [
url(
r'^events/(?P<event_slug>tracon2022)/labour/surveys/kaatoilmo/results.xlsx$',
tracon2022_afterparty_participants_view,
name='tracon2022_afterparty_participants_view',
),
url(
r'^events/(?P<event_slug>tracon2022)/labour/surveys/kaatoilmo/summary/?$',
tracon2022_afterparty_summary_view,
name='tracon2022_afterparty_summary_view',
),
]
| [
"django.conf.urls.url"
] | [((151, 330), 'django.conf.urls.url', 'url', (['"""^events/(?P<event_slug>tracon2022)/labour/surveys/kaatoilmo/results.xlsx$"""', 'tracon2022_afterparty_participants_view'], {'name': '"""tracon2022_afterparty_participants_view"""'}), "('^events/(?P<event_slug>tracon2022)/labour/surveys/kaatoilmo/results.xlsx$'\n , tracon2022_afterparty_participants_view, name=\n 'tracon2022_afterparty_participants_view')\n", (154, 330), False, 'from django.conf.urls import url\n'), ((359, 524), 'django.conf.urls.url', 'url', (['"""^events/(?P<event_slug>tracon2022)/labour/surveys/kaatoilmo/summary/?$"""', 'tracon2022_afterparty_summary_view'], {'name': '"""tracon2022_afterparty_summary_view"""'}), "('^events/(?P<event_slug>tracon2022)/labour/surveys/kaatoilmo/summary/?$',\n tracon2022_afterparty_summary_view, name=\n 'tracon2022_afterparty_summary_view')\n", (362, 524), False, 'from django.conf.urls import url\n')] |
import re
import unittest
from faker import Faker
from faker.providers.bank.ru_RU import Provider as RuBank
class TestCreditCardProvider(unittest.TestCase):
def setUp(self):
self.fake = Faker(locale='en_US')
Faker.seed(0)
self.provider = self.fake.provider('faker.providers.credit_card')
self.mastercard_pattern = r'^(?:5[1-5][0-9]{2}|222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}$'
self.visa_pattern = r'^4[0-9]{12}([0-9]{3}){0,2}$'
self.discover_pattern = r'^6(?:011|5[0-9]{2})[0-9]{12}$'
self.diners_club_pattern = r'^3(?:0[0-5]|[68][0-9])[0-9]{11}$'
self.jcb_pattern = r'^(?:2131|1800|35\d{3})\d{11}$'
def test_mastercard(self):
for prefix in self.provider.prefix_mastercard:
number = self.provider._generate_number(prefix, 16)
assert re.match(self.mastercard_pattern, number)
def test_visa13(self):
for prefix in self.provider.prefix_visa:
number = self.provider._generate_number(prefix, 13)
assert re.match(self.visa_pattern, number)
def test_visa16(self):
for prefix in self.provider.prefix_visa:
number = self.provider._generate_number(prefix, 16)
assert re.match(self.visa_pattern, number)
def test_visa19(self):
for prefix in self.provider.prefix_visa:
number = self.provider._generate_number(prefix, 19)
assert re.match(self.visa_pattern, number)
def test_discover(self):
for prefix in self.provider.prefix_discover:
number = self.provider._generate_number(prefix, 16)
assert re.match(self.discover_pattern, number)
def test_diners_club(self):
for prefix in self.provider.prefix_diners:
number = self.provider._generate_number(prefix, 14)
assert re.match(self.diners_club_pattern, number)
def test_jcb16(self):
for prefix in self.provider.prefix_jcb16:
number = self.provider._generate_number(prefix, 16)
assert re.match(self.jcb_pattern, number)
def test_jcb15(self):
for prefix in self.provider.prefix_jcb15:
number = self.provider._generate_number(prefix, 15)
assert re.match(self.jcb_pattern, number)
class TestRuRu(unittest.TestCase):
""" Tests credit card in the ru_RU locale """
def setUp(self):
self.fake = Faker('ru_RU')
Faker.seed(0)
self.visa_pattern = r'^4[0-9]{15}$'
self.mastercard_pattern = r'^(?:5[1-5][0-9]{2}|222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[01][0-9]|2720)[0-9]{12}$'
self.mir_pattern = r'^220[0-4][0-9]{12}$'
self.maestro_pattern = r'^50|5[6-9]|6[0-9][0-9]{14}$'
self.amex_pattern = r'^3[4|7][0-9]{13}$'
self.unionpay_pattern = r'^62|81[0-9]{14}$'
def test_visa(self):
number = self.fake.credit_card_number('visa')
assert re.match(self.visa_pattern, number)
def test_mastercard(self):
number = self.fake.credit_card_number('mastercard')
assert re.match(self.mastercard_pattern, number)
def test_mir(self):
number = self.fake.credit_card_number('mir')
assert re.match(self.mir_pattern, number)
def test_maestro(self):
number = self.fake.credit_card_number('maestro')
assert re.match(self.maestro_pattern, number)
def test_amex(self):
number = self.fake.credit_card_number('amex')
assert re.match(self.amex_pattern, number)
def test_unionpay(self):
number = self.fake.credit_card_number('unionpay')
assert re.match(self.unionpay_pattern, number)
def test_owner(self):
card_data = self.fake.credit_card_full().split('\n')
assert re.match('[A-Za-z]+', card_data[1])
def test_issuer(self):
card_data = self.fake.credit_card_full().split('\n')
assert card_data[4] in RuBank.banks
| [
"faker.Faker",
"faker.Faker.seed",
"re.match"
] | [((202, 223), 'faker.Faker', 'Faker', ([], {'locale': '"""en_US"""'}), "(locale='en_US')\n", (207, 223), False, 'from faker import Faker\n'), ((232, 245), 'faker.Faker.seed', 'Faker.seed', (['(0)'], {}), '(0)\n', (242, 245), False, 'from faker import Faker\n'), ((2431, 2445), 'faker.Faker', 'Faker', (['"""ru_RU"""'], {}), "('ru_RU')\n", (2436, 2445), False, 'from faker import Faker\n'), ((2454, 2467), 'faker.Faker.seed', 'Faker.seed', (['(0)'], {}), '(0)\n', (2464, 2467), False, 'from faker import Faker\n'), ((2941, 2976), 're.match', 're.match', (['self.visa_pattern', 'number'], {}), '(self.visa_pattern, number)\n', (2949, 2976), False, 'import re\n'), ((3084, 3125), 're.match', 're.match', (['self.mastercard_pattern', 'number'], {}), '(self.mastercard_pattern, number)\n', (3092, 3125), False, 'import re\n'), ((3219, 3253), 're.match', 're.match', (['self.mir_pattern', 'number'], {}), '(self.mir_pattern, number)\n', (3227, 3253), False, 'import re\n'), ((3355, 3393), 're.match', 're.match', (['self.maestro_pattern', 'number'], {}), '(self.maestro_pattern, number)\n', (3363, 3393), False, 'import re\n'), ((3489, 3524), 're.match', 're.match', (['self.amex_pattern', 'number'], {}), '(self.amex_pattern, number)\n', (3497, 3524), False, 'import re\n'), ((3628, 3667), 're.match', 're.match', (['self.unionpay_pattern', 'number'], {}), '(self.unionpay_pattern, number)\n', (3636, 3667), False, 'import re\n'), ((3771, 3806), 're.match', 're.match', (['"""[A-Za-z]+"""', 'card_data[1]'], {}), "('[A-Za-z]+', card_data[1])\n", (3779, 3806), False, 'import re\n'), ((866, 907), 're.match', 're.match', (['self.mastercard_pattern', 'number'], {}), '(self.mastercard_pattern, number)\n', (874, 907), False, 'import re\n'), ((1068, 1103), 're.match', 're.match', (['self.visa_pattern', 'number'], {}), '(self.visa_pattern, number)\n', (1076, 1103), False, 'import re\n'), ((1264, 1299), 're.match', 're.match', (['self.visa_pattern', 'number'], {}), '(self.visa_pattern, number)\n', 
(1272, 1299), False, 'import re\n'), ((1460, 1495), 're.match', 're.match', (['self.visa_pattern', 'number'], {}), '(self.visa_pattern, number)\n', (1468, 1495), False, 'import re\n'), ((1662, 1701), 're.match', 're.match', (['self.discover_pattern', 'number'], {}), '(self.discover_pattern, number)\n', (1670, 1701), False, 'import re\n'), ((1869, 1911), 're.match', 're.match', (['self.diners_club_pattern', 'number'], {}), '(self.diners_club_pattern, number)\n', (1877, 1911), False, 'import re\n'), ((2072, 2106), 're.match', 're.match', (['self.jcb_pattern', 'number'], {}), '(self.jcb_pattern, number)\n', (2080, 2106), False, 'import re\n'), ((2267, 2301), 're.match', 're.match', (['self.jcb_pattern', 'number'], {}), '(self.jcb_pattern, number)\n', (2275, 2301), False, 'import re\n')] |
# ---- Python VM startup script for listener LISTENER_3_from_1 ----
import SSL_listener

# Endpoint this VNF listens on (TLS server side).
incomingIP = "localhost"
incomingPort = 10031
incomingPrivateKeyFile = "server.key"
incomingPublicKeyFile = "server.crt"

# Downstream endpoint configuration (not used by this listener).
outgoingIP = "localhost"
outgoingPort = 0
outgoingPublicKeyFile = "server.crt"

def startLISTENER_3_from_1():
    """Instantiate the SSL listener VNF for this node's incoming endpoint."""
    listener_vnf = SSL_listener.SSL_listener(
        incomingIP, incomingPort, incomingPrivateKeyFile, incomingPublicKeyFile, ""
    )
# -------
| [
"SSL_listener.SSL_listener"
] | [((329, 435), 'SSL_listener.SSL_listener', 'SSL_listener.SSL_listener', (['incomingIP', 'incomingPort', 'incomingPrivateKeyFile', 'incomingPublicKeyFile', '""""""'], {}), "(incomingIP, incomingPort, incomingPrivateKeyFile,\n incomingPublicKeyFile, '')\n", (354, 435), False, 'import SSL_listener\n')] |
"""
WSGI config for portfolify project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
import sys
from django.core.wsgi import get_wsgi_application
# only for dev/test
from dotenv import load_dotenv
TESTING = "test" in sys.argv
load_dotenv(".env.backend.test" if TESTING else ".env.backend.local")
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'portfolify.settings')
application = get_wsgi_application()
| [
"os.environ.setdefault",
"django.core.wsgi.get_wsgi_application",
"dotenv.load_dotenv"
] | [((379, 448), 'dotenv.load_dotenv', 'load_dotenv', (["('.env.backend.test' if TESTING else '.env.backend.local')"], {}), "('.env.backend.test' if TESTING else '.env.backend.local')\n", (390, 448), False, 'from dotenv import load_dotenv\n'), ((450, 520), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""portfolify.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'portfolify.settings')\n", (471, 520), False, 'import os\n'), ((536, 558), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (556, 558), False, 'from django.core.wsgi import get_wsgi_application\n')] |
import tm1637
import machine
import utime
# 4-digit 7-segment TM1637 display driven over two GPIO pins (CLK=3, DIO=2).
disp = tm1637.TM1637(clk=machine.Pin(3), dio=machine.Pin(2))
# ADC channel 4 — on an RP2040 this is the internal temperature sensor
# channel; presumably targeting a Raspberry Pi Pico — TODO confirm board.
adc = machine.ADC(4)
def display_mv(timer):
    # Timer callback: average 50 ADC samples, convert the 16-bit reading to
    # millivolts (3300 mV full scale / 65535 counts), and show the result.
    global adc, disp
    mv = 0
    N = 50
    for k in range(N):
        mv += 3300*adc.read_u16()/65535/N
    disp.number(int(mv))
# Refresh the display twice per second, indefinitely.
machine.Timer(freq=2, mode=machine.Timer.PERIODIC, callback=display_mv)
| [
"machine.Timer",
"machine.Pin",
"machine.ADC"
] | [((111, 125), 'machine.ADC', 'machine.ADC', (['(4)'], {}), '(4)\n', (122, 125), False, 'import machine\n'), ((284, 355), 'machine.Timer', 'machine.Timer', ([], {'freq': '(2)', 'mode': 'machine.Timer.PERIODIC', 'callback': 'display_mv'}), '(freq=2, mode=machine.Timer.PERIODIC, callback=display_mv)\n', (297, 355), False, 'import machine\n'), ((69, 83), 'machine.Pin', 'machine.Pin', (['(3)'], {}), '(3)\n', (80, 83), False, 'import machine\n'), ((89, 103), 'machine.Pin', 'machine.Pin', (['(2)'], {}), '(2)\n', (100, 103), False, 'import machine\n')] |
#
# journal/listener.py
#
# futaba - A Discord Mod bot for the Programming server
# Copyright (c) 2017-2020 <NAME>, <NAME>, jackylam5
#
# futaba is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
import logging
from abc import abstractmethod
from pathlib import PurePath
logger = logging.getLogger(__name__)
__all__ = ["Listener"]
class Listener:
    """A journal event listener bound to a path prefix on a router."""

    def __init__(self, router, path, recursive=True):
        """Store the router, normalise the path, and remember recursion mode."""
        self.router = router
        self.path = PurePath(path)
        self.recursive = recursive

    def check(self, path, guild, content, attributes):
        """Return True when this listener should receive the journal entry."""
        if not self.filter(path, guild, content, attributes):
            logger.debug("Filter rejected journal entry")
            return False

        if not self.recursive and self.path != path:
            logger.debug("Ignoring non-recursive listener")
            return False

        return True

    # Default implementation deliberately accepts every event; subclasses
    # may override to narrow it down.
    # pylint: disable=no-self-use
    def filter(self, path, guild, content, attributes):
        """Overridable hook for rejecting events before they are handled."""
        return True

    @abstractmethod
    async def handle(self, path, guild, content, attributes):
        """Process the event; concrete subclasses decide how."""
| [
"logging.getLogger",
"pathlib.PurePath"
] | [((488, 515), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (505, 515), False, 'import logging\n'), ((661, 675), 'pathlib.PurePath', 'PurePath', (['path'], {}), '(path)\n', (669, 675), False, 'from pathlib import PurePath\n')] |
from django.http import Http404
from django.core import exceptions
from rest_framework import status
from rest_framework.generics import RetrieveUpdateAPIView
from rest_framework.permissions import (
IsAuthenticatedOrReadOnly, IsAuthenticated
)
from rest_framework.response import Response
from rest_framework.views import APIView
from ..models import Article
from ..renderers import ArticleJSONRenderer
from ..serializers import (
ArticleSerializer
)
from ..exceptions import ArticleDoesNotExist
from django.db.models import Avg
from django.shortcuts import render, get_object_or_404
class ArticleFavouriteAPIView(APIView):
    """Favourite or unfavourite an article on behalf of the requesting user."""

    permission_classes = (IsAuthenticated,)
    renderer_classes = (ArticleJSONRenderer,)
    serializer_class = ArticleSerializer

    def post(self, request, slug):
        """Mark the article identified by ``slug`` as a favourite."""
        profile = request.user.profile
        article = self.get_article(slug)
        profile.favourite(article)
        serializer = self.serializer_class(article, context={'request': request})
        return Response(serializer.data, status=status.HTTP_201_CREATED)

    def delete(self, request, slug):
        """Remove the article identified by ``slug`` from favourites."""
        profile = request.user.profile
        article = self.get_article(slug)
        profile.unfavourite(article)
        serializer = self.serializer_class(article, context={'request': request})
        return Response(serializer.data, status=status.HTTP_200_OK)

    def get_article(self, slug):
        """Resolve ``slug`` to an Article, raising HTTP 404 when absent."""
        try:
            return Article.objects.get(slug=slug)
        except Article.DoesNotExist:
            raise Http404
| [
"rest_framework.response.Response"
] | [((1073, 1130), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (1081, 1130), False, 'from rest_framework.response import Response\n'), ((1434, 1486), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_200_OK'}), '(serializer.data, status=status.HTTP_200_OK)\n', (1442, 1486), False, 'from rest_framework.response import Response\n')] |
# Import Blender's Python API; when this module is loaded outside Blender
# (e.g. by packaging tools) the import fails and the add-on body is skipped.
try:
    import bpy
    from bpy.types import WindowManager, AddonPreferences
    from bpy.props import StringProperty, EnumProperty
    in_blender = True
except ImportError as e:
    in_blender = False
if in_blender:
    from batchd import client
    # Lazily created batchd API client shared by all operators and callbacks.
    batchd_client = None
    # Cached EnumProperty item lists, filled on first access.
    batchd_queues = []
    batchd_types = []
    def get_preferences():
        # Look up this add-on's preferences in Blender's user preferences.
        return bpy.context.user_preferences.addons.get("batchd").preferences
    def get_batchd_client(context):
        # Return the cached client, creating it from the stored preferences
        # (manager URL and credentials) on first use.
        global batchd_client
        if batchd_client is not None:
            return batchd_client
        addon = get_preferences()
        batchd_client = client.Client(addon.manager_url, addon.username, addon.password)
        return batchd_client
    def queues_from_batchd(self, context):
        # EnumProperty items callback: fetch the queue list from the batchd
        # manager once and cache (identifier, name, description) tuples.
        global batchd_queues
        if len(batchd_queues) > 0 or context is None:
            return batchd_queues
        c = get_batchd_client(context)
        for queue in c.get_queues():
            name = queue.get('name', None)
            title = queue.get('title', name)
            batchd_queues.append((name, title, title))
        return batchd_queues
    def types_from_batchd(self, context):
        # EnumProperty items callback: fetch and cache the job type list.
        global batchd_types
        if len(batchd_types) > 0 or context is None:
            print("types: {}, context: {}".format(batchd_types, context))
            return batchd_types
        c = get_batchd_client(context)
        # NOTE(review): the loop variable "type" shadows the builtin here.
        for type in c.get_job_types():
            name = type.get('name')
            title = type.get('title', name)
            if not title:
                title = name
            batchd_types.append((name, title, title))
        print(batchd_types)
        return batchd_types
    class SettingsPanel(bpy.types.AddonPreferences):
        # Add-on preferences: batchd manager location, credentials, and the
        # queue/job type used when submitting renders.
        bl_label = "Batchd settings"
        bl_idname = __package__
        manager_url = StringProperty(
            name = "batchd manager URL",
            default = "http://localhost:9681")
        batchd_queue = EnumProperty(name="Queue", items = queues_from_batchd)
        job_type_name = EnumProperty(name="batchd job type", items = types_from_batchd)
        username = StringProperty(name="batchd user name")
        password = StringProperty(name="<PASSWORD>", subtype="PASSWORD")
        def draw(self, context):
            # Render the preference fields in the add-on settings UI.
            layout = self.layout
            layout.prop(self, "manager_url")
            layout.prop(self, "username")
            layout.prop(self, "password")
            layout.prop(self, "batchd_queue")
            layout.prop(self, "job_type_name")
    class EnqueuePanel(bpy.types.Panel):
        # Panel in Properties > Render exposing the "Submit to batchd" button.
        bl_label = "Submit to batchd"
        bl_idname = "batchd.enqueue.panel"
        bl_space_type = "PROPERTIES"
        bl_context = "render"
        bl_region_type = "WINDOW"
        def draw(self, context):
            layout = self.layout
            wm = context.window_manager
            layout.operator("batchd.enqueue")
    class EnqueueOperator(bpy.types.Operator):
        # Operator that packs the current .blend and submits a render job.
        bl_label = "Submit to batchd"
        bl_idname = "batchd.enqueue"
        def execute(self, context):
            wm = context.window_manager
            # Pack external resources into the .blend so the render node
            # only needs this single file.
            bpy.ops.file.pack_all()
            current_file = bpy.data.filepath
            target_file = bpy.path.abspath(bpy.context.scene.render.filepath)
            job_type_name = get_preferences().job_type_name
            queue_name = get_preferences().batchd_queue
            c = get_batchd_client(context)
            # NOTE(review): only frame 1 is submitted — confirm whether
            # animations are expected to enqueue one job per frame.
            params = dict(input=current_file, output=target_file, frame="1")
            c.do_enqueue(queue_name, job_type_name, params)
            return {'FINISHED'}
    def register():
        # Standard Blender add-on registration hooks.
        bpy.utils.register_class(SettingsPanel)
        bpy.utils.register_class(EnqueueOperator)
        bpy.utils.register_class(EnqueuePanel)
    def unregister():
        bpy.utils.unregister_class(EnqueuePanel)
        bpy.utils.unregister_class(EnqueueOperator)
        bpy.utils.unregister_class(SettingsPanel)
    if __name__ == "__main__":
        register()
| [
"bpy.utils.unregister_class",
"bpy.props.StringProperty",
"bpy.path.abspath",
"batchd.client.Client",
"bpy.props.EnumProperty",
"bpy.ops.file.pack_all",
"bpy.utils.register_class",
"bpy.context.user_preferences.addons.get"
] | [((622, 686), 'batchd.client.Client', 'client.Client', (['addon.manager_url', 'addon.username', 'addon.password'], {}), '(addon.manager_url, addon.username, addon.password)\n', (635, 686), False, 'from batchd import client\n'), ((1829, 1903), 'bpy.props.StringProperty', 'StringProperty', ([], {'name': '"""batchd manager URL"""', 'default': '"""http://localhost:9681"""'}), "(name='batchd manager URL', default='http://localhost:9681')\n", (1843, 1903), False, 'from bpy.props import StringProperty, EnumProperty\n'), ((1965, 2017), 'bpy.props.EnumProperty', 'EnumProperty', ([], {'name': '"""Queue"""', 'items': 'queues_from_batchd'}), "(name='Queue', items=queues_from_batchd)\n", (1977, 2017), False, 'from bpy.props import StringProperty, EnumProperty\n'), ((2044, 2105), 'bpy.props.EnumProperty', 'EnumProperty', ([], {'name': '"""batchd job type"""', 'items': 'types_from_batchd'}), "(name='batchd job type', items=types_from_batchd)\n", (2056, 2105), False, 'from bpy.props import StringProperty, EnumProperty\n'), ((2127, 2166), 'bpy.props.StringProperty', 'StringProperty', ([], {'name': '"""batchd user name"""'}), "(name='batchd user name')\n", (2141, 2166), False, 'from bpy.props import StringProperty, EnumProperty\n'), ((2186, 2239), 'bpy.props.StringProperty', 'StringProperty', ([], {'name': '"""<PASSWORD>"""', 'subtype': '"""PASSWORD"""'}), "(name='<PASSWORD>', subtype='PASSWORD')\n", (2200, 2239), False, 'from bpy.props import StringProperty, EnumProperty\n'), ((3628, 3667), 'bpy.utils.register_class', 'bpy.utils.register_class', (['SettingsPanel'], {}), '(SettingsPanel)\n', (3652, 3667), False, 'import bpy\n'), ((3676, 3717), 'bpy.utils.register_class', 'bpy.utils.register_class', (['EnqueueOperator'], {}), '(EnqueueOperator)\n', (3700, 3717), False, 'import bpy\n'), ((3726, 3764), 'bpy.utils.register_class', 'bpy.utils.register_class', (['EnqueuePanel'], {}), '(EnqueuePanel)\n', (3750, 3764), False, 'import bpy\n'), ((3796, 3836), 'bpy.utils.unregister_class', 
'bpy.utils.unregister_class', (['EnqueuePanel'], {}), '(EnqueuePanel)\n', (3822, 3836), False, 'import bpy\n'), ((3845, 3888), 'bpy.utils.unregister_class', 'bpy.utils.unregister_class', (['EnqueueOperator'], {}), '(EnqueueOperator)\n', (3871, 3888), False, 'import bpy\n'), ((3897, 3938), 'bpy.utils.unregister_class', 'bpy.utils.unregister_class', (['SettingsPanel'], {}), '(SettingsPanel)\n', (3923, 3938), False, 'import bpy\n'), ((363, 412), 'bpy.context.user_preferences.addons.get', 'bpy.context.user_preferences.addons.get', (['"""batchd"""'], {}), "('batchd')\n", (402, 412), False, 'import bpy\n'), ((3121, 3144), 'bpy.ops.file.pack_all', 'bpy.ops.file.pack_all', ([], {}), '()\n', (3142, 3144), False, 'import bpy\n'), ((3216, 3267), 'bpy.path.abspath', 'bpy.path.abspath', (['bpy.context.scene.render.filepath'], {}), '(bpy.context.scene.render.filepath)\n', (3232, 3267), False, 'import bpy\n')] |
from __future__ import unicode_literals
import os, sys, subprocess, ast
from nbconvert.preprocessors import Preprocessor
from holoviews.core import Dimensioned, Store
from holoviews.ipython.preprocessors import OptsMagicProcessor, OutputMagicProcessor
from holoviews.ipython.preprocessors import StripMagicsProcessor
from holoviews.util.command import export_to_python
import tempfile
import matplotlib.pyplot as plt
plt.switch_backend('agg')
def comment_out_magics(source):
    """
    Comment out IPython magic lines so the AST parser does not choke on
    unrecognized magics.
    """
    lines = []
    for raw in source.splitlines():
        prefix = '# ' if raw.strip().startswith('%') else ''
        lines.append(prefix + raw)
    return '\n'.join(lines)
def wrap_cell_expression(source, template='{expr}'):
    """
    If a cell ends in an expression that could be displaying a HoloViews
    object (as determined using the AST), wrap it with a given prefix
    and suffix string.
    If the cell doesn't end in an expression, return the source unchanged.
    """
    # AST node types whose value, when last in a cell, may produce output.
    cell_output_types = (ast.IfExp, ast.BoolOp, ast.BinOp, ast.Call,
                         ast.Name, ast.Attribute)
    try:
        node = ast.parse(comment_out_magics(source))
    except SyntaxError:
        return source
    filtered = source.splitlines()
    if node.body != []:
        last_expr = node.body[-1]
        if not isinstance(last_expr, ast.Expr):
            pass # Not an expression
        elif isinstance(last_expr.value, cell_output_types):
            # CAREFUL WITH UTF8!
            # Split the line containing the expression at its column offset
            # so only the trailing expression itself is wrapped.
            expr_end_slice = filtered[last_expr.lineno-1][:last_expr.col_offset]
            expr_start_slice = filtered[last_expr.lineno-1][last_expr.col_offset:]
            start = '\n'.join(filtered[:last_expr.lineno-1]
                              + ([expr_end_slice] if expr_end_slice else []))
            ending = '\n'.join(([expr_start_slice] if expr_start_slice else [])
                               + filtered[last_expr.lineno:])
            # A trailing semicolon suppresses display, so leave the cell alone.
            if ending.strip().endswith(';'):
                return source
            # BUG!! Adds newline for 'foo'; <expr>
            return start + '\n' + template.format(expr=ending)
    return source
def strip_specific_magics(source, magic):
    """
    Given the source of a cell, filter out a specific line/cell magic while
    keeping the rest of each line.

    Fixes two defects in the previous implementation:

    * a ``%magic`` line was appended twice (once stripped, once verbatim)
      because the two prefix checks were independent ``if`` statements;
    * ``str.lstrip`` strips a *character set*, not a prefix, so it could eat
      leading characters of the payload (e.g. ``%time t = 1`` -> ``= 1``).
    """
    line_magic = f'%{magic}'
    cell_magic = f'%%{magic}'
    filtered = []
    for line in source.splitlines():
        if line.startswith(cell_magic):
            filtered.append(line[len(cell_magic):].strip(' '))
        elif line.startswith(line_magic):
            filtered.append(line[len(line_magic):].strip(' '))
        else:
            filtered.append(line)
    return '\n'.join(filtered)
class StripTimeMagicsProcessor(Preprocessor):
    """
    Preprocessor to convert notebooks to Python source strips out just time
    magics while keeping the rest of the cell.
    """
    def preprocess_cell(self, cell, resources, index):
        # Only code cells can contain magics; other cell types pass through.
        if cell['cell_type'] == 'code':
            cell['source'] = strip_specific_magics(cell['source'], 'time')
        return cell, resources
    def __call__(self, nb, resources): return self.preprocess(nb,resources)
def strip_trailing_semicolons(source, function):
    """
    Drop the trailing semicolon from lines that end with a call to the named
    function (e.g. ``obj.servable();`` becomes ``obj.servable()``).
    """
    suffix = f'{function}();'
    out = []
    for line in source.splitlines():
        out.append(line[:-1] if line.endswith(suffix) else line)
    return '\n'.join(out)
class StripServableSemicolonsProcessor(Preprocessor):
    """
    Preprocessor to convert notebooks to Python source strips out just semicolons
    that come after the servable function call.
    """
    def preprocess_cell(self, cell, resources, index):
        # Only code cells are rewritten; other cell types pass through.
        if cell['cell_type'] == 'code':
            cell['source'] = strip_trailing_semicolons(cell['source'], 'servable')
        return cell, resources
    def __call__(self, nb, resources): return self.preprocess(nb,resources)
def thumbnail(obj, basename):
    """Save ``obj`` as ``basename``.png (when renderable and not already
    present) and return it unchanged, so this call can wrap a notebook
    cell's final expression without altering its output."""
    import os  # NOTE(review): redundant — 'os' is already imported at module level
    if isinstance(obj, Dimensioned) and not os.path.isfile(basename+'.png'):
        Store.renderers[Store.current_backend].save(obj, basename, fmt='png')
    elif 'panel' in sys.modules:
        # Panel is optional; only attempt its save path when it is imported.
        from panel.viewable import Viewable
        if isinstance(obj, Viewable) and not os.path.isfile(basename+'.png'):
            obj.save(basename+'.png')
    return obj
class ThumbnailProcessor(Preprocessor):
    """Preprocessor that wraps the final expression of each code cell in a
    thumbnailer call, so executing the converted source saves a thumbnail."""
    def __init__(self, basename, **kwargs):
        # basename: output path (without extension) for the thumbnail image.
        self.basename = basename
        super(ThumbnailProcessor, self).__init__(**kwargs)
    def preprocess_cell(self, cell, resources, index):
        if cell['cell_type'] == 'code':
            # The template imports the thumbnailer and applies it to the
            # cell's trailing expression via wrap_cell_expression.
            template = 'from nbsite.gallery.thumbnailer import thumbnail;thumbnail({{expr}}, {basename!r})'
            cell['source'] = wrap_cell_expression(cell['source'],
                                                 template.format(
                                                     basename=self.basename))
        return cell, resources
    def __call__(self, nb, resources): return self.preprocess(nb,resources)
def execute(code, cwd, env):
    """Write ``code`` (bytes) to a temporary file, run it with ``python`` in
    ``cwd`` under environment ``env``, and return the child's exit code.

    NOTE(review): delete=True keeps the file open while the child runs; on
    Windows the child cannot reopen an open temp file — confirm this tool
    only targets POSIX.
    """
    with tempfile.NamedTemporaryFile('wb', delete=True) as f:
        f.write(code)
        f.flush()  # ensure the child sees the full script before it starts
        proc = subprocess.Popen(['python', f.name], cwd=cwd, env=env)
        proc.wait()
        return proc.returncode
def notebook_thumbnail(filename, subpath):
    """Return Python source which, when executed, runs the notebook
    ``filename`` and saves a thumbnail under ``<subpath>/thumbnails/``."""
    basename = os.path.splitext(os.path.basename(filename))[0]
    dir_path = os.path.abspath(os.path.join(subpath, 'thumbnails'))
    absdirpath= os.path.abspath(os.path.join('.', dir_path))
    # Create the thumbnails directory on demand.
    if not os.path.exists(absdirpath):
        os.makedirs(absdirpath)
    # Strip magics/semicolons first, then append the thumbnailer wrapper.
    preprocessors = [OptsMagicProcessor(),
                     OutputMagicProcessor(),
                     StripTimeMagicsProcessor(),
                     StripServableSemicolonsProcessor(),
                     StripMagicsProcessor(),
                     ThumbnailProcessor(os.path.abspath(os.path.join(dir_path, basename)))]
    return export_to_python(filename, preprocessors)
if __name__ == '__main__':
    # Usage: python thumbnailer.py <notebook or directory under examples/>
    files = []
    abspath = os.path.abspath(sys.argv[1])
    split_path = abspath.split(os.path.sep)
    if os.path.isdir(abspath):
        if 'examples' not in split_path:
            print('Can only thumbnail notebooks in examples/')
            sys.exit()
        # Path of the target relative to the examples/ directory.
        subpath = os.path.sep.join(split_path[split_path.index('examples')+1:])
        files = [os.path.join(abspath, f) for f in os.listdir(abspath)
                 if f.endswith('.ipynb')]
    elif os.path.isfile(abspath):
        subpath = os.path.sep.join(split_path[split_path.index('examples')+1:-1])
        files = [abspath]
    else:
        print('Path {path} does not exist'.format(path=abspath))
    for f in files:
        # Fixed: the original messages had no '{filename}' placeholder, so
        # .format(filename=f) never interpolated the file name.
        print('Generating thumbnail for file {filename}'.format(filename=f))
        code = notebook_thumbnail(f, subpath)
        try:
            execute(code.encode('utf8'), cwd=os.path.split(f)[0], env={})
        except Exception as e:
            print('Failed to generate thumbnail for {filename}'.format(filename=f))
            print(str(e))
| [
"os.path.exists",
"os.listdir",
"sys.exit",
"os.makedirs",
"holoviews.util.command.export_to_python",
"subprocess.Popen",
"holoviews.ipython.preprocessors.OptsMagicProcessor",
"holoviews.ipython.preprocessors.StripMagicsProcessor",
"matplotlib.pyplot.switch_backend",
"os.path.join",
"holoviews.i... | [((419, 444), 'matplotlib.pyplot.switch_backend', 'plt.switch_backend', (['"""agg"""'], {}), "('agg')\n", (437, 444), True, 'import matplotlib.pyplot as plt\n'), ((6100, 6141), 'holoviews.util.command.export_to_python', 'export_to_python', (['filename', 'preprocessors'], {}), '(filename, preprocessors)\n', (6116, 6141), False, 'from holoviews.util.command import export_to_python\n'), ((6199, 6227), 'os.path.abspath', 'os.path.abspath', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (6214, 6227), False, 'import os\n'), ((6279, 6301), 'os.path.isdir', 'os.path.isdir', (['abspath'], {}), '(abspath)\n', (6292, 6301), False, 'import os\n'), ((5240, 5286), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', (['"""wb"""'], {'delete': '(True)'}), "('wb', delete=True)\n", (5267, 5286), False, 'import tempfile\n'), ((5348, 5402), 'subprocess.Popen', 'subprocess.Popen', (["['python', f.name]"], {'cwd': 'cwd', 'env': 'env'}), "(['python', f.name], cwd=cwd, env=env)\n", (5364, 5402), False, 'import os, sys, subprocess, ast\n'), ((5588, 5623), 'os.path.join', 'os.path.join', (['subpath', '"""thumbnails"""'], {}), "(subpath, 'thumbnails')\n", (5600, 5623), False, 'import os\n'), ((5657, 5684), 'os.path.join', 'os.path.join', (['"""."""', 'dir_path'], {}), "('.', dir_path)\n", (5669, 5684), False, 'import os\n'), ((5697, 5723), 'os.path.exists', 'os.path.exists', (['absdirpath'], {}), '(absdirpath)\n', (5711, 5723), False, 'import os\n'), ((5733, 5756), 'os.makedirs', 'os.makedirs', (['absdirpath'], {}), '(absdirpath)\n', (5744, 5756), False, 'import os\n'), ((5779, 5799), 'holoviews.ipython.preprocessors.OptsMagicProcessor', 'OptsMagicProcessor', ([], {}), '()\n', (5797, 5799), False, 'from holoviews.ipython.preprocessors import OptsMagicProcessor, OutputMagicProcessor\n'), ((5822, 5844), 'holoviews.ipython.preprocessors.OutputMagicProcessor', 'OutputMagicProcessor', ([], {}), '()\n', (5842, 5844), False, 'from holoviews.ipython.preprocessors import 
OptsMagicProcessor, OutputMagicProcessor\n'), ((5973, 5995), 'holoviews.ipython.preprocessors.StripMagicsProcessor', 'StripMagicsProcessor', ([], {}), '()\n', (5993, 5995), False, 'from holoviews.ipython.preprocessors import StripMagicsProcessor\n'), ((6632, 6655), 'os.path.isfile', 'os.path.isfile', (['abspath'], {}), '(abspath)\n', (6646, 6655), False, 'import os\n'), ((4178, 4211), 'os.path.isfile', 'os.path.isfile', (["(basename + '.png')"], {}), "(basename + '.png')\n", (4192, 4211), False, 'import os\n'), ((5526, 5552), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (5542, 5552), False, 'import os\n'), ((6419, 6429), 'sys.exit', 'sys.exit', ([], {}), '()\n', (6427, 6429), False, 'import os, sys, subprocess, ast\n'), ((6527, 6551), 'os.path.join', 'os.path.join', (['abspath', 'f'], {}), '(abspath, f)\n', (6539, 6551), False, 'import os\n'), ((6053, 6085), 'os.path.join', 'os.path.join', (['dir_path', 'basename'], {}), '(dir_path, basename)\n', (6065, 6085), False, 'import os\n'), ((6561, 6580), 'os.listdir', 'os.listdir', (['abspath'], {}), '(abspath)\n', (6571, 6580), False, 'import os\n'), ((4411, 4444), 'os.path.isfile', 'os.path.isfile', (["(basename + '.png')"], {}), "(basename + '.png')\n", (4425, 4444), False, 'import os\n'), ((7050, 7066), 'os.path.split', 'os.path.split', (['f'], {}), '(f)\n', (7063, 7066), False, 'import os\n')] |
import os

# Resolve the data directory so the script works both when run from inside
# App_init/ ("python3 .") and from the parent directory ("python3 App_init").
if os.getcwd()[-8:] != "App_init":
    default_path = "App_init/"
    print(default_path)
else:
    default_path = ""

# Read the list of built-in modules, one name per line.  Comment lines
# (starting with "#") are dropped and whitespace/newlines are stripped.
# Fixed: the original filtered comments by calling list.remove() while
# iterating the same list, which skips consecutive comment lines; it also
# leaked the file handle.  Rebuilding the list avoids both problems.
with open(f"{default_path}default_modules.txt", "r") as modules_file:
    raw_lines = modules_file.readlines()

default_modules = [
    line.replace("\n", "").replace(" ", "")
    for line in raw_lines
    if line and line[0] != "#"
]
| [
"os.getcwd"
] | [((83, 94), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (92, 94), False, 'import os\n')] |
import serial
import io
import MySQLdb
# Serial port the Arduino is attached to.
device = '/dev/ttyACM1'
#ser = serial.Serial('/dev/ttyACM1', 9600)
arduino = serial.Serial(device, 9600)
#dataTemp = arduino.readline()
# Placeholder initial values; overwritten once the Arduino sends readings.
temp = 5
motorPos = 50
hIndex = 4
i = 0
# Read three labelled values from the serial stream: the Arduino sends an
# indicator line ('temp', 'index' or 'pos') followed by the value line.
while(i<3):
    dataIndicator = arduino.readline()
    indicator = dataIndicator.decode().strip()
    ind = indicator
    print("ind: " + ind)
    #print(dataIndicator)
    if(ind == 'temp'):
    # print('test 1st if in while loop')
        dataTemp = arduino.readline()
        temp = dataTemp.decode('UTF-8')
        i = i + 1
    elif(indicator == "index"):
        dataHeatIndex = arduino.readline()
        hIndex = dataHeatIndex.decode('UTF-8')
        i = i + 1
    elif(indicator == "pos"):
        dataMotorPos = arduino.readline()
        motorPos = dataMotorPos.decode('UTF-8')
        i = i + 1
print('Encoded Serial Temp: '+ temp)
print('Encoded Serial Heat Index: '+ hIndex)
print('Encoded Serial Motor Position: '+ motorPos)
#Make DB connection
# NOTE(review): die() is not defined in Python; if connect() ever returned a
# falsy value this line would raise NameError instead of the intended message.
dbConn = MySQLdb.connect("localhost", "root", "password", "tempdb") or die("Could not connect to the database")
print(dbConn)
#with dbConn:
try:
    cursor = dbConn.cursor()
    #cursor.execute("INSERT INTO tempLog (Temperature) VALUES (%s)" % (temp))
except (MySQLdb.Error) as e:
    print(e)
    dbConn.rollback()
else:
    dbConn.commit()
finally:
    # NOTE(review): if dbConn.cursor() raised, 'cursor' is unbound here and
    # cursor.close() itself raises NameError — confirm intended behaviour.
    cursor.close()
| [
"MySQLdb.connect",
"serial.Serial"
] | [((119, 146), 'serial.Serial', 'serial.Serial', (['device', '(9600)'], {}), '(device, 9600)\n', (132, 146), False, 'import serial\n'), ((999, 1057), 'MySQLdb.connect', 'MySQLdb.connect', (['"""localhost"""', '"""root"""', '"""password"""', '"""tempdb"""'], {}), "('localhost', 'root', 'password', 'tempdb')\n", (1014, 1057), False, 'import MySQLdb\n')] |
import time
from typing import Optional
from pollect.core import Helper
from pollect.core.ValueSet import ValueSet, Value
from pollect.sources.Source import Source
class HttpSource(Source):
    """Probes an HTTP(S) endpoint and reports the request round-trip time."""

    # Expected HTTP status code; None accepts whatever Helper.get_url allows.
    status_code: Optional[int] = None

    def __init__(self, config):
        super().__init__(config)
        self.url = config.get('url')
        # Request timeout in seconds (default 10).
        self.timeout = config.get('timeout', 10)
        self.status_code = config.get('statusCode')

    def _probe(self):
        """Time a single request to self.url and return it as a ValueSet."""
        data = ValueSet()
        try:
            start = time.time() * 1000
            Helper.get_url(self.url, timeout=self.timeout, expected_status=self.status_code)
            end = time.time() * 1000
            # Round-trip time in milliseconds.
            data.add(Value(int(end - start)))
        except Exception as e:
            self.log.error('Could not probe ' + str(e))
            # NOTE(review): records self.timeout (seconds) where the success
            # path records milliseconds — confirm the intended unit.
            data.add(Value(self.timeout))
        return data
| [
"pollect.core.Helper.get_url",
"pollect.core.ValueSet.Value",
"time.time",
"pollect.core.ValueSet.ValueSet"
] | [((473, 483), 'pollect.core.ValueSet.ValueSet', 'ValueSet', ([], {}), '()\n', (481, 483), False, 'from pollect.core.ValueSet import ValueSet, Value\n'), ((548, 633), 'pollect.core.Helper.get_url', 'Helper.get_url', (['self.url'], {'timeout': 'self.timeout', 'expected_status': 'self.status_code'}), '(self.url, timeout=self.timeout, expected_status=self.status_code\n )\n', (562, 633), False, 'from pollect.core import Helper\n'), ((517, 528), 'time.time', 'time.time', ([], {}), '()\n', (526, 528), False, 'import time\n'), ((647, 658), 'time.time', 'time.time', ([], {}), '()\n', (656, 658), False, 'import time\n'), ((820, 839), 'pollect.core.ValueSet.Value', 'Value', (['self.timeout'], {}), '(self.timeout)\n', (825, 839), False, 'from pollect.core.ValueSet import ValueSet, Value\n')] |
from datetime import datetime
from enum import Enum, auto
import re
# Matches the "#<number>" guard identifier embedded in a log entry.
guard_id_regex = re.compile(r'#\d+')


class Record():
    """One parsed guard-log line: timestamp, optional guard id, event type."""

    def __init__(self, record_string):
        # The bracketed timestamp occupies characters 1..16 of every line.
        self.timestamp = datetime.strptime(record_string[1:17], '%Y-%m-%d %H:%M')
        match = guard_id_regex.search(record_string)
        self.guard_id = match[0][1:] if match else None
        for phrase, kind in (('wakes up', 'WAKES_UP'),
                             ('falls asleep', 'FALLS_ASLEEP')):
            if phrase in record_string:
                self.type = kind
                break
        else:
            self.type = 'BEGINS_SHIFT'
def track_sleep_minutes(minutes_already_slept, start_minute, end_minute):
    """Increment, in place, the per-minute counters for the half-open
    interval [start_minute, end_minute)."""
    minute = start_minute
    while minute < end_minute:
        minutes_already_slept[minute] += 1
        minute += 1
def build_sleep_log(records):
    """Replay chronologically sorted records into a per-guard sleep log.

    Returns a dict mapping guard id to ``{'time_asleep': timedelta,
    'minutes': 60-element list of per-minute sleep counts}``.
    """
    sleep_log = {}
    guard = records[0].guard_id
    fell_asleep = None
    for record in records:
        kind = record.type
        if kind == 'BEGINS_SHIFT':
            guard = record.guard_id
            fell_asleep = None
        elif kind == 'FALLS_ASLEEP':
            fell_asleep = record.timestamp
        elif kind == 'WAKES_UP':
            duration = record.timestamp - fell_asleep
            entry = sleep_log.get(guard)
            if entry is None:
                entry = {'time_asleep': duration, 'minutes': [0] * 60}
                sleep_log[guard] = entry
            else:
                entry['time_asleep'] += duration
            # Sleep intervals never cross the hour, so minutes suffice.
            for minute in range(fell_asleep.minute, record.timestamp.minute):
                entry['minutes'][minute] += 1
    return sleep_log
if __name__ == '__main__':
    # Advent of Code 2018 day 4: identify the sleepiest guards.
    with open('2018/sampleinputs/day04.txt') as file:
        # Records must be chronological before shifts can be replayed.
        records = sorted([Record(r) for r in file.read().split('\n')[:-1]], key=lambda r: r.timestamp)
        sleep_log = build_sleep_log(records)
        # Part 1: guard with the most total sleep x their sleepiest minute.
        sleeping_beauty1, log1 = sorted(sleep_log.items(), key=lambda x: x[1]['time_asleep'], reverse=True)[0]
        part1 = int(sleeping_beauty1) * int(log1['minutes'].index(max(log1['minutes'])))
        print('Part 1: {}'.format(part1))
        # Part 2: guard most frequently asleep on any single minute.
        sleeping_beauty2, log2 = sorted(sleep_log.items(), key=lambda x: max(x[1]['minutes']), reverse=True)[0]
        part2 = int(sleeping_beauty2) * int(log2['minutes'].index(max(log2['minutes'])))
        print('Part 2: {}'.format(part2))
| [
"datetime.datetime.strptime",
"re.compile"
] | [((86, 105), 're.compile', 're.compile', (['"""#\\\\d+"""'], {}), "('#\\\\d+')\n", (96, 105), False, 'import re\n'), ((188, 244), 'datetime.datetime.strptime', 'datetime.strptime', (['record_string[1:17]', '"""%Y-%m-%d %H:%M"""'], {}), "(record_string[1:17], '%Y-%m-%d %H:%M')\n", (205, 244), False, 'from datetime import datetime\n')] |
from rest_framework import serializers
class ProductSerializer(serializers.Serializer):
    """Serializer accepting a list of product names."""
    # Each entry is a free-form product name of at most 200 characters.
    product = serializers.ListField(
        child=serializers.CharField(max_length=200))
| [
"rest_framework.serializers.CharField"
] | [((141, 178), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (162, 178), False, 'from rest_framework import serializers\n')] |
# Export the contents of AviSys files SIGHTING.DAT and FNotes.DAT to CSV format
# Author: <NAME> <<EMAIL>>
# Version: 1.2 3 April 2021
import sys
import csv
import ctypes
# Input files
DATA_FILE = 'SIGHTING.DAT'
MASTER_FILE = 'MASTER.AVI'
PLACES_FILE = 'PLACES.AVI'
NOTE_INDEX = 'FNotes.IX'
NOTE_FILE = 'FNotes.DAT'
ASSOCIATE_FILE = 'ASSOCIAT.AVI'
# Output files
EXPORT_FILE = 'AviSys.sightings.'
NOTE_OUTPUT = 'FieldNotes.txt'
# US state name -> USPS two-letter code, keyed by the spelling AviSys uses.
stateCode = {
    'Alabama':'AL',
    'Alaska':'AK',
    'Arizona':'AZ',
    'Arkansas':'AR',
    'California':'CA',
    'Colorado':'CO',
    'Connecticut':'CT',
    'Delaware':'DE',
    'D.C.':'DC',
    'Florida':'FL',
    'Georgia':'GA',
    'Hawaii':'HI',
    'Idaho':'ID',
    'Illinois':'IL',
    'Indiana':'IN',
    'Iowa':'IA',
    'Kansas':'KS',
    'Kentucky':'KY',
    'Louisiana':'LA',
    'Maine':'ME',
    'Maryland':'MD',
    'Massachusetts':'MA',
    'Michigan':'MI',
    'Minnesota':'MN',
    'Mississippi':'MS',
    'Missouri':'MO',
    'Montana':'MT',
    'Nebraska':'NE',
    'Nevada':'NV',
    'New Hampshire':'NH',
    'New Jersey':'NJ',
    'New Mexico':'NM',
    'New York':'NY',
    'North Carolina':'NC',
    'North Dakota':'ND',
    'Ohio':'OH',
    'Oklahoma':'OK',
    'Oregon':'OR',
    'Pennsylvania':'PA',
    'Rhode Island':'RI',
    'South Carolina':'SC',
    'South Dakota':'SD',
    'Tennessee':'TN',
    'Texas':'TX',
    'Utah':'UT',
    'Vermont':'VT',
    'Virginia':'VA',
    'Washington':'WA',
    'West Virginia':'WV',
    'Wisconsin':'WI',
    'Wyoming':'WY'
    }
# Canadian province name -> Canada Post code.  Two codes are modernised
# relative to AviSys's own abbreviations (see inline notes).
provinceCode = {
    'Alberta':'AB',
    'British Columbia':'BC',
    'Manitoba':'MB',
    'New Brunswick':'NB',
    'Newfoundland':'NL', # AviSys uses 'NF'
    'Northwest Terr.':'NT',
    'Nova Scotia':'NS',
    'Nunavut':'NU',
    'Ontario':'ON',
    'Prince Edward Is.':'PE',
    'Quebec':'QC', # AviSys uses 'PQ'
    'Saskatchewan':'SK',
    'Yukon Territory':'YT'
    }
class NoteBlock:
    """One 512-byte block of FNotes.DAT.

    The file is a chain of 512-byte blocks (block 0 is a header).  Layout of
    a data block:

    * first block of a note (flag byte 0 == 00):
        0        flag (00)
        1-3      000000
        4-7      note number
        8-505    payload
        506-507  number of valid bytes from offset 0 through 505
        508-511  index of the next block (0 terminates the chain)
    * continuation block (flag byte 0 == 01):
        0        flag (01)
        1-505    payload
        506-507  number of valid bytes from offset 1 through 505
        508-511  index of the next block

    The payload is a sequence of fixed-length 125-byte records that span
    block boundaries; byte 0 of each record holds the text length.
    """

    def __init__(self, file, blockNumber):
        """Read block ``blockNumber`` from the open FNotes.DAT file object."""
        self.file = file
        file.seek(blockNumber * 512)
        raw = file.read(512)
        used = int.from_bytes(raw[506:508], 'little')
        self.next = int.from_bytes(raw[508:512], 'little')
        # A first block skips its 8-byte header; a continuation skips only
        # the one-byte flag.
        self.data = raw[8:used] if raw[0] == 0 else raw[1:used + 1]

    def extract(self):
        """Return the note's text, one line per 125-byte record."""
        payload = self.extractBlocks()
        lines = []
        pos = 0
        while pos < len(payload):
            length = payload[pos]  # each record starts with its text length
            text = payload[pos + 1:pos + 1 + length]
            lines.append(text.decode('Windows-1252'))
            pos += 125             # records occupy fixed 125-byte slots
        return ''.join(line + '\n' for line in lines)

    def extractBlocks(self):
        """Concatenate this block's payload with the rest of its chain."""
        payload = self.data
        if self.next:
            payload += NoteBlock(self.file, self.next).extractBlocks()
        return payload
def readMaster():
    """Read the AviSys taxonomy file MASTER.AVI.

    Returns a tuple (name, genusName, speciesName) of dicts, each keyed by
    species number, decoded from the fixed 110-byte records described below.
    Exits via SystemExit if the file cannot be opened.
    """
    # Fill in the species name lookup table
    # MASTER.AVI contains the taxonomy in 110 byte records
    # Byte      Content
    # 0         Life list mask: 20 All species have this bit; 2a species I have seen
    # 1-2       Custom checklist mask (bits 0-14) (Custom checklists that include this species); bit 15: species in most recent report
    # 3-4       Custom checklist seen mask
    # 5-6       Species number
    # 7         Common name length
    # 8-43      Common name
    # 44-51     State checklist mask (64 bits) (State checklists that include this species)
    # 52        Genus name length
    # 53-76     Genus name
    # 77        Species name length
    # 78-101    Species name
    # 102-103   ABA bytes
    # 104-109   Always 00
    # ABA byte 0
    #  01 ABA area species
    #  00 not ABA area species
    # ABA byte 1
    #  01 Seen in ABA area
    #  00 Not seen in ABA area
    # Bytes 0-4 (Life list mask and checklist masks)
    # Let 0200 be the mask for the NC checklist. Then bytes 0-4 work like this:
    # 20 0000 0000  Non-NC species I have not seen anywhere; also family level entry
    # 20 0200 0000  NC species that I have not seen anywhere
    # 2a 0000 0000  Non-NC species I have seen somewhere but not in NC
    # 2a 0000 0200  Non-NC species I have seen in NC
    # 2a 0200 0000  NC species that I have seen but not in NC
    # 2a 0200 0200  NC species seen in NC
    name = {}
    genusName = {}
    speciesName = {}
    try:
        master_input = open(MASTER_FILE, "rb")
    except FileNotFoundError:
        print('Error: File',MASTER_FILE,'not found.')
        raise SystemExit
    except:
        print("Error opening",MASTER_FILE,'--',sys.exc_info()[1])
        raise SystemExit
    while True:
        taxon = master_input.read(110) # Read a record of 110 bytes
        if not taxon:
            break
        speciesNo = int.from_bytes(taxon[5:7],"little")
        # Names are length-prefixed strings inside fixed-width fields.
        name[speciesNo] = taxon[8:(8+taxon[7])].decode('Windows-1252')
        genusName[speciesNo] = taxon[53:(53+taxon[52])].decode('Windows-1252')
        speciesName[speciesNo] = taxon[78:(78+taxon[77])].decode('Windows-1252')
    master_input.close()
    return (name,genusName,speciesName)
class Place:
    """A single AviSys place record plus the link chaining it to its parent."""

    def __init__(self, placeNumber, name, link):
        self.placeNumber = placeNumber
        self.name = name
        self.link = link
        # Places are stored 450 to a table; the table index is the level.
        self.table = (placeNumber - 1) // 450

    def __str__(self):
        pieces = [str(self.placeNumber), ': ', self.name, ' ',
                  str(self.link), ' (table ', str(self.table), ')']
        return ''.join(pieces)
def readPlaces():
    """Read PLACES.AVI and return {placeNumber: Place}.

    Each Place is given a six-element .linklist holding the place names from
    its own level up through the higher-level places it is linked to, with
    unused levels set to ''.

    The places file contains fixed length records of 39 bytes:
      Bytes 0-1    Place number
      Byte  6      Length of place name
      Bytes 7-36   Place name
      Bytes 37-38  Place number of linked (parent) location
    """
    output = {}
    try:
        places_input = open(PLACES_FILE,"rb")
    except FileNotFoundError:
        print('Error: File',PLACES_FILE,'not found.')
        raise SystemExit
    except:
        print("Error opening",PLACES_FILE,'--',sys.exc_info()[1])
        raise SystemExit
    while True:  # Read all the places in the file
        place = places_input.read(39)  # Read a record of 39 bytes
        if not place:
            break
        placeNumber = int.from_bytes(place[0:2],"little")
        if placeNumber == 0:
            continue
        name = place[7:(7+place[6])].decode('Windows-1252')
        link = int.from_bytes(place[37:39],"little")
        placeInfo = Place(placeNumber,name,link)
        output[placeNumber] = placeInfo
    places_input.close()
    # Now make the 6-level list of links for each place
    for placeNumber in output:
        place = output[placeNumber]
        links = []
        # BUGFIX: initialize table to the starting place's level. Previously
        # `table` was only assigned after following a link, so a place whose
        # own link is 0 (a chain root) raised NameError in the padding loop.
        table = place.table
        for i in range(6):
            if i == place.table:  # i is the entry for this place
                links.append(place.name)
                next = place.link  # now list the higher-level places this one is linked to
                if next == 0:
                    while table < 5:  # pad the remaining levels with blanks
                        table += 1
                        links.append('')
                    break
                place = output[next]
                table = place.table
            else:
                links.append('')  # Links are null until we get to the first one
        output[placeNumber].linklist = links
    return output
class Association:
    """One AviSys place -> eBird hotspot association record."""

    _FIELDS = ('placeName', 'locationName', 'lat', 'lng', 'state', 'nation')

    def __init__(self, placeName, locationName, lat, lng, state, nation):
        # Store every field exactly as parsed from ASSOCIAT.AVI.
        values = (placeName, locationName, lat, lng, state, nation)
        for field, value in zip(self._FIELDS, values):
            setattr(self, field, value)
def readAssociate():
    """Read the optional hotspot association file ASSOCIAT.AVI.

    Returns {AviSys place name: Association}. A missing file is not an
    error (the file is optional); an empty dict is returned in that case.
    """
    # The hotspot association file (ASSOCIAT.AVI) contains fixed length records of 152 bytes
    # Bytes
    #   0       Place len
    #   1-30    AviSys place (30 chars)
    #   31-33   ?
    #   34      locid len
    #   35-41   locid
    #   42      hotspot len
    #   43-102  eBird hotspot (60 chars)
    #   103     lat len
    #   104-115 lat
    #   116-123 binary (float) lat
    #   124     lng len
    #   125-136 lng
    #   137-144 binary (float) lng
    #   145     state len
    #   146-148 state
    #   149     nation len
    #   150-151 nation
    output = {}
    try:
        associate_input = open(ASSOCIATE_FILE,"rb")
    except FileNotFoundError:
        print('Note: File',ASSOCIATE_FILE,'not found.')
        return output
    except:
        print("Error opening",ASSOCIATE_FILE,'--',sys.exc_info()[1])
        raise SystemExit
    while True:  # Read all the places in the file
        association = associate_input.read(152) # Read a record of 152 bytes
        if not association:
            break
        if len(association) != 152:
            print("Odd, length is",len(association))
        else:
            # All text fields are length-prefixed (length byte precedes data).
            place = association[1:1+association[0]].decode('Windows-1252')
            location = association[43:43+association[42]].decode('Windows-1252')
            lat = association[104:104+association[103]].decode('Windows-1252')
            lng = association[125:125+association[124]].decode('Windows-1252')
            state = association[146:146+association[145]].decode('Windows-1252')
            nation = association[150:150+association[149]].decode('Windows-1252')
            Info = Association(place,location,lat,lng,state,nation)
            output[place] = Info
    associate_input.close()
    return output
def readNoteIndex():
    """Read FNotes.IX and return {note number: FNotes.DAT block number}.

    FNotes.IX contains fixed-length blocks. The first block begins with a
    32-byte descriptive header:
      Bytes 0-3    0xffffffff
      Bytes 4-7    ??
      Bytes 8-11   Number of blocks in the file
      Bytes 12-15  Size of each block (874 bytes)
      Bytes 16-21  ??
      Bytes 22-25  Number of field notes in the file
      Bytes 26-29  Number of notes per block (62)
    The rest of the first block is empty. In subsequent blocks, byte 0 is
    the number of valid 14-byte index entries; entries begin at byte 6.
    An entry holds the block number in binary in bytes 0-3, the length of
    the note number (always 5) in byte 8 and the note number in ASCII in
    bytes 9-13. Valid entries are grouped at the beginning of a block; the
    remainder of the block is padding.
    """
    try:
        note_index = open(NOTE_INDEX,"rb")
    except FileNotFoundError:
        print('Error: File',NOTE_INDEX,'not found.')
        raise SystemExit  # BUGFIX: previously fell through and used note_index unbound
    except:
        print("Error opening",NOTE_INDEX,'--',sys.exc_info()[1])
        raise SystemExit
    header = note_index.read(32)
    marker = int.from_bytes(header[0:4],'little')
    if marker != 4294967295:
        print('Unexpected value',marker,'at beginning of',NOTE_INDEX)
#        raise SystemExit
    numBlocks = int.from_bytes(header[8:12],'little')     # number of 874 byte blocks (e.g., 11)
    blockSize = int.from_bytes(header[12:16],'little')    # blocksize (874, 0x036a)
    numNotes = int.from_bytes(header[22:26],'little')     # Number of notes (e.g., 600)
    blockFactor = int.from_bytes(header[26:30],'little')  # Number of notes per block (62, 0x3E)
    reclen = int((blockSize-6) / blockFactor)  # 14
    if reclen != 14:
        print('Reclen was expected to be 14 but is', reclen)
        raise SystemExit
    note_index.read(blockSize - 32)  # Have already read 32 bytes of first block. Now read the rest (and discard).
    index = {}
    while True:
        block = note_index.read(blockSize)
        if not block:
            break
        numValid = block[0]
        if not numValid:
            break
        # Loop through each index entry in this block
        for ptr in range(6,blockSize,reclen):
            ix = block[ptr:ptr+reclen]
            if not ix:
                break
            blockNumber = int.from_bytes(ix[0:4],'little')
            nchar = ix[8]
            # Note number is stored in ASCII (renamed from `ascii`, which
            # shadowed the builtin).
            noteNumberText = ix[9:9+nchar].decode('Windows-1252')
            index[int(noteNumberText)] = blockNumber
            numValid -= 1
            if not numValid:
                break  # Finished with all valid entries this block
    note_index.close()
    return index
def integrateNote(comment,fieldnoteText):
    """Merge an AviSys comment with its field note, removing duplicated text.

    If the observation was imported from eBird via
    http://avisys.info/ebirdtoavisys/ the AviSys comment may duplicate the
    beginning of the eBird note. Any such duplication is dropped, keeping
    only the comment prefix (attribute codes and/or a parenthesized lead-in)
    before the field note is appended. If there is no field note the
    comment is returned unchanged.
    """
    if fieldnoteText != '':  # If there is a field note
        work = comment  # Working copy of the comment
        keepLen = 0  # Length of the beginning of the comment to keep, if any duplication
        ptr = 0  # Where we are in the comment
        hasAttributes = True if ptr < len(work) and work[ptr] == '/' else False
        while hasAttributes:  # There are AviSys attributes at the beginning of comment
            attributeLen = 3 if ptr+2 < len(work) and comment[ptr+2] == '/' else 2  # Attributes are either 2 or 3 bytes
            ptr += attributeLen  # Bump ptr past this attribute
            while ptr < len(work) and work[ptr] == ' ':  # and past any trailing blanks
                ptr += 1
            hasAttributes = True if ptr < len(work) and work[ptr] == '/' else False  # Check if there is another attribute
        if ptr < len(work) and work[ptr] == '(':  # If the first part of comment is parenthesized, skip over it
            ptr += 1
            while ptr < len(work) and work[ptr] != ')':
                ptr += 1
            # BUGFIX: guard ptr before indexing; an unclosed '(' previously
            # raised IndexError here when ptr ran off the end of the comment.
            if ptr < len(work) and work[ptr] == ')':
                ptr += 1
                while ptr < len(work) and work[ptr] == ' ':
                    ptr += 1
        keepLen = ptr  # Keep at least this much of the comment
        work = work[ptr:]  # Check if this part of the comment is duplicated in the field note
        text = fieldnoteText
        linend = fieldnoteText.find('\n')  # end of first line
        # If the first line contains ' :: ' it is probably a heading so skip that line
        if fieldnoteText[0:linend].find(' :: ') > 0:
            text = fieldnoteText[linend+1:]
            linend = text.find('\n')  # end of second line
        text = text[0:linend] + ' ' + text[linend+1:]  # Examine the first two lines as one line
        ptr = 0
        while ptr < len(text) and text[ptr] == ' ':  # Skip over any leading blanks
            ptr += 1
        if len(work):  # If we have a comment
            if text[ptr:ptr+len(work)] == work:  # If the comment is identical to the beginning of the field note
                if keepLen:  # Discard the comment text. Keep only the comment prefix (attributes and/or parenthesized content)
                    comment = comment[0:keepLen]
                else:
                    comment = ''  # Discard the entire comment.
        comment = comment.strip() + ' ' + fieldnoteText  # Concatenate comment prefix and field note.
        comment = comment.strip(' \n')
    return comment
#########################################################################################################
######################################## The program starts here ########################################
#########################################################################################################
outArray = []   # accumulated sighting rows destined for the CSV
noteDict = {}   # field-note text keyed by sighting record number
# Detect whether we were launched by double-click or from a console.
# ref https://stackoverflow.com/questions/55172090/detect-if-python-program-is-executed-via-windows-gui-double-click-vs-command-p
kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)
process_array = (ctypes.c_uint * 1)()
num_processes = kernel32.GetConsoleProcessList(process_array, 1)
# Choose the output flavor: default eBird for a double-click launch,
# AviSys for a console launch, or honor an explicit command-line argument.
if len(sys.argv) < 2: # If no command-line argument
    if num_processes <= 2: # Run from double-click
        outputType = 'eBird'
    else: # Run from command line
        outputType = 'AviSys'
else:
    outputType = sys.argv[1]
    if outputType.lower() == 'avisys':
        outputType = 'AviSys'
    elif outputType.lower() == 'ebird':
        outputType = 'eBird'
    else:
        print("Please specify either AviSys or eBird")
        raise SystemExit
# Open the field notes file and load all the lookup tables.
try:
    FNotes = open(NOTE_FILE,"rb")
except FileNotFoundError:
    print('Error: File',NOTE_FILE,'not found.')
    raise SystemExit
except:
    print("Error opening",NOTE_FILE,'--',sys.exc_info()[1])
    raise SystemExit
noteIndex = readNoteIndex()
(name,genusName,speciesName) = readMaster()
places = readPlaces()
association = readAssociate()
try:
    sighting_file = open(DATA_FILE,"rb")
except FileNotFoundError:
    print('Error: File',DATA_FILE,'not found.')
    raise SystemExit
except:
    print("Error opening",DATA_FILE,'--',sys.exc_info()[1])
    raise SystemExit
# Format of SIGHTING.DAT
# Header record
# 0-3 ffffffff
# 8-11 Number of records
# 12 Reclen (6F, 111)
# padded to 111 bytes
#
# Sighting record
# 0-3 always 00000000
# 4-5 Species number
# 6-9 Fieldnote number
# 10-13 Date
# 14-15 Place number
# 16 Country len
# 17-19 Country
# 20-23 nation bits e.g. 0d200800 for lower 48
# 24-27 always 00000000
# 28 Comment len
# 29-108 Comment
# 109-110 Count
#
# Update 2021 08 14:
# I figured out how bytes 0-3 are used.
# For valid sighting records, the first 4 bytes are zeroes.
# Corrupted records can be kept in the file but ignored;
# they are stored in a linked list where bytes 0-3 are the link pointer.
# The last record in the linked list has ffffffff in bytes 0-3.
# The first four bytes of the header (first four bytes of the file) point to the beginning of the linked list of corrupt records.
# If there are no corrupt records, the file begins with ffffffff.
# The value of the link pointer is the record number; thus multiply by 111 to get the byte offset in the file.
# To ignore invalid records, skip any record that does not begin with 00000000.
#
# Nation bits:
# 00000100 Australasia
# 00000200 Eurasia
# 00000400 South Polar
# 00000800 [AOU]
#
# 00010000 [Asia]
# 00020000 Atlantic Ocean
# 00040000 Pacific Ocean
# 00080000 Indian Ocean
#
# 00100000 [Oceanic]
# 00200000 North America
# 00400000 South America
# 00800000 Africa
#
# 01000000 [ABA Area]
# 02000000 [Canada]
# 04000000 [US]
# 08000000 [Lower 48]
#
# 10000000 [West Indies]
# 20000000 [Mexico]
# 40000000 [Central America]
# 80000000 [Western Palearctic]
header = sighting_file.read(111) # Read the 111 byte header record
marker = int.from_bytes(header[0:4],'little')
# NOTE(review): per the format notes above, marker is the head of the
# corrupt-record chain (ffffffff if none); it is computed but not checked here.
corruptRecords = 0
EXPORT_FILE += outputType+'.csv'
try:
    CSV = open(EXPORT_FILE,'w', newline='')
except PermissionError:
    print('Denied permission to open',EXPORT_FILE,'-- Maybe it is open in another program? If so, close it and try again.')
    raise SystemExit
except:
    print('Error opening',EXPORT_FILE,'--',sys.exc_info()[1])
    raise SystemExit
try:
    noteOut = open(NOTE_OUTPUT,'w', newline='')
except PermissionError:
    print('Denied permission to open',NOTE_OUTPUT,'-- Maybe it is open in another program? If so, close it and try again,')
    raise SystemExit
except:
    print('Error opening',NOTE_OUTPUT,'--',sys.exc_info()[1])
    raise SystemExit  # BUGFIX: previously fell through with noteOut unbound
nrecs = int.from_bytes(header[8:12],"little")
reclen = header[12]
if reclen != 111:
    print('Record length is', reclen, 'expecting it to be 111.')
    raise SystemExit
while True:
sighting = sighting_file.read(111)
if not sighting:
break
recordCount+=1
corruptPointer = int.from_bytes(sighting[0:4],'little')
corruptedRecord = corruptPointer != 0
speciesNo = int.from_bytes(sighting[4:6],'little')
fieldnote = int.from_bytes(sighting[6:10],'little')
if fieldnote:
block = NoteBlock(FNotes,noteIndex[fieldnote])
fieldnoteText = block.extract()
noteDict[recordCount] = fieldnoteText
else:
fieldnoteText = ''
fieldnoteText = fieldnoteText.rstrip(' \n')
date = int.from_bytes(sighting[10:14],'little')
day = date % 100
month = (date // 100) % 100
year = (date // 10000) + 1930
date = str(month) + '/' + str(day) + '/' + str(year)
sortdate = str(year) + '-' + str(month).rjust(2,'0') + '-' + str(day).rjust(2,'0')
place = int.from_bytes(sighting[14:16],'little')
countryLen = sighting[16]
country = sighting[17:19].decode('Windows-1252')
commentLen = sighting[28]
shortComment = sighting[29:29+commentLen].decode('Windows-1252').strip()
comment = integrateNote(shortComment,fieldnoteText)
if outputType == 'eBird':
comment = comment.replace("\n"," ")
tally = int.from_bytes(sighting[109:111],'little')
if speciesNo in name:
commonName = name[speciesNo]
else:
commonName = '?'
if not corruptedRecord:
print("No name found for species number", speciesNo)
raise SystemExit
if place not in places:
if not corruptedRecord:
print("Place", place, "is not set")
raise SystemExit
else:
location = 'Unknown location'
else:
linkList = places[place].linklist
location = linkList[0] if linkList[0] != '' else \
linkList[1] if linkList[1] != '' else \
linkList[2] if linkList[2] != '' else \
linkList[3] if linkList[3] != '' else \
linkList[4] if linkList[4] != '' else \
linkList[5] if linkList[5] != '' else \
linkList[6]
if outputType == 'eBird' and location in association:
location = association[location].locationName # Use associated eBird location name instead of AviSys place name
if country == 'US':
state = stateCode[linkList[3]]
elif country == 'CA':
state = provinceCode[linkList[3]]
else:
state = ''
if corruptedRecord:
corruptRecords += 1
print('Corrupt record found:',commonName,location,date,state,country,comment)
else:
outArray.append([commonName,genusName[speciesNo],speciesName[speciesNo],tally,comment,location,sortdate,date,state,country,speciesNo,recordCount,shortComment])
def sortkey(array):
    """Sort key: the ISO-format sortdate field (index 6) of an output row."""
    return array[6]
outArray.sort(key=sortkey)
# Write the CSV in the selected flavor. Row layout produced above:
# 0 common name, 1 genus, 2 species, 3 count, 4 comment, 5 location,
# 6 sortdate, 7 date, 8 state, 9 country, 10 speciesNo, 11 recordCount,
# 12 shortComment.
if outputType == 'eBird':
    csvFields = ['Common name','Genus','Species','Species Count','Species Comment','Location','Lat','Lng','Date','Start time','State','Country','Protocol','N. Observers','Duration','Complete','Distance','Area','Checklist comment','Important: Delete this header row before importing to eBird']
else:
    csvFields = ['Common name','Genus','Species','Place','Date','Count','Comment','State','Nation','Blank','SpeciesNo']
CSVwriter = csv.DictWriter(CSV,fieldnames=csvFields)
CSVwriter.writeheader()
if outputType == 'eBird':
    for row in outArray:
        CSVwriter.writerow({'Common name':row[0],'Genus':row[1],'Species':row[2],'Species Count':row[3],'Species Comment':row[4],
                            'Location':row[5],'Lat':'','Lng':'','Date':row[7],'Start time':'','State':row[8],'Country':row[9],
                            'Protocol':'historical','N. Observers':1,'Duration':'','Complete':'N','Distance':'','Area':'','Checklist comment':'Imported from AviSys'})
else:
    for row in outArray:
        dateVal = row[6].split('-')
        date = str(int(dateVal[1]))+'/'+str(int(dateVal[2]))+'/'+dateVal[0]
        # BUGFIX: State/Nation/SpeciesNo previously used row[7]/row[8]/row[9]
        # (the date, state and country columns); per the row layout above and
        # the eBird branch, the correct indices are row[8]/row[9]/row[10].
        CSVwriter.writerow({'Common name':row[0],'Genus':row[1],'Species':row[2],'Place':row[5],'Date':date,'Count':row[3],'Comment':row[4],
                            'State':row[8],'Nation':row[9],'Blank':'','SpeciesNo':row[10]})
# Write all field notes to a file
# The entry for each note begins with species name -- date -- place on the first line, followed by a blank line.
# The text of the field note follows
# The note is terminated by a line of equal signs (which is something that could not be part of the actual note).
# Note: If AviSys type output, the place is the AviSys place. If eBird type output, the associated eBird location, if any, is used as the place.
for row in outArray:
    recordNo = row[11]
    if recordNo in noteDict:
        shortComment = row[12]
        noteOut.write(row[0] +' -- '+ row[6] +' -- '+ row[5] + '\n\n')
        if len(shortComment):
            noteOut.write( 'Short comment: ' + shortComment + '\n\n')
        noteOut.write(noteDict[recordNo] + '\n' + '==========================================================================================\n')
sighting_file.close()
noteOut.close()
CSV.close()
# Summarize the run and report any ignored corrupt records.
if recordCount != nrecs:
    print('Should be', nrecs, 'records, but counted', recordCount)
else:
    print(nrecs,"records processed")
if corruptRecords:
    if corruptRecords == 1:
        print('File', DATA_FILE, 'contains one corrupt record, which has been ignored. ')
        print('To remove it from AviSys, run Utilities->Restructure sighting file.')
    else:
        print('File', DATA_FILE, 'contains', corruptRecords, 'corrupt records, which have been ignored. ')
        print('To remove them from AviSys, run Utilities->Restructure sighting file.')
    print(nrecs-corruptRecords, 'records are valid.')
| [
"csv.DictWriter",
"sys.exc_info",
"ctypes.WinDLL"
] | [((14091, 14137), 'ctypes.WinDLL', 'ctypes.WinDLL', (['"""kernel32"""'], {'use_last_error': '(True)'}), "('kernel32', use_last_error=True)\n", (14104, 14137), False, 'import ctypes\n'), ((20665, 20706), 'csv.DictWriter', 'csv.DictWriter', (['CSV'], {'fieldnames': 'csvFields'}), '(CSV, fieldnames=csvFields)\n', (20679, 20706), False, 'import csv\n'), ((14817, 14831), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (14829, 14831), False, 'import sys\n'), ((15159, 15173), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (15171, 15173), False, 'import sys\n'), ((17217, 17231), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (17229, 17231), False, 'import sys\n'), ((17516, 17530), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (17528, 17530), False, 'import sys\n'), ((4887, 4901), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (4899, 4901), False, 'import sys\n'), ((6052, 6066), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6064, 6066), False, 'import sys\n'), ((7982, 7996), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (7994, 7996), False, 'import sys\n'), ((9811, 9825), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (9823, 9825), False, 'import sys\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
#
# Copyright © 2018 Dell Inc. or its subsidiaries. All rights reserved.
# Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
# Other trademarks may be trademarks of their respective owners.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: <NAME>
#
import subprocess
import io
from xml.dom.minidom import parse
import xml.dom.minidom
import json
import logging
logger = logging.getLogger(__name__)
class PsShell:
    """Runs PowerShell commands and flattens their CLIXML output into dicts."""

    def __init__(self):
        pass

    def execute(self, cmd):
        """Run *cmd* under powershell.exe with -outputformat XML and parse the result.

        Returns a dict mapping each output object's ToString value (or a
        generated "name<N>" fallback) to its "f2" value; a key maps to a
        list when it occurs more than once and to None when no value was
        collected for it.
        """
        logger.debug("Executing: " + cmd)
        proc = subprocess.Popen(["powershell", "-outputformat", "XML", "-command", "" + cmd + ""],
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        # NOTE(review): stderr is piped but never drained; a command that
        # writes a lot to stderr could fill the pipe and deadlock -- confirm
        # before relying on this for verbose commands.
        wrapper = io.TextIOWrapper(proc.stdout, encoding="utf-8")
        wrapper.readline()  # discard the "#< CLIXML" banner line
        output = io.StringIO()
        for line in wrapper:
            output.write(line.rstrip())
        data_json = {}
        if output.getvalue() == "":
            return data_json
        domtree = xml.dom.minidom.parseString(output.getvalue())
        collection = domtree.documentElement
        counter = 0
        for obj in collection.childNodes:
            counter += 1
            mydata = self.print_objx("", obj)
            name = "name" + str(counter)
            if "ToString" in mydata:
                name = mydata["ToString"]
            if "f2" in mydata:
                value = mydata["f2"]
                if name not in data_json:
                    data_json[name] = []
                data_json[name].append(value)
        # Collapse the collected lists: none -> None, single value -> scalar.
        for name in data_json:
            if len(data_json[name]) == 0:
                data_json[name] = None
            elif len(data_json[name]) == 1:
                data_json[name] = data_json[name][0]
        return data_json

    def print_objx(self, n, obj):
        """Recursively flatten a CLIXML DOM element into a dict.

        XML attributes are copied through; child elements are keyed by their
        "N" attribute (or a positional "f<N>" fallback) with text content as
        the value, and nested elements recurse. *n* is only an indentation
        accumulator carried through the recursion.
        """
        tst = {}
        counter = 0
        if obj.hasAttributes():
            for i in range(0, obj.attributes.length):
                attr = obj.attributes.item(i)
                tst[attr.name] = attr.value
        for objns in obj.childNodes:
            if objns.nodeType == objns.ELEMENT_NODE:
                if objns.firstChild is None:
                    # Empty element: record it under a positional key.
                    counter += 1
                    tst["f" + str(counter)] = objns.firstChild
                elif objns.firstChild.nodeType == objns.firstChild.TEXT_NODE:
                    var = objns.getAttribute("N")
                    if var is None or var == "":
                        var = objns.tagName
                    if objns.tagName == "ToString":
                        tst[objns.tagName] = objns.firstChild.data
                    else:
                        tst[var] = objns.firstChild.data
                else:
                    k = self.print_objx(n + " ", objns)
                    var = objns.getAttribute("N")
                    if var is None or var == "":
                        counter += 1
                        var = "f" + str(counter)
                    tst[var] = k
            else:
                # NOTE(review): non-Element nodes (e.g. Text) have no tagName,
                # so this line would raise; it appears unreachable because
                # inter-element whitespace was stripped above -- confirm.
                logger.debug(">>> not element>>" + str(objns.tagName))
        return tst
| [
"logging.getLogger",
"subprocess.Popen",
"io.StringIO",
"io.TextIOWrapper"
] | [((988, 1015), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1005, 1015), False, 'import logging\n'), ((1165, 1300), 'subprocess.Popen', 'subprocess.Popen', (["['powershell', '-outputformat', 'XML', '-command', '' + cmd + '']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['powershell', '-outputformat', 'XML', '-command', '' +\n cmd + ''], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n", (1181, 1300), False, 'import subprocess\n'), ((1349, 1396), 'io.TextIOWrapper', 'io.TextIOWrapper', (['proc.stdout'], {'encoding': '"""utf-8"""'}), "(proc.stdout, encoding='utf-8')\n", (1365, 1396), False, 'import io\n'), ((1449, 1462), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1460, 1462), False, 'import io\n')] |
from rest_framework import serializers
from djoser.serializers import UserSerializer
from math import cos, asin, sqrt, pi
from accounts.models import User
from .models import Localization
class UserInfoSerializer(UserSerializer):
    """Trimmed user payload for embedding in localization responses.

    Excludes credentials and personal/profile attributes from djoser's
    UserSerializer, exposing only the remaining public fields.
    """
    class Meta:
        model = User
        exclude = ('email', 'password', 'is_superuser', 'last_name', 'is_staff', 'date_joined', 'groups', 'user_permissions', 'last_login', 'is_active', 'gender', 'background_color', 'job', 'preferred_drink', 'description')
class LocalizationSerializer(serializers.ModelSerializer):
    """Serializer for a user's Localization row.

    create() upserts the requesting user's single localization record;
    list responses additionally report the haversine distance from the
    requesting user's own localization.
    """
    user = UserInfoSerializer(many = False, read_only = True)
    # BUGFIX: minutes are '%M'; the original '%m' is the month, so timestamps
    # rendered as e.g. '2021-04-13 13:04' repeating the month after the hour.
    timestamp = serializers.DateTimeField(format = '%Y-%m-%d %H:%M', input_formats = None, read_only = True)
    location = serializers.CharField(required = True)
    class Meta:
        model = Localization
        fields = ['id', 'user', 'longitude', 'latitude', 'attitude', 'location', 'timestamp']
    def create(self, request):
        """Create or update the requesting user's localization.

        NOTE(review): despite the name, *request* here is DRF's
        validated_data dict, not an HttpRequest.
        """
        user = self.context['request'].user
        longitude = request['longitude']
        latitude = request['latitude']
        attitude = request['attitude']
        location = request['location']
        last_localization = Localization.objects.filter(user = user)
        if not last_localization:
            current_localization = Localization.objects.create(
                user = user,
                longitude = longitude,
                latitude = latitude,
                attitude = attitude,
                location = location
            )
        else:
            current_localization = last_localization[0]
            current_localization.longitude = longitude
            current_localization.latitude = latitude
            current_localization.attitude = attitude
            current_localization.location = location
            current_localization.save()
        return current_localization
    def validate_user(self, validated_data):
        # The stringified value is matched against User.email.
        if not User.objects.filter(email = str(validated_data)):
            raise serializers.ValidationError('User must be in database.')
        return validated_data
    #change receivers
    def to_representation(self, instance):
        ret = super(LocalizationSerializer, self).to_representation(instance)
        # check the request is list view or detail view
        is_list_view = isinstance(self.instance, list)
        if is_list_view:
            # coordinates of the serialized user
            latitude = instance.latitude
            longitude = instance.longitude
            # coordinates of the requesting user
            user = self.context['request'].user
            # NOTE(review): assumes the requesting user already has a
            # Localization row; otherwise this raises IndexError -- confirm.
            self_localization = Localization.objects.filter(user = user)[0]
            self_latitude = self_localization.latitude
            self_longitude = self_localization.longitude
            # Haversine formula; 12742 km is the Earth's diameter (2*R).
            p = pi / 180
            a = 0.5 - cos((self_latitude - latitude) * p) / 2 + cos(latitude * p) * cos(self_latitude * p) * (1 - cos((self_longitude - longitude) * p)) / 2
            distance = 12742 * asin(sqrt(a)) #2*R*asin...
            extra_ret = {
                "distance" : distance,
            }
            ret.update(extra_ret)
        return ret
| [
"rest_framework.serializers.DateTimeField",
"rest_framework.serializers.ValidationError",
"math.sqrt",
"math.cos",
"rest_framework.serializers.CharField"
] | [((633, 723), 'rest_framework.serializers.DateTimeField', 'serializers.DateTimeField', ([], {'format': '"""%Y-%m-%d %H:%m"""', 'input_formats': 'None', 'read_only': '(True)'}), "(format='%Y-%m-%d %H:%m', input_formats=None,\n read_only=True)\n", (658, 723), False, 'from rest_framework import serializers\n'), ((741, 777), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'required': '(True)'}), '(required=True)\n', (762, 777), False, 'from rest_framework import serializers\n'), ((2011, 2067), 'rest_framework.serializers.ValidationError', 'serializers.ValidationError', (['"""User must be in database."""'], {}), "('User must be in database.')\n", (2038, 2067), False, 'from rest_framework import serializers\n'), ((3009, 3016), 'math.sqrt', 'sqrt', (['a'], {}), '(a)\n', (3013, 3016), False, 'from math import cos, asin, sqrt, pi\n'), ((2838, 2873), 'math.cos', 'cos', (['((self_latitude - latitude) * p)'], {}), '((self_latitude - latitude) * p)\n', (2841, 2873), False, 'from math import cos, asin, sqrt, pi\n'), ((2880, 2897), 'math.cos', 'cos', (['(latitude * p)'], {}), '(latitude * p)\n', (2883, 2897), False, 'from math import cos, asin, sqrt, pi\n'), ((2900, 2922), 'math.cos', 'cos', (['(self_latitude * p)'], {}), '(self_latitude * p)\n', (2903, 2922), False, 'from math import cos, asin, sqrt, pi\n'), ((2930, 2967), 'math.cos', 'cos', (['((self_longitude - longitude) * p)'], {}), '((self_longitude - longitude) * p)\n', (2933, 2967), False, 'from math import cos, asin, sqrt, pi\n')] |
import qcodes as qc
from qdev_wrappers.sweep_functions import _do_measurement, _do_measurement_single, \
_select_plottables
def measure(meas_param, do_plots=True):
    """Measure a single parameter once and optionally plot the result.

    Args:
        meas_param: parameter to measure
        do_plots: Default True: If False no plots are produced.
            Data is still saved and can be displayed with show_num.

    Returns:
        data (qcodes dataset)
        plot: QT plot
    """
    plottables = _select_plottables(meas_param)
    plot, data = _do_measurement_single(qc.Measure(meas_param), plottables,
                                        do_plots=do_plots)
    return data, plot
def sweep1d(meas_param, sweep_param, start, stop, step, delay=0.01,
            do_plots=True):
    """Do a 1-dimensional sweep and optionally plot the results.

    Args:
        meas_param: parameter which we want the value of at each point
        sweep_param: parameter to be swept
        start: starting value for sweep_param
        stop: final value for sweep_param
        step: value to step sweep_param by
        delay (default 0.01): minimum time to spend on each point
        do_plots: Default True: If False no plots are produced.
            Data is still saved and can be displayed with show_num.

    Returns:
        data (qcodes dataset)
        plot: QT plot
    """
    sweep_values = sweep_param.sweep(start, stop, step)
    loop = qc.Loop(sweep_values, delay).each(meas_param)
    plottables = _select_plottables(meas_param)
    plot, data = _do_measurement(loop, ((sweep_param, start, stop),),
                                 plottables, do_plots=do_plots)
    return data, plot
def sweep2d(meas_param, sweep_param1, start1, stop1, step1,
            sweep_param2, start2, stop2, step2, delay=0.01,
            do_plots=True):
    """Do a 2-dimensional sweep and optionally plot the results.

    Args:
        meas_param: parameter which we want the value of at each point
        sweep_param1: parameter to be swept in outer loop (default on y axis)
        start1: starting value for sweep_param1
        stop1: final value for sweep_param1
        step1: value to step sweep_param1 by
        sweep_param2: parameter to be swept in inner loop (default on x axis)
        start2: starting value for sweep_param2
        stop2: final value for sweep_param2
        step2: value to step sweep_param2 by
        delay (default 0.01): minimum time to spend on each point
        do_plots: Default True: If False no plots are produced.
            Data is still saved and can be displayed with show_num.

    Returns:
        data (qcodes dataset)
        plot: QT plot
    """
    inner = qc.Loop(sweep_param2.sweep(start2, stop2, step2),
                    delay).each(meas_param)
    outer = qc.Loop(sweep_param1.sweep(start1, stop1, step1),
                    delay).each(inner)
    set_params = ((sweep_param1, start1, stop1),
                  (sweep_param2, start2, stop2))
    plot, data = _do_measurement(outer, set_params,
                                 _select_plottables(meas_param),
                                 do_plots=do_plots)
    return data, plot
| [
"qcodes.Measure",
"qdev_wrappers.sweep_functions._select_plottables",
"qdev_wrappers.sweep_functions._do_measurement_single",
"qdev_wrappers.sweep_functions._do_measurement"
] | [((544, 566), 'qcodes.Measure', 'qc.Measure', (['meas_param'], {}), '(meas_param)\n', (554, 566), True, 'import qcodes as qc\n'), ((585, 615), 'qdev_wrappers.sweep_functions._select_plottables', '_select_plottables', (['meas_param'], {}), '(meas_param)\n', (603, 615), False, 'from qdev_wrappers.sweep_functions import _do_measurement, _do_measurement_single, _select_plottables\n'), ((633, 700), 'qdev_wrappers.sweep_functions._do_measurement_single', '_do_measurement_single', (['measurement', 'meas_params'], {'do_plots': 'do_plots'}), '(measurement, meas_params, do_plots=do_plots)\n', (655, 700), False, 'from qdev_wrappers.sweep_functions import _do_measurement, _do_measurement_single, _select_plottables\n'), ((1640, 1670), 'qdev_wrappers.sweep_functions._select_plottables', '_select_plottables', (['meas_param'], {}), '(meas_param)\n', (1658, 1670), False, 'from qdev_wrappers.sweep_functions import _do_measurement, _do_measurement_single, _select_plottables\n'), ((1689, 1754), 'qdev_wrappers.sweep_functions._do_measurement', '_do_measurement', (['loop', 'set_params', 'meas_params'], {'do_plots': 'do_plots'}), '(loop, set_params, meas_params, do_plots=do_plots)\n', (1704, 1754), False, 'from qdev_wrappers.sweep_functions import _do_measurement, _do_measurement_single, _select_plottables\n'), ((3149, 3179), 'qdev_wrappers.sweep_functions._select_plottables', '_select_plottables', (['meas_param'], {}), '(meas_param)\n', (3167, 3179), False, 'from qdev_wrappers.sweep_functions import _do_measurement, _do_measurement_single, _select_plottables\n'), ((3198, 3268), 'qdev_wrappers.sweep_functions._do_measurement', '_do_measurement', (['outerloop', 'set_params', 'meas_params'], {'do_plots': 'do_plots'}), '(outerloop, set_params, meas_params, do_plots=do_plots)\n', (3213, 3268), False, 'from qdev_wrappers.sweep_functions import _do_measurement, _do_measurement_single, _select_plottables\n')] |
import os.path
import os
import numpy
from . import common, cgen
"""
References
https://github.com/scikit-learn/scikit-learn/blob/15a949460dbf19e5e196b8ef48f9712b72a3b3c3/sklearn/covariance/_empirical_covariance.py#L297
https://github.com/scikit-learn/scikit-learn/blob/15a949460dbf19e5e196b8ef48f9712b72a3b3c3/sklearn/covariance/_elliptic_envelope.py#L149
"""
from sklearn.mixture._gaussian_mixture import _compute_log_det_cholesky
from sklearn.utils.extmath import row_norms
np = numpy
def squared_mahalanobis_distance(x1, x2, precision):
"""
@precision is the inverted covariance matrix
computes (x1 - x2).T * VI * (x1 - x2)
where VI is the precision matrix, the inverse of the covariance matrix
Loosely based on the scikit-learn implementation,
https://github.com/scikit-learn/scikit-learn/blob/main/sklearn/neighbors/_dist_metrics.pyx
"""
distance = 0.0
size = x1.shape[0]
temp = numpy.zeros(shape=size)
assert x1.shape == x2.shape
assert precision.shape[0] == precision.shape[1]
assert size == precision.shape[0]
for i in range(size):
accumulate = 0
for j in range(size):
accumulate += precision[i, j] * (x1[j] - x2[j])
distance += accumulate * (x1[i] - x2[i])
return distance
def generate_code(means, precision, offset, name='my_elliptic', modifiers='static const'):
n_features = means.shape[0]
decision_boundary = offset # FIXME, check
classifier_name = f'{name}_classifier'
means_name = f'{name}_means'
precisions_name = f'{name}_precisions'
predict_function_name = f'{name}_predict'
includes = '''
// This code is generated by emlearn
#include <eml_distance.h>
'''
pre = '\n\n'.join([
includes,
cgen.array_declare(means_name, n_features, modifiers=modifiers, values=means),
cgen.array_declare(precisions_name, n_features*n_features,
modifiers=modifiers,
values=precision.flatten(order='C'),
),
])
main = f'''
#include <stdio.h>
// Data definitions
{modifiers} EmlEllipticEnvelope {classifier_name} = {{
{n_features},
{decision_boundary},
{means_name},
{precisions_name}
}};
// Prediction function
float {predict_function_name}(const float *features, int n_features) {{
float dist = 0.0;
const int class = eml_elliptic_envelope_predict(&{classifier_name},
features, n_features, &dist);
return dist;
}}
'''
code = pre + main
return code
class Wrapper:
def __init__(self, estimator, classifier='inline', dtype='float'):
self.dtype = dtype
precision = estimator.get_precision()
self._means = estimator.location_.copy()
self._precision = precision
self._offset = estimator.offset_
if classifier == 'inline':
name = 'my_inline_elliptic'
func = '{}_predict(values, length)'.format(name)
code = self.save(name=name)
self.classifier_ = common.CompiledClassifier(code, name=name, call=func, out_dtype='float')
else:
raise ValueError("Unsupported classifier method '{}'".format(classifier))
def mahalanobis(self, X):
def dist(x):
return squared_mahalanobis_distance(x, self._means, precision=self._precision)
p = numpy.array([ dist(x) for x in X ])
predictions = self.classifier_.predict(X)
return predictions
def predict(self, X):
def predict_one(d):
dist = -d
dd = dist - self._offset
is_inlier = 1 if dd > 0 else -1
return is_inlier
distances = self.mahalanobis(X)
return numpy.array([predict_one(d) for d in distances])
def save(self, name=None, file=None):
if name is None:
if file is None:
raise ValueError('Either name or file must be provided')
else:
name = os.path.splitext(os.path.basename(file))[0]
code = generate_code(self._means, self._precision, self._offset, name=name)
if file:
with open(file, 'w') as f:
f.write(code)
return code
| [
"numpy.zeros",
"os.path.basename"
] | [((942, 965), 'numpy.zeros', 'numpy.zeros', ([], {'shape': 'size'}), '(shape=size)\n', (953, 965), False, 'import numpy\n'), ((4073, 4095), 'os.path.basename', 'os.path.basename', (['file'], {}), '(file)\n', (4089, 4095), False, 'import os\n')] |
from db_connection import DbConnection
import random
def load():
db_conn = DbConnection('profiles')
db_conn.execute("drop table if exists profiles")
db_conn.execute("create table profiles (id integer PRIMARY KEY, name text not null, skillset text not null, connection_weight integer not null)")
names_for_profiles = []
all_skills = []
with open('./data/list_of_names.txt', newline='') as f:
names_for_profiles = f.read().splitlines()
with open('./data/list_of_skills.txt', newline='') as f:
all_skills = f.read().splitlines()
id = 1
for name in names_for_profiles:
skill_sample = random.sample(all_skills, 4)
connection_weight = random.randint(0,10)
profile_skills = ','.join(skill_sample)
data_tuple = (id, name, profile_skills, connection_weight)
db_conn.execute("insert into profiles values (?, ?, ?, ?);", data_tuple)
id = id + 1
db_conn.commit()
db_conn.close() | [
"random.sample",
"db_connection.DbConnection",
"random.randint"
] | [((78, 102), 'db_connection.DbConnection', 'DbConnection', (['"""profiles"""'], {}), "('profiles')\n", (90, 102), False, 'from db_connection import DbConnection\n'), ((616, 644), 'random.sample', 'random.sample', (['all_skills', '(4)'], {}), '(all_skills, 4)\n', (629, 644), False, 'import random\n'), ((669, 690), 'random.randint', 'random.randint', (['(0)', '(10)'], {}), '(0, 10)\n', (683, 690), False, 'import random\n')] |
import cv2
# used to scale down the video resolution
# the repo doesn't include vid 480x360 file,
# but you can get if from https://www.youtube.com/watch?v=FtutLA63Cp8
cap = cv2.VideoCapture('bad_apple_480x360.mp4')
fourcc = cv2.VideoWriter_fourcc(*'MP4V')
out = cv2.VideoWriter('bad_apple_48x36.mp4', fourcc, 30, (48, 36)) # output: 30 fps, 48x36
while True:
ret, frame = cap.read()
if ret == True:
b = cv2.resize(frame, (48, 36), fx=0, fy=0, interpolation = cv2.INTER_CUBIC)
out.write(b)
else:
break
cap.release()
out.release()
cv2.destroyAllWindows() | [
"cv2.VideoWriter",
"cv2.destroyAllWindows",
"cv2.VideoCapture",
"cv2.VideoWriter_fourcc",
"cv2.resize"
] | [((184, 225), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""bad_apple_480x360.mp4"""'], {}), "('bad_apple_480x360.mp4')\n", (200, 225), False, 'import cv2\n'), ((238, 269), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'MP4V'"], {}), "(*'MP4V')\n", (260, 269), False, 'import cv2\n'), ((277, 337), 'cv2.VideoWriter', 'cv2.VideoWriter', (['"""bad_apple_48x36.mp4"""', 'fourcc', '(30)', '(48, 36)'], {}), "('bad_apple_48x36.mp4', fourcc, 30, (48, 36))\n", (292, 337), False, 'import cv2\n'), ((598, 621), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (619, 621), False, 'import cv2\n'), ((440, 510), 'cv2.resize', 'cv2.resize', (['frame', '(48, 36)'], {'fx': '(0)', 'fy': '(0)', 'interpolation': 'cv2.INTER_CUBIC'}), '(frame, (48, 36), fx=0, fy=0, interpolation=cv2.INTER_CUBIC)\n', (450, 510), False, 'import cv2\n')] |
#!/usr/bin/env python3
import rospy
from std_msgs.msg import Int32
n = 0
def cb(message):
global n
n = message.data
rospy.init_node('nabe')
sub = rospy.Subscriber('rand_number', Int32, cb)
pub = rospy.Publisher('atu', Int32, queue_size=1)
rate = rospy.Rate(1)
while not rospy.is_shutdown():
if n % 3 == 0 and n != 0:
print('ナベアツ「%d !!!!!」\n' % n)
elif n == 13 or n == 23 or n == 43 or n == 53 or n == 73 or n == 83:
print('ナベアツ「%d !!!!!」\n' % n)
elif n == 31 or n == 32 or n == 34 or n == 35 or n == 37 or n == 38:
print('ナベアツ「%d !!!!!」\n' % n)
pub.publish(n)
rate.sleep()
| [
"rospy.Subscriber",
"rospy.is_shutdown",
"rospy.init_node",
"rospy.Rate",
"rospy.Publisher"
] | [((128, 151), 'rospy.init_node', 'rospy.init_node', (['"""nabe"""'], {}), "('nabe')\n", (143, 151), False, 'import rospy\n'), ((158, 200), 'rospy.Subscriber', 'rospy.Subscriber', (['"""rand_number"""', 'Int32', 'cb'], {}), "('rand_number', Int32, cb)\n", (174, 200), False, 'import rospy\n'), ((207, 250), 'rospy.Publisher', 'rospy.Publisher', (['"""atu"""', 'Int32'], {'queue_size': '(1)'}), "('atu', Int32, queue_size=1)\n", (222, 250), False, 'import rospy\n'), ((258, 271), 'rospy.Rate', 'rospy.Rate', (['(1)'], {}), '(1)\n', (268, 271), False, 'import rospy\n'), ((282, 301), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (299, 301), False, 'import rospy\n')] |
from fuzzy_asteroids.util import Scenario
import numpy as np
# "Simple" Scenarios --------------------------------------------------------------------------------------------------#
# Threat priority tests
threat_test_1 = Scenario(
name="threat_test_1",
asteroid_states=[{"position": (0, 300), "angle": -90.0, "speed": 40},
{"position": (700, 300), "angle": 0.0, "speed": 0},
],
ship_state={"position": (600, 300)},
seed=0
)
threat_test_2 = Scenario(
name="threat_test_2",
asteroid_states=[{"position": (800, 300), "angle": 90.0, "speed": 40},
{"position": (100, 300), "angle": 0.0, "speed": 0},
],
ship_state={"position": (200, 300)},
seed=0
)
threat_test_3 = Scenario(
name="threat_test_3",
asteroid_states=[{"position": (400, 0), "angle": 0.0, "speed": 40},
{"position": (400, 550), "angle": 0.0, "speed": 0},
],
ship_state={"position": (400, 450)},
seed=0
)
threat_test_4 = Scenario(
name="threat_test_4",
asteroid_states=[{"position": (400, 600), "angle": 180.0, "speed": 40},
{"position": (400, 50), "angle": 0.0, "speed": 0},
],
ship_state={"position": (400, 150)},
seed=0
)
# Accuracy tests: a single small (size-1) asteroid moving at speed 120; the
# ship must hit it from various relative positions and headings.

def _accuracy_scenario(index, asteroid_position, asteroid_angle,
                       ship_position=(400, 100), ship_angle=None):
    """Build one accuracy-test scenario.

    :param index: numeric suffix for the scenario name ("accuracy_test_<index>")
    :param asteroid_position: starting position of the lone asteroid
    :param asteroid_angle: heading of the asteroid, degrees
    :param ship_position: starting position of the ship
    :param ship_angle: ship heading in degrees; when None the "angle" key is
        omitted so the game engine uses its default
    """
    ship_state = {"position": ship_position}
    if ship_angle is not None:
        ship_state["angle"] = ship_angle
    return Scenario(
        name="accuracy_test_{}".format(index),
        asteroid_states=[{"position": asteroid_position,
                          "angle": asteroid_angle,
                          "speed": 120,
                          "size": 1}],
        ship_state=ship_state,
        seed=0,
    )

accuracy_test_1 = _accuracy_scenario(1, (400, 500), 90.0)
accuracy_test_2 = _accuracy_scenario(2, (400, 500), -90.0)
accuracy_test_3 = _accuracy_scenario(3, (100, 100), 0.0)
accuracy_test_4 = _accuracy_scenario(4, (700, 100), 0.0)
accuracy_test_5 = _accuracy_scenario(5, (100, 500), 180.0)
accuracy_test_6 = _accuracy_scenario(6, (700, 500), 180.0)
accuracy_test_7 = _accuracy_scenario(7, (400, 500), 180.0, ship_angle=90.0)
accuracy_test_8 = _accuracy_scenario(8, (400, 500), 180.0, ship_angle=-90.0)
accuracy_test_9 = _accuracy_scenario(9, (100, 500), -135.0,
                                     ship_position=(700, 100), ship_angle=-90.0)
accuracy_test_10 = _accuracy_scenario(10, (700, 500), 135.0,
                                      ship_position=(100, 100), ship_angle=90.0)
# "Easy" wall scenario with default ship state, starts on left and moves right
wall_left_easy = Scenario(
name="wall_left_easy",
asteroid_states=[{"position": (0, 100), "angle": -90.0, "speed": 60},
{"position": (0, 200), "angle": -90.0, "speed": 60},
{"position": (0, 300), "angle": -90.0, "speed": 60},
{"position": (0, 400), "angle": -90.0, "speed": 60},
{"position": (0, 500), "angle": -90.0, "speed": 60},
],
ship_state={"position": (400, 300)},
seed=0
)
# "Easy" wall scenario with default ship state, starts on right and moves left
wall_right_easy = Scenario(
name="wall_right_easy",
asteroid_states=[{"position": (800, 100), "angle": 90.0, "speed": 60},
{"position": (800, 200), "angle": 90.0, "speed": 60},
{"position": (800, 300), "angle": 90.0, "speed": 60},
{"position": (800, 400), "angle": 90.0, "speed": 60},
{"position": (800, 500), "angle": 90.0, "speed": 60},
],
ship_state={"position": (400, 300)},
seed=0
)
# "Easy" wall scenario with default ship state, starts at the top and moves downward
wall_top_easy = Scenario(
name="wall_top_easy",
asteroid_states=[{"position": (100, 600), "angle": 180.0, "speed": 60},
{"position": (200, 600), "angle": 180.0, "speed": 60},
{"position": (300, 600), "angle": 180.0, "speed": 60},
{"position": (400, 600), "angle": 180.0, "speed": 60},
{"position": (500, 600), "angle": 180.0, "speed": 60},
{"position": (600, 600), "angle": 180.0, "speed": 60},
{"position": (700, 600), "angle": 180.0, "speed": 60},
],
ship_state={"position": (400, 300)},
seed=0
)
# "Easy" wall scenario with default ship state, starts at the top and moves downward
wall_bottom_easy = Scenario(
name="wall_bottom_easy",
asteroid_states=[{"position": (100, 0), "angle": 0.0, "speed": 60},
{"position": (200, 0), "angle": 0.0, "speed": 60},
{"position": (300, 0), "angle": 0.0, "speed": 60},
{"position": (400, 0), "angle": 0.0, "speed": 60},
{"position": (500, 0), "angle": 0.0, "speed": 60},
{"position": (600, 0), "angle": 0.0, "speed": 60},
{"position": (700, 0), "angle": 0.0, "speed": 60},
],
ship_state={"position": (400, 300)},
seed=0
)
# Ring scenarios ------------------------------------------------------------------------------------------------------#
# Scenarios where asteroids are arranged on a circle around the ship.

def _ring_states(radius, theta, rotation, speed, center=(400, 300)):
    """Asteroid states placed on a circle of *radius* around *center*.

    Each asteroid sits at polar angle ``t + rotation`` for each ``t`` in
    *theta*; its heading is ``90 + t`` degrees (independent of *rotation*,
    which only turns where the gap of an open ring points).
    """
    states = []
    for t in theta:
        px = radius * np.cos(t + rotation) + center[0]
        py = radius * np.sin(t + rotation) + center[1]
        states.append({"position": (px, py),
                       "angle": 90 + t * 180 / np.pi,
                       "speed": speed})
    return states

# Full ring of 16 asteroids (radius 300) closing in on the ship at the centre.
ring_closing = Scenario(
    name="ring_closing",
    asteroid_states=_ring_states(300, np.linspace(0, 2 * np.pi, 17)[:-1], 0.0, 30),
    ship_state={"position": (400, 300)},
    seed=0,
)

# Static rings: 14 stationary asteroids (radius 150) leaving a gap; the
# rotation argument turns the ring so the gap faces the named side.
_static_ring_theta = np.linspace(0, 2 * np.pi, 17)[1:-2]

ring_static_left = Scenario(
    name="ring_static_left",
    asteroid_states=_ring_states(150, _static_ring_theta, np.pi, 0),
    ship_state={"position": (400, 300)},
    seed=0,
)

ring_static_right = Scenario(
    name="ring_static_right",
    asteroid_states=_ring_states(150, _static_ring_theta, 0.0, 0),
    ship_state={"position": (400, 300)},
    seed=0,
)

ring_static_top = Scenario(
    name="ring_static_top",
    asteroid_states=_ring_states(150, _static_ring_theta, np.pi / 2, 0),
    ship_state={"position": (400, 300)},
    seed=0,
)

ring_static_bottom = Scenario(
    name="ring_static_bottom",
    asteroid_states=_ring_states(150, _static_ring_theta, 3 * np.pi / 2, 0),
    ship_state={"position": (400, 300)},
    seed=0,
)
# ---------------------------------------------------------------------------------------------------------------------#
# Normal corridor scenarios -------------------------------------------------------------------------------------------#
# The ship sits in a horizontal corridor (y strictly between 200 and 400 is
# kept clear) through a static asteroid field and must shoot its way out.
x = np.linspace(0, 800, 17)
y = np.concatenate((np.linspace(0, 200, 5), np.linspace(400, 600, 5)))
# Shared static field; both scenarios reference the same state dicts, matching
# the original code which reused the list between them.
_corridor_grid_lr = [{"position": (xx, yy), "angle": 0.0, "speed": 0}
                     for xx in x for yy in y]

# Corridor blocked on the left by two static asteroids; ship starts on the right.
corridor_left = Scenario(
    name="corridor_left",
    asteroid_states=_corridor_grid_lr + [
        {"position": (50, 266), "angle": -90.0, "speed": 0},
        {"position": (50, 332), "angle": -90.0, "speed": 0},
    ],
    ship_state={"position": (700, 300)},
    seed=0,
)

# Corridor blocked on the right by two slow (speed-20) asteroids; ship on the left.
corridor_right = Scenario(
    name="corridor_right",
    asteroid_states=_corridor_grid_lr + [
        {"position": (800, 266), "angle": 90.0, "speed": 20},
        {"position": (800, 332), "angle": 90.0, "speed": 20},
    ],
    ship_state={"position": (100, 300)},
    seed=0,
)
# Corridor top / bottom scenarios: vertical corridor (x strictly between 300
# and 500 is kept clear) through a static asteroid field.
x = np.concatenate((np.linspace(0, 300, 7), np.linspace(500, 800, 7)))
y = np.linspace(0, 600, 13)
# Shared static field; both scenarios reference the same state dicts, matching
# the original code which reused the list between them.
_corridor_grid_tb = [{"position": (xx, yy), "angle": 0.0, "speed": 0}
                     for xx in x for yy in y]

# Corridor blocked at the top (y = 600) by two speed-20 asteroids heading down
# (angle 180); ship starts near the bottom of the corridor.
corridor_top = Scenario(
    name="corridor_top",
    asteroid_states=_corridor_grid_tb + [
        {"position": (366, 600), "angle": 180.0, "speed": 20},
        {"position": (432, 600), "angle": 180.0, "speed": 20},
    ],
    ship_state={"position": (400, 100)},
    seed=0,
)

# Corridor blocked at the bottom (y = 0) by two speed-20 asteroids (angle 0);
# ship starts near the top of the corridor.
corridor_bottom = Scenario(
    name="corridor_bottom",
    asteroid_states=_corridor_grid_tb + [
        {"position": (366, 0), "angle": 0.0, "speed": 20},
        {"position": (432, 0), "angle": 0.0, "speed": 20},
    ],
    ship_state={"position": (400, 500)},
    seed=0,
)
# ---------------------------------------------------------------------------------------------------------------------#
# Moving Corridor Scenarios -------------------------------------------------------------------------------------------#
# Fast (speed-120) asteroid fields with a clear corridor; the whole field
# streams in one direction while the ship sits inside the corridor.

# Horizontal corridor (y between 200 and 400 clear); field streams rightwards (angle -90).
x = np.linspace(0, 800, 17)
y = np.concatenate((np.linspace(0, 200, 5), np.linspace(400, 600, 5)))
moving_corridor_1 = Scenario(
    name="moving_corridor_1",
    asteroid_states=[{"position": (xx, yy), "angle": -90.0, "speed": 120}
                     for xx in x for yy in y],
    ship_state={"position": (400, 300), "angle": 90},
    seed=0,
)

# Same horizontal corridor; field streams leftwards (angle 90).
moving_corridor_2 = Scenario(
    name="moving_corridor_2",
    asteroid_states=[{"position": (xx, yy), "angle": 90.0, "speed": 120}
                     for xx in x for yy in y],
    ship_state={"position": (400, 300), "angle": -90},
    seed=0,
)

# Vertical corridor (x between 300 and 500 clear); field streams downwards (angle 180).
x = np.concatenate((np.linspace(0, 300, 7), np.linspace(500, 800, 7)))
y = np.linspace(0, 600, 13)
moving_corridor_3 = Scenario(
    name="moving_corridor_3",
    asteroid_states=[{"position": (xx, yy), "angle": 180.0, "speed": 120}
                     for xx in x for yy in y],
    ship_state={"position": (400, 300), "angle": 0},
    seed=0,
)

# Same vertical corridor; field streams upwards (angle 0).
moving_corridor_4 = Scenario(
    name="moving_corridor_4",
    asteroid_states=[{"position": (xx, yy), "angle": 0.0, "speed": 120}
                     for xx in x for yy in y],
    ship_state={"position": (400, 300), "angle": 180},
    seed=0,
)
# Angled corridor scenarios: a full 17x13 field drifting slowly (speed 30)
# with two diagonal bands of slope +/-1.5 kept clear of asteroids.
x = np.linspace(0, 800, 17)
y = np.linspace(0, 600, 13)

# Bands along y = 1.5x and y = -1.5x + 1200 (meeting near the top centre);
# a grid point is populated only when it is more than 160 from both lines
# (vertical distance).  Equivalent to the original "not (|.| <= 160)" filter.
moving_corridor_angled_1 = Scenario(
    name="moving_corridor_angled_1",
    asteroid_states=[
        {"position": (xx, yy), "angle": -90.0, "speed": 30}
        for xx in x for yy in y
        if abs(1.5 * xx - yy) > 160 and abs(-1.5 * xx + 1200 - yy) > 160
    ],
    ship_state={"position": (750, 50), "angle": 90},
    seed=0,
)

# Mirrored bands along y = -1.5x + 600 and y = 1.5x - 600 (meeting near the
# bottom centre).
moving_corridor_angled_2 = Scenario(
    name="moving_corridor_angled_2",
    asteroid_states=[
        {"position": (xx, yy), "angle": -90.0, "speed": 30}
        for xx in x for yy in y
        if abs(-1.5 * xx + 600 - yy) > 160 and abs(1.5 * xx - 600 - yy) > 160
    ],
    ship_state={"position": (750, 550), "angle": 90},
    seed=0,
)
# Curved corridor scenario 1: 17x13 slow-drifting field; grid points within a
# vertical distance of 200 from the downward parabola
# y = -(x - 400)^2 / 300 + 600 are kept clear, leaving a curved corridor.
x = np.linspace(0, 800, 17)
y = np.linspace(0, 600, 13)
moving_corridor_curve_1 = Scenario(
    name="moving_corridor_curve_1",
    asteroid_states=[
        {"position": (xx, yy), "angle": -90.0, "speed": 30}
        for xx in x for yy in y
        if abs(-(1 / 300) * (xx - 400) ** 2 + 600 - yy) > 200
    ],
    ship_state={"position": (550, 500), "angle": 90},
    seed=0,
)

# Curved corridor scenario 2: denser 30x45 field of small (size-1), fast
# (speed-120) asteroids.  Only points whose vertical offset from the parabola
# y = (x - 400)^2 / 300 lies strictly between 200 and 300 carry asteroids,
# forming a curved shell around the clear corridor.
x = np.linspace(0, 800, 30)
y = np.linspace(0, 600, 45)
moving_corridor_curve_2 = Scenario(
    name="moving_corridor_curve_2",
    asteroid_states=[
        {"position": (xx, yy), "angle": -90.0, "speed": 120, "size": 1}
        for xx in x for yy in y
        if 200 < abs((1 / 300) * (xx - 400) ** 2 - yy) < 300
    ],
    ship_state={"position": (550, 100), "angle": 90},
    seed=0,
)
# ---------------------------------------------------------------------------------------------------------------------#
# Apocalypse scenarios-------------------------------------------------------------------------------------------------#
# Deliberately hard scenario: 50 randomly seeded asteroids, probably not fully
# clearable.  NOTE: the ship can spawn on top of asteroids; that is only
# lethal once it starts firing.
scenario_apocalypse_1 = Scenario(
    name="apocalypse_1",
    num_asteroids=50,
    seed=1,
)
# ---------------------------------------------------------------------------------------------------------------------#
# Forcing wrap scenarios-----------------------------------------------------------------------------------------------#
# Wrap right scenarios: a vertical wall of asteroids (y = 0..600 in steps of
# 100) sweeps rightwards (angle -90), pushing the ship over the right edge.
wall_right_wrap_1 = Scenario(
    name="wall_right_wrap_1",
    # moving wall starting at x = 600, ship near the right edge
    asteroid_states=[{"position": (600, 100 * i), "angle": -90.0, "speed": 80}
                     for i in range(7)],
    ship_state={"position": (750, 300)},
    seed=0,
)

wall_right_wrap_2 = Scenario(
    name="wall_right_wrap_2",
    # moving wall starting at x = 750, ship near the left edge
    asteroid_states=[{"position": (750, 100 * i), "angle": -90.0, "speed": 80}
                     for i in range(7)],
    ship_state={"position": (50, 300)},
    seed=0,
)

wall_right_wrap_3 = Scenario(
    name="wall_right_wrap_3",
    # moving wall at x = 600 plus a static wall at x = 200 blocking retreat
    asteroid_states=(
        [{"position": (600, 100 * i), "angle": -90.0, "speed": 80} for i in range(7)]
        + [{"position": (200, 100 * i), "angle": -90.0, "speed": 0} for i in range(7)]
    ),
    ship_state={"position": (750, 300)},
    seed=0,
)

wall_right_wrap_4 = Scenario(
    name="wall_right_wrap_4",
    # moving wall at x = 750 plus a static wall at x = 200
    asteroid_states=(
        [{"position": (750, 100 * i), "angle": -90.0, "speed": 80} for i in range(7)]
        + [{"position": (200, 100 * i), "angle": -90.0, "speed": 0} for i in range(7)]
    ),
    ship_state={"position": (50, 300)},
    seed=0,
)
# Wrap left scenarios: a vertical wall of asteroids (y = 0..600 in steps of
# 100) sweeps leftwards (angle 90), pushing the ship over the left edge.
wall_left_wrap_1 = Scenario(
    name="wall_left_wrap_1",
    # moving wall starting at x = 200, ship near the left edge
    asteroid_states=[{"position": (200, 100 * i), "angle": 90.0, "speed": 80}
                     for i in range(7)],
    ship_state={"position": (50, 300)},
    seed=0,
)

wall_left_wrap_2 = Scenario(
    name="wall_left_wrap_2",
    # moving wall starting at x = 50, ship near the right edge
    asteroid_states=[{"position": (50, 100 * i), "angle": 90.0, "speed": 80}
                     for i in range(7)],
    ship_state={"position": (750, 300)},
    seed=0,
)

wall_left_wrap_3 = Scenario(
    name="wall_left_wrap_3",
    # moving wall at x = 200 plus a static wall at x = 600 blocking retreat
    asteroid_states=(
        [{"position": (200, 100 * i), "angle": 90.0, "speed": 80} for i in range(7)]
        + [{"position": (600, 100 * i), "angle": -90.0, "speed": 0} for i in range(7)]
    ),
    ship_state={"position": (50, 300)},
    seed=0,
)

wall_left_wrap_4 = Scenario(
    name="wall_left_wrap_4",
    # moving wall at x = 50 plus a static wall at x = 600
    asteroid_states=(
        [{"position": (50, 100 * i), "angle": 90.0, "speed": 80} for i in range(7)]
        + [{"position": (600, 100 * i), "angle": -90.0, "speed": 0} for i in range(7)]
    ),
    ship_state={"position": (750, 300)},
    seed=0,
)
# Wrap top scenarios: a horizontal wall of asteroids (x = 0..800 in steps of
# 100) moves at angle 0 from y = 400 towards the top edge, forcing a wrap.
wall_top_wrap_1 = Scenario(
    name="wall_top_wrap_1",
    # moving wall at y = 400, ship near the top edge
    asteroid_states=[{"position": (100 * i, 400), "angle": 0.0, "speed": 80}
                     for i in range(9)],
    ship_state={"position": (400, 550)},
    seed=0,
)

wall_top_wrap_2 = Scenario(
    name="wall_top_wrap_2",
    # same moving wall, ship near the bottom edge
    asteroid_states=[{"position": (100 * i, 400), "angle": 0.0, "speed": 80}
                     for i in range(9)],
    ship_state={"position": (400, 50)},
    seed=0,
)

wall_top_wrap_3 = Scenario(
    name="wall_top_wrap_3",
    # moving wall at y = 400 plus a static wall at y = 200 blocking retreat
    asteroid_states=(
        [{"position": (100 * i, 400), "angle": 0.0, "speed": 80} for i in range(9)]
        + [{"position": (100 * i, 200), "angle": 0.0, "speed": 0} for i in range(9)]
    ),
    ship_state={"position": (400, 550)},
    seed=0,
)

wall_top_wrap_4 = Scenario(
    name="wall_top_wrap_4",
    # moving wall at y = 400 plus a static wall at y = 200, ship near the bottom
    asteroid_states=(
        [{"position": (100 * i, 400), "angle": 0.0, "speed": 80} for i in range(9)]
        + [{"position": (100 * i, 200), "angle": 0.0, "speed": 0} for i in range(9)]
    ),
    ship_state={"position": (400, 50)},
    seed=0,
)
# Wrap-bottom scenarios: asteroid walls with heading 180.0.
# Row of nine asteroids at y=200 (x = 0..800 in 100-px steps), heading
# 180.0 at speed 80; the ship spawns near the bottom edge.
wall_bottom_wrap_1 = Scenario(
    name="wall_bottom_wrap_1",
    asteroid_states=[
        {"position": (100 * col, 200), "angle": 180.0, "speed": 80}
        for col in range(9)
    ],
    ship_state={"position": (400, 50)},
    seed=0,
)
# Same moving wall as wall_bottom_wrap_1 (y=200, heading 180.0, speed 80),
# but with the ship spawning near the top edge at (400, 550).
wall_bottom_wrap_2 = Scenario(
    name="wall_bottom_wrap_2",
    asteroid_states=[
        {"position": (100 * col, 200), "angle": 180.0, "speed": 80}
        for col in range(9)
    ],
    ship_state={"position": (400, 550)},
    seed=0,
)
# Moving wall at y=200 (heading 180.0, speed 80) plus a stationary row at
# y=400; the ship spawns near the bottom edge at (400, 50).
wall_bottom_wrap_3 = Scenario(
    name="wall_bottom_wrap_3",
    asteroid_states=(
        [{"position": (100 * col, 200), "angle": 180.0, "speed": 80}
         for col in range(9)]
        + [{"position": (100 * col, 400), "angle": 0.0, "speed": 0}
           for col in range(9)]
    ),
    ship_state={"position": (400, 50)},
    seed=0,
)
# Same field as wall_bottom_wrap_3 (moving wall at y=200, stationary row
# at y=400), but with the ship spawning near the top edge at (400, 550).
wall_bottom_wrap_4 = Scenario(
    name="wall_bottom_wrap_4",
    asteroid_states=(
        [{"position": (100 * col, 200), "angle": 180.0, "speed": 80}
         for col in range(9)]
        + [{"position": (100 * col, 400), "angle": 0.0, "speed": 0}
           for col in range(9)]
    ),
    ship_state={"position": (400, 550)},
    seed=0,
)
# A large hollow box of stationary asteroids enclosing the play field:
# top edge y=600 (x=100..700), bottom edge y=0 (x=100..800), left edge
# x=0 (y=0..600), right edge x=800 (y=100..600). Ship starts at (400, 300).
# The segment order below reproduces the original hand-written list exactly.
scenario_big_box = Scenario(
    name="big_box",
    asteroid_states=[
        {"position": pos, "angle": 0.0, "speed": 0}
        for pos in (
            [(100 * i, 600) for i in range(1, 8)]
            + [(100 * i, 0) for i in range(1, 9)]
            + [(0, 100 * j) for j in range(7)]
            + [(800, 100 * j) for j in range(1, 7)]
        )
    ],
    ship_state={"position": (400, 300)},
    seed=0,
)
# A smaller hollow box of stationary asteroids (roughly x in 200..600,
# y in 100..500) with the ship starting inside at (400, 300).
# The segment order below reproduces the original hand-written list exactly.
scenario_small_box = Scenario(
    name="small_box",
    asteroid_states=[
        {"position": pos, "angle": 0.0, "speed": 0}
        for pos in (
            [(100 * i, 500) for i in range(2, 6)]
            + [(100 * i, 100) for i in range(2, 7)]
            + [(200, 100 * j) for j in range(2, 5)]
            + [(600, 100 * j) for j in range(2, 6)]
        )
    ],
    ship_state={"position": (400, 300)},
    seed=0,
)
# Two perpendicular corridors of stationary size-2 asteroids: a horizontal
# pair of rows at y=250 and y=350, and a vertical pair of columns at x=350
# and x=450, each broken where the corridors cross so the ship (starting at
# (400, 300)) sits in the intersection. Cells are spaced 50 px apart.
# The segment order below reproduces the original hand-written list exactly
# (including the duplicated (350, 350) and (450, 350) cells).
scenario_2_still_corridors = Scenario(
    name="scenario_2_still_corridors",
    asteroid_states=[
        {"position": pos, "angle": 0.0, "speed": 0, "size": 2}
        for pos in (
            [(50 * i, 250) for i in range(8)]
            + [(50 * i, 350) for i in range(8)]
            + [(50 * i, 250) for i in range(9, 17)]
            + [(50 * i, 350) for i in range(9, 17)]
            + [(350, 50 * j) for j in range(5)]
            + [(450, 50 * j) for j in range(5)]
            + [(350, 50 * j) for j in range(7, 13)]
            + [(450, 50 * j) for j in range(7, 13)]
        )
    ],
    ship_state={"position": (400, 300)},
    seed=0,
)
| [
"numpy.linspace",
"numpy.cos",
"numpy.sin",
"numpy.meshgrid",
"fuzzy_asteroids.util.Scenario"
] | [((224, 430), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""threat_test_1"""', 'asteroid_states': "[{'position': (0, 300), 'angle': -90.0, 'speed': 40}, {'position': (700, \n 300), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (600, 300)}", 'seed': '(0)'}), "(name='threat_test_1', asteroid_states=[{'position': (0, 300),\n 'angle': -90.0, 'speed': 40}, {'position': (700, 300), 'angle': 0.0,\n 'speed': 0}], ship_state={'position': (600, 300)}, seed=0)\n", (232, 430), False, 'from fuzzy_asteroids.util import Scenario\n'), ((502, 709), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""threat_test_2"""', 'asteroid_states': "[{'position': (800, 300), 'angle': 90.0, 'speed': 40}, {'position': (100, \n 300), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (200, 300)}", 'seed': '(0)'}), "(name='threat_test_2', asteroid_states=[{'position': (800, 300),\n 'angle': 90.0, 'speed': 40}, {'position': (100, 300), 'angle': 0.0,\n 'speed': 0}], ship_state={'position': (200, 300)}, seed=0)\n", (510, 709), False, 'from fuzzy_asteroids.util import Scenario\n'), ((781, 985), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""threat_test_3"""', 'asteroid_states': "[{'position': (400, 0), 'angle': 0.0, 'speed': 40}, {'position': (400, 550),\n 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 450)}", 'seed': '(0)'}), "(name='threat_test_3', asteroid_states=[{'position': (400, 0),\n 'angle': 0.0, 'speed': 40}, {'position': (400, 550), 'angle': 0.0,\n 'speed': 0}], ship_state={'position': (400, 450)}, seed=0)\n", (789, 985), False, 'from fuzzy_asteroids.util import Scenario\n'), ((1057, 1264), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""threat_test_4"""', 'asteroid_states': "[{'position': (400, 600), 'angle': 180.0, 'speed': 40}, {'position': (400, \n 50), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 150)}", 'seed': '(0)'}), "(name='threat_test_4', asteroid_states=[{'position': 
(400, 600),\n 'angle': 180.0, 'speed': 40}, {'position': (400, 50), 'angle': 0.0,\n 'speed': 0}], ship_state={'position': (400, 150)}, seed=0)\n", (1065, 1264), False, 'from fuzzy_asteroids.util import Scenario\n'), ((1356, 1525), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_1"""', 'asteroid_states': "[{'position': (400, 500), 'angle': 90.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100)}", 'seed': '(0)'}), "(name='accuracy_test_1', asteroid_states=[{'position': (400, 500),\n 'angle': 90.0, 'speed': 120, 'size': 1}], ship_state={'position': (400,\n 100)}, seed=0)\n", (1364, 1525), False, 'from fuzzy_asteroids.util import Scenario\n'), ((1578, 1748), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_2"""', 'asteroid_states': "[{'position': (400, 500), 'angle': -90.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100)}", 'seed': '(0)'}), "(name='accuracy_test_2', asteroid_states=[{'position': (400, 500),\n 'angle': -90.0, 'speed': 120, 'size': 1}], ship_state={'position': (400,\n 100)}, seed=0)\n", (1586, 1748), False, 'from fuzzy_asteroids.util import Scenario\n'), ((1801, 1970), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_3"""', 'asteroid_states': "[{'position': (100, 100), 'angle': 0.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100)}", 'seed': '(0)'}), "(name='accuracy_test_3', asteroid_states=[{'position': (100, 100),\n 'angle': 0.0, 'speed': 120, 'size': 1}], ship_state={'position': (400, \n 100)}, seed=0)\n", (1809, 1970), False, 'from fuzzy_asteroids.util import Scenario\n'), ((2022, 2191), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_4"""', 'asteroid_states': "[{'position': (700, 100), 'angle': 0.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100)}", 'seed': '(0)'}), "(name='accuracy_test_4', asteroid_states=[{'position': (700, 100),\n 'angle': 0.0, 'speed': 120, 
'size': 1}], ship_state={'position': (400, \n 100)}, seed=0)\n", (2030, 2191), False, 'from fuzzy_asteroids.util import Scenario\n'), ((2243, 2413), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_5"""', 'asteroid_states': "[{'position': (100, 500), 'angle': 180.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100)}", 'seed': '(0)'}), "(name='accuracy_test_5', asteroid_states=[{'position': (100, 500),\n 'angle': 180.0, 'speed': 120, 'size': 1}], ship_state={'position': (400,\n 100)}, seed=0)\n", (2251, 2413), False, 'from fuzzy_asteroids.util import Scenario\n'), ((2466, 2636), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_6"""', 'asteroid_states': "[{'position': (700, 500), 'angle': 180.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100)}", 'seed': '(0)'}), "(name='accuracy_test_6', asteroid_states=[{'position': (700, 500),\n 'angle': 180.0, 'speed': 120, 'size': 1}], ship_state={'position': (400,\n 100)}, seed=0)\n", (2474, 2636), False, 'from fuzzy_asteroids.util import Scenario\n'), ((2689, 2874), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_7"""', 'asteroid_states': "[{'position': (400, 500), 'angle': 180.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100), 'angle': 90.0}", 'seed': '(0)'}), "(name='accuracy_test_7', asteroid_states=[{'position': (400, 500),\n 'angle': 180.0, 'speed': 120, 'size': 1}], ship_state={'position': (400,\n 100), 'angle': 90.0}, seed=0)\n", (2697, 2874), False, 'from fuzzy_asteroids.util import Scenario\n'), ((2927, 3113), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_8"""', 'asteroid_states': "[{'position': (400, 500), 'angle': 180.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (400, 100), 'angle': -90.0}", 'seed': '(0)'}), "(name='accuracy_test_8', asteroid_states=[{'position': (400, 500),\n 'angle': 180.0, 'speed': 120, 'size': 1}], 
ship_state={'position': (400,\n 100), 'angle': -90.0}, seed=0)\n", (2935, 3113), False, 'from fuzzy_asteroids.util import Scenario\n'), ((3166, 3354), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_9"""', 'asteroid_states': "[{'position': (100, 500), 'angle': -135.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (700, 100), 'angle': -90.0}", 'seed': '(0)'}), "(name='accuracy_test_9', asteroid_states=[{'position': (100, 500),\n 'angle': -135.0, 'speed': 120, 'size': 1}], ship_state={'position': (\n 700, 100), 'angle': -90.0}, seed=0)\n", (3174, 3354), False, 'from fuzzy_asteroids.util import Scenario\n'), ((3407, 3593), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""accuracy_test_10"""', 'asteroid_states': "[{'position': (700, 500), 'angle': 135.0, 'speed': 120, 'size': 1}]", 'ship_state': "{'position': (100, 100), 'angle': 90.0}", 'seed': '(0)'}), "(name='accuracy_test_10', asteroid_states=[{'position': (700, 500),\n 'angle': 135.0, 'speed': 120, 'size': 1}], ship_state={'position': (100,\n 100), 'angle': 90.0}, seed=0)\n", (3415, 3593), False, 'from fuzzy_asteroids.util import Scenario\n'), ((3724, 4106), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_left_easy"""', 'asteroid_states': "[{'position': (0, 100), 'angle': -90.0, 'speed': 60}, {'position': (0, 200),\n 'angle': -90.0, 'speed': 60}, {'position': (0, 300), 'angle': -90.0,\n 'speed': 60}, {'position': (0, 400), 'angle': -90.0, 'speed': 60}, {\n 'position': (0, 500), 'angle': -90.0, 'speed': 60}]", 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='wall_left_easy', asteroid_states=[{'position': (0, 100),\n 'angle': -90.0, 'speed': 60}, {'position': (0, 200), 'angle': -90.0,\n 'speed': 60}, {'position': (0, 300), 'angle': -90.0, 'speed': 60}, {\n 'position': (0, 400), 'angle': -90.0, 'speed': 60}, {'position': (0, \n 500), 'angle': -90.0, 'speed': 60}], ship_state={'position': (400, 300)\n }, seed=0)\n", (3732, 
4106), False, 'from fuzzy_asteroids.util import Scenario\n'), ((4307, 4693), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_right_easy"""', 'asteroid_states': "[{'position': (800, 100), 'angle': 90.0, 'speed': 60}, {'position': (800, \n 200), 'angle': 90.0, 'speed': 60}, {'position': (800, 300), 'angle': \n 90.0, 'speed': 60}, {'position': (800, 400), 'angle': 90.0, 'speed': 60\n }, {'position': (800, 500), 'angle': 90.0, 'speed': 60}]", 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='wall_right_easy', asteroid_states=[{'position': (800, 100),\n 'angle': 90.0, 'speed': 60}, {'position': (800, 200), 'angle': 90.0,\n 'speed': 60}, {'position': (800, 300), 'angle': 90.0, 'speed': 60}, {\n 'position': (800, 400), 'angle': 90.0, 'speed': 60}, {'position': (800,\n 500), 'angle': 90.0, 'speed': 60}], ship_state={'position': (400, 300)},\n seed=0)\n", (4315, 4693), False, 'from fuzzy_asteroids.util import Scenario\n'), ((4900, 5405), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_top_easy"""', 'asteroid_states': "[{'position': (100, 600), 'angle': 180.0, 'speed': 60}, {'position': (200, \n 600), 'angle': 180.0, 'speed': 60}, {'position': (300, 600), 'angle': \n 180.0, 'speed': 60}, {'position': (400, 600), 'angle': 180.0, 'speed': \n 60}, {'position': (500, 600), 'angle': 180.0, 'speed': 60}, {'position':\n (600, 600), 'angle': 180.0, 'speed': 60}, {'position': (700, 600),\n 'angle': 180.0, 'speed': 60}]", 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='wall_top_easy', asteroid_states=[{'position': (100, 600),\n 'angle': 180.0, 'speed': 60}, {'position': (200, 600), 'angle': 180.0,\n 'speed': 60}, {'position': (300, 600), 'angle': 180.0, 'speed': 60}, {\n 'position': (400, 600), 'angle': 180.0, 'speed': 60}, {'position': (500,\n 600), 'angle': 180.0, 'speed': 60}, {'position': (600, 600), 'angle': \n 180.0, 'speed': 60}, {'position': (700, 600), 'angle': 180.0, 'speed': \n 60}], 
ship_state={'position': (400, 300)}, seed=0)\n", (4908, 5405), False, 'from fuzzy_asteroids.util import Scenario\n'), ((5651, 6129), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_bottom_easy"""', 'asteroid_states': "[{'position': (100, 0), 'angle': 0.0, 'speed': 60}, {'position': (200, 0),\n 'angle': 0.0, 'speed': 60}, {'position': (300, 0), 'angle': 0.0,\n 'speed': 60}, {'position': (400, 0), 'angle': 0.0, 'speed': 60}, {\n 'position': (500, 0), 'angle': 0.0, 'speed': 60}, {'position': (600, 0),\n 'angle': 0.0, 'speed': 60}, {'position': (700, 0), 'angle': 0.0,\n 'speed': 60}]", 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='wall_bottom_easy', asteroid_states=[{'position': (100, 0),\n 'angle': 0.0, 'speed': 60}, {'position': (200, 0), 'angle': 0.0,\n 'speed': 60}, {'position': (300, 0), 'angle': 0.0, 'speed': 60}, {\n 'position': (400, 0), 'angle': 0.0, 'speed': 60}, {'position': (500, 0),\n 'angle': 0.0, 'speed': 60}, {'position': (600, 0), 'angle': 0.0,\n 'speed': 60}, {'position': (700, 0), 'angle': 0.0, 'speed': 60}],\n ship_state={'position': (400, 300)}, seed=0)\n", (5659, 6129), False, 'from fuzzy_asteroids.util import Scenario\n'), ((6861, 6968), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""ring_closing"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='ring_closing', asteroid_states=ast_states, ship_state={\n 'position': (400, 300)}, seed=0)\n", (6869, 6968), False, 'from fuzzy_asteroids.util import Scenario\n'), ((7423, 7534), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""ring_static_left"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='ring_static_left', asteroid_states=ast_states, ship_state={\n 'position': (400, 300)}, seed=0)\n", (7431, 7534), False, 'from fuzzy_asteroids.util import Scenario\n'), ((7951, 8063), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], 
{'name': '"""ring_static_right"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='ring_static_right', asteroid_states=ast_states, ship_state={\n 'position': (400, 300)}, seed=0)\n", (7959, 8063), False, 'from fuzzy_asteroids.util import Scenario\n'), ((8500, 8610), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""ring_static_top"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='ring_static_top', asteroid_states=ast_states, ship_state={\n 'position': (400, 300)}, seed=0)\n", (8508, 8610), False, 'from fuzzy_asteroids.util import Scenario\n'), ((9061, 9174), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""ring_static_bottom"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='ring_static_bottom', asteroid_states=ast_states, ship_state=\n {'position': (400, 300)}, seed=0)\n", (9069, 9174), False, 'from fuzzy_asteroids.util import Scenario\n'), ((9563, 9589), 'numpy.linspace', 'np.linspace', (['(0)', '(800)', 'num_x'], {}), '(0, 800, num_x)\n', (9574, 9589), True, 'import numpy as np\n'), ((9703, 9749), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (9714, 9749), True, 'import numpy as np\n'), ((10110, 10218), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""corridor_left"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (700, 300)}", 'seed': '(0)'}), "(name='corridor_left', asteroid_states=ast_states, ship_state={\n 'position': (700, 300)}, seed=0)\n", (10118, 10218), False, 'from fuzzy_asteroids.util import Scenario\n'), ((10457, 10566), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""corridor_right"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (100, 300)}", 'seed': '(0)'}), "(name='corridor_right', asteroid_states=ast_states, 
ship_state={\n 'position': (100, 300)}, seed=0)\n", (10465, 10566), False, 'from fuzzy_asteroids.util import Scenario\n'), ((10729, 10755), 'numpy.linspace', 'np.linspace', (['(0)', '(600)', 'num_y'], {}), '(0, 600, num_y)\n', (10740, 10755), True, 'import numpy as np\n'), ((10772, 10818), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (10783, 10818), True, 'import numpy as np\n'), ((11182, 11289), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""corridor_top"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 100)}", 'seed': '(0)'}), "(name='corridor_top', asteroid_states=ast_states, ship_state={\n 'position': (400, 100)}, seed=0)\n", (11190, 11289), False, 'from fuzzy_asteroids.util import Scenario\n'), ((11550, 11660), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""corridor_bottom"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 500)}", 'seed': '(0)'}), "(name='corridor_bottom', asteroid_states=ast_states, ship_state={\n 'position': (400, 500)}, seed=0)\n", (11558, 11660), False, 'from fuzzy_asteroids.util import Scenario\n'), ((11998, 12024), 'numpy.linspace', 'np.linspace', (['(0)', '(800)', 'num_x'], {}), '(0, 800, num_x)\n', (12009, 12024), True, 'import numpy as np\n'), ((12138, 12184), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (12149, 12184), True, 'import numpy as np\n'), ((12377, 12502), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_1"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300), 'angle': 90}", 'seed': '(0)'}), "(name='moving_corridor_1', asteroid_states=ast_states, ship_state={\n 'position': (400, 300), 'angle': 90}, seed=0)\n", (12385, 12502), False, 'from fuzzy_asteroids.util import Scenario\n'), ((12596, 12622), 'numpy.linspace', 
'np.linspace', (['(0)', '(800)', 'num_x'], {}), '(0, 800, num_x)\n', (12607, 12622), True, 'import numpy as np\n'), ((12736, 12782), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (12747, 12782), True, 'import numpy as np\n'), ((12974, 13100), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_2"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300), 'angle': -90}", 'seed': '(0)'}), "(name='moving_corridor_2', asteroid_states=ast_states, ship_state={\n 'position': (400, 300), 'angle': -90}, seed=0)\n", (12982, 13100), False, 'from fuzzy_asteroids.util import Scenario\n'), ((13292, 13318), 'numpy.linspace', 'np.linspace', (['(0)', '(600)', 'num_y'], {}), '(0, 600, num_y)\n', (13303, 13318), True, 'import numpy as np\n'), ((13335, 13381), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (13346, 13381), True, 'import numpy as np\n'), ((13574, 13698), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_3"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300), 'angle': 0}", 'seed': '(0)'}), "(name='moving_corridor_3', asteroid_states=ast_states, ship_state={\n 'position': (400, 300), 'angle': 0}, seed=0)\n", (13582, 13698), False, 'from fuzzy_asteroids.util import Scenario\n'), ((13888, 13914), 'numpy.linspace', 'np.linspace', (['(0)', '(600)', 'num_y'], {}), '(0, 600, num_y)\n', (13899, 13914), True, 'import numpy as np\n'), ((13931, 13977), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (13942, 13977), True, 'import numpy as np\n'), ((14168, 14294), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_4"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (400, 300), 'angle': 
180}", 'seed': '(0)'}), "(name='moving_corridor_4', asteroid_states=ast_states, ship_state={\n 'position': (400, 300), 'angle': 180}, seed=0)\n", (14176, 14294), False, 'from fuzzy_asteroids.util import Scenario\n'), ((14394, 14420), 'numpy.linspace', 'np.linspace', (['(0)', '(800)', 'num_x'], {}), '(0, 800, num_x)\n', (14405, 14420), True, 'import numpy as np\n'), ((14425, 14451), 'numpy.linspace', 'np.linspace', (['(0)', '(600)', 'num_y'], {}), '(0, 600, num_y)\n', (14436, 14451), True, 'import numpy as np\n'), ((14468, 14514), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (14479, 14514), True, 'import numpy as np\n'), ((14866, 14996), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_angled_1"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (750, 50), 'angle': 90}", 'seed': '(0)'}), "(name='moving_corridor_angled_1', asteroid_states=ast_states,\n ship_state={'position': (750, 50), 'angle': 90}, seed=0)\n", (14874, 14996), False, 'from fuzzy_asteroids.util import Scenario\n'), ((15097, 15123), 'numpy.linspace', 'np.linspace', (['(0)', '(800)', 'num_x'], {}), '(0, 800, num_x)\n', (15108, 15123), True, 'import numpy as np\n'), ((15128, 15154), 'numpy.linspace', 'np.linspace', (['(0)', '(600)', 'num_y'], {}), '(0, 600, num_y)\n', (15139, 15154), True, 'import numpy as np\n'), ((15171, 15217), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (15182, 15217), True, 'import numpy as np\n'), ((15574, 15705), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_angled_2"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (750, 550), 'angle': 90}", 'seed': '(0)'}), "(name='moving_corridor_angled_2', asteroid_states=ast_states,\n ship_state={'position': (750, 550), 'angle': 90}, seed=0)\n", (15582, 15705), False, 
'from fuzzy_asteroids.util import Scenario\n'), ((15806, 15832), 'numpy.linspace', 'np.linspace', (['(0)', '(800)', 'num_x'], {}), '(0, 800, num_x)\n', (15817, 15832), True, 'import numpy as np\n'), ((15837, 15863), 'numpy.linspace', 'np.linspace', (['(0)', '(600)', 'num_y'], {}), '(0, 600, num_y)\n', (15848, 15863), True, 'import numpy as np\n'), ((15880, 15926), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (15891, 15926), True, 'import numpy as np\n'), ((16220, 16350), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_curve_1"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (550, 500), 'angle': 90}", 'seed': '(0)'}), "(name='moving_corridor_curve_1', asteroid_states=ast_states,\n ship_state={'position': (550, 500), 'angle': 90}, seed=0)\n", (16228, 16350), False, 'from fuzzy_asteroids.util import Scenario\n'), ((16451, 16477), 'numpy.linspace', 'np.linspace', (['(0)', '(800)', 'num_x'], {}), '(0, 800, num_x)\n', (16462, 16477), True, 'import numpy as np\n'), ((16482, 16508), 'numpy.linspace', 'np.linspace', (['(0)', '(600)', 'num_y'], {}), '(0, 600, num_y)\n', (16493, 16508), True, 'import numpy as np\n'), ((16525, 16571), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {'sparse': '(False)', 'indexing': '"""ij"""'}), "(x, y, sparse=False, indexing='ij')\n", (16536, 16571), True, 'import numpy as np\n'), ((16964, 17094), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""moving_corridor_curve_2"""', 'asteroid_states': 'ast_states', 'ship_state': "{'position': (550, 100), 'angle': 90}", 'seed': '(0)'}), "(name='moving_corridor_curve_2', asteroid_states=ast_states,\n ship_state={'position': (550, 100), 'angle': 90}, seed=0)\n", (16972, 17094), False, 'from fuzzy_asteroids.util import Scenario\n'), ((17545, 17600), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""apocalypse_1"""', 'num_asteroids': '(50)', 
'seed': '(1)'}), "(name='apocalypse_1', num_asteroids=50, seed=1)\n", (17553, 17600), False, 'from fuzzy_asteroids.util import Scenario\n'), ((17888, 18395), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_right_wrap_1"""', 'asteroid_states': "[{'position': (600, 0), 'angle': -90.0, 'speed': 80}, {'position': (600, \n 100), 'angle': -90.0, 'speed': 80}, {'position': (600, 200), 'angle': -\n 90.0, 'speed': 80}, {'position': (600, 300), 'angle': -90.0, 'speed': \n 80}, {'position': (600, 400), 'angle': -90.0, 'speed': 80}, {'position':\n (600, 500), 'angle': -90.0, 'speed': 80}, {'position': (600, 600),\n 'angle': -90.0, 'speed': 80}]", 'ship_state': "{'position': (750, 300)}", 'seed': '(0)'}), "(name='wall_right_wrap_1', asteroid_states=[{'position': (600, 0),\n 'angle': -90.0, 'speed': 80}, {'position': (600, 100), 'angle': -90.0,\n 'speed': 80}, {'position': (600, 200), 'angle': -90.0, 'speed': 80}, {\n 'position': (600, 300), 'angle': -90.0, 'speed': 80}, {'position': (600,\n 400), 'angle': -90.0, 'speed': 80}, {'position': (600, 500), 'angle': -\n 90.0, 'speed': 80}, {'position': (600, 600), 'angle': -90.0, 'speed': \n 80}], ship_state={'position': (750, 300)}, seed=0)\n", (17896, 18395), False, 'from fuzzy_asteroids.util import Scenario\n'), ((18557, 19063), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_right_wrap_2"""', 'asteroid_states': "[{'position': (750, 0), 'angle': -90.0, 'speed': 80}, {'position': (750, \n 100), 'angle': -90.0, 'speed': 80}, {'position': (750, 200), 'angle': -\n 90.0, 'speed': 80}, {'position': (750, 300), 'angle': -90.0, 'speed': \n 80}, {'position': (750, 400), 'angle': -90.0, 'speed': 80}, {'position':\n (750, 500), 'angle': -90.0, 'speed': 80}, {'position': (750, 600),\n 'angle': -90.0, 'speed': 80}]", 'ship_state': "{'position': (50, 300)}", 'seed': '(0)'}), "(name='wall_right_wrap_2', asteroid_states=[{'position': (750, 0),\n 'angle': -90.0, 'speed': 80}, {'position': (750, 100), 'angle': 
-90.0,\n 'speed': 80}, {'position': (750, 200), 'angle': -90.0, 'speed': 80}, {\n 'position': (750, 300), 'angle': -90.0, 'speed': 80}, {'position': (750,\n 400), 'angle': -90.0, 'speed': 80}, {'position': (750, 500), 'angle': -\n 90.0, 'speed': 80}, {'position': (750, 600), 'angle': -90.0, 'speed': \n 80}], ship_state={'position': (50, 300)}, seed=0)\n", (18565, 19063), False, 'from fuzzy_asteroids.util import Scenario\n'), ((19225, 20134), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_right_wrap_3"""', 'asteroid_states': "[{'position': (600, 0), 'angle': -90.0, 'speed': 80}, {'position': (600, \n 100), 'angle': -90.0, 'speed': 80}, {'position': (600, 200), 'angle': -\n 90.0, 'speed': 80}, {'position': (600, 300), 'angle': -90.0, 'speed': \n 80}, {'position': (600, 400), 'angle': -90.0, 'speed': 80}, {'position':\n (600, 500), 'angle': -90.0, 'speed': 80}, {'position': (600, 600),\n 'angle': -90.0, 'speed': 80}, {'position': (200, 0), 'angle': -90.0,\n 'speed': 0}, {'position': (200, 100), 'angle': -90.0, 'speed': 0}, {\n 'position': (200, 200), 'angle': -90.0, 'speed': 0}, {'position': (200,\n 300), 'angle': -90.0, 'speed': 0}, {'position': (200, 400), 'angle': -\n 90.0, 'speed': 0}, {'position': (200, 500), 'angle': -90.0, 'speed': 0},\n {'position': (200, 600), 'angle': -90.0, 'speed': 0}]", 'ship_state': "{'position': (750, 300)}", 'seed': '(0)'}), "(name='wall_right_wrap_3', asteroid_states=[{'position': (600, 0),\n 'angle': -90.0, 'speed': 80}, {'position': (600, 100), 'angle': -90.0,\n 'speed': 80}, {'position': (600, 200), 'angle': -90.0, 'speed': 80}, {\n 'position': (600, 300), 'angle': -90.0, 'speed': 80}, {'position': (600,\n 400), 'angle': -90.0, 'speed': 80}, {'position': (600, 500), 'angle': -\n 90.0, 'speed': 80}, {'position': (600, 600), 'angle': -90.0, 'speed': \n 80}, {'position': (200, 0), 'angle': -90.0, 'speed': 0}, {'position': (\n 200, 100), 'angle': -90.0, 'speed': 0}, {'position': (200, 200),\n 'angle': -90.0, 
'speed': 0}, {'position': (200, 300), 'angle': -90.0,\n 'speed': 0}, {'position': (200, 400), 'angle': -90.0, 'speed': 0}, {\n 'position': (200, 500), 'angle': -90.0, 'speed': 0}, {'position': (200,\n 600), 'angle': -90.0, 'speed': 0}], ship_state={'position': (750, 300)},\n seed=0)\n", (19233, 20134), False, 'from fuzzy_asteroids.util import Scenario\n'), ((20417, 21325), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_right_wrap_4"""', 'asteroid_states': "[{'position': (750, 0), 'angle': -90.0, 'speed': 80}, {'position': (750, \n 100), 'angle': -90.0, 'speed': 80}, {'position': (750, 200), 'angle': -\n 90.0, 'speed': 80}, {'position': (750, 300), 'angle': -90.0, 'speed': \n 80}, {'position': (750, 400), 'angle': -90.0, 'speed': 80}, {'position':\n (750, 500), 'angle': -90.0, 'speed': 80}, {'position': (750, 600),\n 'angle': -90.0, 'speed': 80}, {'position': (200, 0), 'angle': -90.0,\n 'speed': 0}, {'position': (200, 100), 'angle': -90.0, 'speed': 0}, {\n 'position': (200, 200), 'angle': -90.0, 'speed': 0}, {'position': (200,\n 300), 'angle': -90.0, 'speed': 0}, {'position': (200, 400), 'angle': -\n 90.0, 'speed': 0}, {'position': (200, 500), 'angle': -90.0, 'speed': 0},\n {'position': (200, 600), 'angle': -90.0, 'speed': 0}]", 'ship_state': "{'position': (50, 300)}", 'seed': '(0)'}), "(name='wall_right_wrap_4', asteroid_states=[{'position': (750, 0),\n 'angle': -90.0, 'speed': 80}, {'position': (750, 100), 'angle': -90.0,\n 'speed': 80}, {'position': (750, 200), 'angle': -90.0, 'speed': 80}, {\n 'position': (750, 300), 'angle': -90.0, 'speed': 80}, {'position': (750,\n 400), 'angle': -90.0, 'speed': 80}, {'position': (750, 500), 'angle': -\n 90.0, 'speed': 80}, {'position': (750, 600), 'angle': -90.0, 'speed': \n 80}, {'position': (200, 0), 'angle': -90.0, 'speed': 0}, {'position': (\n 200, 100), 'angle': -90.0, 'speed': 0}, {'position': (200, 200),\n 'angle': -90.0, 'speed': 0}, {'position': (200, 300), 'angle': -90.0,\n 'speed': 0}, 
{'position': (200, 400), 'angle': -90.0, 'speed': 0}, {\n 'position': (200, 500), 'angle': -90.0, 'speed': 0}, {'position': (200,\n 600), 'angle': -90.0, 'speed': 0}], ship_state={'position': (50, 300)},\n seed=0)\n", (20425, 21325), False, 'from fuzzy_asteroids.util import Scenario\n'), ((21629, 22127), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_left_wrap_1"""', 'asteroid_states': "[{'position': (200, 0), 'angle': 90.0, 'speed': 80}, {'position': (200, 100\n ), 'angle': 90.0, 'speed': 80}, {'position': (200, 200), 'angle': 90.0,\n 'speed': 80}, {'position': (200, 300), 'angle': 90.0, 'speed': 80}, {\n 'position': (200, 400), 'angle': 90.0, 'speed': 80}, {'position': (200,\n 500), 'angle': 90.0, 'speed': 80}, {'position': (200, 600), 'angle': \n 90.0, 'speed': 80}]", 'ship_state': "{'position': (50, 300)}", 'seed': '(0)'}), "(name='wall_left_wrap_1', asteroid_states=[{'position': (200, 0),\n 'angle': 90.0, 'speed': 80}, {'position': (200, 100), 'angle': 90.0,\n 'speed': 80}, {'position': (200, 200), 'angle': 90.0, 'speed': 80}, {\n 'position': (200, 300), 'angle': 90.0, 'speed': 80}, {'position': (200,\n 400), 'angle': 90.0, 'speed': 80}, {'position': (200, 500), 'angle': \n 90.0, 'speed': 80}, {'position': (200, 600), 'angle': 90.0, 'speed': 80\n }], ship_state={'position': (50, 300)}, seed=0)\n", (21637, 22127), False, 'from fuzzy_asteroids.util import Scenario\n'), ((22288, 22781), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_left_wrap_2"""', 'asteroid_states': "[{'position': (50, 0), 'angle': 90.0, 'speed': 80}, {'position': (50, 100),\n 'angle': 90.0, 'speed': 80}, {'position': (50, 200), 'angle': 90.0,\n 'speed': 80}, {'position': (50, 300), 'angle': 90.0, 'speed': 80}, {\n 'position': (50, 400), 'angle': 90.0, 'speed': 80}, {'position': (50, \n 500), 'angle': 90.0, 'speed': 80}, {'position': (50, 600), 'angle': \n 90.0, 'speed': 80}]", 'ship_state': "{'position': (750, 300)}", 'seed': '(0)'}), 
"(name='wall_left_wrap_2', asteroid_states=[{'position': (50, 0),\n 'angle': 90.0, 'speed': 80}, {'position': (50, 100), 'angle': 90.0,\n 'speed': 80}, {'position': (50, 200), 'angle': 90.0, 'speed': 80}, {\n 'position': (50, 300), 'angle': 90.0, 'speed': 80}, {'position': (50, \n 400), 'angle': 90.0, 'speed': 80}, {'position': (50, 500), 'angle': \n 90.0, 'speed': 80}, {'position': (50, 600), 'angle': 90.0, 'speed': 80}\n ], ship_state={'position': (750, 300)}, seed=0)\n", (22296, 22781), False, 'from fuzzy_asteroids.util import Scenario\n'), ((22941, 23841), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_left_wrap_3"""', 'asteroid_states': "[{'position': (200, 0), 'angle': 90.0, 'speed': 80}, {'position': (200, 100\n ), 'angle': 90.0, 'speed': 80}, {'position': (200, 200), 'angle': 90.0,\n 'speed': 80}, {'position': (200, 300), 'angle': 90.0, 'speed': 80}, {\n 'position': (200, 400), 'angle': 90.0, 'speed': 80}, {'position': (200,\n 500), 'angle': 90.0, 'speed': 80}, {'position': (200, 600), 'angle': \n 90.0, 'speed': 80}, {'position': (600, 0), 'angle': -90.0, 'speed': 0},\n {'position': (600, 100), 'angle': -90.0, 'speed': 0}, {'position': (600,\n 200), 'angle': -90.0, 'speed': 0}, {'position': (600, 300), 'angle': -\n 90.0, 'speed': 0}, {'position': (600, 400), 'angle': -90.0, 'speed': 0},\n {'position': (600, 500), 'angle': -90.0, 'speed': 0}, {'position': (600,\n 600), 'angle': -90.0, 'speed': 0}]", 'ship_state': "{'position': (50, 300)}", 'seed': '(0)'}), "(name='wall_left_wrap_3', asteroid_states=[{'position': (200, 0),\n 'angle': 90.0, 'speed': 80}, {'position': (200, 100), 'angle': 90.0,\n 'speed': 80}, {'position': (200, 200), 'angle': 90.0, 'speed': 80}, {\n 'position': (200, 300), 'angle': 90.0, 'speed': 80}, {'position': (200,\n 400), 'angle': 90.0, 'speed': 80}, {'position': (200, 500), 'angle': \n 90.0, 'speed': 80}, {'position': (200, 600), 'angle': 90.0, 'speed': 80\n }, {'position': (600, 0), 'angle': -90.0, 'speed': 0}, 
{'position': (\n 600, 100), 'angle': -90.0, 'speed': 0}, {'position': (600, 200),\n 'angle': -90.0, 'speed': 0}, {'position': (600, 300), 'angle': -90.0,\n 'speed': 0}, {'position': (600, 400), 'angle': -90.0, 'speed': 0}, {\n 'position': (600, 500), 'angle': -90.0, 'speed': 0}, {'position': (600,\n 600), 'angle': -90.0, 'speed': 0}], ship_state={'position': (50, 300)},\n seed=0)\n", (22949, 23841), False, 'from fuzzy_asteroids.util import Scenario\n'), ((24123, 25014), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_left_wrap_4"""', 'asteroid_states': "[{'position': (50, 0), 'angle': 90.0, 'speed': 80}, {'position': (50, 100),\n 'angle': 90.0, 'speed': 80}, {'position': (50, 200), 'angle': 90.0,\n 'speed': 80}, {'position': (50, 300), 'angle': 90.0, 'speed': 80}, {\n 'position': (50, 400), 'angle': 90.0, 'speed': 80}, {'position': (50, \n 500), 'angle': 90.0, 'speed': 80}, {'position': (50, 600), 'angle': \n 90.0, 'speed': 80}, {'position': (600, 0), 'angle': -90.0, 'speed': 0},\n {'position': (600, 100), 'angle': -90.0, 'speed': 0}, {'position': (600,\n 200), 'angle': -90.0, 'speed': 0}, {'position': (600, 300), 'angle': -\n 90.0, 'speed': 0}, {'position': (600, 400), 'angle': -90.0, 'speed': 0},\n {'position': (600, 500), 'angle': -90.0, 'speed': 0}, {'position': (600,\n 600), 'angle': -90.0, 'speed': 0}]", 'ship_state': "{'position': (750, 300)}", 'seed': '(0)'}), "(name='wall_left_wrap_4', asteroid_states=[{'position': (50, 0),\n 'angle': 90.0, 'speed': 80}, {'position': (50, 100), 'angle': 90.0,\n 'speed': 80}, {'position': (50, 200), 'angle': 90.0, 'speed': 80}, {\n 'position': (50, 300), 'angle': 90.0, 'speed': 80}, {'position': (50, \n 400), 'angle': 90.0, 'speed': 80}, {'position': (50, 500), 'angle': \n 90.0, 'speed': 80}, {'position': (50, 600), 'angle': 90.0, 'speed': 80},\n {'position': (600, 0), 'angle': -90.0, 'speed': 0}, {'position': (600, \n 100), 'angle': -90.0, 'speed': 0}, {'position': (600, 200), 'angle': -\n 90.0, 
'speed': 0}, {'position': (600, 300), 'angle': -90.0, 'speed': 0},\n {'position': (600, 400), 'angle': -90.0, 'speed': 0}, {'position': (600,\n 500), 'angle': -90.0, 'speed': 0}, {'position': (600, 600), 'angle': -\n 90.0, 'speed': 0}], ship_state={'position': (750, 300)}, seed=0)\n", (24131, 25014), False, 'from fuzzy_asteroids.util import Scenario\n'), ((25319, 25925), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_top_wrap_1"""', 'asteroid_states': "[{'position': (0, 400), 'angle': 0.0, 'speed': 80}, {'position': (100, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (200, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (300, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (400, 400), 'angle': 0.0, 'speed': 80}, {'position': (500, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (600, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (700, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (800, 400), 'angle': 0.0, 'speed': 80}]", 'ship_state': "{'position': (400, 550)}", 'seed': '(0)'}), "(name='wall_top_wrap_1', asteroid_states=[{'position': (0, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (100, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (200, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (300, 400), 'angle': 0.0, 'speed': 80}, {'position': (400, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (500, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (600, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (700, 400), 'angle': 0.0, 'speed': 80}, {'position': (800, \n 400), 'angle': 0.0, 'speed': 80}], ship_state={'position': (400, 550)},\n seed=0)\n", (25327, 25925), False, 'from fuzzy_asteroids.util import Scenario\n'), ((26118, 26723), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_top_wrap_2"""', 'asteroid_states': "[{'position': (0, 400), 'angle': 0.0, 'speed': 80}, {'position': (100, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (200, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (300, 400), 
'angle': 0.0, 'speed': 80}, {\n 'position': (400, 400), 'angle': 0.0, 'speed': 80}, {'position': (500, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (600, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (700, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (800, 400), 'angle': 0.0, 'speed': 80}]", 'ship_state': "{'position': (400, 50)}", 'seed': '(0)'}), "(name='wall_top_wrap_2', asteroid_states=[{'position': (0, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (100, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (200, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (300, 400), 'angle': 0.0, 'speed': 80}, {'position': (400, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (500, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (600, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (700, 400), 'angle': 0.0, 'speed': 80}, {'position': (800, \n 400), 'angle': 0.0, 'speed': 80}], ship_state={'position': (400, 50)},\n seed=0)\n", (26126, 26723), False, 'from fuzzy_asteroids.util import Scenario\n'), ((26916, 28016), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_top_wrap_3"""', 'asteroid_states': "[{'position': (0, 400), 'angle': 0.0, 'speed': 80}, {'position': (100, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (200, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (300, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (400, 400), 'angle': 0.0, 'speed': 80}, {'position': (500, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (600, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (700, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (800, 400), 'angle': 0.0, 'speed': 80}, {'position': (0, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (100, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (200, 200), 'angle': 0.0, 'speed': 0}, {\n 'position': (300, 200), 'angle': 0.0, 'speed': 0}, {'position': (400, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (500, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (600, 200), 'angle': 0.0, 
'speed': 0}, {\n 'position': (700, 200), 'angle': 0.0, 'speed': 0}, {'position': (800, \n 200), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 550)}", 'seed': '(0)'}), "(name='wall_top_wrap_3', asteroid_states=[{'position': (0, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (100, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (200, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (300, 400), 'angle': 0.0, 'speed': 80}, {'position': (400, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (500, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (600, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (700, 400), 'angle': 0.0, 'speed': 80}, {'position': (800, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (0, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (100, 200), 'angle': 0.0, 'speed': 0}, {\n 'position': (200, 200), 'angle': 0.0, 'speed': 0}, {'position': (300, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (400, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (500, 200), 'angle': 0.0, 'speed': 0}, {\n 'position': (600, 200), 'angle': 0.0, 'speed': 0}, {'position': (700, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (800, 200), 'angle': 0.0,\n 'speed': 0}], ship_state={'position': (400, 550)}, seed=0)\n", (26924, 28016), False, 'from fuzzy_asteroids.util import Scenario\n'), ((28370, 29469), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_top_wrap_4"""', 'asteroid_states': "[{'position': (0, 400), 'angle': 0.0, 'speed': 80}, {'position': (100, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (200, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (300, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (400, 400), 'angle': 0.0, 'speed': 80}, {'position': (500, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (600, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (700, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (800, 400), 'angle': 0.0, 'speed': 80}, {'position': (0, \n 200), 'angle': 0.0, 'speed': 0}, 
{'position': (100, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (200, 200), 'angle': 0.0, 'speed': 0}, {\n 'position': (300, 200), 'angle': 0.0, 'speed': 0}, {'position': (400, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (500, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (600, 200), 'angle': 0.0, 'speed': 0}, {\n 'position': (700, 200), 'angle': 0.0, 'speed': 0}, {'position': (800, \n 200), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 50)}", 'seed': '(0)'}), "(name='wall_top_wrap_4', asteroid_states=[{'position': (0, 400),\n 'angle': 0.0, 'speed': 80}, {'position': (100, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (200, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (300, 400), 'angle': 0.0, 'speed': 80}, {'position': (400, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (500, 400), 'angle': 0.0,\n 'speed': 80}, {'position': (600, 400), 'angle': 0.0, 'speed': 80}, {\n 'position': (700, 400), 'angle': 0.0, 'speed': 80}, {'position': (800, \n 400), 'angle': 0.0, 'speed': 80}, {'position': (0, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (100, 200), 'angle': 0.0, 'speed': 0}, {\n 'position': (200, 200), 'angle': 0.0, 'speed': 0}, {'position': (300, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (400, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (500, 200), 'angle': 0.0, 'speed': 0}, {\n 'position': (600, 200), 'angle': 0.0, 'speed': 0}, {'position': (700, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (800, 200), 'angle': 0.0,\n 'speed': 0}], ship_state={'position': (400, 50)}, seed=0)\n", (28378, 29469), False, 'from fuzzy_asteroids.util import Scenario\n'), ((29850, 30475), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_bottom_wrap_1"""', 'asteroid_states': "[{'position': (0, 200), 'angle': 180.0, 'speed': 80}, {'position': (100, \n 200), 'angle': 180.0, 'speed': 80}, {'position': (200, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (300, 200), 'angle': 180.0, 'speed': \n 80}, {'position': 
(400, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (500, 200), 'angle': 180.0, 'speed': 80}, {'position': (600, 200),\n 'angle': 180.0, 'speed': 80}, {'position': (700, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (800, 200), 'angle': 180.0, 'speed': 80}]", 'ship_state': "{'position': (400, 50)}", 'seed': '(0)'}), "(name='wall_bottom_wrap_1', asteroid_states=[{'position': (0, 200),\n 'angle': 180.0, 'speed': 80}, {'position': (100, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (200, 200), 'angle': 180.0, 'speed': 80}, {\n 'position': (300, 200), 'angle': 180.0, 'speed': 80}, {'position': (400,\n 200), 'angle': 180.0, 'speed': 80}, {'position': (500, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (600, 200), 'angle': 180.0, 'speed': \n 80}, {'position': (700, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (800, 200), 'angle': 180.0, 'speed': 80}], ship_state={'position': (400,\n 50)}, seed=0)\n", (29858, 30475), False, 'from fuzzy_asteroids.util import Scenario\n'), ((30672, 31298), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_bottom_wrap_2"""', 'asteroid_states': "[{'position': (0, 200), 'angle': 180.0, 'speed': 80}, {'position': (100, \n 200), 'angle': 180.0, 'speed': 80}, {'position': (200, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (300, 200), 'angle': 180.0, 'speed': \n 80}, {'position': (400, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (500, 200), 'angle': 180.0, 'speed': 80}, {'position': (600, 200),\n 'angle': 180.0, 'speed': 80}, {'position': (700, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (800, 200), 'angle': 180.0, 'speed': 80}]", 'ship_state': "{'position': (400, 550)}", 'seed': '(0)'}), "(name='wall_bottom_wrap_2', asteroid_states=[{'position': (0, 200),\n 'angle': 180.0, 'speed': 80}, {'position': (100, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (200, 200), 'angle': 180.0, 'speed': 80}, {\n 'position': (300, 200), 'angle': 180.0, 'speed': 80}, {'position': (400,\n 200), 
'angle': 180.0, 'speed': 80}, {'position': (500, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (600, 200), 'angle': 180.0, 'speed': \n 80}, {'position': (700, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (800, 200), 'angle': 180.0, 'speed': 80}], ship_state={'position': (400,\n 550)}, seed=0)\n", (30680, 31298), False, 'from fuzzy_asteroids.util import Scenario\n'), ((31495, 32618), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_bottom_wrap_3"""', 'asteroid_states': "[{'position': (0, 200), 'angle': 180.0, 'speed': 80}, {'position': (100, \n 200), 'angle': 180.0, 'speed': 80}, {'position': (200, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (300, 200), 'angle': 180.0, 'speed': \n 80}, {'position': (400, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (500, 200), 'angle': 180.0, 'speed': 80}, {'position': (600, 200),\n 'angle': 180.0, 'speed': 80}, {'position': (700, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (800, 200), 'angle': 180.0, 'speed': 80}, {\n 'position': (0, 400), 'angle': 0.0, 'speed': 0}, {'position': (100, 400\n ), 'angle': 0.0, 'speed': 0}, {'position': (200, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (300, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (400, 400), 'angle': 0.0, 'speed': 0}, {'position': (500, \n 400), 'angle': 0.0, 'speed': 0}, {'position': (600, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (700, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (800, 400), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 50)}", 'seed': '(0)'}), "(name='wall_bottom_wrap_3', asteroid_states=[{'position': (0, 200),\n 'angle': 180.0, 'speed': 80}, {'position': (100, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (200, 200), 'angle': 180.0, 'speed': 80}, {\n 'position': (300, 200), 'angle': 180.0, 'speed': 80}, {'position': (400,\n 200), 'angle': 180.0, 'speed': 80}, {'position': (500, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (600, 200), 'angle': 180.0, 'speed': \n 
80}, {'position': (700, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (800, 200), 'angle': 180.0, 'speed': 80}, {'position': (0, 400),\n 'angle': 0.0, 'speed': 0}, {'position': (100, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (200, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (300, 400), 'angle': 0.0, 'speed': 0}, {'position': (400, \n 400), 'angle': 0.0, 'speed': 0}, {'position': (500, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (600, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (700, 400), 'angle': 0.0, 'speed': 0}, {'position': (800, \n 400), 'angle': 0.0, 'speed': 0}], ship_state={'position': (400, 50)},\n seed=0)\n", (31503, 32618), False, 'from fuzzy_asteroids.util import Scenario\n'), ((32972, 34096), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""wall_bottom_wrap_4"""', 'asteroid_states': "[{'position': (0, 200), 'angle': 180.0, 'speed': 80}, {'position': (100, \n 200), 'angle': 180.0, 'speed': 80}, {'position': (200, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (300, 200), 'angle': 180.0, 'speed': \n 80}, {'position': (400, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (500, 200), 'angle': 180.0, 'speed': 80}, {'position': (600, 200),\n 'angle': 180.0, 'speed': 80}, {'position': (700, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (800, 200), 'angle': 180.0, 'speed': 80}, {\n 'position': (0, 400), 'angle': 0.0, 'speed': 0}, {'position': (100, 400\n ), 'angle': 0.0, 'speed': 0}, {'position': (200, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (300, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (400, 400), 'angle': 0.0, 'speed': 0}, {'position': (500, \n 400), 'angle': 0.0, 'speed': 0}, {'position': (600, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (700, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (800, 400), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 550)}", 'seed': '(0)'}), "(name='wall_bottom_wrap_4', asteroid_states=[{'position': (0, 200),\n 'angle': 180.0, 'speed': 
80}, {'position': (100, 200), 'angle': 180.0,\n 'speed': 80}, {'position': (200, 200), 'angle': 180.0, 'speed': 80}, {\n 'position': (300, 200), 'angle': 180.0, 'speed': 80}, {'position': (400,\n 200), 'angle': 180.0, 'speed': 80}, {'position': (500, 200), 'angle': \n 180.0, 'speed': 80}, {'position': (600, 200), 'angle': 180.0, 'speed': \n 80}, {'position': (700, 200), 'angle': 180.0, 'speed': 80}, {'position':\n (800, 200), 'angle': 180.0, 'speed': 80}, {'position': (0, 400),\n 'angle': 0.0, 'speed': 0}, {'position': (100, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (200, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (300, 400), 'angle': 0.0, 'speed': 0}, {'position': (400, \n 400), 'angle': 0.0, 'speed': 0}, {'position': (500, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (600, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (700, 400), 'angle': 0.0, 'speed': 0}, {'position': (800, \n 400), 'angle': 0.0, 'speed': 0}], ship_state={'position': (400, 550)},\n seed=0)\n", (32980, 34096), False, 'from fuzzy_asteroids.util import Scenario\n'), ((34487, 36091), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""big_box"""', 'asteroid_states': "[{'position': (100, 600), 'angle': 0.0, 'speed': 0}, {'position': (200, 600\n ), 'angle': 0.0, 'speed': 0}, {'position': (300, 600), 'angle': 0.0,\n 'speed': 0}, {'position': (400, 600), 'angle': 0.0, 'speed': 0}, {\n 'position': (500, 600), 'angle': 0.0, 'speed': 0}, {'position': (600, \n 600), 'angle': 0.0, 'speed': 0}, {'position': (700, 600), 'angle': 0.0,\n 'speed': 0}, {'position': (100, 0), 'angle': 0.0, 'speed': 0}, {\n 'position': (200, 0), 'angle': 0.0, 'speed': 0}, {'position': (300, 0),\n 'angle': 0.0, 'speed': 0}, {'position': (400, 0), 'angle': 0.0, 'speed':\n 0}, {'position': (500, 0), 'angle': 0.0, 'speed': 0}, {'position': (600,\n 0), 'angle': 0.0, 'speed': 0}, {'position': (700, 0), 'angle': 0.0,\n 'speed': 0}, {'position': (800, 0), 'angle': 0.0, 'speed': 0}, {\n 'position': (0, 0), 
'angle': 0.0, 'speed': 0}, {'position': (0, 100),\n 'angle': 0.0, 'speed': 0}, {'position': (0, 200), 'angle': 0.0, 'speed':\n 0}, {'position': (0, 300), 'angle': 0.0, 'speed': 0}, {'position': (0, \n 400), 'angle': 0.0, 'speed': 0}, {'position': (0, 500), 'angle': 0.0,\n 'speed': 0}, {'position': (0, 600), 'angle': 0.0, 'speed': 0}, {\n 'position': (800, 100), 'angle': 0.0, 'speed': 0}, {'position': (800, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (800, 300), 'angle': 0.0,\n 'speed': 0}, {'position': (800, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (800, 500), 'angle': 0.0, 'speed': 0}, {'position': (800, \n 600), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='big_box', asteroid_states=[{'position': (100, 600), 'angle':\n 0.0, 'speed': 0}, {'position': (200, 600), 'angle': 0.0, 'speed': 0}, {\n 'position': (300, 600), 'angle': 0.0, 'speed': 0}, {'position': (400, \n 600), 'angle': 0.0, 'speed': 0}, {'position': (500, 600), 'angle': 0.0,\n 'speed': 0}, {'position': (600, 600), 'angle': 0.0, 'speed': 0}, {\n 'position': (700, 600), 'angle': 0.0, 'speed': 0}, {'position': (100, 0\n ), 'angle': 0.0, 'speed': 0}, {'position': (200, 0), 'angle': 0.0,\n 'speed': 0}, {'position': (300, 0), 'angle': 0.0, 'speed': 0}, {\n 'position': (400, 0), 'angle': 0.0, 'speed': 0}, {'position': (500, 0),\n 'angle': 0.0, 'speed': 0}, {'position': (600, 0), 'angle': 0.0, 'speed':\n 0}, {'position': (700, 0), 'angle': 0.0, 'speed': 0}, {'position': (800,\n 0), 'angle': 0.0, 'speed': 0}, {'position': (0, 0), 'angle': 0.0,\n 'speed': 0}, {'position': (0, 100), 'angle': 0.0, 'speed': 0}, {\n 'position': (0, 200), 'angle': 0.0, 'speed': 0}, {'position': (0, 300),\n 'angle': 0.0, 'speed': 0}, {'position': (0, 400), 'angle': 0.0, 'speed':\n 0}, {'position': (0, 500), 'angle': 0.0, 'speed': 0}, {'position': (0, \n 600), 'angle': 0.0, 'speed': 0}, {'position': (800, 100), 'angle': 0.0,\n 'speed': 0}, {'position': (800, 200), 'angle': 
0.0, 'speed': 0}, {\n 'position': (800, 300), 'angle': 0.0, 'speed': 0}, {'position': (800, \n 400), 'angle': 0.0, 'speed': 0}, {'position': (800, 500), 'angle': 0.0,\n 'speed': 0}, {'position': (800, 600), 'angle': 0.0, 'speed': 0}],\n ship_state={'position': (400, 300)}, seed=0)\n", (34495, 36091), False, 'from fuzzy_asteroids.util import Scenario\n'), ((36671, 37652), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""small_box"""', 'asteroid_states': "[{'position': (200, 500), 'angle': 0.0, 'speed': 0}, {'position': (300, 500\n ), 'angle': 0.0, 'speed': 0}, {'position': (400, 500), 'angle': 0.0,\n 'speed': 0}, {'position': (500, 500), 'angle': 0.0, 'speed': 0}, {\n 'position': (200, 100), 'angle': 0.0, 'speed': 0}, {'position': (300, \n 100), 'angle': 0.0, 'speed': 0}, {'position': (400, 100), 'angle': 0.0,\n 'speed': 0}, {'position': (500, 100), 'angle': 0.0, 'speed': 0}, {\n 'position': (600, 100), 'angle': 0.0, 'speed': 0}, {'position': (200, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (200, 300), 'angle': 0.0,\n 'speed': 0}, {'position': (200, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (600, 200), 'angle': 0.0, 'speed': 0}, {'position': (600, \n 300), 'angle': 0.0, 'speed': 0}, {'position': (600, 400), 'angle': 0.0,\n 'speed': 0}, {'position': (600, 500), 'angle': 0.0, 'speed': 0}]", 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='small_box', asteroid_states=[{'position': (200, 500),\n 'angle': 0.0, 'speed': 0}, {'position': (300, 500), 'angle': 0.0,\n 'speed': 0}, {'position': (400, 500), 'angle': 0.0, 'speed': 0}, {\n 'position': (500, 500), 'angle': 0.0, 'speed': 0}, {'position': (200, \n 100), 'angle': 0.0, 'speed': 0}, {'position': (300, 100), 'angle': 0.0,\n 'speed': 0}, {'position': (400, 100), 'angle': 0.0, 'speed': 0}, {\n 'position': (500, 100), 'angle': 0.0, 'speed': 0}, {'position': (600, \n 100), 'angle': 0.0, 'speed': 0}, {'position': (200, 200), 'angle': 0.0,\n 'speed': 0}, {'position': (200, 300), 
'angle': 0.0, 'speed': 0}, {\n 'position': (200, 400), 'angle': 0.0, 'speed': 0}, {'position': (600, \n 200), 'angle': 0.0, 'speed': 0}, {'position': (600, 300), 'angle': 0.0,\n 'speed': 0}, {'position': (600, 400), 'angle': 0.0, 'speed': 0}, {\n 'position': (600, 500), 'angle': 0.0, 'speed': 0}], ship_state={\n 'position': (400, 300)}, seed=0)\n", (36679, 37652), False, 'from fuzzy_asteroids.util import Scenario\n'), ((38022, 41761), 'fuzzy_asteroids.util.Scenario', 'Scenario', ([], {'name': '"""scenario_2_still_corridors"""', 'asteroid_states': "[{'position': (0, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position':\n (50, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (100, 250\n ), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (150, 250),\n 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (200, 250), 'angle':\n 0.0, 'speed': 0, 'size': 2}, {'position': (250, 250), 'angle': 0.0,\n 'speed': 0, 'size': 2}, {'position': (300, 250), 'angle': 0.0, 'speed':\n 0, 'size': 2}, {'position': (350, 250), 'angle': 0.0, 'speed': 0,\n 'size': 2}, {'position': (0, 350), 'angle': 0.0, 'speed': 0, 'size': 2},\n {'position': (50, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (100, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (150, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (200, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (250, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (300, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (500, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (550, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (600, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (650, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (700, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 
'position': (750, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (800, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (500, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (550, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (600, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (650, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (700, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (750, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (800, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 0), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position':\n (350, 50), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (350, 100\n ), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (350, 150),\n 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (350, 200), 'angle':\n 0.0, 'speed': 0, 'size': 2}, {'position': (450, 0), 'angle': 0.0,\n 'speed': 0, 'size': 2}, {'position': (450, 50), 'angle': 0.0, 'speed': \n 0, 'size': 2}, {'position': (450, 100), 'angle': 0.0, 'speed': 0,\n 'size': 2}, {'position': (450, 150), 'angle': 0.0, 'speed': 0, 'size': \n 2}, {'position': (450, 200), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 400), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 450), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 500), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 550), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 600), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 400), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 450), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 500), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 550), 'angle': 
0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 600), 'angle': 0.0, 'speed': 0, 'size': 2}]", 'ship_state': "{'position': (400, 300)}", 'seed': '(0)'}), "(name='scenario_2_still_corridors', asteroid_states=[{'position': (\n 0, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (50, 250),\n 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (100, 250), 'angle':\n 0.0, 'speed': 0, 'size': 2}, {'position': (150, 250), 'angle': 0.0,\n 'speed': 0, 'size': 2}, {'position': (200, 250), 'angle': 0.0, 'speed':\n 0, 'size': 2}, {'position': (250, 250), 'angle': 0.0, 'speed': 0,\n 'size': 2}, {'position': (300, 250), 'angle': 0.0, 'speed': 0, 'size': \n 2}, {'position': (350, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (0, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position':\n (50, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (100, 350\n ), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (150, 350),\n 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (200, 350), 'angle':\n 0.0, 'speed': 0, 'size': 2}, {'position': (250, 350), 'angle': 0.0,\n 'speed': 0, 'size': 2}, {'position': (300, 350), 'angle': 0.0, 'speed':\n 0, 'size': 2}, {'position': (350, 350), 'angle': 0.0, 'speed': 0,\n 'size': 2}, {'position': (450, 250), 'angle': 0.0, 'speed': 0, 'size': \n 2}, {'position': (500, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (550, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (600, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (650, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (700, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (750, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (800, 250), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (500, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (550, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (600, 350), 
'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (650, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (700, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (750, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (800, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 0), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position':\n (350, 50), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (350, 100\n ), 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (350, 150),\n 'angle': 0.0, 'speed': 0, 'size': 2}, {'position': (350, 200), 'angle':\n 0.0, 'speed': 0, 'size': 2}, {'position': (450, 0), 'angle': 0.0,\n 'speed': 0, 'size': 2}, {'position': (450, 50), 'angle': 0.0, 'speed': \n 0, 'size': 2}, {'position': (450, 100), 'angle': 0.0, 'speed': 0,\n 'size': 2}, {'position': (450, 150), 'angle': 0.0, 'speed': 0, 'size': \n 2}, {'position': (450, 200), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 400), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 450), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 500), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 550), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (350, 600), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 350), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 400), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 450), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 500), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 550), 'angle': 0.0, 'speed': 0, 'size': 2}, {\n 'position': (450, 600), 'angle': 0.0, 'speed': 0, 'size': 2}],\n ship_state={'position': (400, 300)}, seed=0)\n", (38030, 41761), False, 'from fuzzy_asteroids.util import Scenario\n'), ((6500, 6529), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(17)'], {}), '(0, 2 * np.pi, 17)\n', (6511, 6529), True, 
'import numpy as np\n'), ((7042, 7071), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(17)'], {}), '(0, 2 * np.pi, 17)\n', (7053, 7071), True, 'import numpy as np\n'), ((7585, 7614), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(17)'], {}), '(0, 2 * np.pi, 17)\n', (7596, 7614), True, 'import numpy as np\n'), ((8112, 8141), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(17)'], {}), '(0, 2 * np.pi, 17)\n', (8123, 8141), True, 'import numpy as np\n'), ((8662, 8691), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(17)'], {}), '(0, 2 * np.pi, 17)\n', (8673, 8691), True, 'import numpy as np\n'), ((6548, 6561), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (6554, 6561), True, 'import numpy as np\n'), ((6601, 6614), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (6607, 6614), True, 'import numpy as np\n'), ((7091, 7112), 'numpy.cos', 'np.cos', (['(angle + np.pi)'], {}), '(angle + np.pi)\n', (7097, 7112), True, 'import numpy as np\n'), ((7152, 7173), 'numpy.sin', 'np.sin', (['(angle + np.pi)'], {}), '(angle + np.pi)\n', (7158, 7173), True, 'import numpy as np\n'), ((7634, 7647), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (7640, 7647), True, 'import numpy as np\n'), ((7687, 7700), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (7693, 7700), True, 'import numpy as np\n'), ((8161, 8186), 'numpy.cos', 'np.cos', (['(angle + np.pi / 2)'], {}), '(angle + np.pi / 2)\n', (8167, 8186), True, 'import numpy as np\n'), ((8226, 8251), 'numpy.sin', 'np.sin', (['(angle + np.pi / 2)'], {}), '(angle + np.pi / 2)\n', (8232, 8251), True, 'import numpy as np\n'), ((8711, 8740), 'numpy.cos', 'np.cos', (['(angle + 3 * np.pi / 2)'], {}), '(angle + 3 * np.pi / 2)\n', (8717, 8740), True, 'import numpy as np\n'), ((8780, 8809), 'numpy.sin', 'np.sin', (['(angle + 3 * np.pi / 2)'], {}), '(angle + 3 * np.pi / 2)\n', (8786, 8809), True, 'import numpy as np\n')] |
from django.db import models
# class Game(models.Model):
# name = models.CharField(max_length=200)
#
# def __str__(self):
# return self.name
class Movie(models.Model):
    """A movie usable as quiz material (quote mode and/or screenshot mode)."""

    # External IMDb identifier — presumably a "tt..." id; TODO confirm format.
    imdb_id = models.CharField(max_length=200, null=True, blank=True)
    name = models.CharField(max_length=200, null=True, blank=True)
    director = models.CharField(max_length=200, null=True, blank=True)
    year = models.IntegerField(null=True, blank=True)
    # Popularity score — source and scale not visible here; verify against importer.
    popularity = models.FloatField(null=True, blank=True)
    # summary = models.TextField(null=True, blank=True)
    # Cover image, stored under MEDIA_ROOT/covers.
    image = models.ImageField(upload_to='covers', null=True, blank=True)
    # Presumably flags whether related Quote / Screenshot rows exist — confirm
    # against the data-import code that sets them.
    has_quote = models.BooleanField(null=True, blank=True)
    has_image = models.BooleanField(default=0, null=True, blank=True)

    def __str__(self):
        return self.name
class Quote(models.Model):
    """A quote belonging to a Movie; deleted together with it (CASCADE)."""

    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    quote_text = models.TextField(null=True, blank=True)

    def __str__(self):
        return self.quote_text
class Screenshot(models.Model):
    """A still image from a Movie, stored under MEDIA_ROOT/screenshot."""

    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    image = models.ImageField(upload_to='screenshot', null=True, blank=True)
    # quote_text = models.TextField(null=True, blank=True)
class Game(models.Model):
    """A quiz session: its host, progress counters and mode settings."""

    name = models.CharField(max_length=200, null=True, blank=True)
    # Creation time, set once when the row is inserted.
    timestamp = models.DateTimeField(auto_now_add=True)
    # Index of the question currently being played.
    current_q = models.IntegerField(null=True, blank=True)
    # Total number of questions in this game.
    nb_q = models.IntegerField(null=True, blank=True)
    host = models.CharField(max_length=200, null=True, blank=True)
    mode = models.CharField(default="quote", max_length=200, null=True, blank=True)
    # "quote" or "image"
    game_mode = models.CharField(max_length=200, null=True, blank=True)
    # 'chill' or an int
    game_mode_debrief = models.CharField(max_length=200, null=True, blank=True)
    # 'chill' or an int

    def __str__(self):
        return self.name
class Question(models.Model):
    """A quote-mode question: three proposed movies, the shown quote, and the
    correct movie (``movie_guessed`` — presumably the right answer; confirm
    against the game logic)."""

    movie1 = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='m1')
    movie2 = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='m2')
    movie3 = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='m3')
    movie_guessed = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='mg')
    quote = models.ForeignKey(Quote, on_delete=models.CASCADE)
    game = models.ForeignKey(Game, on_delete=models.CASCADE, blank=True, null=True)

    def __str__(self):
        return f'{self.movie1.id}_{self.movie2.id}_{self.movie3.id}_{self.quote.id}_{self.movie_guessed.id}'
class QuestionImage(models.Model):
    """An image-mode question: three proposed movies, a set of screenshots,
    and the correct movie."""

    movie1 = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='mi1')
    movie2 = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='mi2')
    movie3 = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='mi3')
    movie_guessed = models.ForeignKey(Movie, on_delete=models.CASCADE, related_name='mig')
    # Serialized list of Screenshot ids — exact format not visible here;
    # confirm against the code that reads/writes it.
    list_image_id = models.TextField(null=True, blank=True)
    game = models.ForeignKey(Game, on_delete=models.CASCADE, blank=True, null=True)

    def __str__(self):
        return f'{self.game}_{self.movie_guessed.id}'
class Answer(models.Model):
    """A player's answer (the movie they picked) to a quote-mode Question."""

    user_id = models.CharField(max_length=200, null=True, blank=True)
    question = models.ForeignKey(Question, on_delete=models.CASCADE)
    movie_prop = models.ForeignKey(Movie, on_delete=models.CASCADE)

    def __str__(self):
        return f'{self.user_id}_{self.question}_{self.movie_prop}'
class AnswerImage(models.Model):
    """A player's answer to an image-mode question, with the score earned."""

    user_id = models.CharField(max_length=200, null=True, blank=True)
    questionimage = models.ForeignKey(QuestionImage, on_delete=models.CASCADE)
    movie_prop = models.ForeignKey(Movie, on_delete=models.CASCADE)
    # Points awarded for this answer — scoring rule not visible here.
    score = models.IntegerField(null=True, blank=True)

    def __str__(self):
        return f'{self.user_id}_{self.questionimage}_{self.movie_prop}'
class Genre(models.Model):
    """A movie genre label, linked to movies through MovieGenre."""

    name = models.CharField(max_length=200, null=True, blank=True)

    def __str__(self):
        return self.name
class Country(models.Model):
    """A production country, linked to movies through MovieCountry."""

    name = models.CharField(max_length=200, null=True, blank=True)

    def __str__(self):
        return self.name
class Player(models.Model):
    """A quiz participant, identified by an external user id and a name."""

    user_id = models.CharField(max_length=200, null=True, blank=True)
    user_name = models.CharField(max_length=200, null=True, blank=True)

    def __str__(self):
        return self.user_name
class Preselect(models.Model):
    """A named, author-curated movie selection to seed games from."""

    name = models.CharField(max_length=200, null=True, blank=True)
    # Serialized list of movie ids — exact format not visible here; confirm
    # against the code that reads/writes it.
    list_movie = models.TextField(null=True, blank=True)
    # Creation time, set once when the row is inserted.
    timestamp = models.DateTimeField(auto_now_add=True)
    author = models.CharField(max_length=200, null=True, blank=True)

    def __str__(self):
        return self.name
class MovieGenre(models.Model):
    """Junction table for the many-to-many Movie <-> Genre relation."""

    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    genre = models.ForeignKey(Genre, on_delete=models.CASCADE)

    def __str__(self):
        return f'{self.movie.id}_{self.genre.id}'
class GamePlayer(models.Model):
    """Junction table for the many-to-many Game <-> Player relation."""

    game = models.ForeignKey(Game, on_delete=models.CASCADE)
    player = models.ForeignKey(Player, on_delete=models.CASCADE)

    def __str__(self):
        return f'{self.game.id}_{self.player.id}'
class MovieCountry(models.Model):
    """Junction table for the many-to-many Movie <-> Country relation."""

    movie = models.ForeignKey(Movie, on_delete=models.CASCADE)
    country = models.ForeignKey(Country, on_delete=models.CASCADE)

    def __str__(self):
        return f'{self.movie.id}_{self.country.id}'
| [
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.CharField"
] | [((203, 258), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (219, 258), False, 'from django.db import models\n'), ((270, 325), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (286, 325), False, 'from django.db import models\n'), ((341, 396), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (357, 396), False, 'from django.db import models\n'), ((408, 450), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (427, 450), False, 'from django.db import models\n'), ((468, 508), 'django.db.models.FloatField', 'models.FloatField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (485, 508), False, 'from django.db import models\n'), ((577, 637), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""covers"""', 'null': '(True)', 'blank': '(True)'}), "(upload_to='covers', null=True, blank=True)\n", (594, 637), False, 'from django.db import models\n'), ((654, 696), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (673, 696), False, 'from django.db import models\n'), ((713, 766), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(0)', 'null': '(True)', 'blank': '(True)'}), '(default=0, null=True, blank=True)\n', (732, 766), False, 'from django.db import models\n'), ((857, 907), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE'}), '(Movie, on_delete=models.CASCADE)\n', (874, 907), False, 'from django.db import models\n'), ((925, 964), 'django.db.models.TextField', 
'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (941, 964), False, 'from django.db import models\n'), ((1066, 1116), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE'}), '(Movie, on_delete=models.CASCADE)\n', (1083, 1116), False, 'from django.db import models\n'), ((1129, 1193), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""screenshot"""', 'null': '(True)', 'blank': '(True)'}), "(upload_to='screenshot', null=True, blank=True)\n", (1146, 1193), False, 'from django.db import models\n'), ((1293, 1348), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (1309, 1348), False, 'from django.db import models\n'), ((1365, 1404), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (1385, 1404), False, 'from django.db import models\n'), ((1421, 1463), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1440, 1463), False, 'from django.db import models\n'), ((1475, 1517), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (1494, 1517), False, 'from django.db import models\n'), ((1529, 1584), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (1545, 1584), False, 'from django.db import models\n'), ((1596, 1668), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""quote"""', 'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), "(default='quote', max_length=200, null=True, blank=True)\n", (1612, 1668), False, 'from django.db import models\n'), ((1710, 1765), 'django.db.models.CharField', 
'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (1726, 1765), False, 'from django.db import models\n'), ((1811, 1866), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (1827, 1866), False, 'from django.db import models\n'), ((1982, 2051), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE', 'related_name': '"""m1"""'}), "(Movie, on_delete=models.CASCADE, related_name='m1')\n", (1999, 2051), False, 'from django.db import models\n'), ((2065, 2134), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE', 'related_name': '"""m2"""'}), "(Movie, on_delete=models.CASCADE, related_name='m2')\n", (2082, 2134), False, 'from django.db import models\n'), ((2148, 2217), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE', 'related_name': '"""m3"""'}), "(Movie, on_delete=models.CASCADE, related_name='m3')\n", (2165, 2217), False, 'from django.db import models\n'), ((2238, 2307), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE', 'related_name': '"""mg"""'}), "(Movie, on_delete=models.CASCADE, related_name='mg')\n", (2255, 2307), False, 'from django.db import models\n'), ((2320, 2370), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Quote'], {'on_delete': 'models.CASCADE'}), '(Quote, on_delete=models.CASCADE)\n', (2337, 2370), False, 'from django.db import models\n'), ((2382, 2454), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Game'], {'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), '(Game, on_delete=models.CASCADE, blank=True, null=True)\n', (2399, 2454), False, 'from django.db import models\n'), ((2637, 2707), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], 
{'on_delete': 'models.CASCADE', 'related_name': '"""mi1"""'}), "(Movie, on_delete=models.CASCADE, related_name='mi1')\n", (2654, 2707), False, 'from django.db import models\n'), ((2721, 2791), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE', 'related_name': '"""mi2"""'}), "(Movie, on_delete=models.CASCADE, related_name='mi2')\n", (2738, 2791), False, 'from django.db import models\n'), ((2805, 2875), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE', 'related_name': '"""mi3"""'}), "(Movie, on_delete=models.CASCADE, related_name='mi3')\n", (2822, 2875), False, 'from django.db import models\n'), ((2896, 2966), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE', 'related_name': '"""mig"""'}), "(Movie, on_delete=models.CASCADE, related_name='mig')\n", (2913, 2966), False, 'from django.db import models\n'), ((2987, 3026), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (3003, 3026), False, 'from django.db import models\n'), ((3038, 3110), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Game'], {'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), '(Game, on_delete=models.CASCADE, blank=True, null=True)\n', (3055, 3110), False, 'from django.db import models\n'), ((3232, 3287), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (3248, 3287), False, 'from django.db import models\n'), ((3303, 3356), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Question'], {'on_delete': 'models.CASCADE'}), '(Question, on_delete=models.CASCADE)\n', (3320, 3356), False, 'from django.db import models\n'), ((3374, 3424), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE'}), '(Movie, 
on_delete=models.CASCADE)\n', (3391, 3424), False, 'from django.db import models\n'), ((3564, 3619), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (3580, 3619), False, 'from django.db import models\n'), ((3640, 3698), 'django.db.models.ForeignKey', 'models.ForeignKey', (['QuestionImage'], {'on_delete': 'models.CASCADE'}), '(QuestionImage, on_delete=models.CASCADE)\n', (3657, 3698), False, 'from django.db import models\n'), ((3716, 3766), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE'}), '(Movie, on_delete=models.CASCADE)\n', (3733, 3766), False, 'from django.db import models\n'), ((3779, 3821), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (3798, 3821), False, 'from django.db import models\n'), ((3957, 4012), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (3973, 4012), False, 'from django.db import models\n'), ((4103, 4158), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (4119, 4158), False, 'from django.db import models\n'), ((4251, 4306), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (4267, 4306), False, 'from django.db import models\n'), ((4323, 4378), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (4339, 4378), False, 'from django.db import models\n'), ((4477, 4532), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': 
'(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (4493, 4532), False, 'from django.db import models\n'), ((4550, 4589), 'django.db.models.TextField', 'models.TextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (4566, 4589), False, 'from django.db import models\n'), ((4606, 4645), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4626, 4645), False, 'from django.db import models\n'), ((4659, 4714), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'null': '(True)', 'blank': '(True)'}), '(max_length=200, null=True, blank=True)\n', (4675, 4714), False, 'from django.db import models\n'), ((4810, 4860), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE'}), '(Movie, on_delete=models.CASCADE)\n', (4827, 4860), False, 'from django.db import models\n'), ((4873, 4923), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Genre'], {'on_delete': 'models.CASCADE'}), '(Genre, on_delete=models.CASCADE)\n', (4890, 4923), False, 'from django.db import models\n'), ((5042, 5091), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Game'], {'on_delete': 'models.CASCADE'}), '(Game, on_delete=models.CASCADE)\n', (5059, 5091), False, 'from django.db import models\n'), ((5105, 5156), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Player'], {'on_delete': 'models.CASCADE'}), '(Player, on_delete=models.CASCADE)\n', (5122, 5156), False, 'from django.db import models\n'), ((5279, 5329), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Movie'], {'on_delete': 'models.CASCADE'}), '(Movie, on_delete=models.CASCADE)\n', (5296, 5329), False, 'from django.db import models\n'), ((5344, 5396), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Country'], {'on_delete': 'models.CASCADE'}), '(Country, on_delete=models.CASCADE)\n', (5361, 5396), False, 'from django.db import 
models\n')] |
# Generated by Django 4.0.3 on 2022-03-15 03:08
from django.db import migrations
class Migration(migrations.Migration):
    """Rename field ``Especies.Nome_Popular`` to ``Nome_popular``."""

    dependencies = [
        ('front_end', '0001_initial'),
    ]

    operations = [
        # Pure rename: only the field/column name changes, no data is touched.
        migrations.RenameField(
            model_name='especies',
            old_name='Nome_Popular',
            new_name='Nome_popular',
        ),
    ]
| [
"django.db.migrations.RenameField"
] | [((230, 329), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""especies"""', 'old_name': '"""Nome_Popular"""', 'new_name': '"""Nome_popular"""'}), "(model_name='especies', old_name='Nome_Popular',\n new_name='Nome_popular')\n", (252, 329), False, 'from django.db import migrations\n')] |
import json
import os
import sys
import time
from os import path as osp
from pathlib import Path
from shutil import copyfile
import numpy as np
import torch
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.utils.data import DataLoader
from tqdm import tqdm
from model_temporal import LSTMSeqNetwork, BilinearLSTMSeqNetwork, TCNSeqNetwork
from utils import load_config, MSEAverageMeter
from data_glob_speed import GlobSpeedSequence, SequenceToSequenceDataset, SenseINSSequence
from transformations import ComposeTransform, RandomHoriRotateSeq
from metric import compute_absolute_trajectory_error, compute_relative_trajectory_error
def WriteList(path, name, folders):
    """Write each entry of *folders* on its own line to ``path/name``.

    The file is created (or overwritten) in the directory *path*.

    Args:
        path: Directory in which to create the file.
        name: File name to create.
        folders: Iterable of strings, one per output line.
    """
    # os.path.join handles path separators portably (the original concatenated
    # with "/"), and the context manager closes the file — the original's
    # explicit f.close() inside the `with` block was redundant.
    with open(os.path.join(path, name), 'w') as f:
        f.writelines(folder + "\n" for folder in folders)
def GetFolderName(path):
    """Return the sorted names of the immediate sub-directories of *path*.

    Args:
        path: Directory to scan.

    Returns:
        Alphabetically sorted list of sub-directory names (names only,
        not full paths); plain files are skipped.
    """
    # The original appended a trailing "/" to the listdir argument and
    # round-tripped through os.path.abspath — both are unnecessary.
    return sorted(
        name for name in os.listdir(path)
        if os.path.isdir(os.path.join(path, name))
    )
'''
Temporal models with loss functions in global coordinate frame
Configurations
    - Model types
        TCN         - type=tcn
        LSTM_simple - type=lstm, lstm_bilinear
'''
# 'file_system' sharing avoids "too many open files" errors when DataLoader
# workers pass many tensors between processes via file descriptors.
torch.multiprocessing.set_sharing_strategy('file_system')
_nano_to_sec = 1e09  # nanoseconds per second
# 6 input channels (presumably gyro + accel — confirm against the dataset
# classes) mapped to a 3-channel global-velocity output.
_input_channel, _output_channel = 6, 3
# _input_channel, _output_channel = 6, 2
device = 'cpu'  # reassigned in train() once CUDA availability is known
class GlobalPosLoss(torch.nn.Module):
    """Position loss in the global coordinate frame.

    Both prediction and target are global-velocity sequences of shape
    (batch, seq, dim). They are integrated over time (cumulative sum,
    skipping the first step) into positions, and the mean squared position
    error is returned.

    Args:
        mode: 'full' compares whole trajectories; 'part' compares
            displacements over a sliding window of ``history`` steps.
        history: Window length in steps; required when ``mode == 'part'``.
    """

    def __init__(self, mode='full', history=None):
        super(GlobalPosLoss, self).__init__()
        assert mode in ['full', 'part']
        self.mse_loss = torch.nn.MSELoss(reduction='none')
        self.mode = mode
        if mode == 'part':
            assert history is not None
            self.history = history
        else:  # 'full'
            self.history = 1

    def forward(self, pred, targ):
        # Integrate velocities (dropping the first time step) into positions.
        pos_true = torch.cumsum(targ[:, 1:], dim=1)
        pos_pred = torch.cumsum(pred[:, 1:], dim=1)
        if self.mode == 'part':
            # Compare displacements accumulated over `history` steps instead
            # of absolute positions.
            pos_true = pos_true[:, self.history:] - pos_true[:, :-self.history]
            pos_pred = pos_pred[:, self.history:] - pos_pred[:, :-self.history]
        return torch.mean(self.mse_loss(pos_pred, pos_true))
def write_config(args, **kwargs):
    """Dump the run configuration to ``<out_dir>/config.json``.

    A no-op when ``args.out_dir`` is falsy.

    Args:
        args: Parsed argparse namespace; its attributes are serialised.
        **kwargs: Extra values recorded under the "kwargs" key.
    """
    if args.out_dir:
        with open(osp.join(args.out_dir, 'config.json'), 'w') as f:
            # Copy the mapping: vars(args) aliases args.__dict__, so the
            # original code silently added a 'file' attribute to args itself.
            values = dict(vars(args))
            values['file'] = "pytorch_global_position"
            if kwargs:
                values['kwargs'] = kwargs
            json.dump(values, f, sort_keys=True)
def get_dataset(root_dir, data_list, args, **kwargs):
    """Build a sequence-to-sequence dataset for training, validation or test.

    Args:
        root_dir: Root directory containing the sequence folders.
        data_list: Sequence (folder) names to load.
        args: Parsed arguments (uses dataset, step_size, window_size, cache_path).
        **kwargs: May contain 'mode' ('train' | 'val' | 'test'; default 'train').

    Returns:
        A SequenceToSequenceDataset over the requested sequences.

    Raises:
        ValueError: If ``args.dataset`` is not 'ronin', 'ridi' or 'sense'.
    """
    input_format, output_format = [0, 3, 6], [0, _output_channel]
    mode = kwargs.get('mode', 'train')

    random_shift, shuffle, transforms, grv_only = 0, False, [], False
    if mode == 'train':
        random_shift = args.step_size // 2
        shuffle = True
        # Random horizontal rotation augments the training data only.
        transforms.append(RandomHoriRotateSeq(input_format, output_format))
    elif mode == 'val':
        shuffle = True
    elif mode == 'test':
        shuffle = False
        grv_only = True
    transforms = ComposeTransform(transforms)

    if args.dataset == 'ronin':
        seq_type = GlobSpeedSequence
    elif args.dataset == 'ridi':
        from data_ridi import RIDIGlobSpeedSequence
        seq_type = RIDIGlobSpeedSequence
    elif args.dataset == 'sense':
        seq_type = SenseINSSequence
    else:
        # Fail fast with a clear message instead of the NameError the
        # original raised when seq_type was left undefined.
        raise ValueError('Unknown dataset: {}'.format(args.dataset))
    dataset = SequenceToSequenceDataset(seq_type, root_dir, data_list, args.cache_path, args.step_size, args.window_size,
                                        random_shift=random_shift, transform=transforms, shuffle=shuffle,
                                        grv_only=grv_only, args=args, **kwargs)
    return dataset
def get_dataset_from_list(root_dir, list_path, args, **kwargs):
    """Read sequence names from *list_path* and build the dataset.

    Each line's first comma-separated field is used as a sequence name;
    lines starting with '#' are treated as comments and skipped.
    """
    names = []
    with open(list_path) as f:
        for line in f:
            if len(line) > 0 and line[0] != '#':
                names.append(line.strip().split(',')[0])
    return get_dataset(root_dir, names, args, **kwargs)
def get_model(args, **kwargs):
    """Construct the sequence network selected by ``args.type``.

    'tcn' builds a TCNSeqNetwork, 'lstm_bi' a BilinearLSTMSeqNetwork, and
    anything else falls back to the plain LSTMSeqNetwork. An optional
    'dropout' kwarg is forwarded to the network constructor. The number of
    trainable parameters is printed before returning.
    """
    extra_cfg = {}
    dropout = kwargs.get('dropout')
    if dropout:
        extra_cfg['dropout'] = dropout

    if args.type == 'tcn':
        network = TCNSeqNetwork(_input_channel, _output_channel, args.kernel_size,
                               layer_channels=args.channels, **extra_cfg)
        print("TCN Network. Receptive field: {} ".format(network.get_receptive_field()))
    elif args.type == 'lstm_bi':
        print("Bilinear LSTM Network")
        network = BilinearLSTMSeqNetwork(_input_channel, _output_channel, args.batch_size, device,
                                        lstm_layers=args.layers, lstm_size=args.layer_size, **extra_cfg).to(device)
    else:
        print("Simple LSTM Network")
        network = LSTMSeqNetwork(_input_channel, _output_channel, args.batch_size, device,
                                lstm_layers=args.layers, lstm_size=args.layer_size, **extra_cfg).to(device)

    n_trainable = sum(p.numel() for p in network.parameters() if p.requires_grad)
    print('Network constructed. trainable parameters: {}'.format(n_trainable))
    return network
def get_loss_function(history, args, **kwargs):
    """Return a GlobalPosLoss configured for the chosen model type.

    TCN models are penalised on windowed displacements ('part' mode, with
    the receptive field *history* as the window); LSTM variants on the
    full integrated trajectory.
    """
    if args.type == 'tcn':
        return GlobalPosLoss(mode='part', history=history)
    return GlobalPosLoss(mode='full')
def format_string(*argv, sep=' '):
    """Join the arguments into a single *sep*-separated string.

    Tuples, lists and numpy arrays are flattened recursively, so nested
    sequences contribute their elements in order; every scalar is
    converted with str().
    """
    pieces = []
    for item in argv:
        if isinstance(item, (tuple, list, np.ndarray)):
            pieces.extend(format_string(elem, sep=sep) for elem in item)
        else:
            pieces.append(str(item))
    return sep.join(pieces)
def train(args, **kwargs):
    """Train the sequence-to-sequence velocity network.

    Loads the training set (and an optional validation set), builds the
    network/loss/optimizer, optionally resumes from ``args.continue_from``,
    then runs the epoch loop. When ``args.out_dir`` is set, checkpoints go to
    ``<out_dir>/checkpoints`` and a text log to ``<out_dir>/logs/log.txt``.

    Args:
        args: parsed command-line namespace (paths, model/training options).
        **kwargs: extra config forwarded to the dataset/model/loss factories;
            keys consumed here: 'quiet', 'use_scheduler', 'force_lr'.
    """
    # Loading data
    start_t = time.time()
    train_dataset = get_dataset_from_list(args.root_dir, args.train_list, args, mode='train', **kwargs)
    train_loader = DataLoader(train_dataset, batch_size=args.batch_size, num_workers=args.num_workers, shuffle=True,
                              drop_last=True)
    end_t = time.time()
    print('Training set loaded. Time usage: {:.3f}s'.format(end_t - start_t))
    val_dataset, val_loader = None, None
    if args.val_list is not None:
        val_dataset = get_dataset_from_list(args.validation_dir, args.val_list, args, mode='val', **kwargs)
        val_loader = DataLoader(val_dataset, batch_size=args.batch_size, shuffle=True, drop_last=True)
        print('Validation set loaded')
    # The module-level `device` global is also read by get_model().
    global device
    device = torch.device(args.device if torch.cuda.is_available() else 'cpu')
    if args.out_dir:
        # Create the output tree (checkpoints/, logs/) and persist the config.
        if not osp.isdir(args.out_dir):
            os.makedirs(args.out_dir)
        if not osp.isdir(osp.join(args.out_dir, 'checkpoints')):
            os.makedirs(osp.join(args.out_dir, 'checkpoints'))
        if not osp.isdir(osp.join(args.out_dir, 'logs')):
            os.makedirs(osp.join(args.out_dir, 'logs'))
        write_config(args, **kwargs)
    print('\nNumber of train samples: {}'.format(len(train_dataset)))
    train_mini_batches = len(train_loader)
    if val_dataset:
        print('Number of val samples: {}'.format(len(val_dataset)))
        val_mini_batches = len(val_loader)
    network = get_model(args, **kwargs).to(device)
    # TCN loss only covers frames past the receptive field; LSTMs use half a window.
    history = network.get_receptive_field() if args.type == 'tcn' else args.window_size // 2
    criterion = get_loss_function(history, args, **kwargs)
    optimizer = torch.optim.Adam(network.parameters(), args.lr)
    scheduler = ReduceLROnPlateau(optimizer, 'min', patience=10, factor=0.75, verbose=True, eps=1e-12)
    quiet_mode = kwargs.get('quiet', False)
    # NOTE(review): 'use_scheduler' is read but never consulted below;
    # scheduler.step() runs whenever a validation loader exists.
    use_scheduler = kwargs.get('use_scheduler', False)
    log_file = None
    if args.out_dir:
        log_file = osp.join(args.out_dir, 'logs', 'log.txt')
        if osp.exists(log_file):
            # Fresh run overwrites the log; a resumed run archives the old one.
            if args.continue_from is None:
                os.remove(log_file)
            else:
                copyfile(log_file, osp.join(args.out_dir, 'logs', 'log_old.txt'))
    start_epoch = 0
    if args.continue_from is not None and osp.exists(args.continue_from):
        # Resume: read the original run's config to learn which device the
        # checkpoint was saved on, then remap tensors to the current device.
        with open(osp.join(str(Path(args.continue_from).parents[1]), 'config.json'), 'r') as f:
            model_data = json.load(f)
        if device.type == 'cpu':
            checkpoints = torch.load(args.continue_from, map_location=lambda storage, location: storage)
        else:
            checkpoints = torch.load(args.continue_from, map_location={model_data['device']: args.device})
        start_epoch = checkpoints.get('epoch', 0)
        network.load_state_dict(checkpoints.get('model_state_dict'))
        optimizer.load_state_dict(checkpoints.get('optimizer_state_dict'))
        if kwargs.get('force_lr', False):
            # Override the checkpointed learning rate with the CLI value.
            for param_group in optimizer.param_groups:
                param_group['lr'] = args.lr
    step = 0
    best_val_loss = np.inf
    train_errs = np.zeros(args.epochs)
    print("Starting from epoch {}".format(start_epoch
                                          ))
    try:
        for epoch in range(start_epoch, args.epochs):
            log_line = ''
            network.train()
            train_vel = MSEAverageMeter(3, [2], _output_channel)
            train_loss = 0
            start_t = time.time()
            for bid, batch in tqdm(enumerate(train_loader)):
                feat, targ, _, _ = batch
                feat, targ = feat.to(device), targ.to(device)
                optimizer.zero_grad()
                predicted = network(feat)
                train_vel.add(predicted.cpu().detach().numpy(), targ.cpu().detach().numpy())
                loss = criterion(predicted, targ)
                train_loss += loss.cpu().detach().numpy()
                loss.backward()
                optimizer.step()
                step += 1
            train_errs[epoch] = train_loss / train_mini_batches
            end_t = time.time()
            if not quiet_mode:
                print('-' * 25)
                print('Epoch {}, time usage: {:.3f}s, loss: {}, val_loss {}/{:.6f}'.format(
                    epoch, end_t - start_t, train_errs[epoch], train_vel.get_channel_avg(), train_vel.get_total_avg()))
                print('Learning rate: {}'.format(optimizer.param_groups[0]['lr']))
            log_line = format_string(log_line, epoch, optimizer.param_groups[0]['lr'], train_errs[epoch],
                                      *train_vel.get_channel_avg())
            saved_model = False
            if val_loader:
                # Validation pass: no weight updates (only zero_grad + forward).
                network.eval()
                val_vel = MSEAverageMeter(3, [2], _output_channel)
                val_loss = 0
                for bid, batch in tqdm(enumerate(val_loader)):
                    feat, targ, _, _ = batch
                    feat, targ = feat.to(device), targ.to(device)
                    optimizer.zero_grad()
                    pred = network(feat)
                    val_vel.add(pred.cpu().detach().numpy(), targ.cpu().detach().numpy())
                    val_loss += criterion(pred, targ).cpu().detach().numpy()
                val_loss = val_loss / val_mini_batches
                log_line = format_string(log_line, val_loss, *val_vel.get_channel_avg())
                if not quiet_mode:
                    print('Validation loss: {} val_loss: {}/{:.6f}'.format(val_loss, val_vel.get_channel_avg(),
                                                                           val_vel.get_total_avg()))
                if val_loss < best_val_loss:
                    # New best model: checkpoint it under checkpoints/checkpoint_<epoch>.pt.
                    best_val_loss = val_loss
                    saved_model = True
                    if args.out_dir:
                        model_path = osp.join(args.out_dir, 'checkpoints', 'checkpoint_%d.pt' % epoch)
                        torch.save({'model_state_dict': network.state_dict(),
                                    'epoch': epoch,
                                    'loss': train_errs[epoch],
                                    'optimizer_state_dict': optimizer.state_dict()}, model_path)
                        print('Best Validation Model saved to ', model_path)
                scheduler.step(val_loss)
            if args.out_dir and not saved_model and (epoch + 1) % args.save_interval == 0:  # save even with validation
                # Periodic fallback checkpoint ('i' prefix) when no best-model save happened.
                model_path = osp.join(args.out_dir, 'checkpoints', 'icheckpoint_%d.pt' % epoch)
                torch.save({'model_state_dict': network.state_dict(),
                            'epoch': epoch,
                            'loss': train_errs[epoch],
                            'optimizer_state_dict': optimizer.state_dict()}, model_path)
                print('Model saved to ', model_path)
            if log_file:
                log_line += '\n'
                with open(log_file, 'a') as f:
                    f.write(log_line)
            if np.isnan(train_loss):
                # Diverged (NaN loss): abort instead of wasting further epochs.
                print("Invalid value. Stopping training.")
                break
    except KeyboardInterrupt:
        # Ctrl-C still falls through to the final checkpoint below.
        print('-' * 60)
        print('Early terminate')
    print('Training completed')
    if args.out_dir:
        model_path = osp.join(args.out_dir, 'checkpoints', 'checkpoint_latest.pt')
        torch.save({'model_state_dict': network.state_dict(),
                    'epoch': epoch,
                    'optimizer_state_dict': optimizer.state_dict()}, model_path)
def recon_traj_with_preds_global(dataset, preds, ind=None, seq_id=0, type='preds', **kwargs):
ind = ind if ind is not None else np.array([i[1] for i in dataset.index_map if i[0] == seq_id], dtype=np.int)
if type == 'gt':
# pos = dataset.gt_pos[seq_id][:, :2]
pos = dataset.gt_pos[seq_id][:, :3]
else:
ts = dataset.ts[seq_id]
# Compute the global velocity from local velocity.
dts = np.mean(ts[ind[1:]] - ts[ind[:-1]])
pos = preds * dts
# pos[0, :] = dataset.gt_pos[seq_id][0, :2]
pos[0, :] = dataset.gt_pos[seq_id][0, :3]
pos = np.cumsum(pos, axis=0)
veloc = preds
ori = dataset.orientations[seq_id]
return pos, veloc, ori
def test(args, **kwargs):
    """Evaluate a trained model on one sequence or a list of sequences.

    Loads the checkpoint at ``args.model_path``, runs inference on each test
    sequence, reconstructs predicted and ground-truth trajectories, and
    reports per-sequence velocity MSE plus ATE/RTE trajectory errors.
    Optionally saves trajectory arrays (.npy), plots (.png) and a text log
    under ``args.out_dir``.
    """
    global device, _output_channel
    import matplotlib.pyplot as plt
    device = torch.device(args.device if torch.cuda.is_available() else 'cpu')
    if args.test_path is not None:
        # Single-sequence mode: split the path into root dir + sequence name.
        if args.test_path[-1] == '/':
            args.test_path = args.test_path[:-1]
        root_dir = osp.split(args.test_path)[0]
        test_data_list = [osp.split(args.test_path)[1]]
    elif args.test_list is not None:
        # List mode: one sequence name per non-comment line (first CSV field).
        root_dir = args.root_dir if args.root_dir else osp.split(args.test_list)[0]
        with open(args.test_list) as f:
            test_data_list = [s.strip().split(',')[0] for s in f.readlines() if len(s) > 0 and s[0] != '#']
    else:
        raise ValueError('Either test_path or test_list must be specified.')
    # Load the first sequence to update the input and output size
    _ = get_dataset(root_dir, [test_data_list[0]], args, mode='test')
    if args.out_dir and not osp.exists(args.out_dir):
        os.makedirs(args.out_dir)
    # The saved run's config records which device the checkpoint came from,
    # so tensors can be remapped onto the current device.
    with open(osp.join(str(Path(args.model_path).parents[1]), 'config.json'), 'r') as f:
        model_data = json.load(f)
    if device.type == 'cpu':
        checkpoint = torch.load(args.model_path, map_location=lambda storage, location: storage)
    else:
        checkpoint = torch.load(args.model_path, map_location={model_data['device']: args.device})
    network = get_model(args, **kwargs)
    network.load_state_dict(checkpoint.get('model_state_dict'))
    network.eval().to(device)
    print('Model {} loaded to device {}.'.format(args.model_path, device))
    log_file = None
    if args.test_list and args.out_dir:
        log_file = osp.join(args.out_dir, osp.split(args.test_list)[-1].split('.')[0] + '_log.txt')
        with open(log_file, 'w') as f:
            f.write(args.model_path + '\n')
            f.write('Seq traj_len velocity ate rte\n')
    losses_vel = MSEAverageMeter(2, [1], _output_channel)
    ate_all, rte_all = [], []
    # RTE window: one minute of predictions at 200 Hz.
    pred_per_min = 200 * 60
    seq_dataset = get_dataset(root_dir, test_data_list, args, mode='test', **kwargs)
    for idx, data in enumerate(test_data_list):
        assert data == osp.split(seq_dataset.data_path[idx])[1]
        feat, vel = seq_dataset.get_test_seq(idx)
        feat = torch.Tensor(feat).to(device)
        # Keep only the last vel.shape[0] frames of the network output.
        preds = np.squeeze(network(feat).cpu().detach().numpy())[-vel.shape[0]:, :_output_channel]
        ind = np.arange(vel.shape[0])
        val_losses = np.mean((vel - preds) ** 2, axis=0)
        losses_vel.add(vel, preds)
        print('Reconstructing trajectory')
        pos_pred, gv_pred, _ = recon_traj_with_preds_global(seq_dataset, preds, ind=ind, type='pred', seq_id=idx)
        pos_gt, gv_gt, _ = recon_traj_with_preds_global(seq_dataset, vel, ind=ind, type='gt', seq_id=idx)
        if args.out_dir is not None and osp.isdir(args.out_dir):
            np.save(osp.join(args.out_dir, '{}_{}.npy'.format(data, args.type)),
                    np.concatenate([pos_pred, pos_gt], axis=1))
        ate = compute_absolute_trajectory_error(pos_pred, pos_gt)
        if pos_pred.shape[0] < pred_per_min:
            # Sequence shorter than the RTE window: scale the error up to a
            # per-minute equivalent.
            ratio = pred_per_min / pos_pred.shape[0]
            rte = compute_relative_trajectory_error(pos_pred, pos_gt, delta=pos_pred.shape[0] - 1) * ratio
        else:
            rte = compute_relative_trajectory_error(pos_pred, pos_gt, delta=pred_per_min)
        pos_cum_error = np.linalg.norm(pos_pred - pos_gt, axis=1)
        ate_all.append(ate)
        rte_all.append(rte)
        print('Sequence {}, Velocity loss {} / {}, ATE: {}, RTE:{}'.format(data, val_losses, np.mean(val_losses), ate,
                                                                           rte))
        log_line = format_string(data, np.mean(val_losses), ate, rte)
        if not args.fast_test:
            kp = preds.shape[1]
            # NOTE(review): targ_names is only bound for kp in {2, 3}; any
            # other output width would raise a NameError in the titles below.
            if kp == 2:
                targ_names = ['vx', 'vy']
            elif kp == 3:
                targ_names = ['vx', 'vy', 'vz']
            plt.figure('{}'.format(data), figsize=(16, 9))
            plt.subplot2grid((kp, 2), (0, 0), rowspan=kp - 1)
            plt.plot(pos_pred[:, 0], pos_pred[:, 1])
            plt.plot(pos_gt[:, 0], pos_gt[:, 1])
            plt.title(data)
            plt.axis('equal')
            plt.legend(['Predicted', 'Ground truth'])
            plt.subplot2grid((kp, 2), (kp - 1, 0))
            plt.plot(pos_cum_error)
            plt.legend(['ATE:{:.3f}, RTE:{:.3f}'.format(ate_all[-1], rte_all[-1])])
            for i in range(kp):
                plt.subplot2grid((kp, 2), (i, 1))
                plt.plot(ind, preds[:, i])
                plt.plot(ind, vel[:, i])
                plt.legend(['Predicted', 'Ground truth'])
                plt.title('{}, error: {:.6f}'.format(targ_names[i], val_losses[i]))
            plt.tight_layout()
            if args.show_plot:
                plt.show()
        if args.out_dir is not None and osp.isdir(args.out_dir):
            plt.savefig(osp.join(args.out_dir, '{}_{}.png'.format(data, args.type)))
        if log_file is not None:
            with open(log_file, 'a') as f:
                log_line += '\n'
                f.write(log_line)
        plt.close('all')
    ate_all = np.array(ate_all)
    rte_all = np.array(rte_all)
    measure = format_string('ATE', 'RTE', sep='\t')
    values = format_string(np.mean(ate_all), np.mean(rte_all), sep='\t')
    print(measure, '\n', values)
    if log_file is not None:
        with open(log_file, 'a') as f:
            f.write(measure + '\n')
            f.write(values)
if __name__ == '__main__':
    """
    Run file with individual arguments or/and config file. If argument appears in both config file and args,
    args is given precedence.
    """
    default_config_file = osp.abspath(osp.join(osp.abspath(__file__), '../../config/temporal_model_defaults.json'))
    import argparse
    parser = argparse.ArgumentParser(description="Run seq2seq model in train/test mode [required]. Optional "
                                                 "configurations can be specified as --key [value..] pairs",
                                     add_help=True)
    parser.add_argument('--config', type=str, help='Configuration file [Default: {}]'.format(default_config_file),
                        default=default_config_file)
    # common
    # NOTE(review): several defaults below are developer-local paths; override
    # them on the command line or via the config file.
    parser.add_argument('--type', type=str, choices=['tcn', 'lstm', 'lstm_bi'], help='Model type', default='lstm')
    parser.add_argument('--root_dir', type=str, default="/data/INSData/ins_data_test/IDOL_SenseINS/building1/train_debug", help='Path to data directory')
    parser.add_argument('--validation_dir', type=str, default="/data/INSData/ins_data_test/IDOL_SenseINS/building1/train_debug")
    # parser.add_argument('--root_dir', type=str,
    #                     default="/home/SENSETIME/xurunsen/project/ronin/RONIN/train_debug",
    #                     help='Path to data directory')
    # parser.add_argument('--validation_dir', type=str,
    #                     default="/home/SENSETIME/xurunsen/project/ronin/RONIN/train_debug")
    parser.add_argument('--cache_path', type=str, default=None)
    parser.add_argument('--feature_sigma', type=float, help='Gaussian for smoothing features')
    parser.add_argument('--target_sigma', type=float, help='Gaussian for smoothing target')
    parser.add_argument('--window_size', type=int)
    parser.add_argument('--step_size', type=int)
    parser.add_argument('--batch_size', type=int)
    parser.add_argument('--num_workers', type=int)
    parser.add_argument('--out_dir', type=str, default='../output/ronin_lstm/idol/2021.05.14/train_debug')
    parser.add_argument('--device', type=str, help='Cuda device (e.g:- cuda:0) or cpu')
    parser.add_argument('--dataset', type=str, choices=['ronin', 'ridi', 'sense'], default='sense')
    parser.add_argument('--imu_freq', type=int, default=200)
    # tcn
    tcn_cmd = parser.add_argument_group('tcn', 'configuration for TCN')
    tcn_cmd.add_argument('--kernel_size', type=int)
    tcn_cmd.add_argument('--channels', type=str, help='Channel sizes for TCN layers (comma separated)')
    # lstm
    lstm_cmd = parser.add_argument_group('lstm', 'configuration for LSTM')
    lstm_cmd.add_argument('--layers', type=int)
    lstm_cmd.add_argument('--layer_size', type=int)
    # Sub-commands select the operation; each adds its own arguments.
    mode = parser.add_subparsers(title='mode', dest='mode', help='Operation: [train] train model, [test] evaluate model')
    mode.required = False
    # train
    train_cmd = mode.add_parser('train')
    train_cmd.add_argument('--train_list', type=str)
    train_cmd.add_argument('--val_list', type=str)
    train_cmd.add_argument('--continue_from', type=str, default=None)
    train_cmd.add_argument('--epochs', type=int)
    train_cmd.add_argument('--save_interval', type=int)
    train_cmd.add_argument('--lr', '--learning_rate', type=float)
    # test
    test_cmd = mode.add_parser('test')
    test_cmd.add_argument('--test_path', type=str, default=None)
    test_cmd.add_argument('--test_list', type=str, default=None)
    test_cmd.add_argument('--model_path', type=str, default='/home/SENSETIME/xurunsen/project/ronin/output/ronin_lstm/idol/2021.05.14/train_debug/checkpoints/checkpoint_714.pt')
    test_cmd.add_argument('--fast_test', action='store_true')
    test_cmd.add_argument('--show_plot', action='store_true')
    '''
    Extra arguments
    Set True: use_scheduler,
              quite (no output on stdout),
              force_lr (force lr when a model is loaded from continue_from)
    float:  dropout,
            max_ori_error (err. threshold for priority grv in degrees)
            max_velocity_norm (filter outliers in training)
    '''
    args, unknown_args = parser.parse_known_args()
    np.set_printoptions(formatter={'all': lambda x: '{:.6f}'.format(x)})
    # Merge CLI args with the JSON config; CLI values win on conflict.
    args, kwargs = load_config(default_config_file, args, unknown_args)
    print(args, kwargs)
    # add by runsen
    # write list
    # Auto-generate train/validation/test list files from the directory
    # contents when the user did not supply them explicitly.
    if args.mode == "train":
        if args.train_list is None:
            WriteList(args.root_dir, "train_list.txt", GetFolderName(args.root_dir))
            args.train_list = args.root_dir + "/train_list.txt"
        if args.validation_dir is not None:
            WriteList(args.validation_dir, "validation_list.txt", GetFolderName(args.validation_dir))
            args.val_list = args.validation_dir + "/validation_list.txt"
    elif args.mode == "test":
        if args.test_list is None:
            WriteList(args.root_dir, "test_list.txt", GetFolderName(args.root_dir))
            args.test_list = args.root_dir + "/test_list.txt"
    if args.mode == 'train':
        train(args, **kwargs)
    elif args.mode == 'test':
        if not args.model_path:
            raise ValueError("Model path required")
        # Batch size 1 for sequential full-sequence inference.
        args.batch_size = 1
        test(args, **kwargs)
| [
"model_temporal.BilinearLSTMSeqNetwork",
"model_temporal.LSTMSeqNetwork",
"utils.MSEAverageMeter",
"numpy.array",
"torch.nn.MSELoss",
"model_temporal.TCNSeqNetwork",
"torch.cuda.is_available",
"numpy.linalg.norm",
"metric.compute_absolute_trajectory_error",
"numpy.arange",
"os.remove",
"os.pat... | [((1238, 1295), 'torch.multiprocessing.set_sharing_strategy', 'torch.multiprocessing.set_sharing_strategy', (['"""file_system"""'], {}), "('file_system')\n", (1280, 1295), False, 'import torch\n'), ((853, 875), 'os.listdir', 'os.listdir', (["(path + '/')"], {}), "(path + '/')\n", (863, 875), False, 'import os\n'), ((3270, 3298), 'transformations.ComposeTransform', 'ComposeTransform', (['transforms'], {}), '(transforms)\n', (3286, 3298), False, 'from transformations import ComposeTransform, RandomHoriRotateSeq\n'), ((3579, 3801), 'data_glob_speed.SequenceToSequenceDataset', 'SequenceToSequenceDataset', (['seq_type', 'root_dir', 'data_list', 'args.cache_path', 'args.step_size', 'args.window_size'], {'random_shift': 'random_shift', 'transform': 'transforms', 'shuffle': 'shuffle', 'grv_only': 'grv_only', 'args': 'args'}), '(seq_type, root_dir, data_list, args.cache_path,\n args.step_size, args.window_size, random_shift=random_shift, transform=\n transforms, shuffle=shuffle, grv_only=grv_only, args=args, **kwargs)\n', (3604, 3801), False, 'from data_glob_speed import GlobSpeedSequence, SequenceToSequenceDataset, SenseINSSequence\n'), ((5882, 5893), 'time.time', 'time.time', ([], {}), '()\n', (5891, 5893), False, 'import time\n'), ((6017, 6135), 'torch.utils.data.DataLoader', 'DataLoader', (['train_dataset'], {'batch_size': 'args.batch_size', 'num_workers': 'args.num_workers', 'shuffle': '(True)', 'drop_last': '(True)'}), '(train_dataset, batch_size=args.batch_size, num_workers=args.\n num_workers, shuffle=True, drop_last=True)\n', (6027, 6135), False, 'from torch.utils.data import DataLoader\n'), ((6173, 6184), 'time.time', 'time.time', ([], {}), '()\n', (6182, 6184), False, 'import time\n'), ((7596, 7686), 'torch.optim.lr_scheduler.ReduceLROnPlateau', 'ReduceLROnPlateau', (['optimizer', '"""min"""'], {'patience': '(10)', 'factor': '(0.75)', 'verbose': '(True)', 'eps': '(1e-12)'}), "(optimizer, 'min', patience=10, factor=0.75, verbose=True,\n eps=1e-12)\n", 
(7613, 7686), False, 'from torch.optim.lr_scheduler import ReduceLROnPlateau\n'), ((8968, 8989), 'numpy.zeros', 'np.zeros', (['args.epochs'], {}), '(args.epochs)\n', (8976, 8989), True, 'import numpy as np\n'), ((15965, 16005), 'utils.MSEAverageMeter', 'MSEAverageMeter', (['(2)', '[1]', '_output_channel'], {}), '(2, [1], _output_channel)\n', (15980, 16005), False, 'from utils import load_config, MSEAverageMeter\n'), ((19286, 19303), 'numpy.array', 'np.array', (['ate_all'], {}), '(ate_all)\n', (19294, 19303), True, 'import numpy as np\n'), ((19318, 19335), 'numpy.array', 'np.array', (['rte_all'], {}), '(rte_all)\n', (19326, 19335), True, 'import numpy as np\n'), ((19964, 20142), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run seq2seq model in train/test mode [required]. Optional configurations can be specified as --key [value..] pairs"""', 'add_help': '(True)'}), "(description=\n 'Run seq2seq model in train/test mode [required]. Optional configurations can be specified as --key [value..] 
pairs'\n , add_help=True)\n", (19987, 20142), False, 'import argparse\n'), ((23899, 23951), 'utils.load_config', 'load_config', (['default_config_file', 'args', 'unknown_args'], {}), '(default_config_file, args, unknown_args)\n', (23910, 23951), False, 'from utils import load_config, MSEAverageMeter\n'), ((1727, 1761), 'torch.nn.MSELoss', 'torch.nn.MSELoss', ([], {'reduction': '"""none"""'}), "(reduction='none')\n", (1743, 1761), False, 'import torch\n'), ((2050, 2078), 'torch.cumsum', 'torch.cumsum', (['targ[:, 1:]', '(1)'], {}), '(targ[:, 1:], 1)\n', (2062, 2078), False, 'import torch\n'), ((2100, 2128), 'torch.cumsum', 'torch.cumsum', (['pred[:, 1:]', '(1)'], {}), '(pred[:, 1:], 1)\n', (2112, 2128), False, 'import torch\n'), ((2391, 2407), 'torch.mean', 'torch.mean', (['loss'], {}), '(loss)\n', (2401, 2407), False, 'import torch\n'), ((4324, 4432), 'model_temporal.TCNSeqNetwork', 'TCNSeqNetwork', (['_input_channel', '_output_channel', 'args.kernel_size'], {'layer_channels': 'args.channels'}), '(_input_channel, _output_channel, args.kernel_size,\n layer_channels=args.channels, **config)\n', (4337, 4432), False, 'from model_temporal import LSTMSeqNetwork, BilinearLSTMSeqNetwork, TCNSeqNetwork\n'), ((6468, 6554), 'torch.utils.data.DataLoader', 'DataLoader', (['val_dataset'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'drop_last': '(True)'}), '(val_dataset, batch_size=args.batch_size, shuffle=True, drop_last\n =True)\n', (6478, 6554), False, 'from torch.utils.data import DataLoader\n'), ((7843, 7884), 'os.path.join', 'osp.join', (['args.out_dir', '"""logs"""', '"""log.txt"""'], {}), "(args.out_dir, 'logs', 'log.txt')\n", (7851, 7884), True, 'from os import path as osp\n'), ((7896, 7916), 'os.path.exists', 'osp.exists', (['log_file'], {}), '(log_file)\n', (7906, 7916), True, 'from os import path as osp\n'), ((8160, 8190), 'os.path.exists', 'osp.exists', (['args.continue_from'], {}), '(args.continue_from)\n', (8170, 8190), True, 'from os import path as 
osp\n'), ((13126, 13187), 'os.path.join', 'osp.join', (['args.out_dir', '"""checkpoints"""', '"""checkpoint_latest.pt"""'], {}), "(args.out_dir, 'checkpoints', 'checkpoint_latest.pt')\n", (13134, 13187), True, 'from os import path as osp\n'), ((13501, 13576), 'numpy.array', 'np.array', (['[i[1] for i in dataset.index_map if i[0] == seq_id]'], {'dtype': 'np.int'}), '([i[1] for i in dataset.index_map if i[0] == seq_id], dtype=np.int)\n', (13509, 13576), True, 'import numpy as np\n'), ((13804, 13839), 'numpy.mean', 'np.mean', (['(ts[ind[1:]] - ts[ind[:-1]])'], {}), '(ts[ind[1:]] - ts[ind[:-1]])\n', (13811, 13839), True, 'import numpy as np\n'), ((13982, 14004), 'numpy.cumsum', 'np.cumsum', (['pos'], {'axis': '(0)'}), '(pos, axis=0)\n', (13991, 14004), True, 'import numpy as np\n'), ((15052, 15077), 'os.makedirs', 'os.makedirs', (['args.out_dir'], {}), '(args.out_dir)\n', (15063, 15077), False, 'import os\n'), ((15189, 15201), 'json.load', 'json.load', (['f'], {}), '(f)\n', (15198, 15201), False, 'import json\n'), ((15253, 15328), 'torch.load', 'torch.load', (['args.model_path'], {'map_location': '(lambda storage, location: storage)'}), '(args.model_path, map_location=lambda storage, location: storage)\n', (15263, 15328), False, 'import torch\n'), ((15360, 15437), 'torch.load', 'torch.load', (['args.model_path'], {'map_location': "{model_data['device']: args.device}"}), "(args.model_path, map_location={model_data['device']: args.device})\n", (15370, 15437), False, 'import torch\n'), ((16473, 16496), 'numpy.arange', 'np.arange', (['vel.shape[0]'], {}), '(vel.shape[0])\n', (16482, 16496), True, 'import numpy as np\n'), ((16518, 16553), 'numpy.mean', 'np.mean', (['((vel - preds) ** 2)'], {'axis': '(0)'}), '((vel - preds) ** 2, axis=0)\n', (16525, 16553), True, 'import numpy as np\n'), ((17079, 17130), 'metric.compute_absolute_trajectory_error', 'compute_absolute_trajectory_error', (['pos_pred', 'pos_gt'], {}), '(pos_pred, pos_gt)\n', (17112, 17130), False, 'from metric 
import compute_absolute_trajectory_error, compute_relative_trajectory_error\n'), ((17464, 17505), 'numpy.linalg.norm', 'np.linalg.norm', (['(pos_pred - pos_gt)'], {'axis': '(1)'}), '(pos_pred - pos_gt, axis=1)\n', (17478, 17505), True, 'import numpy as np\n'), ((19254, 19270), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (19263, 19270), True, 'import matplotlib.pyplot as plt\n'), ((19416, 19432), 'numpy.mean', 'np.mean', (['ate_all'], {}), '(ate_all)\n', (19423, 19432), True, 'import numpy as np\n'), ((19434, 19450), 'numpy.mean', 'np.mean', (['rte_all'], {}), '(rte_all)\n', (19441, 19450), True, 'import numpy as np\n'), ((2697, 2733), 'json.dump', 'json.dump', (['values', 'f'], {'sort_keys': '(True)'}), '(values, f, sort_keys=True)\n', (2706, 2733), False, 'import json\n'), ((3083, 3131), 'transformations.RandomHoriRotateSeq', 'RandomHoriRotateSeq', (['input_format', 'output_format'], {}), '(input_format, output_format)\n', (3102, 3131), False, 'from transformations import ComposeTransform, RandomHoriRotateSeq\n'), ((6649, 6674), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6672, 6674), False, 'import torch\n'), ((6724, 6747), 'os.path.isdir', 'osp.isdir', (['args.out_dir'], {}), '(args.out_dir)\n', (6733, 6747), True, 'from os import path as osp\n'), ((6761, 6786), 'os.makedirs', 'os.makedirs', (['args.out_dir'], {}), '(args.out_dir)\n', (6772, 6786), False, 'import os\n'), ((8313, 8325), 'json.load', 'json.load', (['f'], {}), '(f)\n', (8322, 8325), False, 'import json\n'), ((8386, 8464), 'torch.load', 'torch.load', (['args.continue_from'], {'map_location': '(lambda storage, location: storage)'}), '(args.continue_from, map_location=lambda storage, location: storage)\n', (8396, 8464), False, 'import torch\n'), ((8505, 8590), 'torch.load', 'torch.load', (['args.continue_from'], {'map_location': "{model_data['device']: args.device}"}), "(args.continue_from, map_location={model_data['device']: args.device}\n 
)\n", (8515, 8590), False, 'import torch\n'), ((9231, 9271), 'utils.MSEAverageMeter', 'MSEAverageMeter', (['(3)', '[2]', '_output_channel'], {}), '(3, [2], _output_channel)\n', (9246, 9271), False, 'from utils import load_config, MSEAverageMeter\n'), ((9321, 9332), 'time.time', 'time.time', ([], {}), '()\n', (9330, 9332), False, 'import time\n'), ((9955, 9966), 'time.time', 'time.time', ([], {}), '()\n', (9964, 9966), False, 'import time\n'), ((12861, 12881), 'numpy.isnan', 'np.isnan', (['train_loss'], {}), '(train_loss)\n', (12869, 12881), True, 'import numpy as np\n'), ((14231, 14256), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (14254, 14256), False, 'import torch\n'), ((14411, 14436), 'os.path.split', 'osp.split', (['args.test_path'], {}), '(args.test_path)\n', (14420, 14436), True, 'from os import path as osp\n'), ((15018, 15042), 'os.path.exists', 'osp.exists', (['args.out_dir'], {}), '(args.out_dir)\n', (15028, 15042), True, 'from os import path as osp\n'), ((16894, 16917), 'os.path.isdir', 'osp.isdir', (['args.out_dir'], {}), '(args.out_dir)\n', (16903, 16917), True, 'from os import path as osp\n'), ((17368, 17439), 'metric.compute_relative_trajectory_error', 'compute_relative_trajectory_error', (['pos_pred', 'pos_gt'], {'delta': 'pred_per_min'}), '(pos_pred, pos_gt, delta=pred_per_min)\n', (17401, 17439), False, 'from metric import compute_absolute_trajectory_error, compute_relative_trajectory_error\n'), ((17802, 17821), 'numpy.mean', 'np.mean', (['val_losses'], {}), '(val_losses)\n', (17809, 17821), True, 'import numpy as np\n'), ((18109, 18158), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(kp, 2)', '(0, 0)'], {'rowspan': '(kp - 1)'}), '((kp, 2), (0, 0), rowspan=kp - 1)\n', (18125, 18158), True, 'import matplotlib.pyplot as plt\n'), ((18171, 18211), 'matplotlib.pyplot.plot', 'plt.plot', (['pos_pred[:, 0]', 'pos_pred[:, 1]'], {}), '(pos_pred[:, 0], pos_pred[:, 1])\n', (18179, 18211), True, 'import matplotlib.pyplot 
as plt\n'), ((18224, 18260), 'matplotlib.pyplot.plot', 'plt.plot', (['pos_gt[:, 0]', 'pos_gt[:, 1]'], {}), '(pos_gt[:, 0], pos_gt[:, 1])\n', (18232, 18260), True, 'import matplotlib.pyplot as plt\n'), ((18273, 18288), 'matplotlib.pyplot.title', 'plt.title', (['data'], {}), '(data)\n', (18282, 18288), True, 'import matplotlib.pyplot as plt\n'), ((18301, 18318), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (18309, 18318), True, 'import matplotlib.pyplot as plt\n'), ((18331, 18372), 'matplotlib.pyplot.legend', 'plt.legend', (["['Predicted', 'Ground truth']"], {}), "(['Predicted', 'Ground truth'])\n", (18341, 18372), True, 'import matplotlib.pyplot as plt\n'), ((18385, 18423), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(kp, 2)', '(kp - 1, 0)'], {}), '((kp, 2), (kp - 1, 0))\n', (18401, 18423), True, 'import matplotlib.pyplot as plt\n'), ((18436, 18459), 'matplotlib.pyplot.plot', 'plt.plot', (['pos_cum_error'], {}), '(pos_cum_error)\n', (18444, 18459), True, 'import matplotlib.pyplot as plt\n'), ((18864, 18882), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (18880, 18882), True, 'import matplotlib.pyplot as plt\n'), ((19860, 19881), 'os.path.abspath', 'osp.abspath', (['__file__'], {}), '(__file__)\n', (19871, 19881), True, 'from os import path as osp\n'), ((950, 971), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (965, 971), False, 'import os\n'), ((2483, 2520), 'os.path.join', 'osp.join', (['args.out_dir', '"""config.json"""'], {}), "(args.out_dir, 'config.json')\n", (2491, 2520), True, 'from os import path as osp\n'), ((6812, 6849), 'os.path.join', 'osp.join', (['args.out_dir', '"""checkpoints"""'], {}), "(args.out_dir, 'checkpoints')\n", (6820, 6849), True, 'from os import path as osp\n'), ((6876, 6913), 'os.path.join', 'osp.join', (['args.out_dir', '"""checkpoints"""'], {}), "(args.out_dir, 'checkpoints')\n", (6884, 6913), True, 'from os import path as osp\n'), ((6940, 6970), 
'os.path.join', 'osp.join', (['args.out_dir', '"""logs"""'], {}), "(args.out_dir, 'logs')\n", (6948, 6970), True, 'from os import path as osp\n'), ((6997, 7027), 'os.path.join', 'osp.join', (['args.out_dir', '"""logs"""'], {}), "(args.out_dir, 'logs')\n", (7005, 7027), True, 'from os import path as osp\n'), ((7977, 7996), 'os.remove', 'os.remove', (['log_file'], {}), '(log_file)\n', (7986, 7996), False, 'import os\n'), ((10611, 10651), 'utils.MSEAverageMeter', 'MSEAverageMeter', (['(3)', '[2]', '_output_channel'], {}), '(3, [2], _output_channel)\n', (10626, 10651), False, 'from utils import load_config, MSEAverageMeter\n'), ((12324, 12390), 'os.path.join', 'osp.join', (['args.out_dir', '"""checkpoints"""', "('icheckpoint_%d.pt' % epoch)"], {}), "(args.out_dir, 'checkpoints', 'icheckpoint_%d.pt' % epoch)\n", (12332, 12390), True, 'from os import path as osp\n'), ((14466, 14491), 'os.path.split', 'osp.split', (['args.test_path'], {}), '(args.test_path)\n', (14475, 14491), True, 'from os import path as osp\n'), ((16222, 16259), 'os.path.split', 'osp.split', (['seq_dataset.data_path[idx]'], {}), '(seq_dataset.data_path[idx])\n', (16231, 16259), True, 'from os import path as osp\n'), ((16329, 16347), 'torch.Tensor', 'torch.Tensor', (['feat'], {}), '(feat)\n', (16341, 16347), False, 'import torch\n'), ((17020, 17062), 'numpy.concatenate', 'np.concatenate', (['[pos_pred, pos_gt]'], {'axis': '(1)'}), '([pos_pred, pos_gt], axis=1)\n', (17034, 17062), True, 'import numpy as np\n'), ((17247, 17332), 'metric.compute_relative_trajectory_error', 'compute_relative_trajectory_error', (['pos_pred', 'pos_gt'], {'delta': '(pos_pred.shape[0] - 1)'}), '(pos_pred, pos_gt, delta=pos_pred.shape[0] - 1\n )\n', (17280, 17332), False, 'from metric import compute_absolute_trajectory_error, compute_relative_trajectory_error\n'), ((17656, 17675), 'numpy.mean', 'np.mean', (['val_losses'], {}), '(val_losses)\n', (17663, 17675), True, 'import numpy as np\n'), ((18592, 18625), 
'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(kp, 2)', '(i, 1)'], {}), '((kp, 2), (i, 1))\n', (18608, 18625), True, 'import matplotlib.pyplot as plt\n'), ((18642, 18668), 'matplotlib.pyplot.plot', 'plt.plot', (['ind', 'preds[:, i]'], {}), '(ind, preds[:, i])\n', (18650, 18668), True, 'import matplotlib.pyplot as plt\n'), ((18685, 18709), 'matplotlib.pyplot.plot', 'plt.plot', (['ind', 'vel[:, i]'], {}), '(ind, vel[:, i])\n', (18693, 18709), True, 'import matplotlib.pyplot as plt\n'), ((18726, 18767), 'matplotlib.pyplot.legend', 'plt.legend', (["['Predicted', 'Ground truth']"], {}), "(['Predicted', 'Ground truth'])\n", (18736, 18767), True, 'import matplotlib.pyplot as plt\n'), ((18931, 18941), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (18939, 18941), True, 'import matplotlib.pyplot as plt\n'), ((18987, 19010), 'os.path.isdir', 'osp.isdir', (['args.out_dir'], {}), '(args.out_dir)\n', (18996, 19010), True, 'from os import path as osp\n'), ((4640, 4786), 'model_temporal.BilinearLSTMSeqNetwork', 'BilinearLSTMSeqNetwork', (['_input_channel', '_output_channel', 'args.batch_size', 'device'], {'lstm_layers': 'args.layers', 'lstm_size': 'args.layer_size'}), '(_input_channel, _output_channel, args.batch_size,\n device, lstm_layers=args.layers, lstm_size=args.layer_size, **config)\n', (4662, 4786), False, 'from model_temporal import LSTMSeqNetwork, BilinearLSTMSeqNetwork, TCNSeqNetwork\n'), ((4900, 5038), 'model_temporal.LSTMSeqNetwork', 'LSTMSeqNetwork', (['_input_channel', '_output_channel', 'args.batch_size', 'device'], {'lstm_layers': 'args.layers', 'lstm_size': 'args.layer_size'}), '(_input_channel, _output_channel, args.batch_size, device,\n lstm_layers=args.layers, lstm_size=args.layer_size, **config)\n', (4914, 5038), False, 'from model_temporal import LSTMSeqNetwork, BilinearLSTMSeqNetwork, TCNSeqNetwork\n'), ((8050, 8095), 'os.path.join', 'osp.join', (['args.out_dir', '"""logs"""', '"""log_old.txt"""'], {}), "(args.out_dir, 'logs', 
'log_old.txt')\n", (8058, 8095), True, 'from os import path as osp\n'), ((14588, 14613), 'os.path.split', 'osp.split', (['args.test_list'], {}), '(args.test_list)\n', (14597, 14613), True, 'from os import path as osp\n'), ((11700, 11765), 'os.path.join', 'osp.join', (['args.out_dir', '"""checkpoints"""', "('checkpoint_%d.pt' % epoch)"], {}), "(args.out_dir, 'checkpoints', 'checkpoint_%d.pt' % epoch)\n", (11708, 11765), True, 'from os import path as osp\n'), ((15106, 15127), 'pathlib.Path', 'Path', (['args.model_path'], {}), '(args.model_path)\n', (15110, 15127), False, 'from pathlib import Path\n'), ((8223, 8247), 'pathlib.Path', 'Path', (['args.continue_from'], {}), '(args.continue_from)\n', (8227, 8247), False, 'from pathlib import Path\n'), ((15751, 15776), 'os.path.split', 'osp.split', (['args.test_list'], {}), '(args.test_list)\n', (15760, 15776), True, 'from os import path as osp\n')] |
# coding=utf-8
"""
Definition of constants.

Holds the single shared xbmcswift2 Plugin instance for this module.
"""
from xbmcswift2.plugin import Plugin

# Shared Plugin instance; import this constant rather than creating new ones.
PLUGIN = Plugin()
| [
"xbmcswift2.plugin.Plugin"
] | [((95, 103), 'xbmcswift2.plugin.Plugin', 'Plugin', ([], {}), '()\n', (101, 103), False, 'from xbmcswift2.plugin import Plugin\n')] |
from scipy.misc import imread,imshow
import chaosencrypt as cenc
import numpy as np
from chaosencrypt.discrete_pisarchik import bitexpand,bitreduce

# Demo script: round-trip an image through the discrete Pisarchik chaotic
# cipher and visualize original / encrypted / decrypted / error images.
# NOTE(review): scipy.misc.imread/imshow were removed in SciPy >= 1.2; this
# script requires an older SciPy or a port to imageio/matplotlib -- verify.
# NOTE(review): bitexpand is imported but never used.

# Read image
print('Loading image...')
im_org = imread('../image.jpg')

# Downsample (keep every 3rd pixel in both dimensions) to speed up the demo
im = im_org[::3,::3,:].copy()

# Key -- parameter semantics are defined by the chaosencrypt package
# (presumably: map parameter 'a', iteration/round counts 'n' and 'r',
# and word size 'bits') -- confirm against chaosencrypt's docs.
key = {'a':3.8,'n':10,'r':3,'bits':32}

# Encrypt
print('Encrypting image (discrete pisarchik)...')
enc_im = cenc.encrypt(im,key,'discrete_pisarchik')

# Decrypt
print('Decrypting image (discrete pisarchik)...')
dec_im = cenc.decrypt(enc_im,key,'discrete_pisarchik')

# Diff: absolute per-pixel error between original and round-tripped image;
# a lossless round trip gives an all-zero diff.
diff = np.array(np.abs((im*1.0) - (dec_im*1.0)), dtype='int')
maxdiff = np.max(diff)
print('Max diff:', maxdiff)

# Show: rescale the diff to 0..255 for display; the all-zero case is
# special-cased to avoid a divide-by-zero in the normalization.
if maxdiff == 0:
	diff_im = np.zeros(im.shape, dtype='uint8')
else:
	diff_im = np.array((diff - np.min(diff)) / (np.max(diff) - np.min(diff))*255.99, dtype='uint8')
print('[ original | encrypted ]')
print('[ decrypted | abs(org-dec) ]')
imshow(np.concatenate(
	[np.concatenate((im,bitreduce(enc_im)),1),
	 np.concatenate((dec_im,diff_im),1)]
	,0))
| [
"numpy.abs",
"chaosencrypt.encrypt",
"chaosencrypt.discrete_pisarchik.bitreduce",
"numpy.max",
"scipy.misc.imread",
"numpy.zeros",
"numpy.concatenate",
"numpy.min",
"chaosencrypt.decrypt"
] | [((198, 220), 'scipy.misc.imread', 'imread', (['"""../image.jpg"""'], {}), "('../image.jpg')\n", (204, 220), False, 'from scipy.misc import imread, imshow\n'), ((381, 424), 'chaosencrypt.encrypt', 'cenc.encrypt', (['im', 'key', '"""discrete_pisarchik"""'], {}), "(im, key, 'discrete_pisarchik')\n", (393, 424), True, 'import chaosencrypt as cenc\n'), ((493, 540), 'chaosencrypt.decrypt', 'cenc.decrypt', (['enc_im', 'key', '"""discrete_pisarchik"""'], {}), "(enc_im, key, 'discrete_pisarchik')\n", (505, 540), True, 'import chaosencrypt as cenc\n'), ((619, 631), 'numpy.max', 'np.max', (['diff'], {}), '(diff)\n', (625, 631), True, 'import numpy as np\n'), ((563, 594), 'numpy.abs', 'np.abs', (['(im * 1.0 - dec_im * 1.0)'], {}), '(im * 1.0 - dec_im * 1.0)\n', (569, 594), True, 'import numpy as np\n'), ((696, 729), 'numpy.zeros', 'np.zeros', (['im.shape'], {'dtype': '"""uint8"""'}), "(im.shape, dtype='uint8')\n", (704, 729), True, 'import numpy as np\n'), ((979, 1015), 'numpy.concatenate', 'np.concatenate', (['(dec_im, diff_im)', '(1)'], {}), '((dec_im, diff_im), 1)\n', (993, 1015), True, 'import numpy as np\n'), ((764, 776), 'numpy.min', 'np.min', (['diff'], {}), '(diff)\n', (770, 776), True, 'import numpy as np\n'), ((781, 793), 'numpy.max', 'np.max', (['diff'], {}), '(diff)\n', (787, 793), True, 'import numpy as np\n'), ((796, 808), 'numpy.min', 'np.min', (['diff'], {}), '(diff)\n', (802, 808), True, 'import numpy as np\n'), ((954, 971), 'chaosencrypt.discrete_pisarchik.bitreduce', 'bitreduce', (['enc_im'], {}), '(enc_im)\n', (963, 971), False, 'from chaosencrypt.discrete_pisarchik import bitexpand, bitreduce\n')] |
"""
A whole file dedicated to parsing __version__ in all it's weird possible ways
1) Only acts on source, no file handling.
2) some functions for *by line*
3) some functions for *by file*
4) Handle quotes
5) Handle whitespace
6) Handle version as tuple
"""
import ast
import re
from typing import Any, Optional, Tuple
# Variable names that may carry a package's version, tried in this order
# by find_in_line().
version_tokens = [
    "__version__",  # canonical
    "__VERSION__",  # rare and wrong, but who am I to argue
    "VERSION",  # rare
    "version",  # also accepted
    "PACKAGE_VERSION",  # also accepted
]
def find_by_ast(line: str, version_token: str = "__version__") -> Optional[str]:
    """
    Safely 'evaluate' a version assignment with the ast module instead of
    executing it.

    Handles string constants, numeric constants and tuples, e.g.
    ``__version__ = "1.2.3"``, ``__version__ = 1.2`` or
    ``__version__ = (1, 2, 3)`` (rendered as "1.2.3").

    :param line: single source line that may assign the version
    :param version_token: variable name the version is assigned to
    :return: version as a string, or None when nothing could be parsed.
             (Falsy input yields "" -- kept for backward compatibility.)
    """
    if not line:
        return ""
    # clean up line.
    simplified_line = simplify_line(line)
    if not simplified_line.startswith(version_token):
        return None
    # noinspection PyBroadException
    try:
        tree: Any = ast.parse(simplified_line)
        node = tree.body[0].value
        # Python 3.8+: every literal parses to ast.Constant. Using .value
        # avoids the deprecated ast.Str/.s and ast.Num/.n aliases.
        if isinstance(node, ast.Constant):
            return str(node.value)
        if isinstance(node, ast.Tuple):
            return ".".join(
                str(elt.value) if isinstance(elt, ast.Constant) else str(elt.s)
                for elt in node.elts
            )
        # Legacy fallback for Pythons predating ast.Constant (ast.Str/ast.Num).
        if hasattr(node, "s"):
            return str(node.s)
        if hasattr(node, "elts"):
            return ".".join(
                str(elt.n) if hasattr(elt, "n") else str(elt.s)
                for elt in node.elts
            )
        if hasattr(node, "n"):
            return str(node.n)
    except Exception:
        # Best-effort parser: anything that is not a simple literal
        # assignment is reported as "not found" rather than crashing.
        return None
    return None
def simplify_line(line: str, keep_comma: bool = False) -> str:
    """
    Normalize a source line for version parsing.

    - drops any trailing ``#`` comment
    - removes spaces, tabs and newlines (version strings are assumed to
      contain no significant whitespace)
    - normalizes quoting to a plain double quote: every ' becomes ",
      and triple-quote runs collapse to a single "
    - unless *keep_comma* is True, strips trailing commas and spaces

    :param line: raw source line (may be empty/None)
    :param keep_comma: keep a trailing comma instead of stripping it
    :return: the simplified line ("" for falsy input)
    """
    if not line:
        return ""
    # Drop an inline comment; with no "#" this is the whole line.
    simplified_line = line.split("#", 1)[0]
    simplified_line = (
        simplified_line.replace(" ", "")
        .replace("'", '"')  # after this step, ''' has already become three "
        .replace("\t", "")
        .replace("\n", "")
        .replace('"""', '"')  # version strings shouldn't be split across lines
    )
    if not keep_comma:
        simplified_line = simplified_line.strip(" ,")
    return simplified_line
def find_version_by_regex(
    file_source: str, version_token: str = "__version__"
) -> Optional[str]:
    """
    Locate a dunder-version assignment anywhere in *file_source* via regex.

    :param file_source: full source text (multi-line allowed)
    :param version_token: variable name the version is assigned to
    :return: the quoted version string, or None when absent/empty
    """
    if not file_source:
        return None
    pattern = r"^" + version_token + r" = ['\"]([^'\"]*)['\"]"
    match = re.search(pattern, file_source, re.M)
    if match is None:
        return None
    found = match.group(1)
    # An empty match, or a lone dot, is not a real version.
    return None if found in ("", ".") else found
def find_version_by_string_lib(
    line: str, version_token: str = "__version__"
) -> Optional[str]:
    """
    Extract a version from one line using plain string operations (no regex,
    no ast).

    Expects the simplified form ``__version__="1.2.3"``; returns None when
    the line does not assign a double-quoted string to *version_token*.

    :param line: raw source line (may be empty/None)
    :param version_token: variable name the version is assigned to
    :return: the version string, or None
    """
    if not line:
        return None
    simplified_line = simplify_line(line)
    version = None
    if simplified_line.strip().startswith(version_token):
        if '"' not in simplified_line:
            # Weird version string, no double quote: nothing to extract.
            pass
        elif "=" in simplified_line:
            post_equals = simplified_line.split("=")[1]
            if post_equals.startswith('"'):
                # '"1.2.3"'.split('"') -> ['', '1.2.3', ''] -- the version is
                # the SECOND element; the leading separator makes parts[0]
                # always "" (the original parts[0] could never match).
                version = post_equals.split('"')[1]
    if not version:
        version = None
    return version
def validate_string(version: Optional[str]) -> Optional[str]:
    """
    Reject values that look like unevaluated expressions.

    A real version string never contains whitespace or parentheses; any such
    character means a parse picked up code rather than a literal.

    :param version: candidate version string (may be None)
    :return: *version* unchanged when it looks sane, else None
    """
    if not version:
        return None
    suspicious = " \t()"
    if any(ch in suspicious for ch in str(version)):
        return None
    return version
def find_in_line(line: str) -> Tuple[Optional[str], Optional[str]]:
    """
    Try every known version token with all three parsing strategies.

    For each token the strategies run in order (ast, string ops, regex); the
    first candidate that survives validate_string() wins.

    :param line: source line to inspect
    :return: (version, token) pair, or (None, None) when nothing matched
    """
    if not line:
        return None, None
    strategies = (find_by_ast, find_version_by_string_lib, find_version_by_regex)
    for version_token in version_tokens:
        for strategy in strategies:
            candidate = validate_string(strategy(line, version_token))
            if candidate:
                return candidate, version_token
    return None, None
| [
"ast.parse",
"re.search"
] | [((2585, 2671), 're.search', 're.search', (['(\'^\' + version_token + \' = [\\\'\\\\"]([^\\\'\\\\"]*)[\\\'\\\\"]\')', 'file_source', 're.M'], {}), '(\'^\' + version_token + \' = [\\\'\\\\"]([^\\\'\\\\"]*)[\\\'\\\\"]\', file_source,\n re.M)\n', (2594, 2671), False, 'import re\n'), ((942, 968), 'ast.parse', 'ast.parse', (['simplified_line'], {}), '(simplified_line)\n', (951, 968), False, 'import ast\n')] |
import copy
import itertools
import wsgiref.util
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import jsonutils
from oslo_utils import importutils
import routes.middleware
import six
import webob.dec
import webob.exc
from wsgi_basic import exception
from wsgi_basic.common import authorization
from wsgi_basic.common import dependency
from wsgi_basic.common import utils
CONF = cfg.CONF
LOG = log.getLogger(__name__)
# Environment variable used to pass the request context
CONTEXT_ENV = 'wsgi_basic.context'
# Environment variable used to pass the request params
PARAMS_ENV = 'wsgi_basic.params'
# Content types for which render_response() JSON-serializes the body.
JSON_ENCODE_CONTENT_TYPES = set(['application/json',
                                 'application/json-home'])
class BaseApplication(object):
    """Base WSGI application wrapper. Subclasses need to implement __call__."""

    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste application factory entry point.

        paste.deploy instantiates applications through this hook: the
        key/value pairs under the [app:APPNAME] section of the paste config
        arrive as ``local_config`` and are forwarded verbatim to the
        constructor.  For example::

            [app:wadl]
            latest_version = 1.3
            paste.app_factory = wsgi_basic.fancy_api:Wadl.factory

        results in ``wsgi_basic.fancy_api.Wadl(latest_version='1.3')``.

        Subclasses could override this, but plain keyword passing covers the
        usual cases.
        """
        return cls(**local_config)

    def __call__(self, environ, start_response):
        r"""Handle one WSGI request; subclasses must override.

        A typical implementation decorates itself with
        ``@webob.dec.wsgify()`` and returns one of: a plain string body, a
        nicely formatted ``webob.exc`` HTTP exception, a ``webob.Response``
        object, or another WSGI app to run next (alternatively, set
        ``req.response`` and return None).  See
        http://pythonpaste.org/webob/modules/dec.html for details.
        """
        raise NotImplementedError('You must implement __call__')
@dependency.requires("token_api", "policy_api")
class Application(BaseApplication):
    """Dispatching WSGI application.

    Routes a matched request (see Router) to one of its own methods named
    after the route's 'action', building a request ``context`` dict from the
    WSGI environment on the way in, and rendering the method's return value
    into a WSGI response on the way out.
    """

    @webob.dec.wsgify()
    def __call__(self, req):
        """Build the context, invoke the routed action, render the result."""
        # Routing data was placed in the environ by the routes middleware.
        arg_dict = req.environ['wsgiorg.routing_args'][1]
        action = arg_dict.pop('action')
        del arg_dict['controller']

        # allow middleware up the stack to provide context, params and headers.
        context = req.environ.get(CONTEXT_ENV, {})
        context['query_string'] = dict(req.params.items())
        context['headers'] = dict(req.headers.items())
        context['path'] = req.environ['PATH_INFO']
        scheme = (None if not CONF.secure_proxy_ssl_header
                  else req.environ.get(CONF.secure_proxy_ssl_header))
        if scheme:
            # NOTE(andrey-mp): "wsgi.url_scheme" contains the protocol used
            # before the proxy removed it ('https' usually). So if
            # the webob.Request instance is modified in order to use this
            # scheme instead of the one defined by API, the call to
            # webob.Request.relative_url() will return a URL with the correct
            # scheme.
            req.environ['wsgi.url_scheme'] = scheme
        context['host_url'] = req.host_url
        params = req.environ.get(PARAMS_ENV, {})
        # authentication and authorization attributes are set as environment
        # values by the container and processed by the pipeline. the complete
        # set is not yet known.
        context['environment'] = req.environ
        context['accept_header'] = req.accept
        # NOTE(review): req.environ is dereferenced again below (the LOG.info
        # call and the final render_response use req.environ[...]); clearing
        # it here looks like it would fail on every request -- verify intent.
        req.environ = None

        params.update(arg_dict)

        context.setdefault('is_admin', False)

        # Resolve the action name to one of our own methods.
        method = getattr(self, action)

        # NOTE(morganfainberg): use the request method to normalize the
        # response code between GET and HEAD requests. The HTTP status should
        # be the same.
        LOG.info('%(req_method)s %(uri)s', {
            'req_method': req.environ['REQUEST_METHOD'].upper(),
            'uri': wsgiref.util.request_uri(req.environ),
        })

        params = self._normalize_dict(params)

        try:
            result = method(context, **params)
        except exception.Unauthorized as e:
            LOG.warning(
                ("Authorization failed. %(exception)s from "
                 "%(remote_addr)s"),
                {'exception': e, 'remote_addr': req.environ['REMOTE_ADDR']})
            return render_exception(e, context=context)
        except exception.Error as e:
            LOG.warning(six.text_type(e))
            return render_exception(e, context=context)
        except TypeError as e:
            # Typically a bad/missing parameter to the action method.
            LOG.exception(six.text_type(e))
            return render_exception(exception.ValidationError(e),
                                    context=context)
        except Exception as e:
            # Catch-all: surface as a 500-style UnexpectedError.
            LOG.exception(six.text_type(e))
            return render_exception(exception.UnexpectedError(exception=e),
                                    context=context)

        # Normalize the various return conventions into a WSGI response.
        if result is None:
            return render_response(status=(204, 'No Content'))
        elif isinstance(result, six.string_types):
            return result
        elif isinstance(result, webob.Response):
            return result
        elif isinstance(result, webob.exc.WSGIHTTPException):
            return result

        response_code = self._get_response_code(req)
        return render_response(body=result, status=response_code,
                               method=req.environ['REQUEST_METHOD'])

    def _get_response_code(self, req):
        """Return an explicit (code, title) override, or None for default."""
        code = None
        return code

    def _normalize_arg(self, arg):
        """Turn a routing argument into a valid Python identifier fragment."""
        return arg.replace(':', '_').replace('-', '_')

    def _normalize_dict(self, d):
        """Normalize every key of *d* via _normalize_arg."""
        return {self._normalize_arg(k): v for (k, v) in d.items()}

    def _attribute_is_empty(self, ref, attribute):
        """Returns true if the attribute in the given ref (which is a
        dict) is empty or None.
        """
        return ref.get(attribute) is None or ref.get(attribute) == ''

    def _require_attribute(self, ref, attribute):
        """Ensures the reference contains the specified attribute.

        Raise a ValidationError if the given attribute is not present
        """
        if self._attribute_is_empty(ref, attribute):
            msg = '%s field is required and cannot be empty' % attribute
            raise exception.ValidationError(message=msg)

    def _require_attributes(self, ref, attrs):
        """Ensures the reference contains the specified attributes.

        Raise a ValidationError if any of the given attributes is not present
        """
        missing_attrs = [attribute for attribute in attrs
                         if self._attribute_is_empty(ref, attribute)]

        if missing_attrs:
            msg = '%s field(s) cannot be empty' % ', '.join(missing_attrs)
            raise exception.ValidationError(message=msg)

    def _get_trust_id_for_request(self, context):
        """Get the trust_id for a call.

        Retrieve the trust_id from the token
        Returns None if token is not trust scoped
        """
        if ('token_id' not in context or
                context.get('token_id') == CONF.admin_token):
            LOG.debug(('will not lookup trust as the request auth token is '
                       'either absent or it is the system admin token'))
            return None

        token_ref = utils.get_token_ref(context)
        return token_ref.trust_id

    @classmethod
    def base_url(cls, context, endpoint_type):
        """Return the configured endpoint URL, or fall back to host_url."""
        url = CONF['%s_endpoint' % endpoint_type]

        if url:
            # Endpoint templates may contain %(option)s placeholders filled
            # from the config namespaces.
            substitutions = dict(
                itertools.chain(CONF.items(), CONF.eventlet_server.items()))
            url = url % substitutions
        else:
            # NOTE(jamielennox): if url is not set via the config file we
            # should set it relative to the url that the user used to get here
            # so as not to mess with version discovery. This is not perfect.
            # host_url omits the path prefix, but there isn't another good
            # solution that will work for all urls.
            url = context['host_url']

        return url.rstrip('/')
class Middleware(Application):
    """Base WSGI middleware.

    These classes require an application to be
    initialized that will be called next. By default the middleware will
    simply call its wrapped app, or you can override __call__ to customize its
    behavior.
    """

    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste filter factory entry point.

        Any local configuration (values under the [filter:APPNAME] section of
        the paste config) is passed into ``__init__`` as kwargs, e.g.::

            [filter:analytics]
            redis_host = 127.0.0.1
            paste.filter_factory = wsgi_basic.analytics:Analytics.factory

        results in ``wsgi_basic.analytics.Analytics(app,
        redis_host='127.0.0.1')``.
        """
        def _factory(app):
            # NOTE(review): `conf` is built but never used -- only
            # local_config is forwarded; presumably inherited upstream
            # behavior, verify before "fixing".
            conf = global_config.copy()
            conf.update(local_config)
            return cls(app, **local_config)
        return _factory

    def __init__(self, application):
        # `application` is the next WSGI app in the pipeline.
        super(Middleware, self).__init__()
        self.application = application

    def process_request(self, request):
        """Called on each request.

        If this returns None, the next application down the stack will be
        executed. If it returns a response then that response will be returned
        and execution will stop here.
        """
        return None

    def process_response(self, request, response):
        """Do whatever you'd like to the response, based on the request."""
        return response

    @webob.dec.wsgify()
    def __call__(self, request):
        """Run pre-hook, delegate to the wrapped app, then run post-hook."""
        try:
            response = self.process_request(request)
            if response:
                # Pre-hook short-circuited the pipeline.
                return response
            response = request.get_response(self.application)
            return self.process_response(request, response)
        except exception.Error as e:
            LOG.warning(six.text_type(e))
            return render_exception(e, request=request)
        except TypeError as e:
            LOG.exception(six.text_type(e))
            return render_exception(exception.ValidationError(e),
                                    request=request)
        except Exception as e:
            LOG.exception(six.text_type(e))
            return render_exception(exception.UnexpectedError(exception=e),
                                    request=request)
class Router(object):
    """WSGI middleware that maps incoming requests to WSGI apps."""

    def __init__(self, mapper):
        """Create a router for the given routes.Mapper.

        Each route in `mapper` must specify a 'controller', which is a
        WSGI app to call. You'll probably want to specify an 'action' as
        well and have your controller be an object that can route
        the request to the action-specific method.

        Examples:
          mapper = routes.Mapper()
          sc = ServerController()

          # Explicit mapping of one route to a controller+action
          mapper.connect(None, '/svrlist', controller=sc, action='list')

          # Actions are all implicitly defined
          mapper.resource('server', 'servers', controller=sc)

          # Pointing to an arbitrary WSGI app. You can specify the
          # {path_info:.*} parameter so the target app can be handed just that
          # section of the URL.
          mapper.connect(None, '/v1.0/{path_info:.*}', controller=BlogApp())
        """
        self.map = mapper
        # RoutesMiddleware does the URL matching and stores the result in
        # environ['wsgiorg.routing_args'] before calling _dispatch.
        self._router = routes.middleware.RoutesMiddleware(self._dispatch,
                                                           self.map)

    @webob.dec.wsgify()
    def __call__(self, req):
        """Route the incoming request to a controller based on self.map.

        If no match, return a 404.
        """
        return self._router

    @staticmethod
    @webob.dec.wsgify()
    def _dispatch(req):
        """Dispatch the request to the appropriate controller.

        Called by self._router after matching the incoming request to a route
        and putting the information into req.environ. Either returns 404
        or the routed WSGI app's response.
        """
        match = req.environ['wsgiorg.routing_args'][1]
        if not match:
            # No route matched: render a 404.
            msg = 'The resource could not be found.'
            return render_exception(exception.NotFound(msg),
                                    request=req)
        app = match['controller']
        return app
class ComposingRouter(Router):
    """Router built by merging the routes of several sub-routers."""

    def __init__(self, mapper=None, routers=None):
        """Collect every sub-router's routes into a single mapper.

        :param mapper: optional routes.Mapper; created when omitted
        :param routers: iterable of objects exposing add_routes(mapper)
        """
        mapper = routes.Mapper() if mapper is None else mapper
        routers = [] if routers is None else routers
        for router in routers:
            router.add_routes(mapper)
        super(ComposingRouter, self).__init__(mapper)
class ComposableRouter(Router):
    """Router whose routes can be merged by a ComposingRouter."""

    def __init__(self, mapper=None):
        """Register this router's routes on *mapper* (created when omitted)."""
        mapper = routes.Mapper() if mapper is None else mapper
        self.add_routes(mapper)
        super(ComposableRouter, self).__init__(mapper)

    def add_routes(self, mapper):
        """Hook for subclasses: add routes to the given mapper."""
        pass
def render_response(body=None, status=None, headers=None, method=None):
    """Build a webob.Response from body/status/headers.

    A None body yields an empty 204.  Otherwise the body is JSON-encoded
    unless a non-JSON Content-Type header was supplied.  HEAD requests get
    the same status and headers as the matching GET but an empty body.
    """
    headers = [] if headers is None else list(headers)
    headers.append(('Vary', 'X-Auth-Token'))

    if body is None:
        body = ''
        status = status or (204, 'No Content')
    else:
        content_type = next(
            (v for h, v in headers if h == 'Content-Type'), None)
        if content_type is None or content_type in JSON_ENCODE_CONTENT_TYPES:
            body = jsonutils.dumps(body, cls=utils.SmarterEncoder)
            if content_type is None:
                headers.append(('Content-Type', 'application/json'))
        status = status or (200, 'OK')

    resp = webob.Response(body=body,
                          status='%s %s' % status,
                          headerlist=headers)

    if method and method.upper() == 'HEAD':
        # HEAD must mirror GET's status and headers (including Content-Type
        # and Content-Length).  Assigning b'' to resp.body makes webob
        # recompute headers, so snapshot them first and restore afterwards.
        # (b'' is the only valid empty body type on both py2 and py3.)
        stored_headers = resp.headers.copy()
        resp.body = b''
        for header, value in stored_headers.items():
            resp.headers[header] = value

    return resp
def render_exception(error, context=None, request=None):
    """Forms a WSGI error response from an exception.

    Builds a JSON error body ({'error': {code, title, message}}) and renders
    it with the exception's HTTP code/title via render_response.
    """
    error_message = error.args[0]
    message = str(error_message)
    if message is error_message:
        # translate() didn't do anything because it wasn't a Message,
        # convert to a string.
        message = six.text_type(message)

    body = {'error': {
        'code': error.code,
        'title': error.title,
        'message': message,
    }}
    headers = []
    if isinstance(error, exception.AuthPluginException):
        # Auth plugins carry extra identity details for the client.
        body['error']['identity'] = error.authentication
    return render_response(status=(error.code, error.title),
                           body=body,
headers=headers) | [
"wsgi_basic.common.dependency.requires",
"wsgi_basic.exception.ValidationError",
"wsgi_basic.exception.NotFound",
"oslo_serialization.jsonutils.dumps",
"six.text_type",
"wsgi_basic.common.utils.get_token_ref",
"wsgi_basic.exception.UnexpectedError",
"oslo_log.log.getLogger"
] | [((428, 451), 'oslo_log.log.getLogger', 'log.getLogger', (['__name__'], {}), '(__name__)\n', (441, 451), False, 'from oslo_log import log\n'), ((2934, 2980), 'wsgi_basic.common.dependency.requires', 'dependency.requires', (['"""token_api"""', '"""policy_api"""'], {}), "('token_api', 'policy_api')\n", (2953, 2980), False, 'from wsgi_basic.common import dependency\n'), ((8303, 8331), 'wsgi_basic.common.utils.get_token_ref', 'utils.get_token_ref', (['context'], {}), '(context)\n', (8322, 8331), False, 'from wsgi_basic.common import utils\n'), ((16620, 16642), 'six.text_type', 'six.text_type', (['message'], {}), '(message)\n', (16633, 16642), False, 'import six\n'), ((7274, 7312), 'wsgi_basic.exception.ValidationError', 'exception.ValidationError', ([], {'message': 'msg'}), '(message=msg)\n', (7299, 7312), False, 'from wsgi_basic import exception\n'), ((7768, 7806), 'wsgi_basic.exception.ValidationError', 'exception.ValidationError', ([], {'message': 'msg'}), '(message=msg)\n', (7793, 7806), False, 'from wsgi_basic import exception\n'), ((15170, 15217), 'oslo_serialization.jsonutils.dumps', 'jsonutils.dumps', (['body'], {'cls': 'utils.SmarterEncoder'}), '(body, cls=utils.SmarterEncoder)\n', (15185, 15217), False, 'from oslo_serialization import jsonutils\n'), ((13730, 13753), 'wsgi_basic.exception.NotFound', 'exception.NotFound', (['msg'], {}), '(msg)\n', (13748, 13753), False, 'from wsgi_basic import exception\n'), ((5428, 5444), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (5441, 5444), False, 'import six\n'), ((5559, 5575), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (5572, 5575), False, 'import six\n'), ((5613, 5641), 'wsgi_basic.exception.ValidationError', 'exception.ValidationError', (['e'], {}), '(e)\n', (5638, 5641), False, 'from wsgi_basic import exception\n'), ((5753, 5769), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (5766, 5769), False, 'import six\n'), ((5807, 5845), 'wsgi_basic.exception.UnexpectedError', 
'exception.UnexpectedError', ([], {'exception': 'e'}), '(exception=e)\n', (5832, 5845), False, 'from wsgi_basic import exception\n'), ((11330, 11346), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (11343, 11346), False, 'import six\n'), ((11461, 11477), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (11474, 11477), False, 'import six\n'), ((11515, 11543), 'wsgi_basic.exception.ValidationError', 'exception.ValidationError', (['e'], {}), '(e)\n', (11540, 11543), False, 'from wsgi_basic import exception\n'), ((11655, 11671), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (11668, 11671), False, 'import six\n'), ((11709, 11747), 'wsgi_basic.exception.UnexpectedError', 'exception.UnexpectedError', ([], {'exception': 'e'}), '(exception=e)\n', (11734, 11747), False, 'from wsgi_basic import exception\n')] |
import math
from gluon import URL, SPAN
class Paginater():
"""
Adapted from http://web2py.com/books/default/chapter/29/14/other-recipes#Pagination
"""
item_limits = [6, 12, 25, 50, 100]
def __init__(self, request, query_set, db):
self._request = request
self._query_set = query_set
self._db = db
self._old_vars = filter(lambda x: "_" not in x[0], request.vars.items()) # get rid of crap like _formkey
self.page = None
self.items_per_page = None
self.limitby = None
self.orderby = None
self.order_table = None
self.order_field = None
self.order_links = {}
self.order_reverse = False
self.item_count = None
self.items_per_page_urls = []
self.pages = None
self.page_urls = []
self.has_next = None
self.has_prev = None
self.next_page = None
self.next_url = None
self.prev_page = None
self.prev_url = None
self.set_ordering()
self.set_paging()
def set_ordering(self):
order_string = self._request.vars["orderby"] or (self._request.args[0] + ".id")
self.order_reverse = "~" in order_string
order_string = order_string.strip("~")
self.order_table, self.order_field = order_string.split(".")
if self.order_reverse:
self.orderby = ~self._db[self.order_table][self.order_field]
else:
self.orderby = self._db[self.order_table][self.order_field]
if 'first_name' == self.order_field:
self.orderby = self.orderby|self._db[self.order_table]["last_name"]
elif "last_name" == self.order_field: # if 2 people have the same last name, then sort by first name
self.orderby = self.orderby|self._db[self.order_table]["first_name"]
for table_name in self._db.tables:
for table_field in self._db[table_name].fields:
table_field_is_order_field = (table_name == self.order_table) & (self.order_field == table_field)
self.order_links.setdefault(table_name, {}).setdefault(table_field, {}).update({ # http://stackoverflow.com/questions/12905999/python-dict-how-to-create-key-or-append-an-element-to-key
"url": URL(args=self._request.args, vars=dict(self._old_vars + {'orderby': ("" if (not table_field_is_order_field or self.order_reverse) else "~") + "%s.%s"%(table_name, table_field)}.items())), # flipping order
"arrow": SPAN(_class="text-info glyphicon glyphicon-arrow-" + ("down" if self.order_reverse else "up")) if table_field_is_order_field else ""
})
def set_paging(self):
self.page = (int(self._request.vars["page"] or 0))
self.items_per_page = int(self._request.vars["per"] if int(self._request.vars["per"] or -1) in Paginater.item_limits else Paginater.item_limits[1])
self.limitby=(self.page*self.items_per_page,(self.page+1)*self.items_per_page) # 1*5 <-> 2*5+1
for each in self.item_limits:
href = URL(args=self._request.args, vars=dict(self._old_vars + {'per': each, 'page': 0}.items()))
self.items_per_page_urls.append(dict(href=href, number=each, current=each == self.items_per_page))
self.item_count = self._query_set.count()
division = self.item_count / float(self.items_per_page)
self.pages = int(math.floor(division)) # don't need a new page for not full pages ie. 11/12
if division % 1 == 0: # fixed - there may be a bug with left inner join as not all left from (db.table>0) will show up if right is missing, use left outer join instead.
self.pages -= 1 # don't need a new page for a full page ie. 12/12 items
for each in xrange(self.pages + 1): # xrange doesn't include last
href = URL(args=self._request.args, vars=dict(self._old_vars + {'page':each}.items()))
self.page_urls.append(dict(href=href, number=each, current=each == self.page))
self.has_next = self.page < self.pages # need a new page for overfull page ie. 13/12 items, need page for 1/12
self.has_prev = bool(self.page)
self.next_page = None if not self.has_next else self.page+1 # href='{{=URL(vars=dict(page=paginater.next_page))}}'
self.next_url = URL(args=self._request.args, vars=dict(self._old_vars + {'page':self.next_page}.items()))
self.prev_page = None if not self.has_prev else self.page-1
self.prev_url = URL(args=self._request.args, vars=dict(self._old_vars + {'page':self.prev_page}.items()))
| [
"gluon.SPAN",
"math.floor"
] | [((3501, 3521), 'math.floor', 'math.floor', (['division'], {}), '(division)\n', (3511, 3521), False, 'import math\n'), ((2591, 2690), 'gluon.SPAN', 'SPAN', ([], {'_class': "('text-info glyphicon glyphicon-arrow-' + ('down' if self.order_reverse else\n 'up'))"}), "(_class='text-info glyphicon glyphicon-arrow-' + ('down' if self.\n order_reverse else 'up'))\n", (2595, 2690), False, 'from gluon import URL, SPAN\n')] |
import logging
import os
import pathlib
import requests
import shutil
from typing import Dict, List, Optional, Union
from PIL import Image, UnidentifiedImageError
from mir import scm
# project
def project_root() -> str:
root = str(pathlib.Path(__file__).parent.parent.parent.absolute())
return root
# mir repo infos
def mir_repo_head_name(git: Union[str, scm.CmdScm]) -> Optional[str]:
""" get current mir repo head name (may be branch, or commit id) """
git_scm = None
if isinstance(git, str):
git_scm = scm.Scm(git, scm_executable="git")
elif isinstance(git, scm.CmdScm):
git_scm = git
else:
raise ValueError("invalid git: needs str or CmdScm")
git_result = git_scm.rev_parse(["--abbrev-ref", "HEAD"])
if isinstance(git_result, str):
return git_result
elif isinstance(git_result, bytes):
return git_result.decode("utf-8")
return str(git_result)
def mir_repo_commit_id(git: Union[str, scm.CmdScm], branch: str = "HEAD") -> str:
""" get mir repo branch's commit id """
git_scm = None
if isinstance(git, str):
git_scm = scm.Scm(git, scm_executable="git")
elif isinstance(git, scm.CmdScm):
git_scm = git
else:
raise ValueError("invalid git: needs str or CmdScm")
git_result = git_scm.rev_parse(branch)
if isinstance(git_result, str):
return git_result
elif isinstance(git_result, bytes):
return git_result.decode("utf-8")
return str(git_result)
# Store assets in asset_ids to out_root/sub_folder,
# return relative path to the out_root, staring with sub_folder.
# Set overwrite to False to avoid overwriting.
def store_assets_to_dir(asset_ids: List[str],
out_root: str,
sub_folder: str,
asset_location: str,
overwrite: bool = False,
create_prefix: bool = True,
need_suffix: bool = True) -> Dict[str, str]:
"""
load assets in location and save them to destination local folder
Args:
asset_ids: a list of asset ids (asset hashes)
out_root: the root of output path
sub_folder: sub folder to the output path, if no sub, set to '.'
asset_location: server location prefix of assets, if set to none, try to read it from mir repo config
overwrite (bool): if True, still copy assets even if assets already exists in export dir
create_prefix (bool): use last 2 chars of asset id as a sub dir
"""
# if out_root exists, but not a folder, raise error
if os.path.exists(out_root) and not os.path.isdir(out_root):
raise ValueError("invalid out_root")
os.makedirs(out_root, exist_ok=True)
sub_dir_abs = os.path.join(out_root, sub_folder)
os.makedirs(sub_dir_abs, exist_ok=True)
assets_location = _get_assets_location(asset_ids, asset_location)
unknown_format_count = 0
total_count = len(asset_ids)
asset_id_to_rel_paths: Dict[str, str] = {}
for idx, asset_id in enumerate(asset_ids):
if create_prefix:
suffix = asset_id[-2:]
sub_sub_folder_abs = os.path.join(sub_dir_abs, suffix)
os.makedirs(sub_sub_folder_abs, exist_ok=True)
sub_sub_folder_rel = os.path.join(sub_folder, suffix)
else:
sub_sub_folder_abs = sub_dir_abs
sub_sub_folder_rel = sub_folder
if need_suffix:
try:
asset_image = Image.open(assets_location[asset_id])
file_format = asset_image.format.lower()
except UnidentifiedImageError:
file_format = 'unknown'
unknown_format_count += 1
file_name = (f"{asset_id}.{file_format.lower()}" if need_suffix else asset_id)
asset_path_abs = os.path.join(sub_sub_folder_abs, file_name) # path started from out_root
asset_path_rel = os.path.join(sub_sub_folder_rel, file_name) # path started from sub_folder
_store_asset_to_location(assets_location[asset_id], asset_path_abs, overwrite=overwrite)
asset_id_to_rel_paths[asset_id] = asset_path_rel
if idx > 0 and idx % 5000 == 0:
logging.info(f"exporting {idx} / {total_count} assets")
if unknown_format_count > 0:
logging.warning(f"unknown format asset count: {unknown_format_count}")
return asset_id_to_rel_paths
def _store_asset_to_location(src: str, dst: str, overwrite: bool = False) -> None:
if not src or not dst:
return
os.makedirs(os.path.dirname(dst), exist_ok=True)
if not overwrite and os.path.isfile(dst):
return
if src.startswith('http'): # from http request
response = requests.get(src)
if len(response.content) > 0:
with open(dst, "wb") as f:
f.write(response.content)
elif src.startswith('/'): # from filesystem, require abs path.
shutil.copyfile(src, dst)
else:
raise ValueError(f"Invalid src, not a abs path: {src}")
def _get_assets_location(asset_ids: List[str], asset_location: str) -> Dict[str, str]:
"""
get asset locations
Args:
asset_ids: a list of asset ids (asset hashes)
asset_location: the server location of assets.
Returns:
a dict, key: asset id, value: asset location url
Raises:
Attribute exception if asset_location is not set, and can not be found in config file
"""
# asset_location is a required field.
# CMD layer should NOT aware where the asset is stored.
if not asset_location:
raise ValueError("asset_location is not set.")
return {id: os.path.join(asset_location, id) for id in asset_ids}
| [
"os.path.exists",
"PIL.Image.open",
"os.makedirs",
"pathlib.Path",
"mir.scm.Scm",
"logging.warning",
"os.path.join",
"requests.get",
"os.path.isfile",
"os.path.dirname",
"shutil.copyfile",
"os.path.isdir",
"logging.info"
] | [((2734, 2770), 'os.makedirs', 'os.makedirs', (['out_root'], {'exist_ok': '(True)'}), '(out_root, exist_ok=True)\n', (2745, 2770), False, 'import os\n'), ((2789, 2823), 'os.path.join', 'os.path.join', (['out_root', 'sub_folder'], {}), '(out_root, sub_folder)\n', (2801, 2823), False, 'import os\n'), ((2828, 2867), 'os.makedirs', 'os.makedirs', (['sub_dir_abs'], {'exist_ok': '(True)'}), '(sub_dir_abs, exist_ok=True)\n', (2839, 2867), False, 'import os\n'), ((539, 573), 'mir.scm.Scm', 'scm.Scm', (['git'], {'scm_executable': '"""git"""'}), "(git, scm_executable='git')\n", (546, 573), False, 'from mir import scm\n'), ((1132, 1166), 'mir.scm.Scm', 'scm.Scm', (['git'], {'scm_executable': '"""git"""'}), "(git, scm_executable='git')\n", (1139, 1166), False, 'from mir import scm\n'), ((2627, 2651), 'os.path.exists', 'os.path.exists', (['out_root'], {}), '(out_root)\n', (2641, 2651), False, 'import os\n'), ((3857, 3900), 'os.path.join', 'os.path.join', (['sub_sub_folder_abs', 'file_name'], {}), '(sub_sub_folder_abs, file_name)\n', (3869, 3900), False, 'import os\n'), ((3956, 3999), 'os.path.join', 'os.path.join', (['sub_sub_folder_rel', 'file_name'], {}), '(sub_sub_folder_rel, file_name)\n', (3968, 3999), False, 'import os\n'), ((4337, 4407), 'logging.warning', 'logging.warning', (['f"""unknown format asset count: {unknown_format_count}"""'], {}), "(f'unknown format asset count: {unknown_format_count}')\n", (4352, 4407), False, 'import logging\n'), ((4585, 4605), 'os.path.dirname', 'os.path.dirname', (['dst'], {}), '(dst)\n', (4600, 4605), False, 'import os\n'), ((4647, 4666), 'os.path.isfile', 'os.path.isfile', (['dst'], {}), '(dst)\n', (4661, 4666), False, 'import os\n'), ((4754, 4771), 'requests.get', 'requests.get', (['src'], {}), '(src)\n', (4766, 4771), False, 'import requests\n'), ((5693, 5725), 'os.path.join', 'os.path.join', (['asset_location', 'id'], {}), '(asset_location, id)\n', (5705, 5725), False, 'import os\n'), ((2660, 2683), 'os.path.isdir', 
'os.path.isdir', (['out_root'], {}), '(out_root)\n', (2673, 2683), False, 'import os\n'), ((3190, 3223), 'os.path.join', 'os.path.join', (['sub_dir_abs', 'suffix'], {}), '(sub_dir_abs, suffix)\n', (3202, 3223), False, 'import os\n'), ((3236, 3282), 'os.makedirs', 'os.makedirs', (['sub_sub_folder_abs'], {'exist_ok': '(True)'}), '(sub_sub_folder_abs, exist_ok=True)\n', (3247, 3282), False, 'import os\n'), ((3316, 3348), 'os.path.join', 'os.path.join', (['sub_folder', 'suffix'], {}), '(sub_folder, suffix)\n', (3328, 3348), False, 'import os\n'), ((4239, 4294), 'logging.info', 'logging.info', (['f"""exporting {idx} / {total_count} assets"""'], {}), "(f'exporting {idx} / {total_count} assets')\n", (4251, 4294), False, 'import logging\n'), ((4967, 4992), 'shutil.copyfile', 'shutil.copyfile', (['src', 'dst'], {}), '(src, dst)\n', (4982, 4992), False, 'import shutil\n'), ((3524, 3561), 'PIL.Image.open', 'Image.open', (['assets_location[asset_id]'], {}), '(assets_location[asset_id])\n', (3534, 3561), False, 'from PIL import Image, UnidentifiedImageError\n'), ((239, 261), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (251, 261), False, 'import pathlib\n')] |
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Load in the data
df = pd.read_csv("results.txt", header=None)
df.columns = ["poison_perc", "success_rate", "auc"]
# Plot the data
plt.plot(df['poison_perc'], df['success_rate'], label="Success rate")
plt.plot(df['poison_perc'], df['auc'], label='Clean AUC')
plt.legend()
plt.xlabel("Poisoning percentage")
plt.show()
# Save the figure
# plt.savefig('result_graph.png', dpi=100)
| [
"pandas.read_csv",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((96, 135), 'pandas.read_csv', 'pd.read_csv', (['"""results.txt"""'], {'header': 'None'}), "('results.txt', header=None)\n", (107, 135), True, 'import pandas as pd\n'), ((205, 274), 'matplotlib.pyplot.plot', 'plt.plot', (["df['poison_perc']", "df['success_rate']"], {'label': '"""Success rate"""'}), "(df['poison_perc'], df['success_rate'], label='Success rate')\n", (213, 274), True, 'import matplotlib.pyplot as plt\n'), ((275, 332), 'matplotlib.pyplot.plot', 'plt.plot', (["df['poison_perc']", "df['auc']"], {'label': '"""Clean AUC"""'}), "(df['poison_perc'], df['auc'], label='Clean AUC')\n", (283, 332), True, 'import matplotlib.pyplot as plt\n'), ((333, 345), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (343, 345), True, 'import matplotlib.pyplot as plt\n'), ((346, 380), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Poisoning percentage"""'], {}), "('Poisoning percentage')\n", (356, 380), True, 'import matplotlib.pyplot as plt\n'), ((381, 391), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (389, 391), True, 'import matplotlib.pyplot as plt\n')] |
#!/usr/bin/env python3
# Usage: python SESGenerator.py <target_configuration>.json <output_directory>
#
# <target_configuration>.json is a json file generated from CMake on the form:
# {
# "target": {
# "name": "light_control_client_nrf52832_xxAA_s132_5.0.0",
# "sources": "main.c;provisioner.c;..",
# "includes": "include1;include2;..",
# "definitions":"NRF52;NRF52_SERIES;..",
# },
# "platform": {
# "name": "nrf52832_xxAA",
# "arch": "cortex-m4f",
# "flash_size": 524288,
# "ram_size": 65536,
# },
# "softdevice": {
# "hex_file": "<path-to-s132_nrf52_5.0.0_softdevice.hex>",
# "flash_size": 143360,
# "ram_size": 12720
# }
# }
import jinja2
import sys
import argparse
import json
import os
from collections import namedtuple
from shutil import copyfile
TEST_JSON_STR = """{
"target": {
"name": "light_control_client_nrf52832_xxAA_s132_5.0.0",
"sources": "main.c;provisioner.c",
"includes": "include1;include2",
"defines":"NRF52;NRF52_SERIES"
},
"platform": {
"name": "nrf52832_xxAA",
"arch": "cortex-m4f",
"flash_size": 524288,
"ram_size": 65536
},
"softdevice": {
"hex_file": "path-to/s132_nrf52_5.0.0_softdevice.hex",
"flash_size": 143360,
"ram_size": 12720
}
}"""
# Constants
NRF51_BOOTLOADER_FLASH_SIZE = 24576
NRF51_BOOTLOADER_RAM_SIZE = 768
NRF52_BOOTLOADER_FLASH_SIZE = 32768
NRF52_BOOTLOADER_RAM_SIZE = 4096
RAM_ADDRESS_START = 536870912
def application_flash_limits_get(softdevice_flash_size,
bootloader_flash_size,
platform_flash_size):
return (hex(softdevice_flash_size), hex(platform_flash_size - bootloader_flash_size))
def application_ram_limits_get(softdevice_ram_size,
bootloader_ram_size,
platform_ram_size):
return (hex(RAM_ADDRESS_START + softdevice_ram_size), hex(platform_ram_size - bootloader_ram_size))
DataRegion = namedtuple("DataRegion", ["start", "size"])
Target = namedtuple("Target", ["name", "includes", "defines", "sources"])
Platform = namedtuple("Platform", ["name", "arch", "flash_size", "ram_size"])
SoftDevice = namedtuple("Softdevice", ["hex_file", "flash_size", "ram_size"])
Configuration = namedtuple("Configuration", ["target", "platform", "softdevice"])
File = namedtuple("File", ["path"])
Group = namedtuple("Group", ["name", "files", "match_string"])
GROUP_TEMPLATES = [
Group(name="Application", files=[], match_string="examples"),
Group(name="Core", files=[], match_string="mesh/core"),
Group(name="Serial", files=[], match_string="mesh/serial"),
Group(name="Mesh stack", files=[], match_string="mesh/stack"),
Group(name="GATT", files=[], match_string="mesh/gatt"),
Group(name="DFU", files=[], match_string="mesh/dfu"),
Group(name="Toolchain", files=[File("$(StudioDir)/source/thumb_crt0.s")], match_string="toolchain"),
Group(name="Access", files=[], match_string="mesh/access"),
Group(name="Bearer", files=[], match_string="mesh/bearer"),
Group(name="SEGGER RTT", files=[], match_string="rtt"),
Group(name="uECC", files=[], match_string="micro-ecc"),
Group(name="nRF5 SDK", files=[], match_string="$(SDK_ROOT"),
Group(name="Provisioning", files=[], match_string="mesh/prov"),
Group(name="Configuration Model", files=[], match_string="models/foundation/config"),
Group(name="Health Model", files=[], match_string="models/foundation/health"),
Group(name="Generic OnOff Model", files=[], match_string="models/model_spec/generic_onoff"),
Group(name="Simple OnOff Model", files=[], match_string="models/vendor/simple_on_off"),
Group(name="Remote provisioning Model", files=[], match_string="models/proprietary/pb_remote")]
def unix_relative_path_get(path1, path2):
if not path1.startswith('$('):
path1 = os.path.relpath(path1, path2)
return path1.replace("\\", "/")
def load_config(input_file):
with open(input_file, "r") as f:
config = json.load(f)
return config
def load_softdevice(sd_config):
with open(sd_config["definition_file"], "r") as f:
config = json.load(f)
return [sd for sd in config["softdevices"] if sd["name"] == sd_config["name"]][0]
def load_platform(platform_config):
with open(platform_config["definition_file"], "r") as f:
config = json.load(f)
return [platform for platform in config["platforms"] if platform["name"] == platform_config["name"]][0]
def create_file_groups(files, out_dir):
other = Group(name="Other", files=[], match_string=None)
groups = GROUP_TEMPLATES[:]
for f in files:
found_group = False
if "gcc_startup" in f.lower() or "arm_startup" in f.lower():
continue
for g in groups:
if g.match_string in f:
f = unix_relative_path_get(f, out_dir)
g.files.append(File(f))
found_group = True
break
if not found_group:
f = unix_relative_path_get(f, out_dir)
other.files.append(File(f))
groups.append(other)
# Remove empty groups
for g in groups[:]:
if len(g.files) == 0:
groups.remove(g)
return groups
def calculate_flash_limits(config):
bl_flash_size = NRF51_BOOTLOADER_FLASH_SIZE if "nrf51" in config["platform"]["config"]["name"].lower() else NRF52_BOOTLOADER_FLASH_SIZE
bl_flash_size = bl_flash_size if "nrf52810_xxAA" not in config["platform"]["config"]["name"] else 0
flash_limits = application_flash_limits_get(config["softdevice"]["config"]["flash_size"], bl_flash_size, config["platform"]["config"]["flash_size"])
return DataRegion(*flash_limits)
def calculate_ram_limits(config):
bl_ram_size = NRF51_BOOTLOADER_RAM_SIZE if "nrf51" in config["platform"]["config"]["name"].lower() else NRF52_BOOTLOADER_RAM_SIZE
bl_ram_size = bl_ram_size if "nrf52810_xxAA" not in config["platform"]["config"]["name"] else 0
ram_limits = application_ram_limits_get(config["softdevice"]["config"]["ram_size"], bl_ram_size, config["platform"]["config"]["ram_size"])
return DataRegion(*ram_limits)
def generate_ses_project(config, out_dir="."):
files = config["target"]["sources"].split(";")
config["target"]["includes"] = [unix_relative_path_get(i, out_dir) for i in config["target"]["includes"].split(";")]
config["target"]["heap_size"] = 1024
config["target"]["stack_size"] = 2048
config["target"]["groups"] = create_file_groups(files, out_dir)
config["target"]["flash"] = calculate_flash_limits(config)
config["target"]["ram"] = calculate_ram_limits(config)
config["platform"]["fpu"] = config["platform"]["config"]["arch"] == "cortex-m4f"
config["softdevice"]["hex_file"] = unix_relative_path_get(config["softdevice"]["hex_file"], out_dir)
config["sdk_default_path"] = unix_relative_path_get('../../../nRF5_SDK_16.0.0_98a08e2', out_dir)
s = ""
with open("ses.xml", "r") as f:
s = f.read()
t = jinja2.Template(s)
s = t.render(config)
return s
def generate_ses_session(out_dir):
session_file_contents = ['<!DOCTYPE CrossStudio_Session_File>',
'<session>',
'\t<Files>',
'\t\t<SessionOpenFile path="{}"/>',
'\t</Files>',
'</session>']
return '\n'.join(session_file_contents).format(unix_relative_path_get('../../doc/getting_started/SES.md', out_dir))
def test():
config = json.loads(TEST_JSON_STR)
print(config)
s = generate_ses_project(config)
with open("test.xml", "w") as f:
f.write(s)
print ("Done")
def main():
input_file = sys.argv[1]
out_dir = sys.argv[2]
config = load_config(input_file)
config["softdevice"]["config"] = load_softdevice(config["softdevice"])
config["platform"]["config"] = load_platform(config["platform"])
ses_project = generate_ses_project(config, out_dir)
out_dir += "/"
# SES doesn't support "." in filenames
output_filename = out_dir + config["target"]["name"].replace(".", "_")
project_file = output_filename + ".emProject"
with open(project_file, "w") as f:
f.write(ses_project)
# Create session
ses_session = generate_ses_session(out_dir)
session_file = output_filename + ".emSession"
with open(session_file, "w") as f:
f.write(ses_session)
# Generate flash placement:
copyfile("flash_placement.xml", out_dir + "flash_placement.xml")
print("Wrote: " + project_file)
if __name__ == "__main__":
main()
| [
"json.loads",
"collections.namedtuple",
"jinja2.Template",
"shutil.copyfile",
"json.load",
"os.path.relpath"
] | [((2111, 2154), 'collections.namedtuple', 'namedtuple', (['"""DataRegion"""', "['start', 'size']"], {}), "('DataRegion', ['start', 'size'])\n", (2121, 2154), False, 'from collections import namedtuple\n'), ((2165, 2229), 'collections.namedtuple', 'namedtuple', (['"""Target"""', "['name', 'includes', 'defines', 'sources']"], {}), "('Target', ['name', 'includes', 'defines', 'sources'])\n", (2175, 2229), False, 'from collections import namedtuple\n'), ((2241, 2307), 'collections.namedtuple', 'namedtuple', (['"""Platform"""', "['name', 'arch', 'flash_size', 'ram_size']"], {}), "('Platform', ['name', 'arch', 'flash_size', 'ram_size'])\n", (2251, 2307), False, 'from collections import namedtuple\n'), ((2321, 2385), 'collections.namedtuple', 'namedtuple', (['"""Softdevice"""', "['hex_file', 'flash_size', 'ram_size']"], {}), "('Softdevice', ['hex_file', 'flash_size', 'ram_size'])\n", (2331, 2385), False, 'from collections import namedtuple\n'), ((2402, 2467), 'collections.namedtuple', 'namedtuple', (['"""Configuration"""', "['target', 'platform', 'softdevice']"], {}), "('Configuration', ['target', 'platform', 'softdevice'])\n", (2412, 2467), False, 'from collections import namedtuple\n'), ((2476, 2504), 'collections.namedtuple', 'namedtuple', (['"""File"""', "['path']"], {}), "('File', ['path'])\n", (2486, 2504), False, 'from collections import namedtuple\n'), ((2513, 2567), 'collections.namedtuple', 'namedtuple', (['"""Group"""', "['name', 'files', 'match_string']"], {}), "('Group', ['name', 'files', 'match_string'])\n", (2523, 2567), False, 'from collections import namedtuple\n'), ((7167, 7185), 'jinja2.Template', 'jinja2.Template', (['s'], {}), '(s)\n', (7182, 7185), False, 'import jinja2\n'), ((7712, 7737), 'json.loads', 'json.loads', (['TEST_JSON_STR'], {}), '(TEST_JSON_STR)\n', (7722, 7737), False, 'import json\n'), ((8656, 8720), 'shutil.copyfile', 'copyfile', (['"""flash_placement.xml"""', "(out_dir + 'flash_placement.xml')"], {}), "('flash_placement.xml', 
out_dir + 'flash_placement.xml')\n", (8664, 8720), False, 'from shutil import copyfile\n'), ((4007, 4036), 'os.path.relpath', 'os.path.relpath', (['path1', 'path2'], {}), '(path1, path2)\n', (4022, 4036), False, 'import os\n'), ((4157, 4169), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4166, 4169), False, 'import json\n'), ((4293, 4305), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4302, 4305), False, 'import json\n'), ((4507, 4519), 'json.load', 'json.load', (['f'], {}), '(f)\n', (4516, 4519), False, 'import json\n')] |
from django import template
from django.db.models import Count
from django.utils.safestring import mark_safe
import markdown
from ..models import Post
register = template.Library()
####
# Register as simple tags
####
# A simple template tag that returns the number of posts published so far.=
@register.simple_tag
def total_posts():
return Post.published.count()
# A simple template tag that displays the 5 most commented posts
@register.simple_tag
def get_most_commented_posts(count=5):
# Build a QuerySet using the annotate() function to aggregate the
# - total number of comments for each post.
return Post.published.annotate(
# use the Count aggregation function to store the number of comments
# - in the computed field total_comments for each Post object.
total_comments=Count('comments')
).order_by('-total_comments')[:count]
####
# Register as inclusion_tags
####
# An inclusion tag that returns the 5 latest posts.
@register.inclusion_tag('blog/post/latest_posts.html')
def show_latest_posts(count=5):
latest_posts = Post.published.order_by('-publish')[:count]
return {
'latest_posts': latest_posts
}
####
# Register Template Filters
####
# A template filter to enable use of markdown .md syntax in blog posts and then converts
# - post contents to HTML in the templates
@register.filter(name='markdown')
def markdown_format(text):
return mark_safe(markdown.markdown(text))
| [
"django.db.models.Count",
"markdown.markdown",
"django.template.Library"
] | [((164, 182), 'django.template.Library', 'template.Library', ([], {}), '()\n', (180, 182), False, 'from django import template\n'), ((1439, 1462), 'markdown.markdown', 'markdown.markdown', (['text'], {}), '(text)\n', (1456, 1462), False, 'import markdown\n'), ((824, 841), 'django.db.models.Count', 'Count', (['"""comments"""'], {}), "('comments')\n", (829, 841), False, 'from django.db.models import Count\n')] |
from PyQt5.QtWidgets import QWidget, QHBoxLayout
from pyqtgraph import GraphicsLayoutWidget
import pyqtgraph as pg
class VideoWidget(QWidget):
def __init__(self, parent=None):
super().__init__(parent=parent)
self.layout = QHBoxLayout(self)
# Settings for the image
self.imv = pg.ImageView()
# Add everything to the widget
self.layout.addWidget(self.imv)
self.setLayout(self.layout) | [
"pyqtgraph.ImageView",
"PyQt5.QtWidgets.QHBoxLayout"
] | [((243, 260), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', (['self'], {}), '(self)\n', (254, 260), False, 'from PyQt5.QtWidgets import QWidget, QHBoxLayout\n'), ((314, 328), 'pyqtgraph.ImageView', 'pg.ImageView', ([], {}), '()\n', (326, 328), True, 'import pyqtgraph as pg\n')] |
from midiutil.MidiFile import MIDIFile
import os
def _create_midi_mapping():
""" Create a dictionary that maps note name to midi note integer """
middle_c = 60
notes = "c", "c#", "d", "d#", "e", "f", "f#", "g", "g#", "a", "a#", "b"
equiv = (("c#", "db"), ("d#", "eb"),
("f#", "gb"), ("g#", "ab"), ("a#", "bb"))
m = {}
j, o = len(notes)-1, 3
for v in range(middle_c-1, -1, -1):
for e in equiv: m[notes[j].replace(*e) + str(o)] = v
if j == 0: o -= 1
j = (j - 1) % len(notes)
j, o = 0, 4
for v in range(middle_c, 128):
for e in equiv: m[notes[j].replace(*e) + str(o)] = v
j = (j + 1) % len(notes)
if j == 0: o += 1
return m
_midi_mapping = _create_midi_mapping()
class Song(MIDIFile):
_valid = tuple, list, type(x for x in range(1))
def __init__(self, name="test", tempo=100, num_tracks=1):
"""
Intialize Song object.
name: str, name of song/file.
tempo: int, bpm of song.
num_tracks: int, number of tracks for the midi file to have.
"""
super().__init__(num_tracks)
self.name, self.tempo, self.volume = name, tempo, 100
self.filename = "%s.mid" % name
self.path = ""
track, self.channel = 0, 0
self.time = [0]*num_tracks # start each track at the beginning
self.addTempo(track, self.time[0], self.tempo)
def addNote(self, notes, duration=4, track=0):
"""
Overrides MIDIFile's addNote method, but uses it as a subroutine. Adds
a note or notes with a duration to the specified track, then increments
the time by that duration.
notes: str or tuple of strs, notes to add at the current location of
of the track.
duration: float, number of beats for the note/chord.
track: int, which track to add to.
"""
if not isinstance(notes, Song._valid): notes = notes,
for note in notes:
note = note.lower()
if note in _midi_mapping: pitch = _midi_mapping[note]
elif note+"4" in _midi_mapping: pitch = _midi_mapping[note+"4"]
else: raise ValueError("Note not valid:", note)
super().addNote(track, self.channel, pitch,
self.time[track], duration, self.volume)
self.time[track] += duration
self.need_to_write = True
def addRest(self, duration=1, track=0):
"""
Add a rest to the track, just corresponds to adjusting the time.
duration: float, number of beats the rest lasts.
track: int, which track to add the rest to.
"""
self.time[track] += duration
self.need_to_write = True
def addText(self, text, track=0):
"""
Add text to a track at the current time. For it to be visible, there
must be a note at the current time on this track.
text: str, text to add.
track: int, which track to add the text to.
"""
super().addText(track, self.time[track], str(text))
self.need_to_write = True
def writeFile(self, path=""):
"""
Write the current midi track to a file
path: str, path to write the file to. Must end with a "/"!
"""
if not self.need_to_write: return
try:
with open(path+self.filename, "wb") as f: super().writeFile(f)
except FileNotFoundError:
os.mkdir(path)
with open(path+self.filename, "wb") as f: super().writeFile(f)
self.need_to_write = False
self.path = path
def play(self, path=""):
"""
Write the midi file, then call on the system's default midi player. On
Windows, this is probably Windows Media Player. THIS ONLY WORKS ON
WINDOWS, IF YOU WANT TO USE IT YOU MUST CHANGE THE SYSTEM CALL.
path: str, where to save the file to. Must end with a "/"!
"""
if not path and self.path: path = self.path
self.writeFile(path)
os.system("start %s" % (self.path+self.filename))
def __str__(self):
""" Return the string name of the song """
return self.filename
if __name__ == "__main__":
s = Song(name="helloworld", tempo=110, path="")
s.addNote("c")
s.addNote("d")
s.addNote(("c", "d", "e"))
s.view()
| [
"os.system",
"os.mkdir"
] | [((4254, 4305), 'os.system', 'os.system', (["('start %s' % (self.path + self.filename))"], {}), "('start %s' % (self.path + self.filename))\n", (4263, 4305), False, 'import os\n'), ((3631, 3645), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (3639, 3645), False, 'import os\n')] |
#!/usr/bin/env python3
# pylint: disable=too-many-lines
"""
Provides a stdin/stdout based protocol to safely dispatch commands and return their
results over any connection that forwards both stdin/stdout, as well as some other
needed remote system related utilities.
"""
import errno as sys_errno
import hashlib
import os
import stat
import struct
import subprocess
import sys
import typing
from pwd import getpwnam, getpwuid
from grp import getgrnam, getgrgid, getgrall
from spwd import getspnam
from struct import pack, unpack
from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast
T = TypeVar('T')
i32 = NewType('i32', int)
u32 = NewType('u32', int)
i64 = NewType('i64', int)
u64 = NewType('u64', int)
is_server = False
debug = False
try:
import fora
except ModuleNotFoundError:
pass
# TODO: timeout on commands
# TODO: interactive commands?
# TODO: env
class RemoteOSError(Exception):
    """Raised locally to represent an OSError that happened on the remote host.

    Carries the remote errno and strerror alongside the formatted message.
    """
    def __init__(self, errno: int, strerror: str, msg: str):
        self.errno = errno
        self.strerror = strerror
        super().__init__(msg)
# Utility functions
# ----------------------------------------------------------------
def _is_debug() -> bool:
    """Whether debug output is enabled (server global or client-side fora args)."""
    if is_server:
        return debug
    return cast(bool, fora.args.debug)
def _log(msg: str) -> None:
    """
    Logs the given message to stderr, appending a prefix to indicate whether this
    is running on a remote (server) or locally (client).
    Parameters
    ----------
    msg
        The message to log.
    """
    # Logging is a no-op unless debugging was requested.
    if not _is_debug():
        return
    # TODO color should be configurable
    # NOTE: the prefixes below embed raw ANSI color escape sequences.
    prefix = " [1;33mREMOTE [m: " if is_server else " [1;32mLOCAL [m: "
    print(f"{prefix}{msg}", file=sys.stderr, flush=True)
def _resolve_oct(value: str) -> int:
"""
Resolves an octal string to a numeric value (e.g. for umask or mode).
Raises a ValueError if the value is malformed.
Parameters
----------
value
The octal string value
Returns
-------
int
The numeric representation of the octal value
"""
try:
return int(value, 8)
except ValueError:
raise ValueError(f"Invalid value '{value}': Must be in octal format.") # pylint: disable=raise-missing-from
def _resolve_user(user: str) -> tuple[int, int]:
"""
Resolves the given user string to a uid and gid.
The string may be either a username or a uid.
Raises a ValueError if the user/uid does not exist.
Parameters
----------
user
The username or uid to resolve
Returns
-------
tuple[int, int]
A tuple (uid, gid) with the numeric ids of the user and its primary group
"""
try:
pw = getpwnam(user)
except KeyError:
try:
uid = int(user)
try:
pw = getpwuid(uid)
except KeyError:
raise ValueError(f"The user with the uid '{uid}' does not exist.") # pylint: disable=raise-missing-from
except ValueError:
raise ValueError(f"The user with the name '{user}' does not exist.") # pylint: disable=raise-missing-from
return (pw.pw_uid, pw.pw_gid)
def _resolve_group(group: str) -> int:
"""
Resolves the given group string to a gid.
The string may be either a groupname or a gid.
Raises a ValueError if the group/gid does not exist.
Parameters
----------
group
The groupname or gid to resolve
Returns
-------
int
The numeric gid of the group
"""
try:
gr = getgrnam(group)
except KeyError:
try:
gid = int(group)
try:
gr = getgrgid(gid)
except KeyError:
raise ValueError(f"The group with the gid '{gid}' does not exist.") # pylint: disable=raise-missing-from
except ValueError:
raise ValueError(f"The group with the name '{group}' does not exist.") # pylint: disable=raise-missing-from
return gr.gr_gid
# Connection wrapper
# ----------------------------------------------------------------
# pylint: disable=too-many-public-methods
class Connection:
    """A bidirectional packet channel over a pair of byte buffers."""
    def __init__(self, buffer_in: IO[bytes], buffer_out: IO[bytes]):
        """Remember the two buffers; the connection starts out open."""
        self.buffer_in = buffer_in
        self.buffer_out = buffer_out
        self.should_close = False
    def flush(self) -> None:
        """Force any buffered output onto the underlying stream."""
        self.buffer_out.flush()
    def read(self, count: int) -> bytes:
        """Reads exactly the given amount of bytes."""
        return self.buffer_in.read(count)
    def write(self, data: bytes, count: int) -> None:
        """Writes exactly `count` bytes taken from the start of `data`."""
        self.buffer_out.write(data[:count])
    def write_packet(self, packet: Any) -> None:
        """Serialize and send the given packet object."""
        if not bool(getattr(packet, '_is_packet', False)):
            raise ValueError("Invalid argument: Must be a packet!")
        # _write is attached by the @Packet decorator, so the type checker
        # cannot see it; invoke it dynamically.
        packet._write(self)  # pylint: disable=protected-access
# Primary serialization and deserialization
# ----------------------------------------------------------------
def _is_optional(field: Type[Any]) -> bool:
"""Returns True when the given type annotation is Optional[...]."""
return typing.get_origin(field) is Union and type(None) in typing.get_args(field)
def _is_list(field: Type[Any]) -> bool:
"""Returns True when the given type annotation is list[...]."""
return typing.get_origin(field) is list
# Table of primitive serializers: each entry writes a value of the keyed type
# to a connection using a big-endian fixed-width encoding; bytes/str are
# length-prefixed with a u64 (str is encoded as UTF-8 first).
_serializers: dict[Any, Callable[[Connection, Any], Any]] = {}
_serializers[bool] = lambda conn, v: conn.write(pack(">?", v), 1)
_serializers[i32] = lambda conn, v: conn.write(pack(">i", v), 4)
_serializers[u32] = lambda conn, v: conn.write(pack(">I", v), 4)
_serializers[i64] = lambda conn, v: conn.write(pack(">q", v), 8)
_serializers[u64] = lambda conn, v: conn.write(pack(">Q", v), 8)
_serializers[bytes] = lambda conn, v: (_serializers[u64](conn, len(v)), conn.write(v, len(v))) # type: ignore[func-returns-value]
_serializers[str] = lambda conn, v: _serializers[bytes](conn, v.encode('utf-8'))
def _serialize(conn: Connection, vtype: Type[Any], v: Any) -> None:
    """Write v to conn according to the type annotation vtype.

    Optionals are encoded as a presence bool followed by the value; lists as a
    u64 element count followed by each element. Raises ValueError for any type
    that has no known encoding.
    """
    serializer = _serializers.get(vtype)
    if serializer is not None:
        serializer(conn, v)
        return
    if _is_optional(vtype):
        inner = typing.get_args(vtype)[0]
        present = v is not None
        _serializers[bool](conn, present)
        if present:
            _serialize(conn, inner, v)
        return
    if _is_list(vtype):
        inner = typing.get_args(vtype)[0]
        _serializers[u64](conn, len(v))
        for item in v:
            _serialize(conn, inner, item)
        return
    raise ValueError(f"Cannot serialize object of type {vtype}")
# Table of primitive deserializers: the exact inverse of _serializers above.
_deserializers: dict[Any, Callable[[Connection], Any]] = {}
_deserializers[bool] = lambda conn: unpack(">?", conn.read(1))[0]
_deserializers[i32] = lambda conn: unpack(">i", conn.read(4))[0]
_deserializers[u32] = lambda conn: unpack(">I", conn.read(4))[0]
_deserializers[i64] = lambda conn: unpack(">q", conn.read(8))[0]
_deserializers[u64] = lambda conn: unpack(">Q", conn.read(8))[0]
_deserializers[bytes] = lambda conn: conn.read(_deserializers[u64](conn))
_deserializers[str] = lambda conn: _deserializers[bytes](conn).decode('utf-8')
def _deserialize(conn: Connection, vtype: Type[Any]) -> Any:
    """Read and return one value of annotated type vtype from conn.

    Mirrors _serialize: Optionals start with a presence bool, lists with a
    u64 element count. Raises ValueError for unknown types.
    """
    deserializer = _deserializers.get(vtype)
    if deserializer is not None:
        return deserializer(conn)
    if _is_optional(vtype):
        inner = typing.get_args(vtype)[0]
        if _deserializers[bool](conn):
            return _deserialize(conn, inner)
        return None
    if _is_list(vtype):
        inner = typing.get_args(vtype)[0]
        count = _deserializers[u64](conn)
        return [_deserialize(conn, inner) for _ in range(count)]
    raise ValueError(f"Cannot deserialize object of type {vtype}")
# Packet helpers
# ----------------------------------------------------------------
# Registry of all packet classes; a packet's wire id is its index in this list,
# so client and server must register packets in the same order.
packets: list[Any] = []
# Maps wire packet id -> function that deserializes that packet's body.
packet_deserializers: dict[int, Callable[[Connection], Any]] = {}
def _handle_response_packet() -> None:
    # Response packets only ever travel server -> client; receiving one as a
    # request is a protocol violation.
    raise RuntimeError("This packet is a server-side response packet and must never be sent by the client!")
# Define generic read and write functions
def _read_packet(cls: Type[Any], conn: Connection) -> Any:
    """Deserialize one packet of type cls (a NamedTuple) from conn.

    Fields are read in declaration order, using each field's type annotation.
    """
    values = {name: _deserialize(conn, cls.__annotations__[name])
              for name in cast(Any, cls)._fields}
    return cls(**values)
def _write_packet(cls: Type[Any], packet_id: u32, this: object, conn: Connection) -> None:
    """Serialize the packet header (id) plus every field of `this`, then flush."""
    _serialize(conn, u32, packet_id)
    for field_name in cls._fields:
        field_type = cls.__annotations__[field_name]
        _serialize(conn, field_type, getattr(this, field_name))
    conn.flush()
def Packet(type: str) -> Callable[[Type[Any]], Any]: # pylint: disable=redefined-builtin
    """Decorator for packet types. Registers the packet and generates read and write methods."""
    if type not in ['response', 'request']:
        raise RuntimeError("Invalid @Packet decoration: type must be either 'response' or 'request'.")
    def wrapper(cls: Type[Any]) -> Type[Any]:
        # Assert cls is a NamedTuple
        if not hasattr(cls, '_fields'):
            raise RuntimeError("Invalid @Packet decoration: Decorated class must inherit from NamedTuple.")
        # Find next packet id
        # NOTE: the id is the registration index, so decoration order defines
        # the wire protocol; both sides must register packets identically.
        packet_id = u32(len(packets))
        # Replace functions
        cls._is_packet = True # pylint: disable=protected-access
        cls._write = lambda self, conn: _write_packet(cls, packet_id, self, conn) # pylint: disable=protected-access
        if type == 'response':
            # Response packets must never be dispatched server-side.
            cls.handle = _handle_response_packet
        elif type == 'request':
            if not hasattr(cls, 'handle') or not callable(getattr(cls, 'handle')):
                raise RuntimeError("Invalid @Packet decoration: request packets must provide a handle method!")
        # Register packet
        packets.append(cls)
        packet_deserializers[packet_id] = lambda conn: _read_packet(cls, conn)
        return cls
    return wrapper
# Packets
# ----------------------------------------------------------------
@Packet(type='response')
class PacketOk(NamedTuple):
    """This packet is used by some requests as a generic successful status indicator."""
@Packet(type='response')
class PacketAck(NamedTuple):
    """This packet is used to acknowledge a previous PacketCheckAlive packet."""
@Packet(type='request')
class PacketCheckAlive(NamedTuple):
    """This packet is used to check whether a connection is alive.
    The receiver must answer with PacketAck immediately."""
    def handle(self, conn: Connection) -> None:
        """Responds with PacketAck."""
        _ = (self)  # no payload; discard self to silence lint
        conn.write_packet(PacketAck())
@Packet(type='request')
class PacketExit(NamedTuple):
    """This packet is used to signal the server to close the connection and end the dispatcher."""
    def handle(self, conn: Connection) -> None:
        """Signals the connection to close."""
        _ = (self)
        conn.should_close = True  # checked by the _main loop after each packet
@Packet(type='response')
class PacketOSError(NamedTuple):
    """This packet is sent when an OSError occurs."""
    # errno/strerror mirror the fields of the original OSError on the server.
    errno: i64
    strerror: str
    msg: str
@Packet(type='response')
class PacketInvalidField(NamedTuple):
    """This packet is used when an invalid value was given in a previous packet."""
    field: str
    error_message: str
@Packet(type='response')
class PacketProcessCompleted(NamedTuple):
    """This packet is used to return the results of a process."""
    # stdout/stderr are None when the process ran without output capture.
    stdout: Optional[bytes]
    stderr: Optional[bytes]
    returncode: i32
@Packet(type='response')
class PacketProcessError(NamedTuple):
    """This packet is used to indicate an error when running a process or when running the preexec_fn."""
    message: str
@Packet(type='request')
class PacketProcessRun(NamedTuple):
    """This packet is used to run a process."""
    command: list[str]
    stdin: Optional[bytes] = None
    capture_output: bool = True
    user: Optional[str] = None
    group: Optional[str] = None
    umask: Optional[str] = None
    cwd: Optional[str] = None
    def handle(self, conn: Connection) -> None:
        """Runs the requested command.

        Validates umask/user/group/cwd first (answering PacketInvalidField on
        bad input), then executes the command and replies with either
        PacketProcessCompleted or PacketProcessError.
        """
        # By default we will run commands as the current user.
        uid, gid = (None, None)
        umask_oct = 0o077
        if self.umask is not None:
            try:
                umask_oct = _resolve_oct(self.umask)
            except ValueError as e:
                conn.write_packet(PacketInvalidField("umask", str(e)))
                return
        if self.user is not None:
            try:
                (uid, gid) = _resolve_user(self.user)
            except ValueError as e:
                conn.write_packet(PacketInvalidField("user", str(e)))
                return
        if self.group is not None:
            # An explicit group overrides the user's primary group.
            try:
                gid = _resolve_group(self.group)
            except ValueError as e:
                conn.write_packet(PacketInvalidField("group", str(e)))
                return
        if self.cwd is not None:
            if not os.path.isdir(self.cwd):
                conn.write_packet(PacketInvalidField("cwd", "The directory does not exist"))
                return
        def child_preexec() -> None:
            """
            Sets umask and becomes the correct user.
            """
            # Runs in the forked child just before exec. The gid is switched
            # before the uid, while we still have the privileges to do so.
            os.umask(umask_oct)
            if gid is not None:
                os.setresgid(gid, gid, gid)
            if uid is not None:
                os.setresuid(uid, uid, uid)
            if self.cwd is not None:
                os.chdir(self.cwd)
        # Execute command with desired parameters
        try:
            result = subprocess.run(self.command,
                input=self.stdin,
                capture_output=self.capture_output,
                cwd=self.cwd,
                preexec_fn=child_preexec,
                check=False)
        except subprocess.SubprocessError as e:
            conn.write_packet(PacketProcessError(str(e)))
            return
        # Send response for command result
        conn.write_packet(PacketProcessCompleted(result.stdout, result.stderr, i32(result.returncode)))
@Packet(type='response')
class PacketStatResult(NamedTuple):
    """This packet is used to return the results of a stat packet."""
    type: str # pylint: disable=redefined-builtin
    mode: u64
    owner: str
    group: str
    size: u64
    mtime: u64
    ctime: u64
    sha512sum: Optional[bytes]
@Packet(type='request')
class PacketStat(NamedTuple):
    """This packet is used to retrieve information about a file or directory."""
    path: str
    follow_links: bool = False
    sha512sum: bool = False
    def handle(self, conn: Connection) -> None:
        """Stats the requested path.

        A missing path answers PacketInvalidField; any other OSError
        propagates (and is forwarded as PacketOSError by the main loop).
        """
        try:
            s = os.stat(self.path, follow_symlinks=self.follow_links)
        except OSError as e:
            if e.errno != sys_errno.ENOENT:
                raise
            conn.write_packet(PacketInvalidField("path", str(e)))
            return
        # Classify the file type from the stat mode bits.
        ftype = "dir" if stat.S_ISDIR(s.st_mode) else \
               "chr" if stat.S_ISCHR(s.st_mode) else \
               "blk" if stat.S_ISBLK(s.st_mode) else \
               "file" if stat.S_ISREG(s.st_mode) else \
               "fifo" if stat.S_ISFIFO(s.st_mode) else \
               "link" if stat.S_ISLNK(s.st_mode) else \
               "sock" if stat.S_ISSOCK(s.st_mode) else \
               "other"
        # Fall back to the numeric ids when names cannot be resolved.
        try:
            owner = getpwuid(s.st_uid).pw_name
        except KeyError:
            owner = str(s.st_uid)
        try:
            group = getgrgid(s.st_gid).gr_name
        except KeyError:
            group = str(s.st_gid)
        sha512sum: Optional[bytes]
        # Content hashes are only computed for regular files when requested.
        if self.sha512sum and ftype == "file":
            with open(self.path, 'rb') as f:
                sha512sum = hashlib.sha512(f.read()).digest()
        else:
            sha512sum = None
        # Send response
        conn.write_packet(PacketStatResult(
            type=ftype,
            mode=u64(stat.S_IMODE(s.st_mode)),
            owner=owner,
            group=group,
            size=u64(s.st_size),
            mtime=u64(s.st_mtime_ns),
            ctime=u64(s.st_ctime_ns),
            sha512sum=sha512sum))
@Packet(type='response')
class PacketResolveResult(NamedTuple):
    """This packet is used to return the results of a resolve packet."""
    value: str
@Packet(type='request')
class PacketResolveUser(NamedTuple):
    """
    This packet is used to canonicalize a user name / uid and to ensure it exists.
    If None is given, it queries the current user.
    """
    user: Optional[str]
    def handle(self, conn: Connection) -> None:
        """Resolves the requested user."""
        user = self.user if self.user is not None else str(os.getuid())
        # Try by name first, then fall back to a numeric uid.
        try:
            pw = getpwnam(user)
        except KeyError:
            try:
                uid = int(user)
                pw = getpwuid(uid)
            except (KeyError, ValueError):
                conn.write_packet(PacketInvalidField("user", "The user does not exist"))
                return
        # Send response
        conn.write_packet(PacketResolveResult(value=pw.pw_name))
@Packet(type='request')
class PacketResolveGroup(NamedTuple):
    """
    This packet is used to canonicalize a group name / gid and to ensure it exists.
    If None is given, it queries the current group.
    """
    group: Optional[str]
    def handle(self, conn: Connection) -> None:
        """Resolves the requested group."""
        group = self.group if self.group is not None else str(os.getgid())
        # Try by name first, then fall back to a numeric gid.
        try:
            gr = getgrnam(group)
        except KeyError:
            try:
                gid = int(group)
                gr = getgrgid(gid)
            except (KeyError, ValueError):
                conn.write_packet(PacketInvalidField("group", "The group does not exist"))
                return
        # Send response
        conn.write_packet(PacketResolveResult(value=gr.gr_name))
@Packet(type='request')
class PacketUpload(NamedTuple):
    """This packet is used to upload the given content to the remote and save it as a file.
    Overwrites existing files. Responds with PacketOk if saving was successful, or PacketInvalidField if any
    field contained an invalid value."""
    file: str
    content: bytes
    mode: Optional[str] = None
    owner: Optional[str] = None
    group: Optional[str] = None
    def handle(self, conn: Connection) -> None:
        """Saves the content under the given path, then applies mode/owner/group if given."""
        uid, gid = (None, None)
        mode_oct = None
        if self.mode is not None:
            try:
                mode_oct = _resolve_oct(self.mode)
            except ValueError as e:
                conn.write_packet(PacketInvalidField("mode", str(e)))
                return
        if self.owner is not None:
            try:
                (uid, gid) = _resolve_user(self.owner)
            except ValueError as e:
                conn.write_packet(PacketInvalidField("owner", str(e)))
                return
        if self.group is not None:
            # An explicit group overrides the owner's primary group.
            try:
                gid = _resolve_group(self.group)
            except ValueError as e:
                conn.write_packet(PacketInvalidField("group", str(e)))
                return
        with open(self.file, 'wb') as f:
            f.write(self.content)
        if mode_oct is not None:
            os.chmod(self.file, mode_oct)
        if uid is not None or gid is not None:
            # os.chown treats -1 as "leave unchanged". The previous
            # "uid or 0" / "gid or 0" erroneously chowned to root when only
            # one of owner/group was given.
            os.chown(self.file, uid if uid is not None else -1, gid if gid is not None else -1)
        conn.write_packet(PacketOk())
@Packet(type='response')
class PacketDownloadResult(NamedTuple):
    """This packet is used to return the content of a file."""
    content: bytes
@Packet(type='request')
class PacketDownload(NamedTuple):
    """This packet is used to download the contents of a given file.
    Responds with PacketDownloadResult if reading was successful, or PacketInvalidField if any
    field contained an invalid value."""
    file: str
    def handle(self, conn: Connection) -> None:
        """Reads the file.

        A missing file answers PacketInvalidField; other OSErrors propagate.
        """
        try:
            with open(self.file, 'rb') as f:
                content = f.read()
        except OSError as e:
            if e.errno != sys_errno.ENOENT:
                raise
            conn.write_packet(PacketInvalidField("file", str(e)))
            return
        conn.write_packet(PacketDownloadResult(content))
@Packet(type='response')
class PacketUserEntry(NamedTuple):
    """This packet is used to return information about a user."""
    name: str
    """The name of the user"""
    uid: i64
    """The numerical user id"""
    group: str
    """The name of the primary group"""
    gid: i64
    """The numerical primary group id"""
    groups: list[str]
    """All names of the supplementary groups this user belongs to"""
    password_hash: Optional[str]
    """The password hash from shadow"""
    gecos: str
    """The comment (GECOS) field of the user"""
    home: str
    """The home directory of the user"""
    shell: str
    """The default shell of the user"""
@Packet(type='request')
class PacketQueryUser(NamedTuple):
    """This packet is used to get information about a group via pwd.getpw*."""
    user: str
    """User name or decimal uid"""
    query_password_hash: bool
    """Whether the current password hash from shadow should also be returned"""
    def handle(self, conn: Connection) -> None:
        """Queries the requested user."""
        # Try by name first, then fall back to a numeric uid.
        try:
            pw = getpwnam(self.user)
        except KeyError:
            try:
                # NOTE(review): this int is a uid despite the variable name 'gid'.
                gid = int(self.user)
                pw = getpwuid(gid)
            except (KeyError, ValueError):
                conn.write_packet(PacketInvalidField("user", "The user does not exist"))
                return
        pw_hash: Optional[str] = None
        if self.query_password_hash:
            # Reading /etc/shadow presumably requires elevated privileges --
            # a KeyError covers both "no entry" and "not accessible".
            try:
                pw_hash = getspnam(pw.pw_name).sp_pwdp
            except KeyError:
                conn.write_packet(PacketInvalidField("user", "The user has no shadow entry, or it is inaccessible."))
                return
        # Supplementary groups: every group that lists this user as a member.
        groups = [g.gr_name for g in getgrall() if pw.pw_name in g.gr_mem]
        try:
            conn.write_packet(PacketUserEntry(
                name=pw.pw_name,
                uid=i64(pw.pw_uid),
                group=getgrgid(pw.pw_gid).gr_name,
                gid=i64(pw.pw_gid),
                groups=groups,
                password_hash=pw_hash,
                gecos=pw.pw_gecos,
                home=pw.pw_dir,
                shell=pw.pw_shell))
        except KeyError:
            # Raised by getgrgid above when the primary group is missing.
            conn.write_packet(PacketInvalidField("user", "The user's primary group doesn't exist"))
            return
@Packet(type='response')
class PacketGroupEntry(NamedTuple):
    """This packet is used to return information about a group."""
    name: str
    """The name of the group"""
    gid: i64
    """The numerical group id"""
    members: list[str]
    """All the group member's user names"""
@Packet(type='request')
class PacketQueryGroup(NamedTuple):
    """This packet is used to get information about a group via grp.getgr*."""
    group: str
    """Group name or decimal gid"""
    def handle(self, conn: Connection) -> None:
        """Queries the requested group."""
        # Try by name first, then fall back to a numeric gid.
        try:
            gr = getgrnam(self.group)
        except KeyError:
            try:
                gid = int(self.group)
                gr = getgrgid(gid)
            except (KeyError, ValueError):
                conn.write_packet(PacketInvalidField("group", "The group does not exist"))
                return
        # Send response
        conn.write_packet(PacketGroupEntry(name=gr.gr_name, gid=i64(gr.gr_gid), members=gr.gr_mem))
@Packet(type='response')
class PacketEnvironVar(NamedTuple):
    """This packet is used to return an environment variable."""
    value: Optional[str]
    """The value of the environment variable, if it was set."""
@Packet(type='request')
class PacketGetenv(NamedTuple):
    """This packet is used to get an environment variable."""
    key: str
    """The environment variable to retrieve"""
    def handle(self, conn: Connection) -> None:
        """Gets the requested environment variable."""
        # os.getenv returns None when the variable is unset.
        conn.write_packet(PacketEnvironVar(value=os.getenv(self.key)))
def receive_packet(conn: Connection, request: Any = None) -> Any:
    """
    Receives the next packet from the given connection.
    Parameters
    ----------
    conn
        The connection
    request
        The corresponding request packet, if any.
    Returns
    -------
    Any
        The received packet
    Raises
    ------
    RemoteOSError
        An OSError occurred on the remote host.
    IOError
        When an issue on the connection occurs.
    ValueError
        When an PacketInvalidField is received as the response and a corresponding request packet was given.
    """
    try:
        packet_id = cast(u32, _deserialize(conn, u32))
        if packet_id not in packet_deserializers:
            raise IOError(f"Received invalid packet id '{packet_id}'")
        try:
            packet_name = packets[packet_id].__name__
        except KeyError:
            packet_name = f"[unknown packet with id {packet_id}]"
        _log(f"got packet header for: {packet_name}")
        packet = packet_deserializers[packet_id](conn)
        # Error-style responses are translated into local exceptions here, so
        # callers only ever see successful packets as return values.
        if isinstance(packet, PacketOSError):
            raise RemoteOSError(msg=packet.msg, errno=packet.errno, strerror=packet.strerror)
        if isinstance(packet, PacketInvalidField):
            raise ValueError(f"Invalid value '{getattr(request, packet.field)}' given for field '{packet.field}': {packet.error_message}")
        return packet
    except struct.error as e:
        # struct raises this when the stream ends in the middle of a field.
        raise IOError("Unexpected EOF in data stream") from e
def _main() -> None:
    """Handles all incoming packets in a loop until an invalid packet or a PacketExit is received."""
    # Restrictive default umask: files created by the dispatcher stay private.
    os.umask(0o077)
    # pylint: disable=global-statement
    global debug
    global is_server
    debug = len(sys.argv) > 1 and sys.argv[1] == "--debug"
    is_server = __name__ == "__main__"
    # The protocol runs over this process's own stdin/stdout byte streams.
    conn = Connection(sys.stdin.buffer, sys.stdout.buffer)
    while not conn.should_close:
        try:
            _log("waiting for packet")
            packet = receive_packet(conn)
        except IOError as e:
            print(f"{str(e)}. Aborting.", file=sys.stderr, flush=True)
            sys.exit(3)
        _log(f"received packet {type(packet).__name__}")
        try:
            packet.handle(conn)
        except OSError as e:
            # Forward OS-level failures to the client instead of crashing.
            conn.write_packet(PacketOSError(errno=i64(e.errno), strerror=e.strerror, msg=str(e)))
if __name__ == '__main__':
    _main()
| [
"os.setresuid",
"os.getuid",
"os.chown",
"stat.S_IMODE",
"sys.exit",
"stat.S_ISLNK",
"typing.get_origin",
"os.setresgid",
"stat.S_ISDIR",
"stat.S_ISFIFO",
"pwd.getpwnam",
"subprocess.run",
"stat.S_ISBLK",
"typing.NewType",
"os.chmod",
"os.umask",
"os.path.isdir",
"os.getgid",
"ty... | [((633, 645), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (640, 645), False, 'from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast\n'), ((652, 671), 'typing.NewType', 'NewType', (['"""i32"""', 'int'], {}), "('i32', int)\n", (659, 671), False, 'from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast\n'), ((678, 697), 'typing.NewType', 'NewType', (['"""u32"""', 'int'], {}), "('u32', int)\n", (685, 697), False, 'from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast\n'), ((704, 723), 'typing.NewType', 'NewType', (['"""i64"""', 'int'], {}), "('i64', int)\n", (711, 723), False, 'from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast\n'), ((730, 749), 'typing.NewType', 'NewType', (['"""u64"""', 'int'], {}), "('u64', int)\n", (737, 749), False, 'from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast\n'), ((26559, 26571), 'os.umask', 'os.umask', (['(63)'], {}), '(63)\n', (26567, 26571), False, 'import os\n'), ((1359, 1386), 'typing.cast', 'cast', (['bool', 'fora.args.debug'], {}), '(bool, fora.args.debug)\n', (1363, 1386), False, 'from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast\n'), ((2820, 2834), 'pwd.getpwnam', 'getpwnam', (['user'], {}), '(user)\n', (2828, 2834), False, 'from pwd import getpwnam, getpwuid\n'), ((3661, 3676), 'grp.getgrnam', 'getgrnam', (['group'], {}), '(group)\n', (3669, 3676), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((5849, 5873), 'typing.get_origin', 'typing.get_origin', (['field'], {}), '(field)\n', (5866, 5873), False, 'import typing\n'), ((5995, 6008), 'struct.pack', 'pack', (['""">?"""', 'v'], {}), "('>?', v)\n", (5999, 6008), False, 'from struct import pack, unpack\n'), ((6062, 6075), 'struct.pack', 'pack', (['""">i"""', 'v'], {}), "('>i', v)\n", (6066, 
6075), False, 'from struct import pack, unpack\n'), ((6129, 6142), 'struct.pack', 'pack', (['""">I"""', 'v'], {}), "('>I', v)\n", (6133, 6142), False, 'from struct import pack, unpack\n'), ((6196, 6209), 'struct.pack', 'pack', (['""">q"""', 'v'], {}), "('>q', v)\n", (6200, 6209), False, 'from struct import pack, unpack\n'), ((6263, 6276), 'struct.pack', 'pack', (['""">Q"""', 'v'], {}), "('>Q', v)\n", (6267, 6276), False, 'from struct import pack, unpack\n'), ((8931, 8945), 'typing.cast', 'cast', (['Any', 'cls'], {}), '(Any, cls)\n', (8935, 8945), False, 'from typing import IO, Any, Type, TypeVar, Callable, Optional, Union, NamedTuple, NewType, cast\n'), ((5654, 5678), 'typing.get_origin', 'typing.get_origin', (['field'], {}), '(field)\n', (5671, 5678), False, 'import typing\n'), ((5706, 5728), 'typing.get_args', 'typing.get_args', (['field'], {}), '(field)\n', (5721, 5728), False, 'import typing\n'), ((13953, 13972), 'os.umask', 'os.umask', (['umask_oct'], {}), '(umask_oct)\n', (13961, 13972), False, 'import os\n'), ((14282, 14422), 'subprocess.run', 'subprocess.run', (['self.command'], {'input': 'self.stdin', 'capture_output': 'self.capture_output', 'cwd': 'self.cwd', 'preexec_fn': 'child_preexec', 'check': '(False)'}), '(self.command, input=self.stdin, capture_output=self.\n capture_output, cwd=self.cwd, preexec_fn=child_preexec, check=False)\n', (14296, 14422), False, 'import subprocess\n'), ((15399, 15452), 'os.stat', 'os.stat', (['self.path'], {'follow_symlinks': 'self.follow_links'}), '(self.path, follow_symlinks=self.follow_links)\n', (15406, 15452), False, 'import os\n'), ((15660, 15683), 'stat.S_ISDIR', 'stat.S_ISDIR', (['s.st_mode'], {}), '(s.st_mode)\n', (15672, 15683), False, 'import stat\n'), ((17454, 17468), 'pwd.getpwnam', 'getpwnam', (['user'], {}), '(user)\n', (17462, 17468), False, 'from pwd import getpwnam, getpwuid\n'), ((18262, 18277), 'grp.getgrnam', 'getgrnam', (['group'], {}), '(group)\n', (18270, 18277), False, 'from grp import getgrnam, 
getgrgid, getgrall\n'), ((20044, 20073), 'os.chmod', 'os.chmod', (['self.file', 'mode_oct'], {}), '(self.file, mode_oct)\n', (20052, 20073), False, 'import os\n'), ((20133, 20172), 'os.chown', 'os.chown', (['self.file', '(uid or 0)', '(gid or 0)'], {}), '(self.file, uid or 0, gid or 0)\n', (20141, 20172), False, 'import os\n'), ((22130, 22149), 'pwd.getpwnam', 'getpwnam', (['self.user'], {}), '(self.user)\n', (22138, 22149), False, 'from pwd import getpwnam, getpwuid\n'), ((23947, 23967), 'grp.getgrnam', 'getgrnam', (['self.group'], {}), '(self.group)\n', (23955, 23967), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((6779, 6801), 'typing.get_args', 'typing.get_args', (['vtype'], {}), '(vtype)\n', (6794, 6801), False, 'import typing\n'), ((8069, 8091), 'typing.get_args', 'typing.get_args', (['vtype'], {}), '(vtype)\n', (8084, 8091), False, 'import typing\n'), ((13677, 13700), 'os.path.isdir', 'os.path.isdir', (['self.cwd'], {}), '(self.cwd)\n', (13690, 13700), False, 'import os\n'), ((14021, 14048), 'os.setresgid', 'os.setresgid', (['gid', 'gid', 'gid'], {}), '(gid, gid, gid)\n', (14033, 14048), False, 'import os\n'), ((14097, 14124), 'os.setresuid', 'os.setresuid', (['uid', 'uid', 'uid'], {}), '(uid, uid, uid)\n', (14109, 14124), False, 'import os\n'), ((14178, 14196), 'os.chdir', 'os.chdir', (['self.cwd'], {}), '(self.cwd)\n', (14186, 14196), False, 'import os\n'), ((15718, 15741), 'stat.S_ISCHR', 'stat.S_ISCHR', (['s.st_mode'], {}), '(s.st_mode)\n', (15730, 15741), False, 'import stat\n'), ((16098, 16116), 'pwd.getpwuid', 'getpwuid', (['s.st_uid'], {}), '(s.st_uid)\n', (16106, 16116), False, 'from pwd import getpwnam, getpwuid\n'), ((16218, 16236), 'grp.getgrgid', 'getgrgid', (['s.st_gid'], {}), '(s.st_gid)\n', (16226, 16236), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((17410, 17421), 'os.getuid', 'os.getuid', ([], {}), '()\n', (17419, 17421), False, 'import os\n'), ((18218, 18229), 'os.getgid', 'os.getgid', ([], {}), '()\n', (18227, 
18229), False, 'import os\n'), ((22775, 22785), 'grp.getgrall', 'getgrall', ([], {}), '()\n', (22783, 22785), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((27051, 27062), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (27059, 27062), False, 'import sys\n'), ((2935, 2948), 'pwd.getpwuid', 'getpwuid', (['uid'], {}), '(uid)\n', (2943, 2948), False, 'from pwd import getpwnam, getpwuid\n'), ((3778, 3791), 'grp.getgrgid', 'getgrgid', (['gid'], {}), '(gid)\n', (3786, 3791), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((6971, 6993), 'typing.get_args', 'typing.get_args', (['vtype'], {}), '(vtype)\n', (6986, 6993), False, 'import typing\n'), ((8256, 8278), 'typing.get_args', 'typing.get_args', (['vtype'], {}), '(vtype)\n', (8271, 8278), False, 'import typing\n'), ((15776, 15799), 'stat.S_ISBLK', 'stat.S_ISBLK', (['s.st_mode'], {}), '(s.st_mode)\n', (15788, 15799), False, 'import stat\n'), ((17564, 17577), 'pwd.getpwuid', 'getpwuid', (['uid'], {}), '(uid)\n', (17572, 17577), False, 'from pwd import getpwnam, getpwuid\n'), ((18374, 18387), 'grp.getgrgid', 'getgrgid', (['gid'], {}), '(gid)\n', (18382, 18387), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((22250, 22263), 'pwd.getpwuid', 'getpwuid', (['gid'], {}), '(gid)\n', (22258, 22263), False, 'from pwd import getpwnam, getpwuid\n'), ((22538, 22558), 'spwd.getspnam', 'getspnam', (['pw.pw_name'], {}), '(pw.pw_name)\n', (22546, 22558), False, 'from spwd import getspnam\n'), ((24069, 24082), 'grp.getgrgid', 'getgrgid', (['gid'], {}), '(gid)\n', (24077, 24082), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((24913, 24932), 'os.getenv', 'os.getenv', (['self.key'], {}), '(self.key)\n', (24922, 24932), False, 'import os\n'), ((15834, 15857), 'stat.S_ISREG', 'stat.S_ISREG', (['s.st_mode'], {}), '(s.st_mode)\n', (15846, 15857), False, 'import stat\n'), ((16651, 16674), 'stat.S_IMODE', 'stat.S_IMODE', (['s.st_mode'], {}), '(s.st_mode)\n', (16663, 16674), False, 'import stat\n'), 
((15892, 15916), 'stat.S_ISFIFO', 'stat.S_ISFIFO', (['s.st_mode'], {}), '(s.st_mode)\n', (15905, 15916), False, 'import stat\n'), ((22964, 22983), 'grp.getgrgid', 'getgrgid', (['pw.pw_gid'], {}), '(pw.pw_gid)\n', (22972, 22983), False, 'from grp import getgrnam, getgrgid, getgrall\n'), ((15950, 15973), 'stat.S_ISLNK', 'stat.S_ISLNK', (['s.st_mode'], {}), '(s.st_mode)\n', (15962, 15973), False, 'import stat\n'), ((16008, 16032), 'stat.S_ISSOCK', 'stat.S_ISSOCK', (['s.st_mode'], {}), '(s.st_mode)\n', (16021, 16032), False, 'import stat\n')] |
# -*- coding: utf-8 -*-
import logging
from django import db
from django.conf import settings
from django.core.management import call_command, BaseCommand, CommandError
from django.utils.module_loading import import_string
from django_orghierarchy.models import Organization
from django.db import transaction
from events.models import Language
from .sync import ModelSyncher
from .base import Importer, register_importer
# Per module logger
logger = logging.getLogger(__name__)
# This importer fills in the fields of the events_language table (the table already exists, but its rows contain only ids).
@register_importer
class LanguageFiedsImporter(Importer):
    """Fills in the translated name fields of the events_language table.

    The Language rows may already exist with only their ids present; this
    importer creates or updates the Finnish, Swedish and English entries with
    their display names in each supported translation.
    """
    name = 'fill_events_language'
    supported_languages = ['fi', 'sv']

    def setup(self):
        """Create or update the fi/sv/en Language rows with translated names."""
        self.organization = ''
        self.data_source = ''
        # Static seed data: one dict per language, including per-language
        # translations of the display name.
        language_set_data = [{
            'id': 'fi',
            'name': 'Suomi',
            'name_fi': 'Suomi',
            'name_sv': 'Finska',
            'name_en': 'Finnish',
        }, {
            'id': 'sv',
            'name': 'Ruotsi',
            'name_fi': 'Ruotsi',
            'name_sv': 'Svenska',
            'name_en': 'Swedish',
        }, {
            'id': 'en',
            'name': 'Englanti',
            'name_fi': 'Englanti',
            'name_sv': 'Engelska',
            'name_en': 'English',
        }]
        for entry in language_set_data:
            _language, created = Language.objects.update_or_create(id=entry['id'], defaults=entry)
            # Use the module-level logger (declared at the top of this file)
            # instead of print, so output honors the project logging config.
            if created:
                logger.info('New language %s (%s)', entry['name_fi'], entry['id'])
            else:
                logger.info('Language %s (%s) already exists and it is updated now.', entry['name_fi'], entry['id'])
| [
"logging.getLogger",
"events.models.Language.objects.update_or_create"
] | [((456, 483), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (473, 483), False, 'import logging\n'), ((1438, 1495), 'events.models.Language.objects.update_or_create', 'Language.objects.update_or_create', ([], {'id': "i['id']", 'defaults': 'i'}), "(id=i['id'], defaults=i)\n", (1471, 1495), False, 'from events.models import Language\n')] |
from hashtable import HashTable
class Set:
def __init__(self, elements=None):
"""Initialize this new set and add the given elements"""
self.hash_set = HashTable()
if elements is not None:
for element in elements:
self.add(element)
def size(self):
"""Returns the size of the set"""
return self.hash_set.size
def contains(self, element):
"""Return True if the set contains the given element, or False.
Running time: 0(1); hash tables automatically resize"""
return self.hash_set.contains(element)
def add(self, element):
"""Add given element to the set, if not already present
Running time: 0(1); adds key-value pair at constant time"""
if not self.hash_set.contains(element):
self.hash_set.set(element, 1)
def remove(self, element):
"""Remove the given element from the set, if exists, or raise KeyError
Running time: 0(1); jump right to element using key & remove -- constant time"""
if self.hash_set.contains(element):
self.hash_set.delete(element)
else:
raise KeyError(f'Item not found: {element}')
def union(self, other_set):
"""Return a new set that is the union of this set and other_set
Running time: 0(m+n); gets keys, possible resizing needed, adds to new set"""
new_set = Set()
t_set = self.hash_set.keys()
o_set = other_set.hash_set.keys()
for element in t_set:
new_set.add(element)
for element in o_set:
new_set.add(element)
return new_set
def intersection(self, other_set):
"""Return a new set that is the intersection of this set and other_set
Running time: 0(n); gets keys linearly"""
new_set = Set()
o_set = other_set.hash_set.keys()
for element in o_set:
if self.contains(element):
new_set.add(element)
return new_set
def difference(self, other_set):
"""Return a new set that is the difference of this set and other_set
Running time: 0(n); gets keys linearly"""
new_set = Set()
t_set = self.hash_set.keys()
o_set = other_set.hash_set.keys()
for element in t_set:
if other_set.contains(element) is False:
new_set.add(element)
for element in o_set:
if self.contains(element) is False:
new_set.add(element)
return new_set
def is_subset(self, other_set):
"""Return True if other_set is a subset of this set, or False
Running time: 0(n); gets keys linearly"""
t_set = self.hash_set.keys()
o_set = other_set.hash_set.keys()
for element in o_set:
if element not in t_set:
return False
return True
| [
"hashtable.HashTable"
] | [((173, 184), 'hashtable.HashTable', 'HashTable', ([], {}), '()\n', (182, 184), False, 'from hashtable import HashTable\n')] |
from afterpay.attribute_getter import AttributeGetter
from afterpay.exceptions import AfterpayError
class Merchant(AttributeGetter):
"""
Merchant object
Attributes:
redirectConfirmUrl: The consumer is redirected to this URL when the payment process is completed.
redirectCancelUrl: The consumer is redirected to this URL if the payment process is cancelled.
"""
attribute_list = [
"redirectConfirmUrl",
"redirectCancelUrl",
]
def __init__(self, attributes):
if "redirectConfirmUrl" not in attributes:
raise AfterpayError("Cannot initialize Contact object without a 'redirectConfirmUrl'")
if "redirectCancelUrl" not in attributes:
raise AfterpayError("Cannot initialize Contact object without a 'redirectCancelUrl'")
AttributeGetter.__init__(self, attributes)
def __repr__(self):
return super(Merchant, self).__repr__(self.attribute_list)
def get_json(self):
return {
i: super(Merchant, self).__dict__[i] for i in super(Merchant, self).__dict__ if i in self.attribute_list
}
| [
"afterpay.attribute_getter.AttributeGetter.__init__",
"afterpay.exceptions.AfterpayError"
] | [((828, 870), 'afterpay.attribute_getter.AttributeGetter.__init__', 'AttributeGetter.__init__', (['self', 'attributes'], {}), '(self, attributes)\n', (852, 870), False, 'from afterpay.attribute_getter import AttributeGetter\n'), ((590, 675), 'afterpay.exceptions.AfterpayError', 'AfterpayError', (['"""Cannot initialize Contact object without a \'redirectConfirmUrl\'"""'], {}), '("Cannot initialize Contact object without a \'redirectConfirmUrl\'"\n )\n', (603, 675), False, 'from afterpay.exceptions import AfterpayError\n'), ((739, 818), 'afterpay.exceptions.AfterpayError', 'AfterpayError', (['"""Cannot initialize Contact object without a \'redirectCancelUrl\'"""'], {}), '("Cannot initialize Contact object without a \'redirectCancelUrl\'")\n', (752, 818), False, 'from afterpay.exceptions import AfterpayError\n')] |
# -*- coding: utf-8 -*-
import logging
import uuid
from gettext import gettext
from py4j.compat import bytearray2
from urllib.parse import urlparse
from limonero.py4j_init import create_gateway
WRONG_HDFS_CONFIG = gettext(
"Limonero HDFS access not correctly configured (see "
"config 'dfs.client.use.datanode.hostname')")
log = logging.getLogger(__name__)
def get_tmp_path(jvm, hdfs, parsed, filename):
"""
Temporary directory used to upload files to HDFS
"""
tmp_dir = '{}/tmp/upload/{}'.format(parsed.path.replace('//', '/'),
filename)
tmp_path = jvm.org.apache.hadoop.fs.Path(tmp_dir)
if not hdfs.exists(tmp_path):
hdfs.mkdirs(tmp_path)
return tmp_path
def create_hdfs_chunk(chunk_number, filename, storage, use_hostname,
gateway_port):
parsed = urlparse(storage.url)
conf, jvm = create_gateway_and_hdfs_conf(use_hostname, gateway_port)
str_uri = '{proto}://{host}:{port}'.format(
proto=parsed.scheme, host=parsed.hostname, port=parsed.port)
uri = jvm.java.net.URI(str_uri)
hdfs = jvm.org.apache.hadoop.fs.FileSystem.get(uri, conf)
tmp_path = get_tmp_path(jvm, hdfs, parsed, filename)
chunk_filename = "{tmp}/{file}.part{part:09d}".format(
tmp=tmp_path.toString(), file=filename, part=chunk_number)
# time.sleep(1)
chunk_path = jvm.org.apache.hadoop.fs.Path(chunk_filename)
return chunk_path, hdfs
def write_chunk(jvm, chunk_number, filename, storage, file_data,
conf):
"""
Writes a single chunk in HDFS. Chunks are provided by the interface and
are blocks of data (binary)
"""
storage_url = storage.url if storage.url[-1] != '/' \
else storage.url[:-1]
parsed = urlparse(storage_url)
if parsed.scheme == 'file':
str_uri = '{proto}://{path}'.format(
proto=parsed.scheme, path=parsed.path)
else:
str_uri = '{proto}://{host}:{port}'.format(
proto=parsed.scheme, host=parsed.hostname,
port=parsed.port)
uri = jvm.java.net.URI(str_uri)
hdfs = jvm.org.apache.hadoop.fs.FileSystem.get(uri, conf)
log.info('================== %s', uri)
tmp_path = get_tmp_path(jvm, hdfs, parsed, filename)
chunk_filename = "{tmp}/{file}.part{part:09d}".format(
tmp=tmp_path.toString(), file=filename, part=chunk_number)
chunk_path = jvm.org.apache.hadoop.fs.Path(chunk_filename)
output_stream = hdfs.create(chunk_path)
block = bytearray2(file_data)
output_stream.write(block, 0, len(block))
output_stream.close()
# Checks if all file's parts are present
full_path = tmp_path
list_iter = hdfs.listFiles(full_path, False)
counter = 0
while list_iter.hasNext():
counter += 1
list_iter.next()
return file_data, hdfs, str_uri, tmp_path, counter
def create_gateway_and_hdfs_conf(use_datanode, gateway_port):
"""
Stats JVM and define HDFS configuration used to upload data.
"""
gateway = create_gateway(log, gateway_port)
jvm = gateway.jvm
conf = jvm.org.apache.hadoop.conf.Configuration()
conf.set('dfs.client.use.datanode.hostname',
"true" if use_datanode else "false")
return conf, jvm
def merge_chunks(conf, filename, full_path, hdfs, jvm, str_uri,
instance_name):
"""
Merge already uploaded chunks in a single file using HDFS API.
"""
final_filename = '{}_{}'.format(uuid.uuid4().hex, filename)
# time to merge all files
target_path = jvm.org.apache.hadoop.fs.Path('{}/{}/{}/{}'.format(
str_uri, '/limonero/data', instance_name, final_filename))
result_code = 200
result = None
if hdfs.exists(target_path):
result = {'status': 'error', 'message': gettext('File already exists')}
result_code = 500
jvm.org.apache.hadoop.fs.FileUtil.copyMerge(
hdfs, full_path, hdfs, target_path, True, conf, None)
return result_code, result, target_path
| [
"logging.getLogger",
"limonero.py4j_init.create_gateway",
"urllib.parse.urlparse",
"uuid.uuid4",
"gettext.gettext",
"py4j.compat.bytearray2"
] | [((216, 330), 'gettext.gettext', 'gettext', (['"""Limonero HDFS access not correctly configured (see config \'dfs.client.use.datanode.hostname\')"""'], {}), '(\n "Limonero HDFS access not correctly configured (see config \'dfs.client.use.datanode.hostname\')"\n )\n', (223, 330), False, 'from gettext import gettext\n'), ((340, 367), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (357, 367), False, 'import logging\n'), ((867, 888), 'urllib.parse.urlparse', 'urlparse', (['storage.url'], {}), '(storage.url)\n', (875, 888), False, 'from urllib.parse import urlparse\n'), ((1790, 1811), 'urllib.parse.urlparse', 'urlparse', (['storage_url'], {}), '(storage_url)\n', (1798, 1811), False, 'from urllib.parse import urlparse\n'), ((2535, 2556), 'py4j.compat.bytearray2', 'bytearray2', (['file_data'], {}), '(file_data)\n', (2545, 2556), False, 'from py4j.compat import bytearray2\n'), ((3058, 3091), 'limonero.py4j_init.create_gateway', 'create_gateway', (['log', 'gateway_port'], {}), '(log, gateway_port)\n', (3072, 3091), False, 'from limonero.py4j_init import create_gateway\n'), ((3506, 3518), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3516, 3518), False, 'import uuid\n'), ((3823, 3853), 'gettext.gettext', 'gettext', (['"""File already exists"""'], {}), "('File already exists')\n", (3830, 3853), False, 'from gettext import gettext\n')] |
from selenium import webdriver
from webdriver_manager.opera import OperaDriverManager
from webdriver_setup.driver import DriverBase
class OperaDriver(DriverBase):
def __init__(self, **kwargs):
super().__init__(**kwargs)
def create_driver(self, **kwargs):
"""Create Opera webdriver
:type kwargs: dict
:param kwargs: Optional arguments
:rtype: selenium.webdriver.Opera
:returns: Opera webdriver instance
"""
cache_timeout = kwargs.get("cache_valid_range", 7)
driver_path = OperaDriverManager(cache_valid_range=cache_timeout).install()
return webdriver.Opera(executable_path=driver_path, **kwargs)
| [
"selenium.webdriver.Opera",
"webdriver_manager.opera.OperaDriverManager"
] | [((638, 692), 'selenium.webdriver.Opera', 'webdriver.Opera', ([], {'executable_path': 'driver_path'}), '(executable_path=driver_path, **kwargs)\n', (653, 692), False, 'from selenium import webdriver\n'), ((560, 611), 'webdriver_manager.opera.OperaDriverManager', 'OperaDriverManager', ([], {'cache_valid_range': 'cache_timeout'}), '(cache_valid_range=cache_timeout)\n', (578, 611), False, 'from webdriver_manager.opera import OperaDriverManager\n')] |
import math
import os
import tempfile
from contextlib import contextmanager
from soap import logger
from soap.common.cache import cached
from soap.expression import operators, OutputVariableTuple
from soap.semantics.error import IntegerInterval, ErrorSemantics
flopoco_command_map = {
'IntAdder': ('{wi}', ),
'IntMultiplier': ('{wi}', '{wi}', '{wi}', '1', '1', '0'),
'FPAdder': ('{we}', '{wf}'),
'FPMultiplier': ('{we}', '{wf}', '{wf}'),
'FPSquarer': ('{we}', '{wf}', '{wf}'),
'FPDiv': ('{we}', '{wf}'),
'FPPow': ('{we}', '{wf}'),
'FPExp': ('{we}', '{wf}'),
'FPLog': ('{we}', '{wf}', '0'),
}
flopoco_operators = tuple(flopoco_command_map)
operators_map = {
operators.ADD_OP: ['FPAdder', 'IntAdder'],
operators.SUBTRACT_OP: ['FPAdder', 'IntAdder'],
operators.MULTIPLY_OP: ['FPMultiplier', 'IntMultiplier'],
operators.DIVIDE_OP: 'FPDiv',
operators.LESS_OP: ['FPAdder', 'IntAdder'],
operators.LESS_EQUAL_OP: ['FPAdder', 'IntAdder'],
operators.GREATER_OP: ['FPAdder', 'IntAdder'],
operators.GREATER_EQUAL_OP: ['FPAdder', 'IntAdder'],
operators.EQUAL_OP: ['FPAdder', 'IntAdder'],
operators.NOT_EQUAL_OP: ['FPAdder', 'IntAdder'],
operators.TERNARY_SELECT_OP: 'Multiplexer',
operators.FIXPOINT_OP: 'Null',
operators.UNARY_SUBTRACT_OP: 'OneLUT',
}
we_min, we_max = 5, 15
we_range = list(range(we_min, we_max + 1))
wf_min, wf_max = 10, 112
wf_range = list(range(wf_min, wf_max + 1))
wi_min, wi_max = 1, 100
wi_range = list(range(wi_min, wi_max + 1))
directory = os.path.dirname(__file__)
default_file = os.path.join(directory, 'luts.pkl')
template_file = os.path.join(directory, 'template.vhdl')
device_name = 'Virtex6'
device_model = 'xc6vlx760'
@contextmanager
def cd(d):
import sh
p = os.path.abspath(os.curdir)
if d:
sh.mkdir('-p', d)
sh.cd(d)
try:
yield
except Exception:
raise
finally:
sh.cd(p)
def flopoco_key(fop, we=-1, wf=-1, wi=-1):
try:
format_tuple = flopoco_command_map[fop]
except KeyError:
raise ValueError('Unrecognised operator {}'.format(fop))
args = [fop]
args += [a.format(we=we, wf=wf, wi=wi) for a in format_tuple]
return tuple(args)
def flopoco(key, file_name=None, dir_name=None):
import sh
file_name = file_name or tempfile.mktemp(suffix='.vhdl', dir='')
cmd = ('-target=' + device_name, '-outputfile=' + file_name) + key
logger.debug('flopoco: {!r}'.format(cmd))
dir_name = dir_name or tempfile.mktemp(suffix='/')
with cd(dir_name):
sh.flopoco(*cmd)
try:
with open(file_name) as fh:
if not fh.read():
raise IOError()
except (IOError, FileNotFoundError):
logger.error('Flopoco failed to generate file ' + file_name)
raise
return file_name, dir_name
def get_luts(file_name):
from bs4 import BeautifulSoup
with open(file_name, 'r') as f:
f = BeautifulSoup(f.read())
app = f.document.application
util = app.find('section', stringid='XST_DEVICE_UTILIZATION_SUMMARY')
luts = util.find('item', stringid='XST_NUMBER_OF_SLICE_LUTS')
if luts:
return int(luts.get('value'))
logger.warning('{} requires no LUTs'.format(file_name))
return 0
def xilinx(file_name, dir_name=None):
import sh
file_base = os.path.split(file_name)[1]
file_base = os.path.splitext(file_base)[0]
synth_name = file_base + '.ngc'
cmd = ['run', '-p', device_model]
cmd += ['-ifn', file_name, '-ifmt', 'VHDL']
cmd += ['-ofn', synth_name, '-ofmt', 'NGC']
logger.debug('xst: {!r}'.format(cmd))
dir_name = dir_name or tempfile.mktemp(suffix='/')
with cd(dir_name):
out_file_name = file_base + '.out.log'
err_file_name = file_base + '.err.log'
sh.xst(sh.echo(*cmd), _out=out_file_name, _err=err_file_name)
return get_luts(file_base + '.ngc_xst.xrpt')
_FILTER_OPERATORS = operators.TRADITIONAL_OPERATORS + [
operators.TERNARY_SELECT_OP
]
@cached
def _datatype_exponent(op, label):
if isinstance(label, OutputVariableTuple):
exponent = 0
for l in label:
label_datatype, label_exponent = _datatype_exponent(op, l)
exponent += label_exponent
return None, exponent
if op == operators.FIXPOINT_OP:
return None, 0
if op not in _FILTER_OPERATORS:
return None, None
bound = label.bound
datatype = type(bound)
if datatype is IntegerInterval:
if bound.is_top():
return datatype, flopoco.wi_max
if bound.is_bottom():
return datatype, flopoco.wi_min
bound_max = max(abs(bound.min), abs(bound.max), 1)
width_max = int(math.ceil(math.log(bound_max + 1, 2)) + 1)
return datatype, width_max
if datatype is ErrorSemantics:
bound = bound.v
if bound.is_top():
return datatype, flopoco.we_max
if bound.is_bottom():
return datatype, flopoco.we_min
bound_max = max(abs(bound.min), abs(bound.max), 1)
try:
exp_max = math.floor(math.log(bound_max, 2))
except OverflowError:
return datatype, flopoco.we_max
try:
exponent = int(math.ceil(math.log(exp_max + 1, 2) + 1))
return datatype, max(exponent, flopoco.we_min)
except ValueError:
return datatype, flopoco.we_min
raise TypeError('Unrecognized type of bound {!r}'.format(bound))
| [
"soap.logger.error",
"sh.echo",
"os.path.join",
"os.path.splitext",
"os.path.split",
"tempfile.mktemp",
"os.path.dirname",
"math.log",
"sh.mkdir",
"os.path.abspath",
"sh.flopoco",
"sh.cd"
] | [((1548, 1573), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1563, 1573), False, 'import os\n'), ((1589, 1624), 'os.path.join', 'os.path.join', (['directory', '"""luts.pkl"""'], {}), "(directory, 'luts.pkl')\n", (1601, 1624), False, 'import os\n'), ((1641, 1681), 'os.path.join', 'os.path.join', (['directory', '"""template.vhdl"""'], {}), "(directory, 'template.vhdl')\n", (1653, 1681), False, 'import os\n'), ((1785, 1811), 'os.path.abspath', 'os.path.abspath', (['os.curdir'], {}), '(os.curdir)\n', (1800, 1811), False, 'import os\n'), ((1830, 1847), 'sh.mkdir', 'sh.mkdir', (['"""-p"""', 'd'], {}), "('-p', d)\n", (1838, 1847), False, 'import sh\n'), ((1856, 1864), 'sh.cd', 'sh.cd', (['d'], {}), '(d)\n', (1861, 1864), False, 'import sh\n'), ((1945, 1953), 'sh.cd', 'sh.cd', (['p'], {}), '(p)\n', (1950, 1953), False, 'import sh\n'), ((2343, 2382), 'tempfile.mktemp', 'tempfile.mktemp', ([], {'suffix': '""".vhdl"""', 'dir': '""""""'}), "(suffix='.vhdl', dir='')\n", (2358, 2382), False, 'import tempfile\n'), ((2528, 2555), 'tempfile.mktemp', 'tempfile.mktemp', ([], {'suffix': '"""/"""'}), "(suffix='/')\n", (2543, 2555), False, 'import tempfile\n'), ((2588, 2604), 'sh.flopoco', 'sh.flopoco', (['*cmd'], {}), '(*cmd)\n', (2598, 2604), False, 'import sh\n'), ((3425, 3449), 'os.path.split', 'os.path.split', (['file_name'], {}), '(file_name)\n', (3438, 3449), False, 'import os\n'), ((3469, 3496), 'os.path.splitext', 'os.path.splitext', (['file_base'], {}), '(file_base)\n', (3485, 3496), False, 'import os\n'), ((3741, 3768), 'tempfile.mktemp', 'tempfile.mktemp', ([], {'suffix': '"""/"""'}), "(suffix='/')\n", (3756, 3768), False, 'import tempfile\n'), ((3902, 3915), 'sh.echo', 'sh.echo', (['*cmd'], {}), '(*cmd)\n', (3909, 3915), False, 'import sh\n'), ((2785, 2845), 'soap.logger.error', 'logger.error', (["('Flopoco failed to generate file ' + file_name)"], {}), "('Flopoco failed to generate file ' + file_name)\n", (2797, 2845), False, 'from soap 
import logger\n'), ((5207, 5229), 'math.log', 'math.log', (['bound_max', '(2)'], {}), '(bound_max, 2)\n', (5215, 5229), False, 'import math\n'), ((4829, 4855), 'math.log', 'math.log', (['(bound_max + 1)', '(2)'], {}), '(bound_max + 1, 2)\n', (4837, 4855), False, 'import math\n'), ((5355, 5379), 'math.log', 'math.log', (['(exp_max + 1)', '(2)'], {}), '(exp_max + 1, 2)\n', (5363, 5379), False, 'import math\n')] |
# -*- coding: utf-8 -*-
from coralquant.models.odl_model import BS_Stock_Basic, BS_SZ50_Stocks, TS_Stock_Basic, TS_TradeCal
from coralquant.spider.bs_stock_basic import get_stock_basic
from coralquant import logger
from datetime import date, datetime, timedelta
from sqlalchemy import MetaData
from coralquant.database import session_scope
from coralquant.settings import CQ_Config
from coralquant.models.orm_model import TaskTable
from coralquant.stringhelper import TaskEnum
from sqlalchemy import func, distinct
_logger = logger.Logger(__name__).get_log()
meta = MetaData()
def create_task(
task: TaskEnum,
begin_date: date,
end_date: date,
codes: list = [],
type: str = None,
status: str = None,
market: str = None,
isdel=False,
):
"""创建任务
:param task: 任务类型
:type task: TaskEnum
:param begin_date: 如果开始时间(begin_date)为None,开始时间取股票上市(IPO)时间
:type begin_date: date
:param end_date: 结束时间
:type end_date: date
:param codes: 股票代码列表, defaults to []
:type codes: list, optional
:param type: 证券类型,其中1:股票,2:指数,3:其它, defaults to None
:type type: str, optional
:param status: 上市状态,其中1:上市,0:退市, defaults to None
:type status: str, optional
:param market: 市场类型 (主板/中小板/创业板/科创板/CDR), defaults to None
:type market: str, optional
:param isdel: 是否删除删除原有的相同任务的历史任务列表, defaults to False
:type isdel: bool, optional
"""
with session_scope() as sm:
if not codes:
query = sm.query(BS_Stock_Basic.code, BS_Stock_Basic.ipoDate)
if market:
query = query.join(TS_Stock_Basic, BS_Stock_Basic.code == TS_Stock_Basic.bs_code).filter(
TS_Stock_Basic.market == market
)
if CQ_Config.IDB_DEBUG == "1": # 如果是测试环境
query = query.join(BS_SZ50_Stocks, BS_Stock_Basic.code == BS_SZ50_Stocks.code)
if status:
query = query.filter(BS_Stock_Basic.status == status)
if type:
query = query.filter(BS_Stock_Basic.type == type)
codes = query.all()
if isdel:
# 删除原有的相同任务的历史任务列表
query = sm.query(TaskTable).filter(TaskTable.task == task.value)
query.delete()
sm.commit()
_logger.info("任务:{}-历史任务已删除".format(task.name))
tasklist = []
for c in codes:
tasktable = TaskTable(
task=task.value,
task_name=task.name,
ts_code=c.code,
begin_date=begin_date if begin_date is not None else c.ipoDate,
end_date=end_date,
)
tasklist.append(tasktable)
sm.bulk_save_objects(tasklist)
_logger.info("生成{}条任务记录".format(len(codes)))
def create_bs_task(task: TaskEnum, tmpcodes=None):
"""
创建BS任务列表
"""
# 删除原有的相同任务的历史任务列表
TaskTable.del_with_task(task)
with session_scope() as sm:
query = sm.query(BS_Stock_Basic.code, BS_Stock_Basic.ipoDate, BS_Stock_Basic.outDate, BS_Stock_Basic.ts_code)
if CQ_Config.IDB_DEBUG == "1": # 如果是测试环境
if tmpcodes:
query = query.filter(BS_Stock_Basic.code.in_(tmpcodes))
else:
query = query.join(BS_SZ50_Stocks, BS_Stock_Basic.code == BS_SZ50_Stocks.code)
# query = query.filter(BS_Stock_Basic.status == True) #取上市的
codes = query.all()
tasklist = []
for c in codes:
tasktable = TaskTable(
task=task.value,
task_name=task.name,
ts_code=c.ts_code,
bs_code=c.code,
begin_date=c.ipoDate,
end_date=c.outDate if c.outDate is not None else datetime.now().date(),
)
tasklist.append(tasktable)
sm.bulk_save_objects(tasklist)
_logger.info("生成{}条任务记录".format(len(codes)))
def create_ts_task(task: TaskEnum):
"""
创建TS任务列表
"""
# 删除原有的相同任务的历史任务列表
TaskTable.del_with_task(task)
with session_scope() as sm:
codes = (
sm.query(
TS_Stock_Basic.ts_code, TS_Stock_Basic.bs_code, TS_Stock_Basic.list_date, TS_Stock_Basic.delist_date
)
.filter(TS_Stock_Basic.list_status == "L")
.all()
)
tasklist = []
for c in codes:
tasktable = TaskTable(
task=task.value,
task_name=task.name,
ts_code=c.ts_code,
bs_code=c.bs_code,
begin_date=c.list_date,
end_date=c.delist_date if c.delist_date is not None else datetime.now().date(),
)
tasklist.append(tasktable)
sm.bulk_save_objects(tasklist)
_logger.info("生成{}条任务记录".format(len(codes)))
def create_ts_cal_task(task: TaskEnum):
"""
创建基于交易日历的任务列表
"""
# 删除历史任务
TaskTable.del_with_task(task)
with session_scope() as sm:
rp = sm.query(distinct(TS_TradeCal.date).label("t_date")).filter(
TS_TradeCal.is_open == True, TS_TradeCal.date <= datetime.now().date() # noqa
)
codes = rp.all()
tasklist = []
for c in codes:
tasktable = TaskTable(
task=task.value,
task_name=task.name,
ts_code="按日期更新",
bs_code="按日期更新",
begin_date=c.t_date,
end_date=c.t_date,
)
tasklist.append(tasktable)
sm.bulk_save_objects(tasklist)
_logger.info("生成{}条任务记录".format(len(codes)))
if __name__ == "__main__":
pass
| [
"coralquant.logger.Logger",
"coralquant.models.orm_model.TaskTable",
"coralquant.database.session_scope",
"sqlalchemy.MetaData",
"coralquant.models.orm_model.TaskTable.del_with_task",
"datetime.datetime.now",
"sqlalchemy.distinct",
"coralquant.models.odl_model.BS_Stock_Basic.code.in_"
] | [((568, 578), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (576, 578), False, 'from sqlalchemy import MetaData\n'), ((2890, 2919), 'coralquant.models.orm_model.TaskTable.del_with_task', 'TaskTable.del_with_task', (['task'], {}), '(task)\n', (2913, 2919), False, 'from coralquant.models.orm_model import TaskTable\n'), ((4009, 4038), 'coralquant.models.orm_model.TaskTable.del_with_task', 'TaskTable.del_with_task', (['task'], {}), '(task)\n', (4032, 4038), False, 'from coralquant.models.orm_model import TaskTable\n'), ((4920, 4949), 'coralquant.models.orm_model.TaskTable.del_with_task', 'TaskTable.del_with_task', (['task'], {}), '(task)\n', (4943, 4949), False, 'from coralquant.models.orm_model import TaskTable\n'), ((526, 549), 'coralquant.logger.Logger', 'logger.Logger', (['__name__'], {}), '(__name__)\n', (539, 549), False, 'from coralquant import logger\n'), ((1423, 1438), 'coralquant.database.session_scope', 'session_scope', ([], {}), '()\n', (1436, 1438), False, 'from coralquant.database import session_scope\n'), ((2930, 2945), 'coralquant.database.session_scope', 'session_scope', ([], {}), '()\n', (2943, 2945), False, 'from coralquant.database import session_scope\n'), ((4049, 4064), 'coralquant.database.session_scope', 'session_scope', ([], {}), '()\n', (4062, 4064), False, 'from coralquant.database import session_scope\n'), ((4960, 4975), 'coralquant.database.session_scope', 'session_scope', ([], {}), '()\n', (4973, 4975), False, 'from coralquant.database import session_scope\n'), ((2411, 2562), 'coralquant.models.orm_model.TaskTable', 'TaskTable', ([], {'task': 'task.value', 'task_name': 'task.name', 'ts_code': 'c.code', 'begin_date': '(begin_date if begin_date is not None else c.ipoDate)', 'end_date': 'end_date'}), '(task=task.value, task_name=task.name, ts_code=c.code, begin_date=\n begin_date if begin_date is not None else c.ipoDate, end_date=end_date)\n', (2420, 2562), False, 'from coralquant.models.orm_model import TaskTable\n'), ((5253, 
5379), 'coralquant.models.orm_model.TaskTable', 'TaskTable', ([], {'task': 'task.value', 'task_name': 'task.name', 'ts_code': '"""按日期更新"""', 'bs_code': '"""按日期更新"""', 'begin_date': 'c.t_date', 'end_date': 'c.t_date'}), "(task=task.value, task_name=task.name, ts_code='按日期更新', bs_code=\n '按日期更新', begin_date=c.t_date, end_date=c.t_date)\n", (5262, 5379), False, 'from coralquant.models.orm_model import TaskTable\n'), ((3183, 3216), 'coralquant.models.odl_model.BS_Stock_Basic.code.in_', 'BS_Stock_Basic.code.in_', (['tmpcodes'], {}), '(tmpcodes)\n', (3206, 3216), False, 'from coralquant.models.odl_model import BS_Stock_Basic, BS_SZ50_Stocks, TS_Stock_Basic, TS_TradeCal\n'), ((5118, 5132), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5130, 5132), False, 'from datetime import date, datetime, timedelta\n'), ((5005, 5031), 'sqlalchemy.distinct', 'distinct', (['TS_TradeCal.date'], {}), '(TS_TradeCal.date)\n', (5013, 5031), False, 'from sqlalchemy import func, distinct\n'), ((3751, 3765), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3763, 3765), False, 'from datetime import date, datetime, timedelta\n'), ((4663, 4677), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4675, 4677), False, 'from datetime import date, datetime, timedelta\n')] |
from tensorboardX import SummaryWriter
class Logger:
def __init__(self, log_dir):
self.env_name = 'Pong-v0'
# TensorBoard
self.writer = SummaryWriter(log_dir=log_dir)
# Episode Values
self.ep = 0
self.ep_rewards = []
self.ep_max_reward = 0.0
self.ep_min_reward = 0.0
# Updates Values
self.grad_count = int(0)
self.total_q = 0.0
self.total_loss = 0.0
self.mb_loss = 0.0
self.mb_q = 0.0
# Counters
self.epsilon_val = 0.0
self.update_count = 0.0
self.step = 0.0
def network(self, net):
for name, param in net.named_parameters():
self._log(name, param.clone().cpu().data.numpy(),
self.step, type='histogram')
def epsilon(self, eps, step):
self.step = step
self.epsilon_val = eps
self._log('epsilon', self.epsilon_val, self.step)
def q_loss(self, q, loss, step):
self.step = step
self.update_count += 1
self.mb_loss = loss.data.cpu().sum()
self.mb_q = q.sum().data.cpu().sum() / int(q.size()[0])
self.total_q += self.mb_q
self.total_loss += self.mb_loss
avg_loss = self.total_loss / self.update_count
avg_q = self.total_q / self.update_count
self._log('update.average_q', avg_q, self.step)
self._log('update.average_loss', avg_loss, self.step)
self._log('update.minibatch_loss', self.mb_loss, self.step)
self._log('update.minibatch_q', self.mb_q, self.step)
def episode(self, reward):
self.ep_rewards.append(reward)
self.ep_max_reward = max(self.ep_max_reward, reward)
self.ep_min_reward = min(self.ep_min_reward, reward)
def display(self):
avg_loss = None if self.update_count == 0 else self.total_loss / self.update_count
avg_q = None if self.update_count == 0 else self.total_q / self.update_count
nonzero_reward_list = [
reward for reward in self.ep_rewards if reward != 0]
avg_ep_nonzero_reward = None if len(nonzero_reward_list) == 0 else sum(
nonzero_reward_list) / float(len(nonzero_reward_list))
values = {
'Episode': self.ep,
'Step': self.step,
'Avg. Loss': avg_loss,
'Avg. Q': avg_q,
'Episode Avg. Reward': sum(self.ep_rewards) / float(len(self.ep_rewards)),
'Episode Avg. Reward Non-0': avg_ep_nonzero_reward,
'Episode Min. Reward': self.ep_min_reward,
'Episode Max. Reward': self.ep_max_reward,
'Minibatch Loss': self.mb_loss,
'Minibatch Q': self.mb_q,
'Epsilon': self.epsilon_val
}
print('-------')
for key in values:
print('{}: {}'.format(key, values[key]))
def reset_episode(self):
avg_ep_reward = sum(self.ep_rewards) / float(len(self.ep_rewards))
nonzero_reward_list = [
reward for reward in self.ep_rewards if reward != 0]
avg_ep_nonzero_reward = sum(
nonzero_reward_list) / float(len(nonzero_reward_list))
self._log('ep.average_reward_nonzero', avg_ep_nonzero_reward, self.ep)
self._log('ep.average_reward', avg_ep_reward, self.ep)
self._log('ep.min_reward', self.ep_min_reward, self.ep)
self._log('ep.max_reward', self.ep_max_reward, self.ep)
self.ep += 1
self.ep_rewards = []
self.ep_max_reward = 0.0
self.ep_min_reward = 0.0
def _log(self, name, value, step, type='scalar'):
# Add Env.Name to name
name = '{}/{}'.format(self.env_name, name)
# Log in Tensorboard
if type == 'scalar':
self.writer.add_scalar(name, value, step)
self.writer.scalar_dict = {}
elif type == 'histogram':
self.writer.add_histogram(name, value, step)
| [
"tensorboardX.SummaryWriter"
] | [((167, 197), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {'log_dir': 'log_dir'}), '(log_dir=log_dir)\n', (180, 197), False, 'from tensorboardX import SummaryWriter\n')] |
from __future__ import print_function
import os
import shutil
import pyNastran
from pyNastran.utils import print_bad_path
pkg_path = pyNastran.__path__[0]
def create_rst_from_ipython_notebooks():
#curdir = os.getcwd()
notebook_dir = os.path.join(pkg_path, '..', 'docs', 'quick_start', 'demo')
pydocs_dir = os.path.join(pkg_path, '..', 'docs', 'html_docs', 'quick_start')
assert os.path.exists(pydocs_dir), print_bad_path(quick_start_pydocs_dir)
assert os.path.exists(notebook_dir), print_bad_path(notebook_dir)
os.chdir(notebook_dir)
for fname in os.listdir(notebook_dir):
fnamei = os.path.basename(fname)
base = os.path.splitext(fnamei)[0]
fname2 = base + '.rst'
if fnamei.startswith('.'):
continue
if not fnamei.endswith('.ipynb'):
continue
os.system('ipython nbconvert --to rst %s' % fname)
if not os.path.exists(fname2):
print('%s was not made...' % fname2)
continue
moved_fname2 = os.path.join(pydocs_dir, fname2)
try:
if os.path.exists(moved_fname2):
os.remove(moved_fname2)
os.rename(fname2, moved_fname2)
except:
pass
| [
"os.path.exists",
"os.listdir",
"os.rename",
"os.path.join",
"os.path.splitext",
"os.chdir",
"os.path.basename",
"os.system",
"pyNastran.utils.print_bad_path",
"os.remove"
] | [((244, 303), 'os.path.join', 'os.path.join', (['pkg_path', '""".."""', '"""docs"""', '"""quick_start"""', '"""demo"""'], {}), "(pkg_path, '..', 'docs', 'quick_start', 'demo')\n", (256, 303), False, 'import os\n'), ((321, 385), 'os.path.join', 'os.path.join', (['pkg_path', '""".."""', '"""docs"""', '"""html_docs"""', '"""quick_start"""'], {}), "(pkg_path, '..', 'docs', 'html_docs', 'quick_start')\n", (333, 385), False, 'import os\n'), ((398, 424), 'os.path.exists', 'os.path.exists', (['pydocs_dir'], {}), '(pydocs_dir)\n', (412, 424), False, 'import os\n'), ((426, 464), 'pyNastran.utils.print_bad_path', 'print_bad_path', (['quick_start_pydocs_dir'], {}), '(quick_start_pydocs_dir)\n', (440, 464), False, 'from pyNastran.utils import print_bad_path\n'), ((476, 504), 'os.path.exists', 'os.path.exists', (['notebook_dir'], {}), '(notebook_dir)\n', (490, 504), False, 'import os\n'), ((506, 534), 'pyNastran.utils.print_bad_path', 'print_bad_path', (['notebook_dir'], {}), '(notebook_dir)\n', (520, 534), False, 'from pyNastran.utils import print_bad_path\n'), ((539, 561), 'os.chdir', 'os.chdir', (['notebook_dir'], {}), '(notebook_dir)\n', (547, 561), False, 'import os\n'), ((579, 603), 'os.listdir', 'os.listdir', (['notebook_dir'], {}), '(notebook_dir)\n', (589, 603), False, 'import os\n'), ((622, 645), 'os.path.basename', 'os.path.basename', (['fname'], {}), '(fname)\n', (638, 645), False, 'import os\n'), ((848, 898), 'os.system', 'os.system', (["('ipython nbconvert --to rst %s' % fname)"], {}), "('ipython nbconvert --to rst %s' % fname)\n", (857, 898), False, 'import os\n'), ((1032, 1064), 'os.path.join', 'os.path.join', (['pydocs_dir', 'fname2'], {}), '(pydocs_dir, fname2)\n', (1044, 1064), False, 'import os\n'), ((661, 685), 'os.path.splitext', 'os.path.splitext', (['fnamei'], {}), '(fnamei)\n', (677, 685), False, 'import os\n'), ((914, 936), 'os.path.exists', 'os.path.exists', (['fname2'], {}), '(fname2)\n', (928, 936), False, 'import os\n'), ((1093, 1121), 
'os.path.exists', 'os.path.exists', (['moved_fname2'], {}), '(moved_fname2)\n', (1107, 1121), False, 'import os\n'), ((1175, 1206), 'os.rename', 'os.rename', (['fname2', 'moved_fname2'], {}), '(fname2, moved_fname2)\n', (1184, 1206), False, 'import os\n'), ((1139, 1162), 'os.remove', 'os.remove', (['moved_fname2'], {}), '(moved_fname2)\n', (1148, 1162), False, 'import os\n')] |
import pygame
import pygame_gui
from rules_python.python.runfiles import runfiles
class GeneratorWindow(pygame_gui.elements.ui_window.UIWindow):
    """A small window presenting one generator: an on/off switch image, a
    connect/disconnect toggle button and a live output reading."""

    def __init__(self, position, ui_manager, generator):
        """Create the window.

        position: (x, y) top-left corner of the window in screen pixels.
        ui_manager: the pygame_gui UIManager that owns this window.
        generator: object exposing .name, .current_output,
            .output_connected and .toggle_output_connected().
        """
        super().__init__(pygame.Rect(position, (320, 120)), ui_manager,
                         window_display_title=generator.name,
                         object_id='#generator_window')
        self._generator = generator
        self.button = pygame_gui.elements.UIButton(
            pygame.Rect((64, 0), (150, 30)),
            'Unknown',
            ui_manager,
            container=self,
            object_id='#toggle_button')
        self.output_label = pygame_gui.elements.UILabel(
            pygame.Rect((64, 30), (160, 25)),
            f'Current Output: {self._generator.current_output}',
            ui_manager,
            container=self,
            object_id='#output_label')
        self._load_images()
        self._image = pygame_gui.elements.UIImage(
            pygame.Rect((0, 0), (64, 64)),
            self._toggle_off_image,
            ui_manager,
            container=self,
            object_id='#toggle_image')

    def _load_images(self):
        """Load and scale the on/off switch sprites from Bazel runfiles."""
        r = runfiles.Create()
        # Bug fix: PNG files are binary, so they must be opened in 'rb' mode.
        # Text mode ('r') hands pygame.image.load a text stream and loading
        # fails with a decode error.
        off_path = r.Rlocation('joule_quest/assets/images/light_switch_off_256x256.png')
        with open(off_path, 'rb') as f:
            self._toggle_off_image = pygame.transform.scale(
                pygame.image.load(f), (64, 64)).convert_alpha()
        on_path = r.Rlocation('joule_quest/assets/images/light_switch_on_256x256.png')
        with open(on_path, 'rb') as f:
            self._toggle_on_image = pygame.transform.scale(
                pygame.image.load(f), (64, 64)).convert_alpha()

    def process_event(self, event):
        """Toggle the generator connection when our button is pressed."""
        handled = super().process_event(event)
        if (event.type == pygame.USEREVENT and
                event.user_type == pygame_gui.UI_BUTTON_PRESSED and
                event.ui_object_id == "#generator_window.#toggle_button" and
                event.ui_element == self.button):
            handled = True
            self._generator.toggle_output_connected()
        return handled

    def update(self, time_delta):
        """Refresh label text, button caption and switch image every frame."""
        super().update(time_delta)
        self.output_label.set_text(f'Current Output: {self._generator.current_output}')
        self.output_label.update(time_delta)
        self.button.set_text('Connected' if self._generator.output_connected else 'Disconnected')
        self.button.update(time_delta)
        self._image.set_image(self._toggle_on_image if self._generator.output_connected
                              else self._toggle_off_image)
        self._image.set_dimensions((64, 64))
        self._image.update(time_delta)
| [
"pygame.image.load",
"rules_python.python.runfiles.runfiles.Create",
"pygame.Rect"
] | [((1446, 1463), 'rules_python.python.runfiles.runfiles.Create', 'runfiles.Create', ([], {}), '()\n', (1461, 1463), False, 'from rules_python.python.runfiles import runfiles\n'), ((230, 263), 'pygame.Rect', 'pygame.Rect', (['position', '(320, 120)'], {}), '(position, (320, 120))\n', (241, 263), False, 'import pygame\n'), ((484, 515), 'pygame.Rect', 'pygame.Rect', (['(64, 0)', '(150, 30)'], {}), '((64, 0), (150, 30))\n', (495, 515), False, 'import pygame\n'), ((785, 817), 'pygame.Rect', 'pygame.Rect', (['(64, 30)', '(160, 25)'], {}), '((64, 30), (160, 25))\n', (796, 817), False, 'import pygame\n'), ((1151, 1180), 'pygame.Rect', 'pygame.Rect', (['(0, 0)', '(64, 64)'], {}), '((0, 0), (64, 64))\n', (1162, 1180), False, 'import pygame\n'), ((1624, 1644), 'pygame.image.load', 'pygame.image.load', (['f'], {}), '(f)\n', (1641, 1644), False, 'import pygame\n'), ((1829, 1849), 'pygame.image.load', 'pygame.image.load', (['f'], {}), '(f)\n', (1846, 1849), False, 'import pygame\n')] |
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import itertools as it
from Bio import SeqIO
# Characters nhmmer accepts: IUPAC nucleotide codes plus U, case-insensitive.
NHMMER_PATTERN = re.compile("^[ABCDGHKMNRSTVWXYU]+$", re.IGNORECASE)
def is_valid_nhmmer_record(record):
    """Return True when the record's sequence is usable by nhmmer."""
    sequence = str(record.seq)
    return NHMMER_PATTERN.match(sequence) is not None
def valid_nhmmer(handle, output):
    """Write to *output* only the fasta records from *handle* that nhmmer
    can process."""
    records = SeqIO.parse(handle, "fasta")
    kept = (rec for rec in records if is_valid_nhmmer_record(rec))
    SeqIO.write(kept, output, "fasta")
def invalid_nhmmer(handle, output):
    """Write to *output* only the fasta records from *handle* that nhmmer
    cannot process."""
    records = SeqIO.parse(handle, "fasta")
    dropped = (rec for rec in records if not is_valid_nhmmer_record(rec))
    SeqIO.write(dropped, output, "fasta")
| [
"itertools.filterfalse",
"Bio.SeqIO.parse",
"Bio.SeqIO.write",
"re.compile"
] | [((687, 738), 're.compile', 're.compile', (['"""^[ABCDGHKMNRSTVWXYU]+$"""', 're.IGNORECASE'], {}), "('^[ABCDGHKMNRSTVWXYU]+$', re.IGNORECASE)\n", (697, 738), False, 'import re\n'), ((952, 980), 'Bio.SeqIO.parse', 'SeqIO.parse', (['handle', '"""fasta"""'], {}), "(handle, 'fasta')\n", (963, 980), False, 'from Bio import SeqIO\n'), ((1042, 1080), 'Bio.SeqIO.write', 'SeqIO.write', (['accepted', 'output', '"""fasta"""'], {}), "(accepted, output, 'fasta')\n", (1053, 1080), False, 'from Bio import SeqIO\n'), ((1135, 1163), 'Bio.SeqIO.parse', 'SeqIO.parse', (['handle', '"""fasta"""'], {}), "(handle, 'fasta')\n", (1146, 1163), False, 'from Bio import SeqIO\n'), ((1179, 1228), 'itertools.filterfalse', 'it.filterfalse', (['is_valid_nhmmer_record', 'sequences'], {}), '(is_valid_nhmmer_record, sequences)\n', (1193, 1228), True, 'import itertools as it\n'), ((1233, 1271), 'Bio.SeqIO.write', 'SeqIO.write', (['rejected', 'output', '"""fasta"""'], {}), "(rejected, output, 'fasta')\n", (1244, 1271), False, 'from Bio import SeqIO\n')] |
"""
This script contains the code implementing my version of the Boids artificial
life programme.
"""
# ---------------------------------- Imports ----------------------------------
# Allow imports from parent folder
import sys, os
sys.path.insert(0, os.path.abspath('..'))
# Standard library imports
import timeit
import time
import numpy as np
from math import atan2, sqrt
# Repo module imports
import boids_core.generate_values as generate_values
# Code from delauney triangulation module
from delauney_triangulation.triangulation_core.triangulation import triangulate
from delauney_triangulation.triangulation_core.linear_algebra import (vector_add,
vector_sub,
list_divide,
perpendicular,
normalise)
# ----------------------------- Class definitions -----------------------------
class World():
    """Axis-aligned rectangular 2D domain that the Boids move around in."""
    def __init__(self, world_size):
        """*world_size* is a sequence (x_min, x_max, y_min, y_max)."""
        self.x_min, self.x_max, self.y_min, self.y_max = world_size[:4]
class Object():
    """Generic entity in the world: an index, a position, and a flag
    telling whether it ever moves."""
    def __init__(self, idx, position, stationary=False):
        self.index = idx
        self.pos = position
        self.stationary = stationary
class Obstacle(Object):
    """A stationary obstacle: an Object created with stationary=True."""
    def __init__(self, idx, position):
        super().__init__(idx, position, stationary=True)
class Boid(Object):
    """
    Class to represent a single Boid.

    A boid is a moving Object with a velocity and the behavioural
    parameters (from the ``options`` dict) controlling the three classic
    rules: separation, cohesion and alignment.
    """
    def __init__(self, idx, position, velocity, options):
        # position and velocity are [x, y] pairs; options supplies the
        # behavioural parameters read below.
        super().__init__(idx, position)
        self.vel = velocity
        # Filled externally (see Boids.make_neighbourhoods*); each entry's
        # second element indexes into the shared positions list.
        self.neighbours = []
        self.max_speed = options['max_speed']
        self.field_of_view = options['field_of_view']
        self.vision_distance = options['vision_distance']
        self.safety_zone = options['safety_zone']
        self.alignment_perception = options['alignment_perception']
        self.cohesion_perception = options['cohesion_perception']
        # NOTE: the options key really is spelled 'seperation_perception'
        # (sic) by the code that builds the options dict.
        self.separation_perception = options['seperation_perception']
    def __repr__(self):
        return f"{self.index}, {self.pos}, {self.vel}"
    def magnitude(self):
        """Return the boid's speed (Euclidean norm of the velocity)."""
        return sqrt(self.vel[0]**2 + self.vel[1]**2)
    def direction(self):
        """Return the velocity heading angle in radians (atan2 convention)."""
        return atan2(self.vel[1], self.vel[0])
    def make_tri(self, height, width):
        """
        Generate the co-ordinates of the three points of a triangle used to
        plot the boid.
        Parameters
        ----------
        height : int
            The height of the boid in pixels.
        width : int
            The width of the boid in pixels.
        Returns
        -------
        numpy.array
            Numpy array with the triangle coordiantes.
        """
        # NOTE(review): divides by self.magnitude(); a zero-velocity boid
        # would raise ZeroDivisionError here — confirm callers prevent that.
        offset_h = list_divide(self.vel, self.magnitude()/height)
        offset_w = list_divide(self.vel, self.magnitude()/width)
        offset_w = perpendicular(offset_w)
        # Tip of the triangle, half a height ahead of the centre.
        p1 = vector_add(self.pos, list_divide(offset_h, 2))
        # Base corners, half a height behind and half a width to each side.
        p2 = p3 = vector_sub(self.pos, list_divide(offset_h, 2))
        p2 = vector_add(p2, list_divide(offset_w, 2))
        p3 = vector_sub(p3, list_divide(offset_w, 2))
        return (np.asarray([p1, p2, p3]).astype(int))
    def restrict_fov(self, positions):
        """
        Function to limit the field of view of the boid. Neighbours beyond the
        self.field_of_view/2 angle are removed from the set of neighbours.
        Parameters
        ----------
        positions : list
            List of all coordinates of the boids.
        """
        new_neighbours = []
        # NOTE: angles here use atan2(x, y) (not the usual atan2(y, x));
        # the convention is at least consistent within this method.
        boid_dir = atan2(self.vel[0], self.vel[1])
        for neighbour in self.neighbours:
            n_pos = positions[neighbour[1]]
            # Find the angle between boid direction and neighbour
            angle = atan2(n_pos[0]-self.pos[0], n_pos[1]-self.pos[1])
            # print(f"{boid_dir},{boid_dir - self.field_of_view/2},{angle},{boid_dir + self.field_of_view/2}")
            # NOTE(review): no wrap-around handling at +/-pi, so boids
            # heading near the branch cut may reject valid neighbours —
            # TODO confirm whether this matters for the chosen FOV.
            if ((boid_dir - self.field_of_view/2) < angle and
                    angle < (boid_dir + self.field_of_view/2)):
                diff_x = n_pos[0] - self.pos[0]
                diff_y = n_pos[1] - self.pos[1]
                distance = sqrt(diff_x**2 + diff_y**2)
                # Keep only neighbours inside the vision radius.
                if distance < self.vision_distance:
                    new_neighbours.append(neighbour)
        self.neighbours = new_neighbours
    def separation(self, positions):
        """
        Function to implement the boids separation rule: steer away from
        neighbours closer than self.safety_zone.
        """
        resultant_x = 0
        resultant_y = 0
        counter = 0
        for neighbour in self.neighbours:
            n_pos = positions[neighbour[1]]
            diff_x = n_pos[0] - self.pos[0]
            diff_y = n_pos[1] - self.pos[1]
            distance = sqrt(diff_x**2 + diff_y**2)
            if distance < self.safety_zone:
                counter += 1
                # Push away from the neighbour, weighted by 1/distance.
                resultant_x -= diff_x / distance
                resultant_y -= diff_y / distance
        if counter != 0:
            resultant_x /= counter
            resultant_y /= counter
        vs_x = self.separation_perception * resultant_x
        vs_y = self.separation_perception * resultant_y
        # print(f"separation,{vs_x:0.4f},{vs_y:0.4f}")
        return [vs_x, vs_y]
    def cohesion(self, positions):
        """
        Function to implement the boids cohesion rule: steer towards the
        mean position of the neighbours.
        """
        num_neighbours = len(self.neighbours)
        resultant_x = 0
        resultant_y = 0
        for neighbour in self.neighbours:
            n_pos = positions[neighbour[1]]
            resultant_x += n_pos[0]
            resultant_y += n_pos[1]
        resultant_x /= num_neighbours
        resultant_y /= num_neighbours
        vc_x = self.cohesion_perception * (resultant_x - self.pos[0])
        vc_y = self.cohesion_perception * (resultant_y - self.pos[1])
        # print(f"cohesion,{vc_x:0.4f},{vc_y:0.4f}")
        return [vc_x, vc_y]
    def alignment(self, velocities):
        """
        Function to implement the boids alignment rule: steer towards the
        mean velocity of the neighbours.
        """
        num_neighbours = len(self.neighbours)
        resultant_vx = 0
        resultant_vy = 0
        for neighbour in self.neighbours:
            n_velo = velocities[neighbour[1]]
            resultant_vx += n_velo[0]
            resultant_vy += n_velo[1]
        resultant_vx /= num_neighbours
        resultant_vy /= num_neighbours
        va_x = self.alignment_perception * resultant_vx
        va_y = self.alignment_perception * resultant_vy
        # print(f"alignment,{va_x:0.4f},{va_y:0.4f}")
        return [va_x, va_y]
    def wrap_world(self, world):
        """
        Apply period boundary conditions, so if the boid goes off the edge
        of the world it reappears on the opposite edge.
        """
        if self.pos[0] < 0:
            self.pos[0] = world.x_max + self.pos[0]
        if self.pos[0] > world.x_max:
            self.pos[0] = self.pos[0] - world.x_max
        if self.pos[1] < 0:
            self.pos[1] = world.y_max + self.pos[1]
        if self.pos[1] > world.y_max:
            self.pos[1] = self.pos[1] - world.y_max
    def update_boid(self, positions, velocities, world):
        """
        Function to apply all the boid rules to update the position and
        velocity of a boid for a single time-step.
        """
        self.restrict_fov(positions)
        # print(f"current pos: {self.pos[0]:0.4f}, {self.pos[1]:0.4f}")
        # print(f"current vel: {self.vel[0]:0.4f}, {self.vel[1]:0.4f}")
        # Only apply the three rules when there is at least one neighbour
        # (avoids division by zero in cohesion/alignment).
        if len(self.neighbours) >= 1:
            ali = self.alignment(velocities)
            coh = self.cohesion(positions)
            sep = self.separation(positions)
            self.vel[0] += (coh[0] + ali[0] + sep[0])
            self.vel[1] += (coh[1] + ali[1] + sep[1])
        # curl = perpendicular(self.vel)
        # self.vel = vector_add(self.vel, list_divide(curl, 20))
        # Clamp the speed to max_speed, preserving direction.
        if sqrt(self.vel[0]**2 + self.vel[1]**2) > self.max_speed:
            new_v = normalise(self.vel, self.max_speed)
            self.vel = new_v
        self.pos[0] += self.vel[0]
        self.pos[1] += self.vel[1]
        self.wrap_world(world)
        # print(f"new pos: {self.pos[0]:0.4f}, {self.pos[1]:0.4f}")
        # print(f"new vel: {self.vel[0]:0.4f}, {self.vel[1]:0.4f}")
        # print("-"*32)
class Boids():
    """
    A Class to store the full set of Boid Class objects, along with associated
    functions on all boids.
    """
    def __init__(self, number, world, options):
        # number: how many boids to simulate; world: a World instance.
        self.num = number
        self.world = world
        self.members = []       # list of Boid objects
        self.positions = []     # cached by get_pos_vel()
        self.velocities = []    # cached by get_pos_vel()
        self.triangulation = None
        self.max_speed = options['max_speed']
    def add_boid(self, new_boid):
        """Append a Boid to the flock."""
        self.members.append(new_boid)
    def generate_boids(self, options, distribution='random'):
        """
        Setup the inital positions and velocities of the boids.
        Parameters
        ----------
        options : dict
            Dictionary of setup options.
        distribution : TYPE, optional
            Choose how the boids are initially distributed.
            The default is 'random'. 'lattice' and 'lattice_with_noise' are
            alternative options.
        """
        # NOTE(review): an unrecognised distribution string leaves
        # `positions` unbound and raises NameError below.
        if distribution == 'random':
            positions = generate_values.random(self.num, self.world)
        if distribution == 'lattice':
            positions = generate_values.lattice(self.num, self.world)
        if distribution == 'lattice_with_noise':
            positions = generate_values.noisy_lattice(self.num, self.world)
        velocities = generate_values.random_velocities(self.num, self.max_speed)
        for i in range(self.num):
            new_boid = Boid(i, positions[i], velocities[i], options)
            self.add_boid(new_boid)
    def get_pos_vel(self):
        """Cache the members' positions and velocities on this object."""
        positions = []
        velocities = []
        for boid in self.members:
            positions.append(boid.pos)
            velocities.append(boid.vel)
        self.positions = positions
        self.velocities = velocities
    def sort_boids(self):
        """
        Perform a lexicographic sort on the boids by position.
        """
        sorted_b = sorted(self.members, key=lambda b: [b.pos[0], b.pos[1]])
        self.members = sorted_b
    def triangulate_boids(self):
        """
        Use the delauney_triangulation module to triangulate the set of boids.
        """
        # The triangulation code expects lexicographically sorted points.
        self.sort_boids()
        self.get_pos_vel()
        self.triangulation = triangulate(self.positions)
    def setup_triangulate_boids(self):
        """
        Setup the triangulation with actually performing the Delauney
        triangulation algorithm. This is used for the MPI implementation
        (in 'run_boids_mpi_cli.py) where there is a custom MPI triangulate
        function.
        """
        self.sort_boids()
        self.get_pos_vel()
    def make_neighbourhoods(self):
        """
        Make neighbourhoods using the Delaunay triangulation module.
        """
        # NOTE(review): points_seen is never appended to, so the
        # `edge.org not in points_seen` test is always True and duplicate
        # origins overwrite earlier neighbour lists — confirm whether a
        # points_seen.append(edge.org) was intended.
        points_seen = []
        for edge in self.triangulation.edges:
            if edge.org not in points_seen and not edge.deactivate:
                connections = edge.find_connections(self.triangulation.edges)
                self.members[edge.org].neighbours = connections
    def make_neighbourhoods_basic(self, max_dist=5):
        """
        Make neighbourhoods using the linear search algorithm
        (O(n^2) distance check between every pair of boids).
        """
        for member in self.members:
            member.neighbours = []
            for i, pos in enumerate(self.positions):
                diff_x = pos[0] - member.pos[0]
                diff_y = pos[1] - member.pos[1]
                distance = sqrt(diff_x**2 + diff_y**2)
                # 0 < distance excludes the boid itself.
                if 0<distance<max_dist:
                    # print(i, member.pos, pos)
                    member.neighbours.append([member.index, i])
| [
"boids_core.generate_values.noisy_lattice",
"math.sqrt",
"delauney_triangulation.triangulation_core.triangulation.triangulate",
"numpy.asarray",
"boids_core.generate_values.random",
"boids_core.generate_values.lattice",
"math.atan2",
"delauney_triangulation.triangulation_core.linear_algebra.list_divid... | [((261, 282), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (276, 282), False, 'import sys, os\n'), ((2460, 2501), 'math.sqrt', 'sqrt', (['(self.vel[0] ** 2 + self.vel[1] ** 2)'], {}), '(self.vel[0] ** 2 + self.vel[1] ** 2)\n', (2464, 2501), False, 'from math import atan2, sqrt\n'), ((2546, 2577), 'math.atan2', 'atan2', (['self.vel[1]', 'self.vel[0]'], {}), '(self.vel[1], self.vel[0])\n', (2551, 2577), False, 'from math import atan2, sqrt\n'), ((3200, 3223), 'delauney_triangulation.triangulation_core.linear_algebra.perpendicular', 'perpendicular', (['offset_w'], {}), '(offset_w)\n', (3213, 3223), False, 'from delauney_triangulation.triangulation_core.linear_algebra import vector_add, vector_sub, list_divide, perpendicular, normalise\n'), ((3933, 3964), 'math.atan2', 'atan2', (['self.vel[0]', 'self.vel[1]'], {}), '(self.vel[0], self.vel[1])\n', (3938, 3964), False, 'from math import atan2, sqrt\n'), ((10256, 10315), 'boids_core.generate_values.random_velocities', 'generate_values.random_velocities', (['self.num', 'self.max_speed'], {}), '(self.num, self.max_speed)\n', (10289, 10315), True, 'import boids_core.generate_values as generate_values\n'), ((11227, 11254), 'delauney_triangulation.triangulation_core.triangulation.triangulate', 'triangulate', (['self.positions'], {}), '(self.positions)\n', (11238, 11254), False, 'from delauney_triangulation.triangulation_core.triangulation import triangulate\n'), ((3269, 3293), 'delauney_triangulation.triangulation_core.linear_algebra.list_divide', 'list_divide', (['offset_h', '(2)'], {}), '(offset_h, 2)\n', (3280, 3293), False, 'from delauney_triangulation.triangulation_core.linear_algebra import vector_add, vector_sub, list_divide, perpendicular, normalise\n'), ((3335, 3359), 'delauney_triangulation.triangulation_core.linear_algebra.list_divide', 'list_divide', (['offset_h', '(2)'], {}), '(offset_h, 2)\n', (3346, 3359), False, 
'from delauney_triangulation.triangulation_core.linear_algebra import vector_add, vector_sub, list_divide, perpendicular, normalise\n'), ((3390, 3414), 'delauney_triangulation.triangulation_core.linear_algebra.list_divide', 'list_divide', (['offset_w', '(2)'], {}), '(offset_w, 2)\n', (3401, 3414), False, 'from delauney_triangulation.triangulation_core.linear_algebra import vector_add, vector_sub, list_divide, perpendicular, normalise\n'), ((3445, 3469), 'delauney_triangulation.triangulation_core.linear_algebra.list_divide', 'list_divide', (['offset_w', '(2)'], {}), '(offset_w, 2)\n', (3456, 3469), False, 'from delauney_triangulation.triangulation_core.linear_algebra import vector_add, vector_sub, list_divide, perpendicular, normalise\n'), ((4141, 4194), 'math.atan2', 'atan2', (['(n_pos[0] - self.pos[0])', '(n_pos[1] - self.pos[1])'], {}), '(n_pos[0] - self.pos[0], n_pos[1] - self.pos[1])\n', (4146, 4194), False, 'from math import atan2, sqrt\n'), ((5139, 5170), 'math.sqrt', 'sqrt', (['(diff_x ** 2 + diff_y ** 2)'], {}), '(diff_x ** 2 + diff_y ** 2)\n', (5143, 5170), False, 'from math import atan2, sqrt\n'), ((9944, 9988), 'boids_core.generate_values.random', 'generate_values.random', (['self.num', 'self.world'], {}), '(self.num, self.world)\n', (9966, 9988), True, 'import boids_core.generate_values as generate_values\n'), ((10053, 10098), 'boids_core.generate_values.lattice', 'generate_values.lattice', (['self.num', 'self.world'], {}), '(self.num, self.world)\n', (10076, 10098), True, 'import boids_core.generate_values as generate_values\n'), ((10174, 10225), 'boids_core.generate_values.noisy_lattice', 'generate_values.noisy_lattice', (['self.num', 'self.world'], {}), '(self.num, self.world)\n', (10203, 10225), True, 'import boids_core.generate_values as generate_values\n'), ((3498, 3522), 'numpy.asarray', 'np.asarray', (['[p1, p2, p3]'], {}), '([p1, p2, p3])\n', (3508, 3522), True, 'import numpy as np\n'), ((4554, 4585), 'math.sqrt', 'sqrt', (['(diff_x ** 2 + 
diff_y ** 2)'], {}), '(diff_x ** 2 + diff_y ** 2)\n', (4558, 4585), False, 'from math import atan2, sqrt\n'), ((8465, 8506), 'math.sqrt', 'sqrt', (['(self.vel[0] ** 2 + self.vel[1] ** 2)'], {}), '(self.vel[0] ** 2 + self.vel[1] ** 2)\n', (8469, 8506), False, 'from math import atan2, sqrt\n'), ((8546, 8581), 'delauney_triangulation.triangulation_core.linear_algebra.normalise', 'normalise', (['self.vel', 'self.max_speed'], {}), '(self.vel, self.max_speed)\n', (8555, 8581), False, 'from delauney_triangulation.triangulation_core.linear_algebra import vector_add, vector_sub, list_divide, perpendicular, normalise\n'), ((12473, 12504), 'math.sqrt', 'sqrt', (['(diff_x ** 2 + diff_y ** 2)'], {}), '(diff_x ** 2 + diff_y ** 2)\n', (12477, 12504), False, 'from math import atan2, sqrt\n')] |
# Copyright 2021 Softwerks LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import dataclasses
import time
from typing import List, Optional
import flask
import redis.client
@dataclasses.dataclass
class Session:
    """Session data persisted between requests.

    Backed by redis hashes (``session:<token>``) accessed via
    ``flask.g.redis``. ``authenticated`` and ``id_`` are derived in
    ``__post_init__`` (``init=False``), so they are excluded from the
    generated ``__init__`` and field ordering remains valid.
    """
    # Stored verbatim; not interpreted by this class.
    address: str
    # True when the session belongs to a logged-in user (derived).
    authenticated: bool = dataclasses.field(init=False)
    created: str
    # user_id when authenticated, otherwise the session token (derived).
    id_: str = dataclasses.field(init=False)
    last_seen: str
    token: str
    user_agent: str
    game_id: Optional[str] = None
    feedback: Optional[str] = None
    time_zone: Optional[str] = None
    user_id: Optional[str] = None
    def __post_init__(self) -> None:
        # Derive identity fields, refresh last_seen and persist it.
        if self.user_id is not None:
            self.authenticated = True
            self.id_ = self.user_id
        else:
            self.authenticated = False
            self.id_ = self.token
        if self.authenticated:
            self.game_id = flask.g.redis.hget("games", self.id_)
        # Overwrites any last_seen value passed in with the current time.
        self.last_seen = str(time.time())
        flask.g.redis.hset(f"session:{self.token}", "last_seen", self.last_seen)
    def all(self) -> List["Session"]:
        """Return a list of the user's sessions."""
        if self.authenticated:
            return [
                Session(token=token, **flask.g.redis.hgetall(f"session:{token}"))
                for token in flask.g.redis.smembers(f"user:sessions:{self.user_id}")
            ]
        else:
            return [self]
    def update_feedback_timestamp(self) -> str:
        """Record the current time as the feedback timestamp and return it."""
        self.feedback = str(time.time())
        flask.g.redis.hset(f"session:{self.token}", "feedback", self.feedback)
        return self.feedback
    def delete_all(self) -> None:
        """Delete all of the user's sessions."""
        if self.authenticated:
            # Batch all deletions in one round trip.
            pipeline: redis.client.Pipeline = flask.g.redis.pipeline()
            for token in flask.g.redis.smembers(f"user:sessions:{self.user_id}"):
                pipeline.delete(f"session:{token}")
            pipeline.delete(f"user:sessions:{self.user_id}")
            pipeline.execute()
            flask.session.clear()
        else:
            self.delete()
    def delete(self) -> None:
        """Delete the session (log out)."""
        if self.authenticated and self.user_id is not None:
            flask.g.redis.srem(f"user:sessions:{self.user_id}", self.token)
        flask.g.redis.delete(f"session:{self.token}")
        flask.session.clear()
| [
"flask.g.redis.smembers",
"flask.g.redis.hget",
"flask.g.redis.srem",
"flask.g.redis.hgetall",
"flask.g.redis.delete",
"flask.g.redis.hset",
"time.time",
"dataclasses.field",
"flask.session.clear",
"flask.g.redis.pipeline"
] | [((812, 841), 'dataclasses.field', 'dataclasses.field', ([], {'init': '(False)'}), '(init=False)\n', (829, 841), False, 'import dataclasses\n'), ((874, 903), 'dataclasses.field', 'dataclasses.field', ([], {'init': '(False)'}), '(init=False)\n', (891, 903), False, 'import dataclasses\n'), ((1481, 1553), 'flask.g.redis.hset', 'flask.g.redis.hset', (['f"""session:{self.token}"""', '"""last_seen"""', 'self.last_seen'], {}), "(f'session:{self.token}', 'last_seen', self.last_seen)\n", (1499, 1553), False, 'import flask\n'), ((2016, 2086), 'flask.g.redis.hset', 'flask.g.redis.hset', (['f"""session:{self.token}"""', '"""feedback"""', 'self.feedback'], {}), "(f'session:{self.token}', 'feedback', self.feedback)\n", (2034, 2086), False, 'import flask\n'), ((2827, 2872), 'flask.g.redis.delete', 'flask.g.redis.delete', (['f"""session:{self.token}"""'], {}), "(f'session:{self.token}')\n", (2847, 2872), False, 'import flask\n'), ((2882, 2903), 'flask.session.clear', 'flask.session.clear', ([], {}), '()\n', (2901, 2903), False, 'import flask\n'), ((1392, 1429), 'flask.g.redis.hget', 'flask.g.redis.hget', (['"""games"""', 'self.id_'], {}), "('games', self.id_)\n", (1410, 1429), False, 'import flask\n'), ((1460, 1471), 'time.time', 'time.time', ([], {}), '()\n', (1469, 1471), False, 'import time\n'), ((1995, 2006), 'time.time', 'time.time', ([], {}), '()\n', (2004, 2006), False, 'import time\n'), ((2278, 2302), 'flask.g.redis.pipeline', 'flask.g.redis.pipeline', ([], {}), '()\n', (2300, 2302), False, 'import flask\n'), ((2329, 2384), 'flask.g.redis.smembers', 'flask.g.redis.smembers', (['f"""user:sessions:{self.user_id}"""'], {}), "(f'user:sessions:{self.user_id}')\n", (2351, 2384), False, 'import flask\n'), ((2545, 2566), 'flask.session.clear', 'flask.session.clear', ([], {}), '()\n', (2564, 2566), False, 'import flask\n'), ((2754, 2817), 'flask.g.redis.srem', 'flask.g.redis.srem', (['f"""user:sessions:{self.user_id}"""', 'self.token'], {}), "(f'user:sessions:{self.user_id}', 
self.token)\n", (2772, 2817), False, 'import flask\n'), ((1808, 1863), 'flask.g.redis.smembers', 'flask.g.redis.smembers', (['f"""user:sessions:{self.user_id}"""'], {}), "(f'user:sessions:{self.user_id}')\n", (1830, 1863), False, 'import flask\n'), ((1736, 1777), 'flask.g.redis.hgetall', 'flask.g.redis.hgetall', (['f"""session:{token}"""'], {}), "(f'session:{token}')\n", (1757, 1777), False, 'import flask\n')] |
# -*- coding: UTF-8 -*-
"""
opencv实现人脸识别
参考:
1、https://github.com/opencv/opencv/tree/master/data/haarcascades
2、http://www.cnblogs.com/hanson1/p/7105265.html
"""
import cv2
def detect_face(image):
    """Detect faces in a BGR image and outline each with a green rectangle.

    Parameters
    ----------
    image : numpy.ndarray
        BGR image as returned by ``cv2.imread``; modified in place.

    Returns
    -------
    numpy.ndarray
        The same image with one rectangle drawn per detected face.
    """
    # Haar cascades operate on single-channel images.
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # Load the pre-trained frontal-face Haar cascade. The XML data can be
    # downloaded from:
    # https://github.com/opencv/opencv/tree/master/data/haarcascades
    face_cascade = cv2.CascadeClassifier(r'./haarcascade_frontalface_default.xml')
    # Run the multi-scale detector; these parameters trade precision
    # against recall.
    faces = face_cascade.detectMultiScale(
        gray,
        scaleFactor=1.15,
        minNeighbors=5,
        minSize=(5, 5),
        # flags = cv2.HAAR_SCALE_IMAGE
    )
    print("发现{0}个人脸!".format(len(faces)))
    for (x, y, w, h) in faces:
        # Bug fix: the bottom-right corner must be (x + w, y + h); the
        # original used y + w, distorting rectangles for non-square faces.
        cv2.rectangle(image, (x, y), (x + w, y + h), (0, 255, 0), 2)
        # cv2.circle(image,((x+x+w)/2,(y+y+h)/2),w/2,(0,255,0),2)
    return image
# # 待检测的图片路径
# imagepath="nba.jpg"
#
# image = cv2.imread(imagepath)
# gray = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)
#
#
# '''
# # 获取人脸识别训练数据
#
# 对于人脸特征的一些描述,opencv在读取完数据后很据训练中的样品数据,
# 就可以感知读取到的图片上的特征,进而对图片进行人脸识别。
# xml数据下载,
# 参考:https://github.com/opencv/opencv/tree/master/data/haarcascades
# '''
# face_cascade = cv2.CascadeClassifier(r'./haarcascade_frontalface_default.xml')
#
# # 探测人脸
# # 根据训练的数据来对新图片进行识别的过程。
# faces = face_cascade.detectMultiScale(
# gray,
# scaleFactor = 1.15,
# minNeighbors = 5,
# minSize = (5,5),
# #flags = cv2.HAAR_SCALE_IMAGE
# )
#
# # 我们可以随意的指定里面参数的值,来达到不同精度下的识别。返回值就是opencv对图片的探测结果的体现。
#
# # 处理人脸探测的结果
# print ("发现{0}个人脸!".format(len(faces)))
# for(x,y,w,h) in faces:
# cv2.rectangle(image,(x,y),(x+w,y+w),(0,255,0),2)
# # cv2.circle(image,((x+x+w)/2,(y+y+h)/2),w/2,(0,255,0),2)
#
# cv2.imshow("image",image)
# cv2.waitKey(0)
# cv2.destroyAllWindows()
| [
"cv2.rectangle",
"cv2.CascadeClassifier",
"cv2.cvtColor"
] | [((213, 252), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (225, 252), False, 'import cv2\n'), ((464, 526), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""./haarcascade_frontalface_default.xml"""'], {}), "('./haarcascade_frontalface_default.xml')\n", (485, 526), False, 'import cv2\n'), ((900, 960), 'cv2.rectangle', 'cv2.rectangle', (['image', '(x, y)', '(x + w, y + w)', '(0, 255, 0)', '(2)'], {}), '(image, (x, y), (x + w, y + w), (0, 255, 0), 2)\n', (913, 960), False, 'import cv2\n')] |
#!/usr/bin/env python3
import datetime
import os
import warnings
import numpy as np
import scipy.interpolate as si
import matplotlib as mpl
from matplotlib.backends import backend_pdf
import matplotlib.pyplot as plt
from .utils import aia_raster
from .utils import cli
from .utils import eis
from .utils import num
from .utils import plots
from . import coregister as cr
class OptPointingVerif(object):
def __init__(self,
verif_dir, eis_name, aia_band,
pointings,
raster_builder, eis_int,
titles, ranges, offsets, cross_correlations,
start_time, stop_time,
):
''' Build and save pointing verification data
Parameters
==========
verif_dir : str
eis_name : str
aia_band : int
pointings : list of eis.EISPointing
raster_builder : aia_raster.SyntheticRasterBuilder
eis_int : 2D array
titles : list of str
ranges : list
Items can be either 3-tuples of cr.tools.OffsetSet, or None.
offsets : list
Items can be either 3-tuples of floats, or arrays of shape (n, 3).
cross_correlations : list of arrays
start_time : datetime.datetime
stop_time : datetime.datetime
'''
self.verif_dir = verif_dir
self.eis_name = eis_name
self.aia_band = aia_band
self.pointings = pointings
self.raster_builder = raster_builder
self.eis_int = eis_int
self.titles = titles
self.ranges = ranges
self.offsets = offsets
self.cross_correlations = cross_correlations
self.start_time = start_time
self.stop_time = stop_time
self.rms = []
if not os.path.exists(self.verif_dir):
os.makedirs(self.verif_dir)
    def save_all(self):
        """Save the npz arrays, the verification figures and the summary."""
        self.save_npz()
        self.save_figures()
        self.save_summary()
    def save_npz(self):
        ''' Save cc, offset, and new coordinates '''
        # offsets / cross_correlations may be ragged (shapes differ per
        # step), hence dtype=object; coordinates come from the final
        # (optimised) pointing.
        np.savez(
            os.path.join(self.verif_dir, 'offsets.npz'),
            offset=np.array(self.offsets, dtype=object),
            cc=np.array(self.cross_correlations, dtype=object),
            x=self.pointings[-1].x, y=self.pointings[-1].y,
            )
def save_summary(self):
''' Print and save yaml summary '''
if not self.rms:
self.rms = [None] * (len(titles) + 1)
run_specs = [
('verif_dir', self.verif_dir),
('initial_rms', self.rms[0]),
('steps', self._repr_steps(
self.titles,
self.ranges,
self.offsets,
self.cross_correlations,
self.rms[1:],
indent=2)),
('exec_time', self.stop_time - self.start_time),
]
summary = ''
for spec in run_specs:
summary += self._repr_kv(*spec, indent=0)
print('\n---\n', summary, '...', sep='')
with open(os.path.join(self.verif_dir, 'summary.yml'), 'w') as f:
f.write(summary)
def _repr_offset(self, offset):
offset = list(offset)
offset[0], offset[1] = offset[1], offset[0]
return offset
def _repr_kv(self, name, value, indent=0, sep=': ', end='\n'):
form = '{:#.6g}'
if isinstance(value, (list, tuple)):
value = [form.format(v)
if np.issubdtype(type(v), (float, np.inexact))
else str(v)
for v in value]
value = '[' + ', '.join(value) + ']'
if value is None:
value = 'null'
elif np.issubdtype(type(value), (float, np.inexact)):
value = form.format(value)
else:
value = str(value)
string = ''.join([indent * ' ', name, sep, str(value), end])
return string
    def _repr_steps(self, titles, all_ranges, offsets, ccs, rmss, indent=0):
        """Render the per-step data as a YAML-ish list of mappings.

        Each step gets a '- ' item with its name, optional search ranges,
        the best offset (only when it is a single 3-vector), the maximum
        cross-correlation, and the RMS when available.
        """
        indent += 2
        ret = '\n'
        for name, ranges, offset, cc, rms in \
                zip(titles, all_ranges, offsets, ccs, rmss):
            ret += ' '*(indent-2) + '- '
            ret += self._repr_kv('name', name, indent=0)
            if ranges:
                # ranges is stored (y, x, a); report per-axis.
                ry, rx, ra = ranges
                ret += self._repr_kv('range_x', rx, indent=indent)
                ret += self._repr_kv('range_y', ry, indent=indent)
                ret += self._repr_kv('range_a', ra, indent=indent)
            # Arrays of shape (n, 3) are skipped here; only single
            # (y, x, a) offsets are short enough to report inline.
            if len(offset) <= 3:
                ret += self._repr_kv('offset', self._repr_offset(offset), indent=indent)
            ret += self._repr_kv('cc_max', np.nanmax(cc), indent=indent)
            if rms is not None:
                ret += self._repr_kv('rms', rms, indent=indent)
        # Drop the trailing newline added by the last _repr_kv call.
        if ret[-1] == '\n':
            ret = ret[:-1]
        return ret
    def save_figures(self):
        ''' plot alignment results: one intensity figure per pointing step
        (shared difference-map colour scale), plus the slit alignment plot '''
        diff_norm = mpl.colors.Normalize(vmin=-3, vmax=+3)
        n_pointings = len(self.pointings)
        for i, pointing in enumerate(self.pointings):
            name = 'step_{}'.format(i)
            # Tag the first and last steps so their files are easy to spot.
            if i == 0:
                name += '_original'
            elif i == n_pointings - 1:
                name += '_optimal'
            self.plot_intensity(pointing, name=name, diff_norm=diff_norm)
        self.plot_slit_align()
def _get_interpolated_maps(self, pointing, save_to=None):
''' get maps and interpolate them on an evenly-spaced grid '''
x, y = pointing.x, pointing.y
aia_int = self.raster_builder.get_raster(
x, y, pointing.t / 3600,
extrapolate_t=True)
y_interp = np.linspace(y.min(), y.max(), y.shape[0])
x_interp = np.linspace(x.min(), x.max(), x.shape[1])
xi_interp = np.moveaxis(np.array(np.meshgrid(x_interp, y_interp)), 0, -1)
points = (x.flatten(), y.flatten())
eis_int_interp = si.LinearNDInterpolator(points, self.eis_int.flatten())
eis_int_interp = eis_int_interp(xi_interp)
aia_int_interp = si.LinearNDInterpolator(points, aia_int.flatten())
aia_int_interp = aia_int_interp(xi_interp)
if save_to:
np.savez(
save_to,
x=x,
y=y,
eis_int=self.eis_int,
aia_int=aia_int,
x_interp=x_interp,
y_interp=y_interp,
eis_int_interp=eis_int_interp,
aia_int_interp=aia_int_interp,
)
return x_interp, y_interp, eis_int_interp, aia_int_interp
def _normalize_intensity(self, a, b, norm=mpl.colors.Normalize):
def normalize(arr):
arr_stat = arr[~(arr == 0)] # exclude possibly missing AIA data
arr = (arr - np.nanmean(arr_stat)) / np.nanstd(arr_stat)
return arr
a = normalize(a)
b = normalize(b)
offset = - np.nanmin((a, b))
offset += .1
a += offset
b += offset
norm = norm(vmin=np.nanmin((a, b)), vmax=np.nanmax((a, b)))
return a, b, norm
def plot_intensity(self, pointing, name='', diff_norm=None):
''' plot intensity maps of EIS and AIA rasters '''
if name:
name = '_' + name
filenames = {
'npy': 'intensity_data{}.npz',
'intensity': 'intensity_maps{}.pdf',
'diff': 'intensity_diff{}.pdf',
}
filenames = {k: os.path.join(self.verif_dir, v.format(name))
for k, v in filenames.items()}
# build and save normalized intensity maps
x, y, eis_int, aia_int = self._get_interpolated_maps(
pointing, save_to=filenames['npy'])
eis_int, aia_int, norm = self._normalize_intensity(
eis_int, aia_int, mpl.colors.LogNorm)
# plot maps
pp = backend_pdf.PdfPages(filenames['intensity'])
intensity_plots = (
(eis_int, 'EIS'),
(aia_int, 'synthetic raster from AIA {}'.format(self.aia_band)),
)
for int_map, title in intensity_plots:
plt.clf()
plots.plot_map(
plt.gca(),
int_map, coordinates=[x, y],
cmap='gray', norm=norm)
plt.title(title)
plt.xlabel('X [arcsec]')
plt.ylabel('Y [arcsec]')
plt.savefig(pp)
pp.close()
# plot difference
diff = eis_int - aia_int
rms = np.sqrt(np.nanmean(diff**2))
self.rms.append(rms)
if not diff_norm:
vlim = np.nanmax(np.abs(diff))
diff_norm = mpl.colors.Normalize(vmin=-vlim, vmax=+vlim)
plt.clf()
im = plots.plot_map(
plt.gca(),
diff, coordinates=[x, y],
cmap='gray', norm=diff_norm)
cb = plt.colorbar(im)
cb.set_label('normalised EIS − AIA')
plt.title('RMS = {:.2g}'.format(rms))
plt.xlabel('X [arcsec]')
plt.ylabel('Y [arcsec]')
plt.savefig(filenames['diff'])
def _get_slit_offset(self):
slit_offsets = []
for offset in self.offsets:
if np.array(offset).ndim > 1:
slit_offsets.append(offset)
if len(slit_offsets) == 0:
return None
elif len(slit_offsets) > 1:
warnings.warn('Multiple slitshift steps. Plotting the first one')
return slit_offsets[0]
def plot_slit_align(self):
''' plot offsets and slit coordinates '''
slit_offset = self._get_slit_offset()
if slit_offset is None:
return
pp = backend_pdf.PdfPages(os.path.join(self.verif_dir, 'slit_align.pdf'))
x_color = '#2ca02c'
y_color = '#1f77b4'
old_color = '#d62728'
new_color = '#000000'
# offset
plt.clf()
plt.plot(slit_offset.T[1], '.', label='X', color=x_color)
plt.plot(slit_offset.T[0], '.', label='Y', color=y_color)
plt.title(self.eis_name)
plt.xlabel('slit position')
plt.ylabel('offset [arcsec]')
plt.legend()
plt.savefig(pp)
# new coordinates
plots = [
('X', self.pointings[-1].x, self.pointings[0].x),
('Y', self.pointings[-1].y, self.pointings[0].y),
]
for name, aligned, original in plots:
plt.clf()
plt.plot(original[0], ',', label='original ' + name, color=old_color)
plt.plot(aligned[0], ',', label='aligned ' + name, color=new_color)
plt.legend()
plt.title(self.eis_name)
plt.xlabel('slit position')
plt.ylabel(name + ' [arcsec]')
plt.savefig(pp)
pp.close()
def shift_step(x, y, eis_int, aia_int, cores=None, **kwargs):
cli.print_now('> correct translation')
x, y, offset = cr.images.align(
eis_int, x, y,
aia_int, x, y,
cores=cores, **kwargs)
y_offset, x_offset, cc = offset
offset = [y_offset, x_offset, 0]
offset_set = None
title = 'shift'
return title, offset_set, offset, cc, x, y
def rotshift_step(x, y, dates_rel_hours, eis_int, raster_builder,
cores=None, **kwargs):
cli.print_now('> align rasters')
x, y, offset = cr.rasters.align(
eis_int, x, y, dates_rel_hours, raster_builder,
cores=cores, **kwargs)
y_offset, x_offset, a_offset, cc = offset
offset = [y_offset, x_offset, a_offset]
offset_set = (kwargs['y_set'], kwargs['x_set'], kwargs['a_set'])
title = 'rotshift'
return title, offset_set, offset, cc, x, y
def slitshift_step(x, y, dates_rel_hours, eis_int, raster_builder,
cores=None, **kwargs):
cli.print_now('> align slit positions')
x, y, offset = cr.slits.align(
eis_int, x, y, dates_rel_hours, raster_builder,
cores=cores, **kwargs)
offset, cc = offset
offset_set = (kwargs['y_set'], kwargs['x_set'], kwargs['a_set'])
title = 'slitshift'
return title, offset_set, offset, cc, x, y
def optimal_pointing(eis_data, cores=None, aia_band=None,
verif_dir=None, aia_cache=None, eis_name=None, steps_file=None):
''' Determine the EIS pointing using AIA data as a reference.
Parameters
==========
eis_data : eis.EISData
Object containing the EIS intensity and pointing.
cores : int or None
Number of cores to use for multiprocessing, if any.
aia_band : int
The reference AIA channel. Eg. 193.
verif_dir : str
Path to the directory where to save verification plots.
aia_cache : str
Path to the synthetic AIA raster builder cache file.
eis_name : str
Name of the l0 EIS file eg. eis_l0_20140810_010438
steps_file : str
Path to a yaml file containing the registration steps.
Returns
=======
pointing : eis.EISPointing
Optimal EIS pointing.
'''
if steps_file:
registration_steps = cli.load_corr_steps(steps_file)
else:
warnings.warn('No steps file provided, falling back to default.')
registration_steps = {'steps': [
{'type': 'shift',
'cc_function': 'explicit',
'cc_boundary': 'drop',
'sub_px': True,
},
{'type': 'rotshift',
'x_set': cr.tools.OffsetSet((-10.0, 10.0), number=11),
'y_set': cr.tools.OffsetSet((-5.0, 5.0), number=11),
'a_set': cr.tools.OffsetSet((-3.0, 3.0), step=0.2),
},
{'type': 'slitshift',
'x_set': cr.tools.OffsetSet((-20.0, 20.0), number=21),
'y_set': cr.tools.OffsetSet((-20.0, 20.0), number=21),
'a_set': cr.tools.OffsetSet((0.0, 0.0), number=1),
'mp_mode': 'track'
},
]}
cli.print_now('> build relative and absolute date arrays') # ----------------------
dates_rel = num.seconds_to_timedelta(eis_data.pointing.t)
dates_rel_hours = eis_data.pointing.t / 3600
date_ref = eis_data.pointing.t_ref
dates_abs = date_ref + dates_rel
cli.print_now('> get EIS grid info and add margin') # -----------------------------
x, y = eis_data.pointing.x, eis_data.pointing.y
x_margin = (np.max(x) - np.min(x)) / 2
y_margin = (np.max(y) - np.min(y)) / 2
x_margin = np.max(x_margin)
y_margin = np.max(y_margin)
ny, y_slice = cr.tools.create_margin(y, y_margin, 0)
nx, x_slice = cr.tools.create_margin(x, x_margin, 1)
new_shape = 1, ny, nx
new_slice = slice(None), y_slice, x_slice
eis_int = eis_data.data
cli.print_now('> get AIA data') # -------------------------------------------------
single_aia_frame = registration_steps.get('single_aia_frame', False)
if single_aia_frame:
single_aia_frame = num.dt_average(np.min(dates_abs), np.max(dates_abs))
aia_cache = None
raster_builder = aia_raster.SyntheticRasterBuilder(
dates=[np.min(dates_abs), np.max(dates_abs)],
date_ref=date_ref,
channel=aia_band,
file_cache=aia_cache,
single_frame=single_aia_frame,
)
raster_builder.get_data()
# degrade raster_builder resolution to 3 arcsec (see DelZanna+2011)
raster_builder.degrade_resolution(3, cores=cores)
# crop raster_builder cached data to fix multiprocessing
x_min, x_max = x.min(), x.max()
y_min, y_max = y.min(), y.max()
x_cen = (x_min + x_max) / 2
y_cen = (y_min + y_max) / 2
r = np.sqrt((x_max - x_cen)**2 + (y_max - y_cen)**2)
raster_builder.crop_data(x_cen - r, x_cen + r, y_cen - r, y_cen + r)
# compute alignment -------------------------------------------------------
titles = []
offset_sets = []
offsets = []
pointings = [eis_data.pointing]
cross_correlations = []
start_time = datetime.datetime.now()
for step in registration_steps['steps']:
registration_type = step.pop('type')
if registration_type == 'shift':
aia_int = raster_builder.get_raster(
x, y, dates_rel_hours,
extrapolate_t=True)
result = shift_step(x, y, eis_int, aia_int, cores=cores, **step)
elif registration_type == 'rotshift':
result = rotshift_step(x, y, dates_rel_hours,
eis_int, raster_builder,
cores=cores, **step)
elif registration_type == 'slitshift':
result = slitshift_step(x, y, dates_rel_hours,
eis_int, raster_builder,
cores=cores, **step)
else:
raise ValueError('unknown registration step')
title, offset_set, offset, cc, x, y = result
titles.append(title)
offset_sets.append(offset_set)
offsets.append(offset)
pointings.append(eis.EISPointing(x, y, eis_data.pointing.t, date_ref))
cross_correlations.append(cc)
stop_time = datetime.datetime.now()
if verif_dir:
verif = OptPointingVerif(
verif_dir, eis_name, aia_band,
pointings,
raster_builder, eis_int,
titles, offset_sets, offsets, cross_correlations,
start_time, stop_time,
)
verif.save_all()
return pointings[-1]
| [
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"numpy.nanmean",
"numpy.array",
"numpy.nanmin",
"os.path.exists",
"numpy.savez",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.max",
"numpy.nanmax",
"numpy.min",
"warnings.warn",
"numpy.meshgrid",
"numpy.abs",
"numpy.nanstd",
"... | [((14319, 14335), 'numpy.max', 'np.max', (['x_margin'], {}), '(x_margin)\n', (14325, 14335), True, 'import numpy as np\n'), ((14351, 14367), 'numpy.max', 'np.max', (['y_margin'], {}), '(y_margin)\n', (14357, 14367), True, 'import numpy as np\n'), ((15480, 15532), 'numpy.sqrt', 'np.sqrt', (['((x_max - x_cen) ** 2 + (y_max - y_cen) ** 2)'], {}), '((x_max - x_cen) ** 2 + (y_max - y_cen) ** 2)\n', (15487, 15532), True, 'import numpy as np\n'), ((15819, 15842), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (15840, 15842), False, 'import datetime\n'), ((16900, 16923), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (16921, 16923), False, 'import datetime\n'), ((4917, 4955), 'matplotlib.colors.Normalize', 'mpl.colors.Normalize', ([], {'vmin': '(-3)', 'vmax': '(+3)'}), '(vmin=-3, vmax=+3)\n', (4937, 4955), True, 'import matplotlib as mpl\n'), ((7833, 7877), 'matplotlib.backends.backend_pdf.PdfPages', 'backend_pdf.PdfPages', (["filenames['intensity']"], {}), "(filenames['intensity'])\n", (7853, 7877), False, 'from matplotlib.backends import backend_pdf\n'), ((8664, 8673), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (8671, 8673), True, 'import matplotlib.pyplot as plt\n'), ((8818, 8834), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['im'], {}), '(im)\n', (8830, 8834), True, 'import matplotlib.pyplot as plt\n'), ((8934, 8958), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""X [arcsec]"""'], {}), "('X [arcsec]')\n", (8944, 8958), True, 'import matplotlib.pyplot as plt\n'), ((8967, 8991), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Y [arcsec]"""'], {}), "('Y [arcsec]')\n", (8977, 8991), True, 'import matplotlib.pyplot as plt\n'), ((9000, 9030), 'matplotlib.pyplot.savefig', 'plt.savefig', (["filenames['diff']"], {}), "(filenames['diff'])\n", (9011, 9030), True, 'import matplotlib.pyplot as plt\n'), ((9818, 9827), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (9825, 9827), True, 'import matplotlib.pyplot as 
plt\n'), ((9836, 9893), 'matplotlib.pyplot.plot', 'plt.plot', (['slit_offset.T[1]', '"""."""'], {'label': '"""X"""', 'color': 'x_color'}), "(slit_offset.T[1], '.', label='X', color=x_color)\n", (9844, 9893), True, 'import matplotlib.pyplot as plt\n'), ((9902, 9959), 'matplotlib.pyplot.plot', 'plt.plot', (['slit_offset.T[0]', '"""."""'], {'label': '"""Y"""', 'color': 'y_color'}), "(slit_offset.T[0], '.', label='Y', color=y_color)\n", (9910, 9959), True, 'import matplotlib.pyplot as plt\n'), ((9968, 9992), 'matplotlib.pyplot.title', 'plt.title', (['self.eis_name'], {}), '(self.eis_name)\n', (9977, 9992), True, 'import matplotlib.pyplot as plt\n'), ((10001, 10028), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""slit position"""'], {}), "('slit position')\n", (10011, 10028), True, 'import matplotlib.pyplot as plt\n'), ((10037, 10066), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""offset [arcsec]"""'], {}), "('offset [arcsec]')\n", (10047, 10066), True, 'import matplotlib.pyplot as plt\n'), ((10075, 10087), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (10085, 10087), True, 'import matplotlib.pyplot as plt\n'), ((10096, 10111), 'matplotlib.pyplot.savefig', 'plt.savefig', (['pp'], {}), '(pp)\n', (10107, 10111), True, 'import matplotlib.pyplot as plt\n'), ((12998, 13063), 'warnings.warn', 'warnings.warn', (['"""No steps file provided, falling back to default."""'], {}), "('No steps file provided, falling back to default.')\n", (13011, 13063), False, 'import warnings\n'), ((1759, 1789), 'os.path.exists', 'os.path.exists', (['self.verif_dir'], {}), '(self.verif_dir)\n', (1773, 1789), False, 'import os\n'), ((1803, 1830), 'os.makedirs', 'os.makedirs', (['self.verif_dir'], {}), '(self.verif_dir)\n', (1814, 1830), False, 'import os\n'), ((2044, 2087), 'os.path.join', 'os.path.join', (['self.verif_dir', '"""offsets.npz"""'], {}), "(self.verif_dir, 'offsets.npz')\n", (2056, 2087), False, 'import os\n'), ((6165, 6340), 'numpy.savez', 'np.savez', (['save_to'], 
{'x': 'x', 'y': 'y', 'eis_int': 'self.eis_int', 'aia_int': 'aia_int', 'x_interp': 'x_interp', 'y_interp': 'y_interp', 'eis_int_interp': 'eis_int_interp', 'aia_int_interp': 'aia_int_interp'}), '(save_to, x=x, y=y, eis_int=self.eis_int, aia_int=aia_int, x_interp\n =x_interp, y_interp=y_interp, eis_int_interp=eis_int_interp,\n aia_int_interp=aia_int_interp)\n', (6173, 6340), True, 'import numpy as np\n'), ((6897, 6914), 'numpy.nanmin', 'np.nanmin', (['(a, b)'], {}), '((a, b))\n', (6906, 6914), True, 'import numpy as np\n'), ((8086, 8095), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (8093, 8095), True, 'import matplotlib.pyplot as plt\n'), ((8248, 8264), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (8257, 8264), True, 'import matplotlib.pyplot as plt\n'), ((8277, 8301), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""X [arcsec]"""'], {}), "('X [arcsec]')\n", (8287, 8301), True, 'import matplotlib.pyplot as plt\n'), ((8314, 8338), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Y [arcsec]"""'], {}), "('Y [arcsec]')\n", (8324, 8338), True, 'import matplotlib.pyplot as plt\n'), ((8351, 8366), 'matplotlib.pyplot.savefig', 'plt.savefig', (['pp'], {}), '(pp)\n', (8362, 8366), True, 'import matplotlib.pyplot as plt\n'), ((8468, 8489), 'numpy.nanmean', 'np.nanmean', (['(diff ** 2)'], {}), '(diff ** 2)\n', (8478, 8489), True, 'import numpy as np\n'), ((8611, 8655), 'matplotlib.colors.Normalize', 'mpl.colors.Normalize', ([], {'vmin': '(-vlim)', 'vmax': '(+vlim)'}), '(vmin=-vlim, vmax=+vlim)\n', (8631, 8655), True, 'import matplotlib as mpl\n'), ((8715, 8724), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (8722, 8724), True, 'import matplotlib.pyplot as plt\n'), ((9629, 9675), 'os.path.join', 'os.path.join', (['self.verif_dir', '"""slit_align.pdf"""'], {}), "(self.verif_dir, 'slit_align.pdf')\n", (9641, 9675), False, 'import os\n'), ((10352, 10361), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (10359, 10361), True, 'import 
matplotlib.pyplot as plt\n'), ((10374, 10443), 'matplotlib.pyplot.plot', 'plt.plot', (['original[0]', '""","""'], {'label': "('original ' + name)", 'color': 'old_color'}), "(original[0], ',', label='original ' + name, color=old_color)\n", (10382, 10443), True, 'import matplotlib.pyplot as plt\n'), ((10456, 10523), 'matplotlib.pyplot.plot', 'plt.plot', (['aligned[0]', '""","""'], {'label': "('aligned ' + name)", 'color': 'new_color'}), "(aligned[0], ',', label='aligned ' + name, color=new_color)\n", (10464, 10523), True, 'import matplotlib.pyplot as plt\n'), ((10538, 10550), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (10548, 10550), True, 'import matplotlib.pyplot as plt\n'), ((10563, 10587), 'matplotlib.pyplot.title', 'plt.title', (['self.eis_name'], {}), '(self.eis_name)\n', (10572, 10587), True, 'import matplotlib.pyplot as plt\n'), ((10600, 10627), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""slit position"""'], {}), "('slit position')\n", (10610, 10627), True, 'import matplotlib.pyplot as plt\n'), ((10640, 10670), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (["(name + ' [arcsec]')"], {}), "(name + ' [arcsec]')\n", (10650, 10670), True, 'import matplotlib.pyplot as plt\n'), ((10683, 10698), 'matplotlib.pyplot.savefig', 'plt.savefig', (['pp'], {}), '(pp)\n', (10694, 10698), True, 'import matplotlib.pyplot as plt\n'), ((14234, 14243), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (14240, 14243), True, 'import numpy as np\n'), ((14246, 14255), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (14252, 14255), True, 'import numpy as np\n'), ((14277, 14286), 'numpy.max', 'np.max', (['y'], {}), '(y)\n', (14283, 14286), True, 'import numpy as np\n'), ((14289, 14298), 'numpy.min', 'np.min', (['y'], {}), '(y)\n', (14295, 14298), True, 'import numpy as np\n'), ((14812, 14829), 'numpy.min', 'np.min', (['dates_abs'], {}), '(dates_abs)\n', (14818, 14829), True, 'import numpy as np\n'), ((14831, 14848), 'numpy.max', 'np.max', (['dates_abs'], {}), '(dates_abs)\n', 
(14837, 14848), True, 'import numpy as np\n'), ((2108, 2144), 'numpy.array', 'np.array', (['self.offsets'], {'dtype': 'object'}), '(self.offsets, dtype=object)\n', (2116, 2144), True, 'import numpy as np\n'), ((2161, 2208), 'numpy.array', 'np.array', (['self.cross_correlations'], {'dtype': 'object'}), '(self.cross_correlations, dtype=object)\n', (2169, 2208), True, 'import numpy as np\n'), ((3014, 3057), 'os.path.join', 'os.path.join', (['self.verif_dir', '"""summary.yml"""'], {}), "(self.verif_dir, 'summary.yml')\n", (3026, 3057), False, 'import os\n'), ((5787, 5818), 'numpy.meshgrid', 'np.meshgrid', (['x_interp', 'y_interp'], {}), '(x_interp, y_interp)\n', (5798, 5818), True, 'import numpy as np\n'), ((6785, 6804), 'numpy.nanstd', 'np.nanstd', (['arr_stat'], {}), '(arr_stat)\n', (6794, 6804), True, 'import numpy as np\n'), ((7001, 7018), 'numpy.nanmin', 'np.nanmin', (['(a, b)'], {}), '((a, b))\n', (7010, 7018), True, 'import numpy as np\n'), ((7025, 7042), 'numpy.nanmax', 'np.nanmax', (['(a, b)'], {}), '((a, b))\n', (7034, 7042), True, 'import numpy as np\n'), ((8140, 8149), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (8147, 8149), True, 'import matplotlib.pyplot as plt\n'), ((8573, 8585), 'numpy.abs', 'np.abs', (['diff'], {}), '(diff)\n', (8579, 8585), True, 'import numpy as np\n'), ((9319, 9384), 'warnings.warn', 'warnings.warn', (['"""Multiple slitshift steps. Plotting the first one"""'], {}), "('Multiple slitshift steps. 
Plotting the first one')\n", (9332, 9384), False, 'import warnings\n'), ((14946, 14963), 'numpy.min', 'np.min', (['dates_abs'], {}), '(dates_abs)\n', (14952, 14963), True, 'import numpy as np\n'), ((14965, 14982), 'numpy.max', 'np.max', (['dates_abs'], {}), '(dates_abs)\n', (14971, 14982), True, 'import numpy as np\n'), ((4628, 4641), 'numpy.nanmax', 'np.nanmax', (['cc'], {}), '(cc)\n', (4637, 4641), True, 'import numpy as np\n'), ((6761, 6781), 'numpy.nanmean', 'np.nanmean', (['arr_stat'], {}), '(arr_stat)\n', (6771, 6781), True, 'import numpy as np\n'), ((9141, 9157), 'numpy.array', 'np.array', (['offset'], {}), '(offset)\n', (9149, 9157), True, 'import numpy as np\n')] |
import unittest
from unittest.mock import patch, Mock
from payments.domain.paypal_payment import PaypalPayment
class TestPaypal(unittest.TestCase):
@patch("paypalrestsdk.Payment", return_value=Mock(links=[Mock(rel="approval_url", href="url")],
id="PAYID-123", create=Mock(return_value=True)))
def test_initiate_payment(self, mock_object):
payment_id = "123"
order_id = "order-123"
amount = 123
payment_status = ""
created_at = "2000-01-01 00:00:00"
payment_details = {}
currency = "USD"
response = PaypalPayment(payment_id, amount, currency,
payment_status, created_at, payment_details).initiate_payment(order_id)
expected_response = {'payment': {'id': 'PAYID-123', 'payment_url': 'url'}}
self.assertDictEqual(response, expected_response)
@patch("paypalrestsdk.Payment.find", return_value=Mock(links=[Mock(rel="approval_url", href="url")],
id="PAYID-123", execute=Mock(return_value=True)))
def test_execute_payment(self, mock_object):
payment_id = "123"
amount = 123
payment_status = ""
created_at = "2000-01-01 00:00:00"
currency = "USD"
payment_details = {"payment_id": "PAYID-123"}
assert PaypalPayment(payment_id, amount, currency, payment_status, created_at, payment_details) \
.execute_transaction({"payer_id": "PAYER-123"})
if __name__ == "__main__":
TestPaypal()
| [
"payments.domain.paypal_payment.PaypalPayment",
"unittest.mock.Mock"
] | [((627, 719), 'payments.domain.paypal_payment.PaypalPayment', 'PaypalPayment', (['payment_id', 'amount', 'currency', 'payment_status', 'created_at', 'payment_details'], {}), '(payment_id, amount, currency, payment_status, created_at,\n payment_details)\n', (640, 719), False, 'from payments.domain.paypal_payment import PaypalPayment\n'), ((1394, 1486), 'payments.domain.paypal_payment.PaypalPayment', 'PaypalPayment', (['payment_id', 'amount', 'currency', 'payment_status', 'created_at', 'payment_details'], {}), '(payment_id, amount, currency, payment_status, created_at,\n payment_details)\n', (1407, 1486), False, 'from payments.domain.paypal_payment import PaypalPayment\n'), ((328, 351), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (332, 351), False, 'from unittest.mock import patch, Mock\n'), ((1106, 1129), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (1110, 1129), False, 'from unittest.mock import patch, Mock\n'), ((212, 248), 'unittest.mock.Mock', 'Mock', ([], {'rel': '"""approval_url"""', 'href': '"""url"""'}), "(rel='approval_url', href='url')\n", (216, 248), False, 'from unittest.mock import patch, Mock\n'), ((984, 1020), 'unittest.mock.Mock', 'Mock', ([], {'rel': '"""approval_url"""', 'href': '"""url"""'}), "(rel='approval_url', href='url')\n", (988, 1020), False, 'from unittest.mock import patch, Mock\n')] |
"""Defines useful types and utilities for working with bytestrings."""
from __future__ import annotations
import zlib
from abc import abstractmethod, ABCMeta
from collections.abc import Iterable, Sequence
from io import BytesIO
from itertools import chain
from typing import cast, final, Any, Final, TypeVar, SupportsBytes, Union, \
Protocol
__all__ = ['MaybeBytes', 'MaybeBytesT', 'WriteStream', 'Writeable',
'BytesFormat']
#: A bytes object, memoryview, or an object with a ``__bytes__`` method.
MaybeBytes = Union[bytes, bytearray, memoryview, SupportsBytes]
#: A type variable bound to :class:`MaybeBytes`.
MaybeBytesT = TypeVar('MaybeBytesT', bound=MaybeBytes)
_FormatArg = Union[MaybeBytes, int]
class WriteStream(Protocol):
"""Typing protocol indicating the object implements the :meth:`.write`
method.
See Also:
:class:`~asyncio.StreamWriter`, :class:`~typing.BinaryIO`
"""
@abstractmethod
def write(self, data: bytes) -> Any:
"""Defines an abstract method where ``data`` is written to a stream or
buffer.
Args:
data: The data to write.
"""
...
class HashStream(WriteStream):
"""A stream that a :class:`Writeable` can use to generate a
non-cryptographic hash using :func:`zlib.adler32`.
"""
__slots__ = ['_digest']
def __init__(self) -> None:
super().__init__()
self._digest = zlib.adler32(b'')
def write(self, data: bytes) -> None:
self._digest = zlib.adler32(data, self._digest)
def digest(self, data: Writeable = None) -> bytes:
"""Return the digest of the data written to the hash stream.
Args:
data: The data to write before computing the digest.
"""
if data is not None:
data.write(self)
return self._digest.to_bytes(4, 'big')
class Writeable(metaclass=ABCMeta):
"""Base class for types that can be written to a stream."""
__slots__: Sequence[str] = []
@final
def tobytes(self) -> bytes:
"""Convert the writeable object back into a bytestring using the
:meth:`.write` method.
"""
writer = BytesIO()
self.write(writer)
return writer.getvalue()
@classmethod
def empty(cls) -> Writeable:
"""Return a :class:`Writeable` for an empty string."""
return _EmptyWriteable()
@classmethod
def wrap(cls, data: MaybeBytes) -> Writeable:
"""Wrap the bytes in a :class:`Writeable`.
Args:
data: The object to wrap.
"""
return _WrappedWriteable(data)
@classmethod
def concat(cls, data: Iterable[MaybeBytes]) -> Writeable:
"""Wrap the iterable in a :class:`Writeable` that will write each item.
Args:
data: The iterable to wrap.
"""
return _ConcatWriteable(data)
def write(self, writer: WriteStream) -> None:
"""Write the object to the stream, with one or more calls to
:meth:`~WriteStream.write`.
Args:
writer: The output stream.
"""
writer.write(bytes(self))
def __bool__(self) -> bool:
return True
def __len__(self) -> int:
return len(bytes(self))
@abstractmethod
def __bytes__(self) -> bytes:
...
class _EmptyWriteable(Writeable):
__slots__: Sequence[str] = []
def write(self, writer: WriteStream) -> None:
pass
def __bytes__(self) -> bytes:
return b''
def __repr__(self) -> str:
return '<Writeable empty>'
class _WrappedWriteable(Writeable):
__slots__ = ['data']
def __init__(self, data: MaybeBytes) -> None:
if isinstance(data, bytes):
self.data = data
else:
self.data = bytes(data)
def __bytes__(self) -> bytes:
return self.data
def __repr__(self) -> str:
return f'<Writeable {repr(self.data)}>'
class _ConcatWriteable(Writeable):
__slots__ = ['data']
def __init__(self, data: Iterable[MaybeBytes]) -> None:
self.data = list(data)
def write(self, writer: WriteStream) -> None:
for item in self.data:
if isinstance(item, Writeable):
item.write(writer)
else:
writer.write(bytes(item))
def __bytes__(self) -> bytes:
return BytesFormat(b'').join(self.data)
def __repr__(self) -> str:
return f'<Writeable {repr(self.data)}>'
class BytesFormat:
"""Helper utility for performing formatting operations that produce
bytestrings. While similar to the builtin formatting and join
operations, this class intends to provide cleaner typing.
Args:
how: The formatting string or join delimiter to use.
"""
__slots__ = ['how']
def __init__(self, how: bytes) -> None:
super().__init__()
self.how: Final = how
def __mod__(self, other: Union[_FormatArg, Iterable[_FormatArg]]) -> bytes:
"""String interpolation, shortcut for :meth:`.format`.
Args:
other: The data interpolated into the format string.
"""
if isinstance(other, bytes):
return self.format([other])
elif hasattr(other, '__bytes__'):
supports_bytes = cast(SupportsBytes, other)
return self.format([bytes(supports_bytes)])
elif hasattr(other, '__iter__'):
items = cast(Iterable[_FormatArg], other)
return self.format(items)
return NotImplemented
@classmethod
def _fix_format_arg(cls, data: _FormatArg) -> Any:
if isinstance(data, int):
return data
else:
return bytes(data)
def format(self, data: Iterable[_FormatArg]) -> bytes:
"""String interpolation into the format string.
Args:
data: The data interpolated into the format string.
Examples:
::
BytesFormat(b'Hello, %b!') % b'World'
BytesFormat(b'%b, %b!') % (b'Hello', b'World')
"""
fix_arg = self._fix_format_arg
return self.how % tuple(fix_arg(item) for item in data)
@classmethod
def _fix_join_arg(cls, data: _FormatArg) -> Any:
if isinstance(data, int):
return b'%d' % data
else:
return bytes(data)
def join(self, *data: Iterable[_FormatArg]) -> bytes:
"""Iterable join on a delimiter.
Args:
data: Iterable of items to join.
Examples:
::
BytesFormat(b' ').join([b'one', b'two', b'three'])
"""
fix_arg = self._fix_join_arg
return self.how.join(fix_arg(item) for item in chain(*data))
| [
"itertools.chain",
"zlib.adler32",
"io.BytesIO",
"typing.cast",
"typing.TypeVar"
] | [((646, 686), 'typing.TypeVar', 'TypeVar', (['"""MaybeBytesT"""'], {'bound': 'MaybeBytes'}), "('MaybeBytesT', bound=MaybeBytes)\n", (653, 686), False, 'from typing import cast, final, Any, Final, TypeVar, SupportsBytes, Union, Protocol\n'), ((1439, 1456), 'zlib.adler32', 'zlib.adler32', (["b''"], {}), "(b'')\n", (1451, 1456), False, 'import zlib\n'), ((1523, 1555), 'zlib.adler32', 'zlib.adler32', (['data', 'self._digest'], {}), '(data, self._digest)\n', (1535, 1555), False, 'import zlib\n'), ((2194, 2203), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (2201, 2203), False, 'from io import BytesIO\n'), ((5320, 5346), 'typing.cast', 'cast', (['SupportsBytes', 'other'], {}), '(SupportsBytes, other)\n', (5324, 5346), False, 'from typing import cast, final, Any, Final, TypeVar, SupportsBytes, Union, Protocol\n'), ((5464, 5497), 'typing.cast', 'cast', (['Iterable[_FormatArg]', 'other'], {}), '(Iterable[_FormatArg], other)\n', (5468, 5497), False, 'from typing import cast, final, Any, Final, TypeVar, SupportsBytes, Union, Protocol\n'), ((6754, 6766), 'itertools.chain', 'chain', (['*data'], {}), '(*data)\n', (6759, 6766), False, 'from itertools import chain\n')] |
#A template for when we actually build the model.
import numpy as np
from sklearn.model_selection import train_test_split
from tensorflow.keras.layers import Dense, LSTM, Dropout
from tensorflow.keras import Sequential
categories = [] #List out category string names here
reproducibility = 7 #Constant seed for reproducibility
np.random.seed(reproducibility)
#Load the data here; probably pickle or something like that
train_x,test_x,train_y,test_y = train_test_split(x,y,
test_size=0.2,random_state=reproducibility)
model = Sequential()
#Layers will go here
#Compiled given that we're working with categorization.
model.add(Dense(len(categories),activation="softmax"))
model.compile(optimizer="adam",
loss="categorical_crossentropy",metrics=['accuracy'])
#--
bsize=64
epochs=1000
#--
model.fit(x=train_x,y=train_y, verbose=1,
validation_data=(test_x,test_y),batch_size=bsize,epochs=epochs) | [
"sklearn.model_selection.train_test_split",
"tensorflow.keras.Sequential",
"numpy.random.seed"
] | [((327, 358), 'numpy.random.seed', 'np.random.seed', (['reproducibility'], {}), '(reproducibility)\n', (341, 358), True, 'import numpy as np\n'), ((451, 518), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.2)', 'random_state': 'reproducibility'}), '(x, y, test_size=0.2, random_state=reproducibility)\n', (467, 518), False, 'from sklearn.model_selection import train_test_split\n'), ((557, 569), 'tensorflow.keras.Sequential', 'Sequential', ([], {}), '()\n', (567, 569), False, 'from tensorflow.keras import Sequential\n')] |
# Create FastText model using from raw train data
import fastText
TRAIN_FILE = './datasets/raw_data/tweets.train'
su_model = fastText.train_supervised(input=TRAIN_FILE, wordNgrams=3)
su_model.save_model('model_sentiment.bin') | [
"fastText.train_supervised"
] | [((127, 184), 'fastText.train_supervised', 'fastText.train_supervised', ([], {'input': 'TRAIN_FILE', 'wordNgrams': '(3)'}), '(input=TRAIN_FILE, wordNgrams=3)\n', (152, 184), False, 'import fastText\n')] |
# Generated by Django 3.1.12 on 2021-06-13 04:44
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('media', '0009_auto_20210611_1441'),
]
operations = [
migrations.RemoveField(
model_name='collection',
name='media_item',
),
migrations.AddField(
model_name='collection',
name='media',
field=models.ManyToManyField(blank=True, limit_choices_to={'approved': True}, to='media.Media'),
),
]
| [
"django.db.migrations.RemoveField",
"django.db.models.ManyToManyField"
] | [((234, 300), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""collection"""', 'name': '"""media_item"""'}), "(model_name='collection', name='media_item')\n", (256, 300), False, 'from django.db import migrations, models\n'), ((447, 541), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'blank': '(True)', 'limit_choices_to': "{'approved': True}", 'to': '"""media.Media"""'}), "(blank=True, limit_choices_to={'approved': True}, to=\n 'media.Media')\n", (469, 541), False, 'from django.db import migrations, models\n')] |
import warnings
no_pandas_warning = "Pandas/Numpy is not available. Support for 'dataframe' mode is disabled."
no_redis_warning = "Redis dependencies not available. Support for caching is disabled."
try:
import pandas as pd
import numpy as np
except ModuleNotFoundError:
pd = None
np = None
warnings.warn(no_pandas_warning)
try:
import aioredis
import lz4.block as lz4
except ModuleNotFoundError:
aioredis = None
lz4 = None
__all__ = ['no_pandas_warning', 'no_redis_warning', 'pd', 'np', 'aioredis', 'lz4']
| [
"warnings.warn"
] | [((313, 345), 'warnings.warn', 'warnings.warn', (['no_pandas_warning'], {}), '(no_pandas_warning)\n', (326, 345), False, 'import warnings\n')] |
import json
import rospy
from std_msgs.msg import String
from handlers.handler import Handler
from mapping.command_panel import CommandPanel
from mapping.resistance import Resistance
class ReadLettersHandler(Handler):
def initialize(self):
self.sub = rospy.Subscriber('letters', String, self.read_letters) # TODO: checker le nom du topic
self.is_finished = False
def handle(self, handled_data=None):
self.initialize()
command_panel = CommandPanel()
command_panel.set_resistance(handled_data['resistance'])
# handled_data["calculate_pucks_pub"].publish(True)
self.handled_data = handled_data
handled_data["read_letters_pub"].publish(True)
while not self.is_finished:
pass
rounded_resistance, _ = Resistance(handled_data["resistance"]).get_resistance_and_colors()
handled_data["letters"] = self.letters
command_panel.set_mapped_letters(self.letters)
command_panel.set_resistance(rounded_resistance)
first_corner = command_panel.find_first_corner_letter()
second_corner = first_corner.get_next_letter()
third_corner = second_corner.get_next_letter()
handled_data["corners"] = [first_corner.value, second_corner.value, third_corner.value]
return handled_data
def read_letters(self, data):
letters = json.loads(data.data)
self.letters = letters
rospy.logerr("READ LETTERS " + str(self.letters))
self.is_finished = len(letters) == 9
if not self.is_finished:
self.handled_data["read_letters_pub"].publish(True)
def unregister(self):
self.sub.unregister()
| [
"mapping.resistance.Resistance",
"mapping.command_panel.CommandPanel",
"rospy.Subscriber",
"json.loads"
] | [((266, 320), 'rospy.Subscriber', 'rospy.Subscriber', (['"""letters"""', 'String', 'self.read_letters'], {}), "('letters', String, self.read_letters)\n", (282, 320), False, 'import rospy\n'), ((478, 492), 'mapping.command_panel.CommandPanel', 'CommandPanel', ([], {}), '()\n', (490, 492), False, 'from mapping.command_panel import CommandPanel\n'), ((1381, 1402), 'json.loads', 'json.loads', (['data.data'], {}), '(data.data)\n', (1391, 1402), False, 'import json\n'), ((801, 839), 'mapping.resistance.Resistance', 'Resistance', (["handled_data['resistance']"], {}), "(handled_data['resistance'])\n", (811, 839), False, 'from mapping.resistance import Resistance\n')] |
"""
@author: buechner_m <<EMAIL>>
"""
import sys
sys.path.append('..') # To allow importing from neighbouring folder
import simulation.materials as materials
import logging
logger = logging.getLogger(__name__)
class Grating(object):
"""
Parent class for gratings.
Parameters
==========
pitch: grating pitch in [um], in x-direction
material: grating material
design_energy: x-ray energy [keV]
height: grating height in [um], in z-direction; default=0 (no height
specified)
duty_cycle: default=0.5
shape: shape of grating, choices = ['flat','circular'], default='flat'
Examples
========
"""
def __init__(self, pitch, material, design_energy, height=0,
duty_cycle=0.5, shape='flat'):
self.pitch = pitch # [um]
self.material = material
self.design_energy = design_energy # [keV]
self.height = height # [um]
self.duty_cycle = duty_cycle
self.shape = shape
class PhaseGrating(Grating):
"""
Child class from Grating class, adds phase properties.
Parameters
==========
phase_shift: required phase shift at given design energy; default=0 (no
shift specified)
Notes
=====
Either a grating height or a required phase shift needs to be defined, the
other is calculated accordingly.
Examples
========
"""
def __init__(self, pitch, material, design_energy, height=0,
duty_cycle=0.5, shape='flat', phase_shift=0):
# call init from parent class
super(PhaseGrating, self).__init__(pitch, material, design_energy,
height, duty_cycle, shape)
# Calculate height or phase shift respectively
if self.height:
self.phase_shift = materials.height_to_shift(self.height,
self.material,
self.design_energy)
elif phase_shift:
self.height = materials.shift_to_height(phase_shift, self.material,
self.design_energy) # [um]
self.phase_shift = phase_shift
else:
raise Exception('Neither height of grating nor phase shift are '
'defined.')
class AbsorptionGrating(Grating):
"""
Child class from Grating class, adds absorption properties.
Parameters
==========
absorption: required percentage of absorbed x-rays at design energy;
default=0 (no absorption specified)
Notes
=====
Either a grating height or a required absorption needs to be defined, the
other is calculated accordingly.
Examples
========
"""
def __init__(self, pitch, material, design_energy, height=0,
duty_cycle=0.5, shape='flat', absorption=0):
# call init from parent class
super(AbsorptionGrating, self).__init__(pitch, material,
design_energy, height,
duty_cycle, shape)
# Calculate height or absorption respectively
if self.height:
self.absorption = materials.height_to_absorption(self.height,
self.material,
self.
design_energy)
# [%]
elif absorption:
self.height = materials.absorption_to_height(absorption,
self.material,
self.
design_energy) # [um]
self.absorption = absorption # [%]
else:
raise Exception('Neither height of grating nor absorption are '
'defined.')
| [
"logging.getLogger",
"simulation.materials.shift_to_height",
"simulation.materials.height_to_absorption",
"simulation.materials.absorption_to_height",
"sys.path.append",
"simulation.materials.height_to_shift"
] | [((50, 71), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (65, 71), False, 'import sys\n'), ((184, 211), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (201, 211), False, 'import logging\n'), ((1811, 1884), 'simulation.materials.height_to_shift', 'materials.height_to_shift', (['self.height', 'self.material', 'self.design_energy'], {}), '(self.height, self.material, self.design_energy)\n', (1836, 1884), True, 'import simulation.materials as materials\n'), ((3262, 3340), 'simulation.materials.height_to_absorption', 'materials.height_to_absorption', (['self.height', 'self.material', 'self.design_energy'], {}), '(self.height, self.material, self.design_energy)\n', (3292, 3340), True, 'import simulation.materials as materials\n'), ((2051, 2124), 'simulation.materials.shift_to_height', 'materials.shift_to_height', (['phase_shift', 'self.material', 'self.design_energy'], {}), '(phase_shift, self.material, self.design_energy)\n', (2076, 2124), True, 'import simulation.materials as materials\n'), ((3594, 3671), 'simulation.materials.absorption_to_height', 'materials.absorption_to_height', (['absorption', 'self.material', 'self.design_energy'], {}), '(absorption, self.material, self.design_energy)\n', (3624, 3671), True, 'import simulation.materials as materials\n')] |
from copy import deepcopy
from typing import Dict, Iterable, List, Optional, Tuple, Union
import fastjsonschema
from ..shared.exceptions import ActionException, DatastoreException, EventStoreException
from ..shared.handlers.base_handler import BaseHandler
from ..shared.interfaces.write_request_element import WriteRequestElement
from ..shared.schema import schema_version
from .action import merge_write_request_elements
from .relations.relation_manager import RelationManager
from .util.actions_map import actions_map
from .util.typing import (
ActionResponse,
ActionResponseResults,
ActionResponseResultsElement,
Payload,
)
payload_schema = fastjsonschema.compile(
{
"$schema": schema_version,
"title": "Schema for action API",
"description": "An array of actions.",
"type": "array",
"items": {
"type": "object",
"properties": {
"action": {
"description": "Name of the action to be performed on the server",
"type": "string",
"minLength": 1,
},
"data": {
"description": "Data for the action (array)",
"type": "array",
"items": {"type": "object"},
},
},
"required": ["action", "data"],
"additionalProperties": False,
},
}
)
class ActionHandler(BaseHandler):
"""
Action handler. It is the concret implementation of Action interface.
"""
MAX_RETRY = 3
@classmethod
def get_actions_dev_status(cls) -> Iterable[Tuple[str, Union[str, Dict]]]:
"""
Returns name and development status of all actions
"""
for name, action in actions_map.items():
if getattr(action, "is_dummy", False):
yield name, "Not implemented"
else:
yield name, action.schema
def handle_request(self, payload: Payload, user_id: int) -> ActionResponse:
"""
Takes payload and user id and handles this request by validating and
parsing all actions. In the end it sends everything to the event store.
"""
self.user_id = user_id
# Validate payload of request
try:
self.validate(payload)
except fastjsonschema.JsonSchemaException as exception:
raise ActionException(exception.message)
retried = 0
payload_copy = deepcopy(payload)
while True:
# Parse actions and creates events
write_request_element, results = self.parse_actions(payload)
# Send events to datastore
if write_request_element:
try:
self.datastore.write(write_request_element)
except DatastoreException as exception:
retried += 1
payload = deepcopy(payload_copy)
if retried > self.MAX_RETRY:
raise ActionException(exception.message)
continue
except EventStoreException as exception:
raise ActionException(exception.message)
break
# Return action result
# TODO: This is a fake result because in this place all actions were
# always successful.
self.logger.debug("Request was successful. Send response now.")
return ActionResponse(
success=True, message="Actions handled successfully", results=results
)
def validate(self, payload: Payload) -> None:
"""
Validates actions requests sent by client. Raises JsonSchemaException if
input is invalid.
"""
self.logger.debug("Validate actions request.")
payload_schema(payload)
def parse_actions(
self, payload: Payload
) -> Tuple[Optional[WriteRequestElement], ActionResponseResults]:
"""
Parses actions request send by client. Raises ActionException or
PermissionDenied if something went wrong.
"""
all_write_request_elements: List[WriteRequestElement] = []
all_action_response_results: ActionResponseResults = []
relation_manager = RelationManager(self.datastore)
for element in payload:
action_name = element["action"]
ActionClass = actions_map.get(action_name)
if ActionClass is None or ActionClass.internal:
raise ActionException(f"Action {action_name} does not exist.")
self.logger.debug(f"Perform action {action_name}.")
action = ActionClass(self.services, relation_manager)
action_results = action.perform(element["data"], self.user_id)
response_elements: List[Optional[ActionResponseResultsElement]] = []
for item in action_results:
if isinstance(item, WriteRequestElement):
self.logger.debug(f"Prepared write request element {item}.")
all_write_request_elements.append(item)
else:
# item = cast(ActionResponseResultsElement, item)
self.logger.debug(f"Got action response element {item}.")
response_elements.append(item)
all_action_response_results.append(response_elements or None)
self.logger.debug("Write request is ready.")
return (
merge_write_request_elements(all_write_request_elements),
all_action_response_results,
)
| [
"fastjsonschema.compile",
"copy.deepcopy"
] | [((663, 1157), 'fastjsonschema.compile', 'fastjsonschema.compile', (["{'$schema': schema_version, 'title': 'Schema for action API', 'description':\n 'An array of actions.', 'type': 'array', 'items': {'type': 'object',\n 'properties': {'action': {'description':\n 'Name of the action to be performed on the server', 'type': 'string',\n 'minLength': 1}, 'data': {'description': 'Data for the action (array)',\n 'type': 'array', 'items': {'type': 'object'}}}, 'required': ['action',\n 'data'], 'additionalProperties': False}}"], {}), "({'$schema': schema_version, 'title':\n 'Schema for action API', 'description': 'An array of actions.', 'type':\n 'array', 'items': {'type': 'object', 'properties': {'action': {\n 'description': 'Name of the action to be performed on the server',\n 'type': 'string', 'minLength': 1}, 'data': {'description':\n 'Data for the action (array)', 'type': 'array', 'items': {'type':\n 'object'}}}, 'required': ['action', 'data'], 'additionalProperties': \n False}})\n", (685, 1157), False, 'import fastjsonschema\n'), ((2517, 2534), 'copy.deepcopy', 'deepcopy', (['payload'], {}), '(payload)\n', (2525, 2534), False, 'from copy import deepcopy\n'), ((2957, 2979), 'copy.deepcopy', 'deepcopy', (['payload_copy'], {}), '(payload_copy)\n', (2965, 2979), False, 'from copy import deepcopy\n')] |
import unittest as ut
from tests._utils._timer import Timer
from drivelink import cached
#from Process import freeze_support
def uncachedFib(a):
if a in [0, 1]:
return a
if a < 0:
raise Exception("Reverse fibonacci sequence not implemented.")
return uncachedFib(a - 1) + uncachedFib(a - 2)
def test_fib():
assert uncachedFib(0) == 0
assert uncachedFib(1) == 1
assert uncachedFib(2) == 1
assert uncachedFib(3) == 2
assert uncachedFib(4) == 3
assert uncachedFib(5) == 5
class cachedTest(ut.TestCase):
c = None
def setUp(self):
@cached(self.id(), 1, 1)
def fib(a):
if a in [0, 1]:
return a
if a < 0:
raise Exception("Reverse fibonacci sequence not implemented.")
return fib(a - 1) + fib(a - 2)
self.c = fib
def test_fib(self):
self.assertEqual(
self.c(0), 0, "The zeroth element of the Fibonnaci sequence is 0, not {0}.".format(str(self.c(0))))
self.assertEqual(
self.c(1), 1, "The first element of the Fibonnaci sequence is 1, not {0}.".format(str(self.c(1))))
self.assertEqual(
self.c(2), 1, "The second element of the Fibonnaci sequence is 1, not {0}.".format(str(self.c(2))))
self.assertEqual(
self.c(3), 2, "The third element of the Fibonnaci sequence is 2, not {0}.".format(str(self.c(3))))
self.assertEqual(
self.c(4), 3, "The fourth element of the Fibonnaci sequence is 3, not {0}.".format(str(self.c(4))))
self.assertEqual(
self.c(5), 5, "The fifth element of the Fibonnaci sequence is 5, not {0}.".format(str(self.c(5))))
def test_init(self):
self.assertEqual(len(self.c.c), 0, "The cache was malformed.")
self.assertEqual(self.c.f(0), uncachedFib(0), "The function was not entered correctly.")
def test_cache(self):
i = self.c(0)
self.assertEqual(len(self.c.c), 1, "The value was not cached properly.")
self.assertEqual(self.c(0), i, "The cached answer was incorrect.")
def test_speed(self):
with Timer() as t1:
_ = uncachedFib(32)
self.c.n = -1
with Timer() as t2:
_ = self.c(32)
self.assertTrue(t2.interval < t1.interval,
"There isn't a speed up... This is useless then, I suppose.")
with Timer() as t1:
_ = self.c(32)
self.assertTrue(t2.interval > t1.interval,
"There isn't a speed up... This is useless then, I suppose.")
if __name__ == '__main__':
freeze_support()
ut.main()
| [
"unittest.main",
"tests._utils._timer.Timer"
] | [((2660, 2669), 'unittest.main', 'ut.main', ([], {}), '()\n', (2667, 2669), True, 'import unittest as ut\n'), ((2153, 2160), 'tests._utils._timer.Timer', 'Timer', ([], {}), '()\n', (2158, 2160), False, 'from tests._utils._timer import Timer\n'), ((2235, 2242), 'tests._utils._timer.Timer', 'Timer', ([], {}), '()\n', (2240, 2242), False, 'from tests._utils._timer import Timer\n'), ((2427, 2434), 'tests._utils._timer.Timer', 'Timer', ([], {}), '()\n', (2432, 2434), False, 'from tests._utils._timer import Timer\n')] |
from billy.scrape.actions import Rule, BaseCategorizer
# These are regex patterns that map to action categories.
_categorizer_rules = (
Rule((r'\(Ayes (?P<yes_votes>\d+)\.\s+Noes\s+'
r'(?P<no_votes>\d+)\.( Page \S+\.)?\)')),
Rule(r'^Introduced', 'bill:introduced'),
Rule(r'(?i)Referred to (?P<committees>.+)', 'committee:referred'),
Rule(r'(?i)Referred to (?P<committees>.+?)(\.\s+suspense)',
'committee:referred'),
Rule(r're-refer to Standing (?P<committees>[^.]+)\.',
'committee:referred'),
Rule(r'Read first time\.', 'bill:reading:1'),
Rule(r'Read second time and amended',
['bill:reading:2']),
Rule(r'Read third time', 'bill:reading:3'),
Rule(r'Read third time. Refused passage\.',
'bill:failed'),
Rule([r'(?i)read third time.{,5}passed',
r'(?i)Read third time.+?Passed'],
['bill:passed', 'bill:reading:3']),
Rule(r'Approved by the Governor', 'governor:signed'),
Rule(r'Approved by the Governor with item veto',
'governor:vetoed:line-item'),
Rule('Vetoed by Governor', 'governor:vetoed'),
Rule(r'To Governor', 'governor:received'),
Rule(r'amendments concurred in', 'amendment:passed'),
Rule(r'refused to concur in Assembly amendments', 'amendment:failed'),
Rule(r'Failed passage in committee', 'committee:failed'),
Rule(r'(?i)From committee', 'committee:passed'),
Rule(r'(?i)From committee: Do pass', 'committee:passed:favorable'),
Rule(r'From committee with author\'s amendments', 'committee:passed'),
# Resolutions
Rule(r'Adopted', 'bill:passed'),
Rule(r'Read', 'bill:reading:1'),
Rule(r'^From committee: Be adopted', 'committee:passed:favorable'),
)
class CACategorizer(BaseCategorizer):
rules = _categorizer_rules
| [
"billy.scrape.actions.Rule"
] | [((143, 243), 'billy.scrape.actions.Rule', 'Rule', (['"""\\\\(Ayes (?P<yes_votes>\\\\d+)\\\\.\\\\s+Noes\\\\s+(?P<no_votes>\\\\d+)\\\\.( Page \\\\S+\\\\.)?\\\\)"""'], {}), "(\n '\\\\(Ayes (?P<yes_votes>\\\\d+)\\\\.\\\\s+Noes\\\\s+(?P<no_votes>\\\\d+)\\\\.( Page \\\\S+\\\\.)?\\\\)'\n )\n", (147, 243), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((247, 285), 'billy.scrape.actions.Rule', 'Rule', (['"""^Introduced"""', '"""bill:introduced"""'], {}), "('^Introduced', 'bill:introduced')\n", (251, 285), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((293, 357), 'billy.scrape.actions.Rule', 'Rule', (['"""(?i)Referred to (?P<committees>.+)"""', '"""committee:referred"""'], {}), "('(?i)Referred to (?P<committees>.+)', 'committee:referred')\n", (297, 357), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((364, 450), 'billy.scrape.actions.Rule', 'Rule', (['"""(?i)Referred to (?P<committees>.+?)(\\\\.\\\\s+suspense)"""', '"""committee:referred"""'], {}), "('(?i)Referred to (?P<committees>.+?)(\\\\.\\\\s+suspense)',\n 'committee:referred')\n", (368, 450), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((460, 535), 'billy.scrape.actions.Rule', 'Rule', (['"""re-refer to Standing (?P<committees>[^.]+)\\\\."""', '"""committee:referred"""'], {}), "('re-refer to Standing (?P<committees>[^.]+)\\\\.', 'committee:referred')\n", (464, 535), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((551, 595), 'billy.scrape.actions.Rule', 'Rule', (['"""Read first time\\\\."""', '"""bill:reading:1"""'], {}), "('Read first time\\\\.', 'bill:reading:1')\n", (555, 595), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((601, 657), 'billy.scrape.actions.Rule', 'Rule', (['"""Read second time and amended"""', "['bill:reading:2']"], {}), "('Read second time and amended', ['bill:reading:2'])\n", (605, 657), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((674, 
715), 'billy.scrape.actions.Rule', 'Rule', (['"""Read third time"""', '"""bill:reading:3"""'], {}), "('Read third time', 'bill:reading:3')\n", (678, 715), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((722, 780), 'billy.scrape.actions.Rule', 'Rule', (['"""Read third time. Refused passage\\\\."""', '"""bill:failed"""'], {}), "('Read third time. Refused passage\\\\.', 'bill:failed')\n", (726, 780), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((795, 907), 'billy.scrape.actions.Rule', 'Rule', (["['(?i)read third time.{,5}passed', '(?i)Read third time.+?Passed']", "['bill:passed', 'bill:reading:3']"], {}), "(['(?i)read third time.{,5}passed', '(?i)Read third time.+?Passed'], [\n 'bill:passed', 'bill:reading:3'])\n", (799, 907), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((930, 981), 'billy.scrape.actions.Rule', 'Rule', (['"""Approved by the Governor"""', '"""governor:signed"""'], {}), "('Approved by the Governor', 'governor:signed')\n", (934, 981), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((988, 1064), 'billy.scrape.actions.Rule', 'Rule', (['"""Approved by the Governor with item veto"""', '"""governor:vetoed:line-item"""'], {}), "('Approved by the Governor with item veto', 'governor:vetoed:line-item')\n", (992, 1064), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1080, 1125), 'billy.scrape.actions.Rule', 'Rule', (['"""Vetoed by Governor"""', '"""governor:vetoed"""'], {}), "('Vetoed by Governor', 'governor:vetoed')\n", (1084, 1125), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1131, 1171), 'billy.scrape.actions.Rule', 'Rule', (['"""To Governor"""', '"""governor:received"""'], {}), "('To Governor', 'governor:received')\n", (1135, 1171), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1179, 1230), 'billy.scrape.actions.Rule', 'Rule', (['"""amendments concurred in"""', '"""amendment:passed"""'], {}), 
"('amendments concurred in', 'amendment:passed')\n", (1183, 1230), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1237, 1305), 'billy.scrape.actions.Rule', 'Rule', (['"""refused to concur in Assembly amendments"""', '"""amendment:failed"""'], {}), "('refused to concur in Assembly amendments', 'amendment:failed')\n", (1241, 1305), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1313, 1368), 'billy.scrape.actions.Rule', 'Rule', (['"""Failed passage in committee"""', '"""committee:failed"""'], {}), "('Failed passage in committee', 'committee:failed')\n", (1317, 1368), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1375, 1421), 'billy.scrape.actions.Rule', 'Rule', (['"""(?i)From committee"""', '"""committee:passed"""'], {}), "('(?i)From committee', 'committee:passed')\n", (1379, 1421), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1428, 1493), 'billy.scrape.actions.Rule', 'Rule', (['"""(?i)From committee: Do pass"""', '"""committee:passed:favorable"""'], {}), "('(?i)From committee: Do pass', 'committee:passed:favorable')\n", (1432, 1493), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1500, 1569), 'billy.scrape.actions.Rule', 'Rule', (['"""From committee with author\\\\\'s amendments"""', '"""committee:passed"""'], {}), '("From committee with author\\\\\'s amendments", \'committee:passed\')\n', (1504, 1569), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1594, 1624), 'billy.scrape.actions.Rule', 'Rule', (['"""Adopted"""', '"""bill:passed"""'], {}), "('Adopted', 'bill:passed')\n", (1598, 1624), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1631, 1661), 'billy.scrape.actions.Rule', 'Rule', (['"""Read"""', '"""bill:reading:1"""'], {}), "('Read', 'bill:reading:1')\n", (1635, 1661), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n'), ((1668, 1733), 'billy.scrape.actions.Rule', 'Rule', (['"""^From 
committee: Be adopted"""', '"""committee:passed:favorable"""'], {}), "('^From committee: Be adopted', 'committee:passed:favorable')\n", (1672, 1733), False, 'from billy.scrape.actions import Rule, BaseCategorizer\n')] |
# -*- coding: utf-8 -*-
from __future__ import with_statement, unicode_literals
import pytest
from ziggurat_foundations.models.services.group_permission import GroupPermissionService
from ziggurat_foundations.models.services.group_resource_permission import (
GroupResourcePermissionService,
)
from ziggurat_foundations.models.services.user_permission import UserPermissionService
from ziggurat_foundations.models.services.user_resource_permission import (
UserResourcePermissionService,
)
from ziggurat_foundations.models.services.resource import ResourceService
from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS
from ziggurat_foundations.tests import (
add_user,
check_one_in_other,
add_resource,
add_resource_b,
add_group,
BaseTestCase,
)
from ziggurat_foundations.tests.conftest import (
User,
UserPermission,
GroupPermission,
UserResourcePermission,
GroupResourcePermission,
ResourceTestobjB,
)
from ziggurat_foundations.models.services.group import GroupService
from ziggurat_foundations.models.services.user import UserService
class TestUserPermissions(BaseTestCase):
def test_user_permissions(self, db_session):
created_user = add_user(db_session)
permissions = UserService.permissions(created_user, db_session=db_session)
expected = [
PermissionTuple(
created_user, "alter_users", "user", None, None, False, True
),
PermissionTuple(created_user, "root", "user", None, None, False, True),
]
check_one_in_other(permissions, expected)
def test_owned_permissions(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
created_user.resources.append(resource)
db_session.flush()
resources = UserService.resources_with_perms(
created_user, ["test_perm"], db_session=db_session
).all()
assert resources[0] == resource
permission = ResourceService.direct_perms_for_user(resource, created_user)[0]
assert permission.owner is True
assert permission.allowed is True
assert permission.user.id == created_user.id
def test_resources_with_perm(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
permission = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource.resource_id,
)
resource.user_permissions.append(permission)
db_session.flush()
resources = UserService.resources_with_perms(
created_user, ["test_perm"], db_session=db_session
).all()
assert resources[0] == resource
def test_mixed_perms(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
permission = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource.resource_id,
)
resource.user_permissions.append(permission)
resource2 = add_resource(db_session, 2, "test_resource")
created_user.resources.append(resource2)
add_resource(db_session, 3, "test_resource")
add_resource_b(db_session, 4, "test_resource")
db_session.flush()
resources = UserService.resources_with_perms(
created_user, ["test_perm"], db_session=db_session
).all()
found_ids = [r.resource_id for r in resources]
assert sorted(found_ids) == [1, 2]
def test_resources_with_perm_type_found(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
permission = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource.resource_id,
)
resource.user_permissions.append(permission)
db_session.flush()
resources = UserService.resources_with_perms(
created_user,
["test_perm"],
resource_types=["test_resource"],
db_session=db_session,
).all()
assert resources[0] == resource
def test_resources_with_perm_type_not_found(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
permission = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource.resource_id,
)
resource.user_permissions.append(permission)
db_session.flush()
resources = UserService.resources_with_perms(
created_user,
["test_perm"],
resource_types=["test_resource_b"],
db_session=db_session,
).all()
assert resources == []
def test_resources_with_perm_type_other_found(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
resource2 = add_resource_b(db_session, 2, "test_resource")
resource3 = add_resource(db_session, 3, "test_resource")
resource4 = add_resource_b(db_session, 4, "test_resource")
db_session.flush()
permission = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource.resource_id,
)
resource.user_permissions.append(permission)
permission2 = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource2.resource_id,
)
resource2.user_permissions.append(permission2)
permission3 = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource3.resource_id,
)
resource3.user_permissions.append(permission3)
permission4 = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource4.resource_id,
)
resource4.user_permissions.append(permission4)
db_session.flush()
resources = UserService.resources_with_perms(
created_user,
["test_perm"],
resource_types=["test_resource_b"],
db_session=db_session,
).all()
assert len(resources) == 2
def test_resources_with_wrong_perm(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
permission = UserResourcePermission(
perm_name="test_perm_bad",
user_id=created_user.id,
resource_id=resource.resource_id,
)
with pytest.raises(AssertionError):
resource.user_permissions.append(permission)
def test_multiple_resources_with_perm(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
permission = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource.resource_id,
)
resource.user_permissions.append(permission)
resource2 = add_resource(db_session, 2, "test_resource2")
permission2 = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource2.resource_id,
)
resource2.user_permissions.append(permission2)
resources = UserService.resources_with_perms(
created_user, ["test_perm"], db_session=db_session
).all()
assert resources == [resource, resource2]
def test_resources_ids_with_perm(self, db_session):
created_user = add_user(db_session)
resource1 = add_resource(db_session, 1, "test_resource1")
resource2 = add_resource(db_session, 2, "test_resource2")
resource3 = add_resource(db_session, 3, "test_resource3")
permission1 = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource1.resource_id,
)
permission2 = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource2.resource_id,
)
permission3 = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource3.resource_id,
)
resource1.user_permissions.append(permission1)
resource2.user_permissions.append(permission2)
resource3.user_permissions.append(permission3)
db_session.flush()
resources = UserService.resources_with_perms(
created_user, ["test_perm"], resource_ids=[1, 3], db_session=db_session
).all()
assert resources == [resource1, resource3]
def test_resources_with_wrong_group_permission(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
group = add_group(db_session)
group.users.append(created_user)
group_permission = GroupResourcePermission(
perm_name="test_perm_bad",
group_id=group.id,
resource_id=resource.resource_id,
)
with pytest.raises(AssertionError):
resource.group_permissions.append(group_permission)
def test_resources_with_group_permission(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
resource2 = add_resource(db_session, 2, "test_resource2")
add_resource(db_session, 3, "test_resource3")
group = add_group(db_session)
group.users.append(created_user)
group_permission = GroupResourcePermission(
perm_name="test_perm", group_id=1, resource_id=resource.resource_id
)
group_permission2 = GroupResourcePermission(
perm_name="foo_perm", group_id=1, resource_id=resource2.resource_id
)
resource.group_permissions.append(group_permission)
resource2.group_permissions.append(group_permission2)
db_session.flush()
resources = UserService.resources_with_perms(
created_user, ["foo_perm"], db_session=db_session
).all()
assert resources[0] == resource2
def test_resources_with_direct_user_perms(self, db_session):
self.set_up_user_group_and_perms(db_session)
# test_perm1 from group perms should be ignored
perms = ResourceService.direct_perms_for_user(
self.resource, self.user, db_session=db_session
)
second = [
PermissionTuple(
self.user, "foo_perm", "user", None, self.resource, False, True
),
PermissionTuple(
self.user, "test_perm2", "user", None, self.resource, False, True
),
]
check_one_in_other(perms, second)
def test_resources_with_direct_group_perms(self, db_session):
self.set_up_user_group_and_perms(db_session)
# test_perm1 from group perms should be ignored
perms = ResourceService.group_perms_for_user(
self.resource, self.user, db_session=db_session
)
second = [
PermissionTuple(
self.user, "group_perm", "group", self.group, self.resource, False, True
)
]
check_one_in_other(perms, second)
def test_resources_with_user_perms(self, db_session):
self.maxDiff = 9999
self.set_up_user_group_and_perms(db_session)
perms = ResourceService.perms_for_user(
self.resource, self.user, db_session=db_session
)
second = [
PermissionTuple(
self.user, "foo_perm", "user", None, self.resource, False, True
),
PermissionTuple(
self.user, "group_perm", "group", self.group, self.resource, False, True
),
PermissionTuple(
self.user, "test_perm2", "user", None, self.resource, False, True
),
]
check_one_in_other(perms, second)
def test_resource_users_for_perm(self, db_session):
self.set_up_user_group_and_perms(db_session)
perms = ResourceService.users_for_perm(
self.resource, "foo_perm", db_session=db_session
)
second = [
PermissionTuple(
self.user, "foo_perm", "user", None, self.resource, False, True
)
]
check_one_in_other(perms, second)
    def test_resource_users_for_any_perm(self, db_session):
        """The ``__any_permission__`` wildcard lists every user holding any perm."""
        self.maxDiff = 99999
        self.set_up_user_group_and_perms(db_session)
        perms = ResourceService.users_for_perm(
            self.resource, "__any_permission__", db_session=db_session
        )
        # self.user holds direct and group perms; user4 qualifies via group2
        second = [
            PermissionTuple(
                self.user, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "test_perm2", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "foo_perm", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user4,
                "group_perm",
                "group",
                self.group2,
                self.resource,
                False,
                True,
            ),
        ]
        check_one_in_other(perms, second)
def test_resource_users_for_any_perm_resource_2(self, db_session):
self.set_up_user_group_and_perms(db_session)
perms = ResourceService.users_for_perm(
self.resource2, "__any_permission__", db_session=db_session
)
second = [
PermissionTuple(
self.user2, "foo_perm", "user", None, self.resource2, False, True
),
PermissionTuple(
self.user3, "test_perm", "user", None, self.resource2, False, True
),
]
check_one_in_other(perms, second)
    def test_resource_users_limited_users(self, db_session):
        """``user_ids`` filter narrows the wildcard lookup to the given users."""
        self.maxDiff = 9999
        self.set_up_user_group_and_perms(db_session)
        perms = ResourceService.users_for_perm(
            self.resource,
            "__any_permission__",
            user_ids=[self.user.id],
            db_session=db_session,
        )
        # only self.user's perms remain; user4's group2 grant is filtered out
        second = [
            PermissionTuple(
                self.user, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "test_perm2", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "foo_perm", "user", None, self.resource, False, True
            ),
        ]
        check_one_in_other(perms, second)
    def test_resource_users_limited_group(self, db_session):
        """Combining ``user_ids`` and ``group_ids`` keeps only matching grants."""
        self.maxDiff = 9999
        self.set_up_user_group_and_perms(db_session)
        perms = ResourceService.users_for_perm(
            self.resource,
            "__any_permission__",
            user_ids=[self.user.id],
            group_ids=[self.group2.id],
            db_session=db_session,
        )
        # group_perm via self.group is absent because only group2 is allowed here
        second = [
            PermissionTuple(
                self.user, "test_perm2", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "foo_perm", "user", None, self.resource, False, True
            ),
        ]
        check_one_in_other(perms, second)
def test_resource_users_limited_group_other_user_3(self, db_session):
self.maxDiff = 9999
self.set_up_user_group_and_perms(db_session)
perms = ResourceService.users_for_perm(
self.resource2,
"__any_permission__",
user_ids=[self.user3.id],
db_session=db_session,
)
second = [
PermissionTuple(
self.user3, "test_perm", "user", None, self.resource2, False, True
)
]
check_one_in_other(perms, second)
    def test_resource_users_limited_group_other_user_4(self, db_session):
        """Filtering to user4 and group2 yields only user4's group-based grant."""
        self.maxDiff = 9999
        self.set_up_user_group_and_perms(db_session)
        perms = ResourceService.users_for_perm(
            self.resource,
            "__any_permission__",
            user_ids=[self.user4.id],
            group_ids=[self.group2.id],
            db_session=db_session,
        )
        second = [
            PermissionTuple(
                self.user4,
                "group_perm",
                "group",
                self.group2,
                self.resource,
                False,
                True,
            )
        ]
        check_one_in_other(perms, second)
    def test_resource_users_limited_group_ownage(self, db_session):
        """Owners (direct and via owning group) surface with ALL_PERMISSIONS."""
        self.maxDiff = 9999
        self.set_up_user_group_and_perms(db_session)
        resource = ResourceTestobjB(
            resource_id=99, resource_name="other", owner_user_id=self.user2.id
        )
        group3 = add_group(db_session, "group 3")
        user2_permission = UserResourcePermission(
            perm_name="foo_perm", user_id=self.user2.id
        )
        group3_permission = GroupResourcePermission(
            perm_name="group_perm", group_id=group3.id
        )
        resource.group_permissions.append(group3_permission)
        resource.user_permissions.append(user2_permission)
        group3.users.append(self.user3)
        # self.user owns the resource directly; group2 owns it as a group
        self.user.resources.append(resource)
        self.group2.resources.append(resource)
        db_session.flush()
        perms = ResourceService.users_for_perm(
            resource, "__any_permission__", db_session=db_session
        )
        second = [
            PermissionTuple(
                self.user2, "foo_perm", "user", None, resource, False, True
            ),
            PermissionTuple(
                self.user, ALL_PERMISSIONS, "user", None, resource, True, True
            ),
            PermissionTuple(
                self.user4, ALL_PERMISSIONS, "group", self.group2, resource, True, True
            ),
            PermissionTuple(
                self.user3, "group_perm", "group", group3, resource, False, True
            ),
        ]
        check_one_in_other(perms, second)
    def test_users_for_perms(self, db_session):
        """users_for_perms matches direct user perms and group-inherited ones."""
        user = User(user_name="aaa", email="aaa", status=0)
        UserService.set_password(user, "password")
        aaa_perm = UserPermission(perm_name="aaa")
        bbb_perm = UserPermission(perm_name="bbb")
        bbb2_perm = UserPermission(perm_name="bbb")
        user.user_permissions.append(aaa_perm)
        user.user_permissions.append(bbb_perm)
        user2 = User(user_name="bbb", email="bbb", status=0)
        UserService.set_password(user2, "password")
        user2.user_permissions.append(bbb2_perm)
        user3 = User(user_name="ccc", email="ccc", status=0)
        UserService.set_password(user3, "password")
        # user3 has no direct perms; the final assert implies it matches
        # "manage_apps" via group membership (see add_group fixture)
        group = add_group(db_session)
        group.users.append(user3)
        db_session.add(user)
        db_session.add(user2)
        db_session.flush()
        users = UserService.users_for_perms(["aaa"], db_session=db_session)
        assert len(users.all()) == 1
        assert users[0].user_name == "aaa"
        users = UserService.users_for_perms(["bbb"], db_session=db_session).all()
        assert len(users) == 2
        assert ["aaa", "bbb"] == sorted([u.user_name for u in users])
        users = UserService.users_for_perms(
            ["aaa", "bbb", "manage_apps"], db_session=db_session
        )
        assert ["aaa", "bbb", "ccc"] == sorted([u.user_name for u in users])
    def test_resources_with_possible_perms(self, db_session):
        """resources_with_possible_perms reports owned resources as ALL_PERMISSIONS."""
        self.set_up_user_group_and_perms(db_session)
        resource = ResourceTestobjB(
            resource_id=3, resource_name="other", owner_user_id=self.user.id
        )
        self.user.resources.append(resource)
        resource_g = ResourceTestobjB(resource_id=4, resource_name="group owned")
        self.group.resources.append(resource_g)
        db_session.flush()
        perms = UserService.resources_with_possible_perms(
            self.user, db_session=db_session
        )
        second = [
            PermissionTuple(
                self.user, "foo_perm", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "test_perm2", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, ALL_PERMISSIONS, "user", None, resource, True, True
            ),
            PermissionTuple(
                self.user, ALL_PERMISSIONS, "group", self.group, resource_g, True, True
            ),
        ]
        check_one_in_other(perms, second)
    def test_resource_users_for_any_perm_additional_users(self, db_session):
        """Every group member holds each permission granted to the group."""
        self.maxDiff = 99999
        self.set_up_user_group_and_perms(db_session)
        user6 = add_user(db_session, 6, "user 6")
        user7 = add_user(db_session, 7, "user 7")
        perm2 = GroupResourcePermission(
            perm_name="group_perm2", resource_id=self.resource.resource_id
        )
        self.group.resource_permissions.append(perm2)
        self.group.users.append(user6)
        self.group.users.append(user7)
        perms = ResourceService.users_for_perm(
            self.resource, "__any_permission__", db_session=db_session
        )
        # group perms expand to one tuple per (member, permission) pair
        second = [
            PermissionTuple(
                self.user, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                user6, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                user7, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user,
                "group_perm2",
                "group",
                self.group,
                self.resource,
                False,
                True,
            ),
            PermissionTuple(
                user6, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                user7, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "test_perm2", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "foo_perm", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user4,
                "group_perm",
                "group",
                self.group2,
                self.resource,
                False,
                True,
            ),
        ]
        check_one_in_other(perms, second)
    def test_resource_users_for_any_perm_limited_group_perms(self, db_session):
        """With limit_group_permissions, group grants collapse to one tuple per perm."""
        self.maxDiff = 99999
        self.set_up_user_group_and_perms(db_session)
        user6 = add_user(db_session, 6, "user 6")
        user7 = add_user(db_session, 7, "user 7")
        perm2 = GroupResourcePermission(
            perm_name="group_perm2", resource_id=self.resource.resource_id
        )
        self.group.resource_permissions.append(perm2)
        self.group.users.append(user6)
        self.group.users.append(user7)
        perms = ResourceService.users_for_perm(
            self.resource,
            "__any_permission__",
            limit_group_permissions=True,
            db_session=db_session,
        )
        # group tuples carry user=None instead of being expanded per member
        second = [
            PermissionTuple(
                None, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "test_perm2", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "foo_perm", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm", "group", self.group2, self.resource, False, True
            ),
        ]
        check_one_in_other(perms, second)
    def test_resource_groups_for_any_perm_additional_users(self, db_session):
        """groups_for_perm expands group grants to every member by default."""
        self.maxDiff = 99999
        self.set_up_user_group_and_perms(db_session)
        user6 = add_user(db_session, 6, "user 6")
        user7 = add_user(db_session, 7, "user 7")
        perm2 = GroupResourcePermission(
            perm_name="group_perm2", resource_id=self.resource.resource_id
        )
        self.group.resource_permissions.append(perm2)
        self.group.users.append(user6)
        self.group.users.append(user7)
        perms = ResourceService.groups_for_perm(
            self.resource, "__any_permission__", db_session=db_session
        )
        second = [
            PermissionTuple(
                self.user, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                user6, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                user7, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user,
                "group_perm2",
                "group",
                self.group,
                self.resource,
                False,
                True,
            ),
            PermissionTuple(
                user6, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                user7, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user4,
                "group_perm",
                "group",
                self.group2,
                self.resource,
                False,
                True,
            ),
        ]
        check_one_in_other(perms, second)
    def test_resource_groups_for_any_perm_just_group_perms_limited(self, db_session):
        """limit_group_permissions yields one user=None tuple per group grant."""
        self.maxDiff = 99999
        self.set_up_user_group_and_perms(db_session)
        user6 = add_user(db_session, 6, "user 6")
        user7 = add_user(db_session, 7, "user 7")
        perm2 = GroupResourcePermission(
            perm_name="group_perm2", resource_id=self.resource.resource_id
        )
        self.group.resource_permissions.append(perm2)
        self.group.users.append(user6)
        self.group.users.append(user7)
        perms = ResourceService.groups_for_perm(
            self.resource,
            "__any_permission__",
            limit_group_permissions=True,
            db_session=db_session,
        )
        second = [
            PermissionTuple(
                None, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm", "group", self.group2, self.resource, False, True
            ),
        ]
        check_one_in_other(perms, second)
def test_resource_users_for_any_perm_excluding_group_perms(self, db_session):
self.maxDiff = 99999
self.set_up_user_group_and_perms(db_session)
user6 = add_user(db_session, 6, "user 6")
user7 = add_user(db_session, 7, "user 7")
perm2 = GroupResourcePermission(
perm_name="group_perm2", resource_id=self.resource.resource_id
)
self.group.resource_permissions.append(perm2)
self.group.users.append(user6)
self.group.users.append(user7)
perms = ResourceService.users_for_perm(
self.resource,
"__any_permission__",
limit_group_permissions=True,
skip_group_perms=True,
db_session=db_session,
)
second = [
PermissionTuple(
self.user, "test_perm2", "user", None, self.resource, False, True
),
PermissionTuple(
self.user, "foo_perm", "user", None, self.resource, False, True
),
]
check_one_in_other(perms, second)
    def test_resource_groups_for_any_perm_just_group_perms_limited_empty_group(
        self, db_session
    ):
        """A memberless group with a perm still appears in groups_for_perm."""
        self.maxDiff = 99999
        self.set_up_user_group_and_perms(db_session)
        user6 = add_user(db_session, 6, "user 6")
        user7 = add_user(db_session, 7, "user 7")
        perm2 = GroupResourcePermission(
            perm_name="group_perm2", resource_id=self.resource.resource_id
        )
        self.group.resource_permissions.append(perm2)
        self.group.users.append(user6)
        self.group.users.append(user7)
        # group3 gets a perm but never any members
        group3 = add_group(db_session, "Empty group")
        perm3 = GroupResourcePermission(
            perm_name="group_permx", resource_id=self.resource.resource_id
        )
        group3.resource_permissions.append(perm3)
        perms = ResourceService.groups_for_perm(
            self.resource,
            "__any_permission__",
            limit_group_permissions=True,
            db_session=db_session,
        )
        second = [
            PermissionTuple(
                None, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm", "group", self.group2, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_permx", "group", group3, self.resource, False, True
            ),
        ]
        check_one_in_other(perms, second)
    def test_resource_users_for_any_perm_limited_group_perms_empty_group(
        self, db_session
    ):
        """users_for_perm lists a memberless group's grant with user=None."""
        self.maxDiff = 99999
        self.set_up_user_group_and_perms(db_session)
        user6 = add_user(db_session, 6, "user 6")
        user7 = add_user(db_session, 7, "user 7")
        perm2 = GroupResourcePermission(
            perm_name="group_perm2", resource_id=self.resource.resource_id
        )
        self.group.resource_permissions.append(perm2)
        self.group.users.append(user6)
        self.group.users.append(user7)
        # group3 gets a perm but never any members
        group3 = add_group(db_session, "Empty group")
        perm3 = GroupResourcePermission(
            perm_name="group_permx", resource_id=self.resource.resource_id
        )
        group3.resource_permissions.append(perm3)
        perms = ResourceService.users_for_perm(
            self.resource,
            "__any_permission__",
            limit_group_permissions=True,
            db_session=db_session,
        )
        second = [
            PermissionTuple(
                None, "group_perm", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm2", "group", self.group, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "test_perm2", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                self.user, "foo_perm", "user", None, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm", "group", self.group2, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_permx", "group", group3, self.resource, False, True
            ),
        ]
        check_one_in_other(perms, second)
def test_get_resource_permission(self, db_session):
created_user = add_user(db_session)
resource = add_resource(db_session, 1, "test_resource")
permission = UserResourcePermission(
perm_name="test_perm",
user_id=created_user.id,
resource_id=resource.resource_id,
)
resource.user_permissions.append(permission)
db_session.flush()
perm = UserResourcePermissionService.get(
user_id=created_user.id,
resource_id=resource.resource_id,
perm_name="test_perm",
db_session=db_session,
)
assert perm.perm_name == "test_perm"
assert perm.resource_id == resource.resource_id
assert perm.user_id == created_user.id
class TestGroupPermission(BaseTestCase):
    """Tests for group permissions and their service helpers."""
    def test_repr(self, db_session):
        """GroupPermission repr shows the perm name."""
        group_permission = GroupPermission(group_id=1, perm_name="perm")
        assert repr(group_permission) == "<GroupPermission: perm>"
    def test_get(self, db_session):
        """GroupPermissionService.get fetches by group id and perm name."""
        org_group = add_group(db_session, "group1")
        group = GroupPermissionService.get(
            group_id=org_group.id, perm_name="manage_apps", db_session=db_session
        )
        assert group.group_id == 1
        assert group.perm_name == "manage_apps"
    def test_by_group_and_perm(self, db_session):
        """by_group_and_perm finds an existing group permission row."""
        add_group(db_session)
        queried = GroupPermissionService.by_group_and_perm(
            1, "manage_apps", db_session=db_session
        )
        assert queried.group_id == 1
        assert queried.perm_name == "manage_apps"
    def test_by_group_and_perm_wrong_group(self, db_session):
        """A non-existent group id yields None."""
        add_group(db_session)
        queried = GroupPermissionService.by_group_and_perm(
            2, "manage_apps", db_session=db_session
        )
        assert queried is None
    def test_by_group_and_perm_wrong_perm(self, db_session):
        """An unknown perm name yields None."""
        add_group(db_session)
        queried = GroupPermissionService.by_group_and_perm(
            1, "wrong_perm", db_session=db_session
        )
        assert queried is None
    def test_resources_with_possible_perms(self, db_session):
        """Group resource perms are reported as tuples with user=None."""
        self.set_up_user_group_and_perms(db_session)
        perms = GroupService.resources_with_possible_perms(self.group)
        second = [
            PermissionTuple(
                None, "group_perm", "group", self.group, self.resource, False, True
            )
        ]
        check_one_in_other(perms, second)
    def test_resources_with_possible_perms_group2(self, db_session):
        """Group-owned resources are included with ALL_PERMISSIONS."""
        self.set_up_user_group_and_perms(db_session)
        resource3 = add_resource_b(db_session, 3, "other resource")
        self.group2.resources.append(resource3)
        group_permission2 = GroupResourcePermission(
            perm_name="group_perm2", group_id=self.group2.id
        )
        self.resource2.group_permissions.append(group_permission2)
        perms = GroupService.resources_with_possible_perms(self.group2)
        second = [
            PermissionTuple(
                None, "group_perm", "group", self.group2, self.resource, False, True
            ),
            PermissionTuple(
                None, "group_perm2", "group", self.group2, self.resource2, False, True
            ),
            PermissionTuple(
                None, ALL_PERMISSIONS, "group", self.group2, resource3, True, True
            ),
        ]
        check_one_in_other(perms, second)
    def test_group_resource_permission(self, db_session):
        """GroupResourcePermissionService.get returns None until the perm is stored."""
        self.set_up_user_group_and_perms(db_session)
        add_resource_b(db_session, 3, "other resource")
        db_session.flush()
        group_permission2 = GroupResourcePermission(
            perm_name="group_perm2", group_id=self.group2.id
        )
        row = GroupResourcePermissionService.get(
            group_id=self.group2.id,
            resource_id=self.resource2.resource_id,
            perm_name="group_perm2",
            db_session=db_session,
        )
        assert row is None
        self.resource2.group_permissions.append(group_permission2)
        row = GroupResourcePermissionService.get(
            group_id=self.group2.id,
            resource_id=self.resource2.resource_id,
            perm_name="group_perm2",
            db_session=db_session,
        )
        assert row is not None
    def test_group_resource_permission_wrong(self, db_session):
        """perm_by_group_and_perm_name returns None for an unknown perm name."""
        self.set_up_user_group_and_perms(db_session)
        perm_name = "group_permX"
        perm = ResourceService.perm_by_group_and_perm_name(
            resource_id=self.resource.resource_id,
            group_id=self.group.id,
            perm_name=perm_name,
            db_session=db_session,
        )
        assert perm is None
    def test_group_resource_permission2(self, db_session):
        """perm_by_group_and_perm_name returns the matching row when present."""
        self.set_up_user_group_and_perms(db_session)
        perm_name = "group_perm"
        perm = ResourceService.perm_by_group_and_perm_name(
            resource_id=self.resource.resource_id,
            group_id=self.group.id,
            perm_name=perm_name,
            db_session=db_session,
        )
        assert perm.group_id == self.group.id
        assert perm.resource_id == self.resource.resource_id
        assert perm.perm_name == perm_name
class TestUserPermission(BaseTestCase):
    """Tests for user permissions and UserPermissionService."""
    def test_repr(self, db_session):
        """UserPermission repr shows the perm name."""
        user_permission = UserPermission(user_id=1, perm_name="perm")
        assert repr(user_permission) == "<UserPermission: perm>"
    def test_get(self, db_session):
        """UserPermissionService.get fetches by user id and perm name."""
        user = add_user(db_session)
        perm = UserPermissionService.get(
            user_id=user.id, perm_name="root", db_session=db_session
        )
        assert perm.user_id == user.id
        assert perm.perm_name == "root"
    def test_by_user_and_perm(self, db_session):
        """by_user_and_perm finds an existing user permission row."""
        add_user(db_session)
        user_permission = UserPermissionService.by_user_and_perm(
            1, "root", db_session=db_session
        )
        assert user_permission.user_id == 1
        assert user_permission.perm_name == "root"
    def test_by_user_and_perm_wrong_username(self, db_session):
        """An unknown user id yields None."""
        add_user(db_session)
        user_permission = UserPermissionService.by_user_and_perm(
            999, "root", db_session=db_session
        )
        assert user_permission is None
    def test_by_user_and_perm_wrong_permname(self, db_session):
        """An unknown perm name yields None."""
        add_user(db_session)
        user_permission = UserPermissionService.by_user_and_perm(
            1, "wrong", db_session=db_session
        )
        assert user_permission is None
| [
"ziggurat_foundations.models.services.group_permission.GroupPermissionService.by_group_and_perm",
"ziggurat_foundations.models.services.resource.ResourceService.perms_for_user",
"ziggurat_foundations.permissions.PermissionTuple",
"ziggurat_foundations.tests.add_resource_b",
"ziggurat_foundations.tests.conft... | [((1237, 1257), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (1245, 1257), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((1280, 1340), 'ziggurat_foundations.models.services.user.UserService.permissions', 'UserService.permissions', (['created_user'], {'db_session': 'db_session'}), '(created_user, db_session=db_session)\n', (1303, 1340), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((1585, 1626), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['permissions', 'expected'], {}), '(permissions, expected)\n', (1603, 1626), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((1701, 1721), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (1709, 1721), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((1741, 1785), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (1753, 1785), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((2331, 2351), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (2339, 2351), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((2371, 2415), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (2383, 2415), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, 
add_resource_b, add_group, BaseTestCase\n'), ((2437, 2545), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource.resource_id)\n", (2459, 2545), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((2910, 2930), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (2918, 2930), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((2950, 2994), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (2962, 2994), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((3016, 3124), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource.resource_id)\n", (3038, 3124), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((3241, 3285), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(2)', '"""test_resource"""'], {}), "(db_session, 2, 'test_resource')\n", (3253, 3285), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((3343, 3387), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(3)', 
'"""test_resource"""'], {}), "(db_session, 3, 'test_resource')\n", (3355, 3387), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((3396, 3442), 'ziggurat_foundations.tests.add_resource_b', 'add_resource_b', (['db_session', '(4)', '"""test_resource"""'], {}), "(db_session, 4, 'test_resource')\n", (3410, 3442), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((3788, 3808), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (3796, 3808), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((3828, 3872), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (3840, 3872), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((3894, 4002), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource.resource_id)\n", (3916, 4002), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((4461, 4481), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (4469, 4481), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((4501, 4545), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 
'test_resource')\n", (4513, 4545), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((4567, 4675), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource.resource_id)\n", (4589, 4675), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((5129, 5149), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (5137, 5149), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((5169, 5213), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (5181, 5213), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((5234, 5280), 'ziggurat_foundations.tests.add_resource_b', 'add_resource_b', (['db_session', '(2)', '"""test_resource"""'], {}), "(db_session, 2, 'test_resource')\n", (5248, 5280), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((5301, 5345), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(3)', '"""test_resource"""'], {}), "(db_session, 3, 'test_resource')\n", (5313, 5345), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((5366, 5412), 'ziggurat_foundations.tests.add_resource_b', 'add_resource_b', (['db_session', '(4)', '"""test_resource"""'], {}), "(db_session, 
4, 'test_resource')\n", (5380, 5412), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((5461, 5569), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource.resource_id)\n", (5483, 5569), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((5688, 5797), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource2.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource2.resource_id)\n", (5710, 5797), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((5918, 6027), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource3.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource3.resource_id)\n", (5940, 6027), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((6148, 6257), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource4.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource4.resource_id)\n", (6170, 6257), False, 'from ziggurat_foundations.tests.conftest import 
User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((6706, 6726), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (6714, 6726), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((6746, 6790), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (6758, 6790), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((6812, 6924), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm_bad"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm_bad', user_id=created_user.id,\n resource_id=resource.resource_id)\n", (6834, 6924), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((7154, 7174), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (7162, 7174), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((7194, 7238), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (7206, 7238), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((7260, 7368), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', 
user_id=created_user.id,\n resource_id=resource.resource_id)\n", (7282, 7368), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((7485, 7530), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(2)', '"""test_resource2"""'], {}), "(db_session, 2, 'test_resource2')\n", (7497, 7530), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((7553, 7662), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource2.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource2.resource_id)\n", (7575, 7662), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((8024, 8044), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (8032, 8044), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((8065, 8110), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource1"""'], {}), "(db_session, 1, 'test_resource1')\n", (8077, 8110), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((8131, 8176), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(2)', '"""test_resource2"""'], {}), "(db_session, 2, 'test_resource2')\n", (8143, 8176), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((8197, 8242), 'ziggurat_foundations.tests.add_resource', 
'add_resource', (['db_session', '(3)', '"""test_resource3"""'], {}), "(db_session, 3, 'test_resource3')\n", (8209, 8242), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((8266, 8375), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource1.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource1.resource_id)\n", (8288, 8375), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((8441, 8550), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource2.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource2.resource_id)\n", (8463, 8550), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((8616, 8725), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource3.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource3.resource_id)\n", (8638, 8725), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((9262, 9282), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (9270, 9282), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((9302, 9346), 
'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (9314, 9346), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((9363, 9384), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session'], {}), '(db_session)\n', (9372, 9384), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((9453, 9560), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""test_perm_bad"""', 'group_id': 'group.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm_bad', group_id=group.id,\n resource_id=resource.resource_id)\n", (9476, 9560), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((9800, 9820), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (9808, 9820), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((9840, 9884), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (9852, 9884), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((9905, 9950), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(2)', '"""test_resource2"""'], {}), "(db_session, 2, 'test_resource2')\n", (9917, 9950), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((9959, 10004), 'ziggurat_foundations.tests.add_resource', 'add_resource', 
(['db_session', '(3)', '"""test_resource3"""'], {}), "(db_session, 3, 'test_resource3')\n", (9971, 10004), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((10021, 10042), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session'], {}), '(db_session)\n', (10030, 10042), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((10111, 10208), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""test_perm"""', 'group_id': '(1)', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', group_id=1, resource_id=\n resource.resource_id)\n", (10134, 10208), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((10254, 10351), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""foo_perm"""', 'group_id': '(1)', 'resource_id': 'resource2.resource_id'}), "(perm_name='foo_perm', group_id=1, resource_id=\n resource2.resource_id)\n", (10277, 10351), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((10882, 10973), 'ziggurat_foundations.models.services.resource.ResourceService.direct_perms_for_user', 'ResourceService.direct_perms_for_user', (['self.resource', 'self.user'], {'db_session': 'db_session'}), '(self.resource, self.user, db_session=\n db_session)\n', (10919, 10973), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((11279, 11312), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (11297, 11312), False, 'from ziggurat_foundations.tests 
import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((11505, 11595), 'ziggurat_foundations.models.services.resource.ResourceService.group_perms_for_user', 'ResourceService.group_perms_for_user', (['self.resource', 'self.user'], {'db_session': 'db_session'}), '(self.resource, self.user, db_session=\n db_session)\n', (11541, 11595), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((11783, 11816), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (11801, 11816), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((11973, 12052), 'ziggurat_foundations.models.services.resource.ResourceService.perms_for_user', 'ResourceService.perms_for_user', (['self.resource', 'self.user'], {'db_session': 'db_session'}), '(self.resource, self.user, db_session=db_session)\n', (12003, 12052), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((12496, 12529), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (12514, 12529), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((12656, 12741), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""foo_perm"""'], {'db_session': 'db_session'}), "(self.resource, 'foo_perm', db_session=db_session\n )\n", (12686, 12741), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((12920, 12953), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (12938, 12953), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, 
add_resource_b, add_group, BaseTestCase\n'), ((13113, 13207), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n db_session=db_session)\n", (13143, 13207), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((13879, 13912), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (13897, 13912), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((14054, 14149), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource2', '"""__any_permission__"""'], {'db_session': 'db_session'}), "(self.resource2, '__any_permission__',\n db_session=db_session)\n", (14084, 14149), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((14459, 14492), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (14477, 14492), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((14652, 14771), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'user_ids': '[self.user.id]', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n user_ids=[self.user.id], db_session=db_session)\n", (14682, 14771), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((15248, 15281), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (15266, 15281), False, 'from 
ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((15441, 15588), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'user_ids': '[self.user.id]', 'group_ids': '[self.group2.id]', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n user_ids=[self.user.id], group_ids=[self.group2.id], db_session=db_session)\n", (15471, 15588), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((15944, 15977), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (15962, 15977), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((16150, 16271), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource2', '"""__any_permission__"""'], {'user_ids': '[self.user3.id]', 'db_session': 'db_session'}), "(self.resource2, '__any_permission__',\n user_ids=[self.user3.id], db_session=db_session)\n", (16180, 16271), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((16491, 16524), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (16509, 16524), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((16697, 16850), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'user_ids': '[self.user4.id]', 'group_ids': '[self.group2.id]', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n user_ids=[self.user4.id], 
group_ids=[self.group2.id], db_session=db_session\n )\n", (16727, 16850), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((17182, 17215), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (17200, 17215), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((17385, 17474), 'ziggurat_foundations.tests.conftest.ResourceTestobjB', 'ResourceTestobjB', ([], {'resource_id': '(99)', 'resource_name': '"""other"""', 'owner_user_id': 'self.user2.id'}), "(resource_id=99, resource_name='other', owner_user_id=self.\n user2.id)\n", (17401, 17474), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((17509, 17541), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session', '"""group 3"""'], {}), "(db_session, 'group 3')\n", (17518, 17541), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((17569, 17636), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""foo_perm"""', 'user_id': 'self.user2.id'}), "(perm_name='foo_perm', user_id=self.user2.id)\n", (17591, 17636), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((17687, 17754), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm"""', 'group_id': 'group3.id'}), "(perm_name='group_perm', group_id=group3.id)\n", (17710, 17754), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), 
((18072, 18162), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['resource', '"""__any_permission__"""'], {'db_session': 'db_session'}), "(resource, '__any_permission__', db_session=\n db_session)\n", (18102, 18162), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((18718, 18751), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (18736, 18751), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((18816, 18860), 'ziggurat_foundations.tests.conftest.User', 'User', ([], {'user_name': '"""aaa"""', 'email': '"""aaa"""', 'status': '(0)'}), "(user_name='aaa', email='aaa', status=0)\n", (18820, 18860), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((18869, 18911), 'ziggurat_foundations.models.services.user.UserService.set_password', 'UserService.set_password', (['user', '"""password"""'], {}), "(user, 'password')\n", (18893, 18911), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((18931, 18962), 'ziggurat_foundations.tests.conftest.UserPermission', 'UserPermission', ([], {'perm_name': '"""aaa"""'}), "(perm_name='aaa')\n", (18945, 18962), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((18982, 19013), 'ziggurat_foundations.tests.conftest.UserPermission', 'UserPermission', ([], {'perm_name': '"""bbb"""'}), "(perm_name='bbb')\n", (18996, 19013), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((19034, 19065), 
'ziggurat_foundations.tests.conftest.UserPermission', 'UserPermission', ([], {'perm_name': '"""bbb"""'}), "(perm_name='bbb')\n", (19048, 19065), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((19176, 19220), 'ziggurat_foundations.tests.conftest.User', 'User', ([], {'user_name': '"""bbb"""', 'email': '"""bbb"""', 'status': '(0)'}), "(user_name='bbb', email='bbb', status=0)\n", (19180, 19220), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((19229, 19272), 'ziggurat_foundations.models.services.user.UserService.set_password', 'UserService.set_password', (['user2', '"""password"""'], {}), "(user2, 'password')\n", (19253, 19272), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((19338, 19382), 'ziggurat_foundations.tests.conftest.User', 'User', ([], {'user_name': '"""ccc"""', 'email': '"""ccc"""', 'status': '(0)'}), "(user_name='ccc', email='ccc', status=0)\n", (19342, 19382), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((19391, 19434), 'ziggurat_foundations.models.services.user.UserService.set_password', 'UserService.set_password', (['user3', '"""password"""'], {}), "(user3, 'password')\n", (19415, 19434), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((19451, 19472), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session'], {}), '(db_session)\n', (19460, 19472), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((19609, 19668), 'ziggurat_foundations.models.services.user.UserService.users_for_perms', 'UserService.users_for_perms', (["['aaa']"], 
{'db_session': 'db_session'}), "(['aaa'], db_session=db_session)\n", (19636, 19668), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((19948, 20034), 'ziggurat_foundations.models.services.user.UserService.users_for_perms', 'UserService.users_for_perms', (["['aaa', 'bbb', 'manage_apps']"], {'db_session': 'db_session'}), "(['aaa', 'bbb', 'manage_apps'], db_session=\n db_session)\n", (19975, 20034), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((20264, 20351), 'ziggurat_foundations.tests.conftest.ResourceTestobjB', 'ResourceTestobjB', ([], {'resource_id': '(3)', 'resource_name': '"""other"""', 'owner_user_id': 'self.user.id'}), "(resource_id=3, resource_name='other', owner_user_id=self.\n user.id)\n", (20280, 20351), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((20435, 20495), 'ziggurat_foundations.tests.conftest.ResourceTestobjB', 'ResourceTestobjB', ([], {'resource_id': '(4)', 'resource_name': '"""group owned"""'}), "(resource_id=4, resource_name='group owned')\n", (20451, 20495), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((20587, 20662), 'ziggurat_foundations.models.services.user.UserService.resources_with_possible_perms', 'UserService.resources_with_possible_perms', (['self.user'], {'db_session': 'db_session'}), '(self.user, db_session=db_session)\n', (20628, 20662), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((21361, 21394), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (21379, 21394), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((21571, 21604), 
'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(6)', '"""user 6"""'], {}), "(db_session, 6, 'user 6')\n", (21579, 21604), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((21621, 21654), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(7)', '"""user 7"""'], {}), "(db_session, 7, 'user 7')\n", (21629, 21654), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((21671, 21763), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_perm2', resource_id=self.resource.\n resource_id)\n", (21694, 21763), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((21929, 22023), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n db_session=db_session)\n", (21959, 22023), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((23444, 23477), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (23462, 23477), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((23657, 23690), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(6)', '"""user 6"""'], {}), "(db_session, 6, 'user 6')\n", (23665, 23690), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((23707, 
23740), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(7)', '"""user 7"""'], {}), "(db_session, 7, 'user 7')\n", (23715, 23740), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((23757, 23849), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_perm2', resource_id=self.resource.\n resource_id)\n", (23780, 23849), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((24015, 24139), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'limit_group_permissions': '(True)', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n limit_group_permissions=True, db_session=db_session)\n", (24045, 24139), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((24869, 24902), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (24887, 24902), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((25080, 25113), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(6)', '"""user 6"""'], {}), "(db_session, 6, 'user 6')\n", (25088, 25113), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((25130, 25163), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(7)', '"""user 7"""'], {}), "(db_session, 7, 'user 7')\n", (25138, 25163), False, 'from ziggurat_foundations.tests import add_user, 
check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((25180, 25272), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_perm2', resource_id=self.resource.\n resource_id)\n", (25203, 25272), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((25438, 25533), 'ziggurat_foundations.models.services.resource.ResourceService.groups_for_perm', 'ResourceService.groups_for_perm', (['self.resource', '"""__any_permission__"""'], {'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n db_session=db_session)\n", (25469, 25533), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((26704, 26737), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (26722, 26737), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((26923, 26956), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(6)', '"""user 6"""'], {}), "(db_session, 6, 'user 6')\n", (26931, 26956), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((26973, 27006), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(7)', '"""user 7"""'], {}), "(db_session, 7, 'user 7')\n", (26981, 27006), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((27023, 27115), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'resource_id': 'self.resource.resource_id'}), 
"(perm_name='group_perm2', resource_id=self.resource.\n resource_id)\n", (27046, 27115), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((27281, 27406), 'ziggurat_foundations.models.services.resource.ResourceService.groups_for_perm', 'ResourceService.groups_for_perm', (['self.resource', '"""__any_permission__"""'], {'limit_group_permissions': '(True)', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n limit_group_permissions=True, db_session=db_session)\n", (27312, 27406), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((27886, 27919), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (27904, 27919), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((28101, 28134), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(6)', '"""user 6"""'], {}), "(db_session, 6, 'user 6')\n", (28109, 28134), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((28151, 28184), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(7)', '"""user 7"""'], {}), "(db_session, 7, 'user 7')\n", (28159, 28184), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((28201, 28293), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_perm2', resource_id=self.resource.\n resource_id)\n", (28224, 28293), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, 
GroupResourcePermission, ResourceTestobjB\n'), ((28459, 28606), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'limit_group_permissions': '(True)', 'skip_group_perms': '(True)', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n limit_group_permissions=True, skip_group_perms=True, db_session=db_session)\n", (28489, 28606), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((28962, 28995), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (28980, 28995), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((29207, 29240), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(6)', '"""user 6"""'], {}), "(db_session, 6, 'user 6')\n", (29215, 29240), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((29257, 29290), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(7)', '"""user 7"""'], {}), "(db_session, 7, 'user 7')\n", (29265, 29290), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((29307, 29399), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_perm2', resource_id=self.resource.\n resource_id)\n", (29330, 29399), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((29567, 29603), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session', '"""Empty group"""'], {}), 
"(db_session, 'Empty group')\n", (29576, 29603), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((29620, 29712), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_permx"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_permx', resource_id=self.resource.\n resource_id)\n", (29643, 29712), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((29796, 29921), 'ziggurat_foundations.models.services.resource.ResourceService.groups_for_perm', 'ResourceService.groups_for_perm', (['self.resource', '"""__any_permission__"""'], {'limit_group_permissions': '(True)', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n limit_group_permissions=True, db_session=db_session)\n", (29827, 29921), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((30527, 30560), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (30545, 30560), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((30766, 30799), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(6)', '"""user 6"""'], {}), "(db_session, 6, 'user 6')\n", (30774, 30799), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((30816, 30849), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session', '(7)', '"""user 7"""'], {}), "(db_session, 7, 'user 7')\n", (30824, 30849), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((30866, 30958), 
'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_perm2', resource_id=self.resource.\n resource_id)\n", (30889, 30958), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((31125, 31161), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session', '"""Empty group"""'], {}), "(db_session, 'Empty group')\n", (31134, 31161), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((31178, 31270), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_permx"""', 'resource_id': 'self.resource.resource_id'}), "(perm_name='group_permx', resource_id=self.resource.\n resource_id)\n", (31201, 31270), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((31355, 31479), 'ziggurat_foundations.models.services.resource.ResourceService.users_for_perm', 'ResourceService.users_for_perm', (['self.resource', '"""__any_permission__"""'], {'limit_group_permissions': '(True)', 'db_session': 'db_session'}), "(self.resource, '__any_permission__',\n limit_group_permissions=True, db_session=db_session)\n", (31385, 31479), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((32335, 32368), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (32353, 32368), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((32449, 32469), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], 
{}), '(db_session)\n', (32457, 32469), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((32489, 32533), 'ziggurat_foundations.tests.add_resource', 'add_resource', (['db_session', '(1)', '"""test_resource"""'], {}), "(db_session, 1, 'test_resource')\n", (32501, 32533), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((32555, 32663), 'ziggurat_foundations.tests.conftest.UserResourcePermission', 'UserResourcePermission', ([], {'perm_name': '"""test_perm"""', 'user_id': 'created_user.id', 'resource_id': 'resource.resource_id'}), "(perm_name='test_perm', user_id=created_user.id,\n resource_id=resource.resource_id)\n", (32577, 32663), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((32802, 32945), 'ziggurat_foundations.models.services.user_resource_permission.UserResourcePermissionService.get', 'UserResourcePermissionService.get', ([], {'user_id': 'created_user.id', 'resource_id': 'resource.resource_id', 'perm_name': '"""test_perm"""', 'db_session': 'db_session'}), "(user_id=created_user.id, resource_id=\n resource.resource_id, perm_name='test_perm', db_session=db_session)\n", (32835, 32945), False, 'from ziggurat_foundations.models.services.user_resource_permission import UserResourcePermissionService\n'), ((33255, 33300), 'ziggurat_foundations.tests.conftest.GroupPermission', 'GroupPermission', ([], {'group_id': '(1)', 'perm_name': '"""perm"""'}), "(group_id=1, perm_name='perm')\n", (33270, 33300), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((33425, 33456), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session', '"""group1"""'], {}), 
"(db_session, 'group1')\n", (33434, 33456), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((33473, 33574), 'ziggurat_foundations.models.services.group_permission.GroupPermissionService.get', 'GroupPermissionService.get', ([], {'group_id': 'org_group.id', 'perm_name': '"""manage_apps"""', 'db_session': 'db_session'}), "(group_id=org_group.id, perm_name='manage_apps',\n db_session=db_session)\n", (33499, 33574), False, 'from ziggurat_foundations.models.services.group_permission import GroupPermissionService\n'), ((33735, 33756), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session'], {}), '(db_session)\n', (33744, 33756), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((33775, 33861), 'ziggurat_foundations.models.services.group_permission.GroupPermissionService.by_group_and_perm', 'GroupPermissionService.by_group_and_perm', (['(1)', '"""manage_apps"""'], {'db_session': 'db_session'}), "(1, 'manage_apps', db_session=\n db_session)\n", (33815, 33861), False, 'from ziggurat_foundations.models.services.group_permission import GroupPermissionService\n'), ((34037, 34058), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session'], {}), '(db_session)\n', (34046, 34058), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((34077, 34163), 'ziggurat_foundations.models.services.group_permission.GroupPermissionService.by_group_and_perm', 'GroupPermissionService.by_group_and_perm', (['(2)', '"""manage_apps"""'], {'db_session': 'db_session'}), "(2, 'manage_apps', db_session=\n db_session)\n", (34117, 34163), False, 'from ziggurat_foundations.models.services.group_permission import GroupPermissionService\n'), ((34282, 34303), 'ziggurat_foundations.tests.add_group', 'add_group', (['db_session'], {}), 
'(db_session)\n', (34291, 34303), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((34322, 34407), 'ziggurat_foundations.models.services.group_permission.GroupPermissionService.by_group_and_perm', 'GroupPermissionService.by_group_and_perm', (['(1)', '"""wrong_perm"""'], {'db_session': 'db_session'}), "(1, 'wrong_perm', db_session=db_session\n )\n", (34362, 34407), False, 'from ziggurat_foundations.models.services.group_permission import GroupPermissionService\n'), ((34588, 34642), 'ziggurat_foundations.models.services.group.GroupService.resources_with_possible_perms', 'GroupService.resources_with_possible_perms', (['self.group'], {}), '(self.group)\n', (34630, 34642), False, 'from ziggurat_foundations.models.services.group import GroupService\n'), ((34808, 34841), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (34826, 34841), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((34985, 35032), 'ziggurat_foundations.tests.add_resource_b', 'add_resource_b', (['db_session', '(3)', '"""other resource"""'], {}), "(db_session, 3, 'other resource')\n", (34999, 35032), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((35109, 35182), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'group_id': 'self.group2.id'}), "(perm_name='group_perm2', group_id=self.group2.id)\n", (35132, 35182), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((35289, 35344), 'ziggurat_foundations.models.services.group.GroupService.resources_with_possible_perms', 
'GroupService.resources_with_possible_perms', (['self.group2'], {}), '(self.group2)\n', (35331, 35344), False, 'from ziggurat_foundations.models.services.group import GroupService\n'), ((35770, 35803), 'ziggurat_foundations.tests.check_one_in_other', 'check_one_in_other', (['perms', 'second'], {}), '(perms, second)\n', (35788, 35803), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((35924, 35971), 'ziggurat_foundations.tests.add_resource_b', 'add_resource_b', (['db_session', '(3)', '"""other resource"""'], {}), "(db_session, 3, 'other resource')\n", (35938, 35971), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((36027, 36100), 'ziggurat_foundations.tests.conftest.GroupResourcePermission', 'GroupResourcePermission', ([], {'perm_name': '"""group_perm2"""', 'group_id': 'self.group2.id'}), "(perm_name='group_perm2', group_id=self.group2.id)\n", (36050, 36100), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((36137, 36289), 'ziggurat_foundations.models.services.group_resource_permission.GroupResourcePermissionService.get', 'GroupResourcePermissionService.get', ([], {'group_id': 'self.group2.id', 'resource_id': 'self.resource2.resource_id', 'perm_name': '"""group_perm2"""', 'db_session': 'db_session'}), "(group_id=self.group2.id, resource_id=\n self.resource2.resource_id, perm_name='group_perm2', db_session=db_session)\n", (36171, 36289), False, 'from ziggurat_foundations.models.services.group_resource_permission import GroupResourcePermissionService\n'), ((36452, 36604), 'ziggurat_foundations.models.services.group_resource_permission.GroupResourcePermissionService.get', 'GroupResourcePermissionService.get', ([], {'group_id': 'self.group2.id', 'resource_id': 
'self.resource2.resource_id', 'perm_name': '"""group_perm2"""', 'db_session': 'db_session'}), "(group_id=self.group2.id, resource_id=\n self.resource2.resource_id, perm_name='group_perm2', db_session=db_session)\n", (36486, 36604), False, 'from ziggurat_foundations.models.services.group_resource_permission import GroupResourcePermissionService\n'), ((36857, 37017), 'ziggurat_foundations.models.services.resource.ResourceService.perm_by_group_and_perm_name', 'ResourceService.perm_by_group_and_perm_name', ([], {'resource_id': 'self.resource.resource_id', 'group_id': 'self.group.id', 'perm_name': 'perm_name', 'db_session': 'db_session'}), '(resource_id=self.resource.\n resource_id, group_id=self.group.id, perm_name=perm_name, db_session=\n db_session)\n', (36900, 37017), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((37256, 37416), 'ziggurat_foundations.models.services.resource.ResourceService.perm_by_group_and_perm_name', 'ResourceService.perm_by_group_and_perm_name', ([], {'resource_id': 'self.resource.resource_id', 'group_id': 'self.group.id', 'perm_name': 'perm_name', 'db_session': 'db_session'}), '(resource_id=self.resource.\n resource_id, group_id=self.group.id, perm_name=perm_name, db_session=\n db_session)\n', (37299, 37416), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((37721, 37764), 'ziggurat_foundations.tests.conftest.UserPermission', 'UserPermission', ([], {'user_id': '(1)', 'perm_name': '"""perm"""'}), "(user_id=1, perm_name='perm')\n", (37735, 37764), False, 'from ziggurat_foundations.tests.conftest import User, UserPermission, GroupPermission, UserResourcePermission, GroupResourcePermission, ResourceTestobjB\n'), ((37882, 37902), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (37890, 37902), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), 
((37918, 38006), 'ziggurat_foundations.models.services.user_permission.UserPermissionService.get', 'UserPermissionService.get', ([], {'user_id': 'user.id', 'perm_name': '"""root"""', 'db_session': 'db_session'}), "(user_id=user.id, perm_name='root', db_session=\n db_session)\n", (37943, 38006), False, 'from ziggurat_foundations.models.services.user_permission import UserPermissionService\n'), ((38161, 38181), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (38169, 38181), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((38208, 38280), 'ziggurat_foundations.models.services.user_permission.UserPermissionService.by_user_and_perm', 'UserPermissionService.by_user_and_perm', (['(1)', '"""root"""'], {'db_session': 'db_session'}), "(1, 'root', db_session=db_session)\n", (38246, 38280), False, 'from ziggurat_foundations.models.services.user_permission import UserPermissionService\n'), ((38472, 38492), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (38480, 38492), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((38519, 38593), 'ziggurat_foundations.models.services.user_permission.UserPermissionService.by_user_and_perm', 'UserPermissionService.by_user_and_perm', (['(999)', '"""root"""'], {'db_session': 'db_session'}), "(999, 'root', db_session=db_session)\n", (38557, 38593), False, 'from ziggurat_foundations.models.services.user_permission import UserPermissionService\n'), ((38729, 38749), 'ziggurat_foundations.tests.add_user', 'add_user', (['db_session'], {}), '(db_session)\n', (38737, 38749), False, 'from ziggurat_foundations.tests import add_user, check_one_in_other, add_resource, add_resource_b, add_group, BaseTestCase\n'), ((38776, 38849), 
'ziggurat_foundations.models.services.user_permission.UserPermissionService.by_user_and_perm', 'UserPermissionService.by_user_and_perm', (['(1)', '"""wrong"""'], {'db_session': 'db_session'}), "(1, 'wrong', db_session=db_session)\n", (38814, 38849), False, 'from ziggurat_foundations.models.services.user_permission import UserPermissionService\n'), ((1374, 1451), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['created_user', '"""alter_users"""', '"""user"""', 'None', 'None', '(False)', '(True)'], {}), "(created_user, 'alter_users', 'user', None, None, False, True)\n", (1389, 1451), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((1495, 1565), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['created_user', '"""root"""', '"""user"""', 'None', 'None', '(False)', '(True)'], {}), "(created_user, 'root', 'user', None, None, False, True)\n", (1510, 1565), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((2055, 2116), 'ziggurat_foundations.models.services.resource.ResourceService.direct_perms_for_user', 'ResourceService.direct_perms_for_user', (['resource', 'created_user'], {}), '(resource, created_user)\n', (2092, 2116), False, 'from ziggurat_foundations.models.services.resource import ResourceService\n'), ((6981, 7010), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (6994, 7010), False, 'import pytest\n'), ((9617, 9646), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (9630, 9646), False, 'import pytest\n'), ((11022, 11107), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (11037, 11107), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), 
((11146, 11232), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (11161, 11232), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((11644, 11737), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm', 'group', self.group, self.resource,\n False, True)\n", (11659, 11737), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((12106, 12191), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (12121, 12191), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((12230, 12323), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm', 'group', self.group, self.resource,\n False, True)\n", (12245, 12323), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((12363, 12449), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (12378, 12449), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((12790, 12875), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', 
'"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (12805, 12875), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((13257, 13350), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm', 'group', self.group, self.resource,\n False, True)\n", (13272, 13350), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((13390, 13476), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (13405, 13476), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((13516, 13601), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (13531, 13601), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((13640, 13736), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user4', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(self.user4, 'group_perm', 'group', self.group2, self.\n resource, False, True)\n", (13655, 13736), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((14199, 14285), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user2', '"""foo_perm"""', '"""user"""', 'None', 'self.resource2', '(False)', '(True)'], {}), "(self.user2, 'foo_perm', 
'user', None, self.resource2, False,\n True)\n", (14214, 14285), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((14325, 14413), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user3', '"""test_perm"""', '"""user"""', 'None', 'self.resource2', '(False)', '(True)'], {}), "(self.user3, 'test_perm', 'user', None, self.resource2, \n False, True)\n", (14340, 14413), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((14858, 14951), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm', 'group', self.group, self.resource,\n False, True)\n", (14873, 14951), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((14991, 15077), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (15006, 15077), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((15117, 15202), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (15132, 15202), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((15687, 15773), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (15702, 15773), False, 'from ziggurat_foundations.permissions import 
PermissionTuple, ALL_PERMISSIONS\n'), ((15813, 15898), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (15828, 15898), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((16358, 16446), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user3', '"""test_perm"""', '"""user"""', 'None', 'self.resource2', '(False)', '(True)'], {}), "(self.user3, 'test_perm', 'user', None, self.resource2, \n False, True)\n", (16373, 16446), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((16944, 17040), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user4', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(self.user4, 'group_perm', 'group', self.group2, self.\n resource, False, True)\n", (16959, 17040), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((18211, 18287), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user2', '"""foo_perm"""', '"""user"""', 'None', 'resource', '(False)', '(True)'], {}), "(self.user2, 'foo_perm', 'user', None, resource, False, True)\n", (18226, 18287), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((18331, 18410), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', 'ALL_PERMISSIONS', '"""user"""', 'None', 'resource', '(True)', '(True)'], {}), "(self.user, ALL_PERMISSIONS, 'user', None, resource, True, True)\n", (18346, 18410), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((18454, 18546), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user4', 
'ALL_PERMISSIONS', '"""group"""', 'self.group2', 'resource', '(True)', '(True)'], {}), "(self.user4, ALL_PERMISSIONS, 'group', self.group2, resource,\n True, True)\n", (18469, 18546), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((18586, 18671), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user3', '"""group_perm"""', '"""group"""', 'group3', 'resource', '(False)', '(True)'], {}), "(self.user3, 'group_perm', 'group', group3, resource, False,\n True)\n", (18601, 18671), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((20716, 20801), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (20731, 20801), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((20840, 20933), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm', 'group', self.group, self.resource,\n False, True)\n", (20855, 20933), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((20973, 21059), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (20988, 21059), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((21099, 21178), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', 'ALL_PERMISSIONS', '"""user"""', 'None', 'resource', '(True)', '(True)'], {}), "(self.user, ALL_PERMISSIONS, 'user', None, 
resource, True, True)\n", (21114, 21178), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((21222, 21314), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', 'ALL_PERMISSIONS', '"""group"""', 'self.group', 'resource_g', '(True)', '(True)'], {}), "(self.user, ALL_PERMISSIONS, 'group', self.group, resource_g,\n True, True)\n", (21237, 21314), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((22073, 22166), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm', 'group', self.group, self.resource,\n False, True)\n", (22088, 22166), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((22206, 22296), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user6', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user6, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (22221, 22296), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((22335, 22425), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user7', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user7, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (22350, 22425), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((22464, 22559), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm2', 'group', self.group, self.\n resource, False, True)\n", (22479, 22559), False, 'from ziggurat_foundations.permissions 
import PermissionTuple, ALL_PERMISSIONS\n'), ((22695, 22786), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user6', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user6, 'group_perm2', 'group', self.group, self.resource, \n False, True)\n", (22710, 22786), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((22825, 22916), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user7', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user7, 'group_perm2', 'group', self.group, self.resource, \n False, True)\n", (22840, 22916), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((22955, 23041), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (22970, 23041), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((23081, 23166), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (23096, 23166), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((23205, 23301), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user4', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(self.user4, 'group_perm', 'group', self.group2, self.\n resource, False, True)\n", (23220, 23301), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((24226, 24315), 
'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (24241, 24315), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((24354, 24444), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm2', 'group', self.group, self.resource, \n False, True)\n", (24369, 24444), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((24483, 24569), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (24498, 24569), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((24609, 24694), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (24624, 24694), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((24733, 24823), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group2, self.resource, \n False, True)\n", (24748, 24823), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((25583, 25676), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm"""', '"""group"""', 
'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm', 'group', self.group, self.resource,\n False, True)\n", (25598, 25676), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((25716, 25806), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user6', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user6, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (25731, 25806), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((25845, 25935), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user7', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user7, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (25860, 25935), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((25974, 26069), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'group_perm2', 'group', self.group, self.\n resource, False, True)\n", (25989, 26069), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((26205, 26296), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user6', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user6, 'group_perm2', 'group', self.group, self.resource, \n False, True)\n", (26220, 26296), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((26335, 26426), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['user7', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(user7, 'group_perm2', 'group', 
self.group, self.resource, \n False, True)\n", (26350, 26426), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((26465, 26561), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user4', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(self.user4, 'group_perm', 'group', self.group2, self.\n resource, False, True)\n", (26480, 26561), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((27493, 27582), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (27508, 27582), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((27621, 27711), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm2', 'group', self.group, self.resource, \n False, True)\n", (27636, 27711), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((27750, 27840), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group2, self.resource, \n False, True)\n", (27765, 27840), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((28705, 28791), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (28720, 28791), False, 'from 
ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((28831, 28916), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (28846, 28916), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((30009, 30098), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (30024, 30098), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((30137, 30227), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm2', 'group', self.group, self.resource, \n False, True)\n", (30152, 30227), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((30266, 30356), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group2, self.resource, \n False, True)\n", (30281, 30356), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((30395, 30480), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_permx"""', '"""group"""', 'group3', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_permx', 'group', group3, self.resource, False,\n True)\n", (30410, 30480), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((31567, 31656), 
'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (31582, 31656), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((31695, 31785), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm2"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm2', 'group', self.group, self.resource, \n False, True)\n", (31710, 31785), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((31824, 31910), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""test_perm2"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'test_perm2', 'user', None, self.resource, False,\n True)\n", (31839, 31910), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((31950, 32035), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['self.user', '"""foo_perm"""', '"""user"""', 'None', 'self.resource', '(False)', '(True)'], {}), "(self.user, 'foo_perm', 'user', None, self.resource, False, True\n )\n", (31965, 32035), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((32074, 32164), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group2, self.resource, \n False, True)\n", (32089, 32164), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((32203, 32288), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_permx"""', '"""group"""', 'group3', 
'self.resource', '(False)', '(True)'], {}), "(None, 'group_permx', 'group', group3, self.resource, False,\n True)\n", (32218, 32288), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((34674, 34763), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group, self.resource, \n False, True)\n", (34689, 34763), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((35376, 35466), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm"""', '"""group"""', 'self.group2', 'self.resource', '(False)', '(True)'], {}), "(None, 'group_perm', 'group', self.group2, self.resource, \n False, True)\n", (35391, 35466), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((35505, 35597), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', '"""group_perm2"""', '"""group"""', 'self.group2', 'self.resource2', '(False)', '(True)'], {}), "(None, 'group_perm2', 'group', self.group2, self.resource2, \n False, True)\n", (35520, 35597), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((35636, 35724), 'ziggurat_foundations.permissions.PermissionTuple', 'PermissionTuple', (['None', 'ALL_PERMISSIONS', '"""group"""', 'self.group2', 'resource3', '(True)', '(True)'], {}), "(None, ALL_PERMISSIONS, 'group', self.group2, resource3, \n True, True)\n", (35651, 35724), False, 'from ziggurat_foundations.permissions import PermissionTuple, ALL_PERMISSIONS\n'), ((1881, 1970), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'db_session': 'db_session'}), "(created_user, ['test_perm'], db_session=\n db_session)\n", (1913, 
1970), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((2689, 2778), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'db_session': 'db_session'}), "(created_user, ['test_perm'], db_session=\n db_session)\n", (2721, 2778), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((3490, 3579), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'db_session': 'db_session'}), "(created_user, ['test_perm'], db_session=\n db_session)\n", (3522, 3579), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((4146, 4268), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'resource_types': "['test_resource']", 'db_session': 'db_session'}), "(created_user, ['test_perm'],\n resource_types=['test_resource'], db_session=db_session)\n", (4178, 4268), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((4819, 4943), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'resource_types': "['test_resource_b']", 'db_session': 'db_session'}), "(created_user, ['test_perm'],\n resource_types=['test_resource_b'], db_session=db_session)\n", (4851, 4943), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((6403, 6527), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'resource_types': "['test_resource_b']", 'db_session': 'db_session'}), "(created_user, ['test_perm'],\n resource_types=['test_resource_b'], db_session=db_session)\n", (6435, 6527), False, 'from 
ziggurat_foundations.models.services.user import UserService\n'), ((7781, 7870), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'db_session': 'db_session'}), "(created_user, ['test_perm'], db_session=\n db_session)\n", (7813, 7870), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((8983, 9093), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['test_perm']"], {'resource_ids': '[1, 3]', 'db_session': 'db_session'}), "(created_user, ['test_perm'], resource_ids=\n [1, 3], db_session=db_session)\n", (9015, 9093), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((10538, 10626), 'ziggurat_foundations.models.services.user.UserService.resources_with_perms', 'UserService.resources_with_perms', (['created_user', "['foo_perm']"], {'db_session': 'db_session'}), "(created_user, ['foo_perm'], db_session=\n db_session)\n", (10570, 10626), False, 'from ziggurat_foundations.models.services.user import UserService\n'), ((19765, 19824), 'ziggurat_foundations.models.services.user.UserService.users_for_perms', 'UserService.users_for_perms', (["['bbb']"], {'db_session': 'db_session'}), "(['bbb'], db_session=db_session)\n", (19792, 19824), False, 'from ziggurat_foundations.models.services.user import UserService\n')] |
from flask import Flask
from flask import render_template
import datetime as dt
import json
import jinja2 as j2
import requests
import argparse
import os
import feedparser
from pathlib import Path
class OpenWeatherAPI():
url = j2.Template('https://api.openweathermap.org/data/2.5/onecall?lat={{lat}}&lon={{lon}}&appid={{api_key}}&units={{units}}')
def __init__(self, api_key):
self.__api_key = api_key
def call(self, lon, lat, units='imperial'):
return json.loads(requests.get(OpenWeatherAPI.url.render(api_key=self.__api_key, lon=lon, lat=lat, units=units)).text)
args = {
'own': os.environ['OWN_API'],
'lon': os.environ['LON'],
'lat': os.environ['LAT']
}
app = Flask("ws-eink")
own_api = OpenWeatherAPI(args['own'])
def get_weather():
return own_api.call(args['lon'], args['lat'])
def get_news():
bbc_rss = "http://feeds.bbci.co.uk/news/rss.xml?edition=uk"
feed = feedparser.parse( bbc_rss )
return feed
icon_map = {
'01': '<span class="material-icons">wb_sunny</span>',
'02': '<span class="material-icons">wb_cloudy</span>',
'03': '<span class="material-icons">wb_cloudy</span>',
'04': '<span class="material-icons">wb_cloudy</span>',
'09': '<img src="/icons/rain" />',
'10': '<img src="/icons/rain" />',
'11': '<span class="material-icons">flash_on</span>',
'13': '<span class="material-icons">ac_unit</span>',
'50': '<img src="/icons/fog" />'
}
@app.template_filter()
def icon(value):
material = icon_map.get(value[:2], "")
return material
@app.template_filter()
def format_datetime(value, format='full'):
if format == 'date':
dt_format="%d/%m/%Y"
elif format == 'time':
dt_format = '%H:%M'
else:
dt_format="%d/%m/%Y %H:%M"
return dt.datetime.fromtimestamp(value).strftime(dt_format)
@app.route('/icons/<name>', methods=['GET'])
def get_icons(name):
current_file = Path(__file__)
with open(f'{current_file.parent}/icons/{name}.svg', 'r') as icon:
icon_str = icon.read()
return icon_str
@app.route('/', methods=['GET'])
@app.route('/<page>', methods=['GET'])
def pages(page=None):
if page is None:
page = 'main'
weather = get_weather()
feed = get_news()
return render_template(f'{page}.html', weather=weather, news=feed)
| [
"flask.render_template",
"datetime.datetime.fromtimestamp",
"pathlib.Path",
"feedparser.parse",
"flask.Flask",
"jinja2.Template"
] | [((714, 730), 'flask.Flask', 'Flask', (['"""ws-eink"""'], {}), "('ws-eink')\n", (719, 730), False, 'from flask import Flask\n'), ((233, 363), 'jinja2.Template', 'j2.Template', (['"""https://api.openweathermap.org/data/2.5/onecall?lat={{lat}}&lon={{lon}}&appid={{api_key}}&units={{units}}"""'], {}), "(\n 'https://api.openweathermap.org/data/2.5/onecall?lat={{lat}}&lon={{lon}}&appid={{api_key}}&units={{units}}'\n )\n", (244, 363), True, 'import jinja2 as j2\n'), ((932, 957), 'feedparser.parse', 'feedparser.parse', (['bbc_rss'], {}), '(bbc_rss)\n', (948, 957), False, 'import feedparser\n'), ((1933, 1947), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1937, 1947), False, 'from pathlib import Path\n'), ((2271, 2330), 'flask.render_template', 'render_template', (['f"""{page}.html"""'], {'weather': 'weather', 'news': 'feed'}), "(f'{page}.html', weather=weather, news=feed)\n", (2286, 2330), False, 'from flask import render_template\n'), ((1793, 1825), 'datetime.datetime.fromtimestamp', 'dt.datetime.fromtimestamp', (['value'], {}), '(value)\n', (1818, 1825), True, 'import datetime as dt\n')] |
from flask import Flask
def create_app():
app = Flask(__name__)
#Rotas
from app.controllers.main.rotas import main
#Registrar Blueprint
app.register_blueprint(main)
return app
| [
"flask.Flask"
] | [((53, 68), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (58, 68), False, 'from flask import Flask\n')] |
import rules
# ------------predicates------------
# Processus
@rules.predicate
def is_process_manager(user, processus):
return processus.proc_manager == user
@rules.predicate
def is_process_upper_mgt(user, processus):
bu = processus.business_unit
return bu.bu_manager == user
# Activités
@rules.predicate
def is_activity_owner(user, activite):
return activite.responsable == user
@rules.predicate
def is_activity_supervisor(user, activite):
processus = activite.processus
return processus.proc_manager == user
@rules.predicate
def is_activity_upper_mgt(user, activite):
processus = activite.processus
bu = processus.business_unit
return bu.bu_manager == user
# Risques des activités
@rules.predicate
def is_activity_risk_reporter(user, activiterisque):
return activiterisque.soumis_par == user
@rules.predicate
def is_activity_risk_owner(user, activiterisque):
return activiterisque.proprietaire == user
@rules.predicate
def is_activity_risk_monitor(user, activiterisque):
activite = activiterisque.activite
return activite.responsable == user
@rules.predicate
def is_activity_risk_supervisor(user, activiterisque):
processus = activiterisque.activite.processus
return processus.proc_manager == user
@rules.predicate
def is_activity_risk_upper_mgt(user, activiterisque):
processus = activiterisque.activite.processus
bu = processus.business_unit
return bu.bu_manager == user
# Risques des processus
@rules.predicate
def is_process_risk_reporter(user, processusrisque):
if processusrisque.soumis_par:
return processusrisque.soumis_par == user
return False
@rules.predicate
def is_process_risk_owner(user, processusrisque):
return processusrisque.proprietaire == user
@rules.predicate
def is_process_risk_monitor(user, processusrisque):
processus = processusrisque.processus
return processus.proc_manager == user
@rules.predicate
def is_process_risk_upper_mgt(user, processusrisque):
bu = processusrisque.processus.business_unit
return bu.bu_manager == user
# Estimations
@rules.predicate
def is_estimation_monitor(user, estimation):
try:
return estimation.content_object.activite.processus.proc_manager == user
except AttributeError:
return estimation.content_object.processus.proc_manager == user
# Contrôles
@rules.predicate
def is_controle_creator(user, controle):
return controle.cree_par == user
@rules.predicate
def is_controle_owner(user, controle):
if controle.assigne_a:
return controle.assigne_a == user
return False
@rules.predicate
def is_controle_reviewer(user, controle):
try:
return controle.content_object.activite.processus.proc_manager == user
except AttributeError:
return controle.content_object.processus.proc_manager == user
# Risques
@rules.predicate
def is_risk_creator(user, risque):
return risque.cree_par == user
# Identification Risques
@rules.predicate
def is_risk_verifier(user, identificationrisque):
if identificationrisque.get_class == 'ProcessusRisque':
return is_process_risk_monitor(user, identificationrisque) \
or is_process_risk_upper_mgt(user, identificationrisque)
elif identificationrisque.get_class == 'ActiviteRisque':
return is_activity_risk_monitor(user, identificationrisque) \
or is_activity_risk_supervisor(user, identificationrisque) \
or is_activity_risk_upper_mgt(user, identificationrisque)
# ------------rules------------
# Risques
rules.add_rule('change_risque', is_risk_creator)
# Identification Risques
rules.add_rule('verify_risk', is_risk_verifier)
# Processus
rules.add_rule('change_processus', is_process_upper_mgt)
rules.add_rule('delete_processus', is_process_upper_mgt)
rules.add_rule('add_activity_to_process', is_process_manager | is_process_upper_mgt)
rules.add_rule('add_process_data', is_process_manager | is_process_upper_mgt)
rules.add_rule('add_process_risk', rules.is_authenticated)
# Activités
rules.add_rule('change_activite', is_activity_supervisor | is_activity_upper_mgt)
rules.add_rule('delete_activite', is_activity_supervisor | is_activity_upper_mgt)
rules.add_rule('add_activity_risk', rules.is_authenticated)
rules.add_rule('complete_activity', is_activity_owner)
# Risques des activités
rules.add_rule('set_seuil_activity_risk', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_rule('set_review_date_activity_risk', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_rule('add_control_activity_risk', is_activity_risk_reporter | is_activity_risk_monitor |
is_activity_risk_owner | is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_rule('assign_activity_risk', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_rule('estimate_activity_risk', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_rule('change_activiterisque', is_activity_risk_supervisor |
is_activity_risk_upper_mgt | is_activity_risk_reporter)
rules.add_rule('delete_activiterisque', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
# Risques des processus
rules.add_rule('set_seuil_process_risk', is_process_risk_monitor | is_process_risk_upper_mgt)
rules.add_rule('set_review_date_process_risk', is_process_risk_upper_mgt | is_process_risk_monitor)
rules.add_rule('add_control_process_risk', is_process_risk_owner | is_process_risk_monitor | is_process_risk_upper_mgt
| is_activity_risk_reporter)
rules.add_rule('assign_process_risk', is_process_risk_monitor | is_process_risk_upper_mgt)
rules.add_rule('estimate_process_risk', is_process_risk_upper_mgt | is_process_risk_monitor)
rules.add_rule('change_processusrisque', is_process_risk_upper_mgt |
is_process_risk_monitor | is_process_risk_reporter)
rules.add_rule('delete_processusrisque', is_process_risk_monitor | is_process_risk_upper_mgt)
# Estimations
rules.add_rule('set_estimation_review_date', is_estimation_monitor)
# Contrôles
rules.add_rule('assign_control', is_controle_reviewer)
rules.add_rule('complete_control', is_controle_owner)
rules.add_rule('change_controle', is_controle_reviewer | is_controle_creator)
rules.add_rule('delete_controle', is_controle_creator | is_controle_reviewer)
rules.add_rule('approve_controle', is_controle_reviewer)
rules.add_rule('validate_controle_completion', is_controle_reviewer | is_controle_creator)
# ------------permissions------------
# Risques
rules.add_perm('risk_register.change_risque', is_risk_creator)
# Identification Risques
rules.add_perm('risk_register.verify_risque', is_risk_verifier)
# Processus
rules.add_perm('risk_register.change_processus', is_process_upper_mgt)
rules.add_perm('risk_register.delete_processus', is_process_upper_mgt)
rules.add_perm('risk_register.add_activity_to_process', is_process_manager | is_process_upper_mgt)
rules.add_perm('risk_register.add_process_data', is_process_manager | is_process_upper_mgt)
rules.add_perm('risk_register.add_process_risk', rules.is_authenticated)
# Activités
rules.add_perm('risk_register.change_activite', is_activity_supervisor | is_activity_upper_mgt)
rules.add_perm('risk_register.delete_activite', is_activity_supervisor | is_activity_upper_mgt)
rules.add_perm('risk_register.add_activity_risk', rules.is_authenticated)
rules.add_perm('risk_register_complete_activity', is_activity_owner)
# Risques des activités
rules.add_perm('risk_register.set_seuil_activity_risk', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_perm('risk_register.set_review_date_activity_risk', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_perm('risk_register.add_control_activity_risk', is_activity_risk_reporter | is_activity_risk_monitor |
is_activity_risk_owner | is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_perm('risk_register.assign_activity_risk', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
rules.add_perm('risk_register.estimate_activity_risk',
is_activity_risk_supervisor | is_activity_risk_upper_mgt | is_activity_risk_reporter)
rules.add_perm('risk_register.change_activiterisque', is_activity_risk_supervisor |
is_activity_risk_upper_mgt | is_activity_risk_reporter)
rules.add_perm('risk_register.delete_activiterisque', is_activity_risk_supervisor | is_activity_risk_upper_mgt)
# Risques des processus
rules.add_perm('risk_register.set_seuil_process_risk', is_process_risk_monitor | is_process_risk_upper_mgt)
rules.add_perm('risk_register.set_review_date_process_risk', is_process_risk_upper_mgt | is_process_risk_monitor)
rules.add_perm('risk_register.add_control_process_risk', is_process_risk_owner | is_process_risk_monitor |
is_process_risk_upper_mgt | is_process_risk_reporter)
rules.add_perm('risk_register.assign_process_risk', is_process_risk_monitor | is_process_risk_upper_mgt)
rules.add_perm('risk_register.estimate_process_risk', is_process_risk_upper_mgt | is_process_risk_monitor |
is_process_risk_owner | is_process_risk_reporter)
rules.add_perm('risk_register.change_processusrisque', is_process_risk_upper_mgt |
is_process_risk_monitor | is_process_risk_reporter)
rules.add_perm('risk_register.delete_processusrisque', is_process_risk_monitor | is_process_risk_upper_mgt)
# Estimations
rules.add_perm('risk_register.set_estimation_review_date', is_estimation_monitor)
# Contrôles
rules.add_perm('risk_register.assign_control', is_controle_reviewer)
rules.add_perm('risk_register.complete_control', is_controle_owner)
rules.add_perm('risk_register.change_controle', is_controle_reviewer | is_controle_creator)
rules.add_perm('risk_register.delete_controle', is_controle_creator | is_controle_reviewer)
rules.add_perm('risk_register.approve_controle', is_controle_reviewer)
rules.add_perm('risk_register.validate_controle_completion', is_controle_reviewer | is_controle_creator)
| [
"rules.add_rule",
"rules.add_perm"
] | [((3577, 3625), 'rules.add_rule', 'rules.add_rule', (['"""change_risque"""', 'is_risk_creator'], {}), "('change_risque', is_risk_creator)\n", (3591, 3625), False, 'import rules\n'), ((3652, 3699), 'rules.add_rule', 'rules.add_rule', (['"""verify_risk"""', 'is_risk_verifier'], {}), "('verify_risk', is_risk_verifier)\n", (3666, 3699), False, 'import rules\n'), ((3713, 3769), 'rules.add_rule', 'rules.add_rule', (['"""change_processus"""', 'is_process_upper_mgt'], {}), "('change_processus', is_process_upper_mgt)\n", (3727, 3769), False, 'import rules\n'), ((3770, 3826), 'rules.add_rule', 'rules.add_rule', (['"""delete_processus"""', 'is_process_upper_mgt'], {}), "('delete_processus', is_process_upper_mgt)\n", (3784, 3826), False, 'import rules\n'), ((3827, 3915), 'rules.add_rule', 'rules.add_rule', (['"""add_activity_to_process"""', '(is_process_manager | is_process_upper_mgt)'], {}), "('add_activity_to_process', is_process_manager |\n is_process_upper_mgt)\n", (3841, 3915), False, 'import rules\n'), ((3912, 3989), 'rules.add_rule', 'rules.add_rule', (['"""add_process_data"""', '(is_process_manager | is_process_upper_mgt)'], {}), "('add_process_data', is_process_manager | is_process_upper_mgt)\n", (3926, 3989), False, 'import rules\n'), ((3990, 4048), 'rules.add_rule', 'rules.add_rule', (['"""add_process_risk"""', 'rules.is_authenticated'], {}), "('add_process_risk', rules.is_authenticated)\n", (4004, 4048), False, 'import rules\n'), ((4062, 4147), 'rules.add_rule', 'rules.add_rule', (['"""change_activite"""', '(is_activity_supervisor | is_activity_upper_mgt)'], {}), "('change_activite', is_activity_supervisor |\n is_activity_upper_mgt)\n", (4076, 4147), False, 'import rules\n'), ((4144, 4229), 'rules.add_rule', 'rules.add_rule', (['"""delete_activite"""', '(is_activity_supervisor | is_activity_upper_mgt)'], {}), "('delete_activite', is_activity_supervisor |\n is_activity_upper_mgt)\n", (4158, 4229), False, 'import rules\n'), ((4226, 4285), 'rules.add_rule', 
'rules.add_rule', (['"""add_activity_risk"""', 'rules.is_authenticated'], {}), "('add_activity_risk', rules.is_authenticated)\n", (4240, 4285), False, 'import rules\n'), ((4286, 4340), 'rules.add_rule', 'rules.add_rule', (['"""complete_activity"""', 'is_activity_owner'], {}), "('complete_activity', is_activity_owner)\n", (4300, 4340), False, 'import rules\n'), ((4367, 4470), 'rules.add_rule', 'rules.add_rule', (['"""set_seuil_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('set_seuil_activity_risk', is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)\n", (4381, 4470), False, 'import rules\n'), ((4467, 4576), 'rules.add_rule', 'rules.add_rule', (['"""set_review_date_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('set_review_date_activity_risk', is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)\n", (4481, 4576), False, 'import rules\n'), ((4573, 4762), 'rules.add_rule', 'rules.add_rule', (['"""add_control_activity_risk"""', '(is_activity_risk_reporter | is_activity_risk_monitor |\n is_activity_risk_owner | is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)'], {}), "('add_control_activity_risk', is_activity_risk_reporter |\n is_activity_risk_monitor | is_activity_risk_owner |\n is_activity_risk_supervisor | is_activity_risk_upper_mgt)\n", (4587, 4762), False, 'import rules\n'), ((4770, 4870), 'rules.add_rule', 'rules.add_rule', (['"""assign_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('assign_activity_risk', is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)\n", (4784, 4870), False, 'import rules\n'), ((4867, 4969), 'rules.add_rule', 'rules.add_rule', (['"""estimate_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('estimate_activity_risk', is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)\n", (4881, 4969), False, 'import rules\n'), ((4966, 5095), 
'rules.add_rule', 'rules.add_rule', (['"""change_activiterisque"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt |\n is_activity_risk_reporter)'], {}), "('change_activiterisque', is_activity_risk_supervisor |\n is_activity_risk_upper_mgt | is_activity_risk_reporter)\n", (4980, 5095), False, 'import rules\n'), ((5107, 5208), 'rules.add_rule', 'rules.add_rule', (['"""delete_activiterisque"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('delete_activiterisque', is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)\n", (5121, 5208), False, 'import rules\n'), ((5230, 5327), 'rules.add_rule', 'rules.add_rule', (['"""set_seuil_process_risk"""', '(is_process_risk_monitor | is_process_risk_upper_mgt)'], {}), "('set_seuil_process_risk', is_process_risk_monitor |\n is_process_risk_upper_mgt)\n", (5244, 5327), False, 'import rules\n'), ((5324, 5427), 'rules.add_rule', 'rules.add_rule', (['"""set_review_date_process_risk"""', '(is_process_risk_upper_mgt | is_process_risk_monitor)'], {}), "('set_review_date_process_risk', is_process_risk_upper_mgt |\n is_process_risk_monitor)\n", (5338, 5427), False, 'import rules\n'), ((5424, 5579), 'rules.add_rule', 'rules.add_rule', (['"""add_control_process_risk"""', '(is_process_risk_owner | is_process_risk_monitor |\n is_process_risk_upper_mgt | is_activity_risk_reporter)'], {}), "('add_control_process_risk', is_process_risk_owner |\n is_process_risk_monitor | is_process_risk_upper_mgt |\n is_activity_risk_reporter)\n", (5438, 5579), False, 'import rules\n'), ((5587, 5681), 'rules.add_rule', 'rules.add_rule', (['"""assign_process_risk"""', '(is_process_risk_monitor | is_process_risk_upper_mgt)'], {}), "('assign_process_risk', is_process_risk_monitor |\n is_process_risk_upper_mgt)\n", (5601, 5681), False, 'import rules\n'), ((5678, 5774), 'rules.add_rule', 'rules.add_rule', (['"""estimate_process_risk"""', '(is_process_risk_upper_mgt | is_process_risk_monitor)'], {}), 
"('estimate_process_risk', is_process_risk_upper_mgt |\n is_process_risk_monitor)\n", (5692, 5774), False, 'import rules\n'), ((5771, 5895), 'rules.add_rule', 'rules.add_rule', (['"""change_processusrisque"""', '(is_process_risk_upper_mgt | is_process_risk_monitor | is_process_risk_reporter\n )'], {}), "('change_processusrisque', is_process_risk_upper_mgt |\n is_process_risk_monitor | is_process_risk_reporter)\n", (5785, 5895), False, 'import rules\n'), ((5907, 6004), 'rules.add_rule', 'rules.add_rule', (['"""delete_processusrisque"""', '(is_process_risk_monitor | is_process_risk_upper_mgt)'], {}), "('delete_processusrisque', is_process_risk_monitor |\n is_process_risk_upper_mgt)\n", (5921, 6004), False, 'import rules\n'), ((6016, 6083), 'rules.add_rule', 'rules.add_rule', (['"""set_estimation_review_date"""', 'is_estimation_monitor'], {}), "('set_estimation_review_date', is_estimation_monitor)\n", (6030, 6083), False, 'import rules\n'), ((6097, 6151), 'rules.add_rule', 'rules.add_rule', (['"""assign_control"""', 'is_controle_reviewer'], {}), "('assign_control', is_controle_reviewer)\n", (6111, 6151), False, 'import rules\n'), ((6152, 6205), 'rules.add_rule', 'rules.add_rule', (['"""complete_control"""', 'is_controle_owner'], {}), "('complete_control', is_controle_owner)\n", (6166, 6205), False, 'import rules\n'), ((6206, 6283), 'rules.add_rule', 'rules.add_rule', (['"""change_controle"""', '(is_controle_reviewer | is_controle_creator)'], {}), "('change_controle', is_controle_reviewer | is_controle_creator)\n", (6220, 6283), False, 'import rules\n'), ((6284, 6361), 'rules.add_rule', 'rules.add_rule', (['"""delete_controle"""', '(is_controle_creator | is_controle_reviewer)'], {}), "('delete_controle', is_controle_creator | is_controle_reviewer)\n", (6298, 6361), False, 'import rules\n'), ((6362, 6418), 'rules.add_rule', 'rules.add_rule', (['"""approve_controle"""', 'is_controle_reviewer'], {}), "('approve_controle', is_controle_reviewer)\n", (6376, 6418), False, 
'import rules\n'), ((6419, 6513), 'rules.add_rule', 'rules.add_rule', (['"""validate_controle_completion"""', '(is_controle_reviewer | is_controle_creator)'], {}), "('validate_controle_completion', is_controle_reviewer |\n is_controle_creator)\n", (6433, 6513), False, 'import rules\n'), ((6559, 6621), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.change_risque"""', 'is_risk_creator'], {}), "('risk_register.change_risque', is_risk_creator)\n", (6573, 6621), False, 'import rules\n'), ((6648, 6711), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.verify_risque"""', 'is_risk_verifier'], {}), "('risk_register.verify_risque', is_risk_verifier)\n", (6662, 6711), False, 'import rules\n'), ((6725, 6795), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.change_processus"""', 'is_process_upper_mgt'], {}), "('risk_register.change_processus', is_process_upper_mgt)\n", (6739, 6795), False, 'import rules\n'), ((6796, 6866), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.delete_processus"""', 'is_process_upper_mgt'], {}), "('risk_register.delete_processus', is_process_upper_mgt)\n", (6810, 6866), False, 'import rules\n'), ((6867, 6969), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.add_activity_to_process"""', '(is_process_manager | is_process_upper_mgt)'], {}), "('risk_register.add_activity_to_process', is_process_manager |\n is_process_upper_mgt)\n", (6881, 6969), False, 'import rules\n'), ((6966, 7061), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.add_process_data"""', '(is_process_manager | is_process_upper_mgt)'], {}), "('risk_register.add_process_data', is_process_manager |\n is_process_upper_mgt)\n", (6980, 7061), False, 'import rules\n'), ((7058, 7130), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.add_process_risk"""', 'rules.is_authenticated'], {}), "('risk_register.add_process_risk', rules.is_authenticated)\n", (7072, 7130), False, 'import rules\n'), ((7144, 7243), 'rules.add_perm', 'rules.add_perm', 
(['"""risk_register.change_activite"""', '(is_activity_supervisor | is_activity_upper_mgt)'], {}), "('risk_register.change_activite', is_activity_supervisor |\n is_activity_upper_mgt)\n", (7158, 7243), False, 'import rules\n'), ((7240, 7339), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.delete_activite"""', '(is_activity_supervisor | is_activity_upper_mgt)'], {}), "('risk_register.delete_activite', is_activity_supervisor |\n is_activity_upper_mgt)\n", (7254, 7339), False, 'import rules\n'), ((7336, 7409), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.add_activity_risk"""', 'rules.is_authenticated'], {}), "('risk_register.add_activity_risk', rules.is_authenticated)\n", (7350, 7409), False, 'import rules\n'), ((7410, 7478), 'rules.add_perm', 'rules.add_perm', (['"""risk_register_complete_activity"""', 'is_activity_owner'], {}), "('risk_register_complete_activity', is_activity_owner)\n", (7424, 7478), False, 'import rules\n'), ((7505, 7623), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.set_seuil_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('risk_register.set_seuil_activity_risk', \n is_activity_risk_supervisor | is_activity_risk_upper_mgt)\n", (7519, 7623), False, 'import rules\n'), ((7619, 7743), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.set_review_date_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('risk_register.set_review_date_activity_risk', \n is_activity_risk_supervisor | is_activity_risk_upper_mgt)\n", (7633, 7743), False, 'import rules\n'), ((7739, 7947), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.add_control_activity_risk"""', '(is_activity_risk_reporter | is_activity_risk_monitor |\n is_activity_risk_owner | is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)'], {}), "('risk_register.add_control_activity_risk', \n is_activity_risk_reporter | is_activity_risk_monitor |\n is_activity_risk_owner | 
is_activity_risk_supervisor |\n is_activity_risk_upper_mgt)\n", (7753, 7947), False, 'import rules\n'), ((7950, 8065), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.assign_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('risk_register.assign_activity_risk', \n is_activity_risk_supervisor | is_activity_risk_upper_mgt)\n", (7964, 8065), False, 'import rules\n'), ((8061, 8210), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.estimate_activity_risk"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt |\n is_activity_risk_reporter)'], {}), "('risk_register.estimate_activity_risk', \n is_activity_risk_supervisor | is_activity_risk_upper_mgt |\n is_activity_risk_reporter)\n", (8075, 8210), False, 'import rules\n'), ((8217, 8365), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.change_activiterisque"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt |\n is_activity_risk_reporter)'], {}), "('risk_register.change_activiterisque', \n is_activity_risk_supervisor | is_activity_risk_upper_mgt |\n is_activity_risk_reporter)\n", (8231, 8365), False, 'import rules\n'), ((8372, 8488), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.delete_activiterisque"""', '(is_activity_risk_supervisor | is_activity_risk_upper_mgt)'], {}), "('risk_register.delete_activiterisque', \n is_activity_risk_supervisor | is_activity_risk_upper_mgt)\n", (8386, 8488), False, 'import rules\n'), ((8509, 8621), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.set_seuil_process_risk"""', '(is_process_risk_monitor | is_process_risk_upper_mgt)'], {}), "('risk_register.set_seuil_process_risk', \n is_process_risk_monitor | is_process_risk_upper_mgt)\n", (8523, 8621), False, 'import rules\n'), ((8617, 8735), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.set_review_date_process_risk"""', '(is_process_risk_upper_mgt | is_process_risk_monitor)'], {}), "('risk_register.set_review_date_process_risk', \n 
is_process_risk_upper_mgt | is_process_risk_monitor)\n", (8631, 8735), False, 'import rules\n'), ((8731, 8900), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.add_control_process_risk"""', '(is_process_risk_owner | is_process_risk_monitor |\n is_process_risk_upper_mgt | is_process_risk_reporter)'], {}), "('risk_register.add_control_process_risk', \n is_process_risk_owner | is_process_risk_monitor |\n is_process_risk_upper_mgt | is_process_risk_reporter)\n", (8745, 8900), False, 'import rules\n'), ((8907, 9015), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.assign_process_risk"""', '(is_process_risk_monitor | is_process_risk_upper_mgt)'], {}), "('risk_register.assign_process_risk', is_process_risk_monitor |\n is_process_risk_upper_mgt)\n", (8921, 9015), False, 'import rules\n'), ((9012, 9178), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.estimate_process_risk"""', '(is_process_risk_upper_mgt | is_process_risk_monitor |\n is_process_risk_owner | is_process_risk_reporter)'], {}), "('risk_register.estimate_process_risk', \n is_process_risk_upper_mgt | is_process_risk_monitor |\n is_process_risk_owner | is_process_risk_reporter)\n", (9026, 9178), False, 'import rules\n'), ((9185, 9328), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.change_processusrisque"""', '(is_process_risk_upper_mgt | is_process_risk_monitor | is_process_risk_reporter\n )'], {}), "('risk_register.change_processusrisque', \n is_process_risk_upper_mgt | is_process_risk_monitor |\n is_process_risk_reporter)\n", (9199, 9328), False, 'import rules\n'), ((9335, 9447), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.delete_processusrisque"""', '(is_process_risk_monitor | is_process_risk_upper_mgt)'], {}), "('risk_register.delete_processusrisque', \n is_process_risk_monitor | is_process_risk_upper_mgt)\n", (9349, 9447), False, 'import rules\n'), ((9458, 9543), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.set_estimation_review_date"""', 
'is_estimation_monitor'], {}), "('risk_register.set_estimation_review_date',\n is_estimation_monitor)\n", (9472, 9543), False, 'import rules\n'), ((9553, 9621), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.assign_control"""', 'is_controle_reviewer'], {}), "('risk_register.assign_control', is_controle_reviewer)\n", (9567, 9621), False, 'import rules\n'), ((9622, 9689), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.complete_control"""', 'is_controle_owner'], {}), "('risk_register.complete_control', is_controle_owner)\n", (9636, 9689), False, 'import rules\n'), ((9690, 9785), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.change_controle"""', '(is_controle_reviewer | is_controle_creator)'], {}), "('risk_register.change_controle', is_controle_reviewer |\n is_controle_creator)\n", (9704, 9785), False, 'import rules\n'), ((9782, 9877), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.delete_controle"""', '(is_controle_creator | is_controle_reviewer)'], {}), "('risk_register.delete_controle', is_controle_creator |\n is_controle_reviewer)\n", (9796, 9877), False, 'import rules\n'), ((9874, 9944), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.approve_controle"""', 'is_controle_reviewer'], {}), "('risk_register.approve_controle', is_controle_reviewer)\n", (9888, 9944), False, 'import rules\n'), ((9945, 10054), 'rules.add_perm', 'rules.add_perm', (['"""risk_register.validate_controle_completion"""', '(is_controle_reviewer | is_controle_creator)'], {}), "('risk_register.validate_controle_completion', \n is_controle_reviewer | is_controle_creator)\n", (9959, 10054), False, 'import rules\n')] |
import os
import uuid
import requests_mock
import zeep
def read_file(file_name, folder="wsdl_ims"):
    """Return the full text of *file_name* stored under *folder* next to this module."""
    base_dir = os.path.dirname(os.path.realpath(__file__))
    path = os.path.join(base_dir, folder, file_name)
    with open(path) as handle:
        return handle.read()
def test_find_customer():
    """Exercise GetAllCustomersFiltered against mocked IMS HTTP endpoints."""
    with requests_mock.mock() as mocked:
        mocked.get("http://example.com/inventory?wsdl", text=read_file("inventory.wsdl"))
        mocked.post(
            "http://example.com/Inventory/inventoryhttps",
            text=read_file("find_customer_by_name_response.xml", "mock_ims"),
        )
        # strict=False tolerates responses that do not match the schema
        # exactly (with strict=True such data would surface in _raw_elements)
        soap_client = zeep.Client(
            "http://example.com/inventory?wsdl",
            settings=zeep.settings.Settings(strict=False),
        )
        name_filter = {
            "FilterField": {
                "Name": "Name",
                "SelectedOperator": "OperationEquals",
                "Value": "SURFNET",
            }
        }
        ims_filter = {"Filters": [name_filter]}
        pager = {
            "StartElement": 0,
            "Descending": False,
            "NumberOfElements": 10,
            "OrderByProperty": None,
        }
        result = soap_client.service.GetAllCustomersFiltered(
            pager=pager, filter=ims_filter, sessionToken=str(uuid.uuid4())
        )
        customer = result.GetAllCustomersFilteredResult.Customer[0]
        assert customer.Id == 2644557
        assert customer.Name == "SURFNET"
| [
"os.path.realpath",
"requests_mock.mock",
"zeep.settings.Settings",
"uuid.uuid4"
] | [((279, 299), 'requests_mock.mock', 'requests_mock.mock', ([], {}), '()\n', (297, 299), False, 'import requests_mock\n'), ((144, 170), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (160, 170), False, 'import os\n'), ((731, 767), 'zeep.settings.Settings', 'zeep.settings.Settings', ([], {'strict': '(False)'}), '(strict=False)\n', (753, 767), False, 'import zeep\n'), ((1361, 1373), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1371, 1373), False, 'import uuid\n')] |
"""
Entry Point
"""
import asyncio
from dotplug.main import main
from dotplug.console import ncurses
def _main():
    """Entry point: switch asyncio over to uvloop, run the application
    under an ncurses screen, then pause for a final keypress."""
    import uvloop

    policy = uvloop.EventLoopPolicy()
    asyncio.set_event_loop_policy(policy)
    with ncurses():
        asyncio.run(main())
    input("")
| [
"dotplug.console.ncurses",
"dotplug.main.main",
"uvloop.EventLoopPolicy"
] | [((170, 194), 'uvloop.EventLoopPolicy', 'uvloop.EventLoopPolicy', ([], {}), '()\n', (192, 194), False, 'import uvloop\n'), ((206, 215), 'dotplug.console.ncurses', 'ncurses', ([], {}), '()\n', (213, 215), False, 'from dotplug.console import ncurses\n'), ((237, 243), 'dotplug.main.main', 'main', ([], {}), '()\n', (241, 243), False, 'from dotplug.main import main\n')] |
#coding:utf-8
from mininet.net import Mininet
from mininet.topo import LinearTopo
from mininet.cli import CLI
# from eventlet import greenthread
import argparse
import threading
import re
from time import sleep
import logging
# Module logger: INFO-level records are appended to hahalog.txt in the cwd.
logger = logging.getLogger(__name__)
logger.setLevel(level = logging.INFO)
handler = logging.FileHandler("hahalog.txt")
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
# import eventlet
# four switches, each with one host attached underneath
# import ctypes
# Destination directories for the per-flow iperf3 server/client log files.
IPERF_SERVER_LOG_DIR = '/root/ez-segway/logs/iperflogs/server/'
IPERF_CLIENT_LOG_DIR = '/root/ez-segway/logs/iperflogs/client/'
class DataSender(object):
    """Drive iperf3 traffic between Mininet hosts from a flow config file.

    Each non-empty config line is tab-separated:
        uuid<TAB>(src_idx,dst_idx)<TAB>port<TAB>volume<TAB>seconds<TAB>goal
    where src_idx/dst_idx index into ``net.hosts``.
    """

    def __init__(self, net, filepath, wait_time):
        self.net = net            # started Mininet instance; hosts indexed by the config
        self.filepath = filepath  # path of the flow configuration file
        self.wait_time = wait_time
        self.conf = []            # all parsed flow configurations (list of dicts)
        self.srv_ports = []       # (server, port) pairs that already run an iperf3 server

    def read_conf(self, filepath):
        """Parse the tab-separated flow file into ``self.conf``.

        Blank lines are skipped; malformed lines raise (fail fast).
        """
        # 'with' guarantees the file handle is closed (the old code leaked it)
        with open(filepath, 'r') as f:
            for raw in f:
                line = raw.strip('\n')
                if not line:
                    continue
                des = line.split("\t")
                nodes_index = des[1].strip('(').strip(')').split(',')
                # Use self.net (the instance we were constructed with) — the
                # original referenced the module-global `net` by accident.
                hosts = (self.net.hosts[int(nodes_index[0])],
                         self.net.hosts[int(nodes_index[1])])
                obj = {
                    'uuid': des[0],
                    'hosts': hosts,
                    'port': int(des[2]),
                    'vol': des[3] + 'M',      # iperf3 bandwidth spec, e.g. '10M'
                    'seconds': float(des[4]),
                    'goal': float(des[5]),
                }
                self.conf.append(obj)

    def _iperf(self, hosts, l4Type="UDP", udpBw='10M', fmt=None, seconds=10, port=5001, uuid=None):
        """Start an iperf3 server on hosts[0] (once per (host, port)) and a
        client on hosts[1]; output goes to per-flow log files named by *uuid*.
        """
        server, client = hosts
        if (server, port) not in self.srv_ports:
            self.srv_ports.append((server, port))
            server.cmd('iperf3 -s -p %d -i 1 > ' % port + IPERF_SERVER_LOG_DIR + 'server%s.txt&' % uuid)
            logger.info('iperf3 -s -p %d' % port)  # was misspelled 'operf'
        iperfArgs = 'iperf3 -p %d ' % port
        bwArgs = ''
        if l4Type == 'UDP':
            iperfArgs += '-u '
            bwArgs = '-b ' + udpBw + ' '
        elif l4Type != 'TCP':
            raise Exception('Unexpected l4 type: %s' % l4Type)
        if fmt:
            iperfArgs += '-f %s ' % fmt
        if l4Type == 'TCP':
            # waitListening lives in mininet.util and was never imported at
            # module level, so the original raised NameError on the TCP path.
            from mininet.util import waitListening
            if not waitListening(client, server.IP(), port):
                raise Exception('Could not connect to iperf on port %d'
                                % port)
        client.cmd(iperfArgs + '-t %d -i 1 -c ' % seconds +
                   server.IP() + ' ' + bwArgs + ' > ' + IPERF_CLIENT_LOG_DIR + 'client%s.txt &' % uuid)
        logger.info(iperfArgs + '-t %d -c ' % seconds +
                    server.IP() + ' ' + bwArgs)

    def send_iperfs(self):
        """Launch one iperf3 flow for every entry parsed by read_conf()."""
        for c in self.conf:
            self._iperf(hosts=c['hosts'], l4Type="UDP", udpBw=c['vol'],
                        seconds=c['seconds'], port=c['port'], uuid=c['uuid'])
if __name__ == "__main__":
    # CLI: --iperf 1 starts the configured flows, --filepath points at the
    # tab-separated flow description file.
    parser = argparse.ArgumentParser(description='ctrl')
    parser.add_argument('--iperf', nargs='?',
                        type=int, default=0)
    parser.add_argument('--filepath', nargs='?',
                        type=str, default=None)
    args = parser.parse_args()
    iperf = args.iperf
    filepath = args.filepath
    print(filepath)
    wait_time = 10
    # Linear topology: 4 switches, one host attached to each.
    Linear4 = LinearTopo(k=4)
    net = Mininet(topo=Linear4)
    net.start()
    if filepath:
        ds = DataSender(net, filepath, wait_time)
        ds.read_conf(ds.filepath)
        logger.info(ds.conf)
        # Only send traffic when a flow config was actually loaded;
        # previously `--iperf` without `--filepath` crashed with a
        # NameError because `ds` was never bound.
        if iperf:
            ds.send_iperfs()
    CLI(net)
    net.stop()
| [
"logging.getLogger",
"argparse.ArgumentParser",
"logging.Formatter",
"mininet.cli.CLI",
"mininet.topo.LinearTopo",
"logging.FileHandler",
"mininet.net.Mininet"
] | [((238, 265), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (255, 265), False, 'import logging\n'), ((314, 348), 'logging.FileHandler', 'logging.FileHandler', (['"""hahalog.txt"""'], {}), "('hahalog.txt')\n", (333, 348), False, 'import logging\n'), ((392, 465), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (409, 465), False, 'import logging\n'), ((3131, 3174), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""ctrl"""'}), "(description='ctrl')\n", (3154, 3174), False, 'import argparse\n'), ((3500, 3515), 'mininet.topo.LinearTopo', 'LinearTopo', ([], {'k': '(4)'}), '(k=4)\n', (3510, 3515), False, 'from mininet.topo import LinearTopo\n'), ((3526, 3547), 'mininet.net.Mininet', 'Mininet', ([], {'topo': 'Linear4'}), '(topo=Linear4)\n', (3533, 3547), False, 'from mininet.net import Mininet\n'), ((3748, 3756), 'mininet.cli.CLI', 'CLI', (['net'], {}), '(net)\n', (3751, 3756), False, 'from mininet.cli import CLI\n')] |
#!/usr/bin/env python3
import collections
import datetime
import glob
import html
import re
import sys
# this is a mess right now, feel free to make it less bad if you feel like it
# Polyfill: datetime.datetime.fromisoformat only exists on Python 3.7+.
try:
    # python 3.7+
    datetime.datetime.fromisoformat
except AttributeError:
    # Fallback for older interpreters: drop sub-second digits and the colon
    # inside the UTC offset so strptime's %z directive can parse the string.
    # not fully correct, but good enough for this use case
    adjtz_re = re.compile(r"([-+][0-9]+):([0-9]+)")
    rmmil_re = re.compile(r"\..[0-9]*")
    class ___datetime(datetime.datetime):
        @staticmethod
        def fromisoformat(f):
            return datetime.datetime.strptime(adjtz_re.sub(r"\1\2", rmmil_re.sub("", f)), "%Y-%m-%dT%H:%M:%S%z")
    # Monkey-patch the class so the rest of the module can call
    # datetime.datetime.fromisoformat uniformly on any version.
    datetime.datetime = ___datetime
    del ___datetime
Entry = collections.namedtuple("Entry", ["timestamp", "dead", "type", "fields"])
Member = collections.namedtuple(
    "Member", ["name", "discriminator", "avatar", "nick", "roles"]
)
Role = collections.namedtuple("Role", ["name", "color", "pos", "perms", "hoist"])


def mkrole(name, color, pos, perms, hoist=''):
    """Build a Role from raw TSV string fields, coercing numeric/flag types."""
    return Role(name=name, color=int(color), pos=int(pos),
                perms=int(perms), hoist=bool(hoist))
def read_guild(f):
    """Parse a guild TSV log into per-entity version histories.

    Each input line is: timestamp, <ignored>, op, type, id, *fields.
    Returns a dict keyed by entity type ("guild", "channel", "member",
    "role", "emoji"); each value maps entity id -> list of Entry versions
    in log order.  Entry.dead is True for any op other than "add".
    """
    g = {
        "guild": {},
        "channel": {},
        "member": {},
        "role": {},
        "emoji": {},
    }
    for l in f.readlines():
        ts, _, op, type, id, *rest = l.strip().split("\t")
        if type not in g:
            # unknown entity types are silently ignored
            continue
        if id not in g[type]:
            g[type][id] = []
        if type == "member":
            # Trailing member fields (avatar, nick, roles) are optional in
            # the log, so peel them off one at a time with defaults.
            name, discriminator, *rest = rest
            if rest:
                avatar, *rest = rest
            else:
                avatar = None
            if rest:
                nick, *rest = rest
            else:
                nick = None
            if rest:
                roles, *rest = rest
            else:
                roles = ''
            rest = Member(name, int(discriminator), avatar, nick, roles)
        elif type == "role":
            rest = mkrole(*rest)
        g[type][id].append(Entry(datetime.datetime.fromisoformat(ts), op != "add", type, rest))
    return g
class Message:
    """One Discord message reconstructed from a channel TSV log."""

    def __init__(self, id, author):
        self.id = id
        self.author = author
        self.content = None
        self.editedtime = None
        self.deletedtime = None
        self.attachments = []
        self.embeds = []

    def __str__(self):
        details = [self.content, self.editedtime, self.deletedtime,
                   self.attachments, self.embeds]
        return f"<Message {self.id} by {self.author} " + str(details)

    def timestamp(self):
        """Unix time (seconds): the id's upper bits are milliseconds past
        a fixed epoch of 1420070400000 ms (2015-01-01 UTC)."""
        millis = (int(self.id) >> 22) + 1420070400000
        return millis / 1000
member_re = re.compile("<@!?([0-9]+)>")
role_re = re.compile("<@&([0-9]+)>")
channel_re = re.compile("<#([0-9]+)>") # TODO
def mention(guild, date, msg, wrap=lambda a: a):
    """Replace raw member/role mention tags in *msg* with readable @names.

    Names are resolved against *guild* as of *date*; ids with no recorded
    history are left untouched.  *wrap* lets the caller decorate each
    resolved name (e.g. with an HTML span).
    """
    def resolve_member(match):
        uid = match.group(1)
        versions = guild["member"].get(uid)
        if not versions:
            return match.group(0)
        member = close_to(versions, date).fields
        return wrap("@" + (member.nick or member.name))
    def resolve_role(match):
        rid = match.group(1)
        versions = guild["role"].get(rid)
        if not versions:
            return match.group(0)
        role = close_to(versions, date).fields
        return wrap("@" + role[0])
    # members first, then roles — same order as the original substitutions
    return role_re.sub(resolve_role, member_re.sub(resolve_member, msg))
def unescape_msg(msg):
    """Undo the log's escaping: \\n, \\t and \\\\ back to real characters.

    Replacement order matters and matches the original chain.
    """
    for escaped, literal in (("\\n", "\n"), ("\\t", "\t"), ("\\\\", "\\")):
        msg = msg.replace(escaped, literal)
    return msg
def read_channel(f):
    """Parse a channel TSV log into an ordered id -> Message mapping.

    Attachment rows may precede their message row in the log, so they are
    buffered in attachbuf until the owning message appears.  Deleted
    messages are currently dropped entirely.  Reaction handling is a
    placeholder (the branch body is just ``...``), so reactbuf is never
    actually filled.
    """
    msgs = collections.OrderedDict()
    attachbuf = (None, [])   # (pending message id, attachment ids seen so far)
    reactbuf = (None, [])
    for l in f.readlines():
        ts, _, op, type, id, *rest = l.strip().split("\t")
        if type == "message":
            if op == "del":
                if id in msgs:
                    del msgs[id] # TODO: show deletions
                continue
            authorid, *rest = rest
            msgs[id] = Message(id, authorid)
            # attach anything that was buffered before this message arrived
            if attachbuf[0] == id:
                msgs[id].attachments = attachbuf[1]
                attachbuf = (None, [])
            if reactbuf[0] == id:
                msgs[id].reactions = reactbuf[1]
                reactbuf = (None, [])
            if rest:
                editedtime, *rest = rest
                msgs[id].editedtime = editedtime if editedtime else None
            # FIXME: handle variable number of fields properly instead of doing these kinds of hacks
            if rest:
                tts, content, *_ = rest
                msgs[id].content = unescape_msg(content)
        elif type == "attachment":
            msgid, *_ = rest
            if msgid in msgs:
                msgs[msgid].attachments.append(id)
            else:
                # message not seen yet: buffer, but only for one message at a time
                if attachbuf[0] is None:
                    attachbuf = (msgid, [])
                elif attachbuf[0] != msgid:
                    raise Exception(f"attachbuf id mismatch ({attachbuf[0]} != {msgid})")
                attachbuf[1].append(id)
        elif type == "reaction":
            ...
    return msgs
def close_to(versions, dt):
    """Return the newest version recorded strictly before *dt*.

    Falls back to the first entry when every later version is at or
    after *dt*.
    """
    idx = 1
    while idx < len(versions) and versions[idx].timestamp < dt:
        idx += 1
    return versions[idx - 1]
def print_text(guild, cid, msgs):
    """Dump channel *msgs* to stdout as plain text, one message per line.

    Author names and mentions are resolved against *guild* as of each
    message's timestamp; attachments are printed as CDN URLs built from
    the on-disk attachments/<cid>/<id>/ layout.
    """
    for _, m in msgs.items():
        # resolve the author's name as it was when the message was sent
        date = datetime.datetime.fromtimestamp(m.timestamp(), datetime.timezone.utc)
        author = close_to(guild["member"][m.author], date).fields
        print(f"[{date.strftime('%Y-%m-%d %H:%M:%S')}] {author.nick or author.name}: ", end="")
        if m.content:
            print(mention(guild, date, m.content), end=" ")
        if m.attachments:
            for a in m.attachments:
                path = f"attachments/{cid}/{a}/"
                # each attachment dir is expected to hold exactly one file
                path = glob.glob(f"{path}/*")[0]
                url = "https://cdn.discordapp.com/" + path
                # TODO: use attachment name from the log if present
                print(f"{url} ", end="")
        print()
# TODO: animated emoji (<a:name:id> tags) are not matched by this pattern
emoji_re = re.compile("<:([^:]+):([0-9]+)>")
def emoji_img(m):
    """Render an emoji_re match as an <img> tag pointing at the dumped emoji file.

    The emoji name is HTML-escaped before being placed in the title attribute.
    """
    # renamed from `name, id` — `id` shadowed the builtin
    emoji_name, emoji_id = m.groups()
    return f'<img class="emoji" title=":{html.escape(emoji_name)}:" src="emojis/{emoji_id}.png">'
def print_html(guild, cid, msgs):
    """Render channel *msgs* to stdout as HTML.

    Consecutive messages by the same author are grouped into one
    div.msg block; message content goes through mention/emoji
    substitution and a restricted Markdown converter before printing.
    """
    import markdown
    md = markdown.Markdown(
        extensions=[
            "nl2br",
            "discord_mdext.fenced_code",
            "discord_mdext.strikethrough",
            "discord_mdext.standard_subset",
            "mdx_urlize",
        ]
    )
    first = True
    lastauthor = None
    for _, m in msgs.items():
        date = datetime.datetime.fromtimestamp(m.timestamp(), datetime.timezone.utc)
        author = close_to(guild["member"][m.author], date).fields
        # roles sorted ascending by position; the last one decides the name color
        roles = sorted(((r, close_to(guild["role"][r], date).fields) for r in author.roles.split(',')), key=lambda r: r[1].pos)
        if lastauthor != m.author:
            # new author: close the previous message group and open a new one
            if not first:
                print("</div></div>")
            first = False
            lastauthor = m.author
            print('<div class="msg">')
            print('  <div class="msg-left">')
            # fall back to the default avatar derived from the discriminator
            av = glob.glob(f"avatars/{m.author}/{author.avatar}.*")
            av = av[0] if av else f"embed/avatars/{author.discriminator%5}.png"
            print(f'    <img class="msg-avatar" src="{html.escape(av)}">')
            print("  </div>")
            print('  <div class="msg-right">')
            print(f'    <span class="msg-user"', end="")
            if roles[-1][1].color:
                print(f" style=\"color: #{'%x' % roles[-1][1].color}\"", end="")
            print(f' title="{html.escape(author.name)}#{author.discriminator:04d}">{html.escape(author.nick or author.name)}</span>')
            print('    <span class="msg-date">', end="")
            print(f"{date.strftime('%Y-%m-%d %H:%M:%S')}</span>")
        if m.content:
            print("    ", end="")
            print('<div class="msg-content">', end="")
            msg = mention(guild, date, m.content, lambda c: '<span class="mention">' + c + '</span>')
            msg = emoji_re.sub(emoji_img, msg)
            msg = md.convert(msg)
            # annoying hack, we can't pass <div class="msg-content"> to prevent
            # adding <p>s since markdown doesn't process the text inside the div
            if msg.startswith("<p>"):
                msg = msg[len("<p>"):]
            if msg.endswith("</p>"):
                msg = msg[:-len("</p>")]
            msg = re.sub("</p>\n<p>", "<br /><br />", msg, flags=re.MULTILINE)
            msg = re.sub("</?p>", "", msg)
            print(msg, end="")
            print("</div>")
        if m.attachments:
            for a in m.attachments:
                path = f"attachments/{cid}/{a}/"
                path = glob.glob(f"{path}/*")[0]
                # TODO: use attachment name from the log if present
                print('    <div class="msg-attachment">')
                print(f'      <a href="{html.escape(path)}">')
                # TODO: handle other file types
                print(f'      <img class="msg-attachment" src="{html.escape(path)}">')
                print("      </a>\n    </div>")
if __name__ == "__main__":
    # Usage: script.py <guild_id> <channel_id>; extra arguments are ignored.
    _, gid, cid, *_ = sys.argv
    guild_log = f"channels/{gid}/guild.tsv"
    channel_log = f"channels/{gid}/{cid}.tsv"
    with open(guild_log, newline="\n") as guild_file:
        guild = read_guild(guild_file)
    with open(channel_log, newline="\n") as channel_file:
        msgs = read_channel(channel_file)
    print_html(guild, cid, msgs)
| [
"collections.OrderedDict",
"markdown.Markdown",
"collections.namedtuple",
"re.compile",
"datetime.datetime.fromisoformat",
"re.sub",
"html.escape",
"glob.glob"
] | [((643, 715), 'collections.namedtuple', 'collections.namedtuple', (['"""Entry"""', "['timestamp', 'dead', 'type', 'fields']"], {}), "('Entry', ['timestamp', 'dead', 'type', 'fields'])\n", (665, 715), False, 'import collections\n'), ((725, 815), 'collections.namedtuple', 'collections.namedtuple', (['"""Member"""', "['name', 'discriminator', 'avatar', 'nick', 'roles']"], {}), "('Member', ['name', 'discriminator', 'avatar', 'nick',\n 'roles'])\n", (747, 815), False, 'import collections\n'), ((819, 893), 'collections.namedtuple', 'collections.namedtuple', (['"""Role"""', "['name', 'color', 'pos', 'perms', 'hoist']"], {}), "('Role', ['name', 'color', 'pos', 'perms', 'hoist'])\n", (841, 893), False, 'import collections\n'), ((2177, 2204), 're.compile', 're.compile', (['"""<@!?([0-9]+)>"""'], {}), "('<@!?([0-9]+)>')\n", (2187, 2204), False, 'import re\n'), ((2215, 2241), 're.compile', 're.compile', (['"""<@&([0-9]+)>"""'], {}), "('<@&([0-9]+)>')\n", (2225, 2241), False, 'import re\n'), ((2255, 2280), 're.compile', 're.compile', (['"""<#([0-9]+)>"""'], {}), "('<#([0-9]+)>')\n", (2265, 2280), False, 'import re\n'), ((4860, 4893), 're.compile', 're.compile', (['"""<:([^:]+):([0-9]+)>"""'], {}), "('<:([^:]+):([0-9]+)>')\n", (4870, 4893), False, 'import re\n'), ((2923, 2948), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (2946, 2948), False, 'import collections\n'), ((5076, 5230), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': "['nl2br', 'discord_mdext.fenced_code', 'discord_mdext.strikethrough',\n 'discord_mdext.standard_subset', 'mdx_urlize']"}), "(extensions=['nl2br', 'discord_mdext.fenced_code',\n 'discord_mdext.strikethrough', 'discord_mdext.standard_subset',\n 'mdx_urlize'])\n", (5093, 5230), False, 'import markdown\n'), ((327, 362), 're.compile', 're.compile', (['"""([-+][0-9]+):([0-9]+)"""'], {}), "('([-+][0-9]+):([0-9]+)')\n", (337, 362), False, 'import re\n'), ((376, 400), 're.compile', 're.compile', 
(['"""\\\\..[0-9]*"""'], {}), "('\\\\..[0-9]*')\n", (386, 400), False, 'import re\n'), ((4973, 4990), 'html.escape', 'html.escape', (['name'], {}), '(name)\n', (4984, 4990), False, 'import html\n'), ((5757, 5807), 'glob.glob', 'glob.glob', (['f"""avatars/{m.author}/{author.avatar}.*"""'], {}), "(f'avatars/{m.author}/{author.avatar}.*')\n", (5766, 5807), False, 'import glob\n'), ((6875, 6935), 're.sub', 're.sub', (['"""</p>\n<p>"""', '"""<br /><br />"""', 'msg'], {'flags': 're.MULTILINE'}), "('</p>\\n<p>', '<br /><br />', msg, flags=re.MULTILINE)\n", (6881, 6935), False, 'import re\n'), ((6945, 6969), 're.sub', 're.sub', (['"""</?p>"""', '""""""', 'msg'], {}), "('</?p>', '', msg)\n", (6951, 6969), False, 'import re\n'), ((1653, 1688), 'datetime.datetime.fromisoformat', 'datetime.datetime.fromisoformat', (['ts'], {}), '(ts)\n', (1684, 1688), False, 'import datetime\n'), ((4656, 4678), 'glob.glob', 'glob.glob', (['f"""{path}/*"""'], {}), "(f'{path}/*')\n", (4665, 4678), False, 'import glob\n'), ((7107, 7129), 'glob.glob', 'glob.glob', (['f"""{path}/*"""'], {}), "(f'{path}/*')\n", (7116, 7129), False, 'import glob\n'), ((5922, 5937), 'html.escape', 'html.escape', (['av'], {}), '(av)\n', (5933, 5937), False, 'import html\n'), ((6161, 6185), 'html.escape', 'html.escape', (['author.name'], {}), '(author.name)\n', (6172, 6185), False, 'import html\n'), ((6216, 6255), 'html.escape', 'html.escape', (['(author.nick or author.name)'], {}), '(author.nick or author.name)\n', (6227, 6255), False, 'import html\n'), ((7258, 7275), 'html.escape', 'html.escape', (['path'], {}), '(path)\n', (7269, 7275), False, 'import html\n'), ((7367, 7384), 'html.escape', 'html.escape', (['path'], {}), '(path)\n', (7378, 7384), False, 'import html\n')] |
# -*- coding: utf-8 -*-
"""
============================================================================
Authors:
<NAME> and <NAME>*
*Department of Informatics
Universidad Nacional de San Antonio Abad del Cusco (UNSAAC) - Perú
============================================================================
"""
# Python: 3.8.x
"""
Script for evaluate best topology (static and dinamic) about convergence
"""
import matplotlib.pyplot as plt
from matplotlib.ticker import NullFormatter # useful for `logit` scale
import numpy as np
from utils import topology, dataset
print("******* START *******")
# Per-dataset topology rankings (1 = best, 9 = worst) for datasets d20..d29.
# Empty lists mark datasets with no recorded ranking (d21, d29) — they must
# not appear in the dataset loop below or indexing would fail.
dataset_topology = [
    [9, 5, 2, 3, 7, 8, 4, 1, 6], # d 20
    [], # d 21
    [5, 7, 2, 6, 8, 9, 3, 1, 4], # d 22
    [9, 5, 1, 2, 8, 3, 4, 6, 7], # d 23
    [8, 7, 1, 5, 2, 4, 3, 6, 9], # d 24
    [7, 8, 1, 5, 6, 4, 2, 3, 9], # d 25
    [9, 8, 4, 5, 1, 2, 6, 7, 3], # d 26
    [7, 4, 1, 2, 8, 9, 3, 5, 6], # d 27
    [8, 6, 3, 4, 5, 7, 1, 2, 9], # d 28
    [] # d 29
]
rankig_low = [] # ranking metric for low dataset
rankig_high = [] # ranking metric for high dataset
rankig_all = [] # ranking metric low and high dataset
# For each of the 9 topologies, sum its ranks over the low-size datasets
# (d20..d25), the high-size ones (d26..d28), and all of them together.
for index, index_topology in enumerate([0, 1, 2, 3, 4, 5, 6, 7, 8]): # change [0, 1, 2, 3, 4, 5, 6, 7, 8]
    # load data for plot
    rankig_l = []
    rankig_h = []
    rankig_a = []
    for index_dataset in [20, 22, 23, 24, 25, 26, 27, 28]: # change [0, ..., 29]
        if index_dataset >= 26:
            rankig_h.append(dataset_topology[index_dataset - 20][index])
        else:
            rankig_l.append(dataset_topology[index_dataset - 20][index])
        rankig_a.append(dataset_topology[index_dataset - 20][index])
    rankig_low.append(np.sum(rankig_l))
    rankig_high.append(np.sum(rankig_h))
    rankig_all.append(np.sum(rankig_a))
labels = topology
# rankig_low = [20, 34, 30, 35, 27]
# rankig_high = [25, 32, 34, 20, 25]
x = np.arange(len(labels)) # the label locations
width = 0.25 # the width of the bars
fig, ax = plt.subplots()
rects1 = ax.bar(x - width / 2, rankig_low, width, label='Low size')
rects2 = ax.bar(x + width / 2, rankig_high, width, label='High size')
"""rects1 = ax.bar(x - width, rankig_low, width, label='Low size')
rects2 = ax.bar(x, rankig_high, width, label='High size')
rects3 = ax.bar(x + width, rankig_all, width, label='All') """
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel("Scores")
ax.set_xlabel("Topology")
ax.set_title("Best Topology")
ax.set_xticks(x)
ax.set_xticklabels(labels)
ax.legend()
def autolabel(rects):
    """Write each bar's height just above the bar, centred horizontally."""
    for bar in rects:
        height = bar.get_height()
        anchor = (bar.get_x() + bar.get_width() / 2, height)
        ax.annotate('{}'.format(height),
                    xy=anchor,
                    xytext=(0, 3),  # 3 points vertical offset
                    textcoords="offset points",
                    ha='center', va='bottom')
# Annotate both bar groups with their exact scores, then render.
autolabel(rects1)
autolabel(rects2)
# autolabel(rects3)
fig.tight_layout()
plt.grid()
plt.show()
print("******* END *******")
# Run:
# python graphic_convergence_topology.py
"numpy.sum",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((1903, 1917), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (1915, 1917), True, 'import matplotlib.pyplot as plt\n'), ((2959, 2969), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (2967, 2969), True, 'import matplotlib.pyplot as plt\n'), ((2970, 2980), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2978, 2980), True, 'import matplotlib.pyplot as plt\n'), ((1618, 1634), 'numpy.sum', 'np.sum', (['rankig_l'], {}), '(rankig_l)\n', (1624, 1634), True, 'import numpy as np\n'), ((1656, 1672), 'numpy.sum', 'np.sum', (['rankig_h'], {}), '(rankig_h)\n', (1662, 1672), True, 'import numpy as np\n'), ((1693, 1709), 'numpy.sum', 'np.sum', (['rankig_a'], {}), '(rankig_a)\n', (1699, 1709), True, 'import numpy as np\n')] |
import os.path as osp
import os
import pylab as plt
import gc
import argparse
from utils import read_image
# Command-line options; note that parse_args() runs at import time, so
# importing this module requires a compatible sys.argv.
parser = argparse.ArgumentParser(description='Plot rank-5 results of S-ReID, SP-ReID and SSP-ReID')
parser.add_argument('-d', '--dataset', type=str, default='market1501')
# Architecture
parser.add_argument('-a', '--arch', type=str, default='resnet50')
parser.add_argument('--save-dir', type=str, default='log/tmp')
args = parser.parse_args()
def plot(images, save_name):
    """Stack *images* vertically in a single figure and write it to *save_name*.

    The figure is cleared, closed and garbage-collected afterwards so that
    batch-processing many images does not accumulate matplotlib state.
    """
    num_figs = len(images)
    fig = plt.figure(figsize=(30, 20))
    for i, img in enumerate(images):
        fig.add_subplot(num_figs, 1, i + 1)
        plt.imshow(img)
        plt.axis('off')
    fig.savefig(save_name, bbox_inches='tight')
    fig.clf()
    # Close this exact figure (plt.close() with no argument only closes the
    # *current* one).  The old `del a` is gone: it raised NameError whenever
    # `images` was empty, since `a` was only bound inside the loop.
    plt.close(fig)
    gc.collect()
def combine_fig(file_name, salience_dir, parsing_dir, salience_parsing_dir, save_dir):
    """Read the same figure from the three result directories and stack them
    into a single comparison image under *save_dir*."""
    source_dirs = (salience_dir, parsing_dir, salience_parsing_dir)
    images = [read_image(osp.join(d, file_name)) for d in source_dirs]
    plot(images, osp.join(save_dir, file_name))
def main():
    """For every ranked image, merge the S-ReID, SP-ReID and SSP-ReID
    outputs into one side-by-side comparison figure."""
    dataset = args.dataset
    model = args.arch
    salience_dir = osp.join('log/', '{}-salience-{}/-1'.format(model, dataset))
    parsing_dir = osp.join('log/', '{}-parsing-{}/-1'.format(model, dataset))
    salience_parsing_dir = osp.join('log/', '{}-salience-parsing-{}/-1'.format(model, dataset))
    save_dir = osp.join(args.save_dir, '{}-improvement-{}'.format(model, dataset))
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    # every file present in the salience run is expected in the other two
    for img_name in os.listdir(salience_dir):
        combine_fig(img_name, salience_dir, parsing_dir, salience_parsing_dir, save_dir)


if __name__ == '__main__':
    main()
"pylab.axis",
"os.path.exists",
"os.listdir",
"argparse.ArgumentParser",
"os.makedirs",
"utils.read_image",
"os.path.join",
"pylab.close",
"pylab.figure",
"gc.collect",
"pylab.imshow"
] | [((119, 214), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Plot rank-5 results of S-ReID, SP-ReID and SSP-ReID"""'}), "(description=\n 'Plot rank-5 results of S-ReID, SP-ReID and SSP-ReID')\n", (142, 214), False, 'import argparse\n'), ((522, 550), 'pylab.figure', 'plt.figure', ([], {'figsize': '(30, 20)'}), '(figsize=(30, 20))\n', (532, 550), True, 'import pylab as plt\n'), ((753, 764), 'pylab.close', 'plt.close', ([], {}), '()\n', (762, 764), True, 'import pylab as plt\n'), ((779, 791), 'gc.collect', 'gc.collect', ([], {}), '()\n', (789, 791), False, 'import gc\n'), ((900, 933), 'os.path.join', 'osp.join', (['salience_dir', 'file_name'], {}), '(salience_dir, file_name)\n', (908, 933), True, 'import os.path as osp\n'), ((953, 985), 'os.path.join', 'osp.join', (['parsing_dir', 'file_name'], {}), '(parsing_dir, file_name)\n', (961, 985), True, 'import os.path as osp\n'), ((1014, 1055), 'os.path.join', 'osp.join', (['salience_parsing_dir', 'file_name'], {}), '(salience_parsing_dir, file_name)\n', (1022, 1055), True, 'import os.path as osp\n'), ((1072, 1101), 'os.path.join', 'osp.join', (['save_dir', 'file_name'], {}), '(save_dir, file_name)\n', (1080, 1101), True, 'import os.path as osp\n'), ((1717, 1741), 'os.listdir', 'os.listdir', (['salience_dir'], {}), '(salience_dir)\n', (1727, 1741), False, 'import os\n'), ((646, 661), 'pylab.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (656, 661), True, 'import pylab as plt\n'), ((670, 685), 'pylab.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (678, 685), True, 'import pylab as plt\n'), ((1117, 1142), 'utils.read_image', 'read_image', (['salience_file'], {}), '(salience_file)\n', (1127, 1142), False, 'from utils import read_image\n'), ((1144, 1168), 'utils.read_image', 'read_image', (['parsing_file'], {}), '(parsing_file)\n', (1154, 1168), False, 'from utils import read_image\n'), ((1170, 1203), 'utils.read_image', 'read_image', (['salience_parsing_file'], {}), 
'(salience_parsing_file)\n', (1180, 1203), False, 'from utils import read_image\n'), ((1644, 1668), 'os.path.exists', 'os.path.exists', (['save_dir'], {}), '(save_dir)\n', (1658, 1668), False, 'import os\n'), ((1678, 1699), 'os.makedirs', 'os.makedirs', (['save_dir'], {}), '(save_dir)\n', (1689, 1699), False, 'import os\n')] |