| code (stringlengths 22 to 1.05M) | apis (listlengths 1 to 3.31k) | extract_api (stringlengths 75 to 3.25M) |
|---|---|---|
import os
import csv
import six
import pandas as pd
import numpy as np
from ..core import STReader, SortOrder, find_conversion
from ..core import csv_headers, col_population, pop_na, col_timestamps, col_node_ids
# TODO: Won't work with a CSV file that lacks a population column. Update so that if there are no populations
# it will fall back to a lookup by node_id only.
def write_csv(path, spiketrain_reader, mode='w', sort_order=SortOrder.none, include_header=True,
include_population=True, units='ms', **kwargs):
path_dir = os.path.dirname(path)
if path_dir and not os.path.exists(path_dir):
os.makedirs(path_dir)
conv_factor = find_conversion(spiketrain_reader.units, units)
with open(path, mode=mode) as f:
if include_population:
            # Include the population column
csv_writer = csv.writer(f, delimiter=' ')
if include_header:
csv_writer.writerow(csv_headers)
for spk in spiketrain_reader.spikes(sort_order=sort_order):
csv_writer.writerow([spk[0]*conv_factor, spk[1], spk[2]])
else:
# Don't write the Population column
csv_writer = csv.writer(f, delimiter=' ')
if include_header:
csv_writer.writerow([c for c in csv_headers if c != col_population])
for spk in spiketrain_reader.spikes(sort_order=sort_order):
csv_writer.writerow([spk[0]*conv_factor, spk[2]])
class CSVSTReader(STReader):
def __init__(self, path, sep=' ', **kwargs):
self._n_spikes = None
self._populations = None
try:
# check to see if file contains headers
with open(path, 'r') as csvfile:
sniffer = csv.Sniffer()
has_headers = sniffer.has_header(csvfile.read(1024))
except Exception:
has_headers = True
self._spikes_df = pd.read_csv(path, sep=sep, header=0 if has_headers else None)
if not has_headers:
self._spikes_df.columns = csv_headers[0::2]
if col_population not in self._spikes_df.columns:
pop_name = kwargs.get(col_population, pop_na)
self._spikes_df[col_population] = pop_name
        # TODO: Check that all the necessary columns exist
self._spikes_df = self._spikes_df[csv_headers]
@property
def populations(self):
if self._populations is None:
self._populations = self._spikes_df['population'].unique()
return self._populations
def to_dataframe(self, node_ids=None, populations=None, time_window=None, sort_order=SortOrder.none, **kwargs):
selected = self._spikes_df.copy()
mask = True
if populations is not None:
if isinstance(populations, six.string_types) or np.isscalar(populations):
mask &= selected[col_population] == populations
else:
mask &= selected[col_population].isin(populations)
if node_ids is not None:
node_ids = [node_ids] if np.isscalar(node_ids) else node_ids
mask &= selected[col_node_ids].isin(node_ids)
if time_window is not None:
mask &= (selected[col_timestamps] >= time_window[0]) & (selected[col_timestamps] <= time_window[1])
if isinstance(mask, pd.Series):
selected = selected[mask]
if sort_order == SortOrder.by_time:
selected.sort_values(by=col_timestamps, inplace=True)
elif sort_order == SortOrder.by_id:
selected.sort_values(by=col_node_ids, inplace=True)
selected.index = pd.RangeIndex(len(selected.index))
return selected
def get_times(self, node_id, population=None, time_window=None, **kwargs):
selected = self._spikes_df.copy()
mask = (selected[col_node_ids] == node_id)
if population is not None:
mask &= (selected[col_population] == population)
if time_window is not None:
mask &= (selected[col_timestamps] >= time_window[0]) & (selected[col_timestamps] <= time_window[1])
return np.array(self._spikes_df[mask][col_timestamps])
def nodes(self, populations=None):
selected = self._spikes_df.copy()
mask = True
if populations is not None:
if isinstance(populations, six.string_types) or np.isscalar(populations):
mask = selected[col_population] == populations
else:
mask = selected[col_population].isin(populations)
if isinstance(mask, pd.Series):
selected = selected[mask]
return list(selected.groupby(by=[col_population, col_node_ids]).indices.keys())
def n_spikes(self, population=None):
return len(self.to_dataframe(populations=population))
def time_range(self, populations=None):
selected = self._spikes_df.copy()
if populations is not None:
if isinstance(populations, six.string_types) or np.isscalar(populations):
mask = selected[col_population] == populations
else:
mask = selected[col_population].isin(populations)
selected = selected[mask]
return selected[col_timestamps].agg([np.min, np.max]).values
def spikes(self, node_ids=None, populations=None, time_window=None, sort_order=SortOrder.none, **kwargs):
selected = self._spikes_df.copy()
mask = True
if populations is not None:
if isinstance(populations, six.string_types) or np.isscalar(populations):
mask &= selected[col_population] == populations
else:
mask &= selected[col_population].isin(populations)
if node_ids is not None:
node_ids = [node_ids] if np.isscalar(node_ids) else node_ids
mask &= selected[col_node_ids].isin(node_ids)
if time_window is not None:
mask &= (selected[col_timestamps] >= time_window[0]) & (selected[col_timestamps] <= time_window[1])
if isinstance(mask, pd.Series):
selected = selected[mask]
if sort_order == SortOrder.by_time:
selected.sort_values(by=col_timestamps, inplace=True)
elif sort_order == SortOrder.by_id:
selected.sort_values(by=col_node_ids, inplace=True)
        indices = selected.index.values
        for indx in indices:
            yield tuple(self._spikes_df.iloc[indx])
def __len__(self):
if self._n_spikes is None:
self._n_spikes = len(self._spikes_df)
return self._n_spikes
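# A minimal usage sketch (not part of the original module); the file path and
# population name are hypothetical placeholders, and write_csv assumes the
# reader exposes a `units` attribute via the STReader base class.
def _demo():
    reader = CSVSTReader('output/spikes.csv', sep=' ')
    df = reader.to_dataframe(populations='cortex', time_window=(0.0, 1000.0))
    print(df.head(), len(reader))
    write_csv('output/spikes_sorted.csv', reader, sort_order=SortOrder.by_time, units='ms')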
|
[
"os.makedirs",
"csv.writer",
"pandas.read_csv",
"numpy.isscalar",
"os.path.dirname",
"os.path.exists",
"csv.Sniffer",
"numpy.array"
] |
[((536, 557), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (551, 557), False, 'import os\n'), ((616, 637), 'os.makedirs', 'os.makedirs', (['path_dir'], {}), '(path_dir)\n', (627, 637), False, 'import os\n'), ((1913, 1974), 'pandas.read_csv', 'pd.read_csv', (['path'], {'sep': 'sep', 'header': '(0 if has_headers else None)'}), '(path, sep=sep, header=0 if has_headers else None)\n', (1924, 1974), True, 'import pandas as pd\n'), ((4110, 4157), 'numpy.array', 'np.array', (['self._spikes_df[mask][col_timestamps]'], {}), '(self._spikes_df[mask][col_timestamps])\n', (4118, 4157), True, 'import numpy as np\n'), ((582, 606), 'os.path.exists', 'os.path.exists', (['path_dir'], {}), '(path_dir)\n', (596, 606), False, 'import os\n'), ((840, 868), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '""" """'}), "(f, delimiter=' ')\n", (850, 868), False, 'import csv\n'), ((1183, 1211), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '""" """'}), "(f, delimiter=' ')\n", (1193, 1211), False, 'import csv\n'), ((1746, 1759), 'csv.Sniffer', 'csv.Sniffer', ([], {}), '()\n', (1757, 1759), False, 'import csv\n'), ((2803, 2827), 'numpy.isscalar', 'np.isscalar', (['populations'], {}), '(populations)\n', (2814, 2827), True, 'import numpy as np\n'), ((3049, 3070), 'numpy.isscalar', 'np.isscalar', (['node_ids'], {}), '(node_ids)\n', (3060, 3070), True, 'import numpy as np\n'), ((4356, 4380), 'numpy.isscalar', 'np.isscalar', (['populations'], {}), '(populations)\n', (4367, 4380), True, 'import numpy as np\n'), ((4983, 5007), 'numpy.isscalar', 'np.isscalar', (['populations'], {}), '(populations)\n', (4994, 5007), True, 'import numpy as np\n'), ((5535, 5559), 'numpy.isscalar', 'np.isscalar', (['populations'], {}), '(populations)\n', (5546, 5559), True, 'import numpy as np\n'), ((5781, 5802), 'numpy.isscalar', 'np.isscalar', (['node_ids'], {}), '(node_ids)\n', (5792, 5802), True, 'import numpy as np\n')]
|
"""
tests.tests_draft04
~~~~~~~~~~~~~~~~~~~~~~
Test against the `Common test suite`_.
.. _`Common test suite`: https://github.com/json-schema/JSON-Schema-Test-Suite
"""
from jsonspec.validators import load, ValidationError, CompilationError
from jsonspec.reference.providers import SpecProvider, ProxyProvider
from jsonspec import driver as json
import io
import os
import pytest
import logging
from pathlib import Path
logger = logging.getLogger(__name__)
here = os.path.dirname(os.path.abspath(__file__))
def contents(*paths):
fullpath = os.path.join(here, 'suite', *paths)
d = len(fullpath)
for filepath in Path(fullpath).glob('**/*.json'):
with filepath.open('r', encoding='utf-8') as file:
yield json.load(file), filepath.as_posix()[d:].lstrip('/')
provider = ProxyProvider(SpecProvider())
for data, src in contents('remotes'):
provider[os.path.join('http://localhost:1234', src)] = data
def scenarios(draft):
skip = []
for data, src in contents('tests', draft):
if src in skip:
continue
for block in data:
for test in block['tests']:
yield (block['schema'], test['description'],
test['data'], test['valid'], src)
@pytest.mark.parametrize('schema, description, data, valid, src',
scenarios('draft4'))
def test_common(schema, description, data, valid, src):
try:
load(schema, provider=provider).validate(data)
if not valid:
assert False, description
except (ValidationError, CompilationError) as error:
if valid:
logger.exception(error)
assert False, description
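# A minimal standalone sketch (not part of the test module) of the validator
# API exercised above, using an inline schema so no remote provider is needed.
def _validator_demo():
    validator = load({'type': 'integer'})
    validator.validate(42)  # passes; a non-integer would raise ValidationError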
|
[
"os.path.abspath",
"jsonspec.driver.load",
"pathlib.Path",
"jsonspec.reference.providers.SpecProvider",
"jsonspec.validators.load",
"os.path.join",
"logging.getLogger"
] |
[((447, 474), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (464, 474), False, 'import logging\n'), ((498, 523), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (513, 523), False, 'import os\n'), ((564, 599), 'os.path.join', 'os.path.join', (['here', '"""suite"""', '*paths'], {}), "(here, 'suite', *paths)\n", (576, 599), False, 'import os\n'), ((833, 847), 'jsonspec.reference.providers.SpecProvider', 'SpecProvider', ([], {}), '()\n', (845, 847), False, 'from jsonspec.reference.providers import SpecProvider, ProxyProvider\n'), ((900, 942), 'os.path.join', 'os.path.join', (['"""http://localhost:1234"""', 'src'], {}), "('http://localhost:1234', src)\n", (912, 942), False, 'import os\n'), ((642, 656), 'pathlib.Path', 'Path', (['fullpath'], {}), '(fullpath)\n', (646, 656), False, 'from pathlib import Path\n'), ((1455, 1486), 'jsonspec.validators.load', 'load', (['schema'], {'provider': 'provider'}), '(schema, provider=provider)\n', (1459, 1486), False, 'from jsonspec.validators import load, ValidationError, CompilationError\n'), ((753, 768), 'jsonspec.driver.load', 'json.load', (['file'], {}), '(file)\n', (762, 768), True, 'from jsonspec import driver as json\n')]
|
"""MIT License
Copyright (c) 2022 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import numpy as np
import cv2
import matplotlib.pyplot as plt
class Estimator:
def __init__(self,enhance=False,leftProjMat=[],rightProjMat=[]):
"""
to_enchance = To enchange images before processing
left_C_mat = Left camera matrix
left_T_vec = Left camera translation vector
right_T_vec = Right camera translation vector
"""
self.to_enhance = enhance
        # Decompose the projection matrices
self.left_C_mat, _,self.left_T_vec, _, _, _, _ = cv2.decomposeProjectionMatrix(leftProjMat)
_, _,self.right_T_vec, _, _, _, _ = cv2.decomposeProjectionMatrix(rightProjMat)
def enhance(self,image):
"""
To preprocess the image with differnt algorithms
# Gaussian Blur to remove the noise
# Gamma Equilization
# Histogram filter to equilize the lights
"""
image = cv2.cvtColor(image,cv2.COLOR_BGR2GRAY)
smoothen_image = cv2.GaussianBlur(image,(5,5),0.1)
gamma=0.75
lookUpTable = np.empty((1,256), np.uint8)
for i in range(256):
lookUpTable[0,i] = np.clip(pow(i / 255.0, gamma) * 255.0, 0, 255)
        gamma_equalized_image = cv2.LUT(smoothen_image, lookUpTable)
        clahe = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(4,4))
        histogram_filtered_image = clahe.apply(gamma_equalized_image)
return histogram_filtered_image
def estimate(self,rightimage ,leftimage,depth=False):
"""
        Uses StereoSGBM to compute the disparity map and the standard
        stereo relation (depth = f * b / disparity) to compute depth
Input:
leftimage = Left image matrix
rightimage = Right image matrix
depth = To calculate depth
Output:
Disparity map and depth map of the input image
"""
if self.to_enhance:
leftimage = self.enhance(leftimage)
rightimage = self.enhance(rightimage)
# Calculating Depth Map using Stereo SGBM
stereo = cv2.StereoSGBM_create(numDisparities=16*6,
blockSize=15,P1=4000,P2=15000,
mode=cv2.STEREO_SGBM_MODE_SGBM_3WAY)
disparity_map = stereo.compute(leftimage,rightimage)
        if depth:
            # StereoSGBM returns fixed-point disparities (int16, scaled by 16);
            # convert to float so the sentinel values below are representable
            disparity_map = disparity_map.astype(np.float32) / 16.0
            disparity = disparity_map.copy()  # keep an unclamped copy to return
            f = self.left_C_mat[0][0]
            b = self.right_T_vec[0] - self.left_T_vec[0]
            # Avoid instability and division by zero
            disparity_map[disparity_map == 0.0] = 0.1
            disparity_map[disparity_map == -1.0] = 0.1
            depth_map = f * b / disparity_map
            return disparity, depth_map
return disparity_map[:,95:]
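# A minimal usage sketch (not part of the original module); the intrinsics,
# baseline and image file names below are hypothetical placeholders.
if __name__ == '__main__':
    K = np.array([[700.0, 0.0, 320.0], [0.0, 700.0, 240.0], [0.0, 0.0, 1.0]])
    P_left = K @ np.hstack([np.eye(3), np.zeros((3, 1))])                    # camera at origin
    P_right = K @ np.hstack([np.eye(3), np.array([[-0.1], [0.0], [0.0]])])  # 0.1 m baseline
    est = Estimator(enhance=True, leftProjMat=P_left, rightProjMat=P_right)
    left, right = cv2.imread('left.png'), cv2.imread('right.png')
    disparity, depth = est.estimate(right, left, depth=True)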
|
[
"cv2.GaussianBlur",
"cv2.cvtColor",
"numpy.empty",
"numpy.ones",
"cv2.LUT",
"cv2.createCLAHE",
"cv2.decomposeProjectionMatrix",
"cv2.StereoSGBM_create"
] |
[((1590, 1632), 'cv2.decomposeProjectionMatrix', 'cv2.decomposeProjectionMatrix', (['leftProjMat'], {}), '(leftProjMat)\n', (1619, 1632), False, 'import cv2\n'), ((1677, 1720), 'cv2.decomposeProjectionMatrix', 'cv2.decomposeProjectionMatrix', (['rightProjMat'], {}), '(rightProjMat)\n', (1706, 1720), False, 'import cv2\n'), ((2009, 2048), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2GRAY'], {}), '(image, cv2.COLOR_BGR2GRAY)\n', (2021, 2048), False, 'import cv2\n'), ((2073, 2109), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['image', '(5, 5)', '(0.1)'], {}), '(image, (5, 5), 0.1)\n', (2089, 2109), False, 'import cv2\n'), ((2157, 2185), 'numpy.empty', 'np.empty', (['(1, 256)', 'np.uint8'], {}), '((1, 256), np.uint8)\n', (2165, 2185), True, 'import numpy as np\n'), ((2324, 2360), 'cv2.LUT', 'cv2.LUT', (['smoothen_image', 'lookUpTable'], {}), '(smoothen_image, lookUpTable)\n', (2331, 2360), False, 'import cv2\n'), ((2386, 2437), 'cv2.createCLAHE', 'cv2.createCLAHE', ([], {'clipLimit': '(2.0)', 'tileGridSize': '(4, 4)'}), '(clipLimit=2.0, tileGridSize=(4, 4))\n', (2401, 2437), False, 'import cv2\n'), ((3169, 3288), 'cv2.StereoSGBM_create', 'cv2.StereoSGBM_create', ([], {'numDisparities': '(16 * 6)', 'blockSize': '(15)', 'P1': '(4000)', 'P2': '(15000)', 'mode': 'cv2.STEREO_SGBM_MODE_SGBM_3WAY'}), '(numDisparities=16 * 6, blockSize=15, P1=4000, P2=\n 15000, mode=cv2.STEREO_SGBM_MODE_SGBM_3WAY)\n', (3190, 3288), False, 'import cv2\n'), ((3772, 3800), 'numpy.ones', 'np.ones', (['disparity_map.shape'], {}), '(disparity_map.shape)\n', (3779, 3800), True, 'import numpy as np\n')]
|
import unittest
import os
import yaml
from shutil import copytree, copyfile
from elasticsearch import Elasticsearch
from filebeat import BaseTest
INTEGRATION_TESTS = os.environ.get('INTEGRATION_TESTS', False)
class Test(BaseTest):
def init(self):
self.elasticsearch_url = self.get_elasticsearch_url()
print("Using elasticsearch: {}".format(self.elasticsearch_url))
self.es = Elasticsearch([self.elasticsearch_url])
@unittest.skipIf(not INTEGRATION_TESTS,
"integration tests are disabled, run with INTEGRATION_TESTS=1 to enable them.")
@unittest.skipIf(os.getenv("TESTING_ENVIRONMENT") == "2x",
"integration test not available on 2.x")
def test_setup_modules_d_config(self):
"""
Check if template settings are applied to Ingest pipelines when configured from modules.d.
"""
self.init()
self.render_config_template(
modules=True,
elasticsearch={
"host": self.get_elasticsearch_url(),
},
)
self._setup_dummy_module()
beat_setup_modules_pipelines = self.start_beat(
extra_args=[
"setup",
"--pipelines",
"-E", "filebeat.config.modules.path=" + self.working_dir + "/modules.d/*.yml",
],
)
beat_setup_modules_pipelines.check_wait(exit_code=0)
version = self.get_beat_version()
pipeline_name = "filebeat-" + version + "-template-test-module-test-pipeline"
pipeline = self.es.transport.perform_request("GET", "/_ingest/pipeline/" + pipeline_name)
assert "date" in pipeline[pipeline_name]["processors"][0]
assert "remove" in pipeline[pipeline_name]["processors"][1]
def _setup_dummy_module(self):
modules_d_path = self.working_dir + "/modules.d"
modules_path = self.working_dir + "/module"
for directory in [modules_d_path, modules_path]:
if not os.path.isdir(directory):
os.mkdir(directory)
copytree(self.beat_path + "/tests/system/input/template-test-module", modules_path + "/template-test-module")
copyfile(self.beat_path + "/tests/system/input/template-test-module/_meta/config.yml", modules_d_path + "/test.yml")
|
[
"elasticsearch.Elasticsearch",
"unittest.skipIf",
"os.mkdir",
"os.path.isdir",
"os.environ.get",
"shutil.copyfile",
"shutil.copytree",
"os.getenv"
] |
[((178, 220), 'os.environ.get', 'os.environ.get', (['"""INTEGRATION_TESTS"""', '(False)'], {}), "('INTEGRATION_TESTS', False)\n", (192, 220), False, 'import os\n'), ((474, 601), 'unittest.skipIf', 'unittest.skipIf', (['(not INTEGRATION_TESTS)', '"""integration tests are disabled, run with INTEGRATION_TESTS=1 to enable them."""'], {}), "(not INTEGRATION_TESTS,\n 'integration tests are disabled, run with INTEGRATION_TESTS=1 to enable them.'\n )\n", (489, 601), False, 'import unittest\n'), ((426, 465), 'elasticsearch.Elasticsearch', 'Elasticsearch', (['[self.elasticsearch_url]'], {}), '([self.elasticsearch_url])\n', (439, 465), False, 'from elasticsearch import Elasticsearch\n'), ((2147, 2261), 'shutil.copytree', 'copytree', (["(self.beat_path + '/tests/system/input/template-test-module')", "(modules_path + '/template-test-module')"], {}), "(self.beat_path + '/tests/system/input/template-test-module', \n modules_path + '/template-test-module')\n", (2155, 2261), False, 'from shutil import copytree, copyfile\n'), ((2266, 2391), 'shutil.copyfile', 'copyfile', (["(self.beat_path + '/tests/system/input/template-test-module/_meta/config.yml')", "(modules_d_path + '/test.yml')"], {}), "(self.beat_path +\n '/tests/system/input/template-test-module/_meta/config.yml', \n modules_d_path + '/test.yml')\n", (2274, 2391), False, 'from shutil import copytree, copyfile\n'), ((637, 669), 'os.getenv', 'os.getenv', (['"""TESTING_ENVIRONMENT"""'], {}), "('TESTING_ENVIRONMENT')\n", (646, 669), False, 'import os\n'), ((2073, 2097), 'os.path.isdir', 'os.path.isdir', (['directory'], {}), '(directory)\n', (2086, 2097), False, 'import os\n'), ((2116, 2135), 'os.mkdir', 'os.mkdir', (['directory'], {}), '(directory)\n', (2124, 2135), False, 'import os\n')]
|
from keras.models import Sequential
from keras.layers import Dense
import random
import matplotlib.pyplot as plt
from dqn.dqn import DQN
from tictactoe_env import TicTacToe
def model_constructor():
model = Sequential()
# hidden layer
model.add(Dense(18, input_shape=(18,)))
model.add(Dense(12))
model.add(Dense(9))
model.compile(optimizer='adam', loss='mse', metrics=['accuracy'])
return model
dqnX = DQN(model_constructor, 9)
dqnO = DQN(model_constructor, 9)
def ai_ai_game():
false_moves_X = 0
false_moves_O = 0
env = TicTacToe()
winner = env.winner()
while winner == 0:
ai_reward = 0
if env.get_turn() == 'X':
res = -1
while res == -1:
move = dqnX.determine_action(env.get_state(), ai_reward)
res = env.place(move)
if res == -1:
ai_reward = -1
dqnX.determine_action(env.get_state(), ai_reward, terminal_state=True)
dqnO.determine_action(env.get_state(), 1, terminal_state=True)
return 0
else:
ai_reward = 0
else:
res = -1
while res == -1:
move = dqnO.determine_action(env.get_state(), ai_reward)
res = env.place(move)
if res == -1:
ai_reward = -1
dqnO.determine_action(env.get_state(), ai_reward, terminal_state=True)
dqnX.determine_action(env.get_state(), 1, terminal_state=True)
return 0
else:
ai_reward = 0
winner = env.winner()
rewardX = 0
rewardO = 0
if winner == -2:
rewardX = 1
rewardO = 1
elif winner == 1:
rewardX = 2
rewardO = -2
else:
rewardX = -2
rewardO = 2
dqnX.determine_action(env.get_state(), rewardX, terminal_state=True)
dqnO.determine_action(env.get_state(), rewardO, terminal_state=True)
return winner
def random_ai_game(ai_team='X'):
false_moves = 0
env = TicTacToe()
winner = env.winner()
while winner == 0:
# print("Board:")
# env.print_board()
ai_reward = 0
if env.get_turn() == ai_team:
res = -1
while res == -1:
# print("AI Turn")
move = dqnX.determine_action(env.get_state(), ai_reward)
res = env.place(move)
if res == -1:
ai_reward = -1
dqnX.determine_action(env.get_state(), ai_reward, terminal_state=True)
return 0
else:
ai_reward = 0
else:
res = -1
while res == -1:
res = env.place(random.randint(0, 8))
winner = env.winner()
reward = 0
if winner == -2:
reward = 0
elif winner == 1 and ai_team == 'X' or winner == -1 and ai_team == 'O':
reward = 1
else:
reward = -1
dqnX.determine_action(env.get_state(), reward, terminal_state=True)
return winner
def run_game(ai_team='X'):
env = TicTacToe()
winner = env.winner()
while winner == 0:
print("Board:")
env.print_board()
ai_reward = 0
if env.get_turn() == ai_team:
res = -1
while res == -1:
print("AI Turn")
move = dqnX.determine_action(env.get_state(), ai_reward)
print(dqnX.q_predictor.predict(env.get_state()))
res = env.place(move)
if res == -1:
print("AI goofed")
ai_reward = -1
else:
res = -1
while res == -1:
move = input("Input place position from 0-8: ")
res = env.place(int(move))
if res == -1:
print("Bad move.")
winner = env.winner()
reward = 0
if winner == -2:
reward = 0
print("Tie.")
elif winner == 1 and ai_team == 'X' or winner == -1 and ai_team == 'O':
reward = 0
print("AI Win!")
else:
reward = 0
print("Ai lost")
dqnX.determine_action(env.get_state(), reward, terminal_state=True)
# dqnX.force_epsilon(1)
# dqnO.force_epsilon(1)
ai_w_l = []
ai_t_l = []
ai_l_l = []
ai_fm_l = []
fm_l = []
for i in range(1000):
ai_w = 0
ai_t = 0
ai_l = 0
ai_fm = 0
fm = 0
print("RUN:", i)
    for _ in range(100):
res = random_ai_game()
if res == 1:
ai_w += 1
elif res == -2:
ai_t += 1
elif res == 0:
ai_fm += 1
else:
ai_l += 1
ai_w_l.append(ai_w)
ai_t_l.append(ai_t)
ai_l_l.append(ai_l)
ai_fm_l.append(ai_fm)
print("W/T/L/F", ai_w, ai_t, ai_l, ai_fm)
dqnX.train()
dqnO.train()
dqnX.force_epsilon(0)
ai_w = 0
ai_t = 0
ai_l = 0
ai_fm = 0
for i in range(5000):
res = random_ai_game()
if res == 1:
ai_w += 1
elif res == -2:
ai_t += 1
elif res == 0:
ai_fm += 1
else:
ai_l += 1
print("W/T/L/F", ai_w, ai_t, ai_l, ai_fm)
# dqnO.force_epsilon(0)
# ai_w = 0
# ai_t = 0
# ai_l = 0
# fm = 0
# for i in range(100):
# print("RUN:", i)
# false_moves, res = ai_ai_game()
# if res == 1:
# ai_w += 1
# elif res == -2:
# ai_t += 1
# else:
# ai_l += 1
# fm += false_moves
# print("W/T/L/F", ai_w, ai_t, ai_l)
plt.stackplot(range(1, len(ai_w_l)+1), [ai_w_l, ai_t_l, ai_l_l, ai_fm_l], labels=['X wins', 'ties', 'O wins', 'Misplaces'])
# plt.plot(ai_w_l, label='X wins')
# plt.plot(ai_t_l, label='ties')
# plt.plot(ai_l_l, label='O wins')
# plt.plot(ai_fm_l, label='Misplaces')
# plt.plot(fm_l, label='misplaces')
plt.legend()
plt.show()
dqnX.force_epsilon(0)
while True:
# try:
run_game()
# except Exception as e:
# print(e)
# break
|
[
"matplotlib.pyplot.show",
"tictactoe_env.TicTacToe",
"random.randint",
"matplotlib.pyplot.legend",
"keras.layers.Dense",
"dqn.dqn.DQN",
"keras.models.Sequential"
] |
[((493, 518), 'dqn.dqn.DQN', 'DQN', (['model_constructor', '(9)'], {}), '(model_constructor, 9)\n', (496, 518), False, 'from dqn.dqn import DQN\n'), ((526, 551), 'dqn.dqn.DQN', 'DQN', (['model_constructor', '(9)'], {}), '(model_constructor, 9)\n', (529, 551), False, 'from dqn.dqn import DQN\n'), ((5928, 5940), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (5938, 5940), True, 'import matplotlib.pyplot as plt\n'), ((5941, 5951), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5949, 5951), True, 'import matplotlib.pyplot as plt\n'), ((271, 283), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (281, 283), False, 'from keras.models import Sequential\n'), ((626, 637), 'tictactoe_env.TicTacToe', 'TicTacToe', ([], {}), '()\n', (635, 637), False, 'from tictactoe_env import TicTacToe\n'), ((2193, 2204), 'tictactoe_env.TicTacToe', 'TicTacToe', ([], {}), '()\n', (2202, 2204), False, 'from tictactoe_env import TicTacToe\n'), ((3262, 3273), 'tictactoe_env.TicTacToe', 'TicTacToe', ([], {}), '()\n', (3271, 3273), False, 'from tictactoe_env import TicTacToe\n'), ((317, 345), 'keras.layers.Dense', 'Dense', (['(18)'], {'input_shape': '(18,)'}), '(18, input_shape=(18,))\n', (322, 345), False, 'from keras.layers import Dense, Reshape, Flatten, Conv2D\n'), ((361, 370), 'keras.layers.Dense', 'Dense', (['(12)'], {}), '(12)\n', (366, 370), False, 'from keras.layers import Dense, Reshape, Flatten, Conv2D\n'), ((386, 394), 'keras.layers.Dense', 'Dense', (['(9)'], {}), '(9)\n', (391, 394), False, 'from keras.layers import Dense, Reshape, Flatten, Conv2D\n'), ((2901, 2921), 'random.randint', 'random.randint', (['(0)', '(8)'], {}), '(0, 8)\n', (2915, 2921), False, 'import random\n')]
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.all_supplies, name='supplies'),
path('add/', views.supply_add, name='supply_add'),
path('edit/<int:supply_id>/', views.supply_edit, name='supply_edit'),
    path('delete/<int:supply_id>/', views.supply_delete, name='supply_delete'),
]
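# A short sketch (not part of the module): resolving these routes by name with
# django.urls.reverse; this assumes a configured Django project that includes
# this urlconf at the site root.
def _reverse_demo():
    from django.urls import reverse
    return reverse('supply_edit', args=[3])  # -> '/edit/3/'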
|
[
"django.urls.path"
] |
[((70, 115), 'django.urls.path', 'path', (['""""""', 'views.all_supplies'], {'name': '"""supplies"""'}), "('', views.all_supplies, name='supplies')\n", (74, 115), False, 'from django.urls import path\n'), ((121, 170), 'django.urls.path', 'path', (['"""add/"""', 'views.supply_add'], {'name': '"""supply_add"""'}), "('add/', views.supply_add, name='supply_add')\n", (125, 170), False, 'from django.urls import path\n'), ((176, 244), 'django.urls.path', 'path', (['"""edit/<int:supply_id>/"""', 'views.supply_edit'], {'name': '"""supply_edit"""'}), "('edit/<int:supply_id>/', views.supply_edit, name='supply_edit')\n", (180, 244), False, 'from django.urls import path\n'), ((250, 337), 'django.urls.path', 'path', (['"""delete/<int:supply_id>/ """', 'views.supply_delete'], {'name': '"""supply_delete"""'}), "('delete/<int:supply_id>/ ', views.supply_delete, name=\n 'supply_delete')\n", (254, 337), False, 'from django.urls import path\n')]
|
import libvirt
import re
def CreateVM(UserID, TemplateID, VMName):
    # Step 1: Get the VM's configuration from the template
    image = GetImageFromTemplate(TemplateID)
    cpu = GETCPUFromTemplate(TemplateID)
    memory = GetMemoryFromTemplate(TemplateID)
    # Step 2: Pick an available host server
    host_list = GetHosts()
    target_host = GetBestHostFromHosts(host_list)
    # Step 3: Create the VM's disk file
    # The disk file can be created with qemu-img in QCOW2 format, run remotely over SSH
    disk_file = CreateVMDiskWithSSH(target_host, image)
    # Step 4: Generate the VM's XML configuration file
    XMLFile = GetVMXMLFile(VMName, disk_file, cpu, memory)
    # Step 5: Create and initialize the VM object
    VM = VirtualMachine()
    VM.name = VMName
    VM.cpu = cpu
    ...
    VM.host_id = target_host.id
    VM.image_id = image.id
    VM.owner_id = UserID
    VM.status_code = VM_INIT
    # Step 6: Define and start the VM through libvirt
    libvirt_host = GetLibvirtHost(target_host)
    domain = libvirt_host.createXML(XMLFile)
    if domain is not None:  # update the VM status if creation succeeded
        VM.status_code = VM_RUNNING
    # Step 7: Commit the new VM to the database
db.session.commit()
def StartVM(VMID):
    # Step 1: Fetch the VM object
    vm = GetVMByID(VMID)
    # Step 2: Get the libvirt domain object
    domain = GetLibvirtDomainByVM(vm)
    # Step 3: Perform the operation through the libvirt API
domain.create()
def ShutdownVM(VMID):
    # Step 1: Fetch the VM object
    vm = GetVMByID(VMID)
    # Step 2: Get the libvirt domain object
    domain = GetLibvirtDomainByVM(vm)
    # Step 3: Perform the operation through the libvirt API
domain.destroy()
def RebootVM(VMID):
    # Step 1: Fetch the VM object
    vm = GetVMByID(VMID)
    # Step 2: Get the libvirt domain object
    domain = GetLibvirtDomainByVM(vm)
    # Step 3: Perform the operation through the libvirt API
domain.reboot()
def deleteVM(VMID):
    # Step 1: Fetch the VM object
    vm = GetVMByID(VMID)
    # Step 2: Get the libvirt domain object
    domain = GetLibvirtDomainByVM(vm)
    # Step 3: Perform the operation through the libvirt API
    domain.destroy()
    domain.undefine()
    # Step 4: Delete the VM's disk file, e.g. by running the delete command remotely over SSH
    DeleteDiskFileWithSSH(vm.host)
    # Step 5: Update the database
db.session.delete(vm)
db.session.commit()
def GetVNCPort(host, vmname):
domain = host.lookupByName(vmname)
xml = domain.XMLDesc()
    reg = r"' port='(.*)' a"
    vncport = re.search(reg, xml)
    if vncport is None:
return None
return vncport.group(1)
def CreateVMToken(vm):
    # Step 1: Get the VNC port
    port = GetVNCPort(GetLibvirtHost(vm.host), vm.name)
    # Step 2: Build the token file content
    # Token file format: `token: host:port`
    token_string = GenerateTokenString(vm.host, port)
    # Step 3: Write the token file; this can be done by running echo remotely over
    # SSH, naming the file after the VM ID to simplify later deletion
CreateTokenFileWithSSH(vm.host, token_string)
def DeleteVMToken(vm):
    # Step 1: Delete the token file, e.g. by running rm remotely over SSH
DeleteTokenFileWithSSH(vm.host, vm.id)
def checkHost(host):
    # Step 1: Check that the host answers ping
    status_code = PingTest(host)
    # Step 2: Check the host's SSH connection
    status_code = SSHTest(host)
    # Step 3: Check the host's libvirt service
status_code = LibvirtTest(host)
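# A minimal sketch (not part of the original module) of driving the VNC-port
# helper above; the qemu+ssh URI and VM name are hypothetical placeholders.
if __name__ == '__main__':
    conn = libvirt.open('qemu+ssh://root@node01/system')  # the "host" here is a libvirt connection
    print('VNC port:', GetVNCPort(conn, 'vm-test'))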
|
[
"re.search"
] |
[((1997, 2016), 're.search', 're.search', (['reg', 'xml'], {}), '(reg, xml)\n', (2006, 2016), False, 'import re\n')]
|
# Copyright 2022 Quantapix Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# https://arxiv.org/abs/1909.11942
# https://github.com/google-research/albert
import torch
from torch import nn
from torch.nn import functional as F
from transformers.utils import logging
from .. import core as qc
from ..core import utils as qu
from ..core import forward as qf
from ..core import output as qo
from ..core import attention as qa
from ..core.embed import Embeds
from ..core.ffnet import Classifier, FFNet, Masker, Pool
from ..prep.config.albert import PreTrained
from . import bert
log = logging.get_logger(__name__)
class Model(PreTrained):
def __init__(self, add_pool=True, **kw):
super().__init__(**kw)
self.get_cfg(kw)
self.embs = Embeds(**kw)
self.enc = Encoder(**kw)
self.pool = Pool(**kw) if add_pool else None
def forward(self, x, x_emb=None, mask=None, head_m=None, **kw):
cfg = self.cfg
yo = self.get_y_opts(**kw)
if x is not None:
assert x_emb is None
s, d = x.size(), x.device
else:
s, d = x_emb.size()[:-1], x_emb.device
if mask is None:
mask = torch.ones(s, device=d)
mask = self.get_mask(mask, s, d)
head_m = self.get_head_m(head_m, cfg.n_lays)
ys = self.embs(x, x_emb, **kw)
ys = self.enc(ys, mask=mask, head_m=head_m, **kw, yo=yo)
if self.pool is not None:
ys += (self.pool(ys[0]),)
return qo.WithPools(*ys) if yo.kw else ys
class ForMasked(PreTrained):
def __init__(self, **kw):
super().__init__(**kw)
cfg = self.get_cfg(kw)
self.model = Model(add_pool=False, **kw)
self.proj = Masker(cfg.d_embed, **kw)
forward = qf.forward_masked
class ForMultiChoice(PreTrained):
def __init__(self, **kw):
super().__init__(**kw)
self.get_cfg(kw)
self.model = Model(**kw)
self.proj = Classifier(n_labels=1, **kw)
forward = bert.ForMultiChoice.forward
class ForPreTraining(PreTrained):
def __init__(self, **kw):
super().__init__(**kw)
cfg = self.get_cfg(kw)
self.model = Model(**kw)
self.proj = Masker(cfg.d_embed, **kw)
self.order = Classifier(n_labels=2, **kw)
forward = bert.ForPreTraining.forward
class ForQA(PreTrained):
def __init__(self, **kw):
super().__init__(**kw)
cfg = self.get_cfg(kw)
self.model = Model(add_pool=False, **kw)
self.proj = qc.Linear(cfg.d_model, cfg.n_labels, **kw)
forward = qf.forward_qa
class ForSeqClassifier(PreTrained):
def __init__(self, **kw):
super().__init__(**kw)
self.get_cfg(kw)
self.model = Model(**kw)
self.proj = Classifier(**kw)
forward = qf.forward_seq
class ForTokClassifier(PreTrained):
def __init__(self, **kw):
super().__init__(**kw)
self.get_cfg(kw)
self.model = Model(add_pool=False, **kw)
self.proj = Classifier(**kw)
forward = qf.forward_tok
class Encoder(qc.Module):
hs = qc.Hypers({"d_embed", "d_model", "n_groups"})
def __init__(self, ps={}, hs=[], **kw):
super().__init__(ps, [self.hs] + hs, **kw)
cfg = self.get_cfg(kw)
self.proj = qc.Linear(cfg.d_embed, cfg.d_model, **kw)
self.groups = qc.Stack([Group(**kw) for _ in range(cfg.n_groups)])
def forward(self, x, head_m=None, y_attn=False, y_hidden=False, y_kw=True, **kw):
cfg = self.cfg
yo = self.get_y_opts(y_attn=y_attn, y_kw=y_kw, y_hidden=y_hidden, **kw)
y = self.proj(x)
attns = () if yo.attn else None
hiddens = () if yo.hidden else None
hm = [None] * cfg.n_lays if head_m is None else head_m
for i in range(cfg.n_lays):
if yo.hidden:
hiddens += (y,)
n = int(cfg.n_lays / cfg.n_groups)
g = int(i / (cfg.n_lays / cfg.n_groups))
ys = self.groups[g](y, head_m=hm[g * n : (g + 1) * n], **kw, yo=yo)
y = ys[0]
if yo.attn:
attns += ys[1]
if yo.hidden:
hiddens += (y,)
ys = (y, attns, hiddens)
return qo.Base(*ys) if yo.kw else ys
class Group(qc.Module):
hs = qc.Hypers({"s_group"})
def __init__(self, ps={}, hs=[], **kw):
super().__init__(ps, [self.hs] + hs, **kw)
cfg = self.get_cfg(kw)
self.lays = qc.Stack([Layer(**kw) for _ in range(cfg.s_group)])
def forward(self, x, head_m=None, y_attn=False, y_hidden=False, **kw):
yo = self.get_y_opts(y_attn=y_attn, y_hidden=y_hidden, **kw)
y = x
attns = () if yo.attn else None
hiddens = () if yo.hidden else None
for i, lay in enumerate(self.lays):
if yo.hidden:
hiddens += (y,)
ys = lay(y, head_m=head_m[i], **kw, yo=yo)
y = ys[0]
if yo.attn:
attns += (ys[1],)
if yo.hidden:
hiddens += (y,)
ys = (y, attns, hiddens)
return qo.Base(*ys) if yo.kw else ys
class Layer(qc.Module):
hs = qc.Hypers({"act", "d_ff", "d_model", "drop", "eps"})
def __init__(self, ps={}, hs=[], **kw):
super().__init__(ps, [self.hs] + hs, **kw)
cfg = self.get_cfg(kw)
self.attn = Attention(**kw)
self.ffnet = FFNet(**kw)
self.norm = qc.LayerNorm(cfg.d_model, cfg.eps, **kw)
def forward(self, x, **kw):
ys = self.attn(x, **kw)
y = self.ffnet(ys[0])
y = self.norm(y + ys[0])
return (y,) + ys[1:]
class Attention(qc.Module):
hs = qc.Hypers(
{"d_embed", "d_model", "n_heads", "n_pos"}, {"drop_attn": 0.0, "pos_type": "absolute"}
)
def __init__(self, pos_type=None, ps={}, hs=[], **kw):
if pos_type is not None:
kw.update(pos_type=pos_type)
super().__init__(ps, [self.hs] + hs, **kw)
cfg = self.get_cfg(kw)
m, n = cfg.d_model, cfg.n_heads
assert m % n == 0 # or cfg.d_embed is not None
cfg.d_head = h = m // n
cfg.scale = 1 / (h**0.5)
self.query = qc.Linear(m, m, **kw)
self.key = qc.Linear(m, m, **kw)
self.value = qc.Linear(m, m, **kw)
if cfg.pos_type == "relative_key" or cfg.pos_type == "relative_key_query":
self.pos_emb = qc.Embed(2 * cfg.n_pos - 1, h, **kw)
self.attn_drop = qc.Dropout(cfg.drop_attn, **kw)
self.proj = qc.Linear(m, m, **kw)
self.drop = qc.Dropout(cfg.drop, **kw)
self.norm = qc.LayerNorm(m, cfg.eps, **kw)
split_heads = qa.split_heads
def forward(self, x, mask=None, head_m=None, y_attn=False, **kw):
cfg = self.cfg
yo = self.get_y_opts(y_attn=y_attn, **kw)
q = self.split_heads(self.query(x))
k = self.split_heads(self.key(x))
v = self.split_heads(self.value(x))
a = torch.matmul(q, k.transpose(-1, -2))
a.mul_(cfg.scale)
if mask is not None:
a = a + mask
t = cfg.pos_type
if t == "relative_key" or t == "relative_key_query":
n = x.size()[1]
kw = dict(device=x.device, dtype=torch.long)
left, right = torch.arange(n, **kw).view(-1, 1), torch.arange(n, **kw).view(1, -1)
pos = self.pos_emb(left - right + self.n_pos - 1).to(dtype=q.dtype)
if t == "relative_key":
a += torch.einsum("bhld,lrd->bhlr", q, pos)
elif t == "relative_key_query":
a += torch.einsum("bhld,lrd->bhlr", q, pos) + torch.einsum("bhrd,lrd->bhlr", k, pos)
a = self.attn_drop(F.softmax(a, dim=-1))
if head_m is not None:
a = a * head_m
y = torch.matmul(a, v).transpose(2, 1).flatten(2)
y = (self.norm(x + self.drop(self.proj(y))),)
if yo.attn:
y += (a,)
return y
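# A standalone shape sketch (not part of the module) for the relative-position
# attention term used above; all tensor sizes are arbitrary placeholders.
def _relpos_shape_demo():
    q = torch.randn(2, 4, 8, 16)   # (batch, heads, seq, d_head)
    pos = torch.randn(8, 8, 16)    # (seq, seq, d_head)
    a = torch.einsum('bhld,lrd->bhlr', q, pos)
    assert a.shape == (2, 4, 8, 8)  # (batch, heads, seq, seq)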
|
[
"torch.ones",
"transformers.utils.logging.get_logger",
"torch.nn.functional.softmax",
"torch.einsum",
"torch.arange",
"torch.matmul"
] |
[((1192, 1220), 'transformers.utils.logging.get_logger', 'logging.get_logger', (['__name__'], {}), '(__name__)\n', (1210, 1220), False, 'from transformers.utils import logging\n'), ((1801, 1824), 'torch.ones', 'torch.ones', (['s'], {'device': 'd'}), '(s, device=d)\n', (1811, 1824), False, 'import torch\n'), ((8272, 8292), 'torch.nn.functional.softmax', 'F.softmax', (['a'], {'dim': '(-1)'}), '(a, dim=-1)\n', (8281, 8292), True, 'from torch.nn import functional as F\n'), ((8061, 8099), 'torch.einsum', 'torch.einsum', (['"""bhld,lrd->bhlr"""', 'q', 'pos'], {}), "('bhld,lrd->bhlr', q, pos)\n", (8073, 8099), False, 'import torch\n'), ((7855, 7876), 'torch.arange', 'torch.arange', (['n'], {}), '(n, **kw)\n', (7867, 7876), False, 'import torch\n'), ((7890, 7911), 'torch.arange', 'torch.arange', (['n'], {}), '(n, **kw)\n', (7902, 7911), False, 'import torch\n'), ((8165, 8203), 'torch.einsum', 'torch.einsum', (['"""bhld,lrd->bhlr"""', 'q', 'pos'], {}), "('bhld,lrd->bhlr', q, pos)\n", (8177, 8203), False, 'import torch\n'), ((8206, 8244), 'torch.einsum', 'torch.einsum', (['"""bhrd,lrd->bhlr"""', 'k', 'pos'], {}), "('bhrd,lrd->bhlr', k, pos)\n", (8218, 8244), False, 'import torch\n'), ((8364, 8382), 'torch.matmul', 'torch.matmul', (['a', 'v'], {}), '(a, v)\n', (8376, 8382), False, 'import torch\n')]
|
import graphene
from graphene.types import Field
from graphene_django.types import DjangoObjectType
from graphene_django.forms.mutation import DjangoModelFormMutation
from .forms import IssueForm, ResearchNoteForm, ResearchNoteWrittenForm
from .models import Issue, ResearchNote
class IssueType(DjangoObjectType):
class Meta:
model = Issue
class ResearchNoteType(DjangoObjectType):
class Meta:
model = ResearchNote
class Query:
issues = graphene.List(IssueType)
research_notes = graphene.List(ResearchNoteType)
def resolve_issues(self, info, **kwargs):
return Issue.objects.prefetch_related('research_notes').all().order_by('date')
def resolve_research_notes(self, info, **kwargs):
return ResearchNote.objects.select_related('issue').all().order_by('issue__date', 'id')
class IssueMutation(DjangoModelFormMutation):
issue = Field(IssueType)
class Meta:
form_class = IssueForm
class ResearchNoteMutation(DjangoModelFormMutation):
research_note = Field(ResearchNoteType)
class Meta:
form_class = ResearchNoteForm
class ResearchNoteWrittenMutation(DjangoModelFormMutation):
research_note = Field(ResearchNoteType)
class Meta:
form_class = ResearchNoteWrittenForm
class Mutations:
update_issue = IssueMutation.Field()
update_research_note = ResearchNoteMutation.Field()
update_research_note_written = ResearchNoteWrittenMutation.Field()
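# A hypothetical GraphQL query exercising the Query fields above; graphene
# camel-cases resolver names, and the selected model fields are assumptions.
EXAMPLE_QUERY = '''
{
  issues { id date }
  researchNotes { id }
}
'''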
|
[
"graphene.List",
"graphene.types.Field"
] |
[((474, 498), 'graphene.List', 'graphene.List', (['IssueType'], {}), '(IssueType)\n', (487, 498), False, 'import graphene\n'), ((520, 551), 'graphene.List', 'graphene.List', (['ResearchNoteType'], {}), '(ResearchNoteType)\n', (533, 551), False, 'import graphene\n'), ((897, 913), 'graphene.types.Field', 'Field', (['IssueType'], {}), '(IssueType)\n', (902, 913), False, 'from graphene.types import Field\n'), ((1037, 1060), 'graphene.types.Field', 'Field', (['ResearchNoteType'], {}), '(ResearchNoteType)\n', (1042, 1060), False, 'from graphene.types import Field\n'), ((1198, 1221), 'graphene.types.Field', 'Field', (['ResearchNoteType'], {}), '(ResearchNoteType)\n', (1203, 1221), False, 'from graphene.types import Field\n')]
|
# coding: utf-8
import os
import shutil
import pickle
import librosa
import argparse
import pandas as pd
import numpy as np
from glob import glob
from tqdm import tqdm
from PIL import Image
from facenet_pytorch import MTCNN, InceptionResnetV1
import torch
from transformers import BertTokenizer, BertModel
from torch.utils.data import Dataset, DataLoader
class MDataPreLoader(Dataset):
def __init__(self, args):
self.working_dir = args.working_dir
self.df = args.df
self.annotation_dict = {
"Negative": 0,
"Neutral": 1,
"Positive": 2
}
# toolkits path
self.openface2Path = args.openface2Path
# bert
tokenizer_class = BertTokenizer
if args.language == 'cn':
self.pretrainedBertPath = 'pretrained_model/bert_cn'
self.tokenizer = tokenizer_class.from_pretrained('pretrained_model/bert_cn')
else:
self.pretrainedBertPath = 'pretrained_model/bert_en'
self.tokenizer = tokenizer_class.from_pretrained('pretrained_model/bert_en', do_lower_case=True)
def __len__(self):
return len(self.df)
def __getVideoEmbedding(self, video_path, tmp_dir, pool_size=3):
faces_feature_dir = os.path.join(tmp_dir, 'Faces')
os.mkdir(faces_feature_dir)
cmd = self.openface2Path + ' -f ' + video_path + ' -out_dir ' + faces_feature_dir
os.system(cmd)
# read features
features, local_features = [], []
df_path = glob(os.path.join(faces_feature_dir, '*.csv'))
if len(df_path) > 0:
df_path = df_path[0]
df = pd.read_csv(df_path)
for i in range(len(df)):
local_features.append(np.array(df.loc[i][df.columns[5:]]))
if (i + 1) % pool_size == 0:
features.append(np.array(local_features).mean(axis=0))
local_features = []
if len(local_features) != 0:
features.append(np.array(local_features).mean(axis=0))
return np.array(features)
def __getAudioEmbedding(self, video_path, audio_path):
# use ffmpeg to extract audio
cmd = 'ffmpeg -i ' + video_path + ' -f wav -vn ' + \
audio_path + ' -loglevel quiet'
os.system(cmd)
# get features
y, sr = librosa.load(audio_path)
# using librosa to get audio features (f0, mfcc, cqt)
hop_length = 512 # hop_length smaller, seq_len larger
f0 = librosa.feature.zero_crossing_rate(y, hop_length=hop_length).T # (seq_len, 1)
mfcc = librosa.feature.mfcc(y=y, sr=sr, hop_length=hop_length, htk=True).T # (seq_len, 20)
cqt = librosa.feature.chroma_cqt(y=y, sr=sr, hop_length=hop_length).T # (seq_len, 12)
return np.concatenate([f0, mfcc, cqt], axis=-1)
def __getTextEmbedding(self, text):
        # from_pretrained also accepts a local model directory path
tokenizer = BertTokenizer.from_pretrained(self.pretrainedBertPath)
model = BertModel.from_pretrained(self.pretrainedBertPath)
# add_special_tokens will add start and end token
input_ids = torch.tensor([tokenizer.encode(text, add_special_tokens=True)])
with torch.no_grad():
last_hidden_states = model(input_ids)[0] # Models outputs are now tuples
return last_hidden_states.squeeze().numpy()
def __preTextforBert(self, text):
tokens_a = self.tokenizer.tokenize(text,invertable=True)
tokens = ["[CLS]"] + tokens_a + ["[SEP]"]
segment_ids = [0] * len(tokens)
input_ids = self.tokenizer.convert_tokens_to_ids(tokens)
input_mask = [1] * len(input_ids)
input_ids = np.expand_dims(input_ids, 1)
input_mask = np.expand_dims(input_mask, 1)
segment_ids = np.expand_dims(segment_ids, 1)
text_bert = np.concatenate([input_ids, input_mask, segment_ids], axis=1)
return text_bert
def __getitem__(self, index):
tmp_dir = os.path.join(self.working_dir, f'Processed/tmp-{index}')
if not os.path.exists(tmp_dir):
os.makedirs(tmp_dir)
video_id, clip_id, text, label, annotation, mode, _ = self.df.loc[index]
cur_id = video_id + '$_$' + clip_id
# video
video_path = os.path.join(self.working_dir, 'Raw', video_id, clip_id + '.mp4')
embedding_V = self.__getVideoEmbedding(video_path, tmp_dir)
seq_V = embedding_V.shape[0]
# audio
audio_path = os.path.join(tmp_dir, 'tmp.wav')
embedding_A = self.__getAudioEmbedding(video_path, audio_path)
seq_A = embedding_A.shape[0]
# text
embedding_T = self.__getTextEmbedding(text)
text_bert = self.__preTextforBert(text)
seq_T = embedding_T.shape[0]
ret = {
'id': cur_id,
'audio': embedding_A,
'vision': embedding_V,
'raw_text': text,
'text': embedding_T,
'text_bert': text_bert,
'audio_lengths': seq_A,
'vision_lengths': seq_V,
'annotations': annotation,
'classification_labels': self.annotation_dict[annotation],
'regression_labels': label,
'mode': mode
}
# clear tmp dir to save space
shutil.rmtree(tmp_dir)
return ret
class MDataPre():
def __init__(self, args):
self.working_dir = args.working_dir
# padding
self.padding_mode = 'zeros'
self.padding_location = 'back'
def __padding(self, feature, MAX_LEN):
"""
mode:
zero: padding with 0
normal: padding with normal distribution
location: front / back
"""
assert self.padding_mode in ['zeros', 'normal']
assert self.padding_location in ['front', 'back']
length = feature.shape[0]
if length >= MAX_LEN:
return feature[:MAX_LEN, :]
if self.padding_mode == "zeros":
pad = np.zeros([MAX_LEN - length, feature.shape[-1]])
elif self.padding_mode == "normal":
mean, std = feature.mean(), feature.std()
pad = np.random.normal(mean, std, (MAX_LEN-length, feature.shape[1]))
feature = np.concatenate([pad, feature], axis=0) if(self.padding_location == "front") else \
np.concatenate((feature, pad), axis=0)
return feature
def __paddingSequence(self, sequences):
if len(sequences) == 0:
return sequences
feature_dim = sequences[0].shape[-1]
lens = [s.shape[0] for s in sequences]
        # choose the padded length as mean + 3 * std of the sequence lengths
final_length = int(np.mean(lens) + 3 * np.std(lens))
# padding sequences to final_length
final_sequence = np.zeros([len(sequences), final_length, feature_dim])
for i, s in enumerate(sequences):
if len(s) != 0:
final_sequence[i] = self.__padding(s, final_length)
return final_sequence
def __collate_fn(self, batch):
ret = {k: [] for k in batch[0].keys()}
for b in batch:
for k,v in b.items():
ret[k].append(v)
return ret
def run(self):
output_path = os.path.join(self.working_dir, 'Processed/features.pkl')
# load last point
if os.path.exists(output_path):
with open(output_path, 'rb') as f:
data = pickle.load(f)
last_row_idx = len(data['id'])
else:
data = {"id": [],
"raw_text": [],
"audio": [],
"vision": [],
"text": [],
"text_bert": [],
"audio_lengths": [],
"vision_lengths": [],
"annotations": [],
"classification_labels": [],
"regression_labels": [],
"mode": []}
last_row_idx = 0
args.df = pd.read_csv(os.path.join(self.working_dir, 'label.csv'), dtype={'clip_id': str, 'video_id': str, 'text': str})
args.df = args.df[last_row_idx:]
dataloader = DataLoader(MDataPreLoader(args),
batch_size=64,
num_workers=8,
shuffle=False,
collate_fn=self.__collate_fn)
isEnd = False
try:
with tqdm(dataloader) as td:
for batch_data in td:
for k, v in batch_data.items():
data[k].extend(v)
isEnd = True
except Exception as e:
print(e)
finally:
try:
if isEnd:
# padding
for item in ['audio', 'vision', 'text', 'text_bert']:
data[item] = self.__paddingSequence(data[item])
# data['mode'] = list(args.df['mode'])
# split train, valid, test
inx_dict = {
mode + '_index': [i for i, v in enumerate(data['mode']) if v == mode]
for mode in ['train', 'valid', 'test']
}
data.pop('mode')
final_data = {k: {} for k in ['train', 'valid', 'test']}
for mode in ['train', 'valid', 'test']:
indexes = inx_dict[mode + '_index']
for item in data.keys():
if isinstance(data[item], list):
final_data[mode][item] = [data[item][v] for v in indexes]
else:
final_data[mode][item] = data[item][indexes]
data = final_data
except Exception as e:
print(e)
finally:
with open(output_path, 'wb') as wf:
pickle.dump(data, wf, protocol = 4)
print('Features are saved in %s!' %output_path)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--working_dir', type=str, default='/home/sharing/disk3/dataset/multimodal-sentiment-dataset/StandardDatasets/MOSEI',
help='path to datasets')
parser.add_argument('--language', type=str, default="en",
help='en / cn')
parser.add_argument('--openface2Path', type=str, default="/home/iyuge2/ToolKits/OpenFace/build/bin/FeatureExtraction",
help='path to FeatureExtraction tool in openface2')
return parser.parse_args()
if __name__ == "__main__":
args = parse_args()
dp = MDataPre(args)
dp.run()
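# A standalone sketch (not part of the module) of the zeros/back padding case
# implemented in MDataPre.__padding; the sizes below are arbitrary placeholders.
def _padding_demo():
    feat = np.random.rand(3, 4)                                # (seq_len=3, feature_dim=4)
    MAX_LEN = 5
    pad = np.zeros([MAX_LEN - feat.shape[0], feat.shape[-1]])
    padded = np.concatenate((feat, pad), axis=0)             # padding_location == 'back'
    assert padded.shape == (5, 4)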
|
[
"os.mkdir",
"pickle.dump",
"argparse.ArgumentParser",
"pandas.read_csv",
"numpy.mean",
"pickle.load",
"numpy.random.normal",
"shutil.rmtree",
"torch.no_grad",
"librosa.feature.mfcc",
"os.path.join",
"numpy.std",
"os.path.exists",
"transformers.BertModel.from_pretrained",
"librosa.feature.zero_crossing_rate",
"librosa.feature.chroma_cqt",
"tqdm.tqdm",
"os.system",
"librosa.load",
"transformers.BertTokenizer.from_pretrained",
"numpy.concatenate",
"os.makedirs",
"numpy.zeros",
"numpy.expand_dims",
"numpy.array"
] |
[((10167, 10192), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (10190, 10192), False, 'import argparse\n'), ((1269, 1299), 'os.path.join', 'os.path.join', (['tmp_dir', '"""Faces"""'], {}), "(tmp_dir, 'Faces')\n", (1281, 1299), False, 'import os\n'), ((1308, 1335), 'os.mkdir', 'os.mkdir', (['faces_feature_dir'], {}), '(faces_feature_dir)\n', (1316, 1335), False, 'import os\n'), ((1434, 1448), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (1443, 1448), False, 'import os\n'), ((2079, 2097), 'numpy.array', 'np.array', (['features'], {}), '(features)\n', (2087, 2097), True, 'import numpy as np\n'), ((2313, 2327), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (2322, 2327), False, 'import os\n'), ((2367, 2391), 'librosa.load', 'librosa.load', (['audio_path'], {}), '(audio_path)\n', (2379, 2391), False, 'import librosa\n'), ((2816, 2856), 'numpy.concatenate', 'np.concatenate', (['[f0, mfcc, cqt]'], {'axis': '(-1)'}), '([f0, mfcc, cqt], axis=-1)\n', (2830, 2856), True, 'import numpy as np\n'), ((2950, 3004), 'transformers.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['self.pretrainedBertPath'], {}), '(self.pretrainedBertPath)\n', (2979, 3004), False, 'from transformers import BertTokenizer, BertModel\n'), ((3021, 3071), 'transformers.BertModel.from_pretrained', 'BertModel.from_pretrained', (['self.pretrainedBertPath'], {}), '(self.pretrainedBertPath)\n', (3046, 3071), False, 'from transformers import BertTokenizer, BertModel\n'), ((3719, 3747), 'numpy.expand_dims', 'np.expand_dims', (['input_ids', '(1)'], {}), '(input_ids, 1)\n', (3733, 3747), True, 'import numpy as np\n'), ((3769, 3798), 'numpy.expand_dims', 'np.expand_dims', (['input_mask', '(1)'], {}), '(input_mask, 1)\n', (3783, 3798), True, 'import numpy as np\n'), ((3821, 3851), 'numpy.expand_dims', 'np.expand_dims', (['segment_ids', '(1)'], {}), '(segment_ids, 1)\n', (3835, 3851), True, 'import numpy as np\n'), ((3873, 3933), 'numpy.concatenate', 'np.concatenate', (['[input_ids, input_mask, segment_ids]'], {'axis': '(1)'}), '([input_ids, input_mask, segment_ids], axis=1)\n', (3887, 3933), True, 'import numpy as np\n'), ((4013, 4069), 'os.path.join', 'os.path.join', (['self.working_dir', 'f"""Processed/tmp-{index}"""'], {}), "(self.working_dir, f'Processed/tmp-{index}')\n", (4025, 4069), False, 'import os\n'), ((4305, 4370), 'os.path.join', 'os.path.join', (['self.working_dir', '"""Raw"""', 'video_id', "(clip_id + '.mp4')"], {}), "(self.working_dir, 'Raw', video_id, clip_id + '.mp4')\n", (4317, 4370), False, 'import os\n'), ((4513, 4545), 'os.path.join', 'os.path.join', (['tmp_dir', '"""tmp.wav"""'], {}), "(tmp_dir, 'tmp.wav')\n", (4525, 4545), False, 'import os\n'), ((5321, 5343), 'shutil.rmtree', 'shutil.rmtree', (['tmp_dir'], {}), '(tmp_dir)\n', (5334, 5343), False, 'import shutil\n'), ((7284, 7340), 'os.path.join', 'os.path.join', (['self.working_dir', '"""Processed/features.pkl"""'], {}), "(self.working_dir, 'Processed/features.pkl')\n", (7296, 7340), False, 'import os\n'), ((7378, 7405), 'os.path.exists', 'os.path.exists', (['output_path'], {}), '(output_path)\n', (7392, 7405), False, 'import os\n'), ((1538, 1578), 'os.path.join', 'os.path.join', (['faces_feature_dir', '"""*.csv"""'], {}), "(faces_feature_dir, '*.csv')\n", (1550, 1578), False, 'import os\n'), ((1659, 1679), 'pandas.read_csv', 'pd.read_csv', (['df_path'], {}), '(df_path)\n', (1670, 1679), True, 'import pandas as pd\n'), ((2529, 2589), 'librosa.feature.zero_crossing_rate', 'librosa.feature.zero_crossing_rate', (['y'], {'hop_length': 'hop_length'}), '(y, hop_length=hop_length)\n', (2563, 2589), False, 'import librosa\n'), ((2622, 2687), 'librosa.feature.mfcc', 'librosa.feature.mfcc', ([], {'y': 'y', 'sr': 'sr', 'hop_length': 'hop_length', 'htk': '(True)'}), '(y=y, sr=sr, hop_length=hop_length, htk=True)\n', (2642, 2687), False, 'import librosa\n'), ((2720, 2781), 'librosa.feature.chroma_cqt', 'librosa.feature.chroma_cqt', ([], {'y': 'y', 'sr': 'sr', 'hop_length': 'hop_length'}), '(y=y, sr=sr, hop_length=hop_length)\n', (2746, 2781), False, 'import librosa\n'), ((3227, 3242), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3240, 3242), False, 'import torch\n'), ((4085, 4108), 'os.path.exists', 'os.path.exists', (['tmp_dir'], {}), '(tmp_dir)\n', (4099, 4108), False, 'import os\n'), ((4122, 4142), 'os.makedirs', 'os.makedirs', (['tmp_dir'], {}), '(tmp_dir)\n', (4133, 4142), False, 'import os\n'), ((6040, 6087), 'numpy.zeros', 'np.zeros', (['[MAX_LEN - length, feature.shape[-1]]'], {}), '([MAX_LEN - length, feature.shape[-1]])\n', (6048, 6087), True, 'import numpy as np\n'), ((6287, 6325), 'numpy.concatenate', 'np.concatenate', (['[pad, feature]'], {'axis': '(0)'}), '([pad, feature], axis=0)\n', (6301, 6325), True, 'import numpy as np\n'), ((6388, 6426), 'numpy.concatenate', 'np.concatenate', (['(feature, pad)'], {'axis': '(0)'}), '((feature, pad), axis=0)\n', (6402, 6426), True, 'import numpy as np\n'), ((8061, 8104), 'os.path.join', 'os.path.join', (['self.working_dir', '"""label.csv"""'], {}), "(self.working_dir, 'label.csv')\n", (8073, 8104), False, 'import os\n'), ((6204, 6269), 'numpy.random.normal', 'np.random.normal', (['mean', 'std', '(MAX_LEN - length, feature.shape[1])'], {}), '(mean, std, (MAX_LEN - length, feature.shape[1]))\n', (6220, 6269), True, 'import numpy as np\n'), ((6719, 6732), 'numpy.mean', 'np.mean', (['lens'], {}), '(lens)\n', (6726, 6732), True, 'import numpy as np\n'), ((7477, 7491), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (7488, 7491), False, 'import pickle\n'), ((8511, 8527), 'tqdm.tqdm', 'tqdm', (['dataloader'], {}), '(dataloader)\n', (8515, 8527), False, 'from tqdm import tqdm\n'), ((1755, 1790), 'numpy.array', 'np.array', (['df.loc[i][df.columns[5:]]'], {}), '(df.loc[i][df.columns[5:]])\n', (1763, 1790), True, 'import numpy as np\n'), ((6739, 6751), 'numpy.std', 'np.std', (['lens'], {}), '(lens)\n', (6745, 6751), True, 'import numpy as np\n'), ((10034, 10067), 'pickle.dump', 'pickle.dump', (['data', 'wf'], {'protocol': '(4)'}), '(data, wf, protocol=4)\n', (10045, 10067), False, 'import pickle\n'), ((2025, 2049), 'numpy.array', 'np.array', (['local_features'], {}), '(local_features)\n', (2033, 2049), True, 'import numpy as np\n'), ((1873, 1897), 'numpy.array', 'np.array', (['local_features'], {}), '(local_features)\n', (1881, 1897), True, 'import numpy as np\n')]
|
from flask import Flask
from flask_login import LoginManager
from flask_restful import Api
from flask_mail import Mail
from data import global_init, create_session, User
from data.user import AnonymousUser
from config import config
from bot import bot_launch
from threading import Thread
import logging
import os
config.setup()
global_init()
app = Flask(__name__, static_folder=config.STATIC_FOLDER)
app.jinja_options['extensions'].extend(config.JINJA_EXTENSIONS)
app.config.from_object(config)
app.jinja_env.globals['client_id'] = app.config['CLIENT_ID']
app.jinja_env.globals['group_id'] = app.config['VK_GROUP_ID']
mail = Mail(app)
from .mails import send_message, send_messages
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.anonymous_user = AnonymousUser
@login_manager.user_loader
def load_user(user_id) -> User:
session = create_session()
return session.query(User).get(user_id)
from . import errorhandlers
from . import web_pages
from . import single_pages
from . import web_utils
app.register_blueprint(web_pages.blueprint)
app.register_blueprint(single_pages.blueprint)
app.register_blueprint(web_utils.blueprint)
api = Api(app)
from .resources import TournamentResource
from .resources import UserResource, UsersResource, TeamResource, LeagueResource, LeaguesResource
from .resources import GameResource, GamesResource, ProtocolResource
from .resources import PostResource, TournamentPostsResource
api.add_resource(UserResource, '/api/user/<int:user_id>')
api.add_resource(UsersResource, '/api/user')
api.add_resource(TournamentResource, '/api/tournament/<int:tour_id>')
api.add_resource(TeamResource, '/api/team/<int:team_id>')
api.add_resource(LeagueResource, '/api/league/<int:league_id>')
api.add_resource(LeaguesResource, '/api/league')
api.add_resource(GameResource, '/api/game/<int:game_id>')
api.add_resource(GamesResource, '/api/game')
api.add_resource(ProtocolResource, '/api/game/<int:game_id>/protocol')
api.add_resource(PostResource, '/api/post', '/api/post/<int:post_id>')
api.add_resource(TournamentPostsResource, '/api/tournament/<int:tour_id>/posts')
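# Illustrative requests against the resources registered above (hypothetical
# host/port, not part of the original file):
#   curl http://localhost:5000/api/user/1
#   curl http://localhost:5000/api/tournament/3/posts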
bot_working = os.environ.get('BOT_WORKING', None)  # Current running status of the VK bot
if not bot_working:
bot_thread = Thread(target=bot_launch)
bot_thread.start()
logging.info('Bot is working!')
|
[
"data.global_init",
"flask_restful.Api",
"threading.Thread",
"flask.Flask",
"flask_mail.Mail",
"os.environ.get",
"logging.info",
"data.create_session",
"config.config.setup",
"flask_login.LoginManager"
] |
[((326, 340), 'config.config.setup', 'config.setup', ([], {}), '()\n', (338, 340), False, 'from config import config\n'), ((342, 355), 'data.global_init', 'global_init', ([], {}), '()\n', (353, 355), False, 'from data import global_init, create_session, User\n'), ((365, 416), 'flask.Flask', 'Flask', (['__name__'], {'static_folder': 'config.STATIC_FOLDER'}), '(__name__, static_folder=config.STATIC_FOLDER)\n', (370, 416), False, 'from flask import Flask\n'), ((649, 658), 'flask_mail.Mail', 'Mail', (['app'], {}), '(app)\n', (653, 658), False, 'from flask_mail import Mail\n'), ((726, 740), 'flask_login.LoginManager', 'LoginManager', ([], {}), '()\n', (738, 740), False, 'from flask_login import LoginManager\n'), ((1218, 1226), 'flask_restful.Api', 'Api', (['app'], {}), '(app)\n', (1221, 1226), False, 'from flask_restful import Api\n'), ((2201, 2236), 'os.environ.get', 'os.environ.get', (['"""BOT_WORKING"""', 'None'], {}), "('BOT_WORKING', None)\n", (2215, 2236), False, 'import os\n'), ((896, 912), 'data.create_session', 'create_session', ([], {}), '()\n', (910, 912), False, 'from data import global_init, create_session, User\n'), ((2309, 2334), 'threading.Thread', 'Thread', ([], {'target': 'bot_launch'}), '(target=bot_launch)\n', (2315, 2334), False, 'from threading import Thread\n'), ((2364, 2395), 'logging.info', 'logging.info', (['"""Bot is working!"""'], {}), "('Bot is working!')\n", (2376, 2395), False, 'import logging\n')]
|
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Feature Pyramid Network and Path Aggregation variants used in YOLO."""
from typing import Mapping, Union, Optional
import tensorflow as tf
from official.modeling import hyperparams
from official.vision.beta.modeling.decoders import factory
from official.vision.beta.projects.yolo.modeling.layers import nn_blocks
# model configurations
# The structure is as follows: a model version {v3, v4, ...} maps to
# the model config types {regular, tiny, small, large, ...} for that version.
YOLO_MODELS = {
'v4':
dict(
regular=dict(
embed_spp=False,
use_fpn=True,
max_level_process_len=None,
path_process_len=6),
tiny=dict(
embed_spp=False,
use_fpn=False,
max_level_process_len=2,
path_process_len=1),
csp=dict(
embed_spp=False,
use_fpn=True,
max_level_process_len=None,
csp_stack=5,
fpn_depth=5,
path_process_len=6),
csp_large=dict(
embed_spp=False,
use_fpn=True,
max_level_process_len=None,
csp_stack=7,
fpn_depth=7,
max_fpn_depth=5,
max_csp_stack=5,
path_process_len=8,
fpn_filter_scale=1),
csp_xlarge=dict(
embed_spp=False,
use_fpn=True,
max_level_process_len=None,
csp_stack=7,
fpn_depth=7,
path_process_len=8,
fpn_filter_scale=1),
),
'v3':
dict(
regular=dict(
embed_spp=False,
use_fpn=False,
max_level_process_len=None,
path_process_len=6),
tiny=dict(
embed_spp=False,
use_fpn=False,
max_level_process_len=2,
path_process_len=1),
spp=dict(
embed_spp=True,
use_fpn=False,
max_level_process_len=2,
path_process_len=1),
),
}
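# Illustrative lookup (not part of the original file): a decoder configuration
# is selected by model version and config type, e.g.
#   YOLO_MODELS['v4']['tiny']
# returns dict(embed_spp=False, use_fpn=False, max_level_process_len=2,
# path_process_len=1), whose keys are merged into the decoder config by
# build_yolo_decoder below.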
class _IdentityRoute(tf.keras.layers.Layer):
def call(self, inputs): # pylint: disable=arguments-differ
return None, inputs
class YoloFPN(tf.keras.layers.Layer):
"""YOLO Feature pyramid network."""
def __init__(self,
fpn_depth=4,
max_fpn_depth=None,
max_csp_stack=None,
use_spatial_attention=False,
csp_stack=False,
activation='leaky',
fpn_filter_scale=1,
use_sync_bn=False,
use_separable_conv=False,
norm_momentum=0.99,
norm_epsilon=0.001,
kernel_initializer='VarianceScaling',
kernel_regularizer=None,
bias_regularizer=None,
**kwargs):
"""Yolo FPN initialization function (Yolo V4).
Args:
fpn_depth: `int`, number of layers to use in each FPN path
if you choose to use an FPN.
max_fpn_depth: `int`, number of layers to use in each FPN path
if you choose to use an FPN along the largest FPN level.
max_csp_stack: `int`, number of layers to use for CSP on the largest_path
only.
use_spatial_attention: `bool`, use the spatial attention module.
csp_stack: `bool`, CSPize the FPN.
      activation: `str`, the activation function to use, typically leaky or mish.
fpn_filter_scale: `int`, scaling factor for the FPN filters.
use_sync_bn: if True, use synchronized batch normalization.
use_separable_conv: `bool` whether to use separable convs.
norm_momentum: `float`, normalization momentum for the moving average.
norm_epsilon: `float`, small float added to variance to avoid dividing by
zero.
kernel_initializer: kernel_initializer for convolutional layers.
kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
**kwargs: keyword arguments to be passed.
"""
super().__init__(**kwargs)
self._fpn_depth = fpn_depth
self._max_fpn_depth = max_fpn_depth or self._fpn_depth
self._activation = activation
self._use_sync_bn = use_sync_bn
self._use_separable_conv = use_separable_conv
self._norm_momentum = norm_momentum
self._norm_epsilon = norm_epsilon
self._kernel_initializer = kernel_initializer
self._kernel_regularizer = kernel_regularizer
self._bias_regularizer = bias_regularizer
self._use_spatial_attention = use_spatial_attention
self._filter_scale = fpn_filter_scale
self._csp_stack = csp_stack
self._max_csp_stack = max_csp_stack or min(self._max_fpn_depth, csp_stack)
self._base_config = dict(
activation=self._activation,
use_sync_bn=self._use_sync_bn,
use_separable_conv=self._use_separable_conv,
kernel_regularizer=self._kernel_regularizer,
kernel_initializer=self._kernel_initializer,
bias_regularizer=self._bias_regularizer,
norm_epsilon=self._norm_epsilon,
norm_momentum=self._norm_momentum)
def get_raw_depths(self, minimum_depth, inputs):
"""Calculates the unscaled depths of the FPN branches.
Args:
minimum_depth (int): depth of the smallest branch of the FPN.
inputs (dict): dictionary of the shape of input args as a dictionary of
lists.
Returns:
The unscaled depths of the FPN branches.
"""
depths = []
for i in range(self._min_level, self._max_level + 1):
depths.append(inputs[str(i)][-1] / self._filter_scale)
return list(reversed(depths))
def build(self, inputs):
"""Use config dictionary to generate all important attributes for head.
Args:
inputs: dictionary of the shape of input args as a dictionary of lists.
"""
keys = [int(key) for key in inputs.keys()]
self._min_level = min(keys)
self._max_level = max(keys)
self._min_depth = inputs[str(self._min_level)][-1]
self._depths = self.get_raw_depths(self._min_depth, inputs)
# directly connect to an input path and process it
self.preprocessors = dict()
# resample an input and merge it with the output of another path
    # in order to aggregate backbone outputs
self.resamples = dict()
    # set of convolution layers and upsample layers that are used to
# prepare the FPN processors for output
for level, depth in zip(
reversed(range(self._min_level, self._max_level + 1)), self._depths):
if level == self._min_level:
self.resamples[str(level)] = nn_blocks.PathAggregationBlock(
filters=depth // 2,
inverted=True,
upsample=True,
drop_final=self._csp_stack == 0,
upsample_size=2,
**self._base_config)
self.preprocessors[str(level)] = _IdentityRoute()
elif level != self._max_level:
self.resamples[str(level)] = nn_blocks.PathAggregationBlock(
filters=depth // 2,
inverted=True,
upsample=True,
drop_final=False,
upsample_size=2,
**self._base_config)
self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
filters=depth,
repetitions=self._fpn_depth - int(level == self._min_level),
block_invert=True,
insert_spp=False,
csp_stack=self._csp_stack,
**self._base_config)
else:
self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
filters=depth,
repetitions=self._max_fpn_depth + 1 * int(self._csp_stack == 0),
insert_spp=True,
block_invert=False,
csp_stack=min(self._csp_stack, self._max_fpn_depth),
**self._base_config)
def call(self, inputs):
outputs = dict()
layer_in = inputs[str(self._max_level)]
for level in reversed(range(self._min_level, self._max_level + 1)):
_, x = self.preprocessors[str(level)](layer_in)
outputs[str(level)] = x
if level > self._min_level:
x_next = inputs[str(level - 1)]
_, layer_in = self.resamples[str(level - 1)]([x_next, x])
return outputs
class YoloPAN(tf.keras.layers.Layer):
"""YOLO Path Aggregation Network."""
def __init__(self,
path_process_len=6,
max_level_process_len=None,
embed_spp=False,
use_spatial_attention=False,
csp_stack=False,
activation='leaky',
use_sync_bn=False,
use_separable_conv=False,
norm_momentum=0.99,
norm_epsilon=0.001,
kernel_initializer='VarianceScaling',
kernel_regularizer=None,
bias_regularizer=None,
fpn_input=True,
fpn_filter_scale=1.0,
**kwargs):
"""Yolo Path Aggregation Network initialization function (Yolo V3 and V4).
Args:
      path_process_len: `int`, number of layers to use in each Decoder path.
      max_level_process_len: `int`, number of layers to use in the largest
        processing path, or the backbone's largest output if it is different.
embed_spp: `bool`, use the SPP found in the YoloV3 and V4 model.
use_spatial_attention: `bool`, use the spatial attention module.
csp_stack: `bool`, CSPize the FPN.
      activation: `str`, the activation function to use, typically leaky or mish.
use_sync_bn: if True, use synchronized batch normalization.
use_separable_conv: `bool` whether to use separable convs.
      norm_momentum: `float`, normalization momentum for the moving average.
norm_epsilon: `float`, small float added to variance to avoid dividing
by zero.
kernel_initializer: kernel_initializer for convolutional layers.
kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
      fpn_input: `bool`, for whether the input into this function is an FPN or
        a backbone.
fpn_filter_scale: `int`, scaling factor for the FPN filters.
**kwargs: keyword arguments to be passed.
"""
super().__init__(**kwargs)
self._path_process_len = path_process_len
self._embed_spp = embed_spp
self._use_spatial_attention = use_spatial_attention
self._activation = activation
self._use_sync_bn = use_sync_bn
self._use_separable_conv = use_separable_conv
self._norm_momentum = norm_momentum
self._norm_epsilon = norm_epsilon
self._kernel_initializer = kernel_initializer
self._kernel_regularizer = kernel_regularizer
self._bias_regularizer = bias_regularizer
self._fpn_input = fpn_input
self._max_level_process_len = max_level_process_len
self._csp_stack = csp_stack
self._fpn_filter_scale = fpn_filter_scale
if max_level_process_len is None:
self._max_level_process_len = path_process_len
self._base_config = dict(
activation=self._activation,
use_sync_bn=self._use_sync_bn,
use_separable_conv=self._use_separable_conv,
kernel_regularizer=self._kernel_regularizer,
kernel_initializer=self._kernel_initializer,
bias_regularizer=self._bias_regularizer,
norm_epsilon=self._norm_epsilon,
norm_momentum=self._norm_momentum)
def build(self, inputs):
"""Use config dictionary to generate all important attributes for head.
Args:
inputs: dictionary of the shape of input args as a dictionary of lists.
"""
# define the key order
keys = [int(key) for key in inputs.keys()]
self._min_level = min(keys)
self._max_level = max(keys)
self._min_depth = inputs[str(self._min_level)][-1]
self._depths = self.get_raw_depths(self._min_depth, inputs)
# directly connect to an input path and process it
self.preprocessors = dict()
# resample an input and merge it with the output of another path
    # in order to aggregate backbone outputs
self.resamples = dict()
    # FPN will reverse the key process order for the backbone, so we need to
    # adjust the order in which objects are created and processed to account
    # for this. Not using an FPN will directly connect the decoder to the
    # backbone; therefore the object creation order needs to go from the
    # largest to the smallest level.
if self._fpn_input:
# process order {... 3, 4, 5}
self._iterator = range(self._min_level, self._max_level + 1)
self._check = lambda x: x < self._max_level
self._key_shift = lambda x: x + 1
self._input = self._min_level
downsample = True
upsample = False
else:
# process order {5, 4, 3, ...}
self._iterator = list(
reversed(range(self._min_level, self._max_level + 1)))
self._check = lambda x: x > self._min_level
self._key_shift = lambda x: x - 1
self._input = self._max_level
downsample = False
upsample = True
for level, depth in zip(self._iterator, self._depths):
if level > 5:
proc_filters = lambda x: x * 2
resample_filters = lambda x: x
elif self._csp_stack == 0:
proc_filters = lambda x: x
resample_filters = lambda x: x // 2
else:
proc_filters = lambda x: x * 2
resample_filters = lambda x: x
if level == self._input:
self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
filters=proc_filters(depth),
repetitions=self._max_level_process_len,
insert_spp=self._embed_spp,
block_invert=False,
insert_sam=self._use_spatial_attention,
csp_stack=self._csp_stack,
**self._base_config)
else:
self.resamples[str(level)] = nn_blocks.PathAggregationBlock(
filters=resample_filters(depth),
upsample=upsample,
downsample=downsample,
inverted=False,
drop_final=self._csp_stack == 0,
**self._base_config)
self.preprocessors[str(level)] = nn_blocks.DarkRouteProcess(
filters=proc_filters(depth),
repetitions=self._path_process_len,
insert_spp=False,
insert_sam=self._use_spatial_attention,
csp_stack=self._csp_stack,
**self._base_config)
def get_raw_depths(self, minimum_depth, inputs):
"""Calculates the unscaled depths of the FPN branches.
Args:
minimum_depth: `int` depth of the smallest branch of the FPN.
inputs: `dict[str, tf.InputSpec]` of the shape of input args as a
dictionary of lists.
Returns:
The unscaled depths of the FPN branches.
"""
depths = []
if len(inputs.keys()) > 3 or self._fpn_filter_scale > 1:
for i in range(self._min_level, self._max_level + 1):
depths.append(inputs[str(i)][-1])
else:
for _ in range(self._min_level, self._max_level + 1):
depths.append(minimum_depth)
minimum_depth *= 2
if self._fpn_input:
return depths
return list(reversed(depths))
def call(self, inputs):
outputs = dict()
layer_in = inputs[str(self._input)]
for level in self._iterator:
x_route, x = self.preprocessors[str(level)](layer_in)
outputs[str(level)] = x
if self._check(level):
x_next = inputs[str(self._key_shift(level))]
_, layer_in = self.resamples[str(
self._key_shift(level))]([x_route, x_next])
return outputs
class YoloDecoder(tf.keras.Model):
"""Darknet Backbone Decoder."""
def __init__(self,
input_specs,
use_fpn=False,
use_spatial_attention=False,
csp_stack=False,
fpn_depth=4,
max_fpn_depth=None,
max_csp_stack=None,
fpn_filter_scale=1,
path_process_len=6,
max_level_process_len=None,
embed_spp=False,
activation='leaky',
use_sync_bn=False,
use_separable_conv=False,
norm_momentum=0.99,
norm_epsilon=0.001,
kernel_initializer='VarianceScaling',
kernel_regularizer=None,
bias_regularizer=None,
**kwargs):
"""Yolo Decoder initialization function.
    A unified model that ties all decoder components into a conditionally built
YOLO decoder.
Args:
input_specs: `dict[str, tf.InputSpec]`: input specs of each of the inputs
to the heads.
use_fpn: `bool`, use the FPN found in the YoloV4 model.
use_spatial_attention: `bool`, use the spatial attention module.
csp_stack: `bool`, CSPize the FPN.
      fpn_depth: `int`, number of layers to use in each FPN path if you choose
to use an FPN.
max_fpn_depth: `int`, maximum fpn depth.
max_csp_stack: `int`, maximum csp stack.
fpn_filter_scale: `int`, scaling factor for the FPN filters.
      path_process_len: `int`, number of layers to use in each Decoder path.
      max_level_process_len: `int`, number of layers to use in the largest
        processing path, or the backbone's largest output if it is different.
embed_spp: `bool`, use the SPP found in the YoloV3 and V4 model.
      activation: `str`, the activation function to use, typically leaky or mish.
use_sync_bn: if True, use synchronized batch normalization.
      use_separable_conv: `bool` whether to use separable convs.
      norm_momentum: `float`, normalization momentum for the moving average.
norm_epsilon: `float`, small float added to variance to avoid dividing by
zero.
kernel_initializer: kernel_initializer for convolutional layers.
kernel_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
bias_regularizer: tf.keras.regularizers.Regularizer object for Conv2D.
**kwargs: keyword arguments to be passed.
"""
self._input_specs = input_specs
self._use_fpn = use_fpn
self._fpn_depth = fpn_depth
self._max_fpn_depth = max_fpn_depth
self._max_csp_stack = max_csp_stack
self._path_process_len = path_process_len
self._max_level_process_len = max_level_process_len
self._embed_spp = embed_spp
self._activation = activation
self._use_sync_bn = use_sync_bn
self._use_separable_conv = use_separable_conv
self._norm_momentum = norm_momentum
self._norm_epsilon = norm_epsilon
self._kernel_initializer = kernel_initializer
self._kernel_regularizer = kernel_regularizer
self._bias_regularizer = bias_regularizer
self._base_config = dict(
use_spatial_attention=use_spatial_attention,
csp_stack=csp_stack,
activation=self._activation,
use_sync_bn=self._use_sync_bn,
use_separable_conv=self._use_separable_conv,
fpn_filter_scale=fpn_filter_scale,
norm_momentum=self._norm_momentum,
norm_epsilon=self._norm_epsilon,
kernel_initializer=self._kernel_initializer,
kernel_regularizer=self._kernel_regularizer,
bias_regularizer=self._bias_regularizer)
self._decoder_config = dict(
path_process_len=self._path_process_len,
max_level_process_len=self._max_level_process_len,
embed_spp=self._embed_spp,
fpn_input=self._use_fpn,
**self._base_config)
inputs = {
key: tf.keras.layers.Input(shape=value[1:])
for key, value in input_specs.items()
}
if self._use_fpn:
inter_outs = YoloFPN(
fpn_depth=self._fpn_depth,
max_fpn_depth=self._max_fpn_depth,
max_csp_stack=self._max_csp_stack,
**self._base_config)(inputs)
outputs = YoloPAN(**self._decoder_config)(inter_outs)
else:
inter_outs = None
outputs = YoloPAN(**self._decoder_config)(inputs)
self._output_specs = {key: value.shape for key, value in outputs.items()}
super().__init__(inputs=inputs, outputs=outputs, name='YoloDecoder')
@property
def use_fpn(self):
return self._use_fpn
@property
def output_specs(self):
return self._output_specs
def get_config(self):
config = dict(
input_specs=self._input_specs,
use_fpn=self._use_fpn,
fpn_depth=self._fpn_depth,
**self._decoder_config)
return config
@classmethod
def from_config(cls, config, custom_objects=None):
return cls(**config)
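# Illustrative construction (assumed backbone shapes, not part of the original
# file): the decoder consumes {level: shape} input specs and exposes the
# shapes of the decoded feature maps via ``output_specs``.
#   specs = {'3': [None, 52, 52, 256], '4': [None, 26, 26, 512],
#            '5': [None, 13, 13, 1024]}
#   decoder = YoloDecoder(specs, use_fpn=True, fpn_depth=4)
#   decoder.output_specs  # {level: tf.TensorShape} of the decoded features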
@factory.register_decoder_builder('yolo_decoder')
def build_yolo_decoder(
input_specs: Mapping[str, tf.TensorShape],
model_config: hyperparams.Config,
l2_regularizer: Optional[tf.keras.regularizers.Regularizer] = None,
**kwargs) -> Union[None, tf.keras.Model, tf.keras.layers.Layer]:
"""Builds Yolo FPN/PAN decoder from a config.
Args:
input_specs: A `dict` of input specifications. A dictionary consists of
{level: TensorShape} from a backbone.
model_config: A OneOfConfig. Model config.
l2_regularizer: A `tf.keras.regularizers.Regularizer` instance. Default to
None.
**kwargs: Additional kwargs arguments.
Returns:
A `tf.keras.Model` instance of the Yolo FPN/PAN decoder.
"""
decoder_cfg = model_config.decoder.get()
norm_activation_config = model_config.norm_activation
activation = (
decoder_cfg.activation if decoder_cfg.activation != 'same' else
norm_activation_config.activation)
if decoder_cfg.version is None: # custom yolo
raise ValueError('Decoder version cannot be None, specify v3 or v4.')
if decoder_cfg.version not in YOLO_MODELS:
raise ValueError(
        'Unsupported model version, please select from {v3, v4}, '
        'or specify a custom decoder config using YoloDecoder in your yaml.')
if decoder_cfg.type is None:
decoder_cfg.type = 'regular'
if decoder_cfg.type not in YOLO_MODELS[decoder_cfg.version]:
    raise ValueError('Unsupported model type, please select from '
                     f'{list(YOLO_MODELS[decoder_cfg.version].keys())} '
                     'or specify a custom decoder config using YoloDecoder.')
base_model = YOLO_MODELS[decoder_cfg.version][decoder_cfg.type]
cfg_dict = decoder_cfg.as_dict()
for key in base_model:
if cfg_dict[key] is not None:
base_model[key] = cfg_dict[key]
base_dict = dict(
activation=activation,
use_spatial_attention=decoder_cfg.use_spatial_attention,
use_separable_conv=decoder_cfg.use_separable_conv,
use_sync_bn=norm_activation_config.use_sync_bn,
norm_momentum=norm_activation_config.norm_momentum,
norm_epsilon=norm_activation_config.norm_epsilon,
kernel_regularizer=l2_regularizer)
base_model.update(base_dict)
model = YoloDecoder(input_specs, **base_model, **kwargs)
return model
|
[
"official.vision.beta.modeling.decoders.factory.register_decoder_builder",
"official.vision.beta.projects.yolo.modeling.layers.nn_blocks.PathAggregationBlock",
"tensorflow.keras.layers.Input"
] |
[((21313, 21361), 'official.vision.beta.modeling.decoders.factory.register_decoder_builder', 'factory.register_decoder_builder', (['"""yolo_decoder"""'], {}), "('yolo_decoder')\n", (21345, 21361), False, 'from official.vision.beta.modeling.decoders import factory\n'), ((20280, 20318), 'tensorflow.keras.layers.Input', 'tf.keras.layers.Input', ([], {'shape': 'value[1:]'}), '(shape=value[1:])\n', (20301, 20318), True, 'import tensorflow as tf\n'), ((7397, 7558), 'official.vision.beta.projects.yolo.modeling.layers.nn_blocks.PathAggregationBlock', 'nn_blocks.PathAggregationBlock', ([], {'filters': '(depth // 2)', 'inverted': '(True)', 'upsample': '(True)', 'drop_final': '(self._csp_stack == 0)', 'upsample_size': '(2)'}), '(filters=depth // 2, inverted=True, upsample=\n True, drop_final=self._csp_stack == 0, upsample_size=2, **self._base_config\n )\n', (7427, 7558), False, 'from official.vision.beta.projects.yolo.modeling.layers import nn_blocks\n'), ((7754, 7895), 'official.vision.beta.projects.yolo.modeling.layers.nn_blocks.PathAggregationBlock', 'nn_blocks.PathAggregationBlock', ([], {'filters': '(depth // 2)', 'inverted': '(True)', 'upsample': '(True)', 'drop_final': '(False)', 'upsample_size': '(2)'}), '(filters=depth // 2, inverted=True, upsample=\n True, drop_final=False, upsample_size=2, **self._base_config)\n', (7784, 7895), False, 'from official.vision.beta.projects.yolo.modeling.layers import nn_blocks\n')]
|
#!/usr/bin/python
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# TO RUN
# source scripts/setup_venv.sh
# xos-migrate [-s <service-name>] [-r ~/cord]
# eg: xos-migrate -r ~/Sites/cord -s core -s fabric
# TODO
# - add support to specify a name to be given to the generated migration (--name parameter in django makemigrations)
# - add support to generate empty migrations (needed for data-only migrations)
import os
import sys
import argparse
import yaml
import shutil
from xosgenx.generator import XOSProcessor, XOSProcessorArgs
from xosconfig import Config
from multistructlog import create_logger
REPO_ROOT = "~/cord"
def get_abs_path(dir_):
""" Convert a path specified by the user, which might be relative or based on
home directory location, into an absolute path.
"""
if os.path.isabs(dir_):
return os.path.realpath(dir_)
if dir_[0] == "~" and not os.path.exists(dir_):
dir_ = os.path.expanduser(dir_)
return os.path.abspath(dir_)
return os.path.abspath(os.path.join(os.getcwd(), dir_))
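# Illustrative behaviour (assuming HOME=/home/alice, not part of the original
# file):
#   get_abs_path('~/cord')   -> '/home/alice/cord'
#   get_abs_path('foo/bar')  -> os.path.abspath(os.path.join(os.getcwd(), 'foo/bar'))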
def get_migration_library_path(dir_):
""" Return a directory relative to the location of the migration library """
return os.path.dirname(os.path.realpath(__file__)) + "/" + dir_
def print_banner(root):
log.info(r"---------------------------------------------------------------")
log.info(r" _ __ ")
log.info(r" _ ______ _____ ____ ___ (_)___ __________ _/ /____ ")
log.info(r" | |/_/ __ \/ ___/_____/ __ `__ \/ / __ `/ ___/ __ `/ __/ _ \ ")
log.info(r" _> </ /_/ (__ )_____/ / / / / / / /_/ / / / /_/ / /_/ __/ ")
log.info(r"/_/|_|\____/____/ /_/ /_/ /_/_/\__, /_/ \__,_/\__/\___/ ")
log.info(r" /____/ ")
log.info(r"---------------------------------------------------------------")
log.debug("CORD repo root", root=root)
log.debug("Storing logs in: %s" % os.environ["LOG_FILE"])
log.debug(r"---------------------------------------------------------------")
def generate_core_models(core_dir):
core_xproto = os.path.join(core_dir, "core.xproto")
args = XOSProcessorArgs(
output=core_dir,
target="django.xtarget",
dest_extension="py",
write_to_file="model",
files=[core_xproto],
)
XOSProcessor.process(args)
security_args = XOSProcessorArgs(
output=core_dir,
target="django-security.xtarget",
dest_file="security.py",
write_to_file="single",
files=[core_xproto],
)
XOSProcessor.process(security_args)
init_args = XOSProcessorArgs(
output=core_dir,
target="init.xtarget",
dest_file="__init__.py",
write_to_file="single",
files=[core_xproto],
)
XOSProcessor.process(init_args)
def find_xproto_in_folder(path):
"""
Recursively iterate a folder tree to look for any xProto file.
    We use this function because the name of the xProto may differ from the name of the folder (eg: olt-service).
:param path: the root folder to start the search
:return: [string]
"""
xprotos = []
for fn in os.listdir(path):
# skip hidden files and folders
if fn.startswith("."):
continue
full_path = os.path.join(path, fn)
if fn.endswith(".xproto"):
xprotos.append(full_path)
elif os.path.isdir(full_path):
xprotos = xprotos + find_xproto_in_folder(full_path)
return xprotos
def find_decls_models(path):
"""
Recursively iterate a folder tree to look for any models.py file.
This files contain the base model for _decl generated models.
:param path: the root folder to start the search
:return: [string]
"""
decls = []
for fn in os.listdir(path):
# skip hidden files and folders
if fn.startswith("."):
continue
full_path = os.path.join(path, fn)
if fn == "models.py":
decls.append(full_path)
elif os.path.isdir(full_path):
decls = decls + find_decls_models(full_path)
return decls
def get_service_name_from_config(path):
"""
    Given a service folder, look for the config.yaml file and find the name
:param path: the root folder to start the search
:return: string
"""
config = os.path.join(path, "xos/synchronizer/config.yaml")
if not os.path.isfile(config):
raise Exception("Config file not found at: %s" % config)
cfg_file = open(config)
cfg = yaml.safe_load(cfg_file)
return cfg["name"]
def generate_service_models(service_dir, service_dest_dir, service_name):
"""
Generate the django code starting from xProto for a given service.
:param service_dir: string (path to the folder)
:param service_name: string (name of the service)
:return: void
"""
sync_dir = os.path.join(service_dir, "xos/synchronizer/models")
xprotos = find_xproto_in_folder(sync_dir)
decls = find_decls_models(sync_dir)
log.debug("Generating models for %s from files %s" % (service_name, ", ".join(xprotos)))
out_dir = os.path.join(service_dest_dir, service_name)
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
args = XOSProcessorArgs(
output=out_dir,
files=xprotos,
target="service.xtarget",
write_to_file="target",
)
XOSProcessor.process(args)
security_args = XOSProcessorArgs(
output=out_dir,
target="django-security.xtarget",
dest_file="security.py",
write_to_file="single",
files=xprotos,
)
XOSProcessor.process(security_args)
init_py_filename = os.path.join(out_dir, "__init__.py")
if not os.path.exists(init_py_filename):
open(init_py_filename, "w").write("# created by dynamicbuild")
# copy over models.py files from the service
if len(decls) > 0:
for file in decls:
fn = os.path.basename(file)
src_fn = file
dest_fn = os.path.join(out_dir, fn)
log.debug("Copying models.py from %s to %s" % (src_fn, dest_fn))
shutil.copyfile(src_fn, dest_fn)
# copy existing migrations from the service, otherwise they won't be incremental
src_dir = os.path.join(service_dir, "xos", "synchronizer", "migrations")
if os.path.isdir(src_dir):
dest_dir = os.path.join(out_dir, "migrations")
if os.path.isdir(dest_dir):
shutil.rmtree(dest_dir) # empty the folder, we'll copy everything again
shutil.copytree(src_dir, dest_dir)
def copy_service_migrations(service_dir, service_dest_dir, service_name):
"""
    Once the migrations are generated, copy them into the correct location
:param service_dir: string (path to the folder)
:param service_name: string (name of the service)
:return: void
"""
log.debug("Copying %s migrations to %s" % (service_name, service_dir))
migration_dir = os.path.join(service_dest_dir, service_name, "migrations")
dest_dir = os.path.join(service_dir, "xos", "synchronizer", "migrations")
if os.path.isdir(dest_dir):
shutil.rmtree(dest_dir) # empty the folder, we'll copy everything again
shutil.copytree(migration_dir, dest_dir)
    # clean up after the tool; the generated migrations have been moved into the service repo
shutil.rmtree(get_abs_path(os.path.join(migration_dir, "../")))
def monkey_patch_migration_template():
import django
django.setup()
import django.db.migrations.writer as dj
dj.MIGRATION_TEMPLATE = """\
# Copyright 2017-present Open Networking Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
# Generated by Django %(version)s on %(timestamp)s
from __future__ import unicode_literals
%(imports)s
class Migration(migrations.Migration):
%(replaces_str)s%(initial_str)s
dependencies = [
%(dependencies)s\
]
operations = [
%(operations)s\
]
"""
def configure_logging(verbose):
global log
# INITIALIZING LOGGER
Config.init()
cfg = Config().get("logging")
if verbose:
cfg["handlers"]["console"]["level"] = "DEBUG"
log = create_logger(cfg)
# SETTING ENV
os.environ["LOG_FILE"] = get_migration_library_path("django.log")
os.environ["XOS_CONFIG_SCHEMA"] = get_migration_library_path("migration_cfg_schema.yaml")
os.environ["XOS_CONFIG_FILE"] = get_migration_library_path("migration_cfg.yaml")
os.environ["MIGRATIONS"] = "true"
# this is populated in case we generate migrations for services and it's used in settings.py
os.environ["INSTALLED_APPS"] = ""
# PARAMS
parser = argparse.ArgumentParser(description="XOS Migrations")
required = parser.add_argument_group("required arguments")
required.add_argument(
"-s",
"--service",
action="append",
required=True,
dest="service_names",
help="The name of the folder containing the service in cord/orchestration/xos-services"
)
parser.add_argument(
"-r",
"--repo",
default=REPO_ROOT,
dest="repo_root",
help="Path to the CORD repo root (defaults to '../..'). Mutually exclusive with '--xos'."
)
parser.add_argument(
"-x",
"--xos-dir",
default=None,
dest="xos_root",
help="Path to directory of the XOS repo. Incompatible with '--repo'."
)
parser.add_argument(
"--services-dir",
default=None,
dest="services_root",
help="Path to directory of the XOS services root. Incompatible with '--repo'." +
"Note that all the services repo needs to be siblings"
)
parser.add_argument(
"--check",
default=False,
action="store_true",
dest="check",
help="Check if the migrations are generated for a given service. Does not apply any change."
)
parser.add_argument(
"-v",
"--verbose",
help="increase log verbosity",
dest="verbose",
action="store_true"
)
def run():
service_base_dir = None
# cleaning up from possible incorrect states
if "INSTALLED_APPS" in os.environ:
del os.environ["INSTALLED_APPS"]
args = parser.parse_args()
configure_logging(args.verbose)
print_banner(args.repo_root)
# validating args, the solution is hacky but it does not fit `add_mutually_exclusive_group`
# and it's not complex enough for the solution proposed here:
# https://stackoverflow.com/questions/17909294/python-argparse-mutual-exclusive-group
if args.service_names != ["core"] and \
((args.xos_root and not args.services_root) or (args.services_root and not args.xos_root)):
# if we're only generating migrations for the core,
        # the --xos-dir is the only thing we need
log.error("You need to set both --xos-dir and \
--services-dir parameters when generating migrations for a service")
sys.exit(1)
if (args.xos_root or args.services_root) and (args.repo_root != REPO_ROOT):
log.error("The --xos-dir or --services-dir parameters are not compatible with the --repo parameter")
sys.exit(1)
# find absolute path to the code
if args.xos_root or args.services_root:
xos_path = get_abs_path(os.path.join(args.xos_root, "xos"))
if args.services_root:
            # NOTE this param is optional (we may be generating migrations for the core only)
service_base_dir = get_abs_path(args.services_root)
else:
xos_path = get_abs_path(os.path.join(args.repo_root, "orchestration/xos/xos/"))
service_base_dir = get_abs_path(os.path.join(xos_path, "../../xos-services/"))
log.debug("XOS Path: %s" % xos_path)
log.debug("Service Base Dir: %s" % service_base_dir)
service_dest_dir = get_abs_path(os.path.join(xos_path, "services/"))
core_dir = get_abs_path(os.path.join(xos_path, "core/models/"))
# we need to append the xos folder to sys.path
original_sys_path = sys.path
sys.path.append(xos_path)
log.info("Services: %s" % ", ".join(args.service_names))
django_cli_args = ['xos-migrate.py', "makemigrations"]
# generate the code for each service and create a list of parameters to pass to django
app_list = []
for service in args.service_names:
# NOTE we need core models to be there as all the services depend on them
generate_core_models(core_dir)
if service == "core":
django_cli_args.append("core")
else:
service_dir = os.path.join(service_base_dir, service)
service_name = get_service_name_from_config(service_dir)
generate_service_models(service_dir, service_dest_dir, service_name)
app_list.append("services.%s" % service_name)
django_cli_args.append(service_name)
if len(app_list) > 0:
os.environ["INSTALLED_APPS"] = ",".join(app_list)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "xos.settings")
monkey_patch_migration_template()
if args.check:
django_cli_args.append("--check")
django_cli_args.append("--dry-run")
from django.core.management import execute_from_command_line
try:
log.debug("Django CLI Args", args=django_cli_args)
execute_from_command_line(django_cli_args)
returncode = 0
except SystemExit as e:
        returncode = e.code
if returncode != 0:
if args.check:
log.error("Migrations are not up to date with the service changes!")
else:
log.error("An error occurred")
sys.exit(returncode)
# copying migrations back to the service
for service in args.service_names:
if service == "core":
# we don't need to copy migrations for the core
continue
else:
service_dir = os.path.join(service_base_dir, service)
service_name = get_service_name_from_config(service_dir)
copy_service_migrations(service_dir, service_dest_dir, service_name)
    # restore original sys.path
sys.path = original_sys_path
|
[
"os.mkdir",
"django.setup",
"argparse.ArgumentParser",
"os.path.isfile",
"xosconfig.Config",
"yaml.safe_load",
"shutil.rmtree",
"os.path.join",
"sys.path.append",
"os.path.abspath",
"os.path.exists",
"shutil.copyfile",
"xosgenx.generator.XOSProcessor.process",
"xosconfig.Config.init",
"os.environ.setdefault",
"os.path.basename",
"os.path.realpath",
"xosgenx.generator.XOSProcessorArgs",
"django.core.management.execute_from_command_line",
"multistructlog.create_logger",
"os.listdir",
"sys.exit",
"os.path.isabs",
"os.path.isdir",
"os.getcwd",
"shutil.copytree",
"os.path.expanduser"
] |
[((9736, 9789), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""XOS Migrations"""'}), "(description='XOS Migrations')\n", (9759, 9789), False, 'import argparse\n'), ((1352, 1371), 'os.path.isabs', 'os.path.isabs', (['dir_'], {}), '(dir_)\n', (1365, 1371), False, 'import os\n'), ((2706, 2743), 'os.path.join', 'os.path.join', (['core_dir', '"""core.xproto"""'], {}), "(core_dir, 'core.xproto')\n", (2718, 2743), False, 'import os\n'), ((2756, 2884), 'xosgenx.generator.XOSProcessorArgs', 'XOSProcessorArgs', ([], {'output': 'core_dir', 'target': '"""django.xtarget"""', 'dest_extension': '"""py"""', 'write_to_file': '"""model"""', 'files': '[core_xproto]'}), "(output=core_dir, target='django.xtarget', dest_extension=\n 'py', write_to_file='model', files=[core_xproto])\n", (2772, 2884), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((2931, 2957), 'xosgenx.generator.XOSProcessor.process', 'XOSProcessor.process', (['args'], {}), '(args)\n', (2951, 2957), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((2979, 3120), 'xosgenx.generator.XOSProcessorArgs', 'XOSProcessorArgs', ([], {'output': 'core_dir', 'target': '"""django-security.xtarget"""', 'dest_file': '"""security.py"""', 'write_to_file': '"""single"""', 'files': '[core_xproto]'}), "(output=core_dir, target='django-security.xtarget',\n dest_file='security.py', write_to_file='single', files=[core_xproto])\n", (2995, 3120), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((3169, 3204), 'xosgenx.generator.XOSProcessor.process', 'XOSProcessor.process', (['security_args'], {}), '(security_args)\n', (3189, 3204), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((3222, 3353), 'xosgenx.generator.XOSProcessorArgs', 'XOSProcessorArgs', ([], {'output': 'core_dir', 'target': '"""init.xtarget"""', 'dest_file': '"""__init__.py"""', 'write_to_file': '"""single"""', 'files': '[core_xproto]'}), "(output=core_dir, target='init.xtarget', dest_file=\n '__init__.py', write_to_file='single', files=[core_xproto])\n", (3238, 3353), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((3400, 3431), 'xosgenx.generator.XOSProcessor.process', 'XOSProcessor.process', (['init_args'], {}), '(init_args)\n', (3420, 3431), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((3776, 3792), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (3786, 3792), False, 'import os\n'), ((4412, 4428), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (4422, 4428), False, 'import os\n'), ((4963, 5013), 'os.path.join', 'os.path.join', (['path', '"""xos/synchronizer/config.yaml"""'], {}), "(path, 'xos/synchronizer/config.yaml')\n", (4975, 5013), False, 'import os\n'), ((5153, 5177), 'yaml.safe_load', 'yaml.safe_load', (['cfg_file'], {}), '(cfg_file)\n', (5167, 5177), False, 'import yaml\n'), ((5503, 5555), 'os.path.join', 'os.path.join', (['service_dir', '"""xos/synchronizer/models"""'], {}), "(service_dir, 'xos/synchronizer/models')\n", (5515, 5555), False, 'import os\n'), ((5749, 5793), 'os.path.join', 'os.path.join', (['service_dest_dir', 'service_name'], {}), '(service_dest_dir, service_name)\n', (5761, 5793), False, 'import os\n'), ((5867, 5968), 'xosgenx.generator.XOSProcessorArgs', 'XOSProcessorArgs', ([], {'output': 'out_dir', 'files': 'xprotos', 'target': '"""service.xtarget"""', 'write_to_file': '"""target"""'}), "(output=out_dir, files=xprotos, target='service.xtarget',\n write_to_file='target')\n", (5883, 5968), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((6008, 6034), 'xosgenx.generator.XOSProcessor.process', 'XOSProcessor.process', (['args'], {}), '(args)\n', (6028, 6034), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((6056, 6190), 'xosgenx.generator.XOSProcessorArgs', 'XOSProcessorArgs', ([], {'output': 'out_dir', 'target': '"""django-security.xtarget"""', 'dest_file': '"""security.py"""', 'write_to_file': '"""single"""', 'files': 'xprotos'}), "(output=out_dir, target='django-security.xtarget',\n dest_file='security.py', write_to_file='single', files=xprotos)\n", (6072, 6190), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((6239, 6274), 'xosgenx.generator.XOSProcessor.process', 'XOSProcessor.process', (['security_args'], {}), '(security_args)\n', (6259, 6274), False, 'from xosgenx.generator import XOSProcessor, XOSProcessorArgs\n'), ((6299, 6335), 'os.path.join', 'os.path.join', (['out_dir', '"""__init__.py"""'], {}), "(out_dir, '__init__.py')\n", (6311, 6335), False, 'import os\n'), ((6888, 6950), 'os.path.join', 'os.path.join', (['service_dir', '"""xos"""', '"""synchronizer"""', '"""migrations"""'], {}), "(service_dir, 'xos', 'synchronizer', 'migrations')\n", (6900, 6950), False, 'import os\n'), ((6958, 6980), 'os.path.isdir', 'os.path.isdir', (['src_dir'], {}), '(src_dir)\n', (6971, 6980), False, 'import os\n'), ((7585, 7643), 'os.path.join', 'os.path.join', (['service_dest_dir', 'service_name', '"""migrations"""'], {}), "(service_dest_dir, service_name, 'migrations')\n", (7597, 7643), False, 'import os\n'), ((7659, 7721), 'os.path.join', 'os.path.join', (['service_dir', '"""xos"""', '"""synchronizer"""', '"""migrations"""'], {}), "(service_dir, 'xos', 'synchronizer', 'migrations')\n", (7671, 7721), False, 'import os\n'), ((7729, 7752), 'os.path.isdir', 'os.path.isdir', (['dest_dir'], {}), '(dest_dir)\n', (7742, 7752), False, 'import os\n'), ((7839, 7879), 'shutil.copytree', 'shutil.copytree', (['migration_dir', 'dest_dir'], {}), '(migration_dir, dest_dir)\n', (7854, 7879), False, 'import shutil\n'), ((8095, 8109), 'django.setup', 'django.setup', ([], {}), '()\n', (8107, 8109), False, 'import django\n'), ((9154, 9167), 'xosconfig.Config.init', 'Config.init', ([], {}), '()\n', (9165, 9167), False, 'from xosconfig import Config\n'), ((9284, 9302), 'multistructlog.create_logger', 'create_logger', (['cfg'], {}), '(cfg)\n', (9297, 9302), False, 'from multistructlog import create_logger\n'), ((12988, 13013), 'sys.path.append', 'sys.path.append', (['xos_path'], {}), '(xos_path)\n', (13003, 13013), False, 'import sys\n'), ((13907, 13970), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""xos.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'xos.settings')\n", (13928, 13970), False, 'import os\n'), ((1388, 1410), 'os.path.realpath', 'os.path.realpath', (['dir_'], {}), '(dir_)\n', (1404, 1410), False, 'import os\n'), ((1478, 1502), 'os.path.expanduser', 'os.path.expanduser', (['dir_'], {}), '(dir_)\n', (1496, 1502), False, 'import os\n'), ((1518, 1539), 'os.path.abspath', 'os.path.abspath', (['dir_'], {}), '(dir_)\n', (1533, 1539), False, 'import os\n'), ((3906, 3928), 'os.path.join', 'os.path.join', (['path', 'fn'], {}), '(path, fn)\n', (3918, 3928), False, 'import os\n'), ((4542, 4564), 'os.path.join', 'os.path.join', (['path', 'fn'], {}), '(path, fn)\n', (4554, 4564), False, 'import os\n'), ((5025, 5047), 'os.path.isfile', 'os.path.isfile', (['config'], {}), '(config)\n', (5039, 5047), False, 'import os\n'), ((5805, 5827), 'os.path.isdir', 'os.path.isdir', (['out_dir'], {}), '(out_dir)\n', (5818, 5827), False, 'import os\n'), ((5837, 5854), 'os.mkdir', 'os.mkdir', (['out_dir'], {}), '(out_dir)\n', (5845, 5854), False, 'import os\n'), ((6347, 6379), 'os.path.exists', 'os.path.exists', (['init_py_filename'], {}), '(init_py_filename)\n', (6361, 6379), False, 'import os\n'), ((7001, 7036), 'os.path.join', 'os.path.join', (['out_dir', '"""migrations"""'], {}), "(out_dir, 'migrations')\n", (7013, 7036), False, 'import os\n'), ((7048, 7071), 'os.path.isdir', 'os.path.isdir', (['dest_dir'], {}), '(dest_dir)\n', (7061, 7071), False, 'import os\n'), ((7166, 7200), 'shutil.copytree', 'shutil.copytree', (['src_dir', 'dest_dir'], {}), '(src_dir, dest_dir)\n', (7181, 7200), False, 'import shutil\n'), ((7762, 7785), 'shutil.rmtree', 'shutil.rmtree', (['dest_dir'], {}), '(dest_dir)\n', (7775, 7785), False, 'import shutil\n'), ((11912, 11923), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (11920, 11923), False, 'import sys\n'), ((12122, 12133), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (12130, 12133), False, 'import sys\n'), ((12794, 12829), 'os.path.join', 'os.path.join', (['xos_path', '"""services/"""'], {}), "(xos_path, 'services/')\n", (12806, 12829), False, 'import os\n'), ((12859, 12897), 'os.path.join', 'os.path.join', (['xos_path', '"""core/models/"""'], {}), "(xos_path, 'core/models/')\n", (12871, 12897), False, 'import os\n'), ((14259, 14301), 'django.core.management.execute_from_command_line', 'execute_from_command_line', (['django_cli_args'], {}), '(django_cli_args)\n', (14284, 14301), False, 'from django.core.management import execute_from_command_line\n'), ((14578, 14598), 'sys.exit', 'sys.exit', (['returncode'], {}), '(returncode)\n', (14586, 14598), False, 'import sys\n'), ((1441, 1461), 'os.path.exists', 'os.path.exists', (['dir_'], {}), '(dir_)\n', (1455, 1461), False, 'import os\n'), ((1580, 1591), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1589, 1591), False, 'import os\n'), ((4015, 4039), 'os.path.isdir', 'os.path.isdir', (['full_path'], {}), '(full_path)\n', (4028, 4039), False, 'import os\n'), ((4644, 4668), 'os.path.isdir', 'os.path.isdir', (['full_path'], {}), '(full_path)\n', (4657, 4668), False, 'import os\n'), ((6569, 6591), 'os.path.basename', 'os.path.basename', (['file'], {}), '(file)\n', (6585, 6591), False, 'import os\n'), ((6640, 6665), 'os.path.join', 'os.path.join', (['out_dir', 'fn'], {}), '(out_dir, fn)\n', (6652, 6665), False, 'import os\n'), ((6755, 6787), 'shutil.copyfile', 'shutil.copyfile', (['src_fn', 'dest_fn'], {}), '(src_fn, dest_fn)\n', (6770, 6787), False, 'import shutil\n'), ((7085, 7108), 'shutil.rmtree', 'shutil.rmtree', (['dest_dir'], {}), '(dest_dir)\n', (7098, 7108), False, 'import shutil\n'), ((7995, 8029), 'os.path.join', 'os.path.join', (['migration_dir', '"""../"""'], {}), "(migration_dir, '../')\n", (8007, 8029), False, 'import os\n'), ((9179, 9187), 'xosconfig.Config', 'Config', ([], {}), '()\n', (9185, 9187), False, 'from xosconfig import Config\n'), ((12248, 12282), 'os.path.join', 'os.path.join', (['args.xos_root', '"""xos"""'], {}), "(args.xos_root, 'xos')\n", (12260, 12282), False, 'import os\n'), ((12515, 12569), 'os.path.join', 'os.path.join', (['args.repo_root', '"""orchestration/xos/xos/"""'], {}), "(args.repo_root, 'orchestration/xos/xos/')\n", (12527, 12569), False, 'import os\n'), ((12611, 12656), 'os.path.join', 'os.path.join', (['xos_path', '"""../../xos-services/"""'], {}), "(xos_path, '../../xos-services/')\n", (12623, 12656), False, 'import os\n'), ((13519, 13558), 'os.path.join', 'os.path.join', (['service_base_dir', 'service'], {}), '(service_base_dir, service)\n', (13531, 13558), False, 'import os\n'), ((14835, 14874), 'os.path.join', 'os.path.join', (['service_base_dir', 'service'], {}), '(service_base_dir, service)\n', (14847, 14874), False, 'import os\n'), ((1748, 1774), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1764, 1774), False, 'import os\n')]
|
#! /usr/bin/env python
"""Generate tables from Rainbow's experimental results."""
import csv
import json
import logging
import os
import re
from typing import Any, List, Optional, Tuple
import click
from sklearn import metrics
from rainbow import datasets, utils
logger = logging.getLogger(__name__)
# constants
TASK_TO_DATASET = {
task: dataset
for datasets_ in [
datasets.RAINBOW_DATASETS,
datasets.KNOWLEDGE_GRAPH_DATASETS,
datasets.GLUE_DATASETS,
datasets.SUPER_GLUE_DATASETS,
datasets.COMMONSENSE_DATASETS,
]
for task, dataset in datasets_.items()
}
"""A mapping from all dataset names to metadata about them."""
EXPERIMENT_TO_TABLES_CONFIG = {
"effect-of-size": {
# the mixtures table
"mixtures": (
# path to root
"mixtures/t5",
# experiment factors
{
"model_size": ["small", "base", "large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["rainbow"],
"rate": ["equal"],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
[],
),
# the multiset learning curves table
"multiset_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["small", "base", "large"],
"task": ["commonsenseqa"],
"multiset": ["rainbow"],
"transfer_method": [
"multi-task",
"multi-task-fine-tune",
"sequential-fine-tune",
],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["single-task"],
),
# the single-task learning curves table
"single-task_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["small", "base", "large"],
"task": ["commonsenseqa"],
"multiset": ["single-task"],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["rainbow"],
),
},
"transferring-multisets": {
# the multiset full tasks table
"multiset_full-tasks": (
# path to root
"full-tasks/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["glue", "super-glue", "rainbow"],
"transfer_method": [
"multi-task",
"multi-task-fine-tune",
"sequential-fine-tune",
],
"rate": ["equal", "proportional"],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["single-task"],
),
# the single-task full tasks table
"single-task_full-tasks": (
# path to root
"full-tasks/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["single-task"],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["glue", "super-glue", "rainbow"],
),
# the multiset learning curves table
"multiset_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["glue", "super-glue", "rainbow"],
"transfer_method": [
"multi-task",
"multi-task-fine-tune",
"sequential-fine-tune",
],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["single-task"],
),
# the single-task learning curves table
"single-task_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["single-task"],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["glue", "super-glue", "rainbow"],
),
},
"transferring-knowledge-graphs": {
# the multiset full tasks table
"multiset_full-tasks": (
# path to root
"full-tasks/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["knowledge-graph", "rainbow-knowledge-graph"],
"knowledge-graph": ["atomic", "conceptnet", "comet"],
"direction": ["forward", "backward", "bidirectional"],
"transfer_method": ["multi-task"],
"rate": ["equal", "proportional"],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["single-task"],
),
# the single-task full tasks table
"single-task_full-tasks": (
# path to root
"full-tasks/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["single-task"],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["knowledge-graph", "rainbow-knowledge-graph"],
),
# the multiset learning curves table
"multiset_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["knowledge-graph", "rainbow-knowledge-graph"],
"knowledge-graph": ["atomic", "conceptnet", "comet"],
"transfer_method": ["multi-task"],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["single-task"],
),
# the single-task learning curves table
"single-task_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["large"],
"task": [
"anli",
"cosmosqa",
"hellaswag",
"physicaliqa",
"socialiqa",
"winogrande",
],
"multiset": ["single-task"],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["knowledge-graph", "rainbow-knowledge-graph"],
),
},
"transferring-to-external-tasks": {
# the multiset full tasks table
"multiset_full-tasks": (
# path to root
"full-tasks/t5",
# experiment factors
{
"model_size": ["large"],
"task": ["commonsenseqa", "joci"],
"multiset": ["glue", "super-glue", "rainbow"],
"transfer_method": ["multi-task"],
"rate": ["equal", "proportional"],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["single-task"],
),
# the single-task full tasks table
"single-task_full-tasks": (
# path to root
"full-tasks/t5",
# experiment factors
{
"model_size": ["large"],
"task": ["commonsenseqa", "joci"],
"multiset": ["single-task"],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["glue", "super-glue", "rainbow"],
),
# the multiset learning curves table
"multiset_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["large"],
"task": ["commonsenseqa", "joci"],
"multiset": ["glue", "super-glue", "rainbow"],
"transfer_method": ["multi-task"],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["single-task"],
),
# the single-task learning curves table
"single-task_learning-curves": (
# path to root
"learning-curves/t5",
# experiment factors
{
"model_size": ["large"],
"task": ["commonsenseqa", "joci"],
"multiset": ["single-task"],
"size": [
"00004",
"00010",
"00030",
"00091",
"00280",
"00865",
"02667",
"05334",
"08000",
"10667",
"13334",
"16000",
],
"lr": ["lr-2.5e-4", "lr-1e-3", "lr-4e-3"],
"split": ["validation_eval"],
},
# directories to ignore
["glue", "super-glue", "rainbow"],
),
},
}
"""Table configurations for all of the experiments."""
# helper functions
def read_labels(fpath: str) -> List[str]:
"""Return the labels read from a ``fpath``.
Parameters
----------
fpath : str, required
The path to the labels file. The file should have one label per
line.
Returns
-------
List[str]
The labels located at ``fpath`` as a list of strings.
"""
with open(fpath, "r") as f:
labels = [ln.strip().lower() for ln in f]
return labels
def parse_training_curve(dpath: str) -> List[Tuple[int, float]]:
"""Parse the training curve from ``dpath``.
Parameters
----------
dpath : str, required
The path to the training curve's directory.
Returns
-------
List[Tuple[int, float]]
The training curve represented as a list of tuples of
(step, score) number pairs.
"""
# Parse and validate the directory structure.
preds_fpattern = re.compile(
r"(?P<task>[a-z]*(?:_\d+)?)_task_(?P<step>\d+)_predictions"
)
targets_fpattern = re.compile(r"(?P<task>[a-z]*(?:_\d+)?)_task_targets")
tasks = set()
preds_fpaths = set()
targets_fpaths = set()
for fpath in os.listdir(dpath):
preds_fpattern_match = preds_fpattern.match(fpath)
targets_fpattern_match = targets_fpattern.match(fpath)
if preds_fpattern_match:
preds_fpaths.add(fpath)
tasks.add(preds_fpattern_match.groupdict()["task"])
elif targets_fpattern_match:
targets_fpaths.add(fpath)
tasks.add(targets_fpattern_match.groupdict()["task"])
else:
raise IOError(
f"The file path ({fpath}) did not match the"
f" predictions or targets file path patterns."
)
if len(tasks) != 1:
raise IOError(
f"{dpath} should have files corresponding to exactly one task."
)
if len(preds_fpaths) != 10:
raise IOError(
f"There should be exactly 10 prediction files in {dpath}."
)
if len(targets_fpaths) != 1:
raise IOError(f"There should be exactly 1 targets file in {dpath}.")
# Pop the directory's task.
task = tasks.pop()
# Return the training curve as a list of (step, score) pairs.
targets_fname = f"{task}_task_targets"
training_curve = []
for preds_fname in os.listdir(dpath):
match = preds_fpattern.match(preds_fname)
if match is None:
continue
step = int(match.groupdict()["step"])
score = metrics.accuracy_score(
y_pred=read_labels(os.path.join(dpath, preds_fname)),
y_true=read_labels(os.path.join(dpath, targets_fname)),
)
training_curve.append((step, score))
training_curve.sort()
return training_curve
def parse_training_curves(
dpath: str, ignore_dirs: Optional[List[str]] = None, **kwargs
):
"""Parse the training curves located at ``dpath``.
Parse the training curves run with different factors
(hyper-parameters) located at ``dpath`` into a nested dictionary
structure.
Parameters
----------
dpath : str, required
The path to the experiment's directory.
ignore_dirs : Optional[List[str]], optional (default=None)
An optional list of directory names to ignore when validating
the directory structure. Defaults to ``None``.
**kwargs, required
Key-word arguments, each providing a list of strings for the
directory names expected in deeper levels of the directory tree
rooted at ``dpath``. Each directory name is assumed to be a
factor for the experiment, and the directories are expected to
be in order.
Returns
-------
Dict
Nested dictionaries mapping a sequence of factors to the
corresponding training curve.
"""
# Handle the base case.
if len(kwargs) == 0:
return parse_training_curve(dpath)
# Pop the next set of factors.
key = next(iter(kwargs))
values = kwargs.pop(key)
# Parse and validate the directory structure.
found_subdirs = set(os.listdir(dpath)).difference(set(ignore_dirs or []))
expected_subdirs = set(values)
if found_subdirs != expected_subdirs:
raise IOError(f"Directories in {dpath} did not match expected {key}.")
# Return the mapping.
return {
value: parse_training_curves(
dpath=os.path.join(dpath, value), ignore_dirs=ignore_dirs, **kwargs
)
for value in values
}
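# An illustrative call (factor values are taken from the configs above, but
# the exact combination is hypothetical): each keyword argument consumes one
# directory level, and the result is nested dicts keyed by factor values.
#
#     curves = parse_training_curves(
#         dpath="learning-curves/t5",
#         ignore_dirs=["single-task"],
#         model_size=["large"],
#         task=["anli"],
#     )
#     curves["large"]["anli"]  # -> [(step, score), ...]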
def process_factor(task: str, key: str, value: str) -> Any:
"""Return ``value`` coerced to the proper type and value.
Parameters
----------
task : str, required
The task.
key : str, required
The name of the factor.
value : str, required
The factor's value.
Returns
-------
Any
``value`` coerced to the appropriate type.
"""
if key == "lr":
return float(re.match(r"lr-(\d+(?:\.\d+)?e-?\d+)", value).groups()[0])
if key == "split":
if value.endswith("_eval"):
value = value[: -len("_eval")]
return str(value)
if key == "size":
dataset = TASK_TO_DATASET[task]
        # N.B. The maximum size used in any experiment was automatically
        # truncated to the minimum of the requested size and the size of the
        # training split.
return min(int(value), dataset.splits["train"].size)
if key == "best_score":
return float(value)
if key == "training_curve":
return list(value)
return str(value)
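# Illustrative coercions (values taken from the configurations above):
#     process_factor(task="anli", key="lr", value="lr-2.5e-4")  # -> 0.00025
#     process_factor(task="anli", key="split", value="validation_eval")  # -> 'validation'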
# main function
@click.command()
@click.argument(
"src", type=click.Path(exists=True, dir_okay=True, file_okay=False)
)
@click.argument(
"dst", type=click.Path(exists=False, dir_okay=True, file_okay=False)
)
def generate_tables(src: str, dst: str) -> None:
"""Generate tables from Rainbow's experimental results.
Read the experimental results for Rainbow from SRC and write tables
    for the experiments out to DST in CSV format. Full training curves
    are also written out in JSON Lines format.
"""
utils.configure_logging(clear=True)
for experiment, tables_config in EXPERIMENT_TO_TABLES_CONFIG.items():
for name, (path, factors, ignore_dirs) in tables_config.items():
# Parse the data using the config.
training_curves = parse_training_curves(
dpath=os.path.join(src, experiment, path),
ignore_dirs=ignore_dirs,
**factors,
)
            # Convert the training curve data from nested dictionaries
            # into two lists of dictionaries, using a depth-first
            # search. The first list, table, pairs each hyper-parameter
            # configuration with the best score from training (i.e.,
            # early stopping). The second list, training_curves_table,
            # pairs each hyper-parameter configuration with the full
            # training curve.
table = []
training_curves_table = []
values = []
stack = [
(value, children, 0)
for value, children in list(training_curves.items())[::-1]
]
while len(stack) > 0:
# Pop the node off the stack.
value, children, depth = stack.pop()
# Truncate the values to the correct depth.
values = values[:depth]
# Update values with the current value.
values.append(value)
# Handle the node.
if isinstance(children, list):
# The node is a leaf (training curves).
best_score = max(score for _, score in children)
table.append(values + [best_score])
training_curves_table.append(values + [children])
elif isinstance(children, dict):
# The node is an internal node.
for value, children in list(children.items())[::-1]:
stack.append((value, children, depth + 1))
# Write out the data.
os.makedirs(os.path.join(dst, experiment, name))
# Write the training curves.
training_curves_table_path = os.path.join(
dst, experiment, name, "training-curves.jsonl"
)
with open(training_curves_table_path, "w") as fout:
fieldnames = list(factors.keys()) + ["training_curve"]
for row in training_curves_table:
# Identify the task.
task = next(
value
for factor, value in zip(fieldnames, row)
if factor == "task"
)
                # Skip all sizes where the requested training set size is
                # at least as large as the available training data, except
                # when size is 16000. In those cases, the training data
                # used is the dataset's full training set. We keep only
                # size 16000 to avoid repeat runs at the same effective
                # training set size (which simplifies comparisons across
                # the experiments).
try:
size = next(
value
for factor, value in zip(fieldnames, row)
if factor == "size"
)
except StopIteration:
pass
else:
dataset = TASK_TO_DATASET[task]
if (
int(size) >= dataset.splits["train"].size
and size != "16000"
):
continue
# Write the training curve to disk.
fout.write(
json.dumps(
{
factor: process_factor(
task=task, key=factor, value=value,
)
for factor, value in zip(fieldnames, row)
}
)
+ "\n"
)
# Write the results table.
table_path = os.path.join(dst, experiment, name, "table.csv")
with open(table_path, "w") as fout:
fieldnames = list(factors.keys()) + ["best_score"]
writer = csv.DictWriter(
f=fout, fieldnames=fieldnames, dialect="unix"
)
writer.writeheader()
for row in table:
# Identify the task.
task = next(
value
for factor, value in zip(fieldnames, row)
if factor == "task"
)
                # Skip all sizes where the requested training set size is
                # at least as large as the available training data, except
                # when size is 16000. In those cases, the training data
                # used is the dataset's full training set. We keep only
                # size 16000 to avoid repeat runs at the same effective
                # training set size (which simplifies comparisons across
                # the experiments).
try:
size = next(
value
for factor, value in zip(fieldnames, row)
if factor == "size"
)
except StopIteration:
pass
else:
dataset = TASK_TO_DATASET[task]
if (
int(size) >= dataset.splits["train"].size
and size != "16000"
):
continue
# Write the table to disk.
writer.writerow(
{
factor: process_factor(
task=task, key=factor, value=value,
)
for factor, value in zip(fieldnames, row)
}
)
if __name__ == "__main__":
generate_tables() # pylint: disable=no-value-for-parameter
|
[
"rainbow.utils.configure_logging",
"re.match",
"click.command",
"click.Path",
"csv.DictWriter",
"os.path.join",
"os.listdir",
"logging.getLogger",
"re.compile"
] |
[((278, 305), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (295, 305), False, 'import logging\n'), ((20597, 20612), 'click.command', 'click.command', ([], {}), '()\n', (20610, 20612), False, 'import click\n'), ((15890, 15962), 're.compile', 're.compile', (['"""(?P<task>[a-z]*(?:_\\\\d+)?)_task_(?P<step>\\\\d+)_predictions"""'], {}), "('(?P<task>[a-z]*(?:_\\\\d+)?)_task_(?P<step>\\\\d+)_predictions')\n", (15900, 15962), False, 'import re\n'), ((15999, 16052), 're.compile', 're.compile', (['"""(?P<task>[a-z]*(?:_\\\\d+)?)_task_targets"""'], {}), "('(?P<task>[a-z]*(?:_\\\\d+)?)_task_targets')\n", (16009, 16052), False, 'import re\n'), ((16142, 16159), 'os.listdir', 'os.listdir', (['dpath'], {}), '(dpath)\n', (16152, 16159), False, 'import os\n'), ((17331, 17348), 'os.listdir', 'os.listdir', (['dpath'], {}), '(dpath)\n', (17341, 17348), False, 'import os\n'), ((21122, 21157), 'rainbow.utils.configure_logging', 'utils.configure_logging', ([], {'clear': '(True)'}), '(clear=True)\n', (21145, 21157), False, 'from rainbow import datasets, utils\n'), ((20646, 20701), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'dir_okay': '(True)', 'file_okay': '(False)'}), '(exists=True, dir_okay=True, file_okay=False)\n', (20656, 20701), False, 'import click\n'), ((20737, 20793), 'click.Path', 'click.Path', ([], {'exists': '(False)', 'dir_okay': '(True)', 'file_okay': '(False)'}), '(exists=False, dir_okay=True, file_okay=False)\n', (20747, 20793), False, 'import click\n'), ((23375, 23435), 'os.path.join', 'os.path.join', (['dst', 'experiment', 'name', '"""training-curves.jsonl"""'], {}), "(dst, experiment, name, 'training-curves.jsonl')\n", (23387, 23435), False, 'import os\n'), ((25567, 25615), 'os.path.join', 'os.path.join', (['dst', 'experiment', 'name', '"""table.csv"""'], {}), "(dst, experiment, name, 'table.csv')\n", (25579, 25615), False, 'import os\n'), ((19100, 19117), 'os.listdir', 'os.listdir', (['dpath'], {}), '(dpath)\n', (19110, 19117), False, 'import os\n'), ((19406, 19432), 'os.path.join', 'os.path.join', (['dpath', 'value'], {}), '(dpath, value)\n', (19418, 19432), False, 'import os\n'), ((23256, 23291), 'os.path.join', 'os.path.join', (['dst', 'experiment', 'name'], {}), '(dst, experiment, name)\n', (23268, 23291), False, 'import os\n'), ((25756, 25817), 'csv.DictWriter', 'csv.DictWriter', ([], {'f': 'fout', 'fieldnames': 'fieldnames', 'dialect': '"""unix"""'}), "(f=fout, fieldnames=fieldnames, dialect='unix')\n", (25770, 25817), False, 'import csv\n'), ((17565, 17597), 'os.path.join', 'os.path.join', (['dpath', 'preds_fname'], {}), '(dpath, preds_fname)\n', (17577, 17597), False, 'import os\n'), ((17631, 17665), 'os.path.join', 'os.path.join', (['dpath', 'targets_fname'], {}), '(dpath, targets_fname)\n', (17643, 17665), False, 'import os\n'), ((21428, 21463), 'os.path.join', 'os.path.join', (['src', 'experiment', 'path'], {}), '(src, experiment, path)\n', (21440, 21463), False, 'import os\n'), ((19953, 20000), 're.match', 're.match', (['"""lr-(\\\\d+(?:\\\\.\\\\d+)?e-?\\\\d+)"""', 'value'], {}), "('lr-(\\\\d+(?:\\\\.\\\\d+)?e-?\\\\d+)', value)\n", (19961, 20000), False, 'import re\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 18 18:05:40 2018
Dataset Object
CHECK MAX DISBALANCE ON REPLICATION FOR MULTICLASS
@author: ereyes
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
class Dataset(object):
"""
Constructor
"""
def __init__(self, data_array, data_label, batch_size):
self.batch_counter = -1
self.batch_counter_eval = -1
self.batch_size = batch_size
self.data_array = data_array
self.data_label = data_label
def _merge_with_dataset(self, array, labels):
self.data_label = np.concatenate((self.data_label, labels))
self.data_array = np.concatenate((self.data_array, array))
def get_batch_images(self):
batch, _ = self.get_batch()
return batch
def _check_first_call(self, counter):
if counter == -1:
return 0
return counter
def get_batch(self):
self.batch_counter = self._check_first_call(self.batch_counter)
if self.batch_counter + self.batch_size < self.data_array.shape[0]:
batch_image = self.data_array[
self.batch_counter:self.batch_counter + self.batch_size,
...]
batch_label = self.data_label[
self.batch_counter:self.batch_counter + self.batch_size,
...]
self.batch_counter += self.batch_size
# print(get_batch.batch_counter)
else:
self.batch_counter = 0
self.shuffle_data()
batch_image = self.data_array[
self.batch_counter:self.batch_counter + self.batch_size,
...]
batch_label = self.data_label[
self.batch_counter:self.batch_counter + self.batch_size,
...]
self.batch_counter += self.batch_size
return batch_image, batch_label
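    # Note: when the next full batch would run past the end of the data,
    # get_batch reshuffles and restarts from index 0, dropping the tail of
    # the pass; get_batch_eval below returns the short final batch instead
    # and then resets its counter.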
def get_batch_eval(self):
self.batch_counter_eval = self._check_first_call(self.batch_counter_eval)
# print(self.batch_counter_eval)
if self.batch_counter_eval + self.batch_size < self.data_array.shape[0]:
batch_image = self.data_array[
self.batch_counter_eval:self.batch_counter_eval + self.batch_size,
...]
batch_label = self.data_label[
self.batch_counter_eval:self.batch_counter_eval + self.batch_size,
...]
self.batch_counter_eval += self.batch_size
# print(get_batch.batch_counter)
else:
left_samples = self.data_array.shape[0] - self.batch_counter_eval
batch_image = self.data_array[
self.batch_counter_eval:self.batch_counter_eval + left_samples,
...]
batch_label = self.data_label[
self.batch_counter_eval:self.batch_counter_eval + left_samples,
...]
self.batch_counter_eval = 0
return batch_image, batch_label
def shuffle_data(self):
idx = np.arange(self.data_array.shape[0])
np.random.shuffle(idx)
self.data_array = self.data_array[idx, ...]
self.data_label = self.data_label[idx, ...]
    # TODO: change both values to unique functions (AVOID CODE REPLICATION)
    def balance_data_by_replication(self):
        # Recompute the minority label on every pass: after a replication
        # the minority class can change, and a stale min_lbl can grow the
        # majority class indefinitely on multiclass data.
        max_disbalance = self.get_max_disbalance()
        while max_disbalance != 0:
            _, min_lbl_count = self.get_max_min_label_count()
            _, min_lbl = self.get_max_min_label()
            if min_lbl_count > max_disbalance:
                self.replicate_data(min_lbl, max_disbalance)
            else:
                # Replicate at most the number of existing samples per pass.
                self.replicate_data(min_lbl, min_lbl_count)
            max_disbalance = self.get_max_disbalance()
        return
def get_max_disbalance(self):
max_label_count, min_label_count = self.get_max_min_label_count()
return max_label_count - min_label_count
def get_max_min_label_count(self):
max_label, min_label = self.get_max_min_label()
max_label_count = np.where(self.data_label == max_label)[0].shape[0]
min_label_count = np.where(self.data_label == min_label)[0].shape[0]
return max_label_count, min_label_count
def get_max_min_label(self):
labels = np.unique(self.data_label)
labels_count = []
for j in range(labels.shape[0]):
label_j_count = np.where(self.data_label == labels[j])[0].shape[0]
labels_count.append(label_j_count)
labels_count = np.array(labels_count)
max_label = labels[np.where(labels_count == np.max(labels_count))[0][0]]
min_label = labels[np.where(labels_count == np.min(labels_count))[0][0]]
return max_label, min_label
    def replicate_data(self, label, samples_number):
        # print("%i samples replicated of class %i" %(samples_number,label))
        label_idx = np.where(self.data_label == label)[0]
        # np.random.shuffle(label_idx)
        label_idx = label_idx[0:samples_number]
        replicated_data_array = self.data_array[label_idx, ...]
        # Pass the matching label rows: concatenating a bare scalar onto the
        # 1-D label array raises a ValueError in numpy.
        self._merge_with_dataset(replicated_data_array, self.data_label[label_idx, ...])
def get_array_from_label(self, label):
label_idx = np.where(self.data_label == label)[0]
return self.data_array[label_idx]
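# A minimal usage sketch (shapes and label values are assumed for
# illustration; this block is not part of the original module):
if __name__ == "__main__":
    demo_data = np.random.rand(100, 28, 28)
    demo_labels = np.concatenate((np.zeros(70), np.ones(30)))
    ds = Dataset(demo_data, demo_labels, batch_size=16)
    ds.balance_data_by_replication()  # oversamples label 1.0 up to 70 samples
    batch_images, batch_labels = ds.get_batch()
    print(batch_images.shape, batch_labels.shape)  # (16, 28, 28) (16,)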
|
[
"numpy.random.shuffle",
"numpy.concatenate",
"numpy.max",
"numpy.where",
"numpy.array",
"numpy.arange",
"numpy.min",
"numpy.unique"
] |
[((2965, 3000), 'numpy.arange', 'np.arange', (['self.data_array.shape[0]'], {}), '(self.data_array.shape[0])\n', (2974, 3000), True, 'import numpy as np\n'), ((3003, 3025), 'numpy.random.shuffle', 'np.random.shuffle', (['idx'], {}), '(idx)\n', (3020, 3025), True, 'import numpy as np\n'), ((4385, 4411), 'numpy.unique', 'np.unique', (['self.data_label'], {}), '(self.data_label)\n', (4394, 4411), True, 'import numpy as np\n'), ((4596, 4618), 'numpy.array', 'np.array', (['labels_count'], {}), '(labels_count)\n', (4604, 4618), True, 'import numpy as np\n'), ((647, 688), 'numpy.concatenate', 'np.concatenate', (['(self.data_label, labels)'], {}), '((self.data_label, labels))\n', (661, 688), True, 'import numpy as np\n'), ((711, 751), 'numpy.concatenate', 'np.concatenate', (['(self.data_array, array)'], {}), '((self.data_array, array))\n', (725, 751), True, 'import numpy as np\n'), ((4936, 4970), 'numpy.where', 'np.where', (['(self.data_label == label)'], {}), '(self.data_label == label)\n', (4944, 4970), True, 'import numpy as np\n'), ((5219, 5253), 'numpy.where', 'np.where', (['(self.data_label == label)'], {}), '(self.data_label == label)\n', (5227, 5253), True, 'import numpy as np\n'), ((4178, 4216), 'numpy.where', 'np.where', (['(self.data_label == max_label)'], {}), '(self.data_label == max_label)\n', (4186, 4216), True, 'import numpy as np\n'), ((4249, 4287), 'numpy.where', 'np.where', (['(self.data_label == min_label)'], {}), '(self.data_label == min_label)\n', (4257, 4287), True, 'import numpy as np\n'), ((4488, 4526), 'numpy.where', 'np.where', (['(self.data_label == labels[j])'], {}), '(self.data_label == labels[j])\n', (4496, 4526), True, 'import numpy as np\n'), ((4666, 4686), 'numpy.max', 'np.max', (['labels_count'], {}), '(labels_count)\n', (4672, 4686), True, 'import numpy as np\n'), ((4741, 4761), 'numpy.min', 'np.min', (['labels_count'], {}), '(labels_count)\n', (4747, 4761), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
'''
This module is the core module: it creates the tkinter GUI and "places" the orders.
The order_engine module handles the orders and calculates the PNLs, returning those values here.
Lots to do! Need to clean all of this up and employ better code practices.
Will eventually add separate frames to allow for more customization.
by @robswc
'''
# TODO Remove debugging prints.
# TODO Clean up code.
# TODO Make new frames.
# TODO add function to store and retrieve config xpaths.
# Import necessary modules.
from selenium import webdriver
from tkinter import StringVar, messagebox
import tkinter.font as tkFont
import tkinter as tk
import locale
import os.path
import config
import time
# Import custom modules
from order_engine import OrderEngine, get_position, get_account_value, percent_change
from trade_exporting import write, read_all, clear_csv
# Set formatting for currency.
locale.setlocale(locale.LC_ALL, 'English_United States.1252')
# Set variables
config.initial_amount = 100000
account_value = float(config.initial_amount)
is_playing = "▶"
# Check if csv file already exists, if not create it.
if os.path.exists("trades.csv"):
pass
else:
trades_file = open("trades.csv", "w")
# Set driver to chromedriver.exe
# Try to initialize the driver, if it fails user will be told to update chrome or chromedriver.
try:
driver = webdriver.Chrome()
driver.get("https://www.tradingview.com/#signin")
except Exception as e:
print('+ Error Involving Chrome Driver + \n')
print(str(e) + '\n')
print('Visit: https://github.com/Robswc/tradingview-trainer/wiki/Errors')
print('Report this error here: https://github.com/Robswc/tradingview-trainer/issues')
input()
quit()
# Create main app tkinter frame.
class Application(tk.Frame):
def __init__(self, master=None, bg=config.background_color):
super().__init__(master)
self.master = master
self.master.configure(background="#1E1E1E")
self.pack()
self.create_widgets()
self.limit_check()
def create_fonts(self):
self.font = tkFont.Font(family=config.font, weight=config.font_weight, size=config.font_size)
self.font_large = tkFont.Font(family=config.font, weight=config.font_weight, size=round(config.font_size * 1.5))
self.font_small = tkFont.Font(family=config.font, weight=config.font_weight, size=round(config.font_size * .66))
def create_variables(self):
# Set up StringVars that will be used.
self.ticker_value = StringVar()
self.account_value_text = StringVar()
self.account_value_text.set(str(account_value))
self.last_price_value = StringVar()
self.current_position_value = StringVar()
self.current_position_pnl = StringVar()
self.account_value_pnl = StringVar()
self.limit_price = StringVar()
self.order_type = StringVar(None, 'market')
# Create tkinter widgets.
def create_widgets(self):
self.create_fonts()
self.create_variables()
# Global is_playing as it is used as a boolean and StringVar
global is_playing
is_playing = StringVar()
is_playing.set("▶")
# Create Ticker Label for Ticker
self.ticker_id = tk.Label(
self,
textvariable=self.ticker_value,
fg=config.color_white,
bg=config.background_color,
font=self.font_large
).grid(row=0, column=0, columnspan=4, padx=33, sticky="nsew")
# Create a label to show the last price
self.last_price_label = tk.Label(
self,
textvariable=self.last_price_value,
bg=config.background_color,
fg=config.color_white,
font=self.font_large
).grid(row=1, column=0, columnspan=4, sticky="nsew")
# Create a button to start the reply
self.play_button = tk.Button(
self,
textvariable=is_playing,
bg=config.button_color_light,
fg=config.color_grey,
borderwidth=0
)
self.play_button["command"] = self.play_replay
self.play_button.grid(row=2, column=0, columnspan=2, sticky="nsew")
# Create a button for progressing to next bar
self.next_button = tk.Button(self, text="▮▶", bg=config.button_color_light, fg=config.color_grey, borderwidth=0)
self.next_button["command"] = self.next_bar
self.next_button.grid(row=2, column=2, columnspan=2, sticky="nsew")
# Create a button for long orders
self.long_button = tk.Button(
self,
text="BUY",
font=self.font,
bg=config.button_color,
fg=config.color_green,
borderwidth=0
)
self.long_button["command"] = self.order_buy
self.long_button.grid(row=3, column=0, columnspan=2, sticky="nsew")
# Create a button for short orders
self.short_button = tk.Button(
self,
text="SELL",
font=self.font,
bg=config.button_color,
fg=config.color_red,
borderwidth=0
)
self.short_button["command"] = self.order_sell
self.short_button.grid(row=3, column=2, columnspan=2, sticky="nsew")
# Create radio buttons to toggle between limit orders and market orders
self.limit_radiobutton = tk.Radiobutton(
self,
bg=config.background_color,
fg=config.color_dark_grey,
selectcolor=config.background_color,
text="LIMIT",
variable=self.order_type,
value="limit"
)
self.limit_radiobutton.grid(row=4, column=0, columnspan=2, sticky="nsew")
self.market_radiobutton = tk.Radiobutton(
self,
bg=config.background_color,
fg=config.color_dark_grey,
selectcolor=config.background_color,
text="MARKET",
variable=self.order_type,
value="market",
).grid(row=4, column=2, columnspan=2, sticky="nsew")
# Create entry box for limit orders
self.limit_price = tk.Entry(
self,
borderwidth=0,
bg=config.button_color_light,
fg=config.color_grey)
self.limit_price.insert(0, " ")
self.limit_price.grid(row=5, column=0, columnspan=3, sticky="nsew", padx=5)
self.limit_copy_button = tk.Button(
self,
text="LAST",
borderwidth=0,
bg=config.button_color,
fg=config.color_grey,
font=self.font_small
)
self.limit_copy_button["command"] = self.copy_last
self.limit_copy_button.grid(row=5, column=3, columnspan=1, sticky="nsew")
self.current_position_label = tk.Label(
self,
text="Current Position",
anchor="w",
bg=config.background_color,
fg=config.color_grey, font=self.font_small
).grid(row=6, column=0, columnspan=4, sticky="nsew")
self.current_position_value_label = tk.Label(
self,
textvariable=self.current_position_value,
anchor="w",
bg=config.button_color_light,
fg=config.color_dark_grey
).grid(row=7, column=0, columnspan=3, sticky="nsew")
self.current_position_pnl_label = tk.Label(
self,
textvariable=self.current_position_pnl,
anchor="e",
bg=config.button_color_light,
fg=config.color_dark_grey
).grid(row=7, column=3, columnspan=1, sticky="nsew")
self.account_value_label = tk.Label(
self,
text="Account value",
anchor="w",
bg=config.background_color,
fg=config.color_grey,
font=self.font_small
).grid(row=8, column=0, columnspan=4, sticky="nsew")
self.account_value_value_label = tk.Label(
self,
textvariable=self.account_value_text,
bg=config.button_color_light,
fg=config.color_white,
anchor="w"
).grid(row=9, column=0, columnspan=3, sticky="nsew")
self.account_value_pnl_label = tk.Label(
self,
textvariable=self.account_value_pnl,
bg=config.button_color_light,
fg=config.color_dark_grey,
anchor="e"
).grid(row=9, column=3, columnspan=1, sticky="nsew")
self.trade_history_label = tk.Label(
self,
text="Trades",
anchor="w",
bg=config.background_color,
fg=config.color_grey,
font=self.font_small
).grid(row=10, column=0, columnspan=3, sticky="nsew")
self.trade_history_clear = tk.Button(
self,
text="Clear",
bg=config.button_color,
fg=config.color_grey,
font=self.font_small,
borderwidth=0
)
self.trade_history_clear.grid(row=10, column=3, columnspan=1, sticky="nsew")
self.trade_history_clear['command'] = self.clear_list
self.trade_history_list = tk.Listbox(
self,
fg=config.color_grey,
bg=config.textarea_color,
borderwidth=0)
self.trade_history_list.grid(row=11, column=0, columnspan=4, sticky="nsew")
# Write Timestamp to csv file
write([time.strftime("%Y-%m-%d %H:%M")])
# Start of Functions
def message_box(self):
messagebox.showinfo('Error', 'Sorry! Limit orders are not currently implemented.\n'
'You can check progress here:\n'
'https://github.com/Robswc/tradingview-trainer/issues/5')
self.order_type.set('market')
# Generic function to show error
def show_error(self, cause, exception, message):
messagebox.showerror(str(cause), str(str(exception) + '\n' + message))
driver.get("https://github.com/Robswc/tradingview-trainer/wiki/Errors")
def clear_list(self):
clear_csv()
self.update_labels()
def get_ticker(self):
#ticker = driver.find_element_by_xpath(
# '/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]/td[2]/div/div[3]/div[1]/span[2]/div/div[1]/div'
#).text
try:
ticker = driver.find_element_by_xpath(
'/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]/td[2]/div/div[2]/div[1]/div[1]/div[1]/div[1]'
).text
ticker = str(ticker).split(' ')
print(ticker[0])
return str(ticker[0])
except:
return 'None'
def get_price_data(self, request):
try:
if request == 'o':
return float(driver.find_element_by_xpath(
'/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]/td[2]/div/div[3]/div[1]/div[2]/div/div[1]/div[2]'
).text)
if request == 'h':
return float(driver.find_element_by_xpath(
'/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]/td[2]/div/div[3]/div[1]/div[2]/div/div[2]/div[2]'
).text)
if request == 'l':
return float(driver.find_element_by_xpath(
'/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]/td[2]/div/div[3]/div[1]/div[2]/div/div[3]/div[2]'
).text)
if request == 'c':
return float(driver.find_element_by_xpath(
'/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]/td[2]/div/div[3]/div[1]/div[2]/div/div[4]/div[2]'
).text)
except:
return 0
    def get_limit_price(self):
        return self.limit_price.get()
    def get_position_pnl(self):
        # Shorts are measured against the entry price, longs against the
        # last price. With no open position, pnl_percent is never bound and
        # the except below returns 0.
        if get_position()['quantity'] < 0:
            pnl_percent = ((get_position()['entry'] - self.get_last_price()) / get_position()['entry']) * 100
        if get_position()['quantity'] > 0:
            pnl_percent = ((self.get_last_price() - get_position()['entry']) / self.get_last_price()) * 100
        try:
            return round(pnl_percent, 2)
        except Exception:
            return 0
# Doesn't seem to work :(
def add_marker(self):
pass
# actions = ActionChains(driver)
# element = driver.find_element_by_xpath('/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]')
# element.click()
# actions.click(element).key_down(Keys.ALT, 'v').perform()
def update_labels(self):
# update all labels via tk StringVar()
self.last_price_value.set(str(self.get_last_price()))
self.current_position_pnl.set(str(self.get_position_pnl()) + '%')
self.account_value_pnl.set(str(round(percent_change(get_account_value(), float(config.initial_amount)), 2)) + '%')
self.current_position_value.set(str(get_position()['quantity']) + " @ " + str(get_position()['entry']))
self.account_value_text.set(locale.currency(get_account_value(), grouping=True))
self.ticker_value.set(self.get_ticker())
# Update trade history box
self.trade_history_list.delete(0, 'end')
for trade in read_all():
self.trade_history_list.insert(0, trade)
# get last price via xpath
def get_last_price(self):
try:
last_price = driver.find_element_by_xpath(
'/html/body/div[1]/div[1]/div[3]/div[1]/div/table/tr[1]/td[2]/div/div[2]/div[1]/div[2]/div/div[4]/div[2]'
).text
return float(last_price)
except:
try:
last_price = driver.find_element_by_xpath(config.custom_xpath_last_price).text
return float(last_price)
except Exception as error:
pass
# self.show_error('last_value', str(error), 'Please report error here: ')
# function to pass buy order to order engine.
def order_buy(self):
oe = OrderEngine
if self.order_type.get() == 'market':
oe.market(OrderEngine, round(get_account_value(), 2), self.get_last_price())
if self.order_type.get() == 'limit':
print('LIMIT BIMIT ORDER HEHEH')
oe.limit(OrderEngine, round(get_account_value(), 2), float(self.limit_price.get()), self.get_last_price())
self.update_labels()
# function to pass sell order to order engine.
def order_sell(self):
oe = OrderEngine
if self.order_type.get() == 'market':
print(type(get_account_value()), type(self.get_last_price()))
oe.market(OrderEngine, round(get_account_value(), 2) * -1, float(self.get_last_price()))
if self.order_type.get() == 'limit':
oe.limit(OrderEngine, get_account_value() * -1, float(self.limit_price.get()), self.get_last_price())
self.update_labels()
# Check with the order engine to see if there is a limit order.
def limit_check(self):
oe = OrderEngine
oe.on_tick(OrderEngine,
self.get_price_data('o'),
self.get_price_data('h'),
self.get_price_data('l'),
self.get_price_data('c')
)
global is_playing
print(str(is_playing.get()))
if str(is_playing.get()) == "▮▮":
print(str(is_playing.get()))
self.after(500, self.limit_check)
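    # While the replay is playing ("▮▮"), limit_check re-schedules itself
    # every 500 ms, feeding the current OHLC values to OrderEngine.on_tick
    # so any resting limit order can be evaluated against each new bar.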
# Function to auto-fill last price into limit price.
def copy_last(self):
self.limit_price.delete(0, "end")
self.limit_price.insert(0, self.last_price_value.get())
# Click next bar w/selenium, use functions to grab values.
def next_bar(self):
print(self.limit_price.get())
global is_playing
try:
driver.find_element_by_xpath('/html/body/div[7]/div/div[2]/div[3]/div').click()
except:
try:
driver.find_element_by_xpath(config.custom_xpath_replay).click()
except:
self.show_error('next_bar', 'xpath error', 'Please report error here: ')
is_playing.set("▶")
self.limit_check()
self.update_labels()
print('>>')
# Function to click the play-replay with selenium, check for limit orders.
def play_replay(self):
global is_playing
self.update_labels()
try:
driver.find_element_by_xpath('/html/body/div[9]/div/div[2]/div[2]/div').click()
except Exception:
driver.find_element_by_xpath(config.custom_xpath_play_replay).click()
print(str(is_playing.get()))
if str(is_playing.get()) == "▶":
is_playing.set("▮▮")
print(str(is_playing))
self.limit_check()
else:
is_playing.set("▶")
print(str(is_playing))
# Create tkinter window/app
root = tk.Tk()
root.title('tv-Trainer ~@robswc')
root["bg"] = "#1E1E1E"
root.attributes('-topmost', True)
root.geometry("300x500+0+0")
app = Application(root)
app['bg'] = config.background_color
app.configure()
app.mainloop()
|
[
"tkinter.StringVar",
"order_engine.get_account_value",
"tkinter.Button",
"tkinter.Entry",
"tkinter.Listbox",
"tkinter.messagebox.showinfo",
"tkinter.font.Font",
"trade_exporting.read_all",
"tkinter.Radiobutton",
"time.strftime",
"selenium.webdriver.Chrome",
"order_engine.get_position",
"locale.setlocale",
"tkinter.Label",
"tkinter.Tk",
"trade_exporting.clear_csv"
] |
[((921, 982), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', '"""English_United States.1252"""'], {}), "locale.setlocale(locale.LC_ALL, 'English_United States.1252')\n", (937, 982), False, 'import locale\n'), ((16992, 16999), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (16997, 16999), True, 'import tkinter as tk\n'), ((1387, 1405), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (1403, 1405), False, 'from selenium import webdriver\n'), ((2119, 2205), 'tkinter.font.Font', 'tkFont.Font', ([], {'family': 'config.font', 'weight': 'config.font_weight', 'size': 'config.font_size'}), '(family=config.font, weight=config.font_weight, size=config.\n font_size)\n', (2130, 2205), True, 'import tkinter.font as tkFont\n'), ((2551, 2562), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (2560, 2562), False, 'from tkinter import StringVar, messagebox\n'), ((2597, 2608), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (2606, 2608), False, 'from tkinter import StringVar, messagebox\n'), ((2697, 2708), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (2706, 2708), False, 'from tkinter import StringVar, messagebox\n'), ((2747, 2758), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (2756, 2758), False, 'from tkinter import StringVar, messagebox\n'), ((2795, 2806), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (2804, 2806), False, 'from tkinter import StringVar, messagebox\n'), ((2840, 2851), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (2849, 2851), False, 'from tkinter import StringVar, messagebox\n'), ((2879, 2890), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (2888, 2890), False, 'from tkinter import StringVar, messagebox\n'), ((2917, 2942), 'tkinter.StringVar', 'StringVar', (['None', '"""market"""'], {}), "(None, 'market')\n", (2926, 2942), False, 'from tkinter import StringVar, messagebox\n'), ((3181, 3192), 'tkinter.StringVar', 'StringVar', ([], {}), '()\n', (3190, 3192), False, 'from tkinter import StringVar, messagebox\n'), ((3937, 4049), 'tkinter.Button', 'tk.Button', ([], {'textvariable': 'is_playing', 'bg': 'config.button_color_light', 'fg': 'config.color_grey', 'borderwidth': '(0)'}), '(self, textvariable=is_playing, bg=config.button_color_light, fg=\n config.color_grey, borderwidth=0)\n', (3946, 4049), True, 'import tkinter as tk\n'), ((4329, 4427), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""▮▶"""', 'bg': 'config.button_color_light', 'fg': 'config.color_grey', 'borderwidth': '(0)'}), "(self, text='▮▶', bg=config.button_color_light, fg=config.\n color_grey, borderwidth=0)\n", (4338, 4427), True, 'import tkinter as tk\n'), ((4621, 4731), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""BUY"""', 'font': 'self.font', 'bg': 'config.button_color', 'fg': 'config.color_green', 'borderwidth': '(0)'}), "(self, text='BUY', font=self.font, bg=config.button_color, fg=\n config.color_green, borderwidth=0)\n", (4630, 4731), True, 'import tkinter as tk\n'), ((5010, 5119), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""SELL"""', 'font': 'self.font', 'bg': 'config.button_color', 'fg': 'config.color_red', 'borderwidth': '(0)'}), "(self, text='SELL', font=self.font, bg=config.button_color, fg=\n config.color_red, borderwidth=0)\n", (5019, 5119), True, 'import tkinter as tk\n'), ((5443, 5619), 'tkinter.Radiobutton', 'tk.Radiobutton', (['self'], {'bg': 'config.background_color', 'fg': 'config.color_dark_grey', 'selectcolor': 'config.background_color', 'text': '"""LIMIT"""', 'variable': 'self.order_type', 'value': '"""limit"""'}), "(self, bg=config.background_color, fg=config.color_dark_grey,\n selectcolor=config.background_color, text='LIMIT', variable=self.\n order_type, value='limit')\n", (5457, 5619), True, 'import tkinter as tk\n'), ((6210, 6296), 'tkinter.Entry', 'tk.Entry', (['self'], {'borderwidth': '(0)', 'bg': 'config.button_color_light', 'fg': 'config.color_grey'}), '(self, borderwidth=0, bg=config.button_color_light, fg=config.\n color_grey)\n', (6218, 6296), True, 'import tkinter as tk\n'), ((6499, 6615), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""LAST"""', 'borderwidth': '(0)', 'bg': 'config.button_color', 'fg': 'config.color_grey', 'font': 'self.font_small'}), "(self, text='LAST', borderwidth=0, bg=config.button_color, fg=\n config.color_grey, font=self.font_small)\n", (6508, 6615), True, 'import tkinter as tk\n'), ((8871, 8987), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""Clear"""', 'bg': 'config.button_color', 'fg': 'config.color_grey', 'font': 'self.font_small', 'borderwidth': '(0)'}), "(self, text='Clear', bg=config.button_color, fg=config.color_grey,\n font=self.font_small, borderwidth=0)\n", (8880, 8987), True, 'import tkinter as tk\n'), ((9248, 9327), 'tkinter.Listbox', 'tk.Listbox', (['self'], {'fg': 'config.color_grey', 'bg': 'config.textarea_color', 'borderwidth': '(0)'}), '(self, fg=config.color_grey, bg=config.textarea_color, borderwidth=0)\n', (9258, 9327), True, 'import tkinter as tk\n'), ((9602, 9781), 'tkinter.messagebox.showinfo', 'messagebox.showinfo', (['"""Error"""', '"""Sorry! Limit orders are not currently implemented.\nYou can check progress here:\nhttps://github.com/Robswc/tradingview-trainer/issues/5"""'], {}), '(\'Error\',\n """Sorry! Limit orders are not currently implemented.\nYou can check progress here:\nhttps://github.com/Robswc/tradingview-trainer/issues/5"""\n )\n', (9621, 9781), False, 'from tkinter import StringVar, messagebox\n'), ((10174, 10185), 'trade_exporting.clear_csv', 'clear_csv', ([], {}), '()\n', (10183, 10185), False, 'from trade_exporting import write, read_all, clear_csv\n'), ((13323, 13333), 'trade_exporting.read_all', 'read_all', ([], {}), '()\n', (13331, 13333), False, 'from trade_exporting import write, read_all, clear_csv\n'), ((9507, 9538), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M"""'], {}), "('%Y-%m-%d %H:%M')\n", (9520, 9538), False, 'import time\n'), ((3288, 3412), 'tkinter.Label', 'tk.Label', (['self'], {'textvariable': 'self.ticker_value', 'fg': 'config.color_white', 'bg': 'config.background_color', 'font': 'self.font_large'}), '(self, textvariable=self.ticker_value, fg=config.color_white, bg=\n config.background_color, font=self.font_large)\n', (3296, 3412), True, 'import tkinter as tk\n'), ((3619, 3747), 'tkinter.Label', 'tk.Label', (['self'], {'textvariable': 'self.last_price_value', 'bg': 'config.background_color', 'fg': 'config.color_white', 'font': 'self.font_large'}), '(self, textvariable=self.last_price_value, bg=config.\n background_color, fg=config.color_white, font=self.font_large)\n', (3627, 3747), True, 'import tkinter as tk\n'), ((5822, 6000), 'tkinter.Radiobutton', 'tk.Radiobutton', (['self'], {'bg': 'config.background_color', 'fg': 'config.color_dark_grey', 'selectcolor': 'config.background_color', 'text': '"""MARKET"""', 'variable': 'self.order_type', 'value': '"""market"""'}), "(self, bg=config.background_color, fg=config.color_dark_grey,\n selectcolor=config.background_color, text='MARKET', variable=self.\n order_type, value='market')\n", (5836, 6000), True, 'import tkinter as tk\n'), ((6873, 7001), 'tkinter.Label', 'tk.Label', (['self'], {'text': '"""Current Position"""', 'anchor': '"""w"""', 'bg': 'config.background_color', 'fg': 'config.color_grey', 'font': 'self.font_small'}), "(self, text='Current Position', anchor='w', bg=config.\n background_color, fg=config.color_grey, font=self.font_small)\n", (6881, 7001), True, 'import tkinter as tk\n'), ((7163, 7293), 'tkinter.Label', 'tk.Label', (['self'], {'textvariable': 'self.current_position_value', 'anchor': '"""w"""', 'bg': 'config.button_color_light', 'fg': 'config.color_dark_grey'}), "(self, textvariable=self.current_position_value, anchor='w', bg=\n config.button_color_light, fg=config.color_dark_grey)\n", (7171, 7293), True, 'import tkinter as tk\n'), ((7453, 7581), 'tkinter.Label', 'tk.Label', (['self'], {'textvariable': 'self.current_position_pnl', 'anchor': '"""e"""', 'bg': 'config.button_color_light', 'fg': 'config.color_dark_grey'}), "(self, textvariable=self.current_position_pnl, anchor='e', bg=\n config.button_color_light, fg=config.color_dark_grey)\n", (7461, 7581), True, 'import tkinter as tk\n'), ((7734, 7858), 'tkinter.Label', 'tk.Label', (['self'], {'text': '"""Account value"""', 'anchor': '"""w"""', 'bg': 'config.background_color', 'fg': 'config.color_grey', 'font': 'self.font_small'}), "(self, text='Account value', anchor='w', bg=config.background_color,\n fg=config.color_grey, font=self.font_small)\n", (7742, 7858), True, 'import tkinter as tk\n'), ((8030, 8152), 'tkinter.Label', 'tk.Label', (['self'], {'textvariable': 'self.account_value_text', 'bg': 'config.button_color_light', 'fg': 'config.color_white', 'anchor': '"""w"""'}), "(self, textvariable=self.account_value_text, bg=config.\n button_color_light, fg=config.color_white, anchor='w')\n", (8038, 8152), True, 'import tkinter as tk\n'), ((8309, 8434), 'tkinter.Label', 'tk.Label', (['self'], {'textvariable': 'self.account_value_pnl', 'bg': 'config.button_color_light', 'fg': 'config.color_dark_grey', 'anchor': '"""e"""'}), "(self, textvariable=self.account_value_pnl, bg=config.\n button_color_light, fg=config.color_dark_grey, anchor='e')\n", (8317, 8434), True, 'import tkinter as tk\n'), ((8587, 8705), 'tkinter.Label', 'tk.Label', (['self'], {'text': '"""Trades"""', 'anchor': '"""w"""', 'bg': 'config.background_color', 'fg': 'config.color_grey', 'font': 'self.font_small'}), "(self, text='Trades', anchor='w', bg=config.background_color, fg=\n config.color_grey, font=self.font_small)\n", (8595, 8705), True, 'import tkinter as tk\n'), ((11934, 11948), 'order_engine.get_position', 'get_position', ([], {}), '()\n', (11946, 11948), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((12087, 12101), 'order_engine.get_position', 'get_position', ([], {}), '()\n', (12099, 12101), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((13131, 13150), 'order_engine.get_account_value', 'get_account_value', ([], {}), '()\n', (13148, 13150), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((14204, 14223), 'order_engine.get_account_value', 'get_account_value', ([], {}), '()\n', (14221, 14223), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((14382, 14401), 'order_engine.get_account_value', 'get_account_value', ([], {}), '()\n', (14399, 14401), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((14662, 14681), 'order_engine.get_account_value', 'get_account_value', ([], {}), '()\n', (14679, 14681), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((14893, 14912), 'order_engine.get_account_value', 'get_account_value', ([], {}), '()\n', (14910, 14912), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((12045, 12059), 'order_engine.get_position', 'get_position', ([], {}), '()\n', (12057, 12059), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((13053, 13067), 'order_engine.get_position', 'get_position', ([], {}), '()\n', (13065, 13067), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((14754, 14773), 'order_engine.get_account_value', 'get_account_value', ([], {}), '()\n', (14771, 14773), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((11994, 12008), 'order_engine.get_position', 'get_position', ([], {}), '()\n', (12006, 12008), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((12171, 12185), 'order_engine.get_position', 'get_position', ([], {}), '()\n', (12183, 12185), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((12904, 12923), 'order_engine.get_account_value', 'get_account_value', ([], {}), '()\n', (12921, 12923), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n'), ((13011, 13025), 'order_engine.get_position', 'get_position', ([], {}), '()\n', (13023, 13025), False, 'from order_engine import OrderEngine, get_position, get_account_value, percent_change\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8
# Copyright 2017-2019 The FIAAS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import base64
import hashlib
import logging
from itertools import chain
from k8s.client import NotFound
from k8s.models.common import ObjectMeta
from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, \
IngressTLS
from fiaas_deploy_daemon.retry import retry_on_upsert_conflict
from fiaas_deploy_daemon.tools import merge_dicts
LOG = logging.getLogger(__name__)
class IngressDeployer(object):
def __init__(self, config, ingress_tls):
self._ingress_suffixes = config.ingress_suffixes
self._host_rewrite_rules = config.host_rewrite_rules
self._ingress_tls = ingress_tls
def deploy(self, app_spec, labels):
if self._should_have_ingress(app_spec):
self._create(app_spec, labels)
else:
self.delete(app_spec)
def delete(self, app_spec):
LOG.info("Deleting ingress for %s", app_spec.name)
try:
Ingress.delete(app_spec.name, app_spec.namespace)
except NotFound:
pass
@retry_on_upsert_conflict
def _create(self, app_spec, labels):
LOG.info("Creating/updating ingress for %s", app_spec.name)
annotations = {
u"fiaas/expose": u"true" if _has_explicitly_set_host(app_spec) else u"false"
}
custom_labels = merge_dicts(app_spec.labels.ingress, labels)
custom_annotations = merge_dicts(app_spec.annotations.ingress, annotations)
metadata = ObjectMeta(name=app_spec.name, namespace=app_spec.namespace, labels=custom_labels,
annotations=custom_annotations)
per_host_ingress_rules = [
IngressRule(host=self._apply_host_rewrite_rules(ingress_item.host),
http=self._make_http_ingress_rule_value(app_spec, ingress_item.pathmappings))
for ingress_item in app_spec.ingresses
if ingress_item.host is not None
]
default_host_ingress_rules = self._create_default_host_ingress_rules(app_spec)
ingress_spec = IngressSpec(rules=per_host_ingress_rules + default_host_ingress_rules)
ingress = Ingress.get_or_create(metadata=metadata, spec=ingress_spec)
self._ingress_tls.apply(ingress, app_spec, self._get_hosts(app_spec))
ingress.save()
def _generate_default_hosts(self, name):
for suffix in self._ingress_suffixes:
yield u"{}.{}".format(name, suffix)
def _create_default_host_ingress_rules(self, app_spec):
all_pathmappings = chain.from_iterable(ingress_item.pathmappings for ingress_item in app_spec.ingresses)
http_ingress_rule_value = self._make_http_ingress_rule_value(app_spec, all_pathmappings)
return [IngressRule(host=host, http=http_ingress_rule_value)
for host in self._generate_default_hosts(app_spec.name)]
def _apply_host_rewrite_rules(self, host):
for rule in self._host_rewrite_rules:
if rule.matches(host):
return rule.apply(host)
return host
def _should_have_ingress(self, app_spec):
return self._can_generate_host(app_spec) and _has_ingress(app_spec) and _has_http_port(app_spec)
def _can_generate_host(self, app_spec):
return len(self._ingress_suffixes) > 0 or _has_explicitly_set_host(app_spec)
@staticmethod
def _make_http_ingress_rule_value(app_spec, pathmappings):
http_ingress_paths = [
HTTPIngressPath(path=pm.path, backend=IngressBackend(serviceName=app_spec.name, servicePort=pm.port))
for pm in _deduplicate_in_order(pathmappings)]
return HTTPIngressRuleValue(paths=http_ingress_paths)
def _get_hosts(self, app_spec):
return list(self._generate_default_hosts(app_spec.name)) + \
[self._apply_host_rewrite_rules(ingress_item.host)
for ingress_item in app_spec.ingresses if ingress_item.host is not None]
def _has_explicitly_set_host(app_spec):
return any(ingress.host is not None for ingress in app_spec.ingresses)
def _has_http_port(app_spec):
return any(port.protocol == u"http" for port in app_spec.ports)
def _has_ingress(app_spec):
return len(app_spec.ingresses) > 0
def _deduplicate_in_order(iterator):
seen = set()
for item in iterator:
if item not in seen:
yield item
seen.add(item)
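# Illustrative behavior (example values are not from the original source):
# the generator yields the first occurrence of each item, preserving order,
# e.g. list(_deduplicate_in_order(["/a", "/b", "/a"])) == ["/a", "/b"]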
class IngressTls(object):
def __init__(self, config):
self._use_ingress_tls = config.use_ingress_tls
self._cert_issuer = config.tls_certificate_issuer
self._shortest_suffix = sorted(config.ingress_suffixes, key=len)[0] if config.ingress_suffixes else None
self.enable_deprecated_tls_entry_per_host = config.enable_deprecated_tls_entry_per_host
def apply(self, ingress, app_spec, hosts):
if self._should_have_ingress_tls(app_spec):
tls_annotations = {}
if self._cert_issuer or app_spec.ingress_tls.certificate_issuer:
issuer = app_spec.ingress_tls.certificate_issuer if app_spec.ingress_tls.certificate_issuer else self._cert_issuer
tls_annotations[u"certmanager.k8s.io/cluster-issuer"] = issuer
else:
tls_annotations[u"kubernetes.io/tls-acme"] = u"true"
ingress.metadata.annotations = merge_dicts(
ingress.metadata.annotations if ingress.metadata.annotations else {},
tls_annotations
)
if self.enable_deprecated_tls_entry_per_host:
# TODO: DOCD-1846 - Once new certificates has been provisioned, remove the single host entries and
# associated configuration flag
ingress.spec.tls = [IngressTLS(hosts=[host], secretName=host) for host in hosts if len(host) < 64]
else:
ingress.spec.tls = []
collapsed = self._collapse_hosts(app_spec, hosts)
ingress.spec.tls.append(IngressTLS(hosts=collapsed, secretName="{}-ingress-tls".format(app_spec.name)))
def _collapse_hosts(self, app_spec, hosts):
"""The first hostname in the list will be used as Common Name in the certificate"""
if self._shortest_suffix:
try:
return [self._generate_short_host(app_spec)] + hosts
except ValueError:
LOG.error("Failed to generate a short name to use as Common Name")
return hosts
def _should_have_ingress_tls(self, app_spec):
if self._use_ingress_tls == 'disabled' or app_spec.ingress_tls.enabled is False:
return False
else:
return self._use_ingress_tls == 'default_on' or app_spec.ingress_tls.enabled is True
def _generate_short_host(self, app_spec):
h = hashlib.sha1()
h.update(app_spec.name)
h.update(app_spec.namespace)
prefix = base64.b32encode(h.digest()).strip("=").lower()
short_prefix = prefix[:62 - len(self._shortest_suffix)]
short_name = "{}.{}".format(short_prefix, self._shortest_suffix)
if len(short_name) > 63 or short_name[0] == ".":
raise ValueError("Unable to generate a name short enough to be Common Name in certificate")
return short_name
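# A worked example of the length budget above (the suffix is illustrative):
# with a shortest ingress suffix of "k8s.example.com" (15 characters), the
# base32 prefix is cut to 62 - 15 = 47 characters, so "<prefix>.<suffix>" is
# at most 47 + 1 + 15 = 63 characters, the DNS label limit enforced by the
# final length check. Note that hashing str values and strip("=") on the
# base32 output assume Python 2.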
|
[
"k8s.models.ingress.IngressBackend",
"k8s.models.ingress.HTTPIngressRuleValue",
"hashlib.sha1",
"k8s.models.ingress.IngressTLS",
"k8s.models.ingress.Ingress.delete",
"k8s.models.ingress.IngressRule",
"k8s.models.ingress.Ingress.get_or_create",
"k8s.models.ingress.IngressSpec",
"k8s.models.common.ObjectMeta",
"fiaas_deploy_daemon.tools.merge_dicts",
"itertools.chain.from_iterable",
"logging.getLogger"
] |
[((1075, 1102), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1092, 1102), False, 'import logging\n'), ((2016, 2060), 'fiaas_deploy_daemon.tools.merge_dicts', 'merge_dicts', (['app_spec.labels.ingress', 'labels'], {}), '(app_spec.labels.ingress, labels)\n', (2027, 2060), False, 'from fiaas_deploy_daemon.tools import merge_dicts\n'), ((2090, 2144), 'fiaas_deploy_daemon.tools.merge_dicts', 'merge_dicts', (['app_spec.annotations.ingress', 'annotations'], {}), '(app_spec.annotations.ingress, annotations)\n', (2101, 2144), False, 'from fiaas_deploy_daemon.tools import merge_dicts\n'), ((2164, 2283), 'k8s.models.common.ObjectMeta', 'ObjectMeta', ([], {'name': 'app_spec.name', 'namespace': 'app_spec.namespace', 'labels': 'custom_labels', 'annotations': 'custom_annotations'}), '(name=app_spec.name, namespace=app_spec.namespace, labels=\n custom_labels, annotations=custom_annotations)\n', (2174, 2283), False, 'from k8s.models.common import ObjectMeta\n'), ((2744, 2814), 'k8s.models.ingress.IngressSpec', 'IngressSpec', ([], {'rules': '(per_host_ingress_rules + default_host_ingress_rules)'}), '(rules=per_host_ingress_rules + default_host_ingress_rules)\n', (2755, 2814), False, 'from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, IngressTLS\n'), ((2834, 2893), 'k8s.models.ingress.Ingress.get_or_create', 'Ingress.get_or_create', ([], {'metadata': 'metadata', 'spec': 'ingress_spec'}), '(metadata=metadata, spec=ingress_spec)\n', (2855, 2893), False, 'from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, IngressTLS\n'), ((3223, 3313), 'itertools.chain.from_iterable', 'chain.from_iterable', (['(ingress_item.pathmappings for ingress_item in app_spec.ingresses)'], {}), '(ingress_item.pathmappings for ingress_item in app_spec.\n ingresses)\n', (3242, 3313), False, 'from itertools import chain\n'), ((4321, 4367), 'k8s.models.ingress.HTTPIngressRuleValue', 'HTTPIngressRuleValue', ([], {'paths': 'http_ingress_paths'}), '(paths=http_ingress_paths)\n', (4341, 4367), False, 'from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, IngressTLS\n'), ((7456, 7470), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (7468, 7470), False, 'import hashlib\n'), ((1636, 1685), 'k8s.models.ingress.Ingress.delete', 'Ingress.delete', (['app_spec.name', 'app_spec.namespace'], {}), '(app_spec.name, app_spec.namespace)\n', (1650, 1685), False, 'from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, IngressTLS\n'), ((3422, 3474), 'k8s.models.ingress.IngressRule', 'IngressRule', ([], {'host': 'host', 'http': 'http_ingress_rule_value'}), '(host=host, http=http_ingress_rule_value)\n', (3433, 3474), False, 'from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, IngressTLS\n'), ((6008, 6110), 'fiaas_deploy_daemon.tools.merge_dicts', 'merge_dicts', (['(ingress.metadata.annotations if ingress.metadata.annotations else {})', 'tls_annotations'], {}), '(ingress.metadata.annotations if ingress.metadata.annotations else\n {}, tls_annotations)\n', (6019, 6110), False, 'from fiaas_deploy_daemon.tools import merge_dicts\n'), ((4182, 4244), 'k8s.models.ingress.IngressBackend', 'IngressBackend', ([], {'serviceName': 'app_spec.name', 'servicePort': 'pm.port'}), '(serviceName=app_spec.name, servicePort=pm.port)\n', (4196, 4244), False, 'from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, IngressTLS\n'), ((6411, 6452), 'k8s.models.ingress.IngressTLS', 'IngressTLS', ([], {'hosts': '[host]', 'secretName': 'host'}), '(hosts=[host], secretName=host)\n', (6421, 6452), False, 'from k8s.models.ingress import Ingress, IngressSpec, IngressRule, HTTPIngressRuleValue, HTTPIngressPath, IngressBackend, IngressTLS\n')]
|
import pytest
from app.authentication import get_token_from_headers, get_allowed_tokens_from_config
def test_get_token_from_headers():
yield check_token, {'Authorization': 'Bearer foo-bar'}, 'foo-bar'
yield check_token, {'Authorization': 'Bearer bar-foo'}, 'bar-foo'
yield check_token, {'Authorization': 'Bearer '}, ''
yield (check_token, {'Authorisation': 'Bearer foo-bar'}, None,
"Authorization header misspelt")
yield (check_token, {'Authorization': 'Borrower foo-bar'}, None,
"Authorization header prefix invalid")
def check_token(headers, expected_token, message=None):
assert get_token_from_headers(headers) == expected_token, message
@pytest.mark.parametrize('config,tokens', [
({'DM_API_AUTH_TOKENS': 'foo:bar'}, ['foo', 'bar']),
({'DM_API_AUTH_TOKENS': 'bar'}, ['bar']),
({}, []),
])
def test_get_allowed_tokens_from_config(config, tokens):
assert get_allowed_tokens_from_config(config) == tokens
|
[
"pytest.mark.parametrize",
"app.authentication.get_token_from_headers",
"app.authentication.get_allowed_tokens_from_config"
] |
[((695, 847), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""config,tokens"""', "[({'DM_API_AUTH_TOKENS': 'foo:bar'}, ['foo', 'bar']), ({\n 'DM_API_AUTH_TOKENS': 'bar'}, ['bar']), ({}, [])]"], {}), "('config,tokens', [({'DM_API_AUTH_TOKENS': 'foo:bar'\n }, ['foo', 'bar']), ({'DM_API_AUTH_TOKENS': 'bar'}, ['bar']), ({}, [])])\n", (718, 847), False, 'import pytest\n'), ((633, 664), 'app.authentication.get_token_from_headers', 'get_token_from_headers', (['headers'], {}), '(headers)\n', (655, 664), False, 'from app.authentication import get_token_from_headers, get_allowed_tokens_from_config\n'), ((926, 964), 'app.authentication.get_allowed_tokens_from_config', 'get_allowed_tokens_from_config', (['config'], {}), '(config)\n', (956, 964), False, 'from app.authentication import get_token_from_headers, get_allowed_tokens_from_config\n')]
|
# -*- coding: utf-8 -*-
"""Tests for the executor_vline."""
# Third party imports
import pytest
from qtpy.QtCore import Qt
from spyder.config.manager import CONF
# Local imports
from spyder_okvim.spyder.config import CONF_SECTION
from spyder_okvim.utils.vim_status import VimState
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("import numpy as np", ['V'], 0, [0, 18]),
("import numpy as np", ['V', 'l', 'h'], 0, [0, 18]),
("import numpy as np", ['V', '5l'], 5, [0, 18]),
("import numpy as np", ['5l', 'V'], 5, [0, 18]),
("""import numpy as np
import matplotlib.pyplot as plt
import scipy.scipy as sc""", ['V', 'j'], 19, [0, 50]),
("""import numpy as np
import matplotlib.pyplot as plt
import scipy.scipy as sc""", ['2l', 'V', '2j', '5l'], 58, [0, 75]),
("""import numpy as np
import matplotlib.pyplot as plt
import scipy.scipy as sc
""", ['2l', 'V', '3j', 'k', 'j'], 76, [0, 76]),
("""
import matplotlib.pyplot as plt
import scipy.scipy as sc
""", ['5j', 'V', '5k'], 0, [0, 58]),
]
)
def test_V_cmd(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test V command."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("import numpy as np", ['V', '0'], 0, [0, 18]),
("import numpy as np", ['5l', 'V', '0'], 0, [0, 18]),
("""import numpy as np
import matplotlib.pyplot as plt
import scipy.scipy as sc""", ['V', 'j', '5l', '0'], 19, [0, 50])
]
)
def test_zero_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test 0 command in v-line."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
(" import numpy as np", ['V', '^'], 3, [0, 21]),
(" import numpy as np", ['10l', 'V', '^'], 3, [0, 21]),
]
)
def test_caret_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test ^ command in v-line."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("import numpy as np", ['V', '$'], 18, [0, 18])
]
)
def test_dollar_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test $ command in v-line."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01 34\n", ['V', 'w'], 3, [0, 5]),
("01 34\n", ['V', 'w', 'o'], 0, [0, 5]),
("01 34\n", ['V', 'w', 'o', 'o'], 4, [0, 5]),
("\n", ['j', 'V'], 1, [1, 1]),
("\n", ['j', 'V', 'o'], 1, [1, 1]),
("01 34\n6\n", ['V', 'j'], 6, [0, 7]),
("01 34\n6\n", ['V', 'j', 'o'], 0, [0, 7]),
("01 34\n6\n", ['j', 'V', 'k', 'o'], 6, [0, 7]),
("01 34\n6\n8\n", ['j', 'V', 'j', '2k', 'o'], 6, [0, 7]),
]
)
def test_o_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test o command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
("", ['V', 'J'], "", 0),
("\n\n", ['3j', 'V', 'J'], "\n\n", 2),
("0\n23", ['j', 'l', 'V', 'k', 'J'], "0 23", 1),
("0\n2\n4\n6\n8\n", ['V', '2j', 'J'], "0 2 4\n6\n8\n", 3),
("0\n2\n4\n6\n8\n", ['V', '2j', 'J', '.'], "0 2 4 6 8\n", 7)
]
)
def test_J_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test J command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert editor.toPlainText() == text_expected
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01 34", ['V', 'w'], 3, [0, 5]),
]
)
def test_w_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test w command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('', ['V', 'W'], 0, [0, 0]),
('029.d98@jl 34', ['V', 'W'], 11, [0, 13]),
('029.d98@jl 34', ['V', '2W'], 13, [0, 13]),
('029.d98@jl 34\na', ['V', '2W'], 14, [0, 15]),
('029.d98@jl 34\n a', ['V', '2W'], 16, [0, 17]),
]
)
def test_W_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test W command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01 34", ['$', 'V', 'b'], 3, [0, 5]),
]
)
def test_b_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test b command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01.34", ['$', 'V', 'B'], 0, [0, 5]),
]
)
def test_B_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test B command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01 34\n67 90", ['V', 'e'], 1, [0, 5]),
("01 34\n67 90", ['V', '3e'], 7, [0, 11]),
]
)
def test_e_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test e command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('0\n2\n4\n', ['V', '2G'], 2, [0, 3]),
('0\n \n8\n', ['V', '2G'], 6, [0, 7]),
('0\n2\n4\n', ['V', 'G'], 6, [0, 6]),
('0\n2\n4\n a', ['V', 'G'], 11, [0, 12])
]
)
def test_G_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test G command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('0\n2\n4\n', ['V', '2gg'], 2, [0, 3]),
('0\n \n8\n', ['V', '2gg'], 6, [0, 7]),
(' 0\n2\n4\n', ['4j', 'V', 'gg'], 4, [0, 10])
]
)
def test_gg_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test gg command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('abcde', ['V', '~'], 'ABCDE', 0),
('abcde\na', ['l', 'V', '$', '~'], 'ABCDE\na', 0),
('abcde\na', ['l', 'V', '$', '~', 'j', '.'], 'ABCDE\nA', 6),
('', ['V', '~'], '', 0)
]
)
def test_tilde_cmd_in_vline(vim_bot, text, cmd_list, text_expected,
cursor_pos):
"""Test ~ command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('', ['V', '%'], 0, [0, 0]),
('\n', ['j', 'V', '%'], 1, [1, 1]),
(' ()', ['V', '%'], 2, [0, 3]),
(' ()', ['V', '%', '%'], 1, [0, 3])
]
)
def test_percent_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test % command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('', ['V', 'f', 'r'], 0, [0, 0]),
('\n', ['j', 'V', 'f', 'r'], 1, [1, 1]),
(' rr', ['V', 'f', 'r'], 1, [0, 3]),
(' rr', ['V', 'f', 'r', ';'], 2, [0, 3]),
(' rr', ['V', 'f', 'r', ';', ','], 1, [0, 3]),
]
)
def test_f_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test f command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('', ['V', 'F', 'r'], 0, [0, 0]),
('\n', ['j', 'V', 'F', 'r'], 1, [1, 1]),
(' rr ', ['V', '$', 'F', 'r'], 2, [0, 4]),
(' rr ', ['V', '$', 'F', 'r', ';'], 1, [0, 4]),
(' rr ', ['V', '$', 'F', 'r', ';', ','], 2, [0, 4]),
]
)
def test_F_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test F command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('', ['V', 't', 'r'], 0, [0, 0]),
('\n', ['j', 'V', 't', 'r'], 1, [1, 1]),
(' rr', ['V', 't', 'r'], 1, [0, 4]),
(' rr', ['V', 't', 'r', ';'], 2, [0, 4]),
(' rrrr', ['V', 't', 'r', '4;'], 4, [0, 6]),
(' rrrr', ['V', 't', 'r', '4;', ','], 3, [0, 6]),
]
)
def test_t_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test t command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('', ['V', 'T', 'r'], 0, [0, 0]),
('r\n', ['j', 'V', 'T', 'r'], 2, [2, 2]),
(' rr ', ['V', '$', 'T', 'r'], 4, [0, 6]),
(' rr ', ['V', '$', 'T', 'r', ';'], 3, [0, 6]),
(' rrrr', ['V', '$', 'T', 'r', '4;'], 3, [0, 6]),
(' rrrr', ['V', '$', 'T', 'r', '4;', ','], 4, [0, 6]),
]
)
def test_T_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test T command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
("", ['V', 'r', 'r'], "", 0),
("1\n", ['j', 'V', 'r', 'r'], "1\n", 2),
("\n\na", ['j', 'V', 'r', 'r'], "\n\na", 1),
("a", ['V', 'r', 'r'], "r", 0),
(" a\nbc\n", ['l', 'V', 'j', 'r', 'r'], "rr\nrr\n", 0),
(" a\nbc\nkk", ['l', 'V', 'j', 'r', 'r'], "rr\nrr\nkk", 0),
(" a\nbc\nkk", ['l', 'V', 'j', 'r', 'r', 'j', '.'], "rr\nrr\nrr", 3),
]
)
def test_r_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test r command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")
assert sel == []
assert cmd_line.text() == ""
assert vim.vim_cmd.vim_status.vim_state == VimState.NORMAL
assert editor.textCursor().position() == cursor_pos
assert editor.toPlainText() == text_expected
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('ABCDE', ['V', 'u'], 'abcde', 0),
('ABCDE\nA', ['l', 'V', '$', 'u'], 'abcde\nA', 0),
('ABCDE\nA', ['l', 'V', '$', 'u', 'j', '.'], 'abcde\na', 6),
('', ['V', 'u'], '', 0)
]
)
def test_u_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test u command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('ABCDE', ['V', 'g', 'u'], 'abcde', 0),
('ABCDE\nA', ['l', 'V', '$', 'g', 'u'], 'abcde\nA', 0),
('ABCDE\nA', ['l', 'V', '$', 'g', 'u', 'j', '.'], 'abcde\na', 6),
('', ['V', 'g', 'u'], '', 0)
]
)
def test_gu_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test gu command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('abcde', ['V', 'U'], 'ABCDE', 0),
('abcde\na', ['l', 'V', '$', 'U'], 'ABCDE\na', 0),
('abcde\na', ['l', 'V', '$', 'U', 'j', '.'], 'ABCDE\nA', 6),
('', ['V', 'U'], '', 0)
]
)
def test_U_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test U command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('abcde', ['V', 'g', 'U'], 'ABCDE', 0),
('abcde\na', ['l', 'V', '$', 'g', 'U'], 'ABCDE\na', 0),
('abcde\na', ['l', 'V', '$', 'g', 'U', 'j', '.'], 'ABCDE\nA', 6),
('', ['V', 'g', 'U'], '', 0)
]
)
def test_gU_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test gU command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('abCde', ['V', 'g', '~'], 'ABcDE', 0),
('abCde\na', ['l', 'V', '$', 'g', '~'], 'ABcDE\na', 0),
('abCde\na', ['l', 'V', '$', 'g', '~', 'j', '.'], 'ABcDE\nA', 6),
('', ['V', 'g', '~'], '', 0)
]
)
def test_gtilde_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test g~ command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('', ['V', '>'], '', 0),
('abcde', ['2l', 'V', '>'], ' abcde', 4),
(' abcde\na', ['V', '>'], ' abcde\na', 5),
('a\n\na', ['V', '2j', '>'], ' a\n\n a', 4),
]
)
def test_greater_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test > command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, text_expected, cursor_pos",
[
('', ['V', '<'], '', 0),
(' abcde', ['2l', 'V', '<'], 'abcde', 0),
(' abcde\na', ['V', '<'], ' abcde\na', 1),
(' a\n\n a', ['V', '2j', '<'], 'a\n\na', 0),
]
)
def test_less_cmd_in_vline(vim_bot, text, cmd_list, text_expected, cursor_pos):
"""Test < command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, register_name, text_yanked",
[
('a', ['V', 'y'], 0, '"', 'a\n'),
('abcd', ['V', '"', '0', 'y'], 0, '0', 'abcd\n'),
('abcd\ne', ['V', 'j', '"', 'a', 'y'], 0, 'a', 'abcd\ne\n'),
]
)
def test_y_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, register_name,
text_yanked):
"""Test y command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
reg = vim.vim_cmd.vim_status.register_dict[register_name]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert reg.content == text_yanked
assert reg.type == VimState.VLINE
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
if register_name == '"':
reg0 = vim.vim_cmd.vim_status.register_dict['0']
assert reg0.content == text_yanked
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, text_expected",
[
('ak', ['V', 'p'], 0, ''),
('ak', ['v', 'l', 'y', 'V', 'p'], 0, 'ak'),
('ak', ['v', 'l', 'y', 'V', 'P'], 0, 'ak'),
('ak', ['v', 'l', 'y', 'V', '2p'], 0, 'ak\nak'),
('ab\n\ncd\n', ['v', 'j', 'y', '2j', 'V', '2p'], 4, 'ab\n\nab\n\n\nab\n\n\n'),
('ab\ncd\nef\ngh\n', ['V', 'j', 'y', '2j', 'V', 'p'], 6, 'ab\ncd\nab\ncd\ngh\n'),
('ab\ncd\nef\ngh\n', ['V', 'j', 'y', '2j', 'V', '2p'], 6, 'ab\ncd\nab\ncd\nab\ncd\ngh\n'),
]
)
def test_p_P_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, text_expected):
"""Test p command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert editor.toPlainText() == text_expected
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked",
[
('ab', ['V', 'd'], 0, '', '"', 'ab\n'),
(' ab\n cd\n ef', ['j', 'V', 'd'], 5, ' ab\n ef', '"', ' cd\n'),
(' ab\n cd\n', ['V', 'G', 'd'], 0, '', '"', ' ab\n cd\n\n'),
(' ab\n cd\n ef', ['2j', 'V', 'd'], 5, ' ab\n cd', '"', ' ef\n'),
(' ab\n cd\n ef', ['2j', 'V', 'k', 'd'], 1, ' ab', '"', ' cd\n ef\n'),
(' ab\n cd', ['$', 'V', 'd'], 1, ' cd', '"', ' ab\n'),
]
)
def test_d_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked):
"""Test d command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
reg = vim.vim_cmd.vim_status.register_dict[reg_name]
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert reg.content == text_yanked
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked",
[
('ab', ['V', 'x'], 0, '', '"', 'ab\n'),
(' ab\n cd\n ef', ['j', 'V', 'x'], 5, ' ab\n ef', '"', ' cd\n'),
(' ab\n cd\n', ['V', 'G', 'x'], 0, '', '"', ' ab\n cd\n\n'),
(' ab\n cd\n ef', ['2j', 'V', 'x'], 5, ' ab\n cd', '"', ' ef\n'),
(' ab\n cd\n ef', ['2j', 'V', 'k', 'x'], 1, ' ab', '"', ' cd\n ef\n'),
(' ab\n cd', ['$', 'V', 'x'], 1, ' cd', '"', ' ab\n'),
]
)
def test_x_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked):
"""Test x command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
reg = vim.vim_cmd.vim_status.register_dict[reg_name]
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert reg.content == text_yanked
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked",
[
('ab', ['V', 'c'], 0, '', '"', 'ab\n'),
(' ab\n cd\n ef', ['j', 'V', 'c'], 4, ' ab\n\n ef', '"', ' cd\n'),
(' ab\n cd\n', ['V', 'G', 'c'], 0, '', '"', ' ab\n cd\n\n'),
(' ab\n cd\n ef', ['2j', 'V', 'c'], 8, ' ab\n cd\n', '"', ' ef\n'),
(' ab\n cd\n ef', ['2j', 'V', 'k', 'c'], 4, ' ab\n', '"', ' cd\n ef\n'),
(' ab\n cd', ['$', 'V', 'c'], 0, '\n cd', '"', ' ab\n'),
]
)
def test_c_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked):
"""Test c command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
reg = vim.vim_cmd.vim_status.register_dict[reg_name]
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert reg.content == text_yanked
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked",
[
('ab', ['V', 's'], 0, '', '"', 'ab\n'),
(' ab\n cd\n ef', ['j', 'V', 's'], 4, ' ab\n\n ef', '"', ' cd\n'),
(' ab\n cd\n', ['V', 'G', 's'], 0, '', '"', ' ab\n cd\n\n'),
(' ab\n cd\n ef', ['2j', 'V', 's'], 8, ' ab\n cd\n', '"', ' ef\n'),
(' ab\n cd\n ef', ['2j', 'V', 'k', 's'], 4, ' ab\n', '"', ' cd\n ef\n'),
(' ab\n cd', ['$', 'V', 's'], 0, '\n cd', '"', ' ab\n'),
]
)
def test_s_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked):
"""Test s command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
qtbot.keyClicks(cmd_line, cmd)
reg = vim.vim_cmd.vim_status.register_dict[reg_name]
assert cmd_line.text() == ""
assert editor.toPlainText() == text_expected
assert reg.content == text_yanked
assert editor.textCursor().position() == cursor_pos
assert vim.vim_cmd.vim_status.get_pos_start_in_selection() is None
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
('a', ['V', '/', 'b', '\r'], 0, [0, 1]),
('a', ['V', '/', 'b', '\r', 'n'], 0, [0, 1]),
(' dhr\n dhrwodn\n\ndhrwodn\n dhrwodn', ['V', '/', 'd', 'h', 'r', Qt.Key_Enter], 1, [0, 4]),
(' dhr\n dhrwodn\n\ndhrwodn\n dhrwodn', ['V', '/', 'd', 'h', 'r', Qt.Key_Enter, 'n'], 7, [0, 14]),
(' dhr\n dhrwodn\n\ndhrwodn\n dhrwodn', ['V', '/', 'd', 'h', 'r', Qt.Key_Return, 'n', 'N'], 1, [0, 4]),
]
)
def test_search_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test / command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
if isinstance(cmd, str):
qtbot.keyClicks(cmd_line, cmd)
else:
qtbot.keyPress(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos == sel_pos_
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01 34", ['V', Qt.Key_Space], 1, [0, 5]),
]
)
def test_space_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test space command in vline."""
_, _, editor, vim, qtbot = vim_bot
CONF.set(CONF_SECTION, 'leader_key', 'F1')
vim.apply_plugin_settings("")
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
if isinstance(cmd, str):
qtbot.keyClicks(cmd_line, cmd)
else:
qtbot.keyPress(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01 34", ['2l', 'V', Qt.Key_Backspace], 1, [0, 5]),
]
)
def test_backspace_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test backspace command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
if isinstance(cmd, str):
qtbot.keyClicks(cmd_line, cmd)
else:
qtbot.keyPress(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
@pytest.mark.parametrize(
"text, cmd_list, cursor_pos, sel_pos",
[
("01 34\n kj", ['V', Qt.Key_Enter], 9, [0, 11]),
]
)
def test_enter_cmd_in_vline(vim_bot, text, cmd_list, cursor_pos, sel_pos):
"""Test enter command in vline."""
_, _, editor, vim, qtbot = vim_bot
editor.set_text(text)
cmd_line = vim.get_focus_widget()
for cmd in cmd_list:
if isinstance(cmd, str):
qtbot.keyClicks(cmd_line, cmd)
else:
qtbot.keyPress(cmd_line, cmd)
sel = editor.get_extra_selections("vim_selection")[0]
sel_pos_ = [sel.cursor.selectionStart(), sel.cursor.selectionEnd()]
assert cmd_line.text() == ""
assert editor.textCursor().position() == cursor_pos
assert sel_pos_ == sel_pos
|
[
"pytest.mark.parametrize",
"spyder.config.manager.CONF.set"
] |
[((285, 1055), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', '[(\'import numpy as np\', [\'V\'], 0, [0, 18]), (\'import numpy as np\', [\'V\',\n \'l\', \'h\'], 0, [0, 18]), (\'import numpy as np\', [\'V\', \'5l\'], 5, [0, 18]),\n (\'import numpy as np\', [\'5l\', \'V\'], 5, [0, 18]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc"""\n , [\'V\', \'j\'], 19, [0, 50]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc"""\n , [\'2l\', \'V\', \'2j\', \'5l\'], 58, [0, 75]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc\n"""\n , [\'2l\', \'V\', \'3j\', \'k\', \'j\'], 76, [0, 76]), (\n """\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc\n""", [\'5j\',\n \'V\', \'5k\'], 0, [0, 58])]'], {}), '(\'text, cmd_list, cursor_pos, sel_pos\', [(\n \'import numpy as np\', [\'V\'], 0, [0, 18]), (\'import numpy as np\', [\'V\',\n \'l\', \'h\'], 0, [0, 18]), (\'import numpy as np\', [\'V\', \'5l\'], 5, [0, 18]),\n (\'import numpy as np\', [\'5l\', \'V\'], 5, [0, 18]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc"""\n , [\'V\', \'j\'], 19, [0, 50]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc"""\n , [\'2l\', \'V\', \'2j\', \'5l\'], 58, [0, 75]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc\n"""\n , [\'2l\', \'V\', \'3j\', \'k\', \'j\'], 76, [0, 76]), (\n """\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc\n""", [\'5j\',\n \'V\', \'5k\'], 0, [0, 58])])\n', (308, 1055), False, 'import pytest\n'), ((1596, 1903), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', '[(\'import numpy as np\', [\'V\', \'0\'], 0, [0, 18]), (\'import numpy as np\', [\n \'5l\', \'V\', \'0\'], 0, [0, 18]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc"""\n , [\'V\', \'j\', \'5l\', \'0\'], 19, [0, 50])]'], {}), '(\'text, cmd_list, cursor_pos, sel_pos\', [(\n \'import numpy as np\', [\'V\', \'0\'], 0, [0, 18]), (\'import numpy as np\', [\n \'5l\', \'V\', \'0\'], 0, [0, 18]), (\n """import numpy as np\nimport matplotlib.pyplot as plt\nimport scipy.scipy as sc"""\n , [\'V\', \'j\', \'5l\', \'0\'], 19, [0, 50])])\n', (1619, 1903), False, 'import pytest\n'), ((2457, 2640), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[(' import numpy as np', ['V', '^'], 3, [0, 21]), (\n ' import numpy as np', ['10l', 'V', '^'], 3, [0, 21])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [(\n ' import numpy as np', ['V', '^'], 3, [0, 21]), (\n ' import numpy as np', ['10l', 'V', '^'], 3, [0, 21])])\n", (2480, 2640), False, 'import pytest\n'), ((3198, 3316), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('import numpy as np', ['V', '$'], 18, [0, 18])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [(\n 'import numpy as np', ['V', '$'], 18, [0, 18])])\n", (3221, 3316), False, 'import pytest\n'), ((3871, 4342), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01 34\\n', ['V', 'w'], 3, [0, 5]), ('01 34\\n', ['V', 'w', 'o'], 0, [0, 5]\n ), ('01 34\\n', ['V', 'w', 'o', 'o'], 4, [0, 5]), ('\\n', ['j', 'V'], 1,\n [1, 1]), ('\\n', ['j', 'V', 'o'], 1, [1, 1]), ('01 34\\n6\\n', ['V', 'j'],\n 6, [0, 7]), ('01 34\\n6\\n', ['V', 'j', 'o'], 0, [0, 7]), ('01 34\\n6\\n',\n ['j', 'V', 'k', 'o'], 6, [0, 7]), ('01 34\\n6\\n8\\n', ['j', 'V', 'j',\n '2k', 'o'], 6, [0, 7])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('01 34\\n',\n ['V', 'w'], 3, [0, 5]), ('01 34\\n', ['V', 'w', 'o'], 0, [0, 5]), (\n '01 34\\n', ['V', 'w', 'o', 'o'], 4, [0, 5]), ('\\n', ['j', 'V'], 1, [1, \n 1]), ('\\n', ['j', 'V', 'o'], 1, [1, 1]), ('01 34\\n6\\n', ['V', 'j'], 6,\n [0, 7]), ('01 34\\n6\\n', ['V', 'j', 'o'], 0, [0, 7]), ('01 34\\n6\\n', [\n 'j', 'V', 'k', 'o'], 6, [0, 7]), ('01 34\\n6\\n8\\n', ['j', 'V', 'j', '2k',\n 'o'], 6, [0, 7])])\n", (3894, 4342), False, 'import pytest\n'), ((4934, 5255), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('', ['V', 'J'], '', 0), ('\\n\\n', ['3j', 'V', 'J'], '\\n\\n', 2), ('0\\n23',\n ['j', 'l', 'V', 'k', 'J'], '0 23', 1), ('0\\n2\\n4\\n6\\n8\\n', ['V', '2j',\n 'J'], '0 2 4\\n6\\n8\\n', 3), ('0\\n2\\n4\\n6\\n8\\n', ['V', '2j', 'J', '.'],\n '0 2 4 6 8\\n', 7)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [('',\n ['V', 'J'], '', 0), ('\\n\\n', ['3j', 'V', 'J'], '\\n\\n', 2), ('0\\n23', [\n 'j', 'l', 'V', 'k', 'J'], '0 23', 1), ('0\\n2\\n4\\n6\\n8\\n', ['V', '2j',\n 'J'], '0 2 4\\n6\\n8\\n', 3), ('0\\n2\\n4\\n6\\n8\\n', ['V', '2j', 'J', '.'],\n '0 2 4 6 8\\n', 7)])\n", (4957, 5255), False, 'import pytest\n'), ((5788, 5891), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01 34', ['V', 'w'], 3, [0, 5])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('01 34', [\n 'V', 'w'], 3, [0, 5])])\n", (5811, 5891), False, 'import pytest\n'), ((6441, 6737), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', '[(\'\', [\'V\', \'W\'], 0, [0, 0]), (\'029.d98@jl 34\', [\'V\', \'W\'], 11, [0, 13]), (\n \'029.d98@jl 34\', [\'V\', \'2W\'], 13, [0, 13]), (\'029.d98@jl 34\\na\', [\'V\',\n \'2W\'], 14, [0, 15]), ("""029.d98@jl 34\n a""", [\'V\', \'2W\'], 16, [0, 17])]'], {}), '(\'text, cmd_list, cursor_pos, sel_pos\', [(\'\', [\'V\',\n \'W\'], 0, [0, 0]), (\'029.d98@jl 34\', [\'V\', \'W\'], 11, [0, 13]), (\n \'029.d98@jl 34\', [\'V\', \'2W\'], 13, [0, 13]), (\'029.d98@jl 34\\na\', [\'V\',\n \'2W\'], 14, [0, 15]), ("""029.d98@jl 34\n a""", [\'V\', \'2W\'], 16, [0, 17])])\n', (6464, 6737), False, 'import pytest\n'), ((7308, 7416), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01 34', ['$', 'V', 'b'], 3, [0, 5])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('01 34', [\n '$', 'V', 'b'], 3, [0, 5])])\n", (7331, 7416), False, 'import pytest\n'), ((7966, 8074), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01.34', ['$', 'V', 'B'], 0, [0, 5])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('01.34', [\n '$', 'V', 'B'], 0, [0, 5])])\n", (7989, 8074), False, 'import pytest\n'), ((8624, 8781), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01 34\\n67 90', ['V', 'e'], 1, [0, 5]), ('01 34\\n67 90', ['V', '3e'], 7,\n [0, 11])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [(\n '01 34\\n67 90', ['V', 'e'], 1, [0, 5]), ('01 34\\n67 90', ['V', '3e'], 7,\n [0, 11])])\n", (8647, 8781), False, 'import pytest\n'), ((9335, 9578), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('0\\n2\\n4\\n', ['V', '2G'], 2, [0, 3]), ('0\\n \\n8\\n', ['V', '2G'], 6, [\n 0, 7]), ('0\\n2\\n4\\n', ['V', 'G'], 6, [0, 6]), ('0\\n2\\n4\\n a', ['V',\n 'G'], 11, [0, 12])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [(\n '0\\n2\\n4\\n', ['V', '2G'], 2, [0, 3]), ('0\\n \\n8\\n', ['V', '2G'], 6,\n [0, 7]), ('0\\n2\\n4\\n', ['V', 'G'], 6, [0, 6]), ('0\\n2\\n4\\n a', ['V',\n 'G'], 11, [0, 12])])\n", (9358, 9578), False, 'import pytest\n'), ((10143, 10351), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('0\\n2\\n4\\n', ['V', '2gg'], 2, [0, 3]), ('0\\n \\n8\\n', ['V', '2gg'], 6,\n [0, 7]), (' 0\\n2\\n4\\n', ['4j', 'V', 'gg'], 4, [0, 10])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [(\n '0\\n2\\n4\\n', ['V', '2gg'], 2, [0, 3]), ('0\\n \\n8\\n', ['V', '2gg'], \n 6, [0, 7]), (' 0\\n2\\n4\\n', ['4j', 'V', 'gg'], 4, [0, 10])])\n", (10166, 10351), False, 'import pytest\n'), ((10913, 11168), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('abcde', ['V', '~'], 'ABCDE', 0), ('abcde\\na', ['l', 'V', '$', '~'],\n 'ABCDE\\na', 0), ('abcde\\na', ['l', 'V', '$', '~', 'j', '.'], 'ABCDE\\nA',\n 6), ('', ['V', '~'], '', 0)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [(\n 'abcde', ['V', '~'], 'ABCDE', 0), ('abcde\\na', ['l', 'V', '$', '~'],\n 'ABCDE\\na', 0), ('abcde\\na', ['l', 'V', '$', '~', 'j', '.'], 'ABCDE\\nA',\n 6), ('', ['V', '~'], '', 0)])\n", (10936, 11168), False, 'import pytest\n'), ((11729, 11936), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('', ['V', '%'], 0, [0, 0]), ('\\n', ['j', 'V', '%'], 1, [1, 1]), (' ()', [\n 'V', '%'], 2, [0, 3]), (' ()', ['V', '%', '%'], 1, [0, 3])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('', ['V',\n '%'], 0, [0, 0]), ('\\n', ['j', 'V', '%'], 1, [1, 1]), (' ()', ['V', '%'\n ], 2, [0, 3]), (' ()', ['V', '%', '%'], 1, [0, 3])])\n", (11752, 11936), False, 'import pytest\n'), ((12511, 12788), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('', ['V', 'f', 'r'], 0, [0, 0]), ('\\n', ['j', 'V', 'f', 'r'], 1, [1, 1]),\n (' rr', ['V', 'f', 'r'], 1, [0, 3]), (' rr', ['V', 'f', 'r', ';'], 2, [\n 0, 3]), (' rr', ['V', 'f', 'r', ';', ','], 1, [0, 3])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('', ['V',\n 'f', 'r'], 0, [0, 0]), ('\\n', ['j', 'V', 'f', 'r'], 1, [1, 1]), (' rr',\n ['V', 'f', 'r'], 1, [0, 3]), (' rr', ['V', 'f', 'r', ';'], 2, [0, 3]),\n (' rr', ['V', 'f', 'r', ';', ','], 1, [0, 3])])\n", (12534, 12788), False, 'import pytest\n'), ((13363, 13658), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('', ['V', 'F', 'r'], 0, [0, 0]), ('\\n', ['j', 'V', 'F', 'r'], 1, [1, 1]),\n (' rr ', ['V', '$', 'F', 'r'], 2, [0, 4]), (' rr ', ['V', '$', 'F', 'r',\n ';'], 1, [0, 4]), (' rr ', ['V', '$', 'F', 'r', ';', ','], 2, [0, 4])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('', ['V',\n 'F', 'r'], 0, [0, 0]), ('\\n', ['j', 'V', 'F', 'r'], 1, [1, 1]), (' rr ',\n ['V', '$', 'F', 'r'], 2, [0, 4]), (' rr ', ['V', '$', 'F', 'r', ';'], 1,\n [0, 4]), (' rr ', ['V', '$', 'F', 'r', ';', ','], 2, [0, 4])])\n", (13386, 13658), False, 'import pytest\n'), ((14233, 14566), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('', ['V', 't', 'r'], 0, [0, 0]), ('\\n', ['j', 'V', 't', 'r'], 1, [1, 1]),\n (' rr', ['V', 't', 'r'], 1, [0, 4]), (' rr', ['V', 't', 'r', ';'], 2,\n [0, 4]), (' rrrr', ['V', 't', 'r', '4;'], 4, [0, 6]), (' rrrr', ['V',\n 't', 'r', '4;', ','], 3, [0, 6])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('', ['V',\n 't', 'r'], 0, [0, 0]), ('\\n', ['j', 'V', 't', 'r'], 1, [1, 1]), (' rr',\n ['V', 't', 'r'], 1, [0, 4]), (' rr', ['V', 't', 'r', ';'], 2, [0, 4]),\n (' rrrr', ['V', 't', 'r', '4;'], 4, [0, 6]), (' rrrr', ['V', 't', 'r',\n '4;', ','], 3, [0, 6])])\n", (14256, 14566), False, 'import pytest\n'), ((15145, 15505), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('', ['V', 'T', 'r'], 0, [0, 0]), ('r\\n', ['j', 'V', 'T', 'r'], 2, [2, 2]),\n (' rr ', ['V', '$', 'T', 'r'], 4, [0, 6]), (' rr ', ['V', '$', 'T',\n 'r', ';'], 3, [0, 6]), (' rrrr', ['V', '$', 'T', 'r', '4;'], 3, [0, 6]\n ), (' rrrr', ['V', '$', 'T', 'r', '4;', ','], 4, [0, 6])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('', ['V',\n 'T', 'r'], 0, [0, 0]), ('r\\n', ['j', 'V', 'T', 'r'], 2, [2, 2]), (\n ' rr ', ['V', '$', 'T', 'r'], 4, [0, 6]), (' rr ', ['V', '$', 'T',\n 'r', ';'], 3, [0, 6]), (' rrrr', ['V', '$', 'T', 'r', '4;'], 3, [0, 6]\n ), (' rrrr', ['V', '$', 'T', 'r', '4;', ','], 4, [0, 6])])\n", (15168, 15505), False, 'import pytest\n'), ((16082, 16509), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('', ['V', 'r', 'r'], '', 0), ('1\\n', ['j', 'V', 'r', 'r'], '1\\n', 2), (\n '\\n\\na', ['j', 'V', 'r', 'r'], '\\n\\na', 1), ('a', ['V', 'r', 'r'], 'r',\n 0), (' a\\nbc\\n', ['l', 'V', 'j', 'r', 'r'], 'rr\\nrr\\n', 0), (\n ' a\\nbc\\nkk', ['l', 'V', 'j', 'r', 'r'], 'rr\\nrr\\nkk', 0), (\n ' a\\nbc\\nkk', ['l', 'V', 'j', 'r', 'r', 'j', '.'], 'rr\\nrr\\nrr', 3)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [('',\n ['V', 'r', 'r'], '', 0), ('1\\n', ['j', 'V', 'r', 'r'], '1\\n', 2), (\n '\\n\\na', ['j', 'V', 'r', 'r'], '\\n\\na', 1), ('a', ['V', 'r', 'r'], 'r',\n 0), (' a\\nbc\\n', ['l', 'V', 'j', 'r', 'r'], 'rr\\nrr\\n', 0), (\n ' a\\nbc\\nkk', ['l', 'V', 'j', 'r', 'r'], 'rr\\nrr\\nkk', 0), (\n ' a\\nbc\\nkk', ['l', 'V', 'j', 'r', 'r', 'j', '.'], 'rr\\nrr\\nrr', 3)])\n", (16105, 16509), False, 'import pytest\n'), ((17122, 17377), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('ABCDE', ['V', 'u'], 'abcde', 0), ('ABCDE\\nA', ['l', 'V', '$', 'u'],\n 'abcde\\nA', 0), ('ABCDE\\nA', ['l', 'V', '$', 'u', 'j', '.'], 'abcde\\na',\n 6), ('', ['V', 'u'], '', 0)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [(\n 'ABCDE', ['V', 'u'], 'abcde', 0), ('ABCDE\\nA', ['l', 'V', '$', 'u'],\n 'abcde\\nA', 0), ('ABCDE\\nA', ['l', 'V', '$', 'u', 'j', '.'], 'abcde\\na',\n 6), ('', ['V', 'u'], '', 0)])\n", (17145, 17377), False, 'import pytest\n'), ((17906, 18181), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('ABCDE', ['V', 'g', 'u'], 'abcde', 0), ('ABCDE\\nA', ['l', 'V', '$', 'g',\n 'u'], 'abcde\\nA', 0), ('ABCDE\\nA', ['l', 'V', '$', 'g', 'u', 'j', '.'],\n 'abcde\\na', 6), ('', ['V', 'g', 'u'], '', 0)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [(\n 'ABCDE', ['V', 'g', 'u'], 'abcde', 0), ('ABCDE\\nA', ['l', 'V', '$', 'g',\n 'u'], 'abcde\\nA', 0), ('ABCDE\\nA', ['l', 'V', '$', 'g', 'u', 'j', '.'],\n 'abcde\\na', 6), ('', ['V', 'g', 'u'], '', 0)])\n", (17929, 18181), False, 'import pytest\n'), ((18712, 18967), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('abcde', ['V', 'U'], 'ABCDE', 0), ('abcde\\na', ['l', 'V', '$', 'U'],\n 'ABCDE\\na', 0), ('abcde\\na', ['l', 'V', '$', 'U', 'j', '.'], 'ABCDE\\nA',\n 6), ('', ['V', 'U'], '', 0)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [(\n 'abcde', ['V', 'U'], 'ABCDE', 0), ('abcde\\na', ['l', 'V', '$', 'U'],\n 'ABCDE\\na', 0), ('abcde\\na', ['l', 'V', '$', 'U', 'j', '.'], 'ABCDE\\nA',\n 6), ('', ['V', 'U'], '', 0)])\n", (18735, 18967), False, 'import pytest\n'), ((19496, 19771), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('abcde', ['V', 'g', 'U'], 'ABCDE', 0), ('abcde\\na', ['l', 'V', '$', 'g',\n 'U'], 'ABCDE\\na', 0), ('abcde\\na', ['l', 'V', '$', 'g', 'U', 'j', '.'],\n 'ABCDE\\nA', 6), ('', ['V', 'g', 'U'], '', 0)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [(\n 'abcde', ['V', 'g', 'U'], 'ABCDE', 0), ('abcde\\na', ['l', 'V', '$', 'g',\n 'U'], 'ABCDE\\na', 0), ('abcde\\na', ['l', 'V', '$', 'g', 'U', 'j', '.'],\n 'ABCDE\\nA', 6), ('', ['V', 'g', 'U'], '', 0)])\n", (19519, 19771), False, 'import pytest\n'), ((20302, 20577), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('abCde', ['V', 'g', '~'], 'ABcDE', 0), ('abCde\\na', ['l', 'V', '$', 'g',\n '~'], 'ABcDE\\na', 0), ('abCde\\na', ['l', 'V', '$', 'g', '~', 'j', '.'],\n 'ABcDE\\nA', 6), ('', ['V', 'g', '~'], '', 0)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [(\n 'abCde', ['V', 'g', '~'], 'ABcDE', 0), ('abCde\\na', ['l', 'V', '$', 'g',\n '~'], 'ABcDE\\na', 0), ('abCde\\na', ['l', 'V', '$', 'g', '~', 'j', '.'],\n 'ABcDE\\nA', 6), ('', ['V', 'g', '~'], '', 0)])\n", (20325, 20577), False, 'import pytest\n'), ((21112, 21363), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('', ['V', '>'], '', 0), ('abcde', ['2l', 'V', '>'], ' abcde', 4), (\n ' abcde\\na', ['V', '>'], ' abcde\\na', 5), ('a\\n\\na', ['V', '2j',\n '>'], ' a\\n\\n a', 4)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [('',\n ['V', '>'], '', 0), ('abcde', ['2l', 'V', '>'], ' abcde', 4), (\n ' abcde\\na', ['V', '>'], ' abcde\\na', 5), ('a\\n\\na', ['V', '2j',\n '>'], ' a\\n\\n a', 4)])\n", (21135, 21363), False, 'import pytest\n'), ((21899, 22150), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, text_expected, cursor_pos"""', "[('', ['V', '<'], '', 0), (' abcde', ['2l', 'V', '<'], 'abcde', 0), (\n ' abcde\\na', ['V', '<'], ' abcde\\na', 1), (' a\\n\\n a', ['V',\n '2j', '<'], 'a\\n\\na', 0)]"], {}), "('text, cmd_list, text_expected, cursor_pos', [('',\n ['V', '<'], '', 0), (' abcde', ['2l', 'V', '<'], 'abcde', 0), (\n ' abcde\\na', ['V', '<'], ' abcde\\na', 1), (' a\\n\\n a', ['V',\n '2j', '<'], 'a\\n\\na', 0)])\n", (21922, 22150), False, 'import pytest\n'), ((22683, 22924), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, register_name, text_yanked"""', '[(\'a\', [\'V\', \'y\'], 0, \'"\', \'a\\n\'), (\'abcd\', [\'V\', \'"\', \'0\', \'y\'], 0, \'0\',\n \'abcd\\n\'), (\'abcd\\ne\', [\'V\', \'j\', \'"\', \'a\', \'y\'], 0, \'a\', \'abcd\\ne\\n\')]'], {}), '(\n \'text, cmd_list, cursor_pos, register_name, text_yanked\', [(\'a\', [\'V\',\n \'y\'], 0, \'"\', \'a\\n\'), (\'abcd\', [\'V\', \'"\', \'0\', \'y\'], 0, \'0\', \'abcd\\n\'),\n (\'abcd\\ne\', [\'V\', \'j\', \'"\', \'a\', \'y\'], 0, \'a\', \'abcd\\ne\\n\')])\n', (22706, 22924), False, 'import pytest\n'), ((23701, 24203), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, text_expected"""', '[(\'ak\', [\'V\', \'p\'], 0, \'\'), (\'ak\', [\'v\', \'l\', \'y\', \'V\', \'p\'], 0, \'ak\'), (\n \'ak\', [\'v\', \'l\', \'y\', \'V\', \'P\'], 0, \'ak\'), (\'ak\', [\'v\', \'l\', \'y\', \'V\',\n \'2p\'], 0, \'ak\\nak\'), (\'ab\\n\\ncd\\n\', [\'v\', \'j\', \'y\', \'2j\', \'V\', \'2p\'], 4,\n """ab\n\nab\n\n\nab\n\n\n"""), (\'ab\\ncd\\nef\\ngh\\n\', [\'V\', \'j\', \'y\', \'2j\', \'V\',\n \'p\'], 6, """ab\ncd\nab\ncd\ngh\n"""), (\'ab\\ncd\\nef\\ngh\\n\', [\'V\', \'j\', \'y\',\n \'2j\', \'V\', \'2p\'], 6, """ab\ncd\nab\ncd\nab\ncd\ngh\n""")]'], {}), '(\'text, cmd_list, cursor_pos, text_expected\', [(\'ak\',\n [\'V\', \'p\'], 0, \'\'), (\'ak\', [\'v\', \'l\', \'y\', \'V\', \'p\'], 0, \'ak\'), (\'ak\',\n [\'v\', \'l\', \'y\', \'V\', \'P\'], 0, \'ak\'), (\'ak\', [\'v\', \'l\', \'y\', \'V\', \'2p\'],\n 0, \'ak\\nak\'), (\'ab\\n\\ncd\\n\', [\'v\', \'j\', \'y\', \'2j\', \'V\', \'2p\'], 4,\n """ab\n\nab\n\n\nab\n\n\n"""), (\'ab\\ncd\\nef\\ngh\\n\', [\'V\', \'j\', \'y\', \'2j\', \'V\',\n \'p\'], 6, """ab\ncd\nab\ncd\ngh\n"""), (\'ab\\ncd\\nef\\ngh\\n\', [\'V\', \'j\', \'y\',\n \'2j\', \'V\', \'2p\'], 6, """ab\ncd\nab\ncd\nab\ncd\ngh\n""")])\n', (23724, 24203), False, 'import pytest\n'), ((24756, 25234), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked"""', '[(\'ab\', [\'V\', \'d\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'d\'],\n 5, \' ab\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'d\'], 0, \'\',\n \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'d\'], 5, \' ab\\n cd\',\n \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'d\'], 1, \' ab\', \'"\',\n \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'d\'], 1, \' cd\', \'"\', \' ab\\n\')]'], {}), '(\n \'text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked\', [(\n \'ab\', [\'V\', \'d\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'d\'\n ], 5, \' ab\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'d\'], 0, \'\',\n \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'d\'], 5, \' ab\\n cd\',\n \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'d\'], 1, \' ab\', \'"\',\n \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'d\'], 1, \' cd\', \'"\', \' ab\\n\')])\n', (24779, 25234), False, 'import pytest\n'), ((25884, 26362), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked"""', '[(\'ab\', [\'V\', \'x\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'x\'],\n 5, \' ab\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'x\'], 0, \'\',\n \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'x\'], 5, \' ab\\n cd\',\n \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'x\'], 1, \' ab\', \'"\',\n \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'x\'], 1, \' cd\', \'"\', \' ab\\n\')]'], {}), '(\n \'text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked\', [(\n \'ab\', [\'V\', \'x\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'x\'\n ], 5, \' ab\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'x\'], 0, \'\',\n \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'x\'], 5, \' ab\\n cd\',\n \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'x\'], 1, \' ab\', \'"\',\n \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'x\'], 1, \' cd\', \'"\', \' ab\\n\')])\n', (25907, 26362), False, 'import pytest\n'), ((27012, 27502), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked"""', '[(\'ab\', [\'V\', \'c\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'c\'],\n 4, \' ab\\n\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'c\'], 0, \'\',\n \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'c\'], 8,\n \' ab\\n cd\\n\', \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'c\'], 4,\n \' ab\\n\', \'"\', \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'c\'], 0, \'\\n cd\',\n \'"\', \' ab\\n\')]'], {}), '(\n \'text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked\', [(\n \'ab\', [\'V\', \'c\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'c\'\n ], 4, \' ab\\n\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'c\'], 0,\n \'\', \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'c\'], 8,\n \' ab\\n cd\\n\', \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'c\'], 4,\n \' ab\\n\', \'"\', \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'c\'], 0, \'\\n cd\',\n \'"\', \' ab\\n\')])\n', (27035, 27502), False, 'import pytest\n'), ((28148, 28638), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked"""', '[(\'ab\', [\'V\', \'s\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'s\'],\n 4, \' ab\\n\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'s\'], 0, \'\',\n \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'s\'], 8,\n \' ab\\n cd\\n\', \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'s\'], 4,\n \' ab\\n\', \'"\', \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'s\'], 0, \'\\n cd\',\n \'"\', \' ab\\n\')]'], {}), '(\n \'text, cmd_list, cursor_pos, text_expected, reg_name, text_yanked\', [(\n \'ab\', [\'V\', \'s\'], 0, \'\', \'"\', \'ab\\n\'), (\' ab\\n cd\\n ef\', [\'j\', \'V\', \'s\'\n ], 4, \' ab\\n\\n ef\', \'"\', \' cd\\n\'), (\' ab\\n cd\\n\', [\'V\', \'G\', \'s\'], 0,\n \'\', \'"\', \' ab\\n cd\\n\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'s\'], 8,\n \' ab\\n cd\\n\', \'"\', \' ef\\n\'), (\' ab\\n cd\\n ef\', [\'2j\', \'V\', \'k\', \'s\'], 4,\n \' ab\\n\', \'"\', \' cd\\n ef\\n\'), (\' ab\\n cd\', [\'$\', \'V\', \'s\'], 0, \'\\n cd\',\n \'"\', \' ab\\n\')])\n', (28171, 28638), False, 'import pytest\n'), ((29284, 29768), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', '[(\'a\', [\'V\', \'/\', \'b\', \'\\r\'], 0, [0, 1]), (\'a\', [\'V\', \'/\', \'b\', \'\\r\', \'n\'],\n 0, [0, 1]), (""" dhr\n dhrwodn\n\ndhrwodn\n dhrwodn""", [\'V\', \'/\', \'d\',\n \'h\', \'r\', Qt.Key_Enter], 1, [0, 4]), (\n """ dhr\n dhrwodn\n\ndhrwodn\n dhrwodn""", [\'V\', \'/\', \'d\', \'h\', \'r\', Qt.\n Key_Enter, \'n\'], 7, [0, 14]), (""" dhr\n dhrwodn\n\ndhrwodn\n dhrwodn""",\n [\'V\', \'/\', \'d\', \'h\', \'r\', Qt.Key_Return, \'n\', \'N\'], 1, [0, 4])]'], {}), '(\'text, cmd_list, cursor_pos, sel_pos\', [(\'a\', [\'V\',\n \'/\', \'b\', \'\\r\'], 0, [0, 1]), (\'a\', [\'V\', \'/\', \'b\', \'\\r\', \'n\'], 0, [0, 1\n ]), (""" dhr\n dhrwodn\n\ndhrwodn\n dhrwodn""", [\'V\', \'/\', \'d\', \'h\', \'r\',\n Qt.Key_Enter], 1, [0, 4]), (""" dhr\n dhrwodn\n\ndhrwodn\n dhrwodn""", [\n \'V\', \'/\', \'d\', \'h\', \'r\', Qt.Key_Enter, \'n\'], 7, [0, 14]), (\n """ dhr\n dhrwodn\n\ndhrwodn\n dhrwodn""", [\'V\', \'/\', \'d\', \'h\', \'r\', Qt.\n Key_Return, \'n\', \'N\'], 1, [0, 4])])\n', (29307, 29768), False, 'import pytest\n'), ((30425, 30537), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01 34', ['V', Qt.Key_Space], 1, [0, 5])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('01 34', [\n 'V', Qt.Key_Space], 1, [0, 5])])\n", (30448, 30537), False, 'import pytest\n'), ((31271, 31393), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01 34', ['2l', 'V', Qt.Key_Backspace], 1, [0, 5])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [('01 34', [\n '2l', 'V', Qt.Key_Backspace], 1, [0, 5])])\n", (31294, 31393), False, 'import pytest\n'), ((32052, 32172), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, cmd_list, cursor_pos, sel_pos"""', "[('01 34\\n kj', ['V', Qt.Key_Enter], 9, [0, 11])]"], {}), "('text, cmd_list, cursor_pos, sel_pos', [(\n '01 34\\n kj', ['V', Qt.Key_Enter], 9, [0, 11])])\n", (32075, 32172), False, 'import pytest\n'), ((30716, 30758), 'spyder.config.manager.CONF.set', 'CONF.set', (['CONF_SECTION', '"""leader_key"""', '"""F1"""'], {}), "(CONF_SECTION, 'leader_key', 'F1')\n", (30724, 30758), False, 'from spyder.config.manager import CONF\n')]
|
from typing import Optional, List
from pydantic import Field, SecretStr, validator
from pystratis.api import Model
from pystratis.core import Outpoint, Recipient
from pystratis.core.types import Address, Money
# noinspection PyUnresolvedReferences
class BuildTransactionRequest(Model):
"""A request model for the smartcontracts/build-transaction endpoint.
Args:
sender (Address): The sender address.
fee_amount (Money, optional): The fee amount.
password (SecretStr): The password.
segwit_change_address (bool, optional): If the change address is a segwit address. Default=False.
wallet_name (str): The wallet name.
account_name (str, optional): The account name. Default='account 0'.
        outpoints (List[Outpoint]): A list of the outpoints used to construct the transaction.
recipients (List[Recipient]): A list of the recipients, including amounts, for the transaction.
op_return_data (str, optional): OP_RETURN data to include with the transaction.
op_return_amount (Money, optional): Amount to burn in the OP_RETURN transaction.
fee_type (str, optional): low, medium, or high.
        allow_unconfirmed (bool, optional): If True, allow unconfirmed transactions in the estimation. Default=False.
shuffle_outputs (bool, optional): If True, shuffles outputs. Default=False.
        change_address (Address, optional): Sends the output sum, less the amount sent to recipients, to this designated change address, if provided.
Notes:
Both fee_type and fee_amount cannot be set.
"""
sender: Address
fee_amount: Optional[Money] = Field(alias='feeAmount')
password: SecretStr
segwit_change_address: Optional[bool] = Field(default=False, alias='segwitChangeAddress')
wallet_name: str = Field(alias='walletName')
account_name: Optional[str] = Field(default='account 0', alias='accountName')
outpoints: Optional[List[Outpoint]]
recipients: List[Recipient]
op_return_data: Optional[str] = Field(alias='opReturnData')
op_return_amount: Optional[Money] = Field(alias='opReturnAmount')
fee_type: Optional[str] = Field(alias='feeType')
allow_unconfirmed: Optional[bool] = Field(default=False, alias='allowUnconfirmed')
shuffle_outputs: Optional[bool] = Field(default=False, alias='shuffleOutputs')
change_address: Optional[Address] = Field(alias='changeAddress')
# noinspection PyMethodParameters,PyUnusedLocal
@validator('fee_type')
def validate_fee_type(cls, v, values):
allowed = [
'low',
'medium',
'high'
]
if v is not None and v not in allowed:
raise ValueError(f'Invalid command. Must be: {allowed}')
if v is not None and values['fee_amount'] is not None:
raise ValueError('Both fee_type and fee_amount cannot be set.')
return v
# noinspection PyMethodParameters,PyUnusedLocal
@validator('fee_amount', always=True)
def check_fee_too_high(cls, v, values):
if v is not None:
if v > Money(1):
raise ValueError('Fee should not be more than 1. Check parameters.')
return v
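# Usage sketch (added for illustration; not part of the original module). The
# sender/recipient values below are hypothetical placeholders. Per the
# validators above, supplying both fee_type and fee_amount raises a pydantic
# ValidationError, and a fee_amount above Money(1) is rejected as well:
#
#   from pydantic import ValidationError
#   try:
#       BuildTransactionRequest(
#           sender=sender_address,        # hypothetical Address instance
#           password='hunter2',
#           wallet_name='mywallet',
#           recipients=[recipient],       # hypothetical Recipient instance
#           fee_type='low',
#           fee_amount=Money(0.5),        # conflicts with fee_type
#       )
#   except ValidationError as err:
#       print(err)  # includes 'Both fee_type and fee_amount cannot be set.'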
|
[
"pydantic.Field",
"pydantic.validator",
"pystratis.core.types.Money"
] |
[((1646, 1670), 'pydantic.Field', 'Field', ([], {'alias': '"""feeAmount"""'}), "(alias='feeAmount')\n", (1651, 1670), False, 'from pydantic import Field, SecretStr, validator\n'), ((1739, 1788), 'pydantic.Field', 'Field', ([], {'default': '(False)', 'alias': '"""segwitChangeAddress"""'}), "(default=False, alias='segwitChangeAddress')\n", (1744, 1788), False, 'from pydantic import Field, SecretStr, validator\n'), ((1812, 1837), 'pydantic.Field', 'Field', ([], {'alias': '"""walletName"""'}), "(alias='walletName')\n", (1817, 1837), False, 'from pydantic import Field, SecretStr, validator\n'), ((1872, 1919), 'pydantic.Field', 'Field', ([], {'default': '"""account 0"""', 'alias': '"""accountName"""'}), "(default='account 0', alias='accountName')\n", (1877, 1919), False, 'from pydantic import Field, SecretStr, validator\n'), ((2028, 2055), 'pydantic.Field', 'Field', ([], {'alias': '"""opReturnData"""'}), "(alias='opReturnData')\n", (2033, 2055), False, 'from pydantic import Field, SecretStr, validator\n'), ((2096, 2125), 'pydantic.Field', 'Field', ([], {'alias': '"""opReturnAmount"""'}), "(alias='opReturnAmount')\n", (2101, 2125), False, 'from pydantic import Field, SecretStr, validator\n'), ((2156, 2178), 'pydantic.Field', 'Field', ([], {'alias': '"""feeType"""'}), "(alias='feeType')\n", (2161, 2178), False, 'from pydantic import Field, SecretStr, validator\n'), ((2219, 2265), 'pydantic.Field', 'Field', ([], {'default': '(False)', 'alias': '"""allowUnconfirmed"""'}), "(default=False, alias='allowUnconfirmed')\n", (2224, 2265), False, 'from pydantic import Field, SecretStr, validator\n'), ((2304, 2348), 'pydantic.Field', 'Field', ([], {'default': '(False)', 'alias': '"""shuffleOutputs"""'}), "(default=False, alias='shuffleOutputs')\n", (2309, 2348), False, 'from pydantic import Field, SecretStr, validator\n'), ((2389, 2417), 'pydantic.Field', 'Field', ([], {'alias': '"""changeAddress"""'}), "(alias='changeAddress')\n", (2394, 2417), False, 'from pydantic import Field, SecretStr, validator\n'), ((2476, 2497), 'pydantic.validator', 'validator', (['"""fee_type"""'], {}), "('fee_type')\n", (2485, 2497), False, 'from pydantic import Field, SecretStr, validator\n'), ((2961, 2997), 'pydantic.validator', 'validator', (['"""fee_amount"""'], {'always': '(True)'}), "('fee_amount', always=True)\n", (2970, 2997), False, 'from pydantic import Field, SecretStr, validator\n'), ((3087, 3095), 'pystratis.core.types.Money', 'Money', (['(1)'], {}), '(1)\n', (3092, 3095), False, 'from pystratis.core.types import Address, Money\n')]
|
import networkx as nx
import matplotlib.pyplot as plt
import pylab
import pickle
import numpy as np
import common
def draw_transition_table(transition_table, cluster_centers, meanscreen, tsne, color, black_edges=None, red_edges=None, title=None):
G = nx.DiGraph()
edge_colors = []
if red_edges is not None:
for e in red_edges:
G.add_edges_from([e], weight=np.round(transition_table[e[0],e[1]]*100)/100)
edge_colors.append('red')
if black_edges is not None:
if red_edges is not None:
black_edges = list(set(black_edges)-set(red_edges))
for e in black_edges:
G.add_edges_from([e], weight=np.round(transition_table[e[0],e[1]]*100)/100)
edge_colors.append('black')
edge_labels=dict([((u,v,),d['weight']) for u,v,d in G.edges(data=True)])
    node_labels = {node: node for node in G.nodes()}
counter=0
for key in node_labels.keys():
node_labels[key] = counter
counter+=1
if title is None:
fig = plt.figure('SMDP')
fig.clear()
else:
fig = plt.figure(title)
    plt.scatter(tsne[:, 0], tsne[:, 1], s=np.ones(tsne.shape[0])*2, facecolor=color, edgecolor='none')
pos = cluster_centers[:,0:2]
nx.draw_networkx_edge_labels(G,pos,edge_labels=edge_labels,label_pos=0.65,font_size=9)
nx.draw_networkx_labels(G, pos, labels=node_labels,font_color='w',font_size=8)
nx.draw(G,pos,cmap=plt.cm.brg,edge_color=edge_colors)
######Present images on nodes
ax = plt.subplot(111)
plt.axis('off')
trans = ax.transData.transform
trans2 = fig.transFigure.inverted().transform
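    # (added note) trans maps data coordinates to display pixels and trans2
    # maps display pixels back to figure-fraction coordinates; composing them
    # lets each node's image axes be placed exactly at that node's position.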
cut = 1.01
xmax = cut * max(tsne[:,0])
ymax = cut * max(tsne[:,1])
xmin = cut * min(tsne[:,0])
ymin = cut * min(tsne[:,1])
plt.xlim(xmin, xmax)
plt.ylim(ymin, ymax)
h = 70.0
w = 70.0
counter= 0
for node in G:
xx, yy = trans(pos[node])
# axes coordinates
xa, ya = trans2((xx, yy))
# this is the image size
piesize_1 = (300.0 / (h*80))
piesize_2 = (300.0 / (w*80))
p2_2 = piesize_2 / 2
p2_1 = piesize_1 / 2
a = plt.axes([xa - p2_2, ya - p2_1, piesize_2, piesize_1])
G.node[node]['image'] = meanscreen[counter]
#display it
a.imshow(G.node[node]['image'])
a.set_title(node_labels[counter])
#turn off the axis from minor plot
a.axis('off')
counter+=1
plt.draw()
def draw_transition_table_no_image(transition_table,cluster_centers):
G = nx.DiGraph()
G2 = nx.DiGraph()
# print transition_table.sum(axis=1)
transition_table = (transition_table.transpose()/transition_table.sum(axis=1)).transpose()
transition_table[np.isnan(transition_table)]=0
# print(transition_table)
# transition_table = (transition_table.transpose()/transition_table.sum(axis=1)).transpose()
# print transition_table
# print transition_table.sum(axis=0)
# assert(np.all(transition_table.sum(axis=0)!=0))
transition_table[transition_table<0.1]=0
pos = cluster_centers[:,0:2]
m,n = transition_table.shape
for i in range(m):
for j in range(n):
if transition_table[i,j]!=0:
G.add_edges_from([(i, j)], weight=np.round(transition_table[i,j]*100)/100)
G2.add_edges_from([(i, j)], weight=np.round(transition_table[i,j]*100)/100)
values = cluster_centers[:,2]
red_edges = []
edges_sizes =[]
for i in range(n):
trans = transition_table[i,:]
indices = (trans!=0)
index = np.argmax(cluster_centers[indices,2])
counter = 0
for j in range(len(indices)):
if indices[j]:
if counter == index:
ind = j
break
else:
counter+=1
edges_sizes.append(ind)
red_edges.append((i,ind))
# print(red_edges)
# sizes = 3000*cluster_centers[:,3]
sizes = np.ones_like(values)*500
edge_labels=dict([((u,v,),d['weight']) for u,v,d in G.edges(data=True)])
edge_colors = ['black' for edge in G.edges()]
# edge_colors = ['black' if not edge in red_edges else 'red' for edge in G.edges()]
    node_labels = {node: node for node in G.nodes()}
counter=0
for key in node_labels.keys():
# node_labels[key] = np.round(100*cluster_centers[counter,3])/100
node_labels[key] = counter
counter+=1
fig = plt.figure()
nx.draw_networkx_edge_labels(G,pos,edge_labels=edge_labels,label_pos=0.65,font_size=9)
nx.draw_networkx_labels(G, pos, labels=node_labels,font_color='w',font_size=8)
nx.draw(G,pos, node_color = values,cmap=plt.cm.brg, node_size=np.round(sizes),edge_color=edge_colors,edge_cmap=plt.cm.Reds)
######Present images on nodes
# plt.show()
#
def test():
gamename = 'breakout' #breakout pacman
transition_table = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'transition_table.bin'))
cluster_centers = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'cluster_centers.bin'))
cluster_std = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'cluster_std.bin'))
cluster_med = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'cluster_med.bin'))
cluster_min = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'cluster_min.bin'))
cluster_max = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'cluster_max.bin'))
meanscreen = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'meanscreen.bin'))
cluster_time = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'cluster_time.bin'))
tsne = common.load_hdf5('lowd_activations', 'data/' + 'breakout' + '/'+'120k/')
q_hdf5 = common.load_hdf5('qvals', 'data/' + 'breakout' + '/'+'120k/')
num_frames = 120000
V = np.zeros(shape=(num_frames))
for i in range(0,num_frames):
V[i] = max(q_hdf5[i])
V = V/V.max()
    draw_transition_table(transition_table, cluster_centers, meanscreen, tsne, V)
plt.show()
# test()
# stdscreen = pickle.load(file('/home/tom/git/graying_the_box/data/'+gamename+'/120k' + '/knn/' + 'stdscreen.bin'))
# #
# a = 1
# b = 0
# c = 0
# screen = a*meanscreen + c*stdscreen
# facecolor = self.color,
# edgecolor='none',picker=5)
# draw_transition_table_no_image(transition_table,cluster_centers)
# transition_table = pickle.load(file('/home/tom/git/graying_the_box/data/seaquest/120k' + '/knn/' + 'transition_table.bin'))
# transition_table[transition_table<0.1]=0
# cluster_centers = pickle.load(file('/home/tom/git/graying_the_box/data/seaquest/120k' + '/knn/' + 'cluster_centers.bin'))
# pos2 = np.zeros(shape=(cluster_centers.shape[0],2))
# pos2[:,0] = cluster_time[:,0]
# pos2[:,1] = cluster_centers[:,1]
# plt.figure()
# nx.draw_networkx_edge_labels(G2,pos2,edge_labels=edge_labels,label_pos=0.8,font_size=8)
# nx.draw_networkx_labels(G2, pos2, labels=node_labels,font_color='w',font_size=8)
# nx.draw(G2,pos2, node_color = values,cmap=plt.cm.brg, node_size=np.round(sizes),edge_color=edge_colors,edge_cmap=plt.cm.Reds)
|
[
"numpy.argmax",
"matplotlib.pyplot.axes",
"numpy.ones",
"numpy.isnan",
"matplotlib.pyplot.figure",
"networkx.draw_networkx_labels",
"networkx.draw_networkx_edge_labels",
"numpy.round",
"matplotlib.pyplot.draw",
"common.load_hdf5",
"matplotlib.pyplot.show",
"numpy.ones_like",
"matplotlib.pyplot.ylim",
"networkx.draw",
"networkx.DiGraph",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.xlim",
"numpy.zeros",
"matplotlib.pyplot.axis"
] |
[((257, 269), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (267, 269), True, 'import networkx as nx\n'), ((1258, 1353), 'networkx.draw_networkx_edge_labels', 'nx.draw_networkx_edge_labels', (['G', 'pos'], {'edge_labels': 'edge_labels', 'label_pos': '(0.65)', 'font_size': '(9)'}), '(G, pos, edge_labels=edge_labels, label_pos=\n 0.65, font_size=9)\n', (1286, 1353), True, 'import networkx as nx\n'), ((1349, 1434), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['G', 'pos'], {'labels': 'node_labels', 'font_color': '"""w"""', 'font_size': '(8)'}), "(G, pos, labels=node_labels, font_color='w', font_size=8\n )\n", (1372, 1434), True, 'import networkx as nx\n'), ((1432, 1488), 'networkx.draw', 'nx.draw', (['G', 'pos'], {'cmap': 'plt.cm.brg', 'edge_color': 'edge_colors'}), '(G, pos, cmap=plt.cm.brg, edge_color=edge_colors)\n', (1439, 1488), True, 'import networkx as nx\n'), ((1531, 1547), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (1542, 1547), True, 'import matplotlib.pyplot as plt\n'), ((1552, 1567), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (1560, 1567), True, 'import matplotlib.pyplot as plt\n'), ((1800, 1820), 'matplotlib.pyplot.xlim', 'plt.xlim', (['xmin', 'xmax'], {}), '(xmin, xmax)\n', (1808, 1820), True, 'import matplotlib.pyplot as plt\n'), ((1825, 1845), 'matplotlib.pyplot.ylim', 'plt.ylim', (['ymin', 'ymax'], {}), '(ymin, ymax)\n', (1833, 1845), True, 'import matplotlib.pyplot as plt\n'), ((2478, 2488), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (2486, 2488), True, 'import matplotlib.pyplot as plt\n'), ((2569, 2581), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (2579, 2581), True, 'import networkx as nx\n'), ((2591, 2603), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (2601, 2603), True, 'import networkx as nx\n'), ((4504, 4516), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4514, 4516), True, 'import matplotlib.pyplot as plt\n'), ((4521, 4616), 'networkx.draw_networkx_edge_labels', 'nx.draw_networkx_edge_labels', (['G', 'pos'], {'edge_labels': 'edge_labels', 'label_pos': '(0.65)', 'font_size': '(9)'}), '(G, pos, edge_labels=edge_labels, label_pos=\n 0.65, font_size=9)\n', (4549, 4616), True, 'import networkx as nx\n'), ((4612, 4697), 'networkx.draw_networkx_labels', 'nx.draw_networkx_labels', (['G', 'pos'], {'labels': 'node_labels', 'font_color': '"""w"""', 'font_size': '(8)'}), "(G, pos, labels=node_labels, font_color='w', font_size=8\n )\n", (4635, 4697), True, 'import networkx as nx\n'), ((5935, 6009), 'common.load_hdf5', 'common.load_hdf5', (['"""lowd_activations"""', "('data/' + 'breakout' + '/' + '120k/')"], {}), "('lowd_activations', 'data/' + 'breakout' + '/' + '120k/')\n", (5951, 6009), False, 'import common\n'), ((6021, 6084), 'common.load_hdf5', 'common.load_hdf5', (['"""qvals"""', "('data/' + 'breakout' + '/' + '120k/')"], {}), "('qvals', 'data/' + 'breakout' + '/' + '120k/')\n", (6037, 6084), False, 'import common\n'), ((6125, 6151), 'numpy.zeros', 'np.zeros', ([], {'shape': 'num_frames'}), '(shape=num_frames)\n', (6133, 6151), True, 'import numpy as np\n'), ((6332, 6342), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6340, 6342), True, 'import matplotlib.pyplot as plt\n'), ((1040, 1058), 'matplotlib.pyplot.figure', 'plt.figure', (['"""SMDP"""'], {}), "('SMDP')\n", (1050, 1058), True, 'import matplotlib.pyplot as plt\n'), ((1103, 1120), 'matplotlib.pyplot.figure', 'plt.figure', (['title'], {}), '(title)\n', (1113, 1120), True, 'import 
matplotlib.pyplot as plt\n'), ((2181, 2235), 'matplotlib.pyplot.axes', 'plt.axes', (['[xa - p2_2, ya - p2_1, piesize_2, piesize_1]'], {}), '([xa - p2_2, ya - p2_1, piesize_2, piesize_1])\n', (2189, 2235), True, 'import matplotlib.pyplot as plt\n'), ((2763, 2789), 'numpy.isnan', 'np.isnan', (['transition_table'], {}), '(transition_table)\n', (2771, 2789), True, 'import numpy as np\n'), ((3611, 3649), 'numpy.argmax', 'np.argmax', (['cluster_centers[indices, 2]'], {}), '(cluster_centers[indices, 2])\n', (3620, 3649), True, 'import numpy as np\n'), ((4019, 4039), 'numpy.ones_like', 'np.ones_like', (['values'], {}), '(values)\n', (4031, 4039), True, 'import numpy as np\n'), ((4757, 4772), 'numpy.round', 'np.round', (['sizes'], {}), '(sizes)\n', (4765, 4772), True, 'import numpy as np\n'), ((1161, 1183), 'numpy.ones', 'np.ones', (['tsne.shape[0]'], {}), '(tsne.shape[0])\n', (1168, 1183), True, 'import numpy as np\n'), ((391, 435), 'numpy.round', 'np.round', (['(transition_table[e[0], e[1]] * 100)'], {}), '(transition_table[e[0], e[1]] * 100)\n', (399, 435), True, 'import numpy as np\n'), ((679, 723), 'numpy.round', 'np.round', (['(transition_table[e[0], e[1]] * 100)'], {}), '(transition_table[e[0], e[1]] * 100)\n', (687, 723), True, 'import numpy as np\n'), ((3298, 3336), 'numpy.round', 'np.round', (['(transition_table[i, j] * 100)'], {}), '(transition_table[i, j] * 100)\n', (3306, 3336), True, 'import numpy as np\n'), ((3390, 3428), 'numpy.round', 'np.round', (['(transition_table[i, j] * 100)'], {}), '(transition_table[i, j] * 100)\n', (3398, 3428), True, 'import numpy as np\n')]
|
import json
import os
import sys
if len(sys.argv) != 2:
sys.exit("invalid args")
def loadTemplate():
with open("template.hpp", "r") as file:
return file.read()
def loadBlocks():
with open("blocks.json", "r") as file:
data = file.read()
return json.loads(data)
def constantNameFromBlockName(name):
return "BLOCKID_" + name.upper().replace(":", "_")
def generateConstants(blocks):
str = ""
for name, entry in blocks.items():
for state in entry["states"]:
if "default" in state and state["default"]:
str += "\n\tconstexpr BlockId {} = {};".format(constantNameFromBlockName(name), state["id"])
return str
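# Illustration (added; the real blocks.json ships with the project). Given an
# entry shaped like
#   {"minecraft:stone": {"states": [{"id": 1, "default": true}]}}
# generateConstants emits one line per default state:
#   \tconstexpr BlockId BLOCKID_MINECRAFT_STONE = 1;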
blockConstantOutput = loadTemplate().replace("$$$", generateConstants(loadBlocks()))
outputDirPath = os.path.join(sys.argv[1], "generated")
os.makedirs(outputDirPath, exist_ok=True)
outputFilePath = os.path.join(outputDirPath, "ids.hpp")
with open(outputFilePath, "w") as file:
file.write(blockConstantOutput)
|
[
"os.path.join",
"os.makedirs",
"sys.exit",
"json.loads"
] |
[((733, 771), 'os.path.join', 'os.path.join', (['sys.argv[1]', '"""generated"""'], {}), "(sys.argv[1], 'generated')\n", (745, 771), False, 'import os\n'), ((772, 813), 'os.makedirs', 'os.makedirs', (['outputDirPath'], {'exist_ok': '(True)'}), '(outputDirPath, exist_ok=True)\n', (783, 813), False, 'import os\n'), ((832, 870), 'os.path.join', 'os.path.join', (['outputDirPath', '"""ids.hpp"""'], {}), "(outputDirPath, 'ids.hpp')\n", (844, 870), False, 'import os\n'), ((58, 82), 'sys.exit', 'sys.exit', (['"""invalid args"""'], {}), "('invalid args')\n", (66, 82), False, 'import sys\n'), ((255, 271), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (265, 271), False, 'import json\n')]
|
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# pylint: disable=import-error,no-name-in-module,no-member
from test.utils import LinearOpr
import megengine as mge
import megengine.module as M
import numpy as np
from megengine.core.tensor import dtype
from megengine.core.tensor.dtype import _builtin_quant_dtypes
from megengine.module.quant_dequant import QuantStub
from megengine.quantization.quantize import quantize_qat
from megengine.quantization.utils import create_qparams
from megengine.traced_module.fake_quant import FakeQuantize
from .test_caffe import _test_convert_result
from .tm_utils import get_traced_module
max_err = 1e-6
def get_qat_net(inp_dtype, net, num_inp=1, shape=(1, 16, 32, 32)):
qat_net = quantize_qat(net)
inps = []
for _ in range(num_inp):
data1 = mge.tensor(np.random.random(shape)) * 16
data1 = data1.astype(inp_dtype)
inp1 = mge.tensor(dtype.convert_from_qint8(data1.numpy()))
inp1.qparams.scale = mge.tensor(dtype.get_scale(inp_dtype))
inp1.qparams.dtype_meta = dtype._builtin_quant_dtypes["qint8"]
inps.append(inp1)
return qat_net, inps
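# Note (added for clarity): the astype/convert_from_qint8 round trip above
# bakes quantization error into the float input, and the qparams attached
# afterwards record the scale and dtype meta that the fake-quantized tensor
# corresponds to, so the traced QAT network sees inputs consistent with
# inp_dtype.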
def get_qat_inputs_quint8(inp_dtype, num_inp=1, shape=(1, 16, 384, 512)):
inps = []
for _ in range(num_inp):
data1 = mge.tensor(np.random.random(shape)) * 16
data1 = data1.astype(inp_dtype)
inp1 = mge.tensor(dtype.convert_from_quint8(data1.numpy()))
inp1.qparams.scale = mge.tensor(dtype.get_scale(inp_dtype))
inp1.qparams.zero_point = mge.tensor(dtype.get_zero_point(inp_dtype))
inp1.qparams.dtype_meta = dtype._builtin_quant_dtypes["quint8"]
inps.append(inp1)
return inps
def test_linear():
net = LinearOpr()
inp_dtype = dtype.qint8(16.0 / 128.0)
qat_net, inps = get_qat_net(inp_dtype, net, shape=(10, 100))
traced_module, tm_result = get_traced_module(qat_net, inps[0])
inp = inps[0].astype(inp_dtype)
_test_convert_result(inp, traced_module, tm_result, max_err, require_quantize=False)
def test_add():
class ElemwiseOpr(M.Module):
def __init__(self,):
super().__init__()
self.data = np.ones((2, 3, 224, 224)).astype(np.float32)
self.data1 = np.random.random((1, 3, 1, 1)).astype(np.float32)
self.add1 = M.Elemwise("add")
self.add2 = M.Elemwise("add")
self.add3 = M.Elemwise("add")
scale = mge.tensor((16.0 / 128.0))
self.quant_stub = QuantStub()
self.quant_stub.act_fake_quant = FakeQuantize(
_builtin_quant_dtypes["qint8"]
)
self.quant_stub.act_fake_quant.set_qparams(
create_qparams(
dtype_meta=_builtin_quant_dtypes["qint8"],
scale=scale,
zero_point=None,
)
)
self.quant_stub1 = QuantStub()
self.quant_stub1.act_fake_quant = FakeQuantize(
_builtin_quant_dtypes["qint8"]
)
self.quant_stub1.act_fake_quant.set_qparams(
create_qparams(
dtype_meta=_builtin_quant_dtypes["qint8"],
scale=scale,
zero_point=None,
)
)
def forward(self, a):
n = self.quant_stub(mge.tensor(np.float32(10)))
data1 = self.quant_stub1(mge.tensor(self.data1))
x = self.add1(a, n)
y = self.add2(a, data1)
z = self.add3(x, y)
return z
net = ElemwiseOpr()
inp_dtype = dtype.qint8(16.0 / 128.0)
qat_net, inps = get_qat_net(inp_dtype, net, shape=(1, 3, 1, 1))
traced_module, tm_result = get_traced_module(qat_net, inps[0])
print(traced_module.flatten().graph)
inp = inps[0].astype(inp_dtype)
_test_convert_result(
inp,
traced_module,
tm_result,
max_err,
require_quantize=False,
split_conv_relu=True,
)
def test_det_model():
net = mge.load("models_fire_det.fix_batch.fuse_scale_cpu.pkl")
inp_dtype = dtype.qint8(16.0 / 128.0)
qat_net, inps = get_qat_net(inp_dtype, net, shape=(1, 3, 512, 512))
traced_module, tm_result = get_traced_module(qat_net, inps[0])
inp = inps[0].astype(inp_dtype)
_test_convert_result(inp, traced_module, tm_result, max_err, require_quantize=False)
def test_snpe_model_8f():
model = "8w16f_backbone.tm"
net = mge.load(model)
print(net.flatten().graph)
inp_dtype = dtype.quint8(16.0 / 128.0, 128)
inps = get_qat_inputs_quint8(inp_dtype, num_inp=2, shape=(1, 16, 384, 512))
tm_result = dict(zip(net.graph.outputs, net(*inps)))
_test_convert_result(
inps,
net,
tm_result,
max_err,
input_data_type="quint8",
input_scales=inps[0].qparams.scale,
input_zero_points=inps[0].qparams.zero_point,
require_quantize=False,
param_fake_quant=True,
split_conv_relu=True,
input_name=["inp", "prev"],
)
|
[
"megengine.core.tensor.dtype.get_scale",
"megengine.quantization.quantize.quantize_qat",
"megengine.tensor",
"numpy.float32",
"numpy.ones",
"megengine.core.tensor.dtype.get_zero_point",
"megengine.core.tensor.dtype.qint8",
"test.utils.LinearOpr",
"megengine.core.tensor.dtype.quint8",
"megengine.module.quant_dequant.QuantStub",
"numpy.random.random",
"megengine.quantization.utils.create_qparams",
"megengine.module.Elemwise",
"megengine.traced_module.fake_quant.FakeQuantize",
"megengine.load"
] |
[((1033, 1050), 'megengine.quantization.quantize.quantize_qat', 'quantize_qat', (['net'], {}), '(net)\n', (1045, 1050), False, 'from megengine.quantization.quantize import quantize_qat\n'), ((2023, 2034), 'test.utils.LinearOpr', 'LinearOpr', ([], {}), '()\n', (2032, 2034), False, 'from test.utils import LinearOpr\n'), ((2051, 2076), 'megengine.core.tensor.dtype.qint8', 'dtype.qint8', (['(16.0 / 128.0)'], {}), '(16.0 / 128.0)\n', (2062, 2076), False, 'from megengine.core.tensor import dtype\n'), ((3910, 3935), 'megengine.core.tensor.dtype.qint8', 'dtype.qint8', (['(16.0 / 128.0)'], {}), '(16.0 / 128.0)\n', (3921, 3935), False, 'from megengine.core.tensor import dtype\n'), ((4348, 4404), 'megengine.load', 'mge.load', (['"""models_fire_det.fix_batch.fuse_scale_cpu.pkl"""'], {}), "('models_fire_det.fix_batch.fuse_scale_cpu.pkl')\n", (4356, 4404), True, 'import megengine as mge\n'), ((4421, 4446), 'megengine.core.tensor.dtype.qint8', 'dtype.qint8', (['(16.0 / 128.0)'], {}), '(16.0 / 128.0)\n', (4432, 4446), False, 'from megengine.core.tensor import dtype\n'), ((4781, 4796), 'megengine.load', 'mge.load', (['model'], {}), '(model)\n', (4789, 4796), True, 'import megengine as mge\n'), ((4844, 4875), 'megengine.core.tensor.dtype.quint8', 'dtype.quint8', (['(16.0 / 128.0)', '(128)'], {}), '(16.0 / 128.0, 128)\n', (4856, 4875), False, 'from megengine.core.tensor import dtype\n'), ((1298, 1324), 'megengine.core.tensor.dtype.get_scale', 'dtype.get_scale', (['inp_dtype'], {}), '(inp_dtype)\n', (1313, 1324), False, 'from megengine.core.tensor import dtype\n'), ((1772, 1798), 'megengine.core.tensor.dtype.get_scale', 'dtype.get_scale', (['inp_dtype'], {}), '(inp_dtype)\n', (1787, 1798), False, 'from megengine.core.tensor import dtype\n'), ((1845, 1876), 'megengine.core.tensor.dtype.get_zero_point', 'dtype.get_zero_point', (['inp_dtype'], {}), '(inp_dtype)\n', (1865, 1876), False, 'from megengine.core.tensor import dtype\n'), ((2613, 2630), 'megengine.module.Elemwise', 'M.Elemwise', (['"""add"""'], {}), "('add')\n", (2623, 2630), True, 'import megengine.module as M\n'), ((2655, 2672), 'megengine.module.Elemwise', 'M.Elemwise', (['"""add"""'], {}), "('add')\n", (2665, 2672), True, 'import megengine.module as M\n'), ((2697, 2714), 'megengine.module.Elemwise', 'M.Elemwise', (['"""add"""'], {}), "('add')\n", (2707, 2714), True, 'import megengine.module as M\n'), ((2736, 2760), 'megengine.tensor', 'mge.tensor', (['(16.0 / 128.0)'], {}), '(16.0 / 128.0)\n', (2746, 2760), True, 'import megengine as mge\n'), ((2793, 2804), 'megengine.module.quant_dequant.QuantStub', 'QuantStub', ([], {}), '()\n', (2802, 2804), False, 'from megengine.module.quant_dequant import QuantStub\n'), ((2850, 2894), 'megengine.traced_module.fake_quant.FakeQuantize', 'FakeQuantize', (["_builtin_quant_dtypes['qint8']"], {}), "(_builtin_quant_dtypes['qint8'])\n", (2862, 2894), False, 'from megengine.traced_module.fake_quant import FakeQuantize\n'), ((3209, 3220), 'megengine.module.quant_dequant.QuantStub', 'QuantStub', ([], {}), '()\n', (3218, 3220), False, 'from megengine.module.quant_dequant import QuantStub\n'), ((3267, 3311), 'megengine.traced_module.fake_quant.FakeQuantize', 'FakeQuantize', (["_builtin_quant_dtypes['qint8']"], {}), "(_builtin_quant_dtypes['qint8'])\n", (3279, 3311), False, 'from megengine.traced_module.fake_quant import FakeQuantize\n'), ((1121, 1144), 'numpy.random.random', 'np.random.random', (['shape'], {}), '(shape)\n', (1137, 1144), True, 'import numpy as np\n'), ((1594, 1617), 'numpy.random.random', 
'np.random.random', (['shape'], {}), '(shape)\n', (1610, 1617), True, 'import numpy as np\n'), ((2997, 3088), 'megengine.quantization.utils.create_qparams', 'create_qparams', ([], {'dtype_meta': "_builtin_quant_dtypes['qint8']", 'scale': 'scale', 'zero_point': 'None'}), "(dtype_meta=_builtin_quant_dtypes['qint8'], scale=scale,\n zero_point=None)\n", (3011, 3088), False, 'from megengine.quantization.utils import create_qparams\n'), ((3415, 3506), 'megengine.quantization.utils.create_qparams', 'create_qparams', ([], {'dtype_meta': "_builtin_quant_dtypes['qint8']", 'scale': 'scale', 'zero_point': 'None'}), "(dtype_meta=_builtin_quant_dtypes['qint8'], scale=scale,\n zero_point=None)\n", (3429, 3506), False, 'from megengine.quantization.utils import create_qparams\n'), ((3724, 3746), 'megengine.tensor', 'mge.tensor', (['self.data1'], {}), '(self.data1)\n', (3734, 3746), True, 'import megengine as mge\n'), ((2469, 2494), 'numpy.ones', 'np.ones', (['(2, 3, 224, 224)'], {}), '((2, 3, 224, 224))\n', (2476, 2494), True, 'import numpy as np\n'), ((2539, 2569), 'numpy.random.random', 'np.random.random', (['(1, 3, 1, 1)'], {}), '((1, 3, 1, 1))\n', (2555, 2569), True, 'import numpy as np\n'), ((3670, 3684), 'numpy.float32', 'np.float32', (['(10)'], {}), '(10)\n', (3680, 3684), True, 'import numpy as np\n')]
|
import math
from itertools import groupby, chain
from operator import itemgetter
from aocd import get_data
def simplify(a, b):
if b == 0:
return (-1, 0) if a < 0 else (1, 0)
elif a == 0:
return (0, -1) if b < 0 else (0, 1)
gcd = math.gcd(a, b)
return a // gcd, b // gcd
def part1(a):
return max(len(set(simplify(x - base[0], y - base[1]) for x, y in a if (x, y) != base)) for base in a)
def angle(x, y):
deg = math.degrees(math.atan2(y, x))
deg -= 270
while deg < 0:
deg += 360
return deg
def part2(a):
base = max(a, key=lambda b: len(set(simplify(x - b[0], y - b[1]) for x, y in a if (x, y) != b)))
astroids = groupby(sorted((simplify(x - base[0], y - base[1]), (x - base[0], y - base[1]))
for x, y in a if (x, y) != base), key=itemgetter(0))
ast = list(map(lambda x: (x[1][0] + base[0], x[1][1] + base[1]),
sorted(chain.from_iterable(
enumerate(sorted((c[1] for c in cs), key=lambda x: (abs(x[0]), abs(x[1]))))
for s, cs in astroids), key=lambda x: (x[0], angle(*x[1])))))
ast = ast[199]
return ast[0] * 100 + ast[1]
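# Sanity check (added for illustration; not in the original script):
# simplify(4, 2) -> (2, 1) and simplify(0, -3) -> (0, -1), so all asteroids
# along one line of sight share a key in part1's set. angle(0, -1) is 0
# degrees, which puts "straight up" first in part2's clockwise laser sweep.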
if __name__ == '__main__':
data = get_data(day=10, year=2019)
inp = [(x, y) for y, l in enumerate(data.splitlines()) for x, c in enumerate(l) if c == '#']
print(part1(inp))
print(part2(inp))
|
[
"math.atan2",
"operator.itemgetter",
"aocd.get_data",
"math.gcd"
] |
[((260, 274), 'math.gcd', 'math.gcd', (['a', 'b'], {}), '(a, b)\n', (268, 274), False, 'import math\n'), ((1243, 1270), 'aocd.get_data', 'get_data', ([], {'day': '(10)', 'year': '(2019)'}), '(day=10, year=2019)\n', (1251, 1270), False, 'from aocd import get_data\n'), ((470, 486), 'math.atan2', 'math.atan2', (['y', 'x'], {}), '(y, x)\n', (480, 486), False, 'import math\n'), ((836, 849), 'operator.itemgetter', 'itemgetter', (['(0)'], {}), '(0)\n', (846, 849), False, 'from operator import itemgetter\n')]
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2020, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import qiime2
from q2_dada2 import DADA2StatsFormat
from q2_dada2.plugin_setup import plugin
@plugin.register_transformer
def _1(ff: DADA2StatsFormat) -> qiime2.Metadata:
return qiime2.Metadata.load(str(ff))
@plugin.register_transformer
def _2(obj: qiime2.Metadata) -> DADA2StatsFormat:
ff = DADA2StatsFormat()
obj.save(str(ff))
return ff
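# Note (added): registering transformers in both directions lets the QIIME 2
# framework convert transparently between DADA2StatsFormat files on disk and
# qiime2.Metadata objects in memory, whichever direction an action requires.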
|
[
"q2_dada2.DADA2StatsFormat"
] |
[((655, 673), 'q2_dada2.DADA2StatsFormat', 'DADA2StatsFormat', ([], {}), '()\n', (671, 673), False, 'from q2_dada2 import DADA2StatsFormat\n')]
|
"""
The MIT License (MIT)
Copyright (c) 2021 xXSergeyXx
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import inspect
import re
from typing import Any, Dict, Generic, List, Optional, TYPE_CHECKING, TypeVar, Union
import liftcord.abc
import liftcord.utils
from liftcord.message import Message
if TYPE_CHECKING:
from typing_extensions import ParamSpec
from liftcord.abc import MessageableChannel
from liftcord.guild import Guild
from liftcord.member import Member
from liftcord.state import ConnectionState
from liftcord.user import ClientUser, User
from liftcord.voice_client import VoiceProtocol
from .bot import Bot, AutoShardedBot
from .cog import Cog
from .core import Command
from .help import HelpCommand
from .view import StringView
__all__ = (
'Context',
)
MISSING: Any = liftcord.utils.MISSING
T = TypeVar('T')
BotT = TypeVar('BotT', bound="Union[Bot, AutoShardedBot]")
CogT = TypeVar('CogT', bound="Cog")
if TYPE_CHECKING:
P = ParamSpec('P')
else:
P = TypeVar('P')
class Context(liftcord.abc.Messageable, Generic[BotT]):
r"""Represents the context in which a command is being invoked under.
This class contains a lot of meta data to help you understand more about
the invocation context. This class is not created manually and is instead
passed around to commands as the first parameter.
This class implements the :class:`~nextcord.abc.Messageable` ABC.
Attributes
-----------
message: :class:`.Message`
The message that triggered the command being executed.
bot: :class:`.Bot`
The bot that contains the command being executed.
args: :class:`list`
The list of transformed arguments that were passed into the command.
If this is accessed during the :func:`.on_command_error` event
then this list could be incomplete.
kwargs: :class:`dict`
A dictionary of transformed arguments that were passed into the command.
Similar to :attr:`args`\, if this is accessed in the
:func:`.on_command_error` event then this dict could be incomplete.
current_parameter: Optional[:class:`inspect.Parameter`]
The parameter that is currently being inspected and converted.
This is only of use for within converters.
.. versionadded:: 2.0
prefix: Optional[:class:`str`]
The prefix that was used to invoke the command.
command: Optional[:class:`Command`]
The command that is being invoked currently.
invoked_with: Optional[:class:`str`]
The command name that triggered this invocation. Useful for finding out
which alias called the command.
invoked_parents: List[:class:`str`]
The command names of the parents that triggered this invocation. Useful for
finding out which aliases called the command.
For example in commands ``?a b c test``, the invoked parents are ``['a', 'b', 'c']``.
.. versionadded:: 1.7
invoked_subcommand: Optional[:class:`Command`]
The subcommand that was invoked.
If no valid subcommand was invoked then this is equal to ``None``.
subcommand_passed: Optional[:class:`str`]
The string that was attempted to call a subcommand. This does not have
to point to a valid registered subcommand and could just point to a
nonsense string. If nothing was passed to attempt a call to a
subcommand then this is set to ``None``.
command_failed: :class:`bool`
A boolean that indicates if the command failed to be parsed, checked,
or invoked.
"""
def __init__(self,
*,
message: Message,
bot: BotT,
view: StringView,
args: List[Any] = MISSING,
kwargs: Dict[str, Any] = MISSING,
prefix: Optional[str] = None,
command: Optional[Command] = None,
invoked_with: Optional[str] = None,
invoked_parents: List[str] = MISSING,
invoked_subcommand: Optional[Command] = None,
subcommand_passed: Optional[str] = None,
command_failed: bool = False,
current_parameter: Optional[inspect.Parameter] = None,
):
self.message: Message = message
self.bot: BotT = bot
self.args: List[Any] = args or []
self.kwargs: Dict[str, Any] = kwargs or {}
self.prefix: Optional[str] = prefix
self.command: Optional[Command] = command
self.view: StringView = view
self.invoked_with: Optional[str] = invoked_with
self.invoked_parents: List[str] = invoked_parents or []
self.invoked_subcommand: Optional[Command] = invoked_subcommand
self.subcommand_passed: Optional[str] = subcommand_passed
self.command_failed: bool = command_failed
self.current_parameter: Optional[inspect.Parameter] = current_parameter
self._state: ConnectionState = self.message._state
async def invoke(self, command: Command[CogT, P, T], /, *args: P.args, **kwargs: P.kwargs) -> T:
r"""|coro|
Calls a command with the arguments given.
This is useful if you want to just call the callback that a
:class:`.Command` holds internally.
.. note::
This does not handle converters, checks, cooldowns, pre-invoke,
            or after-invoke hooks in any manner. It calls the internal callback
            directly, as if it were a regular function.
You must take care in passing the proper arguments when
using this function.
Parameters
-----------
command: :class:`.Command`
The command that is going to be called.
\*args
The arguments to use.
\*\*kwargs
The keyword arguments to use.
Raises
-------
TypeError
The command argument to invoke is missing.
"""
return await command(self, *args, **kwargs)
async def reinvoke(self, *, call_hooks: bool = False, restart: bool = True) -> None:
"""|coro|
Calls the command again.
This is similar to :meth:`~.Context.invoke` except that it bypasses
checks, cooldowns, and error handlers.
.. note::
If you want to bypass :exc:`.UserInputError` derived exceptions,
it is recommended to use the regular :meth:`~.Context.invoke`
as it will work more naturally. After all, this will end up
using the old arguments the user has used and will thus just
fail again.
Parameters
------------
call_hooks: :class:`bool`
Whether to call the before and after invoke hooks.
restart: :class:`bool`
Whether to start the call chain from the very beginning
or where we left off (i.e. the command that caused the error).
The default is to start where we left off.
Raises
-------
ValueError
The context to reinvoke is not valid.
"""
cmd = self.command
view = self.view
if cmd is None:
raise ValueError('This context is not valid.')
# some state to revert to when we're done
index, previous = view.index, view.previous
invoked_with = self.invoked_with
invoked_subcommand = self.invoked_subcommand
invoked_parents = self.invoked_parents
subcommand_passed = self.subcommand_passed
if restart:
to_call = cmd.root_parent or cmd
view.index = len(self.prefix or '')
view.previous = 0
self.invoked_parents = []
self.invoked_with = view.get_word() # advance to get the root command
else:
to_call = cmd
try:
await to_call.reinvoke(self, call_hooks=call_hooks)
finally:
self.command = cmd
view.index = index
view.previous = previous
self.invoked_with = invoked_with
self.invoked_subcommand = invoked_subcommand
self.invoked_parents = invoked_parents
self.subcommand_passed = subcommand_passed
@property
def valid(self) -> bool:
""":class:`bool`: Checks if the invocation context is valid to be invoked with."""
return self.prefix is not None and self.command is not None
async def _get_channel(self) -> liftcord.abc.Messageable:
return self.channel
@property
def clean_prefix(self) -> str:
""":class:`str`: The cleaned up invoke prefix. i.e. mentions are ``@name`` instead of ``<@id>``.
.. versionadded:: 2.0
"""
if self.prefix is None:
return ''
user = self.me
# this breaks if the prefix mention is not the bot itself but I
# consider this to be an *incredibly* strange use case. I'd rather go
# for this common use case rather than waste performance for the
# odd one.
pattern = re.compile(r"<@!?%s>" % user.id)
return pattern.sub("@%s" % user.display_name.replace('\\', r'\\'), self.prefix)
@property
def cog(self) -> Optional[Cog]:
"""Optional[:class:`.Cog`]: Returns the cog associated with this context's command. None if it does not exist."""
if self.command is None:
return None
return self.command.cog
@liftcord.utils.cached_property
def guild(self) -> Optional[Guild]:
"""Optional[:class:`.Guild`]: Returns the guild associated with this context's command. None if not available."""
return self.message.guild
@liftcord.utils.cached_property
def channel(self) -> MessageableChannel:
"""Union[:class:`.abc.Messageable`]: Returns the channel associated with this context's command.
Shorthand for :attr:`.Message.channel`.
"""
return self.message.channel
@liftcord.utils.cached_property
def author(self) -> Union[User, Member]:
"""Union[:class:`~nextcord.User`, :class:`.Member`]:
Returns the author associated with this context's command. Shorthand for :attr:`.Message.author`
"""
return self.message.author
@liftcord.utils.cached_property
def me(self) -> Union[Member, ClientUser]:
"""Union[:class:`.Member`, :class:`.ClientUser`]:
Similar to :attr:`.Guild.me` except it may return the :class:`.ClientUser` in private message contexts.
"""
# bot.user will never be None at this point.
return self.guild.me if self.guild is not None else self.bot.user # type: ignore
@property
def voice_client(self) -> Optional[VoiceProtocol]:
r"""Optional[:class:`.VoiceProtocol`]: A shortcut to :attr:`.Guild.voice_client`\, if applicable."""
g = self.guild
return g.voice_client if g else None
async def send_help(self, *args: Any) -> Any:
"""send_help(entity=<bot>)
|coro|
Shows the help command for the specified entity if given.
The entity can be a command or a cog.
If no entity is given, then it'll show help for the
entire bot.
If the entity is a string, then it looks up whether it's a
:class:`Cog` or a :class:`Command`.
.. note::
Due to the way this function works, instead of returning
something similar to :meth:`~.commands.HelpCommand.command_not_found`
this returns :class:`None` on bad input or no help command.
Parameters
------------
entity: Optional[Union[:class:`Command`, :class:`Cog`, :class:`str`]]
The entity to show help for.
Returns
--------
Any
The result of the help command, if any.
"""
from .core import Group, Command, wrap_callback
from .errors import CommandError
bot = self.bot
cmd = bot.help_command
if cmd is None:
return None
cmd = cmd.copy()
cmd.context = self
if len(args) == 0:
await cmd.prepare_help_command(self, None)
mapping = cmd.get_bot_mapping()
injected = wrap_callback(cmd.send_bot_help)
try:
return await injected(mapping)
except CommandError as e:
await cmd.on_help_command_error(self, e)
return None
entity = args[0]
if isinstance(entity, str):
entity = bot.get_cog(entity) or bot.get_command(entity)
if entity is None:
return None
try:
entity.qualified_name
except AttributeError:
# if we're here then it's not a cog, group, or command.
return None
await cmd.prepare_help_command(self, entity.qualified_name)
try:
if hasattr(entity, '__cog_commands__'):
injected = wrap_callback(cmd.send_cog_help)
return await injected(entity)
elif isinstance(entity, Group):
injected = wrap_callback(cmd.send_group_help)
return await injected(entity)
elif isinstance(entity, Command):
injected = wrap_callback(cmd.send_command_help)
return await injected(entity)
else:
return None
except CommandError as e:
await cmd.on_help_command_error(self, e)
@liftcord.utils.copy_doc(Message.reply)
async def reply(self, content: Optional[str] = None, **kwargs: Any) -> Message:
return await self.message.reply(content, **kwargs)
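# Usage sketch (added for illustration; command and argument names are
# placeholders). Inside one command callback, another registered command's
# callback can be run directly, bypassing converters and checks as documented
# in Context.invoke above:
#
#   @bot.command()
#   async def wrapper(ctx: Context):
#       other = ctx.bot.get_command('other')
#       await ctx.invoke(other, 'some-arg')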
|
[
"typing.TypeVar",
"typing_extensions.ParamSpec",
"re.compile"
] |
[((1886, 1898), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (1893, 1898), False, 'from typing import Any, Dict, Generic, List, Optional, TYPE_CHECKING, TypeVar, Union\n'), ((1906, 1957), 'typing.TypeVar', 'TypeVar', (['"""BotT"""'], {'bound': '"""Union[Bot, AutoShardedBot]"""'}), "('BotT', bound='Union[Bot, AutoShardedBot]')\n", (1913, 1957), False, 'from typing import Any, Dict, Generic, List, Optional, TYPE_CHECKING, TypeVar, Union\n'), ((1965, 1993), 'typing.TypeVar', 'TypeVar', (['"""CogT"""'], {'bound': '"""Cog"""'}), "('CogT', bound='Cog')\n", (1972, 1993), False, 'from typing import Any, Dict, Generic, List, Optional, TYPE_CHECKING, TypeVar, Union\n'), ((2021, 2035), 'typing_extensions.ParamSpec', 'ParamSpec', (['"""P"""'], {}), "('P')\n", (2030, 2035), False, 'from typing_extensions import ParamSpec\n'), ((2050, 2062), 'typing.TypeVar', 'TypeVar', (['"""P"""'], {}), "('P')\n", (2057, 2062), False, 'from typing import Any, Dict, Generic, List, Optional, TYPE_CHECKING, TypeVar, Union\n'), ((10014, 10045), 're.compile', 're.compile', (["('<@!?%s>' % user.id)"], {}), "('<@!?%s>' % user.id)\n", (10024, 10045), False, 'import re\n')]
|
"""
Use this script to create JSON-Line description files that can be used to
train deep-speech models through this library.
This works with data directories that are organized like LibriSpeech:
data_directory/group/speaker/[file_id1.wav, file_id2.wav, ...,
speaker.trans.txt]
Where speaker.trans.txt contains one "file_id transcription" pair per line.
"""
from __future__ import absolute_import, division, print_function
import argparse
import json
import os
import wave
def main(data_directory, output_file):
labels = []
durations = []
keys = []
for group in os.listdir(data_directory):
if group.startswith('.'):
continue
speaker_path = os.path.join(data_directory, group)
for speaker in os.listdir(speaker_path):
if speaker.startswith('.'):
continue
labels_file = os.path.join(speaker_path, speaker,
'{}-{}.trans.txt'
.format(group, speaker))
speakers = [speaker]
labels_files = [labels_file]
if not os.path.isfile(labels_file):
speakers = []
labels_files = []
for s in os.listdir(os.path.join(speaker_path, speaker)):
speakers.append(os.path.join(speaker, s))
labels_files.append(os.path.join(speaker_path, speaker, s,
'{}-{}.trans.txt'
.format(speaker, s)))
for speaker, labels_file in zip(speakers, labels_files):
for line in open(labels_file):
split = line.strip().split()
file_id = split[0]
label = ' '.join(split[1:]).lower()
audio_file = os.path.join(speaker_path, speaker,
file_id) + '.wav'
audio = wave.open(audio_file)
duration = float(audio.getnframes()) / audio.getframerate()
audio.close()
keys.append(audio_file)
durations.append(duration)
labels.append(label)
with open(output_file, 'w') as out_file:
for i in range(len(keys)):
line = json.dumps({'key': keys[i], 'duration': durations[i],
'text': labels[i]})
out_file.write(line + '\n')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('data_directory', type=str,
help='Path to data directory')
parser.add_argument('output_file', type=str,
help='Path to output file')
args = parser.parse_args()
main(args.data_directory, args.output_file)
|
[
"wave.open",
"argparse.ArgumentParser",
"json.dumps",
"os.path.isfile",
"os.path.join",
"os.listdir"
] |
[((600, 626), 'os.listdir', 'os.listdir', (['data_directory'], {}), '(data_directory)\n', (610, 626), False, 'import os\n'), ((2533, 2558), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2556, 2558), False, 'import argparse\n'), ((706, 741), 'os.path.join', 'os.path.join', (['data_directory', 'group'], {}), '(data_directory, group)\n', (718, 741), False, 'import os\n'), ((765, 789), 'os.listdir', 'os.listdir', (['speaker_path'], {}), '(speaker_path)\n', (775, 789), False, 'import os\n'), ((2347, 2420), 'json.dumps', 'json.dumps', (["{'key': keys[i], 'duration': durations[i], 'text': labels[i]}"], {}), "({'key': keys[i], 'duration': durations[i], 'text': labels[i]})\n", (2357, 2420), False, 'import json\n'), ((1132, 1159), 'os.path.isfile', 'os.path.isfile', (['labels_file'], {}), '(labels_file)\n', (1146, 1159), False, 'import os\n'), ((1261, 1296), 'os.path.join', 'os.path.join', (['speaker_path', 'speaker'], {}), '(speaker_path, speaker)\n', (1273, 1296), False, 'import os\n'), ((1980, 2001), 'wave.open', 'wave.open', (['audio_file'], {}), '(audio_file)\n', (1989, 2001), False, 'import wave\n'), ((1335, 1359), 'os.path.join', 'os.path.join', (['speaker', 's'], {}), '(speaker, s)\n', (1347, 1359), False, 'import os\n'), ((1852, 1896), 'os.path.join', 'os.path.join', (['speaker_path', 'speaker', 'file_id'], {}), '(speaker_path, speaker, file_id)\n', (1864, 1896), False, 'import os\n')]
|
import json
from api.models import db_session, engine, BaseModel, User, Semester, Course, CourseDependency, InstructorPool, StudentDemand, StudentRecord
user_roles = {
'5': 'student',
'1': 'TA',
'3': 'professor',
'4': 'administrator'
}
# Students
students = {}
for (student, record) in db_session.query(User, StudentRecord).join(StudentRecord).all():
students[student.id] = {
'seniority': record.seniority if record else 0,
'current_gpa': record.current_gpa if record else 0
}
# Semesters (should figure out better course timing approach)
semesters = {}
for semester in db_session.query(Semester).all():
semesters[semester.id] = {
'semester_name': semester.name
}
# Courses
courses = {}
for course in db_session.query(Course).all():
courses[course.id] = {
'is_fall': course.is_fall,
'is_spring': course.is_spring,
'is_summer': course.is_summer
}
# Prerequisites
course_dependencies = []
for cd in db_session.query(CourseDependency).all():
course_dependencies.append({
'first_course': cd.first_course,
'second_course': cd.second_course
})
# Student demand
student_demand = []
for sd in db_session.query(StudentDemand).all():
student_demand.append({
'student_id': sd.student_id,
'course_id': sd.course_id,
'semester_id': sd.semester_id
})
# Instructor availability
instructor_availability = []
for ia in db_session.query(InstructorPool).all():
instructor_availability.append({
'instructor_id': ia.user_id,
'course_id': ia.course_id,
'semester_id': ia.semester_id,
'instructor_role': ia.instructor_role
})
with open('optimizer/test_data_big.json', 'w') as testfile:
testfile.write(json.dumps({
'students': students,
'courses': courses,
'semesters': semesters,
'course_dependencies': course_dependencies,
'student_demand': student_demand,
'instructor_pool': instructor_availability
}, indent=2))
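# Note (added): the resulting test_data_big.json is one JSON object keyed by
# the six sections above; 'students', 'semesters', and 'courses' are keyed by
# integer ids (serialized as strings, since JSON object keys are strings),
# while the dependency, demand, and availability sections are plain lists.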
|
[
"api.models.db_session.query",
"json.dumps"
] |
[((613, 639), 'api.models.db_session.query', 'db_session.query', (['Semester'], {}), '(Semester)\n', (629, 639), False, 'from api.models import db_session, engine, BaseModel, User, Semester, Course, CourseDependency, InstructorPool, StudentDemand, StudentRecord\n'), ((761, 785), 'api.models.db_session.query', 'db_session.query', (['Course'], {}), '(Course)\n', (777, 785), False, 'from api.models import db_session, engine, BaseModel, User, Semester, Course, CourseDependency, InstructorPool, StudentDemand, StudentRecord\n'), ((990, 1024), 'api.models.db_session.query', 'db_session.query', (['CourseDependency'], {}), '(CourseDependency)\n', (1006, 1024), False, 'from api.models import db_session, engine, BaseModel, User, Semester, Course, CourseDependency, InstructorPool, StudentDemand, StudentRecord\n'), ((1203, 1234), 'api.models.db_session.query', 'db_session.query', (['StudentDemand'], {}), '(StudentDemand)\n', (1219, 1234), False, 'from api.models import db_session, engine, BaseModel, User, Semester, Course, CourseDependency, InstructorPool, StudentDemand, StudentRecord\n'), ((1453, 1485), 'api.models.db_session.query', 'db_session.query', (['InstructorPool'], {}), '(InstructorPool)\n', (1469, 1485), False, 'from api.models import db_session, engine, BaseModel, User, Semester, Course, CourseDependency, InstructorPool, StudentDemand, StudentRecord\n'), ((1774, 1992), 'json.dumps', 'json.dumps', (["{'students': students, 'courses': courses, 'semesters': semesters,\n 'course_dependencies': course_dependencies, 'student_demand':\n student_demand, 'instructor_pool': instructor_availability}"], {'indent': '(2)'}), "({'students': students, 'courses': courses, 'semesters':\n semesters, 'course_dependencies': course_dependencies, 'student_demand':\n student_demand, 'instructor_pool': instructor_availability}, indent=2)\n", (1784, 1992), False, 'import json\n'), ((304, 341), 'api.models.db_session.query', 'db_session.query', (['User', 'StudentRecord'], {}), '(User, StudentRecord)\n', (320, 341), False, 'from api.models import db_session, engine, BaseModel, User, Semester, Course, CourseDependency, InstructorPool, StudentDemand, StudentRecord\n')]
|
# Copyright 2013 Metacloud
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Items useful for external testing."""
import copy
from dogpile.cache import proxy
from oslo_cache import core as cache
__all__ = [
'CacheIsolatingProxy',
]
NO_VALUE = cache.NO_VALUE
def _copy_value(value):
if value is not NO_VALUE:
value = copy.deepcopy(value)
return value
# NOTE(morganfainberg): WARNING - It is not recommended to use the Memory
# backend for dogpile.cache in a real deployment under any circumstances. The
# backend does no cleanup of expired values and therefore will leak memory. The
# backend is not implemented in a way to share data across processes (e.g.
# Keystone in HTTPD). This proxy is a hack to get around the lack of isolation
# of values in memory. Currently it blindly stores and retrieves the values
# from the cache, and modifications to dicts/lists/etc returned can result in
# changes to the cached values. In short, do not use the dogpile.cache.memory
# backend unless you are running tests or expecting odd/strange results.
class CacheIsolatingProxy(proxy.ProxyBackend):
"""Proxy that forces a memory copy of stored values.
The default in-memory cache-region does not perform a copy on values it
is meant to cache. Therefore if the value is modified after set or after
get, the cached value also is modified. This proxy does a copy as the last
thing before storing data.
In your application's tests, you'll want to set this as a proxy for the
in-memory cache, like this::
self.config_fixture.config(
group='cache',
backend='dogpile.cache.memory',
enabled=True,
proxies=['oslo_cache.testing.CacheIsolatingProxy'])
"""
def get(self, key):
return _copy_value(self.proxied.get(key))
def set(self, key, value):
self.proxied.set(key, _copy_value(value))
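# Illustration (added; not part of the module) of the isolation the proxy
# provides. With a plain dogpile.cache.memory backend, mutating a value
# returned by region.get() also mutates the value still held by the cache;
# with this proxy configured, every get/set hands out an independent copy:
#
#   value = region.get('key')   # e.g. ['a']
#   value.append('b')           # under the proxy the cached value stays ['a']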
|
[
"copy.deepcopy"
] |
[((844, 864), 'copy.deepcopy', 'copy.deepcopy', (['value'], {}), '(value)\n', (857, 864), False, 'import copy\n')]
|
# System
import os
import sys
from pprint import pprint as pp
import argparse
import logging
import multiprocessing as mp
from functools import partial
from time import time
import shutil
# Externals
import yaml
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
from torch_scatter import scatter_add
import scipy as sp
from sklearn.cluster import DBSCAN
# Locals
# sys.path.append('GraphLearning/src')
from GraphLearning.src.trainers import get_trainer
from Seeding.src.utils.data_utils import load_config_dir, load_summaries, get_seed_data_loader
if torch.cuda.is_available():
    DEVICE = 'cuda'
else:
    DEVICE = 'cpu'
def load_triplets(test_loader, filelist):
graph_dataset = test_loader.dataset
graph_indices = np.array([g.i for g in graph_dataset])
filelist = np.array(filelist)
graph_names = filelist[graph_indices]
return graph_dataset, graph_names
def save_triplet_hitlist(triplet_data, threshold, output_dir):
e, graph_name, o = triplet_data
g_ID = np.load(graph_name[:-4] + "_ID.npz", allow_pickle=True)["I"]
triplet_preds = np.hstack([g_ID[:,e[0,o > threshold]], g_ID[:,e[1,o > threshold]]]).T
# triplet_IDs = np.hstack([g_ID[:,e[0,:]].T, g_ID[:,e[1,:]].T])[:,[0,1,3]]
# triplet_preds = triplet_IDs[o > threshold]
o_preds = np.hstack([o[o > threshold], o[o > threshold]]).T
# print(triplet_preds.shape, o_preds.shape)
triplet_list = np.c_[triplet_preds.astype(np.int64), o_preds]
filename = os.path.join(output_dir, os.path.splitext(os.path.basename(graph_name))[0])
np.save(filename, triplet_list)
def get_edge_scores(load_path, triplet_artifacts, n_tasks, task):
"""
- Takes config info for triplet training dataset (different from doublet training dataset),
- Runs the dataset through the trained doublet network,
- Returns edge scores with same indices as edge network input
"""
# Load configs
config = load_config_dir(triplet_artifacts)
logging.info('Inferring triplets on model configuration:')
logging.info(config)
# Find the best epoch
summaries = load_summaries(config)
best_idx = summaries.valid_loss.idxmin()
# Build the trainer and load best checkpoint
task_gpu = 0 if DEVICE=='cuda' else None
trainer = get_trainer(output_dir=config['output_dir'], gpu=task_gpu, **config['trainer'])
trainer.build_model(optimizer_config=config['optimizer'], **config['model'])
best_epoch = summaries.epoch.loc[best_idx]
trainer.load_checkpoint(checkpoint_id=best_epoch)
logging.info("With weight system:")
logging.info(trainer.model)
logging.info("On device:")
logging.info(trainer.device)
# Load the test dataset
test_loader, filelist = get_seed_data_loader(load_path, n_tasks, task)
# Apply the model
test_preds, test_targets = trainer.device_predict(test_loader)
print("Graph prediction complete")
#GET Hit ID data here and GRAPH NAMES
graph_dataset, graph_names = load_triplets(test_loader, filelist)
return test_preds, graph_dataset, graph_names
def combine_event(event_name, split_names):
""" Concatenates the triplet list of each subgraph """
total_triplets = np.empty((0,3))
for i in np.where(split_names[:,0] == event_name)[0]:
triplet_list = np.load(str(split_names[i,0]) + "_" + str(split_names[i,1]), allow_pickle=True)
total_triplets = np.append(total_triplets, triplet_list, axis=0)
return total_triplets
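# Assumed file layout (inferred from the slicing in process_data below): each
# event is written as one file per subgraph, named "<event>_<i>.npy", so the
# rows of split_names hold ["<temp_dir>/<event>", "<i>.npy"] pairs and joining
# them with "_" recovers the full path loaded above.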
def cluster(e_csr_bi, epsilon):
clustering = DBSCAN(eps=epsilon, metric="precomputed", min_samples=1).fit_predict(e_csr_bi)
track_labels = np.vstack([np.unique(e_csr_bi.tocoo().row), clustering[np.unique(e_csr_bi.tocoo().row)]])
track_labels = pd.DataFrame(track_labels.T)
track_labels.columns = ["hit_id", "track_id"]
# Add TrackML scoring here and print
return track_labels
def convert_to_bidirectional(e_csr):
# Invert to treat score as an inverse distance
e_csr.data = 1 - e_csr.data
e_csr_bi = sp.sparse.coo_matrix((np.hstack([e_csr.tocoo().data, e_csr.tocoo().data]),
np.hstack([np.vstack([e_csr.tocoo().row, e_csr.tocoo().col]),
np.vstack([e_csr.tocoo().col, e_csr.tocoo().row])])))
return e_csr_bi
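# Minimal sketch of what the symmetrisation above produces (hypothetical
# 3-hit matrix): a directed edge (0, 1) with score 0.9 becomes distance
# 1 - 0.9 = 0.1 and is mirrored to (1, 0), which is what DBSCAN's
# metric="precomputed" expects for an undirected graph:
#
#     e = sp.sparse.coo_matrix(([0.9], ([0], [1])), shape=(3, 3)).tocsr()
#     e_bi = convert_to_bidirectional(e)
#     # e_bi now holds (0, 1) -> 0.1 and (1, 0) -> 0.1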
def triplets_to_doublets(triplet_edges, triplet_scores, label_cut):
e_doublet_coo = sp.sparse.coo_matrix((triplet_edges.max()+1, triplet_edges.max()+1))
dok = sp.sparse.dok_matrix((e_doublet_coo.shape), dtype=e_doublet_coo.dtype)
dok._update(zip(zip(triplet_edges[:,0], triplet_edges[:,1]), [1]*triplet_edges.shape[0])) # Could be converted to actual scores
e_csr = dok.tocsr()
return e_csr
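# Note on the block above: dok._update is private scipy API; it bulk-loads the
# (row, col) -> value mapping without the per-item bounds checks that
# dok[i, j] = v would incur, which is much faster for large edge lists. As the
# inline comment says, the constant 1 could be replaced by triplet_scores to
# carry the edge weights through (triplet_scores and label_cut are currently
# unused here).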
def save_labels(track_labels, event_name, output_dir):
label_filename = os.path.join(output_dir, event_name)
np.save(label_filename, track_labels)
# def recombine_triplet_graphs(split_names, graph_dataset, test_preds, n_phi_segments):
# # for file_base in np.unique(split_names[:,0]):
# # Needs to load data as in combine_event()
# total_e = np.empty((2,0), dtype="int64")
# total_o = np.empty(0, dtype="float64")
# total_hid = np.empty((2,0), dtype="int64")
# total_pid = np.empty((1,0), dtype="int64")
# total_X = np.empty((0,7), dtype="float64")
# for i in np.where(split_names[:,0] == file_base)[0]:
# e_trip = graph_dataset[i].edge_index.numpy()
# scores = test_preds[i].numpy()
# hid = np.load(split_names[i,0] + "_" + split_names[i,1] + "_ID.npz", allow_pickle=True)["I"]
# pid = np.load(split_names[i,0] + "_" + split_names[i,1] + "_ID.npz", allow_pickle=True)["pid"]
# total_e = np.append(total_e, e_trip + total_hid.shape[1], axis=1)
# total_o = np.append(total_o, scores)
# total_hid = np.append(total_hid, hid, axis=1)
# total_pid = np.append(total_pid, pid)
# X = graph_dataset[i].x.numpy()
# X[:,1] = X[:,1] - n_phi_segments + 1 + delta*int(split_names[i,1]) #Is this right??
# X[X[:,1] < (-n_phi_segments), 1] += 2*n_phi_segments
# X[X[:,1] > n_phi_segments, 1] -= 2*n_phi_segments
# X[:,1] = X[:,1] / n_phi_segments # Renormalise
# X[:,4] = X[:,4] - n_phi_segments + 1 + delta*int(split_names[i,1]) #Is this right??
# X[X[:,4] < (-n_phi_segments), 1] += 2*n_phi_segments
# X[X[:,4] > n_phi_segments, 1] -= 2*n_phi_segments
# X[:,4] = X[:,4] / n_phi_segments # Renormalise
# total_X = np.vstack([total_X, graph_dataset[i].x.numpy()])
# return total_X, total_e, total_o, total_hid, total_pid
def process_event(event_name, split_names, output_dir, label_cut, epsilon):
# Recombine triplet graphs by loading all files in event
total_triplets = combine_event(event_name, split_names)
triplet_edges = total_triplets[:,:2].T.astype(dtype='int64')
triplet_scores = total_triplets[:,2].T
# Convert triplets to doublets
e_csr = triplets_to_doublets(triplet_edges, triplet_scores, label_cut)
# Cluster and produce track list
e_csr_bi = convert_to_bidirectional(e_csr)
# Save track labels
track_labels = cluster(e_csr_bi, epsilon)
save_labels(track_labels, event_name, output_dir)
def process_data(save_path, load_path, triplet_artifacts, label_threshold, epsilon, n_tasks, task):
logging.info("Running inference on triplet graphs")
# Calculate edge scores from best doublet model checkpoint
edge_scores, graph_dataset, graph_names = get_edge_scores(load_path, triplet_artifacts, n_tasks, task)
    triplet_data = np.array([[gi.edge_index.numpy(), graph_name, oi.numpy()] for gi, graph_name, oi in zip(graph_dataset, graph_names, edge_scores)], dtype=object)  # dtype=object: rows mix arrays and a string
logging.info("Inference complete")
# SAVE TRIPLET HITLIST
temp_dir = os.path.join(save_path, "temp")
if not os.path.exists(temp_dir):
os.makedirs(temp_dir, exist_ok=True)
with mp.Pool(processes=None) as pool:
process_fn = partial(save_triplet_hitlist, threshold=label_threshold, output_dir=temp_dir)
pool.map(process_fn, triplet_data)
logging.info("All files saved")
if task == 0:
# IS THIS THE CORRECT LENGTH???
triplet_data_length = len(triplet_data)
        while len(os.listdir(temp_dir)) < triplet_data_length:
            print("Waiting")
            sleep(10)  # wait until all subgraph files have been written out
# RELOAD FILELIST AND SPLIT
filelist = os.listdir(temp_dir)
split_names = np.array([[os.path.join(temp_dir,file[:-6]), file[-5:]] for file in filelist])
event_names = np.unique(split_names[:,0])
with mp.Pool(processes=None) as pool:
process_fn = partial(process_event, split_names = split_names, output_dir=save_path, label_cut=label_threshold, epsilon=epsilon)
pool.map(process_fn, event_names)
if os.path.exists(temp_dir):
shutil.rmtree(temp_dir, ignore_errors=False)
def main(args, force=False):
""" Main function """
tic = time()
save_path = os.path.join(args.data_storage_path, 'labels')
load_path = os.path.join(args.data_storage_path, 'triplet_graphs')
artifact_path = os.path.join(args.artifact_storage_path, 'triplet_gnn')
os.makedirs(save_path, exist_ok=True)
# Setup logging
log_format = '%(asctime)s %(levelname)s %(message)s'
log_level = logging.DEBUG #if args.verbose else logging.INFO
logging.basicConfig(level=log_level, format=log_format)
logging.info('Initialising')
process_data(save_path, load_path, artifact_path, args.label_threshold, args.epsilon, args.n_tasks, args.task)
logging.info('Processing finished')
def parse_args():
    """Build the command-line parser. Argument names are taken from how
    `args` is used in main()/process_data(); the defaults are illustrative
    assumptions."""
    parser = argparse.ArgumentParser(description='Triplet GNN inference and track labelling')
    parser.add_argument('--data_storage_path', required=True)
    parser.add_argument('--artifact_storage_path', required=True)
    parser.add_argument('--label_threshold', type=float, default=0.5)
    parser.add_argument('--epsilon', type=float, default=0.25)
    parser.add_argument('--n_tasks', type=int, default=1)
    parser.add_argument('--task', type=int, default=0)
    parser.add_argument('--verbose', action='store_true')
    return parser.parse_args()
if __name__ == '__main__':
    main(parse_args())
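# Example invocation (script name and values are placeholders):
#
#     python infer_triplets.py --data_storage_path /data \
#         --artifact_storage_path /artifacts \
#         --label_threshold 0.5 --epsilon 0.25 --n_tasks 1 --task 0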
|
[
"numpy.load",
"numpy.empty",
"GraphLearning.src.trainers.get_trainer",
"shutil.rmtree",
"os.path.join",
"numpy.unique",
"sklearn.cluster.DBSCAN",
"pandas.DataFrame",
"os.path.exists",
"numpy.append",
"scipy.sparse.dok_matrix",
"Seeding.src.utils.data_utils.load_summaries",
"functools.partial",
"numpy.save",
"os.path.basename",
"numpy.hstack",
"torch.cuda.is_available",
"multiprocessing.Pool",
"os.listdir",
"Seeding.src.utils.data_utils.load_config_dir",
"os.makedirs",
"logging.basicConfig",
"Seeding.src.utils.data_utils.get_seed_data_loader",
"time.time",
"logging.info",
"numpy.where",
"numpy.array",
"time.time.sleep"
] |
[((579, 604), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (602, 604), False, 'import torch\n'), ((755, 793), 'numpy.array', 'np.array', (['[g.i for g in graph_dataset]'], {}), '([g.i for g in graph_dataset])\n', (763, 793), True, 'import numpy as np\n'), ((809, 827), 'numpy.array', 'np.array', (['filelist'], {}), '(filelist)\n', (817, 827), True, 'import numpy as np\n'), ((1607, 1638), 'numpy.save', 'np.save', (['filename', 'triplet_list'], {}), '(filename, triplet_list)\n', (1614, 1638), True, 'import numpy as np\n'), ((1977, 2011), 'Seeding.src.utils.data_utils.load_config_dir', 'load_config_dir', (['triplet_artifacts'], {}), '(triplet_artifacts)\n', (1992, 2011), False, 'from Seeding.src.utils.data_utils import load_config_dir, load_summaries, get_seed_data_loader\n'), ((2016, 2074), 'logging.info', 'logging.info', (['"""Inferring triplets on model configuration:"""'], {}), "('Inferring triplets on model configuration:')\n", (2028, 2074), False, 'import logging\n'), ((2079, 2099), 'logging.info', 'logging.info', (['config'], {}), '(config)\n', (2091, 2099), False, 'import logging\n'), ((2143, 2165), 'Seeding.src.utils.data_utils.load_summaries', 'load_summaries', (['config'], {}), '(config)\n', (2157, 2165), False, 'from Seeding.src.utils.data_utils import load_config_dir, load_summaries, get_seed_data_loader\n'), ((2350, 2429), 'GraphLearning.src.trainers.get_trainer', 'get_trainer', ([], {'output_dir': "config['output_dir']", 'gpu': 'task_gpu'}), "(output_dir=config['output_dir'], gpu=task_gpu, **config['trainer'])\n", (2361, 2429), False, 'from GraphLearning.src.trainers import get_trainer\n'), ((2618, 2653), 'logging.info', 'logging.info', (['"""With weight system:"""'], {}), "('With weight system:')\n", (2630, 2653), False, 'import logging\n'), ((2658, 2685), 'logging.info', 'logging.info', (['trainer.model'], {}), '(trainer.model)\n', (2670, 2685), False, 'import logging\n'), ((2690, 2716), 'logging.info', 'logging.info', (['"""On device:"""'], {}), "('On device:')\n", (2702, 2716), False, 'import logging\n'), ((2721, 2749), 'logging.info', 'logging.info', (['trainer.device'], {}), '(trainer.device)\n', (2733, 2749), False, 'import logging\n'), ((2808, 2854), 'Seeding.src.utils.data_utils.get_seed_data_loader', 'get_seed_data_loader', (['load_path', 'n_tasks', 'task'], {}), '(load_path, n_tasks, task)\n', (2828, 2854), False, 'from Seeding.src.utils.data_utils import load_config_dir, load_summaries, get_seed_data_loader\n'), ((3283, 3299), 'numpy.empty', 'np.empty', (['(0, 3)'], {}), '((0, 3))\n', (3291, 3299), True, 'import numpy as np\n'), ((3830, 3858), 'pandas.DataFrame', 'pd.DataFrame', (['track_labels.T'], {}), '(track_labels.T)\n', (3842, 3858), True, 'import pandas as pd\n'), ((4685, 4753), 'scipy.sparse.dok_matrix', 'sp.sparse.dok_matrix', (['e_doublet_coo.shape'], {'dtype': 'e_doublet_coo.dtype'}), '(e_doublet_coo.shape, dtype=e_doublet_coo.dtype)\n', (4705, 4753), True, 'import scipy as sp\n'), ((5016, 5052), 'os.path.join', 'os.path.join', (['output_dir', 'event_name'], {}), '(output_dir, event_name)\n', (5028, 5052), False, 'import os\n'), ((5062, 5099), 'numpy.save', 'np.save', (['label_filename', 'track_labels'], {}), '(label_filename, track_labels)\n', (5069, 5099), True, 'import numpy as np\n'), ((7632, 7683), 'logging.info', 'logging.info', (['"""Running inference on triplet graphs"""'], {}), "('Running inference on triplet graphs')\n", (7644, 7683), False, 'import logging\n'), ((8019, 8053), 'logging.info', 'logging.info', 
(['"""Inference complete"""'], {}), "('Inference complete')\n", (8031, 8053), False, 'import logging\n'), ((8105, 8136), 'os.path.join', 'os.path.join', (['save_path', '"""temp"""'], {}), "(save_path, 'temp')\n", (8117, 8136), False, 'import os\n'), ((8412, 8443), 'logging.info', 'logging.info', (['"""All files saved"""'], {}), "('All files saved')\n", (8424, 8443), False, 'import logging\n'), ((9384, 9390), 'time.time', 'time', ([], {}), '()\n', (9388, 9390), False, 'from time import time\n'), ((9412, 9458), 'os.path.join', 'os.path.join', (['args.data_storage_path', '"""labels"""'], {}), "(args.data_storage_path, 'labels')\n", (9424, 9458), False, 'import os\n'), ((9475, 9529), 'os.path.join', 'os.path.join', (['args.data_storage_path', '"""triplet_graphs"""'], {}), "(args.data_storage_path, 'triplet_graphs')\n", (9487, 9529), False, 'import os\n'), ((9557, 9612), 'os.path.join', 'os.path.join', (['args.artifact_storage_path', '"""triplet_gnn"""'], {}), "(args.artifact_storage_path, 'triplet_gnn')\n", (9569, 9612), False, 'import os\n'), ((9618, 9655), 'os.makedirs', 'os.makedirs', (['save_path'], {'exist_ok': '(True)'}), '(save_path, exist_ok=True)\n', (9629, 9655), False, 'import os\n'), ((9807, 9862), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'log_level', 'format': 'log_format'}), '(level=log_level, format=log_format)\n', (9826, 9862), False, 'import logging\n'), ((9867, 9895), 'logging.info', 'logging.info', (['"""Initialising"""'], {}), "('Initialising')\n", (9879, 9895), False, 'import logging\n'), ((10018, 10053), 'logging.info', 'logging.info', (['"""Processing finished"""'], {}), "('Processing finished')\n", (10030, 10053), False, 'import logging\n'), ((1034, 1089), 'numpy.load', 'np.load', (["(graph_name[:-4] + '_ID.npz')"], {'allow_pickle': '(True)'}), "(graph_name[:-4] + '_ID.npz', allow_pickle=True)\n", (1041, 1089), True, 'import numpy as np\n'), ((1120, 1191), 'numpy.hstack', 'np.hstack', (['[g_ID[:, e[0, o > threshold]], g_ID[:, e[1, o > threshold]]]'], {}), '([g_ID[:, e[0, o > threshold]], g_ID[:, e[1, o > threshold]]])\n', (1129, 1191), True, 'import numpy as np\n'), ((1337, 1384), 'numpy.hstack', 'np.hstack', (['[o[o > threshold], o[o > threshold]]'], {}), '([o[o > threshold], o[o > threshold]])\n', (1346, 1384), True, 'import numpy as np\n'), ((3312, 3353), 'numpy.where', 'np.where', (['(split_names[:, 0] == event_name)'], {}), '(split_names[:, 0] == event_name)\n', (3320, 3353), True, 'import numpy as np\n'), ((3485, 3532), 'numpy.append', 'np.append', (['total_triplets', 'triplet_list'], {'axis': '(0)'}), '(total_triplets, triplet_list, axis=0)\n', (3494, 3532), True, 'import numpy as np\n'), ((8148, 8172), 'os.path.exists', 'os.path.exists', (['temp_dir'], {}), '(temp_dir)\n', (8162, 8172), False, 'import os\n'), ((8182, 8218), 'os.makedirs', 'os.makedirs', (['temp_dir'], {'exist_ok': '(True)'}), '(temp_dir, exist_ok=True)\n', (8193, 8218), False, 'import os\n'), ((8228, 8251), 'multiprocessing.Pool', 'mp.Pool', ([], {'processes': 'None'}), '(processes=None)\n', (8235, 8251), True, 'import multiprocessing as mp\n'), ((8282, 8359), 'functools.partial', 'partial', (['save_triplet_hitlist'], {'threshold': 'label_threshold', 'output_dir': 'temp_dir'}), '(save_triplet_hitlist, threshold=label_threshold, output_dir=temp_dir)\n', (8289, 8359), False, 'from functools import partial\n'), ((8787, 8807), 'os.listdir', 'os.listdir', (['temp_dir'], {}), '(temp_dir)\n', (8797, 8807), False, 'import os\n'), ((8935, 8963), 'numpy.unique', 'np.unique', 
(['split_names[:, 0]'], {}), '(split_names[:, 0])\n', (8944, 8963), True, 'import numpy as np\n'), ((9221, 9245), 'os.path.exists', 'os.path.exists', (['temp_dir'], {}), '(temp_dir)\n', (9235, 9245), False, 'import os\n'), ((3623, 3679), 'sklearn.cluster.DBSCAN', 'DBSCAN', ([], {'eps': 'epsilon', 'metric': '"""precomputed"""', 'min_samples': '(1)'}), "(eps=epsilon, metric='precomputed', min_samples=1)\n", (3629, 3679), False, 'from sklearn.cluster import DBSCAN\n'), ((8675, 8689), 'time.time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (8685, 8689), False, 'from time import time\n'), ((8977, 9000), 'multiprocessing.Pool', 'mp.Pool', ([], {'processes': 'None'}), '(processes=None)\n', (8984, 9000), True, 'import multiprocessing as mp\n'), ((9035, 9152), 'functools.partial', 'partial', (['process_event'], {'split_names': 'split_names', 'output_dir': 'save_path', 'label_cut': 'label_threshold', 'epsilon': 'epsilon'}), '(process_event, split_names=split_names, output_dir=save_path,\n label_cut=label_threshold, epsilon=epsilon)\n', (9042, 9152), False, 'from functools import partial\n'), ((9259, 9303), 'shutil.rmtree', 'shutil.rmtree', (['temp_dir'], {'ignore_errors': '(False)'}), '(temp_dir, ignore_errors=False)\n', (9272, 9303), False, 'import shutil\n'), ((1569, 1597), 'os.path.basename', 'os.path.basename', (['graph_name'], {}), '(graph_name)\n', (1585, 1597), False, 'import os\n'), ((8588, 8608), 'os.listdir', 'os.listdir', (['temp_dir'], {}), '(temp_dir)\n', (8598, 8608), False, 'import os\n'), ((8841, 8874), 'os.path.join', 'os.path.join', (['temp_dir', 'file[:-6]'], {}), '(temp_dir, file[:-6])\n', (8853, 8874), False, 'import os\n')]
|
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
from .common import *
class ChannelAttention(nn.Module):
def __init__(self, in_planes, ratio=8):
super(ChannelAttention, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.max_pool = nn.AdaptiveMaxPool2d(1)
        # Use 1x1 convolutions in place of fully connected layers
self.fc1 = nn.Conv2d(in_planes, in_planes // ratio, 1, bias=False)
self.relu1 = nn.ReLU()
self.fc2 = nn.Conv2d(in_planes // ratio, in_planes, 1, bias=False)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
avg_out = self.fc2(self.relu1(self.fc1(self.avg_pool(x))))
max_out = self.fc2(self.relu1(self.fc1(self.max_pool(x))))
out = avg_out + max_out
return self.sigmoid(out)
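# Shape sketch for ChannelAttention (assuming input of shape (B, C, H, W)):
# both pooling branches reduce to (B, C, 1, 1), the shared 1x1-conv bottleneck
# keeps that shape, and the returned sigmoid map broadcasts over H and W when
# multiplied with the input, e.g.
#
#     ca = ChannelAttention(64)
#     w = ca(torch.randn(2, 64, 32, 32))   # w.shape == (2, 64, 1, 1)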
class SpatialAttention(nn.Module):
def __init__(self, kernel_size=7):
super(SpatialAttention, self).__init__()
assert kernel_size in (3, 7), 'kernel size must be 3 or 7'
padding = 3 if kernel_size == 7 else 1
self.conv1 = nn.Conv2d(2, 1, kernel_size, padding=padding, bias=False)
self.sigmoid = nn.Sigmoid()
def forward(self, x):
avg_out = torch.mean(x, dim=1, keepdim=True)
max_out, _ = torch.max(x, dim=1, keepdim=True)
x = torch.cat([avg_out, max_out], dim=1)
x = self.conv1(x)
return self.sigmoid(x)
class CBAM(nn.Module):
def __init__(self, channel, ratio=8, kernel_size=7):
super(CBAM, self).__init__()
self.channelattention = ChannelAttention(channel, ratio=ratio)
self.downsample = nn.Conv2d(channel, channel // 2, 1, 1, 0)
self.spatialattention = SpatialAttention(kernel_size=kernel_size)
def forward(self, x):
x = x * self.channelattention(x)
x = self.downsample(x)
x = x * self.spatialattention(x)
return x
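# Note: unlike the textbook CBAM, this variant inserts a 1x1 conv between the
# two attention steps, halving the channel count. Illustrative shape check:
#
#     m = CBAM(64)
#     y = m(torch.randn(2, 64, 32, 32))    # y.shape == (2, 32, 32, 32)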
class HA(nn.Module):
def __init__(self, channels, scale=1):
super(HA, self).__init__()
head = []
for _ in range(int(math.log2(scale))):
head.append(ConvBlock(channels, channels))
head.append(ConvBlock(channels, channels, 4, 2, 1))
head.append(ConvBlock(channels, channels))
self.ha_head = nn.Sequential(*head)
# AvgPool
self.ha_blur = nn.Sequential(
nn.AvgPool2d(2),
nn.ConvTranspose2d(channels, channels, 4, 2, 1)
)
# act
self.act = torch.nn.PReLU()
self.attention = CBAM(channels * 2)
def forward(self, de_feature, sr_feature):
x = self.ha_head(de_feature)
blured = self.ha_blur(x)
acted = self.act(x - blured)
high_freq_feature = x + acted * x
concated = torch.cat([high_freq_feature, sr_feature], 1)
out = self.attention(concated)
return out
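# Reading of HA.forward: ha_blur is a crude low-pass filter (2x average pool
# followed by a learned 2x transposed-conv upsample), so x - blured
# approximates the high-frequency component; x + act(x - blured) * x then
# re-emphasises those high frequencies before the CBAM fusion with the SR
# branch. The head downsamples de_feature by `scale` so the two feature maps
# agree spatially.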
class CG(nn.Module):
def __init__(self, channels, scale=1, conv=default_conv):
super(CG, self).__init__()
self.cg_head_de = conv(channels, channels, 3)
self.cg_head_sr = nn.Sequential(
ConvBlock(channels, channels),
nn.Upsample(scale_factor=scale),
ConvBlock(channels, channels)
)
self.cg_head_sr_2 = ConvBlock(channels, channels)
# Conv + softmax, scored
self.scored = nn.Sequential(
nn.Conv2d(channels, channels, 3, 1, 1),
nn.Softmax(dim=1)
)
self.attention = CBAM(channels * 2)
def forward(self, de_feature, sr_feature):
sr_feature = self.cg_head_sr(sr_feature)
x = self.cg_head_de(de_feature)
y = self.cg_head_sr_2(sr_feature)
structure_score = self.scored(x - y)
sr_feature = sr_feature + y * structure_score
concated = torch.cat([sr_feature, de_feature], 1)
out = self.attention(concated)
return out
class BridgeNetV3(nn.Module):
def __init__(self, in_channels=1, img_channels=3, n_resblocks=4, n_feats=128, scale=4, conv=default_conv):
super(BridgeNetV3, self).__init__()
kernel_size = 3
act = nn.ReLU(True)
self.scale = scale
# define head module
self.sr_head = conv(in_channels, n_feats, kernel_size)
self.de_head = conv(img_channels, n_feats, kernel_size)
# define body module
for i in range(1, 5):
sr_blocks = [ResBlock(conv, n_feats, kernel_size) for _ in range(n_resblocks)]
de_blocks = [ResBlock(conv, n_feats, kernel_size) for _ in range(n_resblocks)]
sr_body = nn.Sequential(*sr_blocks)
de_body = nn.Sequential(*de_blocks)
setattr(self, f'sr_body_{i}', sr_body)
setattr(self, f'de_body_{i}', de_body)
self.sr_body_last = conv(n_feats, n_feats, kernel_size)
self.de_body_last = conv(n_feats, n_feats, kernel_size)
# define tail module
        assert scale & (scale - 1) == 0, "scales that are not powers of two are not supported."
for i in range(1, int(math.log2(scale)) + 1):
de_tail = nn.Sequential(
ResBlock(conv, n_feats, kernel_size)
)
sr_tail = nn.Sequential(
conv(n_feats, 4 * n_feats, 3),
nn.PixelShuffle(2)
)
setattr(self, f'sr_tail_{i}', sr_tail)
setattr(self, f'de_tail_{i}', de_tail)
self.sr_out = conv(n_feats, in_channels, kernel_size)
self.de_out = conv(n_feats, in_channels, kernel_size)
for i in range(1, 5):
setattr(self, f'correction_{i}', CG(n_feats, scale=scale))
for i in range(1, int(math.log2(scale)) + 1):
setattr(self, f'guidance_{i}', HA(n_feats, scale=(scale >> i)))
def forward(self, img, lr):
de_fea = self.de_head(img)
sr_fea = self.sr_head(lr)
sr_1 = self.sr_body_1(sr_fea)
sr_2 = self.sr_body_2(sr_1)
sr_3 = self.sr_body_3(sr_2)
sr_4 = self.sr_body_4(sr_3)
sr_fea = sr_fea + self.sr_body_last(sr_4)
de_1 = self.de_body_1(de_fea)
        de_2 = self.de_body_2(self.correction_1(de_1, sr_1))
        de_3 = self.de_body_3(self.correction_2(de_2, sr_2))
        de_4 = self.de_body_4(self.correction_3(de_3, sr_3))
de_fea = de_fea + self.de_body_last(self.correction_4(de_4, sr_4))
sr_up_1 = self.sr_tail_1(sr_fea)
de_up_1 = self.de_tail_1(de_fea)
sr_up_2 = self.sr_tail_2(self.guidance_1(de_up_1, sr_up_1))
de_up_2 = self.de_tail_2(de_up_1)
if self.scale == 4:
sr_out = self.sr_out(self.guidance_2(de_up_2, sr_up_2))
de_out = self.de_out(de_up_2)
else:
sr_up_3 = self.sr_tail_3(self.guidance_2(de_up_2, sr_up_2))
de_up_3 = self.de_tail_3(de_up_2)
sr_out = self.sr_out(self.guidance_3(de_up_3, sr_up_3))
de_out = self.de_out(de_up_3)
return de_out, sr_out
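# Minimal smoke test (illustrative only; sizes are assumptions and ConvBlock /
# ResBlock / default_conv come from .common):
#
#     net = BridgeNetV3(in_channels=1, img_channels=3, scale=4)
#     guide = torch.randn(1, 3, 64, 64)    # HR guidance image
#     lr = torch.randn(1, 1, 16, 16)       # low-res depth
#     depth, sr = net(guide, lr)
#     # both outputs come back at the guidance resolution, 1 x 1 x 64 x 64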
|
[
"torch.mean",
"torch.nn.AdaptiveAvgPool2d",
"torch.nn.PReLU",
"torch.nn.ReLU",
"torch.nn.ConvTranspose2d",
"torch.nn.Sequential",
"torch.nn.AdaptiveMaxPool2d",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.Upsample",
"torch.max",
"torch.nn.Softmax",
"torch.nn.PixelShuffle",
"torch.nn.AvgPool2d",
"math.log2",
"torch.nn.Sigmoid"
] |
[((256, 279), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (276, 279), True, 'import torch.nn as nn\n'), ((304, 327), 'torch.nn.AdaptiveMaxPool2d', 'nn.AdaptiveMaxPool2d', (['(1)'], {}), '(1)\n', (324, 327), True, 'import torch.nn as nn\n'), ((371, 426), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_planes', '(in_planes // ratio)', '(1)'], {'bias': '(False)'}), '(in_planes, in_planes // ratio, 1, bias=False)\n', (380, 426), True, 'import torch.nn as nn\n'), ((448, 457), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (455, 457), True, 'import torch.nn as nn\n'), ((477, 532), 'torch.nn.Conv2d', 'nn.Conv2d', (['(in_planes // ratio)', 'in_planes', '(1)'], {'bias': '(False)'}), '(in_planes // ratio, in_planes, 1, bias=False)\n', (486, 532), True, 'import torch.nn as nn\n'), ((557, 569), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (567, 569), True, 'import torch.nn as nn\n'), ((1057, 1114), 'torch.nn.Conv2d', 'nn.Conv2d', (['(2)', '(1)', 'kernel_size'], {'padding': 'padding', 'bias': '(False)'}), '(2, 1, kernel_size, padding=padding, bias=False)\n', (1066, 1114), True, 'import torch.nn as nn\n'), ((1138, 1150), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (1148, 1150), True, 'import torch.nn as nn\n'), ((1196, 1230), 'torch.mean', 'torch.mean', (['x'], {'dim': '(1)', 'keepdim': '(True)'}), '(x, dim=1, keepdim=True)\n', (1206, 1230), False, 'import torch\n'), ((1252, 1285), 'torch.max', 'torch.max', (['x'], {'dim': '(1)', 'keepdim': '(True)'}), '(x, dim=1, keepdim=True)\n', (1261, 1285), False, 'import torch\n'), ((1298, 1334), 'torch.cat', 'torch.cat', (['[avg_out, max_out]'], {'dim': '(1)'}), '([avg_out, max_out], dim=1)\n', (1307, 1334), False, 'import torch\n'), ((1608, 1649), 'torch.nn.Conv2d', 'nn.Conv2d', (['channel', '(channel // 2)', '(1)', '(1)', '(0)'], {}), '(channel, channel // 2, 1, 1, 0)\n', (1617, 1649), True, 'import torch.nn as nn\n'), ((2240, 2260), 'torch.nn.Sequential', 'nn.Sequential', (['*head'], {}), '(*head)\n', (2253, 2260), True, 'import torch.nn as nn\n'), ((2449, 2465), 'torch.nn.PReLU', 'torch.nn.PReLU', ([], {}), '()\n', (2463, 2465), False, 'import torch\n'), ((2726, 2771), 'torch.cat', 'torch.cat', (['[high_freq_feature, sr_feature]', '(1)'], {}), '([high_freq_feature, sr_feature], 1)\n', (2735, 2771), False, 'import torch\n'), ((3747, 3785), 'torch.cat', 'torch.cat', (['[sr_feature, de_feature]', '(1)'], {}), '([sr_feature, de_feature], 1)\n', (3756, 3785), False, 'import torch\n'), ((4093, 4106), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (4100, 4106), True, 'import torch.nn as nn\n'), ((2329, 2344), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', (['(2)'], {}), '(2)\n', (2341, 2344), True, 'import torch.nn as nn\n'), ((2358, 2405), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['channels', 'channels', '(4)', '(2)', '(1)'], {}), '(channels, channels, 4, 2, 1)\n', (2376, 2405), True, 'import torch.nn as nn\n'), ((3100, 3131), 'torch.nn.Upsample', 'nn.Upsample', ([], {'scale_factor': 'scale'}), '(scale_factor=scale)\n', (3111, 3131), True, 'import torch.nn as nn\n'), ((3326, 3364), 'torch.nn.Conv2d', 'nn.Conv2d', (['channels', 'channels', '(3)', '(1)', '(1)'], {}), '(channels, channels, 3, 1, 1)\n', (3335, 3364), True, 'import torch.nn as nn\n'), ((3378, 3395), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (3388, 3395), True, 'import torch.nn as nn\n'), ((4555, 4580), 'torch.nn.Sequential', 'nn.Sequential', (['*sr_blocks'], {}), '(*sr_blocks)\n', (4568, 4580), True, 'import torch.nn as nn\n'), 
((4603, 4628), 'torch.nn.Sequential', 'nn.Sequential', (['*de_blocks'], {}), '(*de_blocks)\n', (4616, 4628), True, 'import torch.nn as nn\n'), ((2027, 2043), 'math.log2', 'math.log2', (['scale'], {}), '(scale)\n', (2036, 2043), False, 'import math\n'), ((5236, 5254), 'torch.nn.PixelShuffle', 'nn.PixelShuffle', (['(2)'], {}), '(2)\n', (5251, 5254), True, 'import torch.nn as nn\n'), ((5008, 5024), 'math.log2', 'math.log2', (['scale'], {}), '(scale)\n', (5017, 5024), False, 'import math\n'), ((5629, 5645), 'math.log2', 'math.log2', (['scale'], {}), '(scale)\n', (5638, 5645), False, 'import math\n')]
|
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin
from django.db import models
from django.utils.translation import ugettext_lazy as _
from common.models import IndexedTimeStampedModel
from .managers import UserManager
class User(AbstractBaseUser, PermissionsMixin, IndexedTimeStampedModel):
email = models.EmailField(max_length=255, unique=True)
username = models.CharField(max_length=100, unique=True)
is_staff = models.BooleanField(
default=False,
help_text=_('Designates whether the user can log into this admin '
'site.'))
is_active = models.BooleanField(
default=True,
help_text=_('Designates whether this user should be treated as '
'active. Unselect this instead of deleting accounts.'))
objects = UserManager()
USERNAME_FIELD = 'email'
def get_full_name(self):
return self.email
def get_short_name(self):
return self.email
def __str__(self):
return self.email
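# Illustrative usage (assumes UserManager provides Django's conventional
# create_user/create_superuser methods):
#
#     user = User.objects.create_user(email='dev@example.com',
#                                     username='dev', password='...')
#     user.get_short_name()   # -> 'dev@example.com'
#
# USERNAME_FIELD = 'email' means authentication backends treat the email,
# not the username, as the login identifier.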
|
[
"django.db.models.CharField",
"django.utils.translation.ugettext_lazy",
"django.db.models.EmailField"
] |
[((332, 378), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(255)', 'unique': '(True)'}), '(max_length=255, unique=True)\n', (349, 378), False, 'from django.db import models\n'), ((394, 439), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (410, 439), False, 'from django.db import models\n'), ((517, 579), 'django.utils.translation.ugettext_lazy', '_', (['"""Designates whether the user can log into this admin site."""'], {}), "('Designates whether the user can log into this admin site.')\n", (518, 579), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((681, 792), 'django.utils.translation.ugettext_lazy', '_', (['"""Designates whether this user should be treated as active. Unselect this instead of deleting accounts."""'], {}), "('Designates whether this user should be treated as active. Unselect this instead of deleting accounts.'\n )\n", (682, 792), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
# SPDX-License-Identifier: GPL-2.0
#
# Sphinx has deprecated its older logging interface, but the replacement
# only goes back to 1.6. So here's a wrapper layer to keep around for
# as long as we support 1.4.
#
import sphinx
if sphinx.version_info[:2] >= (1, 6):  # tuple compare; a string compare misorders versions like '1.10'
UseLogging = True
from sphinx.util import logging
logger = logging.getLogger('kerneldoc')
else:
UseLogging = False
def warn(app, message):
if UseLogging:
logger.warning(message)
else:
app.warn(message)
def verbose(app, message):
if UseLogging:
logger.verbose(message)
else:
app.verbose(message)
def info(app, message):
if UseLogging:
logger.info(message)
else:
app.info(message)
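# Typical call sites pass the Sphinx application object through, e.g. from a
# directive's run() method (the import path is illustrative):
#
#     from . import kernellog
#     kernellog.warn(app, "kernel-doc: no documentation for %s" % name)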
|
[
"sphinx.util.logging.getLogger"
] |
[((334, 364), 'sphinx.util.logging.getLogger', 'logging.getLogger', (['"""kerneldoc"""'], {}), "('kerneldoc')\n", (351, 364), False, 'from sphinx.util import logging\n')]
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License
import io
import json
import uuid
from copy import copy
from datetime import timedelta
from enum import Enum, unique
from typing import Union, Callable
import requests
from requests.adapters import HTTPAdapter
from ._version import VERSION
from .data_format import DataFormat
from .exceptions import KustoServiceError
from .response import KustoResponseDataSetV1, KustoResponseDataSetV2, KustoResponseDataSet
from .security import _AadHelper
class KustoConnectionStringBuilder:
"""
Parses Kusto connection strings.
For usages, check out the sample at:
https://github.com/Azure/azure-kusto-python/blob/master/azure-kusto-data/tests/sample.py
"""
@unique
class ValidKeywords(Enum):
"""
        Set of properties that can be used in a connection string provided to KustoConnectionStringBuilder.
For a complete list of properties go to https://docs.microsoft.com/en-us/azure/kusto/api/connection-strings/kusto
"""
data_source = "Data Source"
aad_federated_security = "AAD Federated Security"
aad_user_id = "AAD User ID"
password = "Password"
application_client_id = "Application Client Id"
application_key = "Application Key"
application_certificate = "Application Certificate"
application_certificate_thumbprint = "Application Certificate Thumbprint"
authority_id = "Authority Id"
application_token = "Application Token"
user_token = "User Token"
msi_auth = "MSI Authentication"
msi_params = "MSI Params"
az_cli = "AZ CLI"
@classmethod
def parse(cls, key: str) -> "ValidKeywords":
"""Create a valid keyword."""
key = key.lower().strip()
if key in ["data source", "addr", "address", "network address", "server"]:
return cls.data_source
if key in ["aad user id"]:
return cls.aad_user_id
if key in ["password", "pwd"]:
return cls.password
if key in ["application client id", "appclientid"]:
return cls.application_client_id
if key in ["application key", "appkey"]:
return cls.application_key
if key in ["application certificate"]:
return cls.application_certificate
if key in ["application certificate thumbprint"]:
return cls.application_certificate_thumbprint
if key in ["authority id", "authorityid", "authority", "tenantid", "tenant", "tid"]:
return cls.authority_id
if key in ["aad federated security", "federated security", "federated", "fed", "aadfed"]:
return cls.aad_federated_security
if key in ["application token", "apptoken"]:
return cls.application_token
if key in ["user token", "usertoken", "usrtoken"]:
return cls.user_token
if key in ["msi_auth"]:
return cls.msi_auth
if key in ["msi_type"]:
return cls.msi_params
raise KeyError(key)
def is_secret(self) -> bool:
"""States for each property if it contains secret"""
return self in [self.password, self.application_key, self.application_certificate, self.application_token, self.user_token]
def is_str_type(self) -> bool:
"""States whether a word is of type str or not."""
return self in [
self.aad_user_id,
self.application_certificate,
self.application_certificate_thumbprint,
self.application_client_id,
self.data_source,
self.password,
self.application_key,
self.authority_id,
self.application_token,
self.user_token,
]
def is_dict_type(self) -> bool:
return self in [self.msi_params]
def is_bool_type(self) -> bool:
"""States whether a word is of type bool or not."""
return self in [self.aad_federated_security, self.msi_auth, self.az_cli]
def __init__(self, connection_string: str):
"""
Creates new KustoConnectionStringBuilder.
        :param str connection_string: Kusto connection string should be of the format:
https://<clusterName>.kusto.windows.net;AAD User ID="<EMAIL>@microsoft.<EMAIL>";Password=<PASSWORD>
For more information please look at:
https://kusto.azurewebsites.net/docs/concepts/kusto_connection_strings.html
"""
_assert_value_is_valid(connection_string)
self._internal_dict = {}
self._token_provider = None
if connection_string is not None and "=" not in connection_string.partition(";")[0]:
connection_string = "Data Source=" + connection_string
self[self.ValidKeywords.authority_id] = "common"
for kvp_string in connection_string.split(";"):
key, _, value = kvp_string.partition("=")
keyword = self.ValidKeywords.parse(key)
if keyword.is_str_type():
self[keyword] = value.strip()
if keyword.is_bool_type():
if value.strip() in ["True", "true"]:
self[keyword] = True
elif value.strip() in ["False", "false"]:
self[keyword] = False
else:
raise KeyError("Expected aad federated security to be bool. Recieved %s" % value)
def __setitem__(self, key, value):
try:
keyword = key if isinstance(key, self.ValidKeywords) else self.ValidKeywords.parse(key)
except KeyError:
raise KeyError("%s is not supported as an item in KustoConnectionStringBuilder" % key)
if value is None:
raise TypeError("Value cannot be None.")
if keyword.is_str_type():
self._internal_dict[keyword] = value.strip()
elif keyword.is_bool_type():
if not isinstance(value, bool):
raise TypeError("Expected %s to be bool" % key)
self._internal_dict[keyword] = value
elif keyword.is_dict_type():
if not isinstance(value, dict):
raise TypeError("Expected %s to be dict" % key)
self._internal_dict[keyword] = value
else:
raise KeyError("KustoConnectionStringBuilder supports only bools and strings.")
@classmethod
def with_aad_user_password_authentication(
cls, connection_string: str, user_id: str, password: str, authority_id: str = "common"
) -> "KustoConnectionStringBuilder":
"""
Creates a KustoConnection string builder that will authenticate with AAD user name and
password.
        :param str connection_string: Kusto connection string should be of the format: https://<clusterName>.kusto.windows.net
:param str user_id: AAD user ID.
:param str password: <PASSWORD> of the AAD <EMAIL>.
:param str authority_id: optional param. defaults to "common"
"""
_assert_value_is_valid(user_id)
_assert_value_is_valid(password)
kcsb = cls(connection_string)
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
kcsb[kcsb.ValidKeywords.aad_user_id] = user_id
kcsb[kcsb.ValidKeywords.password] = password
kcsb[kcsb.ValidKeywords.authority_id] = authority_id
return kcsb
@classmethod
def with_aad_user_token_authentication(cls, connection_string: str, user_token: str) -> "KustoConnectionStringBuilder":
"""
        Creates a KustoConnection string builder that will authenticate with an
        AAD user token.
        :param str connection_string: Kusto connection string should be of the format:
https://<clusterName>.kusto.windows.net
:param str user_token: AAD user token.
"""
_assert_value_is_valid(user_token)
kcsb = cls(connection_string)
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
kcsb[kcsb.ValidKeywords.user_token] = user_token
return kcsb
@classmethod
def with_aad_application_key_authentication(
cls, connection_string: str, aad_app_id: str, app_key: str, authority_id: str
) -> "KustoConnectionStringBuilder":
"""
Creates a KustoConnection string builder that will authenticate with AAD application and key.
        :param str connection_string: Kusto connection string should be of the format: https://<clusterName>.kusto.windows.net
:param str aad_app_id: AAD application ID.
:param str app_key: Corresponding key of the AAD application.
:param str authority_id: Authority id (aka Tenant id) must be provided
"""
_assert_value_is_valid(aad_app_id)
_assert_value_is_valid(app_key)
_assert_value_is_valid(authority_id)
kcsb = cls(connection_string)
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
kcsb[kcsb.ValidKeywords.application_client_id] = aad_app_id
kcsb[kcsb.ValidKeywords.application_key] = app_key
kcsb[kcsb.ValidKeywords.authority_id] = authority_id
return kcsb
@classmethod
def with_aad_application_certificate_authentication(
cls, connection_string: str, aad_app_id: str, certificate: str, thumbprint: str, authority_id: str
) -> "KustoConnectionStringBuilder":
"""
Creates a KustoConnection string builder that will authenticate with AAD application and
a certificate credentials.
        :param str connection_string: Kusto connection string should be of the format:
https://<clusterName>.kusto.windows.net
:param str aad_app_id: AAD application ID.
:param str certificate: A PEM encoded certificate private key.
:param str thumbprint: hex encoded thumbprint of the certificate.
:param str authority_id: Authority id (aka Tenant id) must be provided
"""
_assert_value_is_valid(aad_app_id)
_assert_value_is_valid(certificate)
_assert_value_is_valid(thumbprint)
_assert_value_is_valid(authority_id)
kcsb = cls(connection_string)
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
kcsb[kcsb.ValidKeywords.application_client_id] = aad_app_id
kcsb[kcsb.ValidKeywords.application_certificate] = certificate
kcsb[kcsb.ValidKeywords.application_certificate_thumbprint] = thumbprint
kcsb[kcsb.ValidKeywords.authority_id] = authority_id
return kcsb
@classmethod
def with_aad_application_token_authentication(cls, connection_string: str, application_token: str) -> "KustoConnectionStringBuilder":
"""
Creates a KustoConnection string builder that will authenticate with AAD application and
an application token.
        :param str connection_string: Kusto connection string should be of the format:
https://<clusterName>.kusto.windows.net
:param str application_token: AAD application token.
"""
_assert_value_is_valid(application_token)
kcsb = cls(connection_string)
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
kcsb[kcsb.ValidKeywords.application_token] = application_token
return kcsb
@classmethod
def with_aad_device_authentication(cls, connection_string: str, authority_id: str = "common") -> "KustoConnectionStringBuilder":
"""
        Creates a KustoConnection string builder that will authenticate using the
        AAD device code flow.
        :param str connection_string: Kusto connection string should be of the format: https://<clusterName>.kusto.windows.net
:param str authority_id: optional param. defaults to "common"
"""
kcsb = cls(connection_string)
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
kcsb[kcsb.ValidKeywords.authority_id] = authority_id
return kcsb
@classmethod
def with_az_cli_authentication(cls, connection_string: str) -> "KustoConnectionStringBuilder":
"""
        Creates a KustoConnection string builder that will use an existing
        authenticated Azure CLI profile.
        :param str connection_string: Kusto connection string should be of the format: https://<clusterName>.kusto.windows.net
"""
kcsb = cls(connection_string)
kcsb[kcsb.ValidKeywords.az_cli] = True
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
return kcsb
@classmethod
def with_aad_managed_service_identity_authentication(
cls, connection_string: str, client_id: str = None, object_id: str = None, msi_res_id: str = None, timeout: int = None
) -> "KustoConnectionStringBuilder":
""""
Creates a KustoConnection string builder that will authenticate with AAD application, using
        an application token obtained from a Managed Service Identity (MSI) endpoint. An optional user
assigned application ID can be added to the token.
        :param str connection_string: Kusto connection string should be of the format: https://<clusterName>.kusto.windows.net
:param client_id: an optional user assigned identity provided as an Azure ID of a client
:param object_id: an optional user assigned identity provided as an Azure ID of an object
:param msi_res_id: an optional user assigned identity provided as an Azure ID of an MSI resource
:param timeout: an optional timeout (seconds) to wait for an MSI Authentication to occur
"""
kcsb = cls(connection_string)
params = {"resource": kcsb.data_source}
exclusive_pcount = 0
if timeout is not None:
params["timeout"] = timeout
if client_id is not None:
params["client_id"] = client_id
exclusive_pcount += 1
if object_id is not None:
params["object_id"] = object_id
exclusive_pcount += 1
if msi_res_id is not None:
params["msi_res_id"] = msi_res_id
exclusive_pcount += 1
if exclusive_pcount > 1:
raise ValueError("the following parameters are mutually exclusive and can not be provided at the same time: user_uid, object_id, msi_res_id")
kcsb[kcsb.ValidKeywords.aad_federated_security] = True
kcsb[kcsb.ValidKeywords.msi_auth] = True
kcsb[kcsb.ValidKeywords.msi_params] = params
return kcsb
@classmethod
def with_token_provider(cls, connection_string: str, token_provider: Callable[[], str]) -> "KustoConnectionStringBuilder":
"""
Create a KustoConnectionStringBuilder that uses a callback function to obtain a connection token
        :param str connection_string: Kusto connection string should be of the format: https://<clusterName>.kusto.windows.net
:param token_provider: a parameterless function that returns a valid bearer token for the relevant kusto resource as a string
"""
assert callable(token_provider)
kcsb = cls(connection_string)
kcsb._token_provider = token_provider
return kcsb
@property
def data_source(self) -> str:
"""The URI specifying the Kusto service endpoint.
For example, https://kuskus.kusto.windows.net or net.tcp://localhost
"""
return self._internal_dict.get(self.ValidKeywords.data_source)
@property
def aad_user_id(self) -> str:
"""The username to use for AAD Federated AuthN."""
return self._internal_dict.get(self.ValidKeywords.aad_user_id)
@property
def password(self) -> str:
"""The password to use for authentication when username/password authentication is used.
Must be accompanied by UserID property
"""
return self._internal_dict.get(self.ValidKeywords.password)
@property
def application_client_id(self) -> str:
"""The application client id to use for authentication when federated
authentication is used.
"""
return self._internal_dict.get(self.ValidKeywords.application_client_id)
@property
def application_key(self) -> str:
"""The application key to use for authentication when federated authentication is used"""
return self._internal_dict.get(self.ValidKeywords.application_key)
@property
def application_certificate(self) -> str:
"""A PEM encoded certificate private key."""
return self._internal_dict.get(self.ValidKeywords.application_certificate)
@application_certificate.setter
def application_certificate(self, value):
self[self.ValidKeywords.application_certificate] = value
@property
def application_certificate_thumbprint(self):
"""hex encoded thumbprint of the certificate."""
return self._internal_dict.get(self.ValidKeywords.application_certificate_thumbprint)
@application_certificate_thumbprint.setter
def application_certificate_thumbprint(self, value):
self[self.ValidKeywords.application_certificate_thumbprint] = value
@property
def authority_id(self):
"""The ID of the AAD tenant where the application is configured.
(should be supplied only for non-Microsoft tenant)"""
return self._internal_dict.get(self.ValidKeywords.authority_id)
@authority_id.setter
def authority_id(self, value):
self[self.ValidKeywords.authority_id] = value
@property
def aad_federated_security(self):
"""A Boolean value that instructs the client to perform AAD federated authentication."""
return self._internal_dict.get(self.ValidKeywords.aad_federated_security)
@property
def user_token(self):
"""User token."""
return self._internal_dict.get(self.ValidKeywords.user_token)
@property
def application_token(self):
"""Application token."""
return self._internal_dict.get(self.ValidKeywords.application_token)
@property
def msi_authentication(self):
""" A value stating the MSI identity type to obtain """
return self._internal_dict.get(self.ValidKeywords.msi_auth)
@property
def msi_parameters(self):
""" A user assigned MSI ID to be obtained """
return self._internal_dict.get(self.ValidKeywords.msi_params)
@property
def az_cli(self):
return self._internal_dict.get(self.ValidKeywords.az_cli)
@property
def token_provider(self):
return self._token_provider
def __str__(self):
dict_copy = self._internal_dict.copy()
for key in dict_copy:
if key.is_secret():
dict_copy[key] = "****"
return self._build_connection_string(dict_copy)
def __repr__(self):
return self._build_connection_string(self._internal_dict)
def _build_connection_string(self, kcsb_as_dict) -> str:
return ";".join(["{0}={1}".format(word.value, kcsb_as_dict[word]) for word in self.ValidKeywords if word in kcsb_as_dict])
def _assert_value_is_valid(value):
if not value or not value.strip():
raise ValueError("Should not be empty")
class ClientRequestProperties:
"""This class is a POD used by client making requests to describe specific needs from the service executing the requests.
For more information please look at: https://docs.microsoft.com/en-us/azure/kusto/api/netfx/request-properties
"""
results_defer_partial_query_failures_option_name = "deferpartialqueryfailures"
request_timeout_option_name = "servertimeout"
def __init__(self):
self._options = {}
self._parameters = {}
self.client_request_id = None
self.application = None
self.user = None
def set_parameter(self, name: str, value: str):
"""Sets a parameter's value"""
_assert_value_is_valid(name)
self._parameters[name] = value
def has_parameter(self, name):
"""Checks if a parameter is specified."""
return name in self._parameters
def get_parameter(self, name, default_value):
"""Gets a parameter's value."""
return self._parameters.get(name, default_value)
def set_option(self, name, value):
"""Sets an option's value"""
_assert_value_is_valid(name)
self._options[name] = value
def has_option(self, name):
"""Checks if an option is specified."""
return name in self._options
def get_option(self, name, default_value):
"""Gets an option's value."""
return self._options.get(name, default_value)
def to_json(self):
"""Safe serialization to a JSON string."""
return json.dumps({"Options": self._options, "Parameters": self._parameters}, default=str)
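# Sketch: attaching per-request options and parameters (names other than the
# two class constants above are service-defined):
#
#     props = ClientRequestProperties()
#     props.set_option(ClientRequestProperties.request_timeout_option_name,
#                      timedelta(minutes=1))
#     props.set_parameter("startTime", "2020-01-01")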
class KustoClient:
"""
Kusto client for Python.
The client is a wrapper around the Kusto REST API.
To read more about it, go to https://docs.microsoft.com/en-us/azure/kusto/api/rest/
The primary methods are:
`execute_query`: executes a KQL query against the Kusto service.
`execute_mgmt`: executes a KQL control command against the Kusto service.
"""
_mgmt_default_timeout = timedelta(hours=1, seconds=30)
_query_default_timeout = timedelta(minutes=4, seconds=30)
_streaming_ingest_default_timeout = timedelta(minutes=10)
# The maximum amount of connections to be able to operate in parallel
_max_pool_size = 100
def __init__(self, kcsb: Union[KustoConnectionStringBuilder, str]):
"""
Kusto Client constructor.
:param kcsb: The connection string to initialize KustoClient.
:type kcsb: azure.kusto.data.KustoConnectionStringBuilder or str
"""
if not isinstance(kcsb, KustoConnectionStringBuilder):
kcsb = KustoConnectionStringBuilder(kcsb)
kusto_cluster = kcsb.data_source
# Create a session object for connection pooling
self._session = requests.Session()
self._session.mount("http://", HTTPAdapter(pool_maxsize=self._max_pool_size))
self._session.mount("https://", HTTPAdapter(pool_maxsize=self._max_pool_size))
self._mgmt_endpoint = "{0}/v1/rest/mgmt".format(kusto_cluster)
self._query_endpoint = "{0}/v2/rest/query".format(kusto_cluster)
self._streaming_ingest_endpoint = "{0}/v1/rest/ingest/".format(kusto_cluster)
# notice that in this context, federated actually just stands for add auth, not aad federated auth (legacy code)
self._auth_provider = _AadHelper(kcsb) if kcsb.aad_federated_security else None
self._request_headers = {"Accept": "application/json", "Accept-Encoding": "gzip,deflate", "x-ms-client-version": "Kusto.Python.Client:" + VERSION}
def execute(self, database: str, query: str, properties: ClientRequestProperties = None) -> KustoResponseDataSet:
"""
Executes a query or management command.
:param str database: Database against query will be executed.
:param str query: Query to be executed.
:param azure.kusto.data.ClientRequestProperties properties: Optional additional properties.
:return: Kusto response data set.
:rtype: azure.kusto.data.response.KustoResponseDataSet
"""
query = query.strip()
if query.startswith("."):
return self.execute_mgmt(database, query, properties)
return self.execute_query(database, query, properties)
def execute_query(self, database: str, query: str, properties: ClientRequestProperties = None) -> KustoResponseDataSet:
"""
Execute a KQL query.
To learn more about KQL go to https://docs.microsoft.com/en-us/azure/kusto/query/
:param str database: Database against query will be executed.
:param str query: Query to be executed.
:param azure.kusto.data.ClientRequestProperties properties: Optional additional properties.
:return: Kusto response data set.
:rtype: azure.kusto.data.response.KustoResponseDataSet
"""
return self._execute(self._query_endpoint, database, query, None, KustoClient._query_default_timeout, properties)
def execute_mgmt(self, database: str, query: str, properties: ClientRequestProperties = None) -> KustoResponseDataSet:
"""
Execute a KQL control command.
To learn more about KQL control commands go to https://docs.microsoft.com/en-us/azure/kusto/management/
:param str database: Database against query will be executed.
:param str query: Query to be executed.
:param azure.kusto.data.ClientRequestProperties properties: Optional additional properties.
:return: Kusto response data set.
:rtype: azure.kusto.data.response.KustoResponseDataSet
"""
return self._execute(self._mgmt_endpoint, database, query, None, KustoClient._mgmt_default_timeout, properties)
def execute_streaming_ingest(
self,
database: str,
table: str,
stream: io.IOBase,
stream_format: Union[DataFormat, str],
properties: ClientRequestProperties = None,
mapping_name: str = None,
):
"""
Execute streaming ingest against this client
If the Kusto service is not configured to allow streaming ingestion, this may raise an error
To learn more about streaming ingestion go to:
https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-streaming
:param str database: Target database.
:param str table: Target table.
:param io.BaseIO stream: stream object which contains the data to ingest.
:param DataFormat stream_format: Format of the data in the stream.
:param ClientRequestProperties properties: additional request properties.
:param str mapping_name: Pre-defined mapping of the table. Required when stream_format is json/avro.
"""
stream_format = stream_format.value if isinstance(stream_format, DataFormat) else DataFormat(stream_format.lower()).value
endpoint = self._streaming_ingest_endpoint + database + "/" + table + "?streamFormat=" + stream_format
if mapping_name is not None:
endpoint = endpoint + "&mappingName=" + mapping_name
self._execute(endpoint, database, None, stream, KustoClient._streaming_ingest_default_timeout, properties)
def _execute(self, endpoint: str, database: str, query: str, payload: io.IOBase, timeout: timedelta, properties: ClientRequestProperties = None):
"""Executes given query against this client"""
request_headers = copy(self._request_headers)
json_payload = None
if not payload:
json_payload = {"db": database, "csl": query}
if properties:
json_payload["properties"] = properties.to_json()
client_request_id_prefix = "KPC.execute;"
request_headers["Content-Type"] = "application/json; charset=utf-8"
else:
if properties:
request_headers.update(json.loads(properties.to_json())["Options"])
client_request_id_prefix = "KPC.execute_streaming_ingest;"
request_headers["Content-Encoding"] = "gzip"
request_headers["x-ms-client-request-id"] = client_request_id_prefix + str(uuid.uuid4())
if properties is not None:
if properties.client_request_id is not None:
request_headers["x-ms-client-request-id"] = properties.client_request_id
if properties.application is not None:
request_headers["x-ms-app"] = properties.application
if properties.user is not None:
request_headers["x-ms-user"] = properties.user
if self._auth_provider:
request_headers["Authorization"] = self._auth_provider.acquire_authorization_header()
if properties:
timeout = properties.get_option(ClientRequestProperties.request_timeout_option_name, timeout)
response = self._session.post(endpoint, headers=request_headers, data=payload, json=json_payload, timeout=timeout.seconds)
if response.status_code == 200:
if endpoint.endswith("v2/rest/query"):
return KustoResponseDataSetV2(response.json())
return KustoResponseDataSetV1(response.json())
if payload:
raise KustoServiceError(
"An error occurred while trying to ingest: Status: {0.status_code}, Reason: {0.reason}, Text: {0.text}".format(response), response
)
raise KustoServiceError([response.json()], response)
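# Illustrative end-to-end usage (cluster URI, database and query are
# placeholders):
#
#     kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(
#         "https://help.kusto.windows.net")
#     client = KustoClient(kcsb)
#     response = client.execute("Samples", "StormEvents | take 10")
#     for row in response.primary_results[0]:
#         print(row)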
|
[
"uuid.uuid4",
"requests.adapters.HTTPAdapter",
"requests.Session",
"copy.copy",
"json.dumps",
"datetime.timedelta"
] |
[((21924, 21954), 'datetime.timedelta', 'timedelta', ([], {'hours': '(1)', 'seconds': '(30)'}), '(hours=1, seconds=30)\n', (21933, 21954), False, 'from datetime import timedelta\n'), ((21985, 22017), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(4)', 'seconds': '(30)'}), '(minutes=4, seconds=30)\n', (21994, 22017), False, 'from datetime import timedelta\n'), ((22059, 22080), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (22068, 22080), False, 'from datetime import timedelta\n'), ((21410, 21497), 'json.dumps', 'json.dumps', (["{'Options': self._options, 'Parameters': self._parameters}"], {'default': 'str'}), "({'Options': self._options, 'Parameters': self._parameters},\n default=str)\n", (21420, 21497), False, 'import json\n'), ((22711, 22729), 'requests.Session', 'requests.Session', ([], {}), '()\n', (22727, 22729), False, 'import requests\n'), ((27438, 27465), 'copy.copy', 'copy', (['self._request_headers'], {}), '(self._request_headers)\n', (27442, 27465), False, 'from copy import copy\n'), ((22770, 22815), 'requests.adapters.HTTPAdapter', 'HTTPAdapter', ([], {'pool_maxsize': 'self._max_pool_size'}), '(pool_maxsize=self._max_pool_size)\n', (22781, 22815), False, 'from requests.adapters import HTTPAdapter\n'), ((22858, 22903), 'requests.adapters.HTTPAdapter', 'HTTPAdapter', ([], {'pool_maxsize': 'self._max_pool_size'}), '(pool_maxsize=self._max_pool_size)\n', (22869, 22903), False, 'from requests.adapters import HTTPAdapter\n'), ((28158, 28170), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (28168, 28170), False, 'import uuid\n')]
|
import time
from typing import TYPE_CHECKING, List, Optional, Union, Dict, Any
from decimal import Decimal
import attr
from .json_db import StoredObject
from .i18n import _
from .util import age, InvoiceError
try:
    from .lnaddr import lndecode  # used by LNInvoice's invoice validator below
except ImportError:
    # lightning address support may be absent in this tree; guard so the
    # module still imports (LNInvoice validation is skipped in that case)
    lndecode = None
from . import constants
from .bitcoin import COIN, TOTAL_COIN_SUPPLY_LIMIT_IN_URX
from .transaction import PartialTxOutput
if TYPE_CHECKING:
from .paymentrequest import PaymentRequest
# convention: 'invoices' = outgoing , 'request' = incoming
# types of payment requests
PR_TYPE_ONCHAIN = 0
PR_TYPE_LN = 2
# status of payment requests
PR_UNPAID = 0 # if onchain: invoice amt not reached by txs in mempool+chain. if LN: invoice not paid.
PR_EXPIRED = 1 # invoice is unpaid and expiry time reached
PR_UNKNOWN = 2 # e.g. invoice not found
PR_PAID = 3 # if onchain: paid and mined (1 conf). if LN: invoice is paid.
PR_INFLIGHT = 4 # only for LN. payment attempt in progress
PR_FAILED = 5 # only for LN. we attempted to pay it, but all attempts failed
PR_ROUTING = 6 # only for LN. *unused* atm.
PR_UNCONFIRMED = 7 # only onchain. invoice is satisfied but tx is not mined yet.
pr_color = {
PR_UNPAID: (.7, .7, .7, 1),
PR_PAID: (.2, .9, .2, 1),
PR_UNKNOWN: (.7, .7, .7, 1),
PR_EXPIRED: (.9, .2, .2, 1),
PR_INFLIGHT: (.9, .6, .3, 1),
PR_FAILED: (.9, .2, .2, 1),
PR_ROUTING: (.9, .6, .3, 1),
PR_UNCONFIRMED: (.9, .6, .3, 1),
}
pr_tooltips = {
    PR_UNPAID: _('Unpaid'),
    PR_PAID: _('Paid'),
    PR_UNKNOWN: _('Unknown'),
    PR_EXPIRED: _('Expired'),
    PR_INFLIGHT: _('In progress'),
    PR_FAILED: _('Failed'),
PR_ROUTING: _('Computing route...'),
PR_UNCONFIRMED: _('Unconfirmed'),
}
PR_DEFAULT_EXPIRATION_WHEN_CREATING = 0  # 0 == never expires (see pr_expiration_values)
pr_expiration_values = {
0: _('Never'),
# 10*60: _('10 minutes'),
# 60*60: _('1 hour'),
# 24*60*60: _('1 day'),
# 7*24*60*60: _('1 week'),
}
assert PR_DEFAULT_EXPIRATION_WHEN_CREATING in pr_expiration_values
def _decode_outputs(outputs) -> List[PartialTxOutput]:
ret = []
for output in outputs:
if not isinstance(output, PartialTxOutput):
output = PartialTxOutput.from_legacy_tuple(*output)
ret.append(output)
return ret
# hack: BOLT-11 is not really clear on what an expiry of 0 means.
# It probably interprets it as 0 seconds, so already expired...
# Our higher level invoices code however uses 0 for "never".
# Hence set some high expiration here
LN_EXPIRY_NEVER = 100 * 365 * 24 * 60 * 60 # 100 years
@attr.s
class Invoice(StoredObject):
type = attr.ib(type=int, kw_only=True)
message: str
exp: int
time: int
def is_lightning(self):
return self.type == PR_TYPE_LN
def get_status_str(self, status):
status_str = pr_tooltips[status]
if status == PR_UNPAID:
if self.exp > 0 and self.exp != LN_EXPIRY_NEVER:
expiration = self.exp + self.time
status_str = _('Expires') + ' ' + age(expiration, include_seconds=True)
return status_str
def get_amount_sat(self) -> Union[int, Decimal, str, None]:
"""Returns a decimal satoshi amount, or '!' or None."""
raise NotImplementedError()
@classmethod
def from_json(cls, x: dict) -> 'Invoice':
# note: these raise if x has extra fields
if x.get('type') == PR_TYPE_LN:
return LNInvoice(**x)
else:
return OnchainInvoice(**x)
@attr.s
class OnchainInvoice(Invoice):
message = attr.ib(type=str, kw_only=True)
amount_sat = attr.ib(kw_only=True) # type: Union[int, str] # in satoshis. can be '!'
exp = attr.ib(type=int, kw_only=True, validator=attr.validators.instance_of(int))
time = attr.ib(type=int, kw_only=True, validator=attr.validators.instance_of(int))
id = attr.ib(type=str, kw_only=True)
outputs = attr.ib(kw_only=True, converter=_decode_outputs) # type: List[PartialTxOutput]
bip70 = attr.ib(type=str, kw_only=True) # type: Optional[str]
requestor = attr.ib(type=str, kw_only=True) # type: Optional[str]
height = attr.ib(type=int, kw_only=True, validator=attr.validators.instance_of(int))
def get_address(self) -> str:
"""returns the first address, to be displayed in GUI"""
return self.outputs[0].address
def get_amount_sat(self) -> Union[int, str]:
return self.amount_sat or 0
@amount_sat.validator
def _validate_amount(self, attribute, value):
if isinstance(value, int):
if not (0 <= value <= TOTAL_COIN_SUPPLY_LIMIT_IN_URX * COIN):
raise InvoiceError(f"amount is out-of-bounds: {value!r} sat")
elif isinstance(value, str):
if value != '!':
raise InvoiceError(f"unexpected amount: {value!r}")
else:
raise InvoiceError(f"unexpected amount: {value!r}")
@classmethod
def from_bip70_payreq(cls, pr: 'PaymentRequest', height:int) -> 'OnchainInvoice':
return OnchainInvoice(
type=PR_TYPE_ONCHAIN,
amount_sat=pr.get_amount(),
outputs=pr.get_outputs(),
message=pr.get_memo(),
id=pr.get_id(),
time=pr.get_time(),
exp=pr.get_expiration_date() - pr.get_time(),
bip70=pr.raw.hex(),
requestor=pr.get_requestor(),
height=height,
)
@attr.s
class LNInvoice(Invoice):
invoice = attr.ib(type=str)
amount_msat = attr.ib(kw_only=True) # type: Optional[int] # needed for zero amt invoices
__lnaddr = None
@invoice.validator
def _validate_invoice_str(self, attribute, value):
lndecode(value) # this checks the str can be decoded
@amount_msat.validator
def _validate_amount(self, attribute, value):
if value is None:
return
if isinstance(value, int):
if not (0 <= value <= TOTAL_COIN_SUPPLY_LIMIT_IN_URX * COIN * 1000):
raise InvoiceError(f"amount is out-of-bounds: {value!r} msat")
else:
raise InvoiceError(f"unexpected amount: {value!r}")
    @property
    def _lnaddr(self) -> LnAddr:
        # restored from the commented-out original: without it the stacked
        # @property decorators broke rhash, and self._lnaddr did not exist
        if self.__lnaddr is None:
            self.__lnaddr = lndecode(self.invoice)
        return self.__lnaddr
    @property
def rhash(self) -> str:
return self._lnaddr.paymenthash.hex()
def get_amount_msat(self) -> Optional[int]:
amount_btc = self._lnaddr.amount
amount = int(amount_btc * COIN * 1000) if amount_btc else None
return amount or self.amount_msat
def get_amount_sat(self) -> Union[Decimal, None]:
amount_msat = self.get_amount_msat()
if amount_msat is None:
return None
return Decimal(amount_msat) / 1000
@property
def exp(self) -> int:
return self._lnaddr.get_expiry()
@property
def time(self) -> int:
return self._lnaddr.date
@property
def message(self) -> str:
return self._lnaddr.get_description()
@classmethod
def from_bech32(cls, invoice: str) -> 'LNInvoice':
"""Constructs LNInvoice object from BOLT-11 string.
Might raise InvoiceError.
"""
try:
lnaddr = lndecode(invoice)
except Exception as e:
raise InvoiceError(e) from e
amount_msat = lnaddr.get_amount_msat()
return LNInvoice(
type=PR_TYPE_LN,
invoice=invoice,
amount_msat=amount_msat,
)
def to_debug_json(self) -> Dict[str, Any]:
d = self.to_json()
d.update({
'pubkey': self._lnaddr.pubkey.serialize().hex(),
'amount_BTC': str(self._lnaddr.amount),
'rhash': self._lnaddr.paymenthash.hex(),
'description': self._lnaddr.get_description(),
'exp': self._lnaddr.get_expiry(),
'time': self._lnaddr.date,
# 'tags': str(lnaddr.tags),
})
return d
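# --- Editor's usage sketch (hypothetical values, not part of the original
# module): constructing an onchain invoice by hand and querying it.
#
#     inv = OnchainInvoice(
#         type=PR_TYPE_ONCHAIN, amount_sat=1000, exp=0, time=0, id='0',
#         outputs=[], bip70=None, requestor=None, height=0)
#     inv.get_amount_sat()           # -> 1000
#     inv.get_status_str(PR_UNPAID)  # -> 'Unpaid' (exp == 0 means no expiry)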
|
[
"attr.validators.instance_of",
"attr.ib",
"decimal.Decimal"
] |
[((2591, 2622), 'attr.ib', 'attr.ib', ([], {'type': 'int', 'kw_only': '(True)'}), '(type=int, kw_only=True)\n', (2598, 2622), False, 'import attr\n'), ((3534, 3565), 'attr.ib', 'attr.ib', ([], {'type': 'str', 'kw_only': '(True)'}), '(type=str, kw_only=True)\n', (3541, 3565), False, 'import attr\n'), ((3583, 3604), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (3590, 3604), False, 'import attr\n'), ((3839, 3870), 'attr.ib', 'attr.ib', ([], {'type': 'str', 'kw_only': '(True)'}), '(type=str, kw_only=True)\n', (3846, 3870), False, 'import attr\n'), ((3885, 3933), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)', 'converter': '_decode_outputs'}), '(kw_only=True, converter=_decode_outputs)\n', (3892, 3933), False, 'import attr\n'), ((3977, 4008), 'attr.ib', 'attr.ib', ([], {'type': 'str', 'kw_only': '(True)'}), '(type=str, kw_only=True)\n', (3984, 4008), False, 'import attr\n'), ((4048, 4079), 'attr.ib', 'attr.ib', ([], {'type': 'str', 'kw_only': '(True)'}), '(type=str, kw_only=True)\n', (4055, 4079), False, 'import attr\n'), ((5452, 5469), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (5459, 5469), False, 'import attr\n'), ((5488, 5509), 'attr.ib', 'attr.ib', ([], {'kw_only': '(True)'}), '(kw_only=True)\n', (5495, 5509), False, 'import attr\n'), ((3709, 3741), 'attr.validators.instance_of', 'attr.validators.instance_of', (['int'], {}), '(int)\n', (3736, 3741), False, 'import attr\n'), ((3796, 3828), 'attr.validators.instance_of', 'attr.validators.instance_of', (['int'], {}), '(int)\n', (3823, 3828), False, 'import attr\n'), ((4158, 4190), 'attr.validators.instance_of', 'attr.validators.instance_of', (['int'], {}), '(int)\n', (4185, 4190), False, 'import attr\n'), ((6752, 6772), 'decimal.Decimal', 'Decimal', (['amount_msat'], {}), '(amount_msat)\n', (6759, 6772), False, 'from decimal import Decimal\n')]
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mongoengine as me
from st2common.models.db import MongoDBAccess
from st2common.models.db import stormbase
from st2common.constants.types import ResourceType
__all__ = [
'PackDB',
'ConfigSchemaDB',
'ConfigDB'
]
class PackDB(stormbase.StormFoundationDB, stormbase.UIDFieldMixin):
"""
System entity which represents a pack.
"""
RESOURCE_TYPE = ResourceType.PACK
UID_FIELDS = ['ref']
ref = me.StringField(required=True, unique=True)
name = me.StringField(required=True, unique=True)
description = me.StringField(required=True)
keywords = me.ListField(field=me.StringField())
version = me.StringField(required=True) # TODO: Enforce format
author = me.StringField(required=True)
email = me.EmailField(required=True)
files = me.ListField(field=me.StringField())
meta = {
'indexes': stormbase.UIDFieldMixin.get_indexes()
}
def __init__(self, *args, **values):
super(PackDB, self).__init__(*args, **values)
self.uid = self.get_uid()
class ConfigSchemaDB(stormbase.StormFoundationDB):
"""
System entity representing a config schema for a particular pack.
"""
pack = me.StringField(
required=True,
unique=True,
help_text='Name of the content pack this schema belongs to.')
attributes = stormbase.EscapedDynamicField(
help_text='The specification for config schema attributes.')
class ConfigDB(stormbase.StormFoundationDB):
"""
System entity representing pack config.
"""
pack = me.StringField(
required=True,
unique=True,
help_text='Name of the content pack this config belongs to.')
values = stormbase.EscapedDynamicField(
help_text='Config values.')
# specialized access objects
pack_access = MongoDBAccess(PackDB)
config_schema_access = MongoDBAccess(ConfigSchemaDB)
config_access = MongoDBAccess(ConfigDB)
MODELS = [PackDB, ConfigSchemaDB, ConfigDB]
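# --- Editor's usage sketch (hypothetical, not part of the original module):
# PackDB derives its uid from RESOURCE_TYPE plus the UID_FIELDS ('ref'),
# which is why __init__ above calls self.get_uid(). Roughly:
#
#     pack = PackDB(ref='my_pack', name='my_pack', description='demo pack',
#                   version='0.1.0', author='me', email='me@example.com')
#     pack.uid  # -> something like 'pack:my_pack'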
|
[
"st2common.models.db.stormbase.EscapedDynamicField",
"st2common.models.db.stormbase.UIDFieldMixin.get_indexes",
"mongoengine.EmailField",
"st2common.models.db.MongoDBAccess",
"mongoengine.StringField"
] |
[((2591, 2612), 'st2common.models.db.MongoDBAccess', 'MongoDBAccess', (['PackDB'], {}), '(PackDB)\n', (2604, 2612), False, 'from st2common.models.db import MongoDBAccess\n'), ((2636, 2665), 'st2common.models.db.MongoDBAccess', 'MongoDBAccess', (['ConfigSchemaDB'], {}), '(ConfigSchemaDB)\n', (2649, 2665), False, 'from st2common.models.db import MongoDBAccess\n'), ((2682, 2705), 'st2common.models.db.MongoDBAccess', 'MongoDBAccess', (['ConfigDB'], {}), '(ConfigDB)\n', (2695, 2705), False, 'from st2common.models.db import MongoDBAccess\n'), ((1215, 1257), 'mongoengine.StringField', 'me.StringField', ([], {'required': '(True)', 'unique': '(True)'}), '(required=True, unique=True)\n', (1229, 1257), True, 'import mongoengine as me\n'), ((1269, 1311), 'mongoengine.StringField', 'me.StringField', ([], {'required': '(True)', 'unique': '(True)'}), '(required=True, unique=True)\n', (1283, 1311), True, 'import mongoengine as me\n'), ((1330, 1359), 'mongoengine.StringField', 'me.StringField', ([], {'required': '(True)'}), '(required=True)\n', (1344, 1359), True, 'import mongoengine as me\n'), ((1426, 1455), 'mongoengine.StringField', 'me.StringField', ([], {'required': '(True)'}), '(required=True)\n', (1440, 1455), True, 'import mongoengine as me\n'), ((1493, 1522), 'mongoengine.StringField', 'me.StringField', ([], {'required': '(True)'}), '(required=True)\n', (1507, 1522), True, 'import mongoengine as me\n'), ((1535, 1563), 'mongoengine.EmailField', 'me.EmailField', ([], {'required': '(True)'}), '(required=True)\n', (1548, 1563), True, 'import mongoengine as me\n'), ((1971, 2080), 'mongoengine.StringField', 'me.StringField', ([], {'required': '(True)', 'unique': '(True)', 'help_text': '"""Name of the content pack this schema belongs to."""'}), "(required=True, unique=True, help_text=\n 'Name of the content pack this schema belongs to.')\n", (1985, 2080), True, 'import mongoengine as me\n'), ((2118, 2213), 'st2common.models.db.stormbase.EscapedDynamicField', 'stormbase.EscapedDynamicField', ([], {'help_text': '"""The specification for config schema attributes."""'}), "(help_text=\n 'The specification for config schema attributes.')\n", (2147, 2213), False, 'from st2common.models.db import stormbase\n'), ((2336, 2445), 'mongoengine.StringField', 'me.StringField', ([], {'required': '(True)', 'unique': '(True)', 'help_text': '"""Name of the content pack this config belongs to."""'}), "(required=True, unique=True, help_text=\n 'Name of the content pack this config belongs to.')\n", (2350, 2445), True, 'import mongoengine as me\n'), ((2479, 2536), 'st2common.models.db.stormbase.EscapedDynamicField', 'stormbase.EscapedDynamicField', ([], {'help_text': '"""Config values."""'}), "(help_text='Config values.')\n", (2508, 2536), False, 'from st2common.models.db import stormbase\n'), ((1646, 1683), 'st2common.models.db.stormbase.UIDFieldMixin.get_indexes', 'stormbase.UIDFieldMixin.get_indexes', ([], {}), '()\n', (1681, 1683), False, 'from st2common.models.db import stormbase\n'), ((1394, 1410), 'mongoengine.StringField', 'me.StringField', ([], {}), '()\n', (1408, 1410), True, 'import mongoengine as me\n'), ((1595, 1611), 'mongoengine.StringField', 'me.StringField', ([], {}), '()\n', (1609, 1611), True, 'import mongoengine as me\n')]
|
from evaluation import ResultsEvaluation
if __name__ == "__main__":
dataset = 'dodh'
table = 'base_models'
scorers_folder = "./scorers/{}/".format(dataset)
results_folder = f"./results/{dataset}/{table + '/' if table is not None else ''}"
if dataset == 'dodo':
result_evaluation = ResultsEvaluation(
scorers_folder=scorers_folder,
results_folder=results_folder
)
elif dataset == 'dodh':
result_evaluation = ResultsEvaluation(
scorers_folder=scorers_folder,
results_folder=results_folder,
lights_off={
"63b799f6-8a4f-4224-8797-ea971f78fb53": 60,
"de3af7b1-ab6f-43fd-96f0-6fc64e8d2ed4": 60,
},
lights_on={
"a14f8058-f636-4be7-a67a-8f7f91a419e7": 620,
}
)
result_evaluation.print_soft_agreements()
result_evaluation.print_scores()
|
[
"evaluation.ResultsEvaluation"
] |
[((310, 389), 'evaluation.ResultsEvaluation', 'ResultsEvaluation', ([], {'scorers_folder': 'scorers_folder', 'results_folder': 'results_folder'}), '(scorers_folder=scorers_folder, results_folder=results_folder)\n', (327, 389), False, 'from evaluation import ResultsEvaluation\n'), ((481, 732), 'evaluation.ResultsEvaluation', 'ResultsEvaluation', ([], {'scorers_folder': 'scorers_folder', 'results_folder': 'results_folder', 'lights_off': "{'63b799f6-8a4f-4224-8797-ea971f78fb53': 60,\n 'de3af7b1-ab6f-43fd-96f0-6fc64e8d2ed4': 60}", 'lights_on': "{'a14f8058-f636-4be7-a67a-8f7f91a419e7': 620}"}), "(scorers_folder=scorers_folder, results_folder=\n results_folder, lights_off={'63b799f6-8a4f-4224-8797-ea971f78fb53': 60,\n 'de3af7b1-ab6f-43fd-96f0-6fc64e8d2ed4': 60}, lights_on={\n 'a14f8058-f636-4be7-a67a-8f7f91a419e7': 620})\n", (498, 732), False, 'from evaluation import ResultsEvaluation\n')]
|
# Import required stuff
from cvxpy import *
import numpy as np
import matplotlib.pyplot as plt
# In this problem, we need both y.txt and beta0.txt, so read them from input files.
# Please note that the path has to be changed accordingly before running.
with open('/home/akilesh/Desktop/Akilesh_opt/y.txt') as f:
y = []
for line in f:
line = line.split() # to deal with blank
if line: # lines (ie skip them)
line = [float(i) for i in line]
y.append(line)
with open('/home/akilesh/Desktop/Akilesh_opt/beta0.txt') as f:
betaorg = []
for line in f:
line = line.split() # to deal with blank
if line: # lines (ie skip them)
line = [float(i) for i in line]
betaorg.append(line)
# Define 100 logarithmically spaced values from 10^-2 to 10^1.
lvals = np.logspace(-2, 1, 100)
mseList = [] # List for storing MSEs as we vary the lambda.
changepointsList = [] # List for storing changepoint as we vary the lambda.
lvalsList = [] # List for storing beta computed as we vary the lambda.
for val in lvals:
# As in prev problem, beta is the variable.
beta = Variable(1, 100)
error = sum_squares(y - beta)
error = error/2
obj = Minimize(error + val*tv(beta))
prob = Problem(obj)
prob.solve()
# to compute mse for a particular lambda.
# Threshold is defined as 10^-8.
mse = 0
thresh = 0.00000001
changepoints = 0
# Iterate over points
for i in range(0, 100):
req = (beta[i].value - betaorg[i][0])
        a = abs(req).value  # note: `from cvxpy import *` shadows the builtin, so this is cvxpy's abs atom
# Compute MSE.
mse = mse + req ** 2
# If abs. value is greater than thresh
if(a > thresh):
changepoints = changepoints + 1
# Appending stuff to corresponding list
mse = mse / 100
mseList.append(mse)
changepointsList.append(changepoints)
lvalsList.append(val)
#print(mseList)
# For plotting - mse vs lambda.
plt.subplot(211)
plt.plot(lvalsList, mseList)
plt.xlabel('lambda ')
plt.ylabel('mse')
plt.title(' mse vs lambda ')
# For plotting - changepoints vs lambda.
plt.subplot(212)
plt.plot(lvalsList, changepointsList)
plt.xlabel('lambda ')
plt.ylabel(' changepoints')
plt.title(' changepoints vs lambda ')
plt.tight_layout()
plt.show()
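# --- Editor's note (hypothetical port, not part of the original script) ---
# The script above uses the pre-1.0 cvxpy API (`Variable(1, 100)` and the
# star import). Under cvxpy >= 1.0 the same total-variation (fused lasso)
# objective would read roughly:
#
#     import cvxpy as cp
#     beta = cp.Variable(100)
#     prob = cp.Problem(cp.Minimize(cp.sum_squares(y_vec - beta) / 2
#                                 + lam * cp.tv(beta)))
#     prob.solve()
#
# where y_vec is the 100-element numpy vector read from y.txt and lam is
# one of the values in lvals.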
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"numpy.logspace",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.tight_layout"
] |
[((865, 888), 'numpy.logspace', 'np.logspace', (['(-2)', '(1)', '(100)'], {}), '(-2, 1, 100)\n', (876, 888), True, 'import numpy as np\n'), ((1869, 1885), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (1880, 1885), True, 'import matplotlib.pyplot as plt\n'), ((1886, 1914), 'matplotlib.pyplot.plot', 'plt.plot', (['lvalsList', 'mseList'], {}), '(lvalsList, mseList)\n', (1894, 1914), True, 'import matplotlib.pyplot as plt\n'), ((1915, 1936), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""lambda """'], {}), "('lambda ')\n", (1925, 1936), True, 'import matplotlib.pyplot as plt\n'), ((1937, 1954), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""mse"""'], {}), "('mse')\n", (1947, 1954), True, 'import matplotlib.pyplot as plt\n'), ((1955, 1983), 'matplotlib.pyplot.title', 'plt.title', (['""" mse vs lambda """'], {}), "(' mse vs lambda ')\n", (1964, 1983), True, 'import matplotlib.pyplot as plt\n'), ((2026, 2042), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (2037, 2042), True, 'import matplotlib.pyplot as plt\n'), ((2043, 2080), 'matplotlib.pyplot.plot', 'plt.plot', (['lvalsList', 'changepointsList'], {}), '(lvalsList, changepointsList)\n', (2051, 2080), True, 'import matplotlib.pyplot as plt\n'), ((2081, 2102), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""lambda """'], {}), "('lambda ')\n", (2091, 2102), True, 'import matplotlib.pyplot as plt\n'), ((2103, 2130), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['""" changepoints"""'], {}), "(' changepoints')\n", (2113, 2130), True, 'import matplotlib.pyplot as plt\n'), ((2131, 2168), 'matplotlib.pyplot.title', 'plt.title', (['""" changepoints vs lambda """'], {}), "(' changepoints vs lambda ')\n", (2140, 2168), True, 'import matplotlib.pyplot as plt\n'), ((2170, 2188), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2186, 2188), True, 'import matplotlib.pyplot as plt\n'), ((2189, 2199), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2197, 2199), True, 'import matplotlib.pyplot as plt\n')]
|
#!/usr/bin/env python
import json
import os
import re
import math
import traceback
import urllib.parse as urlparse
from ..common import *
__all__ = ['huaban_download']
site_info = '花瓣 (Huaban)'
LIMIT = 100
class Board:
def __init__(self, title, pins):
self.title = title
self.pins = pins
self.pin_count = len(pins)
class Pin:
host = 'http://img.hb.aicdn.com/'
def __init__(self, pin_json):
img_file = pin_json['file']
self.id = str(pin_json['pin_id'])
self.url = urlparse.urljoin(self.host, img_file['key'])
self.ext = img_file['type'].split('/')[-1]
def construct_url(url, **params):
param_str = urlparse.urlencode(params)
return url + '?' + param_str
def extract_json_data(url, **params):
url = construct_url(url, **params)
html = get_content(url, headers=fake_headers)
json_string = match1(html, r'app.page\["board"\] = (.*?});')
json_data = json.loads(json_string)
return json_data
def extract_board_data(url):
json_data = extract_json_data(url, limit=LIMIT)
pin_list = json_data['pins']
title = json_data['title']
pin_count = json_data['pin_count']
pin_count -= len(pin_list)
while pin_count > 0:
json_data = extract_json_data(url, max=pin_list[-1]['pin_id'],
limit=LIMIT)
pins = json_data['pins']
pin_list += pins
pin_count -= len(pins)
return Board(title, list(map(Pin, pin_list)))
def huaban_download_board(url, output_dir, **kwargs):
kwargs['merge'] = False
board = extract_board_data(url)
output_dir = os.path.join(output_dir, board.title)
print_info(site_info, board.title, 'jpg', float('Inf'))
for pin in board.pins:
download_urls([pin.url], pin.id, pin.ext, float('Inf'),
output_dir=output_dir, faker=True, **kwargs)
def huaban_download(url, output_dir='.', **kwargs):
if re.match(r'http://huaban\.com/boards/\d+/', url):
huaban_download_board(url, output_dir, **kwargs)
else:
print('Only board (画板) pages are supported currently')
print('ex: http://huaban.com/boards/12345678/')
download = huaban_download
download_playlist = playlist_not_supported("huaban")
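# --- Editor's note (illustration, not part of the original module) ---
# construct_url just appends url-encoded query parameters, e.g.:
#
#     construct_url('http://huaban.com/boards/12345678/', limit=100)
#     # -> 'http://huaban.com/boards/12345678/?limit=100'
#
# extract_board_data uses this, passing max=<last pin id> on each iteration,
# to page through a board LIMIT pins at a time.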
|
[
"urllib.parse.urljoin",
"json.loads",
"urllib.parse.urlencode",
"re.match",
"os.path.join"
] |
[((681, 707), 'urllib.parse.urlencode', 'urlparse.urlencode', (['params'], {}), '(params)\n', (699, 707), True, 'import urllib.parse as urlparse\n'), ((951, 974), 'json.loads', 'json.loads', (['json_string'], {}), '(json_string)\n', (961, 974), False, 'import json\n'), ((1638, 1675), 'os.path.join', 'os.path.join', (['output_dir', 'board.title'], {}), '(output_dir, board.title)\n', (1650, 1675), False, 'import os\n'), ((1955, 2004), 're.match', 're.match', (['"""http://huaban\\\\.com/boards/\\\\d+/"""', 'url'], {}), "('http://huaban\\\\.com/boards/\\\\d+/', url)\n", (1963, 2004), False, 'import re\n'), ((533, 577), 'urllib.parse.urljoin', 'urlparse.urljoin', (['self.host', "img_file['key']"], {}), "(self.host, img_file['key'])\n", (549, 577), True, 'import urllib.parse as urlparse\n')]
|
import os
import codecs
import base64
import string
import random
import logging
import argparse
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
KEY_LENGTH = 32
IV_LENGTH = 16
BLOCK_SIZE = 128
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handle = logging.StreamHandler()
handle.setLevel(logging.INFO)
handle.setFormatter(logging.Formatter('%(asctime)s: %(message)s'))
logger.addHandler(handle)
def define_params():
parser = argparse.ArgumentParser()
parser.add_argument('file', help='path to file that you want to target.')
parser.add_argument('-e', action='store_true',
help='encrypts the provided file.')
parser.add_argument('-d', action='store_true',
help='decrypts the provided file.')
parser.add_argument('--new', action='store_true', help='creates new ' +
'random keys in case of encryption.')
return parser.parse_args()
# use this to generate new random keys as hex strings carrying `size` bytes.
def get_random_string(size):
    # The keys are hex-decoded later, so only hex digits are valid here, and
    # `size` bytes require 2 * size hex characters. (The original drew `size`
    # characters from A-Z0-9, which made codecs.decode(..., 'hex') fail.)
    return ''.join(random.SystemRandom().choice(string.hexdigits[:16])
                   for _ in range(size * 2)).encode('utf8')
def adjust_padding(data, block_size, unpad=False):
if not unpad:
padder = padding.PKCS7(block_size).padder()
padded_data = padder.update(data.encode('utf8'))
padded_data += padder.finalize()
return padded_data
unpadder = padding.PKCS7(block_size).unpadder()
data = unpadder.update(data)
data += unpadder.finalize()
return data.decode('utf8')
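# --- Editor's note (illustration): BLOCK_SIZE is in bits, so PKCS7 pads to
# 16-byte multiples here; e.g. adjust_padding('abc', BLOCK_SIZE) returns
# b'abc' + b'\x0d' * 13, and adjust_padding(..., unpad=True) reverses it.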
def encrypt_file(filename, new_keys=False):
'''
encrypts file.
params
------
filename: str
path to filename to encrypt.
    new_keys: bool (default: False)
        creates a new AES key/IV pair if True, otherwise expects them
        from the AES_KEY / AES_IV environment variables.
'''
if not os.path.isfile(filename):
exit('File doesn\'t exist: %s' % (filename))
if new_keys:
aes_key = get_random_string(KEY_LENGTH)
aes_iv = get_random_string(IV_LENGTH)
else:
if not os.environ.get('AES_KEY'):
exit('`AES_KEY` doesn\'t exist in environment variables.')
if not os.environ.get('AES_IV'):
exit('`AES_IV` doesn\'t exist in environment variables.')
aes_key = os.environ.get('AES_KEY').encode('utf8')
aes_iv = os.environ.get('AES_IV').encode('utf8')
try:
aes_key = codecs.decode(aes_key, 'hex')
aes_iv = codecs.decode(aes_iv, 'hex')
except Exception as exc:
exit('Could not convert hex keys to string.')
if len(aes_key) != KEY_LENGTH or len(aes_iv) != IV_LENGTH:
exit('Invalid AES key/iv detected.')
content = open(filename, 'r').read()
content = adjust_padding(content, BLOCK_SIZE)
cipher = Cipher(algorithms.AES(aes_key), modes.CBC(aes_iv),
backend=default_backend())
encryptor = cipher.encryptor()
ciphertext = encryptor.update(content) + encryptor.finalize()
ciphertext = base64.b64encode(ciphertext)
name, ext = os.path.splitext(filename)
filename = filename + '.enc' if ext != '.enc' else name + '.enc'
open(filename, 'wb').write(ciphertext + b'\n')
open('_keys', 'w').writelines([
'export AES_KEY=%s\n' % (aes_key.hex()),
'export AES_IV=%s\n' % (aes_iv.hex())
])
logger.info('Successfully encrypted file in: %s', filename)
logger.info('Encryption keys can be found in: %s', '_keys')
def decrypt_file(filename, write_to_file=True, is_ciphertext=False):
'''
decrypts file.
params
------
filename: str
path to filename to decrypt.
write_to_file: bool (default: True)
writes decrypted content to '_' + filename if True.
Otherwise, prints to std.
is_ciphertext: bool (default: False)
skips reading filename and consider it a text string
if True, otherwise reads file.
returns
-------
str
if write_to_file is False
'''
if not os.environ.get('AES_KEY'):
exit('`AES_KEY` doesn\'t exist in environment variables.')
if not os.environ.get('AES_IV'):
exit('`AES_IV` doesn\'t exist in environment variables.')
aes_key = os.environ.get('AES_KEY').encode('utf8')
aes_iv = os.environ.get('AES_IV').encode('utf8')
try:
aes_key = codecs.decode(aes_key, 'hex')
aes_iv = codecs.decode(aes_iv, 'hex')
except Exception as exc:
exit('Could not convert hex keys to string.')
if not is_ciphertext:
if not os.path.isfile(filename):
exit('File doesn\'t exist: %s' % (filename))
ciphertext = open(filename, 'rb').read()
else:
ciphertext = filename
ciphertext = base64.b64decode(ciphertext)
cipher = Cipher(algorithms.AES(aes_key), modes.CBC(aes_iv),
backend=default_backend())
decryptor = cipher.decryptor()
try:
content = decryptor.update(ciphertext) + decryptor.finalize()
except ValueError as exc:
exit('ValueError: %s\n' % (str(exc)))
content = adjust_padding(content, BLOCK_SIZE, unpad=True)
if write_to_file and not is_ciphertext:
name, ext = os.path.splitext(filename)
filename = name + '.dec' if ext != '.dec' else name
open(filename, 'w').write(content)
logger.info('Successfully decrypted file in: %s', filename)
else:
return content
def main():
params = define_params()
if params.e:
encrypt_file(params.file, params.new)
elif params.d:
        decrypt_file(params.file)
if __name__ == '__main__':
    main()  # entry point: without this call the CLI defined above never runs
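# --- Editor's usage sketch (hypothetical file names, not part of the
# original script): a typical round trip, assuming this file is crypt.py.
#
#     $ python crypt.py secrets.txt -e --new   # -> secrets.txt.enc and _keys
#     $ source _keys                           # exports AES_KEY / AES_IV
#     $ python crypt.py secrets.txt.enc -d     # -> secrets.txt.dec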
|
[
"cryptography.hazmat.primitives.padding.PKCS7",
"random.SystemRandom",
"argparse.ArgumentParser",
"codecs.decode",
"logging.StreamHandler",
"cryptography.hazmat.primitives.ciphers.modes.CBC",
"base64.b64decode",
"cryptography.hazmat.primitives.ciphers.algorithms.AES",
"logging.Formatter",
"os.environ.get",
"os.path.isfile",
"base64.b64encode",
"os.path.splitext",
"cryptography.hazmat.backends.default_backend",
"logging.getLogger"
] |
[((342, 369), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (359, 369), False, 'import logging\n'), ((410, 433), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (431, 433), False, 'import logging\n'), ((484, 529), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s: %(message)s"""'], {}), "('%(asctime)s: %(message)s')\n", (501, 529), False, 'import logging\n'), ((593, 618), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (616, 618), False, 'import argparse\n'), ((3200, 3228), 'base64.b64encode', 'base64.b64encode', (['ciphertext'], {}), '(ciphertext)\n', (3216, 3228), False, 'import base64\n'), ((3246, 3272), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (3262, 3272), False, 'import os\n'), ((4927, 4955), 'base64.b64decode', 'base64.b64decode', (['ciphertext'], {}), '(ciphertext)\n', (4943, 4955), False, 'import base64\n'), ((2012, 2036), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (2026, 2036), False, 'import os\n'), ((2991, 3014), 'cryptography.hazmat.primitives.ciphers.algorithms.AES', 'algorithms.AES', (['aes_key'], {}), '(aes_key)\n', (3005, 3014), False, 'from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n'), ((3016, 3033), 'cryptography.hazmat.primitives.ciphers.modes.CBC', 'modes.CBC', (['aes_iv'], {}), '(aes_iv)\n', (3025, 3033), False, 'from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n'), ((4201, 4226), 'os.environ.get', 'os.environ.get', (['"""AES_KEY"""'], {}), "('AES_KEY')\n", (4215, 4226), False, 'import os\n'), ((4306, 4330), 'os.environ.get', 'os.environ.get', (['"""AES_IV"""'], {}), "('AES_IV')\n", (4320, 4330), False, 'import os\n'), ((4535, 4564), 'codecs.decode', 'codecs.decode', (['aes_key', '"""hex"""'], {}), "(aes_key, 'hex')\n", (4548, 4564), False, 'import codecs\n'), ((4582, 4610), 'codecs.decode', 'codecs.decode', (['aes_iv', '"""hex"""'], {}), "(aes_iv, 'hex')\n", (4595, 4610), False, 'import codecs\n'), ((4976, 4999), 'cryptography.hazmat.primitives.ciphers.algorithms.AES', 'algorithms.AES', (['aes_key'], {}), '(aes_key)\n', (4990, 4999), False, 'from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n'), ((5001, 5018), 'cryptography.hazmat.primitives.ciphers.modes.CBC', 'modes.CBC', (['aes_iv'], {}), '(aes_iv)\n', (5010, 5018), False, 'from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes\n'), ((5386, 5412), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (5402, 5412), False, 'import os\n'), ((1570, 1595), 'cryptography.hazmat.primitives.padding.PKCS7', 'padding.PKCS7', (['block_size'], {}), '(block_size)\n', (1583, 1595), False, 'from cryptography.hazmat.primitives import padding\n'), ((2229, 2254), 'os.environ.get', 'os.environ.get', (['"""AES_KEY"""'], {}), "('AES_KEY')\n", (2243, 2254), False, 'import os\n'), ((2342, 2366), 'os.environ.get', 'os.environ.get', (['"""AES_IV"""'], {}), "('AES_IV')\n", (2356, 2366), False, 'import os\n'), ((2591, 2620), 'codecs.decode', 'codecs.decode', (['aes_key', '"""hex"""'], {}), "(aes_key, 'hex')\n", (2604, 2620), False, 'import codecs\n'), ((2642, 2670), 'codecs.decode', 'codecs.decode', (['aes_iv', '"""hex"""'], {}), "(aes_iv, 'hex')\n", (2655, 2670), False, 'import codecs\n'), ((3063, 3080), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (3078, 3080), False, 'from cryptography.hazmat.backends import 
default_backend\n'), ((4413, 4438), 'os.environ.get', 'os.environ.get', (['"""AES_KEY"""'], {}), "('AES_KEY')\n", (4427, 4438), False, 'import os\n'), ((4467, 4491), 'os.environ.get', 'os.environ.get', (['"""AES_IV"""'], {}), "('AES_IV')\n", (4481, 4491), False, 'import os\n'), ((4736, 4760), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (4750, 4760), False, 'import os\n'), ((5048, 5065), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (5063, 5065), False, 'from cryptography.hazmat.backends import default_backend\n'), ((1394, 1419), 'cryptography.hazmat.primitives.padding.PKCS7', 'padding.PKCS7', (['block_size'], {}), '(block_size)\n', (1407, 1419), False, 'from cryptography.hazmat.primitives import padding\n'), ((2457, 2482), 'os.environ.get', 'os.environ.get', (['"""AES_KEY"""'], {}), "('AES_KEY')\n", (2471, 2482), False, 'import os\n'), ((2515, 2539), 'os.environ.get', 'os.environ.get', (['"""AES_IV"""'], {}), "('AES_IV')\n", (2529, 2539), False, 'import os\n'), ((1179, 1200), 'random.SystemRandom', 'random.SystemRandom', ([], {}), '()\n', (1198, 1200), False, 'import random\n')]
|
import requests
from ate import response, exception
from tests.base import ApiServerUnittest
class TestResponse(ApiServerUnittest):
def test_parse_response_object_json(self):
url = "http://127.0.0.1:5000/api/users"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
parsed_dict = resp_obj.parsed_dict()
self.assertIn('status_code', parsed_dict)
self.assertIn('headers', parsed_dict)
self.assertIn('body', parsed_dict)
self.assertIn('Content-Type', parsed_dict['headers'])
self.assertIn('Content-Length', parsed_dict['headers'])
self.assertIn('success', parsed_dict['body'])
def test_parse_response_object_text(self):
url = "http://127.0.0.1:5000/"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
parsed_dict = resp_obj.parsed_dict()
self.assertIn('status_code', parsed_dict)
self.assertIn('headers', parsed_dict)
self.assertIn('body', parsed_dict)
self.assertIn('Content-Type', parsed_dict['headers'])
self.assertIn('Content-Length', parsed_dict['headers'])
        self.assertIsInstance(parsed_dict['body'], str)
def test_extract_response_json(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': {
'success': False,
"person": {
"name": {
"first_name": "Leo",
"last_name": "Lee",
},
"age": 29,
"cities": ["Guangzhou", "Shenzhen"]
}
}
}
)
extract_binds_list = [
{"resp_status_code": "status_code"},
{"resp_headers_content_type": "headers.content-type"},
{"resp_content_body_success": "body.success"},
{"resp_content_content_success": "content.success"},
{"resp_content_text_success": "text.success"},
{"resp_content_person_first_name": "content.person.name.first_name"},
{"resp_content_cities_1": "content.person.cities.1"}
]
resp_obj = response.ResponseObject(resp)
extract_binds_dict_list = resp_obj.extract_response(extract_binds_list)
self.assertEqual(
extract_binds_dict_list[0]["resp_status_code"],
200
)
self.assertEqual(
extract_binds_dict_list[1]["resp_headers_content_type"],
"application/json"
)
self.assertEqual(
extract_binds_dict_list[2]["resp_content_body_success"],
False
)
self.assertEqual(
extract_binds_dict_list[3]["resp_content_content_success"],
False
)
self.assertEqual(
extract_binds_dict_list[4]["resp_content_text_success"],
False
)
self.assertEqual(
extract_binds_dict_list[5]["resp_content_person_first_name"],
"Leo"
)
self.assertEqual(
extract_binds_dict_list[6]["resp_content_cities_1"],
"Shenzhen"
)
def test_extract_response_fail(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': {
'success': False,
"person": {
"name": {
"first_name": "Leo",
"last_name": "Lee",
},
"age": 29,
"cities": ["Guangzhou", "Shenzhen"]
}
}
}
)
extract_binds_list = [
{"resp_content_dict_key_error": "content.not_exist"}
]
resp_obj = response.ResponseObject(resp)
with self.assertRaises(exception.ParseResponseError):
resp_obj.extract_response(extract_binds_list)
extract_binds_list = [
{"resp_content_list_index_error": "content.person.cities.3"}
]
resp_obj = response.ResponseObject(resp)
with self.assertRaises(exception.ParseResponseError):
resp_obj.extract_response(extract_binds_list)
def test_extract_response_json_string(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': "abc"
}
)
extract_binds_list = [
{"resp_content_body": "content"}
]
resp_obj = response.ResponseObject(resp)
extract_binds_dict_list = resp_obj.extract_response(extract_binds_list)
self.assertEqual(
extract_binds_dict_list[0]["resp_content_body"],
"abc"
)
def test_extract_response_empty(self):
resp = requests.post(
url="http://127.0.0.1:5000/customize-response",
json={
'headers': {
'Content-Type': "application/json"
},
'body': ""
}
)
extract_binds_list = [
{"resp_content_body": "content"}
]
resp_obj = response.ResponseObject(resp)
extract_binds_dict_list = resp_obj.extract_response(extract_binds_list)
self.assertEqual(
extract_binds_dict_list[0]["resp_content_body"],
""
)
extract_binds_list = [
{"resp_content_body": "content.abc"}
]
resp_obj = response.ResponseObject(resp)
with self.assertRaises(exception.ResponseError):
resp_obj.extract_response(extract_binds_list)
def test_validate(self):
url = "http://127.0.0.1:5000/"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
validators = [
{"check": "resp_status_code", "comparator": "eq", "expected": 201},
{"check": "resp_body_success", "comparator": "eq", "expected": True}
]
variables_mapping = {
"resp_status_code": 200,
"resp_body_success": True
}
with self.assertRaises(exception.ValidationError):
resp_obj.validate(validators, variables_mapping)
validators = [
{"check": "resp_status_code", "comparator": "eq", "expected": 201},
{"check": "resp_body_success", "comparator": "eq", "expected": True}
]
variables_mapping = {
"resp_status_code": 201,
"resp_body_success": True
}
self.assertTrue(resp_obj.validate(validators, variables_mapping))
def test_validate_exception(self):
url = "http://127.0.0.1:5000/"
resp = requests.get(url)
resp_obj = response.ResponseObject(resp)
# expected value missed in validators
validators = [
{"check": "status_code", "comparator": "eq", "expected": 201},
{"check": "body_success", "comparator": "eq"}
]
variables_mapping = {}
with self.assertRaises(exception.ValidationError):
resp_obj.validate(validators, variables_mapping)
# expected value missed in variables mapping
validators = [
{"check": "resp_status_code", "comparator": "eq", "expected": 201},
{"check": "body_success", "comparator": "eq"}
]
variables_mapping = {
"resp_status_code": 200
}
with self.assertRaises(exception.ValidationError):
resp_obj.validate(validators, variables_mapping)
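# --- Editor's note (summary, not part of the original test module) ---
# The extract_binds entries exercised above bind names to dotted lookup
# paths into the parsed response: 'status_code', 'headers.<field>', and
# 'body.<key>' / 'content.<key>' / 'text.<key>' as aliases for the payload,
# with numeric segments indexing into lists, e.g.:
#
#     {"resp_city": "content.person.cities.1"}  # -> "Shenzhen"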
|
[
"requests.post",
"requests.get",
"ate.response.ResponseObject"
] |
[((244, 261), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (256, 261), False, 'import requests\n'), ((281, 310), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (304, 310), False, 'from ate import response, exception\n'), ((777, 794), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (789, 794), False, 'import requests\n'), ((814, 843), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (837, 843), False, 'from ate import response, exception\n'), ((1268, 1538), 'requests.post', 'requests.post', ([], {'url': '"""http://127.0.0.1:5000/customize-response"""', 'json': "{'headers': {'Content-Type': 'application/json'}, 'body': {'success': False,\n 'person': {'name': {'first_name': 'Leo', 'last_name': 'Lee'}, 'age': 29,\n 'cities': ['Guangzhou', 'Shenzhen']}}}"}), "(url='http://127.0.0.1:5000/customize-response', json={\n 'headers': {'Content-Type': 'application/json'}, 'body': {'success': \n False, 'person': {'name': {'first_name': 'Leo', 'last_name': 'Lee'},\n 'age': 29, 'cities': ['Guangzhou', 'Shenzhen']}}})\n", (1281, 1538), False, 'import requests\n'), ((2385, 2414), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (2408, 2414), False, 'from ate import response, exception\n'), ((3426, 3696), 'requests.post', 'requests.post', ([], {'url': '"""http://127.0.0.1:5000/customize-response"""', 'json': "{'headers': {'Content-Type': 'application/json'}, 'body': {'success': False,\n 'person': {'name': {'first_name': 'Leo', 'last_name': 'Lee'}, 'age': 29,\n 'cities': ['Guangzhou', 'Shenzhen']}}}"}), "(url='http://127.0.0.1:5000/customize-response', json={\n 'headers': {'Content-Type': 'application/json'}, 'body': {'success': \n False, 'person': {'name': {'first_name': 'Leo', 'last_name': 'Lee'},\n 'age': 29, 'cities': ['Guangzhou', 'Shenzhen']}}})\n", (3439, 3696), False, 'import requests\n'), ((4162, 4191), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (4185, 4191), False, 'from ate import response, exception\n'), ((4447, 4476), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (4470, 4476), False, 'from ate import response, exception\n'), ((4663, 4800), 'requests.post', 'requests.post', ([], {'url': '"""http://127.0.0.1:5000/customize-response"""', 'json': "{'headers': {'Content-Type': 'application/json'}, 'body': 'abc'}"}), "(url='http://127.0.0.1:5000/customize-response', json={\n 'headers': {'Content-Type': 'application/json'}, 'body': 'abc'})\n", (4676, 4800), False, 'import requests\n'), ((5020, 5049), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (5043, 5049), False, 'from ate import response, exception\n'), ((5305, 5439), 'requests.post', 'requests.post', ([], {'url': '"""http://127.0.0.1:5000/customize-response"""', 'json': "{'headers': {'Content-Type': 'application/json'}, 'body': ''}"}), "(url='http://127.0.0.1:5000/customize-response', json={\n 'headers': {'Content-Type': 'application/json'}, 'body': ''})\n", (5318, 5439), False, 'import requests\n'), ((5659, 5688), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (5682, 5688), False, 'from ate import response, exception\n'), ((5991, 6020), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (6014, 6020), False, 'from ate import response, exception\n'), ((6220, 6237), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', 
(6232, 6237), False, 'import requests\n'), ((6257, 6286), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (6280, 6286), False, 'from ate import response, exception\n'), ((7197, 7214), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (7209, 7214), False, 'import requests\n'), ((7234, 7263), 'ate.response.ResponseObject', 'response.ResponseObject', (['resp'], {}), '(resp)\n', (7257, 7263), False, 'from ate import response, exception\n')]
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
from bokeh.util.api import DEV, GENERAL ; DEV, GENERAL
from bokeh.util.testing import verify_api ; verify_api
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
from mock import patch
# External imports
import bs4
from jinja2 import Template
from six import string_types
# Bokeh imports
from bokeh.core.properties import Instance
from bokeh.document import Document
from bokeh.io import curdoc
from bokeh.model import Model
from bokeh.plotting import figure
from bokeh.resources import CDN, JSResources, CSSResources
from bokeh.util.string import encode_utf8
# Module under test
import bokeh.embed.standalone as bes
#-----------------------------------------------------------------------------
# API Definition
#-----------------------------------------------------------------------------
api = {
GENERAL: (
( 'autoload_static', (1,0,0) ),
( 'components', (1,0,0) ),
( 'file_html', (1,0,0) ),
), DEV: (
)
}
Test_api = verify_api(bes, api)
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
class SomeModelInTestObjects(Model):
child = Instance(Model)
def stable_id():
return 'ID'
@pytest.fixture
def test_plot():
from bokeh.plotting import figure
test_plot = figure()
test_plot.circle([1, 2], [2, 3])
return test_plot
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class Test_autoload_static(object):
def test_return_type(self, test_plot):
r = bes.autoload_static(test_plot, CDN, "some/path")
assert len(r) == 2
@patch('bokeh.embed.util.make_id', new_callable=lambda: stable_id)
def test_script_attrs(self, mock_make_id, test_plot):
js, tag = bes.autoload_static(test_plot, CDN, "some/path")
html = bs4.BeautifulSoup(tag, "lxml")
scripts = html.findAll(name='script')
assert len(scripts) == 1
attrs = scripts[0].attrs
assert set(attrs) == set([
'src',
'data-bokeh-model-id',
'id',
'data-bokeh-doc-id'])
assert attrs['data-bokeh-doc-id'] == 'ID'
assert attrs['data-bokeh-model-id'] == str(test_plot._id)
assert attrs['src'] == 'some/path'
class Test_components(object):
def test_return_type(self):
plot1 = figure()
plot1.circle([], [])
plot2 = figure()
plot2.circle([], [])
        # This is a testing artefact, users don't have to do this in practice
curdoc().add_root(plot1)
curdoc().add_root(plot2)
r = bes.components(plot1)
assert len(r) == 2
_, divs = bes.components((plot1, plot2))
assert isinstance(divs, tuple)
_, divs = bes.components([plot1, plot2])
assert isinstance(divs, tuple)
_, divs = bes.components({"Plot 1": plot1, "Plot 2": plot2})
assert isinstance(divs, dict)
assert all(isinstance(x, string_types) for x in divs.keys())
@patch('bokeh.embed.util.make_id', new_callable=lambda: stable_id)
def test_plot_dict_returned_when_wrap_plot_info_is_false(self, mock_make_id):
plot1 = figure()
plot1.circle([], [])
plot2 = figure()
plot2.circle([], [])
        # This is a testing artefact, users don't have to do this in practice
curdoc().add_root(plot1)
curdoc().add_root(plot2)
expected_plotdict_1 = {"modelid": plot1.ref["id"], "elementid": "ID", "docid": "ID"}
expected_plotdict_2 = {"modelid": plot2.ref["id"], "elementid": "ID", "docid": "ID"}
_, plotdict = bes.components(plot1, wrap_plot_info=False)
assert plotdict == expected_plotdict_1
_, plotids = bes.components((plot1, plot2), wrap_plot_info=False)
assert plotids == (expected_plotdict_1, expected_plotdict_2)
_, plotiddict = bes.components({'p1': plot1, 'p2': plot2}, wrap_plot_info=False)
assert plotiddict == {'p1': expected_plotdict_1, 'p2': expected_plotdict_2}
def test_result_attrs(self, test_plot):
script, div = bes.components(test_plot)
html = bs4.BeautifulSoup(script, "lxml")
scripts = html.findAll(name='script')
assert len(scripts) == 1
assert scripts[0].attrs == {'type': 'text/javascript'}
def test_div_attrs(self, test_plot):
script, div = bes.components(test_plot)
html = bs4.BeautifulSoup(div, "lxml")
divs = html.findAll(name='div')
assert len(divs) == 2
div = divs[0]
assert set(div.attrs) == set(['class'])
assert div.attrs['class'] == ['bk-root']
assert div.text == '\n\n'
div = divs[1]
assert set(div.attrs) == set(['id', 'class'])
assert div.attrs['class'] == ['bk-plotdiv']
assert div.text == ''
def test_script_is_utf8_encoded(self, test_plot):
script, div = bes.components(test_plot)
assert isinstance(script, str)
@patch('bokeh.embed.util.make_id', new_callable=lambda: stable_id)
def test_output_is_without_script_tag_when_wrap_script_is_false(self, mock_make_id, test_plot):
script, div = bes.components(test_plot)
html = bs4.BeautifulSoup(script, "lxml")
scripts = html.findAll(name='script')
assert len(scripts) == 1
# XXX: this needs to account for indentation
#script_content = scripts[0].getText()
#rawscript, div = bes.components(test_plot, wrap_script=False)
#self.maxDiff = None
#assert rawscript.strip() == script_content.strip()
class Test_file_html(object):
def test_return_type(self, test_plot):
class fake_template:
def __init__(self, tester, user_template_variables=None):
self.tester = tester
self.template_variables = {
"title",
"bokeh_js",
"bokeh_css",
"plot_script",
"plot_div"
}
if user_template_variables is not None:
self.template_variables.update(user_template_variables)
def render(self, template_variables):
assert self.template_variables.issubset(set(template_variables.keys()))
return "template result"
r = bes.file_html(test_plot, CDN, "title")
assert isinstance(r, str)
r = bes.file_html(test_plot, CDN, "title", fake_template(self))
assert isinstance(r, str)
r = bes.file_html(test_plot, CDN, "title",
fake_template(self, {"test_var"}),
{"test_var": "test"})
assert isinstance(r, str)
@patch('bokeh.embed.bundle.warn')
def test_file_html_handles_js_only_resources(self, mock_warn, test_plot):
js_resources = JSResources(mode="relative", components=["bokeh"])
template = Template("<head>{{ bokeh_js }}</head><body></body>")
output = bes.file_html(test_plot, (js_resources, None), "title", template=template)
html = encode_utf8("<head>%s</head><body></body>" % js_resources.render_js())
assert output == html
@patch('bokeh.embed.bundle.warn')
def test_file_html_provides_warning_if_no_css(self, mock_warn, test_plot):
js_resources = JSResources()
bes.file_html(test_plot, (js_resources, None), "title")
mock_warn.assert_called_once_with(
'No Bokeh CSS Resources provided to template. If required you will need to provide them manually.'
)
@patch('bokeh.embed.bundle.warn')
def test_file_html_handles_css_only_resources(self, mock_warn, test_plot):
css_resources = CSSResources(mode="relative", components=["bokeh"])
template = Template("<head>{{ bokeh_css }}</head><body></body>")
output = bes.file_html(test_plot, (None, css_resources), "title", template=template)
html = encode_utf8("<head>%s</head><body></body>" % css_resources.render_css())
assert output == html
@patch('bokeh.embed.bundle.warn')
def test_file_html_provides_warning_if_no_js(self, mock_warn, test_plot):
css_resources = CSSResources()
bes.file_html(test_plot, (None, css_resources), "title")
mock_warn.assert_called_once_with(
'No Bokeh JS Resources provided to template. If required you will need to provide them manually.'
)
def test_file_html_title_is_escaped(self, test_plot):
r = bes.file_html(test_plot, CDN, "&<")
assert "<title>&<</title>" in r
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
class Test__ModelInDocument(object):
def test_single_model(self):
p = Model()
assert p.document is None
with bes._ModelInDocument([p]):
assert p.document is not None
assert p.document is None
def test_list_of_model(self):
p1 = Model()
p2 = Model()
assert p1.document is None
assert p2.document is None
with bes._ModelInDocument([p1, p2]):
assert p1.document is not None
assert p2.document is not None
assert p1.document is None
assert p2.document is None
def test_uses_precedent(self):
# it's deliberate that the doc is on p2, so _ModelInDocument
# has to be smart about looking for a doc anywhere in the list
# before it starts inventing new documents
doc = Document()
p1 = Model()
p2 = Model()
doc.add_root(p2)
assert p1.document is None
assert p2.document is not None
with bes._ModelInDocument([p1, p2]):
assert p1.document is not None
assert p2.document is not None
assert p1.document is doc
assert p2.document is doc
assert p1.document is None
assert p2.document is not None
def test_uses_doc_precedent(self):
doc = Document()
p1 = Model()
p2 = Model()
assert p1.document is None
assert p2.document is None
with bes._ModelInDocument([p1, p2, doc]):
assert p1.document is not None
assert p2.document is not None
assert p1.document is doc
assert p2.document is doc
assert p1.document is None
assert p2.document is None
def test_with_doc_in_child_raises_error(self):
doc = Document()
p1 = Model()
p2 = SomeModelInTestObjects(child=Model())
doc.add_root(p2.child)
assert p1.document is None
assert p2.document is None
assert p2.child.document is doc
with pytest.raises(RuntimeError):
with bes._ModelInDocument([p1, p2]):
assert p1.document is not None
assert p2.document is not None
assert p1.document is doc
assert p2.document is doc
@patch('bokeh.document.document.check_integrity')
def test_validates_document_by_default(self, check_integrity, test_plot):
with bes._ModelInDocument([test_plot]):
pass
assert check_integrity.called
@patch('bokeh.document.document.check_integrity')
def test_doesnt_validate_doc_due_to_env_var(self, check_integrity, monkeypatch, test_plot):
monkeypatch.setenv("BOKEH_VALIDATE_DOC", "false")
with bes._ModelInDocument([test_plot]):
pass
assert not check_integrity.called
class Test__add_doc_to_models(object):
pass
class Test__title_from_models(object):
pass
|
[
"jinja2.Template",
"bokeh.resources.JSResources",
"bokeh.plotting.figure",
"bokeh.model.Model",
"bokeh.embed.standalone.autoload_static",
"bokeh.embed.standalone.components",
"bokeh.embed.standalone.file_html",
"mock.patch",
"bokeh.document.Document",
"bokeh.embed.standalone._ModelInDocument",
"pytest.raises",
"bokeh.util.testing.verify_api",
"bokeh.resources.CSSResources",
"bokeh.io.curdoc",
"bs4.BeautifulSoup",
"bokeh.core.properties.Instance"
] |
[((1752, 1772), 'bokeh.util.testing.verify_api', 'verify_api', (['bes', 'api'], {}), '(bes, api)\n', (1762, 1772), False, 'from bokeh.util.testing import verify_api\n'), ((1990, 2005), 'bokeh.core.properties.Instance', 'Instance', (['Model'], {}), '(Model)\n', (1998, 2005), False, 'from bokeh.core.properties import Instance\n'), ((2128, 2136), 'bokeh.plotting.figure', 'figure', ([], {}), '()\n', (2134, 2136), False, 'from bokeh.plotting import figure\n'), ((2543, 2609), 'mock.patch', 'patch', (['"""bokeh.embed.util.make_id"""'], {'new_callable': '(lambda : stable_id)'}), "('bokeh.embed.util.make_id', new_callable=lambda : stable_id)\n", (2548, 2609), False, 'from mock import patch\n'), ((3933, 3999), 'mock.patch', 'patch', (['"""bokeh.embed.util.make_id"""'], {'new_callable': '(lambda : stable_id)'}), "('bokeh.embed.util.make_id', new_callable=lambda : stable_id)\n", (3938, 3999), False, 'from mock import patch\n'), ((5904, 5970), 'mock.patch', 'patch', (['"""bokeh.embed.util.make_id"""'], {'new_callable': '(lambda : stable_id)'}), "('bokeh.embed.util.make_id', new_callable=lambda : stable_id)\n", (5909, 5970), False, 'from mock import patch\n'), ((7652, 7684), 'mock.patch', 'patch', (['"""bokeh.embed.bundle.warn"""'], {}), "('bokeh.embed.bundle.warn')\n", (7657, 7684), False, 'from mock import patch\n'), ((8123, 8155), 'mock.patch', 'patch', (['"""bokeh.embed.bundle.warn"""'], {}), "('bokeh.embed.bundle.warn')\n", (8128, 8155), False, 'from mock import patch\n'), ((8506, 8538), 'mock.patch', 'patch', (['"""bokeh.embed.bundle.warn"""'], {}), "('bokeh.embed.bundle.warn')\n", (8511, 8538), False, 'from mock import patch\n'), ((8984, 9016), 'mock.patch', 'patch', (['"""bokeh.embed.bundle.warn"""'], {}), "('bokeh.embed.bundle.warn')\n", (8989, 9016), False, 'from mock import patch\n'), ((12146, 12194), 'mock.patch', 'patch', (['"""bokeh.document.document.check_integrity"""'], {}), "('bokeh.document.document.check_integrity')\n", (12151, 12194), False, 'from mock import patch\n'), ((12382, 12430), 'mock.patch', 'patch', (['"""bokeh.document.document.check_integrity"""'], {}), "('bokeh.document.document.check_integrity')\n", (12387, 12430), False, 'from mock import patch\n'), ((2461, 2509), 'bokeh.embed.standalone.autoload_static', 'bes.autoload_static', (['test_plot', 'CDN', '"""some/path"""'], {}), "(test_plot, CDN, 'some/path')\n", (2480, 2509), True, 'import bokeh.embed.standalone as bes\n'), ((2685, 2733), 'bokeh.embed.standalone.autoload_static', 'bes.autoload_static', (['test_plot', 'CDN', '"""some/path"""'], {}), "(test_plot, CDN, 'some/path')\n", (2704, 2733), True, 'import bokeh.embed.standalone as bes\n'), ((2749, 2779), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['tag', '"""lxml"""'], {}), "(tag, 'lxml')\n", (2766, 2779), False, 'import bs4\n'), ((3274, 3282), 'bokeh.plotting.figure', 'figure', ([], {}), '()\n', (3280, 3282), False, 'from bokeh.plotting import figure\n'), ((3328, 3336), 'bokeh.plotting.figure', 'figure', ([], {}), '()\n', (3334, 3336), False, 'from bokeh.plotting import figure\n'), ((3523, 3544), 'bokeh.embed.standalone.components', 'bes.components', (['plot1'], {}), '(plot1)\n', (3537, 3544), True, 'import bokeh.embed.standalone as bes\n'), ((3591, 3621), 'bokeh.embed.standalone.components', 'bes.components', (['(plot1, plot2)'], {}), '((plot1, plot2))\n', (3605, 3621), True, 'import bokeh.embed.standalone as bes\n'), ((3680, 3710), 'bokeh.embed.standalone.components', 'bes.components', (['[plot1, plot2]'], {}), '([plot1, plot2])\n', (3694, 3710), True, 'import 
bokeh.embed.standalone as bes\n'), ((3769, 3819), 'bokeh.embed.standalone.components', 'bes.components', (["{'Plot 1': plot1, 'Plot 2': plot2}"], {}), "({'Plot 1': plot1, 'Plot 2': plot2})\n", (3783, 3819), True, 'import bokeh.embed.standalone as bes\n'), ((4097, 4105), 'bokeh.plotting.figure', 'figure', ([], {}), '()\n', (4103, 4105), False, 'from bokeh.plotting import figure\n'), ((4151, 4159), 'bokeh.plotting.figure', 'figure', ([], {}), '()\n', (4157, 4159), False, 'from bokeh.plotting import figure\n'), ((4543, 4586), 'bokeh.embed.standalone.components', 'bes.components', (['plot1'], {'wrap_plot_info': '(False)'}), '(plot1, wrap_plot_info=False)\n', (4557, 4586), True, 'import bokeh.embed.standalone as bes\n'), ((4656, 4708), 'bokeh.embed.standalone.components', 'bes.components', (['(plot1, plot2)'], {'wrap_plot_info': '(False)'}), '((plot1, plot2), wrap_plot_info=False)\n', (4670, 4708), True, 'import bokeh.embed.standalone as bes\n'), ((4803, 4867), 'bokeh.embed.standalone.components', 'bes.components', (["{'p1': plot1, 'p2': plot2}"], {'wrap_plot_info': '(False)'}), "({'p1': plot1, 'p2': plot2}, wrap_plot_info=False)\n", (4817, 4867), True, 'import bokeh.embed.standalone as bes\n'), ((5019, 5044), 'bokeh.embed.standalone.components', 'bes.components', (['test_plot'], {}), '(test_plot)\n', (5033, 5044), True, 'import bokeh.embed.standalone as bes\n'), ((5060, 5093), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['script', '"""lxml"""'], {}), "(script, 'lxml')\n", (5077, 5093), False, 'import bs4\n'), ((5300, 5325), 'bokeh.embed.standalone.components', 'bes.components', (['test_plot'], {}), '(test_plot)\n', (5314, 5325), True, 'import bokeh.embed.standalone as bes\n'), ((5341, 5371), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['div', '"""lxml"""'], {}), "(div, 'lxml')\n", (5358, 5371), False, 'import bs4\n'), ((5833, 5858), 'bokeh.embed.standalone.components', 'bes.components', (['test_plot'], {}), '(test_plot)\n', (5847, 5858), True, 'import bokeh.embed.standalone as bes\n'), ((6092, 6117), 'bokeh.embed.standalone.components', 'bes.components', (['test_plot'], {}), '(test_plot)\n', (6106, 6117), True, 'import bokeh.embed.standalone as bes\n'), ((6133, 6166), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['script', '"""lxml"""'], {}), "(script, 'lxml')\n", (6150, 6166), False, 'import bs4\n'), ((7267, 7305), 'bokeh.embed.standalone.file_html', 'bes.file_html', (['test_plot', 'CDN', '"""title"""'], {}), "(test_plot, CDN, 'title')\n", (7280, 7305), True, 'import bokeh.embed.standalone as bes\n'), ((7786, 7836), 'bokeh.resources.JSResources', 'JSResources', ([], {'mode': '"""relative"""', 'components': "['bokeh']"}), "(mode='relative', components=['bokeh'])\n", (7797, 7836), False, 'from bokeh.resources import CDN, JSResources, CSSResources\n'), ((7856, 7908), 'jinja2.Template', 'Template', (['"""<head>{{ bokeh_js }}</head><body></body>"""'], {}), "('<head>{{ bokeh_js }}</head><body></body>')\n", (7864, 7908), False, 'from jinja2 import Template\n'), ((7926, 8000), 'bokeh.embed.standalone.file_html', 'bes.file_html', (['test_plot', '(js_resources, None)', '"""title"""'], {'template': 'template'}), "(test_plot, (js_resources, None), 'title', template=template)\n", (7939, 8000), True, 'import bokeh.embed.standalone as bes\n'), ((8258, 8271), 'bokeh.resources.JSResources', 'JSResources', ([], {}), '()\n', (8269, 8271), False, 'from bokeh.resources import CDN, JSResources, CSSResources\n'), ((8280, 8335), 'bokeh.embed.standalone.file_html', 'bes.file_html', (['test_plot', '(js_resources, None)', 
'"""title"""'], {}), "(test_plot, (js_resources, None), 'title')\n", (8293, 8335), True, 'import bokeh.embed.standalone as bes\n'), ((8642, 8693), 'bokeh.resources.CSSResources', 'CSSResources', ([], {'mode': '"""relative"""', 'components': "['bokeh']"}), "(mode='relative', components=['bokeh'])\n", (8654, 8693), False, 'from bokeh.resources import CDN, JSResources, CSSResources\n'), ((8713, 8766), 'jinja2.Template', 'Template', (['"""<head>{{ bokeh_css }}</head><body></body>"""'], {}), "('<head>{{ bokeh_css }}</head><body></body>')\n", (8721, 8766), False, 'from jinja2 import Template\n'), ((8784, 8859), 'bokeh.embed.standalone.file_html', 'bes.file_html', (['test_plot', '(None, css_resources)', '"""title"""'], {'template': 'template'}), "(test_plot, (None, css_resources), 'title', template=template)\n", (8797, 8859), True, 'import bokeh.embed.standalone as bes\n'), ((9119, 9133), 'bokeh.resources.CSSResources', 'CSSResources', ([], {}), '()\n', (9131, 9133), False, 'from bokeh.resources import CDN, JSResources, CSSResources\n'), ((9142, 9198), 'bokeh.embed.standalone.file_html', 'bes.file_html', (['test_plot', '(None, css_resources)', '"""title"""'], {}), "(test_plot, (None, css_resources), 'title')\n", (9155, 9198), True, 'import bokeh.embed.standalone as bes\n'), ((9433, 9468), 'bokeh.embed.standalone.file_html', 'bes.file_html', (['test_plot', 'CDN', '"""&<"""'], {}), "(test_plot, CDN, '&<')\n", (9446, 9468), True, 'import bokeh.embed.standalone as bes\n'), ((9942, 9949), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (9947, 9949), False, 'from bokeh.model import Model\n'), ((10148, 10155), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (10153, 10155), False, 'from bokeh.model import Model\n'), ((10169, 10176), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (10174, 10176), False, 'from bokeh.model import Model\n'), ((10689, 10699), 'bokeh.document.Document', 'Document', ([], {}), '()\n', (10697, 10699), False, 'from bokeh.document import Document\n'), ((10713, 10720), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (10718, 10720), False, 'from bokeh.model import Model\n'), ((10734, 10741), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (10739, 10741), False, 'from bokeh.model import Model\n'), ((11176, 11186), 'bokeh.document.Document', 'Document', ([], {}), '()\n', (11184, 11186), False, 'from bokeh.document import Document\n'), ((11200, 11207), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (11205, 11207), False, 'from bokeh.model import Model\n'), ((11221, 11228), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (11226, 11228), False, 'from bokeh.model import Model\n'), ((11647, 11657), 'bokeh.document.Document', 'Document', ([], {}), '()\n', (11655, 11657), False, 'from bokeh.document import Document\n'), ((11671, 11678), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (11676, 11678), False, 'from bokeh.model import Model\n'), ((9997, 10022), 'bokeh.embed.standalone._ModelInDocument', 'bes._ModelInDocument', (['[p]'], {}), '([p])\n', (10017, 10022), True, 'import bokeh.embed.standalone as bes\n'), ((10260, 10290), 'bokeh.embed.standalone._ModelInDocument', 'bes._ModelInDocument', (['[p1, p2]'], {}), '([p1, p2])\n', (10280, 10290), True, 'import bokeh.embed.standalone as bes\n'), ((10854, 10884), 'bokeh.embed.standalone._ModelInDocument', 'bes._ModelInDocument', (['[p1, p2]'], {}), '([p1, p2])\n', (10874, 10884), True, 'import bokeh.embed.standalone as bes\n'), ((11312, 11347), 'bokeh.embed.standalone._ModelInDocument', 'bes._ModelInDocument', (['[p1, p2, doc]'], {}), '([p1, 
p2, doc])\n', (11332, 11347), True, 'import bokeh.embed.standalone as bes\n'), ((11884, 11911), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (11897, 11911), False, 'import pytest\n'), ((12286, 12319), 'bokeh.embed.standalone._ModelInDocument', 'bes._ModelInDocument', (['[test_plot]'], {}), '([test_plot])\n', (12306, 12319), True, 'import bokeh.embed.standalone as bes\n'), ((12598, 12631), 'bokeh.embed.standalone._ModelInDocument', 'bes._ModelInDocument', (['[test_plot]'], {}), '([test_plot])\n', (12618, 12631), True, 'import bokeh.embed.standalone as bes\n'), ((3452, 3460), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (3458, 3460), False, 'from bokeh.io import curdoc\n'), ((3485, 3493), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (3491, 3493), False, 'from bokeh.io import curdoc\n'), ((4275, 4283), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (4281, 4283), False, 'from bokeh.io import curdoc\n'), ((4308, 4316), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (4314, 4316), False, 'from bokeh.io import curdoc\n'), ((11721, 11728), 'bokeh.model.Model', 'Model', ([], {}), '()\n', (11726, 11728), False, 'from bokeh.model import Model\n'), ((11930, 11960), 'bokeh.embed.standalone._ModelInDocument', 'bes._ModelInDocument', (['[p1, p2]'], {}), '([p1, p2])\n', (11950, 11960), True, 'import bokeh.embed.standalone as bes\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from decimal import Decimal
from functools import partial
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
import geohash as geohash_lib
import numpy as np
import pandas as pd
from flask_babel import gettext as _
from geopy.point import Point
from pandas import DataFrame, NamedAgg, Series, Timestamp
from superset.constants import NULL_STRING, PandasAxis, PandasPostprocessingCompare
from superset.exceptions import QueryObjectValidationError
from superset.utils.core import (
DTTM_ALIAS,
PostProcessingBoxplotWhiskerType,
PostProcessingContributionOrientation,
TIME_COMPARISION,
)
NUMPY_FUNCTIONS = {
"average": np.average,
"argmin": np.argmin,
"argmax": np.argmax,
"count": np.ma.count,
"count_nonzero": np.count_nonzero,
"cumsum": np.cumsum,
"cumprod": np.cumprod,
"max": np.max,
"mean": np.mean,
"median": np.median,
"nansum": np.nansum,
"nanmin": np.nanmin,
"nanmax": np.nanmax,
"nanmean": np.nanmean,
"nanmedian": np.nanmedian,
"nanpercentile": np.nanpercentile,
"min": np.min,
"percentile": np.percentile,
"prod": np.prod,
"product": np.product,
"std": np.std,
"sum": np.sum,
"var": np.var,
}
ALLOWLIST_ROLLING_FUNCTIONS = (
"count",
"corr",
"cov",
"kurt",
"max",
"mean",
"median",
"min",
"std",
"skew",
"sum",
"var",
"quantile",
)
ALLOWLIST_CUMULATIVE_FUNCTIONS = (
"cummax",
"cummin",
"cumprod",
"cumsum",
)
PROPHET_TIME_GRAIN_MAP = {
"PT1S": "S",
"PT1M": "min",
"PT5M": "5min",
"PT10M": "10min",
"PT15M": "15min",
"PT30M": "30min",
"PT1H": "H",
"P1D": "D",
"P1W": "W",
"P1M": "M",
"P3M": "Q",
"P1Y": "A",
"1969-12-28T00:00:00Z/P1W": "W",
"1969-12-29T00:00:00Z/P1W": "W",
"P1W/1970-01-03T00:00:00Z": "W",
"P1W/1970-01-04T00:00:00Z": "W",
}
def _flatten_column_after_pivot(
column: Union[float, Timestamp, str, Tuple[str, ...]],
aggregates: Dict[str, Dict[str, Any]],
) -> str:
"""
Function for flattening column names into a single string. This step is necessary
to be able to properly serialize a DataFrame. If the column is a string, return
element unchanged. For multi-element columns, join column elements with a comma,
with the exception of pivots made with a single aggregate, in which case the
aggregate column name is omitted.
:param column: single element from `DataFrame.columns`
:param aggregates: aggregates
:return:
"""
if not isinstance(column, tuple):
column = (column,)
if len(aggregates) == 1 and len(column) > 1:
# drop aggregate for single aggregate pivots with multiple groupings
# from column name (aggregates always come first in column name)
column = column[1:]
return ", ".join([str(col) for col in column])
def validate_column_args(*argnames: str) -> Callable[..., Any]:
def wrapper(func: Callable[..., Any]) -> Callable[..., Any]:
def wrapped(df: DataFrame, **options: Any) -> Any:
if options.get("is_pivot_df"):
# skip validation when pivot Dataframe
return func(df, **options)
columns = df.columns.tolist()
for name in argnames:
if name in options and not all(
elem in columns for elem in options.get(name) or []
):
raise QueryObjectValidationError(
_("Referenced columns not available in DataFrame.")
)
return func(df, **options)
return wrapped
return wrapper
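# A minimal (hypothetical) usage sketch of the decorator: option names listed
# in the decorator must reference existing DataFrame columns, otherwise a
# QueryObjectValidationError is raised before the wrapped function runs.
# Note that the wrapper only accepts keyword options besides `df`.
#
#   @validate_column_args("columns")
#   def first_columns(df, columns=None, **options):
#       return df[columns]
#
#   first_columns(DataFrame({"a": [1]}), columns=["a"])   # ok
#   first_columns(DataFrame({"a": [1]}), columns=["b"])   # raises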
def _get_aggregate_funcs(
df: DataFrame, aggregates: Dict[str, Dict[str, Any]],
) -> Dict[str, NamedAgg]:
"""
Converts a set of aggregate config objects into functions that pandas can use as
aggregators. Currently only numpy aggregators are supported.
:param df: DataFrame on which to perform aggregate operation.
:param aggregates: Mapping from column name to aggregate config.
:return: Mapping from metric name to function that takes a single input argument.
"""
agg_funcs: Dict[str, NamedAgg] = {}
for name, agg_obj in aggregates.items():
column = agg_obj.get("column", name)
if column not in df:
raise QueryObjectValidationError(
_(
"Column referenced by aggregate is undefined: %(column)s",
column=column,
)
)
if "operator" not in agg_obj:
raise QueryObjectValidationError(
_("Operator undefined for aggregator: %(name)s", name=name,)
)
operator = agg_obj["operator"]
if callable(operator):
aggfunc = operator
else:
func = NUMPY_FUNCTIONS.get(operator)
if not func:
raise QueryObjectValidationError(
_("Invalid numpy function: %(operator)s", operator=operator,)
)
options = agg_obj.get("options", {})
aggfunc = partial(func, **options)
agg_funcs[name] = NamedAgg(column=column, aggfunc=aggfunc)
return agg_funcs
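# Hedged example of the mapping produced above (hypothetical data); the values
# are pandas NamedAgg tuples ready to be passed to DataFrame.groupby().agg():
#
#   >>> funcs = _get_aggregate_funcs(
#   ...     DataFrame({"x": [1, 2]}),
#   ...     {"x_sum": {"column": "x", "operator": "sum"}})
#   >>> funcs["x_sum"].column
#   'x'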
def _append_columns(
base_df: DataFrame, append_df: DataFrame, columns: Dict[str, str]
) -> DataFrame:
"""
Function for adding columns from one DataFrame to another DataFrame. Calls the
assign method, which overwrites the original column in `base_df` if the column
already exists, and appends the column if the name is not defined.
:param base_df: DataFrame which to use as the base
:param append_df: DataFrame from which to select data.
:param columns: columns on which to append, mapping source column to
target column. For instance, `{'y': 'y'}` will replace the values in
column `y` in `base_df` with the values in `y` in `append_df`,
while `{'y': 'y2'}` will add a column `y2` to `base_df` based
on values in column `y` in `append_df`, leaving the original column `y`
in `base_df` unchanged.
:return: new DataFrame with combined data from `base_df` and `append_df`
"""
return base_df.assign(
**{target: append_df[source] for source, target in columns.items()}
)
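# Hedged example with hypothetical frames: a new target column name is
# appended, while an existing one would be overwritten:
#
#   >>> base = DataFrame({"x": [1, 2], "y": [3, 4]})
#   >>> extra = DataFrame({"y": [30, 40]})
#   >>> list(_append_columns(base, extra, {"y": "y2"}).columns)
#   ['x', 'y', 'y2']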
@validate_column_args("index", "columns")
def pivot( # pylint: disable=too-many-arguments,too-many-locals
df: DataFrame,
index: List[str],
aggregates: Dict[str, Dict[str, Any]],
columns: Optional[List[str]] = None,
metric_fill_value: Optional[Any] = None,
column_fill_value: Optional[str] = NULL_STRING,
drop_missing_columns: Optional[bool] = True,
combine_value_with_metric: bool = False,
marginal_distributions: Optional[bool] = None,
marginal_distribution_name: Optional[str] = None,
flatten_columns: bool = True,
reset_index: bool = True,
) -> DataFrame:
"""
Perform a pivot operation on a DataFrame.
:param df: Object on which pivot operation will be performed
:param index: Columns to group by on the table index (=rows)
:param columns: Columns to group by on the table columns
:param metric_fill_value: Value to replace missing values with
:param column_fill_value: Value to replace missing pivot columns with. By default
replaces missing values with "<NULL>". Set to `None` to remove columns
with missing values.
:param drop_missing_columns: Do not include columns whose entries are all missing
:param combine_value_with_metric: Display metrics side by side within each column,
as opposed to each column being displayed side by side for each metric.
    :param aggregates: A mapping from aggregate column name to the aggregate
        config.
:param marginal_distributions: Add totals for row/column. Default to False
:param marginal_distribution_name: Name of row/column with marginal distribution.
Default to 'All'.
:param flatten_columns: Convert column names to strings
:param reset_index: Convert index to column
:return: A pivot table
    :raises QueryObjectValidationError: If the request is incorrect
"""
if not index:
raise QueryObjectValidationError(
_("Pivot operation requires at least one index")
)
if not aggregates:
raise QueryObjectValidationError(
_("Pivot operation must include at least one aggregate")
)
if columns and column_fill_value:
df[columns] = df[columns].fillna(value=column_fill_value)
aggregate_funcs = _get_aggregate_funcs(df, aggregates)
# TODO (villebro): Pandas 1.0.3 doesn't yet support NamedAgg in pivot_table.
# Remove once/if support is added.
aggfunc = {na.column: na.aggfunc for na in aggregate_funcs.values()}
# When dropna = False, the pivot_table function will calculate cartesian-product
# for MultiIndex.
# https://github.com/apache/superset/issues/15956
# https://github.com/pandas-dev/pandas/issues/18030
series_set = set()
if not drop_missing_columns and columns:
for row in df[columns].itertuples():
for metric in aggfunc.keys():
series_set.add(str(tuple([metric]) + tuple(row[1:])))
df = df.pivot_table(
values=aggfunc.keys(),
index=index,
columns=columns,
aggfunc=aggfunc,
fill_value=metric_fill_value,
dropna=drop_missing_columns,
margins=marginal_distributions,
margins_name=marginal_distribution_name,
)
if not drop_missing_columns and len(series_set) > 0 and not df.empty:
for col in df.columns:
series = str(col)
if series not in series_set:
df = df.drop(col, axis=PandasAxis.COLUMN)
if combine_value_with_metric:
df = df.stack(0).unstack()
    # flatten the pivoted column names into plain strings
if flatten_columns:
df.columns = [
_flatten_column_after_pivot(col, aggregates) for col in df.columns
]
# return index as regular column
if reset_index:
df.reset_index(level=0, inplace=True)
return df
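# Minimal hedged example of the pivot above (hypothetical data); the decorated
# function must receive everything except `df` as keyword arguments:
#
#   >>> df = DataFrame({"idx": ["a", "a", "b"], "m": [1, 2, 3]})
#   >>> pivot(df, index=["idx"],
#   ...       aggregates={"m": {"operator": "sum"}})["m"].tolist()
#   [3, 3]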
@validate_column_args("groupby")
def aggregate(
df: DataFrame, groupby: List[str], aggregates: Dict[str, Dict[str, Any]]
) -> DataFrame:
"""
Apply aggregations to a DataFrame.
:param df: Object to aggregate.
:param groupby: columns to aggregate
:param aggregates: A mapping from metric column to the function used to
aggregate values.
    :raises QueryObjectValidationError: If the request is incorrect
"""
aggregates = aggregates or {}
aggregate_funcs = _get_aggregate_funcs(df, aggregates)
if groupby:
df_groupby = df.groupby(by=groupby)
else:
df_groupby = df.groupby(lambda _: True)
return df_groupby.agg(**aggregate_funcs).reset_index(drop=not groupby)
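# Hedged example (hypothetical data): one row per group, with the grouping
# column restored as a regular column by reset_index:
#
#   >>> df = DataFrame({"g": ["a", "a", "b"], "x": [1, 2, 3]})
#   >>> aggregate(df, groupby=["g"],
#   ...           aggregates={"x_sum": {"column": "x", "operator": "sum"}}
#   ...           )["x_sum"].tolist()
#   [3, 3]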
@validate_column_args("columns")
def sort(df: DataFrame, columns: Dict[str, bool]) -> DataFrame:
"""
Sort a DataFrame.
:param df: DataFrame to sort.
    :param columns: columns by which to sort. The key specifies the column name,
        the value specifies whether to sort in ascending order.
    :return: Sorted DataFrame
    :raises QueryObjectValidationError: If the request is incorrect
"""
return df.sort_values(by=list(columns.keys()), ascending=list(columns.values()))
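# Hedged example (hypothetical data):
#
#   >>> sort(DataFrame({"a": [2, 1, 3]}), columns={"a": True})["a"].tolist()
#   [1, 2, 3]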
@validate_column_args("columns")
def rolling( # pylint: disable=too-many-arguments
df: DataFrame,
rolling_type: str,
columns: Optional[Dict[str, str]] = None,
window: Optional[int] = None,
rolling_type_options: Optional[Dict[str, Any]] = None,
center: bool = False,
win_type: Optional[str] = None,
min_periods: Optional[int] = None,
is_pivot_df: bool = False,
) -> DataFrame:
"""
Apply a rolling window on the dataset. See the Pandas docs for further details:
https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.rolling.html
:param df: DataFrame on which the rolling period will be based.
:param columns: columns on which to perform rolling, mapping source column to
target column. For instance, `{'y': 'y'}` will replace the column `y` with
the rolling value in `y`, while `{'y': 'y2'}` will add a column `y2` based
on rolling values calculated from `y`, leaving the original column `y`
unchanged.
    :param rolling_type: Type of rolling window; must be one of the pandas
        rolling methods listed in ALLOWLIST_ROLLING_FUNCTIONS.
:param window: Size of the window.
:param rolling_type_options: Optional options to pass to rolling method. Needed
for e.g. quantile operation.
:param center: Should the label be at the center of the window.
:param win_type: Type of window function.
    :param min_periods: The minimum number of periods required for a row to be
        included in the result set.
    :param is_pivot_df: Dataframe is pivoted or not
    :return: DataFrame with the rolling columns
    :raises QueryObjectValidationError: If the request is incorrect
"""
rolling_type_options = rolling_type_options or {}
columns = columns or {}
if is_pivot_df:
df_rolling = df
else:
df_rolling = df[columns.keys()]
kwargs: Dict[str, Union[str, int]] = {}
if window is None:
raise QueryObjectValidationError(_("Undefined window for rolling operation"))
if window == 0:
raise QueryObjectValidationError(_("Window must be > 0"))
kwargs["window"] = window
if min_periods is not None:
kwargs["min_periods"] = min_periods
if center is not None:
kwargs["center"] = center
if win_type is not None:
kwargs["win_type"] = win_type
df_rolling = df_rolling.rolling(**kwargs)
    if rolling_type not in ALLOWLIST_ROLLING_FUNCTIONS or not hasattr(
df_rolling, rolling_type
):
raise QueryObjectValidationError(
_("Invalid rolling_type: %(type)s", type=rolling_type)
)
try:
df_rolling = getattr(df_rolling, rolling_type)(**rolling_type_options)
except TypeError as ex:
raise QueryObjectValidationError(
_(
"Invalid options for %(rolling_type)s: %(options)s",
rolling_type=rolling_type,
options=rolling_type_options,
)
) from ex
if is_pivot_df:
agg_in_pivot_df = df.columns.get_level_values(0).drop_duplicates().to_list()
agg: Dict[str, Dict[str, Any]] = {col: {} for col in agg_in_pivot_df}
df_rolling.columns = [
_flatten_column_after_pivot(col, agg) for col in df_rolling.columns
]
df_rolling.reset_index(level=0, inplace=True)
else:
df_rolling = _append_columns(df, df_rolling, columns)
if min_periods:
df_rolling = df_rolling[min_periods:]
return df_rolling
@validate_column_args("columns", "drop", "rename")
def select(
df: DataFrame,
columns: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
rename: Optional[Dict[str, str]] = None,
) -> DataFrame:
"""
Only select a subset of columns in the original dataset. Can be useful for
removing unnecessary intermediate results, renaming and reordering columns.
    :param df: DataFrame from which to select columns.
:param columns: Columns which to select from the DataFrame, in the desired order.
If left undefined, all columns will be selected. If columns are
renamed, the original column name should be referenced here.
:param exclude: columns to exclude from selection. If columns are renamed, the new
column name should be referenced here.
:param rename: columns which to rename, mapping source column to target column.
For instance, `{'y': 'y2'}` will rename the column `y` to
`y2`.
:return: Subset of columns in original DataFrame
    :raises QueryObjectValidationError: If the request is incorrect
"""
df_select = df.copy(deep=False)
if columns:
df_select = df_select[columns]
if exclude:
df_select = df_select.drop(exclude, axis=1)
if rename is not None:
df_select = df_select.rename(columns=rename)
return df_select
@validate_column_args("columns")
def diff(
df: DataFrame,
columns: Dict[str, str],
periods: int = 1,
axis: PandasAxis = PandasAxis.ROW,
) -> DataFrame:
"""
Calculate row-by-row or column-by-column difference for select columns.
:param df: DataFrame on which the diff will be based.
:param columns: columns on which to perform diff, mapping source column to
target column. For instance, `{'y': 'y'}` will replace the column `y` with
the diff value in `y`, while `{'y': 'y2'}` will add a column `y2` based
on diff values calculated from `y`, leaving the original column `y`
unchanged.
:param periods: periods to shift for calculating difference.
:param axis: 0 for row, 1 for column. default 0.
:return: DataFrame with diffed columns
    :raises QueryObjectValidationError: If the request is incorrect
"""
df_diff = df[columns.keys()]
df_diff = df_diff.diff(periods=periods, axis=axis)
return _append_columns(df, df_diff, columns)
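# Hedged example (hypothetical data): the first row has no predecessor, so the
# diffed column starts with NaN:
#
#   >>> diff(DataFrame({"y": [1, 3, 6]}), columns={"y": "y2"})["y2"].tolist()
#   [nan, 2.0, 3.0]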
@validate_column_args("source_columns", "compare_columns")
def compare( # pylint: disable=too-many-arguments
df: DataFrame,
source_columns: List[str],
compare_columns: List[str],
compare_type: Optional[PandasPostprocessingCompare],
drop_original_columns: Optional[bool] = False,
precision: Optional[int] = 4,
) -> DataFrame:
"""
    Calculate column-by-column change for selected columns.
    :param df: DataFrame on which the compare will be based.
    :param source_columns: Main query columns
    :param compare_columns: Columns being compared
    :param compare_type: Type of compare. Choice of `difference`, `percentage`
        or `ratio`
    :param drop_original_columns: Whether to remove the source columns and
        compare columns.
    :param precision: Round a change rate to a variable number of decimal places.
    :return: DataFrame with compared columns.
    :raises QueryObjectValidationError: If the request is incorrect.
"""
if len(source_columns) != len(compare_columns):
raise QueryObjectValidationError(
_("`compare_columns` must have the same length as `source_columns`.")
)
if compare_type not in tuple(PandasPostprocessingCompare):
raise QueryObjectValidationError(
_("`compare_type` must be `difference`, `percentage` or `ratio`")
)
if len(source_columns) == 0:
return df
for s_col, c_col in zip(source_columns, compare_columns):
if compare_type == PandasPostprocessingCompare.DIFF:
diff_series = df[s_col] - df[c_col]
elif compare_type == PandasPostprocessingCompare.PCT:
diff_series = (
((df[s_col] - df[c_col]) / df[c_col]).astype(float).round(precision)
)
else:
# compare_type == "ratio"
diff_series = (df[s_col] / df[c_col]).astype(float).round(precision)
diff_df = diff_series.to_frame(
name=TIME_COMPARISION.join([compare_type, s_col, c_col])
)
df = pd.concat([df, diff_df], axis=1)
if drop_original_columns:
df = df.drop(source_columns + compare_columns, axis=1)
return df
@validate_column_args("columns")
def cum(
df: DataFrame,
operator: str,
columns: Optional[Dict[str, str]] = None,
is_pivot_df: bool = False,
) -> DataFrame:
"""
Calculate cumulative sum/product/min/max for select columns.
:param df: DataFrame on which the cumulative operation will be based.
:param columns: columns on which to perform a cumulative operation, mapping source
column to target column. For instance, `{'y': 'y'}` will replace the column
`y` with the cumulative value in `y`, while `{'y': 'y2'}` will add a column
`y2` based on cumulative values calculated from `y`, leaving the original
column `y` unchanged.
:param operator: cumulative operator, e.g. `sum`, `prod`, `min`, `max`
:param is_pivot_df: Dataframe is pivoted or not
:return: DataFrame with cumulated columns
"""
columns = columns or {}
if is_pivot_df:
df_cum = df
else:
df_cum = df[columns.keys()]
operation = "cum" + operator
if operation not in ALLOWLIST_CUMULATIVE_FUNCTIONS or not hasattr(
df_cum, operation
):
raise QueryObjectValidationError(
_("Invalid cumulative operator: %(operator)s", operator=operator)
)
if is_pivot_df:
df_cum = getattr(df_cum, operation)()
agg_in_pivot_df = df.columns.get_level_values(0).drop_duplicates().to_list()
agg: Dict[str, Dict[str, Any]] = {col: {} for col in agg_in_pivot_df}
df_cum.columns = [
_flatten_column_after_pivot(col, agg) for col in df_cum.columns
]
df_cum.reset_index(level=0, inplace=True)
else:
df_cum = _append_columns(df, getattr(df_cum, operation)(), columns)
return df_cum
def geohash_decode(
df: DataFrame, geohash: str, longitude: str, latitude: str
) -> DataFrame:
"""
Decode a geohash column into longitude and latitude
:param df: DataFrame containing geohash data
:param geohash: Name of source column containing geohash location.
:param longitude: Name of new column to be created containing longitude.
:param latitude: Name of new column to be created containing latitude.
:return: DataFrame with decoded longitudes and latitudes
"""
try:
lonlat_df = DataFrame()
lonlat_df["latitude"], lonlat_df["longitude"] = zip(
*df[geohash].apply(geohash_lib.decode)
)
return _append_columns(
df, lonlat_df, {"latitude": latitude, "longitude": longitude}
)
except ValueError as ex:
raise QueryObjectValidationError(_("Invalid geohash string")) from ex
def geohash_encode(
df: DataFrame, geohash: str, longitude: str, latitude: str,
) -> DataFrame:
"""
Encode longitude and latitude into geohash
:param df: DataFrame containing longitude and latitude data
:param geohash: Name of new column to be created containing geohash location.
:param longitude: Name of source column containing longitude.
:param latitude: Name of source column containing latitude.
    :return: DataFrame with geohash-encoded locations
"""
try:
encode_df = df[[latitude, longitude]]
encode_df.columns = ["latitude", "longitude"]
encode_df["geohash"] = encode_df.apply(
lambda row: geohash_lib.encode(row["latitude"], row["longitude"]), axis=1,
)
return _append_columns(df, encode_df, {"geohash": geohash})
except ValueError as ex:
raise QueryObjectValidationError(_("Invalid longitude/latitude")) from ex
def geodetic_parse(
df: DataFrame,
geodetic: str,
longitude: str,
latitude: str,
altitude: Optional[str] = None,
) -> DataFrame:
"""
Parse a column containing a geodetic point string
[Geopy](https://geopy.readthedocs.io/en/stable/#geopy.point.Point).
:param df: DataFrame containing geodetic point data
:param geodetic: Name of source column containing geodetic point string.
:param longitude: Name of new column to be created containing longitude.
:param latitude: Name of new column to be created containing latitude.
:param altitude: Name of new column to be created containing altitude.
    :return: DataFrame with decoded longitudes, latitudes and altitudes
"""
def _parse_location(location: str) -> Tuple[float, float, float]:
"""
Parse a string containing a geodetic point and return latitude, longitude
and altitude
"""
point = Point(location)
return point[0], point[1], point[2]
try:
geodetic_df = DataFrame()
(
geodetic_df["latitude"],
geodetic_df["longitude"],
geodetic_df["altitude"],
) = zip(*df[geodetic].apply(_parse_location))
columns = {"latitude": latitude, "longitude": longitude}
if altitude:
columns["altitude"] = altitude
return _append_columns(df, geodetic_df, columns)
except ValueError as ex:
raise QueryObjectValidationError(_("Invalid geodetic string")) from ex
@validate_column_args("columns")
def contribution(
df: DataFrame,
orientation: Optional[
PostProcessingContributionOrientation
] = PostProcessingContributionOrientation.COLUMN,
columns: Optional[List[str]] = None,
rename_columns: Optional[List[str]] = None,
) -> DataFrame:
"""
    Calculate cell contribution to row/column total for numeric columns.
Non-numeric columns will be kept untouched.
If `columns` are specified, only calculate contributions on selected columns.
:param df: DataFrame containing all-numeric data (temporal column ignored)
:param columns: Columns to calculate values from.
:param rename_columns: The new labels for the calculated contribution columns.
The original columns will not be removed.
    :param orientation: calculate by dividing each cell by its row/column total
:return: DataFrame with contributions.
"""
contribution_df = df.copy()
numeric_df = contribution_df.select_dtypes(include=["number", Decimal])
# verify column selections
if columns:
numeric_columns = numeric_df.columns.tolist()
for col in columns:
if col not in numeric_columns:
raise QueryObjectValidationError(
_(
'Column "%(column)s" is not numeric or does not '
"exists in the query results.",
column=col,
)
)
columns = columns or numeric_df.columns
rename_columns = rename_columns or columns
if len(rename_columns) != len(columns):
raise QueryObjectValidationError(
_("`rename_columns` must have the same length as `columns`.")
)
# limit to selected columns
numeric_df = numeric_df[columns]
axis = 0 if orientation == PostProcessingContributionOrientation.COLUMN else 1
numeric_df = numeric_df / numeric_df.values.sum(axis=axis, keepdims=True)
contribution_df[rename_columns] = numeric_df
return contribution_df
def _prophet_parse_seasonality(
input_value: Optional[Union[bool, int]]
) -> Union[bool, str, int]:
if input_value is None:
return "auto"
if isinstance(input_value, bool):
return input_value
try:
return int(input_value)
except ValueError:
return input_value
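# Hedged examples of the parsing rules above: `None` maps to "auto", booleans
# pass through, numeric strings become ints, and anything else is returned
# unchanged:
#
#   >>> _prophet_parse_seasonality(None), _prophet_parse_seasonality(True)
#   ('auto', True)
#   >>> _prophet_parse_seasonality(7), _prophet_parse_seasonality("auto")
#   (7, 'auto')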
def _prophet_fit_and_predict( # pylint: disable=too-many-arguments
df: DataFrame,
confidence_interval: float,
yearly_seasonality: Union[bool, str, int],
weekly_seasonality: Union[bool, str, int],
daily_seasonality: Union[bool, str, int],
periods: int,
freq: str,
) -> DataFrame:
"""
Fit a prophet model and return a DataFrame with predicted results.
"""
try:
# pylint: disable=import-error,import-outside-toplevel
from prophet import Prophet
prophet_logger = logging.getLogger("prophet.plot")
prophet_logger.setLevel(logging.CRITICAL)
prophet_logger.setLevel(logging.NOTSET)
except ModuleNotFoundError as ex:
raise QueryObjectValidationError(_("`prophet` package not installed")) from ex
model = Prophet(
interval_width=confidence_interval,
yearly_seasonality=yearly_seasonality,
weekly_seasonality=weekly_seasonality,
daily_seasonality=daily_seasonality,
)
if df["ds"].dt.tz:
df["ds"] = df["ds"].dt.tz_convert(None)
model.fit(df)
future = model.make_future_dataframe(periods=periods, freq=freq)
forecast = model.predict(future)[["ds", "yhat", "yhat_lower", "yhat_upper"]]
return forecast.join(df.set_index("ds"), on="ds").set_index(["ds"])
def prophet( # pylint: disable=too-many-arguments
df: DataFrame,
time_grain: str,
periods: int,
confidence_interval: float,
yearly_seasonality: Optional[Union[bool, int]] = None,
weekly_seasonality: Optional[Union[bool, int]] = None,
daily_seasonality: Optional[Union[bool, int]] = None,
) -> DataFrame:
"""
Add forecasts to each series in a timeseries dataframe, along with confidence
intervals for the prediction. For each series, the operation creates three
new columns with the column name suffixed with the following values:
- `__yhat`: the forecast for the given date
- `__yhat_lower`: the lower bound of the forecast for the given date
- `__yhat_upper`: the upper bound of the forecast for the given date
:param df: DataFrame containing all-numeric data (temporal column ignored)
:param time_grain: Time grain used to specify time period increments in prediction
:param periods: Time periods (in units of `time_grain`) to predict into the future
:param confidence_interval: Width of predicted confidence interval
    :param yearly_seasonality: Should yearly seasonality be applied.
        An integer value will specify Fourier order of seasonality, `None` will
        automatically detect seasonality.
:param weekly_seasonality: Should weekly seasonality be applied.
An integer value will specify Fourier order of seasonality, `None` will
automatically detect seasonality.
:param daily_seasonality: Should daily seasonality be applied.
An integer value will specify Fourier order of seasonality, `None` will
automatically detect seasonality.
:return: DataFrame with contributions, with temporal column at beginning if present
"""
# validate inputs
if not time_grain:
raise QueryObjectValidationError(_("Time grain missing"))
if time_grain not in PROPHET_TIME_GRAIN_MAP:
raise QueryObjectValidationError(
_("Unsupported time grain: %(time_grain)s", time_grain=time_grain,)
)
freq = PROPHET_TIME_GRAIN_MAP[time_grain]
    # check type at runtime due to marshmallow schema not being able to handle
# union types
if not periods or periods < 0 or not isinstance(periods, int):
raise QueryObjectValidationError(_("Periods must be a positive integer value"))
if not confidence_interval or confidence_interval <= 0 or confidence_interval >= 1:
raise QueryObjectValidationError(
_("Confidence interval must be between 0 and 1 (exclusive)")
)
if DTTM_ALIAS not in df.columns:
raise QueryObjectValidationError(_("DataFrame must include temporal column"))
if len(df.columns) < 2:
raise QueryObjectValidationError(_("DataFrame include at least one series"))
target_df = DataFrame()
for column in [column for column in df.columns if column != DTTM_ALIAS]:
fit_df = _prophet_fit_and_predict(
df=df[[DTTM_ALIAS, column]].rename(columns={DTTM_ALIAS: "ds", column: "y"}),
confidence_interval=confidence_interval,
yearly_seasonality=_prophet_parse_seasonality(yearly_seasonality),
weekly_seasonality=_prophet_parse_seasonality(weekly_seasonality),
daily_seasonality=_prophet_parse_seasonality(daily_seasonality),
periods=periods,
freq=freq,
)
new_columns = [
f"{column}__yhat",
f"{column}__yhat_lower",
f"{column}__yhat_upper",
f"{column}",
]
fit_df.columns = new_columns
if target_df.empty:
target_df = fit_df
else:
for new_column in new_columns:
target_df = target_df.assign(**{new_column: fit_df[new_column]})
target_df.reset_index(level=0, inplace=True)
return target_df.rename(columns={"ds": DTTM_ALIAS})
def boxplot(
df: DataFrame,
groupby: List[str],
metrics: List[str],
whisker_type: PostProcessingBoxplotWhiskerType,
percentiles: Optional[
Union[List[Union[int, float]], Tuple[Union[int, float], Union[int, float]]]
] = None,
) -> DataFrame:
"""
Calculate boxplot statistics. For each metric, the operation creates eight
new columns with the column name suffixed with the following values:
- `__mean`: the mean
- `__median`: the median
- `__max`: the maximum value excluding outliers (see whisker type)
- `__min`: the minimum value excluding outliers (see whisker type)
- `__q1`: the first quartile (25th percentile)
- `__q3`: the third quartile (75th percentile)
- `__count`: count of observations
- `__outliers`: the values that fall outside the minimum/maximum value
(see whisker type)
:param df: DataFrame containing all-numeric data (temporal column ignored)
:param groupby: The categories to group by (x-axis)
:param metrics: The metrics for which to calculate the distribution
    :param whisker_type: The confidence level type
    :param percentiles: Lower and upper percentiles to use when `whisker_type`
        is `percentile`
:return: DataFrame with boxplot statistics per groupby
"""
def quartile1(series: Series) -> float:
return np.nanpercentile(series, 25, interpolation="midpoint")
def quartile3(series: Series) -> float:
return np.nanpercentile(series, 75, interpolation="midpoint")
if whisker_type == PostProcessingBoxplotWhiskerType.TUKEY:
def whisker_high(series: Series) -> float:
upper_outer_lim = quartile3(series) + 1.5 * (
quartile3(series) - quartile1(series)
)
return series[series <= upper_outer_lim].max()
def whisker_low(series: Series) -> float:
lower_outer_lim = quartile1(series) - 1.5 * (
quartile3(series) - quartile1(series)
)
return series[series >= lower_outer_lim].min()
elif whisker_type == PostProcessingBoxplotWhiskerType.PERCENTILE:
if (
not isinstance(percentiles, (list, tuple))
or len(percentiles) != 2
or not isinstance(percentiles[0], (int, float))
or not isinstance(percentiles[1], (int, float))
or percentiles[0] >= percentiles[1]
):
raise QueryObjectValidationError(
_(
"percentiles must be a list or tuple with two numeric values, "
"of which the first is lower than the second value"
)
)
low, high = percentiles[0], percentiles[1]
def whisker_high(series: Series) -> float:
return np.nanpercentile(series, high)
def whisker_low(series: Series) -> float:
return np.nanpercentile(series, low)
else:
whisker_high = np.max
whisker_low = np.min
def outliers(series: Series) -> Set[float]:
above = series[series > whisker_high(series)]
below = series[series < whisker_low(series)]
return above.tolist() + below.tolist()
operators: Dict[str, Callable[[Any], Any]] = {
"mean": np.mean,
"median": np.median,
"max": whisker_high,
"min": whisker_low,
"q1": quartile1,
"q3": quartile3,
"count": np.ma.count,
"outliers": outliers,
}
aggregates: Dict[str, Dict[str, Union[str, Callable[..., Any]]]] = {
f"{metric}__{operator_name}": {"column": metric, "operator": operator}
for operator_name, operator in operators.items()
for metric in metrics
}
return aggregate(df, groupby=groupby, aggregates=aggregates)
def resample(
df: DataFrame,
rule: str,
method: str,
time_column: str,
fill_value: Optional[Union[float, int]] = None,
) -> DataFrame:
"""
    Resample a timeseries dataframe.
    :param df: DataFrame to resample.
    :param rule: The offset string representing target conversion.
    :param method: How to fill NaN values after resampling.
    :param time_column: Name of the time column to use as the resampling index.
    :param fill_value: Value with which to fill missing entries.
    :return: DataFrame after resampling
    :raises QueryObjectValidationError: If the request is incorrect
"""
df = df.set_index(time_column)
if method == "asfreq" and fill_value is not None:
df = df.resample(rule).asfreq(fill_value=fill_value)
else:
df = getattr(df.resample(rule), method)()
return df.reset_index()
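# Hedged example (hypothetical data): upsampling a two-day gap to daily
# frequency with `asfreq` and an explicit fill value:
#
#   >>> df = DataFrame({"ds": pd.to_datetime(["2021-01-01", "2021-01-03"]),
#   ...                 "y": [1, 2]})
#   >>> resample(df, "1D", "asfreq", "ds", fill_value=0)["y"].tolist()
#   [1, 0, 2]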
|
[
"pandas.DataFrame",
"functools.partial",
"numpy.nanpercentile",
"pandas.NamedAgg",
"flask_babel.gettext",
"geopy.point.Point",
"geohash.encode",
"prophet.Prophet",
"superset.utils.core.TIME_COMPARISION.join",
"pandas.concat",
"logging.getLogger"
] |
[((28618, 28785), 'prophet.Prophet', 'Prophet', ([], {'interval_width': 'confidence_interval', 'yearly_seasonality': 'yearly_seasonality', 'weekly_seasonality': 'weekly_seasonality', 'daily_seasonality': 'daily_seasonality'}), '(interval_width=confidence_interval, yearly_seasonality=\n yearly_seasonality, weekly_seasonality=weekly_seasonality,\n daily_seasonality=daily_seasonality)\n', (28625, 28785), False, 'from prophet import Prophet\n'), ((31909, 31920), 'pandas.DataFrame', 'DataFrame', ([], {}), '()\n', (31918, 31920), False, 'from pandas import DataFrame, NamedAgg, Series, Timestamp\n'), ((5994, 6034), 'pandas.NamedAgg', 'NamedAgg', ([], {'column': 'column', 'aggfunc': 'aggfunc'}), '(column=column, aggfunc=aggfunc)\n', (6002, 6034), False, 'from pandas import DataFrame, NamedAgg, Series, Timestamp\n'), ((20222, 20254), 'pandas.concat', 'pd.concat', (['[df, diff_df]'], {'axis': '(1)'}), '([df, diff_df], axis=1)\n', (20231, 20254), True, 'import pandas as pd\n'), ((22659, 22670), 'pandas.DataFrame', 'DataFrame', ([], {}), '()\n', (22668, 22670), False, 'from pandas import DataFrame, NamedAgg, Series, Timestamp\n'), ((24881, 24896), 'geopy.point.Point', 'Point', (['location'], {}), '(location)\n', (24886, 24896), False, 'from geopy.point import Point\n'), ((24973, 24984), 'pandas.DataFrame', 'DataFrame', ([], {}), '()\n', (24982, 24984), False, 'from pandas import DataFrame, NamedAgg, Series, Timestamp\n'), ((28349, 28382), 'logging.getLogger', 'logging.getLogger', (['"""prophet.plot"""'], {}), "('prophet.plot')\n", (28366, 28382), False, 'import logging\n'), ((34281, 34335), 'numpy.nanpercentile', 'np.nanpercentile', (['series', '(25)'], {'interpolation': '"""midpoint"""'}), "(series, 25, interpolation='midpoint')\n", (34297, 34335), True, 'import numpy as np\n'), ((34396, 34450), 'numpy.nanpercentile', 'np.nanpercentile', (['series', '(75)'], {'interpolation': '"""midpoint"""'}), "(series, 75, interpolation='midpoint')\n", (34412, 34450), True, 'import numpy as np\n'), ((5943, 5967), 'functools.partial', 'partial', (['func'], {}), '(func, **options)\n', (5950, 5967), False, 'from functools import partial\n'), ((9094, 9142), 'flask_babel.gettext', '_', (['"""Pivot operation requires at least one index"""'], {}), "('Pivot operation requires at least one index')\n", (9095, 9142), True, 'from flask_babel import gettext as _\n'), ((9230, 9286), 'flask_babel.gettext', '_', (['"""Pivot operation must include at least one aggregate"""'], {}), "('Pivot operation must include at least one aggregate')\n", (9231, 9286), True, 'from flask_babel import gettext as _\n'), ((14195, 14238), 'flask_babel.gettext', '_', (['"""Undefined window for rolling operation"""'], {}), "('Undefined window for rolling operation')\n", (14196, 14238), True, 'from flask_babel import gettext as _\n'), ((14301, 14324), 'flask_babel.gettext', '_', (['"""Window must be > 0"""'], {}), "('Window must be > 0')\n", (14302, 14324), True, 'from flask_babel import gettext as _\n'), ((14772, 14826), 'flask_babel.gettext', '_', (['"""Invalid rolling_type: %(type)s"""'], {'type': 'rolling_type'}), "('Invalid rolling_type: %(type)s', type=rolling_type)\n", (14773, 14826), True, 'from flask_babel import gettext as _\n'), ((19272, 19341), 'flask_babel.gettext', '_', (['"""`compare_columns` must have the same length as `source_columns`."""'], {}), "('`compare_columns` must have the same length as `source_columns`.')\n", (19273, 19341), True, 'from flask_babel import gettext as _\n'), ((19469, 19534), 'flask_babel.gettext', '_', 
(['"""`compare_type` must be `difference`, `percentage` or `ratio`"""'], {}), "('`compare_type` must be `difference`, `percentage` or `ratio`')\n", (19470, 19534), True, 'from flask_babel import gettext as _\n'), ((21551, 21616), 'flask_babel.gettext', '_', (['"""Invalid cumulative operator: %(operator)s"""'], {'operator': 'operator'}), "('Invalid cumulative operator: %(operator)s', operator=operator)\n", (21552, 21616), True, 'from flask_babel import gettext as _\n'), ((27128, 27189), 'flask_babel.gettext', '_', (['"""`rename_columns` must have the same length as `columns`."""'], {}), "('`rename_columns` must have the same length as `columns`.')\n", (27129, 27189), True, 'from flask_babel import gettext as _\n'), ((30939, 30962), 'flask_babel.gettext', '_', (['"""Time grain missing"""'], {}), "('Time grain missing')\n", (30940, 30962), True, 'from flask_babel import gettext as _\n'), ((31067, 31133), 'flask_babel.gettext', '_', (['"""Unsupported time grain: %(time_grain)s"""'], {'time_grain': 'time_grain'}), "('Unsupported time grain: %(time_grain)s', time_grain=time_grain)\n", (31068, 31133), True, 'from flask_babel import gettext as _\n'), ((31396, 31441), 'flask_babel.gettext', '_', (['"""Periods must be a positive integer value"""'], {}), "('Periods must be a positive integer value')\n", (31397, 31441), True, 'from flask_babel import gettext as _\n'), ((31585, 31645), 'flask_babel.gettext', '_', (['"""Confidence interval must be between 0 and 1 (exclusive)"""'], {}), "('Confidence interval must be between 0 and 1 (exclusive)')\n", (31586, 31645), True, 'from flask_babel import gettext as _\n'), ((31734, 31777), 'flask_babel.gettext', '_', (['"""DataFrame must include temporal column"""'], {}), "('DataFrame must include temporal column')\n", (31735, 31777), True, 'from flask_babel import gettext as _\n'), ((31848, 31890), 'flask_babel.gettext', '_', (['"""DataFrame include at least one series"""'], {}), "('DataFrame include at least one series')\n", (31849, 31890), True, 'from flask_babel import gettext as _\n'), ((5209, 5284), 'flask_babel.gettext', '_', (['"""Column referenced by aggregate is undefined: %(column)s"""'], {'column': 'column'}), "('Column referenced by aggregate is undefined: %(column)s', column=column)\n", (5210, 5284), True, 'from flask_babel import gettext as _\n'), ((5458, 5517), 'flask_babel.gettext', '_', (['"""Operator undefined for aggregator: %(name)s"""'], {'name': 'name'}), "('Operator undefined for aggregator: %(name)s', name=name)\n", (5459, 5517), True, 'from flask_babel import gettext as _\n'), ((15007, 15123), 'flask_babel.gettext', '_', (['"""Invalid options for %(rolling_type)s: %(options)s"""'], {'rolling_type': 'rolling_type', 'options': 'rolling_type_options'}), "('Invalid options for %(rolling_type)s: %(options)s', rolling_type=\n rolling_type, options=rolling_type_options)\n", (15008, 15123), True, 'from flask_babel import gettext as _\n'), ((20147, 20198), 'superset.utils.core.TIME_COMPARISION.join', 'TIME_COMPARISION.join', (['[compare_type, s_col, c_col]'], {}), '([compare_type, s_col, c_col])\n', (20168, 20198), False, 'from superset.utils.core import DTTM_ALIAS, PostProcessingBoxplotWhiskerType, PostProcessingContributionOrientation, TIME_COMPARISION\n'), ((22979, 23006), 'flask_babel.gettext', '_', (['"""Invalid geohash string"""'], {}), "('Invalid geohash string')\n", (22980, 23006), True, 'from flask_babel import gettext as _\n'), ((23700, 23753), 'geohash.encode', 'geohash_lib.encode', (["row['latitude']", "row['longitude']"], {}), 
"(row['latitude'], row['longitude'])\n", (23718, 23753), True, 'import geohash as geohash_lib\n'), ((23911, 23942), 'flask_babel.gettext', '_', (['"""Invalid longitude/latitude"""'], {}), "('Invalid longitude/latitude')\n", (23912, 23942), True, 'from flask_babel import gettext as _\n'), ((25417, 25445), 'flask_babel.gettext', '_', (['"""Invalid geodetic string"""'], {}), "('Invalid geodetic string')\n", (25418, 25445), True, 'from flask_babel import gettext as _\n'), ((28560, 28596), 'flask_babel.gettext', '_', (['"""`prophet` package not installed"""'], {}), "('`prophet` package not installed')\n", (28561, 28596), True, 'from flask_babel import gettext as _\n'), ((35718, 35748), 'numpy.nanpercentile', 'np.nanpercentile', (['series', 'high'], {}), '(series, high)\n', (35734, 35748), True, 'import numpy as np\n'), ((35819, 35848), 'numpy.nanpercentile', 'np.nanpercentile', (['series', 'low'], {}), '(series, low)\n', (35835, 35848), True, 'import numpy as np\n'), ((5792, 5852), 'flask_babel.gettext', '_', (['"""Invalid numpy function: %(operator)s"""'], {'operator': 'operator'}), "('Invalid numpy function: %(operator)s', operator=operator)\n", (5793, 5852), True, 'from flask_babel import gettext as _\n'), ((26730, 26827), 'flask_babel.gettext', '_', (['"""Column "%(column)s" is not numeric or does not exists in the query results."""'], {'column': 'col'}), '(\'Column "%(column)s" is not numeric or does not exists in the query results.\'\n , column=col)\n', (26731, 26827), True, 'from flask_babel import gettext as _\n'), ((35405, 35525), 'flask_babel.gettext', '_', (['"""percentiles must be a list or tuple with two numeric values, of which the first is lower than the second value"""'], {}), "('percentiles must be a list or tuple with two numeric values, of which the first is lower than the second value'\n )\n", (35406, 35525), True, 'from flask_babel import gettext as _\n'), ((4331, 4382), 'flask_babel.gettext', '_', (['"""Referenced columns not available in DataFrame."""'], {}), "('Referenced columns not available in DataFrame.')\n", (4332, 4382), True, 'from flask_babel import gettext as _\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Trains a neural network for bird classification.
For usage information, call with --help.
Author: <NAME>
"""
from __future__ import print_function
import sys
import os
import io
from argparse import ArgumentParser
try:
import cPickle as pickle
except ImportError:
import pickle
import numpy as np
import theano
import theano.tensor as T
floatX = theano.config.floatX
import lasagne
from progress import progress
import config
import data
import model
import augment
def opts_parser():
descr = "Trains a neural network for bird classification."
parser = ArgumentParser(description=descr)
parser.add_argument('modelfile', metavar='MODELFILE',
type=str,
help='File to save the learned weights to (.npz format)')
parser.add_argument('--dataset',
type=str, default='birdclef',
help='Name of the dataset to use (default: %(default)s)')
parser.add_argument('--validate',
action='store_true', default=False,
help='Monitor validation loss (disabled by default)')
parser.add_argument('--no-validate',
action='store_false', dest='validate',
            help='Disable monitoring validation loss (this is the default)')
parser.add_argument('--save-errors',
action='store_true', default=False,
help='If given, save error log in {MODELFILE%%.npz}.err.npz.')
parser.add_argument('--keep-state',
action='store_true', default=False,
help='If given, save the complete training state after each epoch '
'in {MODELFILE%%.npz}.state, and load it to continue from '
'there if the script is restarted.')
parser.add_argument('--vars', metavar='FILE',
action='append', type=str,
default=[os.path.join(os.path.dirname(__file__), 'defaults.vars')],
help='Reads configuration variables from a FILE of KEY=VALUE '
'lines. Can be given multiple times, settings from later '
'files overriding earlier ones. Will read defaults.vars, '
'then files given here.')
parser.add_argument('--var', metavar='KEY=VALUE',
action='append', type=str,
help='Set the configuration variable KEY to VALUE. Overrides '
'settings from --vars options. Can be given multiple times.')
return parser
def get_state(network, updates):
return ([p.get_value() for p in updates.keys()] +
lasagne.layers.get_all_param_values(network, trainable=False))
def restore_state(network, updates, state):
for p, s in zip(updates.keys(), state):
p.set_value(s)
lasagne.layers.set_all_param_values(
network, state[len(updates):], trainable=False)
def save_model(modelfile, network, cfg):
np.savez(modelfile, **{'param%d' % i: p for i, p in enumerate(
lasagne.layers.get_all_param_values(network))})
write = 'wb' if sys.version_info[0] == 2 else 'w'
with io.open(modelfile + '.vars', write) as f:
f.writelines('%s=%s\n' % kv for kv in cfg.items())
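# A hedged sketch of restoring weights saved by save_model() above; this
# mirrors the np.savez layout ('param0', 'param1', ...) and the loading code
# used for cfg['init_from'] in main() below:
#
#   with np.load(modelfile, encoding='latin1') as f:
#       param_values = [f['param%d' % i] for i in range(len(f.files))]
#   lasagne.layers.set_all_param_values(network, param_values)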
def main():
# parse command line
parser = opts_parser()
options = parser.parse_args()
modelfile = options.modelfile
# read configuration files and immediate settings
cfg = {}
for fn in options.vars:
cfg.update(config.parse_config_file(fn))
cfg.update(config.parse_variable_assignments(options.var))
# prepare dataset
datadir = os.path.join(os.path.dirname(__file__),
os.path.pardir, 'datasets', options.dataset)
print("Preparing training data feed...")
with io.open(os.path.join(datadir, 'filelists', 'train')) as f:
filelist = [l.rstrip() for l in f if l.rstrip()]
train_feed, train_formats = data.prepare_datafeed(filelist, datadir,
'train', cfg)
# If told so, we plot some mini-batches on screen.
if cfg.get('plot_datafeed'):
import matplotlib.pyplot as plt
for batch in data.run_datafeed(train_feed, cfg):
plt.matshow(np.log(batch['spect'][0]).T, aspect='auto',
origin='lower', cmap='hot', interpolation='nearest')
plt.colorbar()
plt.title(str(batch['label'][0]))
plt.show()
# We start the mini-batch generator and augmenter in one or more
# background threads or processes (unless disabled).
bg_threads = cfg['bg_threads']
bg_processes = cfg['bg_processes']
if not bg_threads and not bg_processes:
# no background processing: just create a single generator
batches = data.run_datafeed(train_feed, cfg)
elif bg_threads:
# multithreading: create a separate generator per thread
batches = augment.generate_in_background(
[data.run_datafeed(feed, cfg)
for feed in data.split_datafeed(train_feed, bg_threads, cfg)],
num_cached=bg_threads * 2)
elif bg_processes:
# multiprocessing: single generator is forked along with processes
batches = augment.generate_in_background(
[data.run_datafeed(train_feed, cfg)] * bg_processes,
num_cached=bg_processes * 25,
in_processes=True)
# If told so, we benchmark the creation of a given number of mini-batches.
if cfg.get('benchmark_datafeed'):
print("Benchmark: %d mini-batches of %d items " %
(cfg['benchmark_datafeed'], cfg['batchsize']), end='')
if bg_threads:
print("(in %d threads): " % bg_threads)
elif bg_processes:
print("(in %d processes): " % bg_processes)
else:
print("(in main thread): ")
import time
import itertools
t0 = time.time()
next(itertools.islice(batches, cfg['benchmark_datafeed'],
cfg['benchmark_datafeed']), None)
t1 = time.time()
        print(t1 - t0)
return
# - prepare validation data generator
if options.validate:
print("Preparing validation data feed...")
with io.open(os.path.join(datadir, 'filelists', 'valid')) as f:
filelist_val = [l.rstrip() for l in f if l.rstrip()]
val_feed, val_formats = data.prepare_datafeed(filelist_val, datadir,
'valid', cfg)
if bg_threads or bg_processes:
multi = bg_threads or bg_processes
val_feed = data.split_datafeed(val_feed, multi, cfg)
def run_val_datafeed():
if bg_threads or bg_processes:
return augment.generate_in_background(
[data.run_datafeed(feed, cfg)
for feed in val_feed],
num_cached=multi, in_processes=bool(bg_processes))
else:
return data.run_datafeed(val_feed, cfg)
print("Preparing training function...")
# instantiate neural network
input_vars = {name: T.TensorType(str(np.dtype(dtype)),
(False,) * len(shape))(name)
for name, (dtype, shape) in train_formats.items()}
input_shapes = {name: shape
for name, (dtype, shape) in train_formats.items()}
network = model.architecture(input_vars, input_shapes, cfg)
print("- %d layers (%d with weights), %f mio params" %
(len(lasagne.layers.get_all_layers(network)),
sum(hasattr(l, 'W') for l in lasagne.layers.get_all_layers(network)),
lasagne.layers.count_params(network, trainable=True) / 1e6))
print("- weight shapes: %r" % [
l.W.get_value().shape
for l in lasagne.layers.get_all_layers(network)
if hasattr(l, 'W') and hasattr(l.W, 'get_value')])
cost_vars = dict(input_vars)
# prepare for born-again-network, if needed
if cfg.get('ban'):
network2 = model.architecture(input_vars, input_shapes, cfg)
with np.load(cfg['ban'], encoding='latin1') as f:
lasagne.layers.set_all_param_values(
network2, [f['param%d' % i] for i in range(len(f.files))])
cost_vars['pseudo_label'] = lasagne.layers.get_output(
network2, deterministic=True)
# load pre-trained weights, if needed
if cfg.get('init_from'):
param_values = []
for fn in cfg['init_from'].split(':'):
with np.load(fn, encoding='latin1') as f:
param_values.extend(f['param%d' % i]
for i in range(len(f.files)))
lasagne.layers.set_all_param_values(network, param_values)
del param_values
# create cost expression
outputs = lasagne.layers.get_output(network, deterministic=False)
cost = T.mean(model.cost(outputs, cost_vars, 'train', cfg))
if cfg.get('l2_decay', 0):
cost_l2 = lasagne.regularization.regularize_network_params(
network, lasagne.regularization.l2) * cfg['l2_decay']
else:
cost_l2 = 0
# prepare and compile training function
params = lasagne.layers.get_all_params(network, trainable=True)
initial_eta = cfg['initial_eta']
eta_decay = cfg['eta_decay']
eta_decay_every = cfg.get('eta_decay_every', 1)
eta_cycle = tuple(map(float, str(cfg['eta_cycle']).split(':')))
if eta_cycle == (0,):
eta_cycle = (1,) # so eta_cycle=0 equals disabling it
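    # note: at the end of each epoch, eta is additionally multiplied by
    # eta_cycle[epoch % len(eta_cycle)] (see the training loop below), so e.g.
    # eta_cycle='1:0.1' shrinks the learning rate by 10x on every second epoch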
patience = cfg.get('patience', 0)
trials_of_patience = cfg.get('trials_of_patience', 1)
patience_criterion = cfg.get('patience_criterion',
'valid_loss' if options.validate
else 'train_loss')
momentum = cfg['momentum']
first_params = params[:cfg['first_params']]
first_params_eta_scale = cfg['first_params_eta_scale']
if cfg['learn_scheme'] == 'nesterov':
learn_scheme = lasagne.updates.nesterov_momentum
elif cfg['learn_scheme'] == 'momentum':
        learn_scheme = lasagne.updates.momentum
elif cfg['learn_scheme'] == 'adam':
learn_scheme = lasagne.updates.adam
else:
raise ValueError('Unknown learn_scheme=%s' % cfg['learn_scheme'])
eta = theano.shared(lasagne.utils.floatX(initial_eta))
if not first_params or first_params_eta_scale == 1:
updates = learn_scheme(cost + cost_l2, params, eta, momentum)
else:
grads = theano.grad(cost + cost_l2, params)
updates = learn_scheme(grads[len(first_params):],
params[len(first_params):], eta, momentum)
if first_params_eta_scale > 0:
updates.update(
learn_scheme(grads[:len(first_params)], first_params,
eta * first_params_eta_scale, momentum))
print("Compiling training function...")
train_fn = theano.function(list(input_vars.values()), cost, updates=updates,
on_unused_input='ignore')
# prepare and compile validation function, if requested
if options.validate:
print("Compiling validation function...")
outputs_test = lasagne.layers.get_output(network, deterministic=True)
cost_test = T.mean(model.cost(outputs_test, input_vars, 'valid', cfg))
if isinstance(outputs_test, (list, tuple)):
outputs_test = outputs_test[0]
val_fn = theano.function([input_vars[k] for k in val_formats],
[cost_test, outputs_test],
on_unused_input='ignore')
# restore previous training state, or create fresh training state
state = {}
if options.keep_state:
statefile = modelfile[:-len('.npz')] + '.state'
if os.path.exists(statefile):
print("Restoring training state...")
state = np.load(modelfile[:-len('.npz')] + '.state',
encoding='latin1')
restore_state(network, updates, state['network'])
epochs = cfg['epochs']
epochsize = cfg['epochsize']
batches = iter(batches)
if options.save_errors:
errors = state.get('errors', [])
if first_params and cfg['first_params_log']:
first_params_hist = []
if options.keep_state and os.path.exists(modelfile[:-4] + '.hist.npz'):
with np.load(modelfile[:-4] + '.hist.npz') as f:
first_params_hist = list(zip(*(f['param%d' % i]
for i in range(len(first_params)))))
if patience > 0:
best_error = state.get('best_error', np.inf)
best_state = state.get('best_state') or get_state(network, updates)
patience = state.get('patience', patience)
trials_of_patience = state.get('trials_of_patience', trials_of_patience)
epoch = state.get('epoch', 0)
del state
# run training loop
print("Training:")
for epoch in range(epoch, epochs):
# actual training
err = 0
for batch in progress(
range(epochsize), min_delay=.5,
desc='Epoch %d/%d: Batch ' % (epoch + 1, epochs)):
err += train_fn(**next(batches))
if not np.isfinite(err):
print("\nEncountered NaN loss in training. Aborting.")
sys.exit(1)
if first_params and cfg['first_params_log'] and (batch % cfg['first_params_log'] == 0):
first_params_hist.append(tuple(param.get_value()
for param in first_params))
np.savez(modelfile[:-4] + '.hist.npz',
**{'param%d' % i: param
for i, param in enumerate(zip(*first_params_hist))})
# report training loss
print("Train loss: %.3f" % (err / epochsize))
if options.save_errors:
errors.append(err / epochsize)
# compute and report validation loss, if requested
if options.validate:
import time
t0 = time.time()
# predict in mini-batches
val_err = 0
val_batches = 0
preds = []
truth = []
for batch in run_val_datafeed():
e, p = val_fn(**batch)
val_err += np.sum(e)
val_batches += 1
preds.append(p)
truth.append(batch['label'])
t1 = time.time()
# join mini-batches
preds = np.concatenate(preds) if len(preds) > 1 else preds[0]
truth = np.concatenate(truth) if len(truth) > 1 else truth[0]
# show results
print("Validation loss: %.3f" % (val_err / val_batches))
from eval import evaluate
results = evaluate(preds, truth)
print("Validation error: %.3f" % (1 - results['accuracy']))
print("Validation MAP: %.3f" % results['map'])
print("(took %.2f seconds)" % (t1 - t0))
if options.save_errors:
errors.append(val_err / val_batches)
errors.append(1 - results['accuracy'])
errors.append(results['map'])
# update learning rate and/or apply early stopping, if needed
if patience > 0:
if patience_criterion == 'train_loss':
cur_error = err / epochsize
elif patience_criterion == 'valid_loss':
cur_error = val_err / val_batches
elif patience_criterion == 'valid_error':
cur_error = 1 - results['accuracy']
            elif patience_criterion == 'valid_map':
                cur_error = 1 - results['map']
            else:
                raise ValueError('Unknown patience_criterion=%s' %
                                 patience_criterion)
if cur_error <= best_error:
best_error = cur_error
best_state = get_state(network, updates)
patience = cfg['patience']
else:
patience -= 1
if patience == 0:
if eta_decay_every == 'trial_of_patience' and eta_decay != 1:
eta.set_value(eta.get_value() * lasagne.utils.floatX(eta_decay))
restore_state(network, updates, best_state)
patience = cfg['patience']
trials_of_patience -= 1
print("Lost patience (%d remaining trials)." % trials_of_patience)
if trials_of_patience == 0:
break
if eta_decay_every != 'trial_of_patience' and eta_decay != 1 and \
(epoch + 1) % eta_decay_every == 0:
eta.set_value(eta.get_value() * lasagne.utils.floatX(eta_decay))
if eta_cycle[epoch % len(eta_cycle)] != 1:
eta.set_value(eta.get_value() *
lasagne.utils.floatX(eta_cycle[epoch % len(eta_cycle)]))
# store current training state, if needed
if options.keep_state:
state = {}
state['epoch'] = epoch + 1
state['network'] = get_state(network, updates)
if options.save_errors:
state['errors'] = errors
if patience > 0:
state['best_error'] = best_error
state['best_state'] = best_state
state['patience'] = patience
state['trials_of_patience'] = trials_of_patience
with open(statefile, 'wb') as f:
pickle.dump(state, f, -1)
del state
# for debugging: print memory use and break into debugger
#import resource, psutil
#print("Memory usage: %.3f MiB / %.3f MiB" %
# (resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024.,
# psutil.Process().memory_info()[0] / float(1024**2)))
#import pdb; pdb.set_trace()
# save final network
print("Saving final model")
save_model(modelfile, network, cfg)
if options.save_errors:
np.savez(modelfile[:-len('.npz')] + '.err.npz',
np.asarray(errors).reshape(epoch + 1, -1))
if __name__ == "__main__":
main()
|
[
"numpy.load",
"config.parse_config_file",
"numpy.sum",
"argparse.ArgumentParser",
"model.cost",
"pickle.dump",
"lasagne.regularization.regularize_network_params",
"lasagne.layers.get_output",
"os.path.join",
"data.run_datafeed",
"model.architecture",
"os.path.dirname",
"os.path.exists",
"numpy.isfinite",
"matplotlib.pyplot.colorbar",
"data.split_datafeed",
"io.open",
"lasagne.layers.count_params",
"lasagne.layers.set_all_param_values",
"lasagne.layers.get_all_param_values",
"lasagne.layers.get_all_params",
"data.prepare_datafeed",
"matplotlib.pyplot.show",
"numpy.asarray",
"itertools.islice",
"eval.evaluate",
"theano.grad",
"sys.exit",
"numpy.concatenate",
"numpy.log",
"lasagne.layers.get_all_layers",
"theano.function",
"numpy.dtype",
"lasagne.utils.floatX",
"time.time",
"config.parse_variable_assignments"
] |
[((628, 661), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': 'descr'}), '(description=descr)\n', (642, 661), False, 'from argparse import ArgumentParser\n'), ((3855, 3909), 'data.prepare_datafeed', 'data.prepare_datafeed', (['filelist', 'datadir', '"""train"""', 'cfg'], {}), "(filelist, datadir, 'train', cfg)\n", (3876, 3909), False, 'import data\n'), ((7403, 7452), 'model.architecture', 'model.architecture', (['input_vars', 'input_shapes', 'cfg'], {}), '(input_vars, input_shapes, cfg)\n', (7421, 7452), False, 'import model\n'), ((8837, 8892), 'lasagne.layers.get_output', 'lasagne.layers.get_output', (['network'], {'deterministic': '(False)'}), '(network, deterministic=False)\n', (8862, 8892), False, 'import lasagne\n'), ((9214, 9268), 'lasagne.layers.get_all_params', 'lasagne.layers.get_all_params', (['network'], {'trainable': '(True)'}), '(network, trainable=True)\n', (9243, 9268), False, 'import lasagne\n'), ((2550, 2611), 'lasagne.layers.get_all_param_values', 'lasagne.layers.get_all_param_values', (['network'], {'trainable': '(False)'}), '(network, trainable=False)\n', (2585, 2611), False, 'import lasagne\n'), ((3060, 3095), 'io.open', 'io.open', (["(modelfile + '.vars')", 'write'], {}), "(modelfile + '.vars', write)\n", (3067, 3095), False, 'import io\n'), ((3455, 3501), 'config.parse_variable_assignments', 'config.parse_variable_assignments', (['options.var'], {}), '(options.var)\n', (3488, 3501), False, 'import config\n'), ((3553, 3578), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3568, 3578), False, 'import os\n'), ((4114, 4148), 'data.run_datafeed', 'data.run_datafeed', (['train_feed', 'cfg'], {}), '(train_feed, cfg)\n', (4131, 4148), False, 'import data\n'), ((4721, 4755), 'data.run_datafeed', 'data.run_datafeed', (['train_feed', 'cfg'], {}), '(train_feed, cfg)\n', (4738, 4755), False, 'import data\n'), ((5874, 5885), 'time.time', 'time.time', ([], {}), '()\n', (5883, 5885), False, 'import time\n'), ((6029, 6040), 'time.time', 'time.time', ([], {}), '()\n', (6038, 6040), False, 'import time\n'), ((6368, 6426), 'data.prepare_datafeed', 'data.prepare_datafeed', (['filelist_val', 'datadir', '"""valid"""', 'cfg'], {}), "(filelist_val, datadir, 'valid', cfg)\n", (6389, 6426), False, 'import data\n'), ((8038, 8087), 'model.architecture', 'model.architecture', (['input_vars', 'input_shapes', 'cfg'], {}), '(input_vars, input_shapes, cfg)\n', (8056, 8087), False, 'import model\n'), ((8310, 8365), 'lasagne.layers.get_output', 'lasagne.layers.get_output', (['network2'], {'deterministic': '(True)'}), '(network2, deterministic=True)\n', (8335, 8365), False, 'import lasagne\n'), ((8709, 8767), 'lasagne.layers.set_all_param_values', 'lasagne.layers.set_all_param_values', (['network', 'param_values'], {}), '(network, param_values)\n', (8744, 8767), False, 'import lasagne\n'), ((8911, 8955), 'model.cost', 'model.cost', (['outputs', 'cost_vars', '"""train"""', 'cfg'], {}), "(outputs, cost_vars, 'train', cfg)\n", (8921, 8955), False, 'import model\n'), ((10337, 10370), 'lasagne.utils.floatX', 'lasagne.utils.floatX', (['initial_eta'], {}), '(initial_eta)\n', (10357, 10370), False, 'import lasagne\n'), ((10524, 10559), 'theano.grad', 'theano.grad', (['(cost + cost_l2)', 'params'], {}), '(cost + cost_l2, params)\n', (10535, 10559), False, 'import theano\n'), ((11248, 11302), 'lasagne.layers.get_output', 'lasagne.layers.get_output', (['network'], {'deterministic': '(True)'}), '(network, deterministic=True)\n', (11273, 11302), False, 'import 
lasagne\n'), ((11494, 11604), 'theano.function', 'theano.function', (['[input_vars[k] for k in val_formats]', '[cost_test, outputs_test]'], {'on_unused_input': '"""ignore"""'}), "([input_vars[k] for k in val_formats], [cost_test,\n outputs_test], on_unused_input='ignore')\n", (11509, 11604), False, 'import theano\n'), ((11847, 11872), 'os.path.exists', 'os.path.exists', (['statefile'], {}), '(statefile)\n', (11861, 11872), False, 'import os\n'), ((3410, 3438), 'config.parse_config_file', 'config.parse_config_file', (['fn'], {}), '(fn)\n', (3434, 3438), False, 'import config\n'), ((3715, 3758), 'os.path.join', 'os.path.join', (['datadir', '"""filelists"""', '"""train"""'], {}), "(datadir, 'filelists', 'train')\n", (3727, 3758), False, 'import os\n'), ((4307, 4321), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (4319, 4321), True, 'import matplotlib.pyplot as plt\n'), ((4380, 4390), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4388, 4390), True, 'import matplotlib.pyplot as plt\n'), ((5899, 5978), 'itertools.islice', 'itertools.islice', (['batches', "cfg['benchmark_datafeed']", "cfg['benchmark_datafeed']"], {}), "(batches, cfg['benchmark_datafeed'], cfg['benchmark_datafeed'])\n", (5915, 5978), False, 'import itertools\n'), ((6590, 6631), 'data.split_datafeed', 'data.split_datafeed', (['val_feed', 'multi', 'cfg'], {}), '(val_feed, multi, cfg)\n', (6609, 6631), False, 'import data\n'), ((8101, 8139), 'numpy.load', 'np.load', (["cfg['ban']"], {'encoding': '"""latin1"""'}), "(cfg['ban'], encoding='latin1')\n", (8108, 8139), True, 'import numpy as np\n'), ((9006, 9095), 'lasagne.regularization.regularize_network_params', 'lasagne.regularization.regularize_network_params', (['network', 'lasagne.regularization.l2'], {}), '(network, lasagne.\n regularization.l2)\n', (9054, 9095), False, 'import lasagne\n'), ((11330, 11380), 'model.cost', 'model.cost', (['outputs_test', 'input_vars', '"""valid"""', 'cfg'], {}), "(outputs_test, input_vars, 'valid', cfg)\n", (11340, 11380), False, 'import model\n'), ((12368, 12412), 'os.path.exists', 'os.path.exists', (["(modelfile[:-4] + '.hist.npz')"], {}), "(modelfile[:-4] + '.hist.npz')\n", (12382, 12412), False, 'import os\n'), ((14123, 14134), 'time.time', 'time.time', ([], {}), '()\n', (14132, 14134), False, 'import time\n'), ((14519, 14530), 'time.time', 'time.time', ([], {}), '()\n', (14528, 14530), False, 'import time\n'), ((14867, 14889), 'eval.evaluate', 'evaluate', (['preds', 'truth'], {}), '(preds, truth)\n', (14875, 14889), False, 'from eval import evaluate\n'), ((6220, 6263), 'os.path.join', 'os.path.join', (['datadir', '"""filelists"""', '"""valid"""'], {}), "(datadir, 'filelists', 'valid')\n", (6232, 6263), False, 'import os\n'), ((6981, 7013), 'data.run_datafeed', 'data.run_datafeed', (['val_feed', 'cfg'], {}), '(val_feed, cfg)\n', (6998, 7013), False, 'import data\n'), ((8545, 8575), 'numpy.load', 'np.load', (['fn'], {'encoding': '"""latin1"""'}), "(fn, encoding='latin1')\n", (8552, 8575), True, 'import numpy as np\n'), ((12431, 12468), 'numpy.load', 'np.load', (["(modelfile[:-4] + '.hist.npz')"], {}), "(modelfile[:-4] + '.hist.npz')\n", (12438, 12468), True, 'import numpy as np\n'), ((13290, 13306), 'numpy.isfinite', 'np.isfinite', (['err'], {}), '(err)\n', (13301, 13306), True, 'import numpy as np\n'), ((13395, 13406), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (13403, 13406), False, 'import sys\n'), ((14382, 14391), 'numpy.sum', 'np.sum', (['e'], {}), '(e)\n', (14388, 14391), True, 'import numpy as np\n'), 
((14583, 14604), 'numpy.concatenate', 'np.concatenate', (['preds'], {}), '(preds)\n', (14597, 14604), True, 'import numpy as np\n'), ((14657, 14678), 'numpy.concatenate', 'np.concatenate', (['truth'], {}), '(truth)\n', (14671, 14678), True, 'import numpy as np\n'), ((17475, 17500), 'pickle.dump', 'pickle.dump', (['state', 'f', '(-1)'], {}), '(state, f, -1)\n', (17486, 17500), False, 'import pickle\n'), ((1868, 1893), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1883, 1893), False, 'import os\n'), ((4174, 4199), 'numpy.log', 'np.log', (["batch['spect'][0]"], {}), "(batch['spect'][0])\n", (4180, 4199), True, 'import numpy as np\n'), ((4909, 4937), 'data.run_datafeed', 'data.run_datafeed', (['feed', 'cfg'], {}), '(feed, cfg)\n', (4926, 4937), False, 'import data\n'), ((7133, 7148), 'numpy.dtype', 'np.dtype', (['dtype'], {}), '(dtype)\n', (7141, 7148), True, 'import numpy as np\n'), ((7527, 7565), 'lasagne.layers.get_all_layers', 'lasagne.layers.get_all_layers', (['network'], {}), '(network)\n', (7556, 7565), False, 'import lasagne\n'), ((7660, 7712), 'lasagne.layers.count_params', 'lasagne.layers.count_params', (['network'], {'trainable': '(True)'}), '(network, trainable=True)\n', (7687, 7712), False, 'import lasagne\n'), ((7812, 7850), 'lasagne.layers.get_all_layers', 'lasagne.layers.get_all_layers', (['network'], {}), '(network)\n', (7841, 7850), False, 'import lasagne\n'), ((16686, 16717), 'lasagne.utils.floatX', 'lasagne.utils.floatX', (['eta_decay'], {}), '(eta_decay)\n', (16706, 16717), False, 'import lasagne\n'), ((18057, 18075), 'numpy.asarray', 'np.asarray', (['errors'], {}), '(errors)\n', (18067, 18075), True, 'import numpy as np\n'), ((2949, 2993), 'lasagne.layers.get_all_param_values', 'lasagne.layers.get_all_param_values', (['network'], {}), '(network)\n', (2984, 2993), False, 'import lasagne\n'), ((4967, 5015), 'data.split_datafeed', 'data.split_datafeed', (['train_feed', 'bg_threads', 'cfg'], {}), '(train_feed, bg_threads, cfg)\n', (4986, 5015), False, 'import data\n'), ((6788, 6816), 'data.run_datafeed', 'data.run_datafeed', (['feed', 'cfg'], {}), '(feed, cfg)\n', (6805, 6816), False, 'import data\n'), ((5226, 5260), 'data.run_datafeed', 'data.run_datafeed', (['train_feed', 'cfg'], {}), '(train_feed, cfg)\n', (5243, 5260), False, 'import data\n'), ((7608, 7646), 'lasagne.layers.get_all_layers', 'lasagne.layers.get_all_layers', (['network'], {}), '(network)\n', (7637, 7646), False, 'import lasagne\n'), ((16162, 16193), 'lasagne.utils.floatX', 'lasagne.utils.floatX', (['eta_decay'], {}), '(eta_decay)\n', (16182, 16193), False, 'import lasagne\n')]
|
# Copyright 2009 by <NAME>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
# Importing these functions with a leading underscore as they are not intended for reuse
from Bio._py3k import urlopen as _urlopen
from Bio._py3k import urlencode as _urlencode
from xml.sax import handler
from xml.sax.expatreader import ExpatParser
class Record(list):
"""Represents search results returned by ScanProsite.
This record is a list containing the search results returned by
ScanProsite. The record also contains the data members n_match,
n_seq, capped, and warning.
"""
def __init__(self):
self.n_match = None
self.n_seq = None
self.capped = None
self.warning = None
def scan(seq="", mirror='http://www.expasy.org', output='xml', **keywords):
"""Execute a ScanProsite search.
Arguments:
- mirror: The ScanProsite mirror to be used
(default: http://www.expasy.org).
- seq: The query sequence, or UniProtKB (Swiss-Prot,
TrEMBL) accession
- output: Format of the search results
(default: xml)
Further search parameters can be passed as keywords; see the
documentation for programmatic access to ScanProsite at
http://www.expasy.org/tools/scanprosite/ScanPrositeREST.html
for a description of such parameters.
This function returns a handle to the search results returned by
ScanProsite. Search results in the XML format can be parsed into a
Python object, by using the Bio.ExPASy.ScanProsite.read function.
"""
parameters = {'seq': seq,
'output': output}
for key, value in keywords.items():
if value is not None:
parameters[key] = value
command = _urlencode(parameters)
url = "%s/cgi-bin/prosite/PSScan.cgi?%s" % (mirror, command)
handle = _urlopen(url)
return handle
def read(handle):
"""Parse search results returned by ScanProsite into a Python object."""
content_handler = ContentHandler()
saxparser = Parser()
saxparser.setContentHandler(content_handler)
saxparser.parse(handle)
record = content_handler.record
return record
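# A minimal usage sketch (assumes network access to the ExPASy mirror; the
# query sequence below is an arbitrary example):
#
#     from Bio.ExPASy import ScanProsite
#     handle = ScanProsite.scan(seq="MEHKEVVLLLLLFLKSGQG")
#     record = ScanProsite.read(handle)
#     print(record.n_match)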
# The functions below are considered private
class Parser(ExpatParser):
def __init__(self):
ExpatParser.__init__(self)
self.firsttime = True
def feed(self, data, isFinal=0):
        # Error messages returned by the ScanProsite server are formatted
        # as plain text instead of an XML document. To catch such error
# messages, we override the feed method of the Expat parser.
# The error message is (hopefully) contained in the data that was just
# fed to the parser.
if self.firsttime:
if data[:5].decode('utf-8') != "<?xml":
raise ValueError(data)
self.firsttime = False
return ExpatParser.feed(self, data, isFinal)
class ContentHandler(handler.ContentHandler):
integers = ("start", "stop")
strings = ("sequence_ac",
"sequence_id",
"sequence_db",
"signature_ac",
"level",
"level_tag")
def __init__(self):
self.element = []
def startElement(self, name, attrs):
self.element.append(name)
self.content = ""
if self.element == ["matchset"]:
self.record = Record()
self.record.n_match = int(attrs["n_match"])
self.record.n_seq = int(attrs["n_seq"])
elif self.element == ["matchset", "match"]:
match = {}
self.record.append(match)
def endElement(self, name):
assert name == self.element.pop()
name = str(name)
if self.element == ["matchset", "match"]:
match = self.record[-1]
if name in ContentHandler.integers:
match[name] = int(self.content)
elif name in ContentHandler.strings:
match[name] = self.content
else:
# Unknown type, treat it as a string
match[name] = self.content
def characters(self, content):
self.content += content
|
[
"xml.sax.expatreader.ExpatParser.feed",
"Bio._py3k.urlopen",
"Bio._py3k.urlencode",
"xml.sax.expatreader.ExpatParser.__init__"
] |
[((1894, 1916), 'Bio._py3k.urlencode', '_urlencode', (['parameters'], {}), '(parameters)\n', (1904, 1916), True, 'from Bio._py3k import urlencode as _urlencode\n'), ((1995, 2008), 'Bio._py3k.urlopen', '_urlopen', (['url'], {}), '(url)\n', (2003, 2008), True, 'from Bio._py3k import urlopen as _urlopen\n'), ((2427, 2453), 'xml.sax.expatreader.ExpatParser.__init__', 'ExpatParser.__init__', (['self'], {}), '(self)\n', (2447, 2453), False, 'from xml.sax.expatreader import ExpatParser\n'), ((3012, 3049), 'xml.sax.expatreader.ExpatParser.feed', 'ExpatParser.feed', (['self', 'data', 'isFinal'], {}), '(self, data, isFinal)\n', (3028, 3049), False, 'from xml.sax.expatreader import ExpatParser\n')]
|
#!/usr/bin/env python3
"""
Constants for use during tests
"""
# built-ins
import copy
import secrets
from pathlib import Path
from typing import Dict, List, Union
from xml.etree import ( # nosec (Used only when TYPE_CHECKING) # nosem: python.lang.security.use-defused-xml.use-defused-xml
ElementTree as InsecureElementTree,
)
# third party
from defusedxml import ElementTree
import yaml
# pylint: disable=too-many-lines
## Sample Results API environmental information
VALID_RESULTS_API: Dict[str, Union[str, bool, Dict[str, str]]] = {}
VALID_RESULTS_API["base_url"] = "https://analysiscenter.veracode.com/api/"
VALID_RESULTS_API["version"] = {
"detailedreport.do": "5.0",
"detailedreportpdf.do": "4.0",
"getaccountcustomfieldlist.do": "5.0",
"getappbuilds.do": "4.0",
"getcallstacks.do": "5.0",
"summaryreport.do": "4.0",
"summaryreportpdf.do": "4.0",
"thirdpartyreportpdf.do": "4.0",
}
VALID_RESULTS_API["app_id"] = "1337"
VALID_RESULTS_API["api_key_id"] = secrets.token_hex(16)
VALID_RESULTS_API["api_key_secret"] = secrets.token_hex(64) # nosec
VALID_RESULTS_API["ignore_compliance_status"] = False
VALID_RESULTS_API_DIFFERENT_APP_ID = copy.deepcopy(VALID_RESULTS_API)
VALID_RESULTS_API_DIFFERENT_APP_ID["app_id"] = "31337"
INVALID_RESULTS_API_MISSING_VERSION_KEY = copy.deepcopy(VALID_RESULTS_API)
del INVALID_RESULTS_API_MISSING_VERSION_KEY["version"]
INVALID_RESULTS_API_INCORRECT_APP_ID = copy.deepcopy(VALID_RESULTS_API)
INVALID_RESULTS_API_INCORRECT_APP_ID["app_id"] = 1337
INVALID_RESULTS_API_INCORRECT_VERSION_VALUES = copy.deepcopy(VALID_RESULTS_API)
for value in VALID_RESULTS_API["version"]:
INVALID_RESULTS_API_INCORRECT_VERSION_VALUES["version"][value] = float(
VALID_RESULTS_API["version"][value]
)
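# version values must be strings; coercing them to floats above is what
# makes this configuration invalid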
INVALID_RESULTS_API_MISSING_DOMAIN = copy.deepcopy(VALID_RESULTS_API)
INVALID_RESULTS_API_MISSING_DOMAIN["base_url"] = "https:///api/"
INVALID_RESULTS_API_INCORRECT_COMPLIANCE_STATUS = copy.deepcopy(VALID_RESULTS_API)
INVALID_RESULTS_API_INCORRECT_COMPLIANCE_STATUS["ignore_compliance_status"] = "True"
INVALID_RESULTS_API_INVALID_PORT = copy.deepcopy(VALID_RESULTS_API)
INVALID_RESULTS_API_INVALID_PORT[
"base_url"
] = "https://analysiscenter.veracode.com:65536/api/"
VALID_RESULTS_API_WITH_PORT_IN_URL = copy.deepcopy(VALID_RESULTS_API)
VALID_RESULTS_API_WITH_PORT_IN_URL[
"base_url"
] = "https://analysiscenter.veracode.com:443/api/"
# Valid Results API getappbuilds.do information, but no
# policy_compliance_status
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS: Dict[
str, Union[str, bytes, InsecureElementTree.Element]
] = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<applicationbuilds xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/2.0/applicationbuilds"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/applicationbuilds
https://analysiscenter.veracode.com/resource/2.0/applicationbuilds.xsd"
account_id="00000000001">
<application app_name="app name" app_id="1337" industry_vertical="Manufacturing" assurance_level="Very High"
business_criticality="Very High" origin="Not Specified" modified_date="2019-08-13T14:00:10-04:00"
cots="false" business_unit="Not Specified" tags="">
<customfield name="Custom 1" value=""/>
<customfield name="Custom 2" value=""/>
<customfield name="Custom 3" value=""/>
<customfield name="Custom 4" value=""/>
<customfield name="Custom 5" value=""/>
<customfield name="Custom 6" value=""/>
<customfield name="Custom 7" value=""/>
<customfield name="Custom 8" value=""/>
<customfield name="Custom 9" value=""/>
<customfield name="Custom 10" value=""/>
</application>
</applicationbuilds>
<!-- Parameters: report_changed_since=08/25/2019 only_latest=true include_in_progress=false -->""" # pylint: disable=line-too-long
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS["bytes"] = bytes(
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS["string"], "utf-8"
)
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS[
"Element"
] = ElementTree.fromstring(
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS["bytes"]
)
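# Each response fixture in this module carries the same payload in three
# forms ("string", "bytes", "Element") so tests can supply whichever
# representation the code under test consumes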
# Valid Results API getappbuilds.do information, with a failing
# policy_compliance_status
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS = {}
# Variant of
# VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS to
# add a failing policy_compliance_status
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<applicationbuilds xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/2.0/applicationbuilds"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/applicationbuilds
https://analysiscenter.veracode.com/resource/2.0/applicationbuilds.xsd"
account_id="00000000001">
<application app_name="app name" app_id="1337" industry_vertical="Manufacturing" assurance_level="Very High"
business_criticality="Very High" origin="Not Specified" modified_date="2019-08-13T14:00:10-04:00"
cots="false" business_unit="Not Specified" tags="">
<customfield name="Custom 1" value=""/>
<customfield name="Custom 2" value=""/>
<customfield name="Custom 3" value=""/>
<customfield name="Custom 4" value=""/>
<customfield name="Custom 5" value=""/>
<customfield name="Custom 6" value=""/>
<customfield name="Custom 7" value=""/>
<customfield name="Custom 8" value=""/>
<customfield name="Custom 9" value=""/>
<customfield name="Custom 10" value=""/>
<build version="2019-10 Testing" build_id="1234321" submitter="<NAME>" platform="Not Specified" lifecycle_stage="Deployed (In production and actively developed)" results_ready="true" policy_name="Veracode Recommended Medium" policy_version="1" policy_compliance_status="Did Not Pass" rules_status="Did Not Pass" grace_period_expired="false" scan_overdue="false">
<analysis_unit analysis_type="Static" published_date="2019-10-13T16:20:30-04:00" published_date_sec="1570998030" status="Results Ready"/>
</build>
</application>
</applicationbuilds>
<!-- Parameters: report_changed_since=08/25/2019 only_latest=true include_in_progress=false -->""" # pylint: disable=line-too-long
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS[
"bytes"
] = bytes(
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS[
"string"
],
"utf-8",
)
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS[
"Element"
] = ElementTree.fromstring(
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS[
"bytes"
]
)
# Valid Results API getappbuilds.do information, with a passing
# policy_compliance_status
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS = {}
# Variant of
# VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS to
# add a passing policy_compliance_status
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<applicationbuilds xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/2.0/applicationbuilds"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/applicationbuilds
https://analysiscenter.veracode.com/resource/2.0/applicationbuilds.xsd"
account_id="00000000001">
<application app_name="app name" app_id="1337" industry_vertical="Manufacturing" assurance_level="Very High"
business_criticality="Very High" origin="Not Specified" modified_date="2019-08-13T14:00:10-04:00"
cots="false" business_unit="Not Specified" tags="">
<customfield name="Custom 1" value=""/>
<customfield name="Custom 2" value=""/>
<customfield name="Custom 3" value=""/>
<customfield name="Custom 4" value=""/>
<customfield name="Custom 5" value=""/>
<customfield name="Custom 6" value=""/>
<customfield name="Custom 7" value=""/>
<customfield name="Custom 8" value=""/>
<customfield name="Custom 9" value=""/>
<customfield name="Custom 10" value=""/>
<build version="2019-10 Testing" build_id="1234321" submitter="<NAME>" platform="Not Specified" lifecycle_stage="Deployed (In production and actively developed)" results_ready="true" policy_name="Veracode Recommended Medium" policy_version="1" policy_compliance_status="Pass" rules_status="Pass" grace_period_expired="false" scan_overdue="false">
<analysis_unit analysis_type="Static" published_date="2019-10-13T16:20:30-04:00" published_date_sec="1570998030" status="Results Ready"/>
</build>
</application>
</applicationbuilds>
<!-- Parameters: report_changed_since=08/25/2019 only_latest=true include_in_progress=false -->""" # pylint: disable=line-too-long
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS[
"bytes"
] = bytes(
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS[
"string"
],
"utf-8",
)
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS[
"Element"
] = ElementTree.fromstring(
VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS[
"bytes"
]
)
## Sample Upload API environmental information
VALID_UPLOAD_API: Dict[str, Union[str, Dict[str, str], Path, bool]] = {}
VALID_UPLOAD_API["base_url"] = "https://analysiscenter.veracode.com/api/"
VALID_UPLOAD_API["version"] = {
"beginprescan.do": "5.0",
"beginscan.do": "5.0",
"createapp.do": "5.0",
"createbuild.do": "5.0",
"deleteapp.do": "5.0",
"deletebuild.do": "5.0",
"getappinfo.do": "5.0",
"getapplist.do": "5.0",
"getbuildinfo.do": "5.0",
"getbuildlist.do": "5.0",
"getfilelist.do": "5.0",
"getpolicylist.do": "5.0",
"getprescanresults.do": "5.0",
"getvendorlist.do": "5.0",
"removefile.do": "5.0",
"updateapp.do": "5.0",
"updatebuild.do": "5.0",
"uploadfile.do": "5.0",
"uploadlargefile.do": "5.0",
}
VALID_UPLOAD_API["app_id"] = "1337"
VALID_UPLOAD_API["build_dir"] = Path("/usr/local/bin/").absolute()
VALID_UPLOAD_API["build_id"] = "v1.2.3"
VALID_UPLOAD_API["sandbox_id"] = "321"
VALID_UPLOAD_API["scan_all_nonfatal_top_level_modules"] = True
VALID_UPLOAD_API["auto_scan"] = True
VALID_UPLOAD_API["api_key_id"] = secrets.token_hex(16)
VALID_UPLOAD_API["api_key_secret"] = secrets.token_hex(64) # nosec
INVALID_UPLOAD_API_MISSING_BUILD_DIR = copy.deepcopy(VALID_UPLOAD_API)
del INVALID_UPLOAD_API_MISSING_BUILD_DIR["build_dir"]
INVALID_UPLOAD_API_BUILD_DIR = copy.deepcopy(VALID_UPLOAD_API)
INVALID_UPLOAD_API_BUILD_DIR["build_dir"] = "/usr/local/bin/"
INVALID_UPLOAD_API_MISSING_DOMAIN = copy.deepcopy(VALID_UPLOAD_API)
INVALID_UPLOAD_API_MISSING_DOMAIN["base_url"] = "https:///api/"
INVALID_UPLOAD_API_INCORRECT_VERSION_VALUES = copy.deepcopy(VALID_UPLOAD_API)
for value in VALID_UPLOAD_API["version"]:
INVALID_UPLOAD_API_INCORRECT_VERSION_VALUES["version"][value] = float(
VALID_UPLOAD_API["version"][value]
)
INVALID_UPLOAD_API_BUILD_ID = copy.deepcopy(VALID_UPLOAD_API)
INVALID_UPLOAD_API_BUILD_ID["build_id"] = "invalid(build_id)"
INVALID_UPLOAD_API_SCAN_ALL_NONFATAL_TOP_LEVEL_MODULES = copy.deepcopy(VALID_UPLOAD_API)
INVALID_UPLOAD_API_SCAN_ALL_NONFATAL_TOP_LEVEL_MODULES[
"scan_all_nonfatal_top_level_modules"
] = "True"
INVALID_UPLOAD_API_AUTO_SCAN = copy.deepcopy(VALID_UPLOAD_API)
INVALID_UPLOAD_API_AUTO_SCAN["auto_scan"] = "False"
# Valid Upload API uploadlargefile.do information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/lzZ1eON0Bkr8iYjNVD9tqw
VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<filelist xmlns="https://analysiscenter.veracode.com/schema/2.0/filelist"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
account_id="00000000001" app_id="1337" build_id="3131313131" filelist_version="1.1"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/filelist
https://analysiscenter.veracode.com/resource/2.0/filelist.xsd">
<file file_id="-9223372036854775808" file_name="valid_file.pdb" file_status="Uploaded"/>
</filelist>"""
VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML["bytes"]
)
# Valid Upload API beginprescan.do information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/PX5ReM5acqjM~IOVEg2~rA
VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/buildinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/buildinfo
https://analysiscenter.veracode.com/resource/4.0/buildinfo.xsd"
buildinfo_version="1.4" account_id="00000000001" app_id="1337" build_id="3131313131">
<build version="v1" build_id="3131313131" submitter="JoeUser" platform="Not Specified"
lifecycle_stage="Not Specified" results_ready="false" policy_name="Veracode Transitional Very High"
policy_version="1" policy_compliance_status="Not Assessed" rules_status="Not Assessed"
grace_period_expired="false" scan_overdue="false" legacy_scan_engine="false">
<analysis_unit analysis_type="Static" status="Pre-Scan Submitted"/>
</build>
</buildinfo>"""
VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML["bytes"]
)
# Valid Upload API createbuild.do information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/vhuQ5lMdxRNQWUK1br1mDg
VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/buildinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/buildinfo
https://analysiscenter.veracode.com/resource/4.0/buildinfo.xsd" buildinfo_version="1.4"
account_id="00000000001" app_id="1337" sandbox_id="1122" build_id="3131313131"><build version="2019-10 Testing"
build_id="3131313131" submitter="JoeUser" platform="Not Specified" lifecycle_stage="Not Specified"
results_ready="false" policy_name="Veracode Transitional Very High" policy_version="1" policy_compliance_status="Not Assessed"
rules_status="Not Assessed" grace_period_expired="false" scan_overdue="false" legacy_scan_engine="false">
<analysis_unit analysis_type="Static" status="Incomplete"/>
</build>
</buildinfo>"""
VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML["bytes"]
)
# Valid Upload API getappinfo.do information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/kb2SM9net26_L91VploQGw
VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<appinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/2.0/appinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/appinfo
https://analysiscenter.veracode.com/resource/2.0/appinfo.xsd" appinfo_version="1.1"
account_id="00000000001">
<application app_id="1337" app_name="app name" description="app description" business_criticality="Very High"
policy="Veracode Transitional Very High" policy_updated_date="2019-08-13T14:02:08-04:00"
teams="Demo Team" origin="Not Specified" industry_vertical="Other" app_type="Other" deployment_method="Not Specified"
is_web_application="false" archer_app_name="archer app name" modified_date="2019-08-15T11:27:47-04:00"
cots="false" vast="false" business_unit="Not Specified" tags="">
<customfield name="Custom 1" value=""/>
<customfield name="Custom 2" value=""/>
<customfield name="Custom 3" value=""/>
<customfield name="Custom 4" value=""/>
<customfield name="Custom 5" value=""/>
<customfield name="Custom 6" value=""/>
<customfield name="Custom 7" value=""/>
<customfield name="Custom 8" value=""/>
<customfield name="Custom 9" value=""/>
<customfield name="Custom 10" value="foo"/>
</application>
</appinfo>"""
VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML["bytes"]
)
# Valid Upload API deletebuild.do information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/rERUQewXKGx2D_zaoi6wGw
VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildlist xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/2.0/buildlist"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/buildlist
https://analysiscenter.veracode.com/resource/2.0/buildlist.xsd" buildlist_version="1.3"
account_id="12345" app_id="54321" app_name="Application Name">
</buildlist>"""
VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML["bytes"]
)
# Valid Upload API getbuildinfo.do information - build status is "vendor reviewing"
# https://help.veracode.com/reader/orRWez4I0tnZNaA_i0zn9g/Yjclv0XIfU1v_yqmkt18zA
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/buildinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/buildinfo
https://analysiscenter.veracode.com/resource/4.0/buildinfo.xsd" buildinfo_version="1.4"
account_id="hunter2" app_id="1337" build_id="41414141">
<build version="13 Aug 2019 Static" build_id="41414141" submitter="Veracode" platform="Not Specified"
lifecycle_stage="Not Specified" results_ready="false" policy_name="Veracode Transitional Very High" policy_version="1"
policy_compliance_status="Not Assessed" policy_updated_date="2019-08-13T14:02:08-04:00"
rules_status="Not Assessed" grace_period_expired="false" scan_overdue="false" legacy_scan_engine="false">
<analysis_unit analysis_type="Static" status="Vendor Reviewing" engine_version="20190805180615"/>
</build>
</buildinfo> """
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML["bytes"]
)
# Valid Upload API getbuildinfo.do information - new build ready - second scenario
# https://help.veracode.com/reader/orRWez4I0tnZNaA_i0zn9g/Yjclv0XIfU1v_yqmkt18zA
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/buildinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/buildinfo
https://analysiscenter.veracode.com/resource/4.0/buildinfo.xsd" buildinfo_version="1.4"
account_id="hunter2" app_id="1337" build_id="41414141">
<build version="13 Aug 2019 Static" build_id="41414141" submitter="Veracode" platform="Not Specified"
lifecycle_stage="Not Specified" results_ready="true" policy_name="Veracode Transitional Very High" policy_version="1"
policy_compliance_status="Not Assessed" policy_updated_date="2019-08-13T14:02:08-04:00"
rules_status="Not Assessed" grace_period_expired="false" scan_overdue="false" legacy_scan_engine="false">
<analysis_unit analysis_type="Static" status="Scan in Process" engine_version="20190805180615"/>
</build>
</buildinfo> """
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML["Element"] = ElementTree.fromstring(
VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML["bytes"]
)
# Valid Upload API getbuildinfo.do information - build in progress
# https://help.veracode.com/reader/orRWez4I0tnZNaA_i0zn9g/Yjclv0XIfU1v_yqmkt18zA
VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/buildinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/buildinfo
https://analysiscenter.veracode.com/resource/4.0/buildinfo.xsd" buildinfo_version="1.4"
account_id="hunter2" app_id="1337" build_id="41414141">
<build version="13 Aug 2019 Static" build_id="41414141" submitter="Veracode" platform="Not Specified"
lifecycle_stage="Not Specified" results_ready="false" policy_name="Veracode Transitional Very High" policy_version="1"
policy_compliance_status="Not Assessed" policy_updated_date="2019-08-13T14:02:08-04:00"
rules_status="Not Assessed" grace_period_expired="false" scan_overdue="false" legacy_scan_engine="false">
<analysis_unit analysis_type="Static" status="Scan In Process" engine_version="20190805180615"/>
</build>
</buildinfo> """
VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML[
"Element"
] = ElementTree.fromstring(
VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML["bytes"]
)
# Valid Upload API getbuildinfo.do information - missing build tag
# https://help.veracode.com/reader/orRWez4I0tnZNaA_i0zn9g/Yjclv0XIfU1v_yqmkt18zA
VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/buildinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/buildinfo
https://analysiscenter.veracode.com/resource/4.0/buildinfo.xsd" buildinfo_version="1.4"
account_id="hunter2" app_id="1337" build_id="41414141">
</buildinfo> """
VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML[
"Element"
] = ElementTree.fromstring(
VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML["bytes"]
)
# Valid Upload API getbuildlist.do information - build ID present
# https://help.veracode.com/reader/orRWez4I0tnZNaA_i0zn9g/Yjclv0XIfU1v_yqmkt18zA
VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildlist xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/2.0/buildlist"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/buildlist
https://analysiscenter.veracode.com/resource/2.0/buildlist.xsd" buildlist_version="1.3"
account_id="12345" app_id="54321" sandbox_id="12345" app_name="Application Name">
<build build_id="7777"/>
</buildlist>"""
VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML[
"Element"
] = ElementTree.fromstring(
VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML["bytes"]
)
# Valid Upload API getbuildlist.do information - no build ID
# https://help.veracode.com/reader/orRWez4I0tnZNaA_i0zn9g/Yjclv0XIfU1v_yqmkt18zA
VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildlist xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/2.0/buildlist"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/2.0/buildlist
https://analysiscenter.veracode.com/resource/2.0/buildlist.xsd" buildlist_version="1.3"
account_id="12345" app_id="54321" sandbox_id="12345" app_name="Application Name">
</buildlist>"""
VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML[
"Element"
] = ElementTree.fromstring(
VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML["bytes"]
)
# Valid Upload API getbuildinfo.do information - missing analysis_unit status attribute
# https://help.veracode.com/reader/orRWez4I0tnZNaA_i0zn9g/Yjclv0XIfU1v_yqmkt18zA
VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<buildinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/buildinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/buildinfo
https://analysiscenter.veracode.com/resource/4.0/buildinfo.xsd" buildinfo_version="1.4"
account_id="hunter2" app_id="1337" build_id="41414141">
<build version="13 Aug 2019 Static" build_id="41414141" submitter="Veracode" platform="Not Specified"
lifecycle_stage="Not Specified" results_ready="false" policy_name="Veracode Transitional Very High" policy_version="1"
policy_compliance_status="Not Assessed" policy_updated_date="2019-08-13T14:02:08-04:00"
rules_status="Not Assessed" grace_period_expired="false" scan_overdue="false" legacy_scan_engine="false">
<analysis_unit analysis_type="Static" engine_version="20190805180615"/>
</build>
</buildinfo> """
VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML["bytes"] = bytes(
VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML["string"], "utf-8"
)
VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML[
"Element"
] = ElementTree.fromstring(
VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML["bytes"]
)
## Sample Sandbox API environmental information
VALID_SANDBOX_API: Dict[str, Union[str, Dict[str, str], Path, bool]] = {}
VALID_SANDBOX_API["base_url"] = "https://analysiscenter.veracode.com/api/"
VALID_SANDBOX_API["version"] = {
"createsandbox.do": "5.0",
"getsandboxlist.do": "5.0",
"promotesandbox.do": "5.0",
"updatesandbox.do": "5.0",
"deletesandbox.do": "5.0",
}
VALID_SANDBOX_API["app_id"] = "1337"
VALID_SANDBOX_API["build_id"] = "v1.2.3"
VALID_SANDBOX_API["sandbox_id"] = "321"
VALID_SANDBOX_API["sandbox_name"] = "fb/jonzeolla/add-sandbox_name"
VALID_SANDBOX_API["api_key_id"] = secrets.token_hex(16)
VALID_SANDBOX_API["api_key_secret"] = secrets.token_hex(64) # nosec
INVALID_SANDBOX_API_BUILD_ID = copy.deepcopy(VALID_SANDBOX_API)
INVALID_SANDBOX_API_BUILD_ID["build_id"] = "invalid(build_id)"
INVALID_SANDBOX_API_SANDBOX_NAME = copy.deepcopy(VALID_SANDBOX_API)
INVALID_SANDBOX_API_SANDBOX_NAME["sandbox_name"] = r"invalid\sandbox_name"
INVALID_SANDBOX_API_INCORRECT_VERSION_VALUES = copy.deepcopy(VALID_SANDBOX_API)
for value in VALID_SANDBOX_API["version"]:
INVALID_SANDBOX_API_INCORRECT_VERSION_VALUES["version"][value] = float(
VALID_SANDBOX_API["version"][value]
)
# Valid Sandbox API information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/twPT73YBy_iQvrsGEZamhQ
VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<sandboxlist xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/sandboxlist"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/sandboxlist
https://analysiscenter.veracode.com/resource/4.0/sandboxlist.xsd"
sandboxlist_version="1.0" account_id="12345" app_id="31337">
<sandbox sandbox_id="111111111" sandbox_name="Project Security" owner="<EMAIL>"
last_modified="2019-09-17T14:08:35-04:00">
<customfield name="Custom 1" value=""/>
<customfield name="Custom 2" value=""/>
<customfield name="Custom 3" value=""/>
<customfield name="Custom 4" value=""/>
<customfield name="Custom 5" value=""/>
</sandbox>
<sandbox sandbox_id="22222222" sandbox_name="Project Refactor" owner="<EMAIL>"
last_modified="2019-09-17T14:04:13-04:00">
<customfield name="Custom 1" value=""/>
<customfield name="Custom 2" value=""/>
<customfield name="Custom 3" value=""/>
<customfield name="Custom 4" value=""/>
<customfield name="Custom 5" value=""/>
</sandbox>
</sandboxlist>"""
VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML["bytes"] = bytes(
VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML["string"], "utf-8"
)
VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML["bytes"]
)
# Valid Sandbox API information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/jp8rPey8I5WsuWz7bY2SZg
VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<sandboxinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/sandboxinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/sandboxinfo
https://analysiscenter.veracode.com/resource/4.0/sandboxinfo.xsd" sandboxinfo_version="1.2"
account_id="12345" app_id="31337">
<sandbox sandbox_id="1111111" sandbox_name="Project Security" sandbox_status="sandbox" owner="<EMAIL>"
modified_date="2019-09-17T14:08:35-04:00" created_date="2019-09-17T14:08:35-04:00">
<customfield name="Custom 1" value=""/>
<customfield name="Custom 2" value=""/>
<customfield name="Custom 3" value=""/>
<customfield name="Custom 4" value=""/>
<customfield name="Custom 5" value=""/>
</sandbox>
</sandboxinfo>"""
VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML["bytes"] = bytes(
VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML["string"], "utf-8"
)
VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML["Element"] = ElementTree.fromstring(
VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML["bytes"]
)
# Invalid Sandbox API information
# https://help.veracode.com/reader/LMv_dtSHyb7iIxAQznC~9w/jp8rPey8I5WsuWz7bY2SZg
INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX = {}
# Unfortunately, this varies slightly from the Veracode-provided example
# because (1) the xml library cannot parse the XML using an XSD file, and (2)
# the placeholders Veracode provided in its documentation result in invalid XML
# regardless.
INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX[
"string"
] = """<?xml version="1.0" encoding="UTF-8"?>
<sandboxinfo xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="https://analysiscenter.veracode.com/schema/4.0/sandboxinfo"
xsi:schemaLocation="https://analysiscenter.veracode.com/schema/4.0/sandboxinfo
https://analysiscenter.veracode.com/resource/4.0/sandboxinfo.xsd" sandboxinfo_version="1.2"
account_id="12345" app_id="31337">
</sandboxinfo>"""
INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX["bytes"] = bytes(
INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX["string"], "utf-8"
)
INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX[
"Element"
] = ElementTree.fromstring(
INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX["bytes"]
)
INVALID_SANDBOX_API_INCORRECT_DOMAIN = copy.deepcopy(VALID_SANDBOX_API)
INVALID_SANDBOX_API_INCORRECT_DOMAIN["base_url"] = "https:///api/"
## Example file info
VALID_FILE: Dict[str, Union[str, List[str], bytes, Path]] = {}
VALID_FILE["name"] = "valid_file.pdb"
VALID_FILE["names"] = [
"valid_file.exe",
"valid_file.pdb",
"valid_file.dll",
"valid_file.jar",
"valid_file.zip",
"valid_file.tar",
"valid_file.tgz",
"valid_file.war",
"valid_file.ear",
"valid_file.jar",
"valid_file.apk",
"valid_file.ipa",
"valid_file.tar.gz",
"this.is.a.valid.file.dll",
"this.is..also..valid.jar",
]
VALID_FILE["bytes"] = b"Seiso was here!\n"
VALID_FILE["Path"] = Path("/path/" + str(VALID_FILE["name"]))
INVALID_FILE: Dict[str, Union[str, List[str], bytes, Path]] = {}
INVALID_FILE["name"] = "invalid_file.tar.gz.bar"
INVALID_FILE["names"] = [
"invalid_file.thingy",
"invalid_file",
"invalid_file.tar.gz.bar",
"invalid_file.dll.gz",
"invalid_file.exe.docx",
]
INVALID_FILE["bytes"] = b"Seiso was here!\n"
INVALID_FILE["Path"] = Path("/path/" + str(INVALID_FILE["name"]))
## Veracode error responses
VERACODE_ERROR_RESPONSE_XML: Dict[str, bytes] = {}
VERACODE_ERROR_RESPONSE_XML[
"bytes"
] = b'<?xml version="1.0" encoding="UTF-8"?>\n\n<error>App not in state where new builds are allowed.</error>\n' # pylint: disable=line-too-long
VERACODE_ERROR_RESPONSE_XML["Element"] = ElementTree.fromstring(
VERACODE_ERROR_RESPONSE_XML["bytes"]
)
XML_API_VALID_RESPONSE_XML_ERROR: Dict[str, bytes] = {}
XML_API_VALID_RESPONSE_XML_ERROR[
"bytes"
] = b'<?xml version="1.0" encoding="UTF-8"?>\n\n<error>Access denied.</error>\n'
XML_API_VALID_RESPONSE_XML_ERROR["Element"] = ElementTree.fromstring(
XML_API_VALID_RESPONSE_XML_ERROR["bytes"]
)
XML_API_INVALID_RESPONSE_XML_ERROR: Dict[str, bytes] = {}
XML_API_INVALID_RESPONSE_XML_ERROR[
"bytes"
] = b'<?xml version="1.0" encoding="UTF-8"?>\n\n<Invalid Error Message.</error>\n'
# Configs
INVALID_CONFIG_DIRTY = {
"a": 1,
"b": {},
"c": {"hideme": None, "keepme": "KEEP", "list": ["a", {}, "b", None]},
"emptydict": {},
None: "hide",
}
INVALID_CONFIG_NO_NONE = {
"a": 1,
"b": {},
"c": {"keepme": "KEEP", "list": ["a", {}, "b"]},
"emptydict": {},
}
INVALID_CONFIG_NO_EMPTY_DICT = {
"a": 1,
"c": {"hideme": None, "keepme": "KEEP", "list": ["a", "b", None]},
None: "hide",
}
INVALID_CONFIG_CLEAN = {
"a": 1,
"c": {"keepme": "KEEP", "list": ["a", "b"]},
}
SIMPLE_CONFIG_FILE = {}
SIMPLE_CONFIG_FILE["name"] = "easy_sast.yml"
SIMPLE_CONFIG_FILE["Path"] = Path("/path/" + str(SIMPLE_CONFIG_FILE["name"]))
SIMPLE_CONFIG_FILE[
"string"
] = '''---
loglevel: "WARNING"'''
SIMPLE_CONFIG_FILE["bytes"] = bytes(SIMPLE_CONFIG_FILE["string"], "utf-8")
INVALID_CONFIG_FILES = [
Path("./easy_sast.yml.gz"),
Path("./config.txt.yml"),
Path("./thing.notyml"),
Path("./not_a_config.pdb"),
Path("./config.yalm"),
Path("./yaml.config"),
Path("./setup.cfg"),
Path("./config.ini"),
Path("./file.json"),
Path("./yaml"),
Path("./yml"),
Path("./config"),
]
VALID_CLEAN_FILE_CONFIG = {}
VALID_CLEAN_FILE_CONFIG[
"string"
] = '''---
apis:
results:
base_url: "https://analysiscenter.veracode.com/api/"
version: {
"detailedreport.do": "5.0",
"detailedreportpdf.do": "4.0",
"getaccountcustomfieldlist.do": "5.0",
"getappbuilds.do": "4.0",
"getcallstacks.do": "5.0",
"summaryreport.do": "4.0",
"summaryreportpdf.do": "4.0",
"thirdpartyreportpdf.do": "4.0",
}
app_id: "31337"
ignore_compliance_status: False
upload:
base_url: "https://analysiscenter.veracode.com/api/"
version: {
"beginprescan.do": "5.0",
"beginscan.do": "5.0",
"createapp.do": "5.0",
"createbuild.do": "5.0",
"deleteapp.do": "5.0",
"deletebuild.do": "5.0",
"getappinfo.do": "5.0",
"getapplist.do": "5.0",
"getbuildinfo.do": "5.0",
"getbuildlist.do": "5.0",
"getfilelist.do": "5.0",
"getpolicylist.do": "5.0",
"getprescanresults.do": "5.0",
"getvendorlist.do": "5.0",
"removefile.do": "5.0",
"updateapp.do": "5.0",
"updatebuild.do": "5.0",
"uploadfile.do": "5.0",
"uploadlargefile.do": "5.0"
}
app_id: "31337"
build_dir: "/build/"
build_id: "2037-03-13_03-14-15"
scan_all_nonfatal_top_level_modules: True
auto_scan: True
sandbox:
base_url: "https://analysiscenter.veracode.com/api/"
version: {
"createsandbox.do": "5.0",
"getsandboxlist.do": "5.0",
"promotesandbox.do": "5.0",
"updatesandbox.do": "5.0",
"deletesandbox.do": "5.0"
}
app_id: "31337"
sandbox_name: "fb/jonzeolla/name-of-branch"
loglevel: "warning"
workflow:
- "submit_artifacts"
- "check_compliance"'''
VALID_CLEAN_FILE_CONFIG["bytes"] = bytes(VALID_CLEAN_FILE_CONFIG["string"], "utf-8")
VALID_CLEAN_FILE_CONFIG["dict"] = yaml.safe_load(VALID_CLEAN_FILE_CONFIG["bytes"])
# VALID_CLEAN_FILE_CONFIG is already normalized, but keep a separate copy in
# case that changes in the future and we need to update the places where this
# is used to mock the response of normalized_file_config
VALID_CLEAN_FILE_CONFIG_NORMALIZED = copy.deepcopy(VALID_CLEAN_FILE_CONFIG)
CLEAN_DEFAULT_CONFIG = {
"workflow": ["submit_artifacts", "check_compliance"],
"loglevel": "WARNING",
"apis": {"upload": {}, "results": {}, "sandbox": {}},
}
CLEAN_FILE_CONFIG = {
"apis": {
"results": {
"base_url": "https://analysiscenter.veracode.com/api/",
"version": {
"detailedreport.do": "5.0",
"detailedreportpdf.do": "4.0",
"getaccountcustomfieldlist.do": "5.0",
"getappbuilds.do": "4.0",
"getcallstacks.do": "5.0",
"summaryreport.do": "4.0",
"summaryreportpdf.do": "4.0",
"thirdpartyreportpdf.do": "4.0",
},
"app_id": "31337",
"ignore_compliance_status": False,
},
"upload": {
"base_url": "https://analysiscenter.veracode.com/api/",
"version": {
"beginprescan.do": "5.0",
"beginscan.do": "5.0",
"createapp.do": "5.0",
"createbuild.do": "5.0",
"deleteapp.do": "5.0",
"deletebuild.do": "5.0",
"getappinfo.do": "5.0",
"getapplist.do": "5.0",
"getbuildinfo.do": "5.0",
"getbuildlist.do": "5.0",
"getfilelist.do": "5.0",
"getpolicylist.do": "5.0",
"getprescanresults.do": "5.0",
"getvendorlist.do": "5.0",
"removefile.do": "5.0",
"updateapp.do": "5.0",
"updatebuild.do": "5.0",
"uploadfile.do": "5.0",
"uploadlargefile.do": "5.0",
},
"app_id": "31337",
"build_dir": Path("/build/").absolute(),
"build_id": "2037-03-13_03-14-15",
"scan_all_nonfatal_top_level_modules": True,
"auto_scan": True,
},
"sandbox": {
"base_url": "https://analysiscenter.veracode.com/api/",
"version": {
"createsandbox.do": "5.0",
"getsandboxlist.do": "5.0",
"promotesandbox.do": "5.0",
"updatesandbox.do": "5.0",
"deletesandbox.do": "5.0",
},
"app_id": "31337",
"sandbox_name": VALID_SANDBOX_API["sandbox_name"],
},
},
"loglevel": "WARNING",
"workflow": ["submit_artifacts", "check_compliance"],
}
CLEAN_ENV_CONFIG = {
"api_key_id": "95e637f1a25d453cdfdc30a338287ba8",
"api_key_secret": "<KEY>",
}
CLEAN_ARGS_CONFIG = {
"config_file": Path("/easy_sast/easy_sast.yml"),
"apis": {
"results": {"ignore_compliance_status": False},
"upload": {"scan_all_nonfatal_top_level_modules": True, "auto_scan": True},
"sandbox": {"sandbox_name": VALID_SANDBOX_API["sandbox_name"]},
},
}
CLEAN_FILE_CONFIG_NO_RESULTS_API = copy.deepcopy(CLEAN_FILE_CONFIG)
CLEAN_FILE_CONFIG_NO_RESULTS_API["apis"] = {"upload": {}}
CLEAN_FILE_CONFIG_NO_UPLOAD_API = copy.deepcopy(CLEAN_FILE_CONFIG)
CLEAN_FILE_CONFIG_NO_UPLOAD_API["apis"] = {"results": {}}
CLEAN_EFFECTIVE_CONFIG = {
"workflow": ["submit_artifacts", "check_compliance"],
"loglevel": "WARNING",
"apis": {
"sandbox": {
"base_url": "https://analysiscenter.veracode.com/api/",
"version": {
"createsandbox.do": "5.0",
"getsandboxlist.do": "5.0",
"promotesandbox.do": "5.0",
"updatesandbox.do": "5.0",
"deletesandbox.do": "5.0",
},
"app_id": "31337",
"sandbox_name": VALID_SANDBOX_API["sandbox_name"],
},
"upload": {
"base_url": "https://analysiscenter.veracode.com/api/",
"version": {
"beginprescan.do": "5.0",
"beginscan.do": "5.0",
"createapp.do": "5.0",
"createbuild.do": "5.0",
"deleteapp.do": "5.0",
"deletebuild.do": "5.0",
"getappinfo.do": "5.0",
"getapplist.do": "5.0",
"getbuildinfo.do": "5.0",
"getbuildlist.do": "5.0",
"getfilelist.do": "5.0",
"getpolicylist.do": "5.0",
"getprescanresults.do": "5.0",
"getvendorlist.do": "5.0",
"removefile.do": "5.0",
"updateapp.do": "5.0",
"updatebuild.do": "5.0",
"uploadfile.do": "5.0",
"uploadlargefile.do": "5.0",
},
"app_id": "31337",
"build_dir": Path("/build/").absolute(),
"build_id": "2037-03-13_03-14-15",
"scan_all_nonfatal_top_level_modules": True,
"auto_scan": True,
},
"results": {
"base_url": "https://analysiscenter.veracode.com/api/",
"version": {
"detailedreport.do": "5.0",
"detailedreportpdf.do": "4.0",
"getaccountcustomfieldlist.do": "5.0",
"getappbuilds.do": "4.0",
"getcallstacks.do": "5.0",
"summaryreport.do": "4.0",
"summaryreportpdf.do": "4.0",
"thirdpartyreportpdf.do": "4.0",
},
"app_id": "31337",
"ignore_compliance_status": False,
},
},
"api_key_id": "95e637f1a25d453cdfdc30a338287ba8",
"api_key_secret": "<KEY>",
"config_file": Path("/easy_sast/easy_sast.yml"),
}
|
[
"copy.deepcopy",
"secrets.token_hex",
"pathlib.Path",
"yaml.safe_load",
"defusedxml.ElementTree.fromstring"
] |
[((999, 1020), 'secrets.token_hex', 'secrets.token_hex', (['(16)'], {}), '(16)\n', (1016, 1020), False, 'import secrets\n'), ((1059, 1080), 'secrets.token_hex', 'secrets.token_hex', (['(64)'], {}), '(64)\n', (1076, 1080), False, 'import secrets\n'), ((1183, 1215), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (1196, 1215), False, 'import copy\n'), ((1315, 1347), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (1328, 1347), False, 'import copy\n'), ((1444, 1476), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (1457, 1476), False, 'import copy\n'), ((1580, 1612), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (1593, 1612), False, 'import copy\n'), ((1821, 1853), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (1834, 1853), False, 'import copy\n'), ((1970, 2002), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (1983, 2002), False, 'import copy\n'), ((2124, 2156), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (2137, 2156), False, 'import copy\n'), ((2297, 2329), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (2310, 2329), False, 'import copy\n'), ((4662, 4753), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS['bytes']"], {}), "(VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_NO_BUILDS\n ['bytes'])\n", (4684, 4753), False, 'from defusedxml import ElementTree\n'), ((7492, 7611), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS[\n 'bytes']"], {}), "(\n VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_FAILING_POLICY_COMPLIANCE_STATUS\n ['bytes'])\n", (7514, 7611), False, 'from defusedxml import ElementTree\n'), ((10343, 10462), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS[\n 'bytes']"], {}), "(\n VALID_RESULTS_API_GETAPPBUILDS_RESPONSE_XML_PASSING_POLICY_COMPLIANCE_STATUS\n ['bytes'])\n", (10365, 10462), False, 'from defusedxml import ElementTree\n'), ((11573, 11594), 'secrets.token_hex', 'secrets.token_hex', (['(16)'], {}), '(16)\n', (11590, 11594), False, 'import secrets\n'), ((11632, 11653), 'secrets.token_hex', 'secrets.token_hex', (['(64)'], {}), '(64)\n', (11649, 11653), False, 'import secrets\n'), ((11704, 11735), 'copy.deepcopy', 'copy.deepcopy', (['VALID_UPLOAD_API'], {}), '(VALID_UPLOAD_API)\n', (11717, 11735), False, 'import copy\n'), ((11823, 11854), 'copy.deepcopy', 'copy.deepcopy', (['VALID_UPLOAD_API'], {}), '(VALID_UPLOAD_API)\n', (11836, 11854), False, 'import copy\n'), ((11955, 11986), 'copy.deepcopy', 'copy.deepcopy', (['VALID_UPLOAD_API'], {}), '(VALID_UPLOAD_API)\n', (11968, 11986), False, 'import copy\n'), ((12099, 12130), 'copy.deepcopy', 'copy.deepcopy', (['VALID_UPLOAD_API'], {}), '(VALID_UPLOAD_API)\n', (12112, 12130), False, 'import copy\n'), ((12329, 12360), 'copy.deepcopy', 'copy.deepcopy', (['VALID_UPLOAD_API'], {}), '(VALID_UPLOAD_API)\n', (12342, 12360), False, 'import copy\n'), ((12482, 12513), 'copy.deepcopy', 'copy.deepcopy', (['VALID_UPLOAD_API'], {}), '(VALID_UPLOAD_API)\n', (12495, 12513), False, 'import copy\n'), ((12656, 12687), 'copy.deepcopy', 'copy.deepcopy', (['VALID_UPLOAD_API'], 
{}), '(VALID_UPLOAD_API)\n', (12669, 12687), False, 'import copy\n'), ((13967, 14045), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_UPLOADLARGEFILE_RESPONSE_XML['bytes'])\n", (13989, 14045), False, 'from defusedxml import ElementTree\n'), ((15741, 15816), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_BEGINPRESCAN_RESPONSE_XML['bytes'])\n", (15763, 15816), False, 'from defusedxml import ElementTree\n'), ((17517, 17591), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_CREATEBUILD_RESPONSE_XML['bytes'])\n", (17539, 17591), False, 'from defusedxml import ElementTree\n'), ((19792, 19865), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_GETAPPINFO_RESPONSE_XML['bytes'])\n", (19814, 19865), False, 'from defusedxml import ElementTree\n'), ((21088, 21162), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_DELETEBUILD_RESPONSE_XML['bytes'])\n", (21110, 21162), False, 'from defusedxml import ElementTree\n'), ((22997, 23072), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_XML['bytes'])\n", (23019, 23072), False, 'from defusedxml import ElementTree\n'), ((24936, 25022), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML['bytes']"], {}), "(VALID_UPLOAD_API_GETBUILDINFO_RESPONSE_READY_XML[\n 'bytes'])\n", (24958, 25022), False, 'from defusedxml import ElementTree\n'), ((26900, 26992), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_GETBUILDINFO_IN_PROGRESS_RESPONSE_XML\n ['bytes'])\n", (26922, 26992), False, 'from defusedxml import ElementTree\n'), ((28356, 28459), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML['bytes']"], {}), "(\n VALID_UPLOAD_API_GETBUILDINFO_RESULTS_READY_ERROR_IN_RESPONSE_XML['bytes'])\n", (28378, 28459), False, 'from defusedxml import ElementTree\n'), ((29814, 29905), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML['bytes']"], {}), "(VALID_UPLOAD_API_GETBUILDLIST_BUILDID_IN_RESPONSE_XML\n ['bytes'])\n", (29836, 29905), False, 'from defusedxml import ElementTree\n'), ((31264, 31363), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML['bytes']"], {}), "(\n VALID_UPLOAD_API_GETBUILDLIST_MISSING_BUILDID_IN_RESPONSE_XML['bytes'])\n", (31286, 31363), False, 'from defusedxml import ElementTree\n'), ((33267, 33365), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML['bytes']"], {}), "(\n VALID_UPLOAD_API_GETBUILDINFO_STATUS_MISSING_IN_RESPONSE_XML['bytes'])\n", (33289, 33365), False, 'from defusedxml import ElementTree\n'), ((33977, 33998), 'secrets.token_hex', 'secrets.token_hex', (['(16)'], {}), '(16)\n', (33994, 
33998), False, 'import secrets\n'), ((34037, 34058), 'secrets.token_hex', 'secrets.token_hex', (['(64)'], {}), '(64)\n', (34054, 34058), False, 'import secrets\n'), ((34101, 34133), 'copy.deepcopy', 'copy.deepcopy', (['VALID_SANDBOX_API'], {}), '(VALID_SANDBOX_API)\n', (34114, 34133), False, 'import copy\n'), ((34234, 34266), 'copy.deepcopy', 'copy.deepcopy', (['VALID_SANDBOX_API'], {}), '(VALID_SANDBOX_API)\n', (34247, 34266), False, 'import copy\n'), ((34391, 34423), 'copy.deepcopy', 'copy.deepcopy', (['VALID_SANDBOX_API'], {}), '(VALID_SANDBOX_API)\n', (34404, 34423), False, 'import copy\n'), ((36587, 36665), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML['bytes']"], {}), "(VALID_SANDBOX_GETSANDBOXLIST_API_RESPONSE_XML['bytes'])\n", (36609, 36665), False, 'from defusedxml import ElementTree\n'), ((38348, 38425), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML['bytes']"], {}), "(VALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML['bytes'])\n", (38370, 38425), False, 'from defusedxml import ElementTree\n'), ((39708, 39803), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX['bytes']"], {}), "(\n INVALID_SANDBOX_CREATESANDBOX_API_RESPONSE_XML_NO_SANDBOX['bytes'])\n", (39730, 39803), False, 'from defusedxml import ElementTree\n'), ((39845, 39877), 'copy.deepcopy', 'copy.deepcopy', (['VALID_RESULTS_API'], {}), '(VALID_RESULTS_API)\n', (39858, 39877), False, 'import copy\n'), ((41250, 41310), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["VERACODE_ERROR_RESPONSE_XML['bytes']"], {}), "(VERACODE_ERROR_RESPONSE_XML['bytes'])\n", (41272, 41310), False, 'from defusedxml import ElementTree\n'), ((41547, 41612), 'defusedxml.ElementTree.fromstring', 'ElementTree.fromstring', (["XML_API_VALID_RESPONSE_XML_ERROR['bytes']"], {}), "(XML_API_VALID_RESPONSE_XML_ERROR['bytes'])\n", (41569, 41612), False, 'from defusedxml import ElementTree\n'), ((45122, 45170), 'yaml.safe_load', 'yaml.safe_load', (["VALID_CLEAN_FILE_CONFIG['bytes']"], {}), "(VALID_CLEAN_FILE_CONFIG['bytes'])\n", (45136, 45170), False, 'import yaml\n'), ((45415, 45453), 'copy.deepcopy', 'copy.deepcopy', (['VALID_CLEAN_FILE_CONFIG'], {}), '(VALID_CLEAN_FILE_CONFIG)\n', (45428, 45453), False, 'import copy\n'), ((48389, 48421), 'copy.deepcopy', 'copy.deepcopy', (['CLEAN_FILE_CONFIG'], {}), '(CLEAN_FILE_CONFIG)\n', (48402, 48421), False, 'import copy\n'), ((48515, 48547), 'copy.deepcopy', 'copy.deepcopy', (['CLEAN_FILE_CONFIG'], {}), '(CLEAN_FILE_CONFIG)\n', (48528, 48547), False, 'import copy\n'), ((42659, 42685), 'pathlib.Path', 'Path', (['"""./easy_sast.yml.gz"""'], {}), "('./easy_sast.yml.gz')\n", (42663, 42685), False, 'from pathlib import Path\n'), ((42691, 42715), 'pathlib.Path', 'Path', (['"""./config.txt.yml"""'], {}), "('./config.txt.yml')\n", (42695, 42715), False, 'from pathlib import Path\n'), ((42721, 42743), 'pathlib.Path', 'Path', (['"""./thing.notyml"""'], {}), "('./thing.notyml')\n", (42725, 42743), False, 'from pathlib import Path\n'), ((42749, 42775), 'pathlib.Path', 'Path', (['"""./not_a_config.pdb"""'], {}), "('./not_a_config.pdb')\n", (42753, 42775), False, 'from pathlib import Path\n'), ((42781, 42802), 'pathlib.Path', 'Path', (['"""./config.yalm"""'], {}), "('./config.yalm')\n", (42785, 42802), False, 'from pathlib import Path\n'), ((42808, 42829), 'pathlib.Path', 'Path', (['"""./yaml.config"""'], {}), 
"('./yaml.config')\n", (42812, 42829), False, 'from pathlib import Path\n'), ((42835, 42854), 'pathlib.Path', 'Path', (['"""./setup.cfg"""'], {}), "('./setup.cfg')\n", (42839, 42854), False, 'from pathlib import Path\n'), ((42860, 42880), 'pathlib.Path', 'Path', (['"""./config.ini"""'], {}), "('./config.ini')\n", (42864, 42880), False, 'from pathlib import Path\n'), ((42886, 42905), 'pathlib.Path', 'Path', (['"""./file.json"""'], {}), "('./file.json')\n", (42890, 42905), False, 'from pathlib import Path\n'), ((42911, 42925), 'pathlib.Path', 'Path', (['"""./yaml"""'], {}), "('./yaml')\n", (42915, 42925), False, 'from pathlib import Path\n'), ((42931, 42944), 'pathlib.Path', 'Path', (['"""./yml"""'], {}), "('./yml')\n", (42935, 42944), False, 'from pathlib import Path\n'), ((42950, 42966), 'pathlib.Path', 'Path', (['"""./config"""'], {}), "('./config')\n", (42954, 42966), False, 'from pathlib import Path\n'), ((48084, 48116), 'pathlib.Path', 'Path', (['"""/easy_sast/easy_sast.yml"""'], {}), "('/easy_sast/easy_sast.yml')\n", (48088, 48116), False, 'from pathlib import Path\n'), ((51024, 51056), 'pathlib.Path', 'Path', (['"""/easy_sast/easy_sast.yml"""'], {}), "('/easy_sast/easy_sast.yml')\n", (51028, 51056), False, 'from pathlib import Path\n'), ((11326, 11349), 'pathlib.Path', 'Path', (['"""/usr/local/bin/"""'], {}), "('/usr/local/bin/')\n", (11330, 11349), False, 'from pathlib import Path\n'), ((47216, 47231), 'pathlib.Path', 'Path', (['"""/build/"""'], {}), "('/build/')\n", (47220, 47231), False, 'from pathlib import Path\n'), ((50152, 50167), 'pathlib.Path', 'Path', (['"""/build/"""'], {}), "('/build/')\n", (50156, 50167), False, 'from pathlib import Path\n')]
|
import unittest
import numpy as np
from neurolib.utils.parameterSpace import ParameterSpace
class TestParameterSpace(unittest.TestCase):
def test_parameterspace_init(self):
# init from list
par = ParameterSpace(["a", "b"], [[3], [3]])
# init from dict
par = ParameterSpace({"a": [1, 2], "b": [1, 2]})
# init from dict with numpy arrays
        par = ParameterSpace({"a": np.zeros(3), "b": np.ones(33)})
def test_parameterspace_kind(self):
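        # kind is inferred from the value lists below: all length-1 -> "point",
        # all length-2 (interpreted as bounds) -> "bound", otherwise "grid".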
# 'point'
par = ParameterSpace(["a", "b"], [[10], [3.0]])
self.assertEqual(par.kind, "point")
# 'bound'
par = ParameterSpace(["a", "b"], [[3.0, 5.0], [0.0, 3.0]])
self.assertEqual(par.kind, "bound")
# 'grid'
par = ParameterSpace(["a", "b"], [[3.0, 3.5, 5.0], [0.0, 3.0]])
self.assertEqual(par.kind, "grid")
def test_parameterspace_attributes(self):
par = ParameterSpace(["a", "b"], [[10], [3.0]])
par.a
par["a"]
par.b
par["c"] = [1, 2, 3]
def test_conversions(self):
par = ParameterSpace({"a": [1, 2], "b": [1, 2]})
par.named_tuple_constructor
par.named_tuple
par.dict()
print(par)
|
[
"numpy.zeros",
"neurolib.utils.parameterSpace.ParameterSpace",
"numpy.ones"
] |
[((218, 256), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["['a', 'b']", '[[3], [3]]'], {}), "(['a', 'b'], [[3], [3]])\n", (232, 256), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((297, 339), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["{'a': [1, 2], 'b': [1, 2]}"], {}), "({'a': [1, 2], 'b': [1, 2]})\n", (311, 339), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((529, 570), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["['a', 'b']", '[[10], [3.0]]'], {}), "(['a', 'b'], [[10], [3.0]])\n", (543, 570), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((648, 700), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["['a', 'b']", '[[3.0, 5.0], [0.0, 3.0]]'], {}), "(['a', 'b'], [[3.0, 5.0], [0.0, 3.0]])\n", (662, 700), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((777, 834), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["['a', 'b']", '[[3.0, 3.5, 5.0], [0.0, 3.0]]'], {}), "(['a', 'b'], [[3.0, 3.5, 5.0], [0.0, 3.0]])\n", (791, 834), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((939, 980), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["['a', 'b']", '[[10], [3.0]]'], {}), "(['a', 'b'], [[10], [3.0]])\n", (953, 980), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((1102, 1144), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["{'a': [1, 2], 'b': [1, 2]}"], {}), "({'a': [1, 2], 'b': [1, 2]})\n", (1116, 1144), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((419, 430), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (427, 430), True, 'import numpy as np\n'), ((439, 450), 'numpy.ones', 'np.ones', (['(33)'], {}), '(33)\n', (446, 450), True, 'import numpy as np\n')]
|
"""Utility functions for classifying and solving
ordinary and partial differential equations.
Contains
========
_preprocess
ode_order
_desolve
"""
from sympy.core.compatibility import set_union
from sympy.core.function import Function, Derivative, AppliedUndef
from sympy.core.relational import Equality, Eq
from sympy.core.symbol import Wild
def _preprocess(expr, func=None, hint='_Integral'):
"""Prepare expr for solving by making sure that differentiation
is done so that only func remains in unevaluated derivatives and
(if hint doesn't end with _Integral) that doit is applied to all
other derivatives. If hint is None, don't do any differentiation.
(Currently this may cause some simple differential equations to
fail.)
In case func is None, an attempt will be made to autodetect the
function to be solved for.
>>> from sympy.solvers.deutils import _preprocess
>>> from sympy import Derivative, Function, Integral, sin
>>> from sympy.abc import x, y, z
>>> f, g = map(Function, 'fg')
Apply doit to derivatives that contain more than the function
of interest:
>>> _preprocess(Derivative(f(x) + x, x))
(Derivative(f(x), x) + 1, f(x))
Do others if the differentiation variable(s) intersect with those
of the function of interest or contain the function of interest:
>>> _preprocess(Derivative(g(x), y, z), f(y))
(0, f(y))
>>> _preprocess(Derivative(f(y), z), f(y))
(0, f(y))
Do others if the hint doesn't end in '_Integral' (the default
assumes that it does):
>>> _preprocess(Derivative(g(x), y), f(x))
(Derivative(g(x), y), f(x))
>>> _preprocess(Derivative(f(x), y), f(x), hint='')
(0, f(x))
Don't do any derivatives if hint is None:
>>> eq = Derivative(f(x) + 1, x) + Derivative(f(x), y)
>>> _preprocess(eq, f(x), hint=None)
(Derivative(f(x) + 1, x) + Derivative(f(x), y), f(x))
If it's not clear what the function of interest is, it must be given:
>>> eq = Derivative(f(x) + g(x), x)
>>> _preprocess(eq, g(x))
(Derivative(f(x), x) + Derivative(g(x), x), g(x))
>>> try: _preprocess(eq)
    ... except ValueError: print("A ValueError was raised.")
A ValueError was raised.
"""
derivs = expr.atoms(Derivative)
if not func:
funcs = set_union(*[d.atoms(AppliedUndef) for d in derivs])
if len(funcs) != 1:
raise ValueError('The function cannot be '
'automatically detected for %s.' % expr)
func = funcs.pop()
fvars = set(func.args)
if hint is None:
return expr, func
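    # Apply doit() to every derivative when the hint does not end in
    # '_Integral'; otherwise only to derivatives that involve func or share
    # its variables, so that only derivatives of func stay unevaluated.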
reps = [(d, d.doit()) for d in derivs if not hint.endswith('_Integral') or
d.has(func) or set(d.variables) & fvars]
eq = expr.subs(reps)
return eq, func
def ode_order(expr, func):
"""
Returns the order of a given differential
equation with respect to func.
This function is implemented recursively.
Examples
========
>>> from sympy import Function
>>> from sympy.solvers.deutils import ode_order
>>> from sympy.abc import x
>>> f, g = map(Function, ['f', 'g'])
>>> ode_order(f(x).diff(x, 2) + f(x).diff(x)**2 +
... f(x).diff(x), f(x))
2
>>> ode_order(f(x).diff(x, 2) + g(x).diff(x, 3), f(x))
2
>>> ode_order(f(x).diff(x, 2) + g(x).diff(x, 3), g(x))
3
"""
a = Wild('a', exclude=[func])
if expr.match(a):
return 0
if isinstance(expr, Derivative):
if expr.args[0] == func:
return len(expr.variables)
else:
order = 0
for arg in expr.args[0].args:
order = max(order, ode_order(arg, func) + len(expr.variables))
return order
else:
order = 0
for arg in expr.args:
order = max(order, ode_order(arg, func))
return order
def _desolve(eq, func=None, hint="default", simplify=True, **kwargs):
"""This is a helper function to dsolve and pdsolve in the ode
and pde modules.
    If the hint provided to the function is "default", then a dict with
    the following keys is returned
    'func' - It provides the function for which the differential equation
    has to be solved. This is useful when the function is not given
    explicitly and has to be detected from the expression.
'default' - The default key as returned by classifier functions in ode
and pde.py
'hint' - The hint given by the user for which the differential equation
is to be solved. If the hint given by the user is 'default',
then the value of 'hint' and 'default' is the same.
'order' - The order of the function as returned by ode_order
'match' - It returns the match as given by the classifier functions, for
the default hint.
    If the hint provided to the function is not "default" and is not in
    ('all', 'all_Integral', 'best'), then a dict with the above mentioned keys
    is returned along with the keys that classify_ode or classify_pde return
    when their dict argument is set to True
    If the hint given is in ('all', 'all_Integral', 'best'), then this function
    returns a nested dict whose keys are the classified hints returned by the
    classifier functions and whose values are dicts of the form mentioned
    above.
    The key 'eq' is common to all the hints mentioned above; it holds the
    expression (lhs - rhs is taken first if the eq given by the user is an
    Equality).
See Also
========
classify_ode(ode.py)
classify_pde(pde.py)
"""
prep = kwargs.pop('prep', True)
if isinstance(eq, Equality):
eq = eq.lhs - eq.rhs
# preprocess the equation and find func if not given
if prep or func is None:
eq, func = _preprocess(eq, func)
prep = False
    # type is an argument passed by the solve functions in ode and pde.py
    # that identifies whether the caller is solving an ordinary or a partial
    # differential equation; the behavior below changes accordingly.
type = kwargs.get('type', None)
if type == 'ode':
from sympy.solvers.ode import classify_ode, allhints
classifier = classify_ode
string = 'ODE '
dummy = ''
elif type == 'pde':
from sympy.solvers.pde import classify_pde, allhints
classifier = classify_pde
string = 'PDE '
dummy = 'p'
# Magic that should only be used internally. Prevents classify_ode from
# being called more than it needs to be by passing its results through
# recursive calls.
if kwargs.get('classify', True):
hints = classifier(eq, func, dict=True, prep=prep)
else:
# Here is what all this means:
#
# hint: The hint method given to _desolve() by the user.
# hints: The dictionary of hints that match the DE, along with other
# information (including the internal pass-through magic).
# default: The default hint to return, the first hint from allhints
# that matches the hint; obtained from classify_ode().
# match: Dictionary containing the match dictionary for each hint
# (the parts of the DE for solving). When going through the
# hints in "all", this holds the match string for the current
# hint.
# order: The order of the DE, as determined by ode_order().
hints = kwargs.get('hint',
{'default': hint,
hint: kwargs['match'],
'order': kwargs['order']})
if hints['order'] == 0:
raise ValueError(
str(eq) + " is not a differential equation in " + str(func))
if not hints['default']:
# classify_ode will set hints['default'] to None if no hints match.
if hint not in allhints and hint != 'default':
raise ValueError("Hint not recognized: " + hint)
elif hint not in hints['ordered_hints'] and hint != 'default':
raise ValueError(string + str(eq) + " does not match hint " + hint)
else:
raise NotImplementedError(dummy + "solve" + ": Cannot solve " + str(eq))
if hint == 'default':
return _desolve(eq, func, hint=hints['default'], simplify=simplify,
prep=prep, classify=False, order=hints['order'],
match=hints[hints['default']], type=type)
elif hint in ('all', 'all_Integral', 'best'):
retdict = {}
failedhints = {}
gethints = set(hints) - set(['order', 'default', 'ordered_hints'])
if hint == 'all_Integral':
for i in hints:
if i.endswith('_Integral'):
gethints.remove(i[:-len('_Integral')])
# special case
if "1st_homogeneous_coeff_best" in gethints:
gethints.remove("1st_homogeneous_coeff_best")
for i in gethints:
sol = _desolve(eq, func, hint=i, simplify=simplify, prep=prep,
classify=False, order=hints['order'], match=hints[i], type=type)
retdict[i] = sol
retdict['all'] = True
retdict['eq'] = eq
return retdict
elif hint not in allhints: # and hint not in ('default', 'ordered_hints'):
raise ValueError("Hint not recognized: " + hint)
elif hint not in hints:
raise ValueError(string + str(eq) + " does not match hint " + hint)
else:
# Key added to identify the hint needed to solve the equation
hints['hint'] = hint
hints.update({'func': func, 'eq': eq})
return hints
|
[
"sympy.core.symbol.Wild"
] |
[((3379, 3404), 'sympy.core.symbol.Wild', 'Wild', (['"""a"""'], {'exclude': '[func]'}), "('a', exclude=[func])\n", (3383, 3404), False, 'from sympy.core.symbol import Wild\n')]
|
# -*- coding: utf-8 -*-
'''
Support for RFC 2136 dynamic DNS updates.
:depends: - dnspython Python module
:configuration: If you want to use TSIG authentication for the server, there
are a couple of optional configuration parameters made available to
support this (the keyname is only needed if the keyring contains more
than one key)::
        keyfile: keyring file (default=None)
keyname: key name in file (default=None)
The keyring file needs to be in json format and the key name needs to end
with an extra period in the file, similar to this:
.. code-block:: json
{"keyname.": "keycontent"}
'''
from __future__ import absolute_import
# Import python libs
import logging
import json
log = logging.getLogger(__name__)
try:
import dns.query
import dns.update
import dns.tsigkeyring
dns_support = True
except ImportError:
dns_support = False
import salt.utils
def __virtual__():
'''
Confirm dnspython is available.
'''
if dns_support:
return 'ddns'
return False
def _config(name, key=None, **kwargs):
'''
Return a value for 'name' from command line args then config file options.
Specify 'key' if the config file option is not the same as 'name'.
'''
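    # e.g. _config('keyfile', **kwargs) prefers kwargs['keyfile'] and falls
    # back to the 'ddns.keyfile' config option.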
if key is None:
key = name
if name in kwargs:
value = kwargs[name]
else:
value = __salt__['config.option']('ddns.{0}'.format(key))
if not value:
value = None
return value
def _get_keyring(keyfile):
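    # The keyfile must contain a JSON mapping of key names (each ending with
    # a trailing period) to key secrets, as shown in the module docstring.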
keyring = None
if keyfile and __salt__['file.file_exists'](keyfile):
with salt.utils.fopen(keyfile) as _f:
keyring = dns.tsigkeyring.from_text(json.load(_f))
return keyring
def add_host(zone, name, ttl, ip, nameserver='127.0.0.1', replace=True, **kwargs):
'''
Add, replace, or update the A and PTR (reverse) records for a host.
CLI Example:
.. code-block:: bash
salt ns1 ddns.add_host example.com host1 60 10.1.1.1
'''
res = update(zone, name, ttl, 'A', ip, nameserver, replace, **kwargs)
if res is False:
return False
fqdn = '{0}.{1}.'.format(name, zone)
parts = ip.split('.')[::-1]
popped = []
# Iterate over possible reverse zones
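    # e.g. for 10.1.1.1 this first tries zone 1.1.10.in-addr.arpa. with name
    # 1, then 1.10.in-addr.arpa. with name 1.1, and so on, returning on the
    # first reverse zone that accepts the PTR update.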
while len(parts) > 1:
p = parts.pop(0)
popped.append(p)
zone = '{0}.{1}'.format('.'.join(parts), 'in-addr.arpa.')
name = '.'.join(popped)
ptr = update(zone, name, ttl, 'PTR', fqdn, nameserver, replace, **kwargs)
if ptr:
return True
return res
def delete_host(zone, name, nameserver='127.0.0.1', **kwargs):
'''
Delete the forward and reverse records for a host.
Returns true if any records are deleted.
CLI Example:
.. code-block:: bash
salt ns1 ddns.delete_host example.com host1
'''
fqdn = '{0}.{1}'.format(name, zone)
request = dns.message.make_query(fqdn, 'A')
answer = dns.query.udp(request, nameserver)
try:
ips = [i.address for i in answer.answer[0].items]
except IndexError:
ips = []
res = delete(zone, name, nameserver=nameserver, **kwargs)
fqdn = fqdn + '.'
for ip in ips:
parts = ip.split('.')[::-1]
popped = []
# Iterate over possible reverse zones
while len(parts) > 1:
p = parts.pop(0)
popped.append(p)
zone = '{0}.{1}'.format('.'.join(parts), 'in-addr.arpa.')
name = '.'.join(popped)
ptr = delete(zone, name, 'PTR', fqdn, nameserver=nameserver, **kwargs)
if ptr:
res = True
return res
def update(zone, name, ttl, rdtype, data, nameserver='127.0.0.1', replace=False, **kwargs):
'''
Add, replace, or update a DNS record.
nameserver must be an IP address and the minion running this module
must have update privileges on that server.
If replace is true, first deletes all records for this name and type.
CLI Example:
.. code-block:: bash
salt ns1 ddns.update example.com host1 60 A 10.0.0.1
'''
name = str(name)
fqdn = '{0}.{1}'.format(name, zone)
request = dns.message.make_query(fqdn, rdtype)
answer = dns.query.udp(request, nameserver)
rdtype = dns.rdatatype.from_text(rdtype)
rdata = dns.rdata.from_text(dns.rdataclass.IN, rdtype, data)
is_update = False
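    # Replace (rather than add) when the same data already exists with a
    # different TTL, or when replace=True and there are extra records to
    # collapse; if an identical record and TTL already exist and there is
    # nothing to collapse, return None since there is nothing to do.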
for rrset in answer.answer:
if rdata in rrset.items:
rr = rrset.items
if ttl == rrset.ttl:
if replace and (len(answer.answer) > 1
or len(rrset.items) > 1):
is_update = True
break
return None
is_update = True
break
keyring = _get_keyring(_config('keyfile', **kwargs))
keyname = _config('keyname', **kwargs)
keyalgorithm = _config('keyalgorithm', **kwargs) or 'HMAC-MD5.SIG-ALG.REG.INT'
dns_update = dns.update.Update(zone, keyring=keyring, keyname=keyname, keyalgorithm=keyalgorithm)
if is_update:
dns_update.replace(name, ttl, rdata)
else:
dns_update.add(name, ttl, rdata)
answer = dns.query.udp(dns_update, nameserver)
if answer.rcode() > 0:
return False
return True
def delete(zone, name, rdtype=None, data=None, nameserver='127.0.0.1', **kwargs):
'''
Delete a DNS record.
CLI Example:
.. code-block:: bash
salt ns1 ddns.delete example.com host1 A
'''
name = str(name)
fqdn = '{0}.{1}'.format(name, zone)
request = dns.message.make_query(fqdn, (rdtype or 'ANY'))
answer = dns.query.udp(request, nameserver)
if not answer.answer:
return None
keyring = _get_keyring(_config('keyfile', **kwargs))
keyname = _config('keyname', **kwargs)
    keyalgorithm = _config('keyalgorithm', **kwargs) or 'HMAC-MD5.SIG-ALG.REG.INT'
dns_update = dns.update.Update(zone, keyring=keyring, keyname=keyname, keyalgorithm=keyalgorithm)
if rdtype:
rdtype = dns.rdatatype.from_text(rdtype)
if data:
rdata = dns.rdata.from_text(dns.rdataclass.IN, rdtype, data)
dns_update.delete(name, rdata)
else:
dns_update.delete(name, rdtype)
else:
dns_update.delete(name)
answer = dns.query.udp(dns_update, nameserver)
if answer.rcode() > 0:
return False
return True
|
[
"json.load",
"logging.getLogger"
] |
[((741, 768), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (758, 768), False, 'import logging\n'), ((1706, 1719), 'json.load', 'json.load', (['_f'], {}), '(_f)\n', (1715, 1719), False, 'import json\n')]
|
import json
import os
import random
from unittest.mock import patch
import cv2
import numpy as np
import pytest
import albumentations as A
import albumentations.augmentations.functional as F
from albumentations.core.serialization import SERIALIZABLE_REGISTRY, shorten_class_name
from albumentations.core.transforms_interface import ImageOnlyTransform
from .conftest import skipif_no_torch
from .utils import (
OpenMock,
check_all_augs_exists,
get_dual_transforms,
get_image_only_transforms,
get_transforms,
set_seed,
)
TEST_SEEDS = (0, 1, 42, 111, 9999)
@pytest.mark.parametrize(
["augmentation_cls", "params"],
get_transforms(
custom_arguments={
A.Crop: {"y_min": 0, "y_max": 10, "x_min": 0, "x_max": 10},
A.CenterCrop: {"height": 10, "width": 10},
A.CropNonEmptyMaskIfExists: {"height": 10, "width": 10},
A.RandomCrop: {"height": 10, "width": 10},
A.RandomResizedCrop: {"height": 10, "width": 10},
A.RandomSizedCrop: {"min_max_height": (4, 8), "height": 10, "width": 10},
A.CropAndPad: {"px": 10},
A.Resize: {"height": 10, "width": 10},
},
except_augmentations={
A.RandomCropNearBBox,
A.RandomSizedBBoxSafeCrop,
A.FDA,
A.HistogramMatching,
A.PixelDistributionAdaptation,
A.Lambda,
A.TemplateTransform,
},
),
)
@pytest.mark.parametrize("p", [0.5, 1])
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("always_apply", (False, True))
def test_augmentations_serialization(augmentation_cls, params, p, seed, image, mask, always_apply):
aug = augmentation_cls(p=p, always_apply=always_apply, **params)
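    # Round-trip through to_dict/from_dict and check that, under the same
    # seed, the deserialized transform produces identical outputs.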
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, mask=mask)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, mask=mask)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["mask"], deserialized_aug_data["mask"])
AUGMENTATION_CLS_PARAMS = [
[
A.ImageCompression,
{
"quality_lower": 10,
"quality_upper": 80,
"compression_type": A.ImageCompression.ImageCompressionType.WEBP,
},
],
[A.JpegCompression, {"quality_lower": 10, "quality_upper": 80}],
[A.HueSaturationValue, {"hue_shift_limit": 70, "sat_shift_limit": 95, "val_shift_limit": 55}],
[A.RGBShift, {"r_shift_limit": 70, "g_shift_limit": 80, "b_shift_limit": 40}],
[A.RandomBrightnessContrast, {"brightness_limit": 0.5, "contrast_limit": 0.8}],
[A.Blur, {"blur_limit": 3}],
[A.MotionBlur, {"blur_limit": 3}],
[A.MedianBlur, {"blur_limit": 3}],
[A.GaussianBlur, {"blur_limit": 3}],
[A.GaussNoise, {"var_limit": (20, 90), "mean": 10, "per_channel": False}],
[A.CLAHE, {"clip_limit": 2, "tile_grid_size": (12, 12)}],
[A.RandomGamma, {"gamma_limit": (10, 90)}],
[A.Cutout, {"num_holes": 4, "max_h_size": 4, "max_w_size": 4}],
[A.CoarseDropout, {"max_holes": 4, "max_height": 4, "max_width": 4}],
[A.RandomSnow, {"snow_point_lower": 0.2, "snow_point_upper": 0.4, "brightness_coeff": 4}],
[
A.RandomRain,
{
"slant_lower": -5,
"slant_upper": 5,
"drop_length": 15,
"drop_width": 2,
"drop_color": (100, 100, 100),
"blur_value": 3,
"brightness_coefficient": 0.5,
"rain_type": "heavy",
},
],
[A.RandomFog, {"fog_coef_lower": 0.2, "fog_coef_upper": 0.8, "alpha_coef": 0.11}],
[
A.RandomSunFlare,
{
"flare_roi": (0.1, 0.1, 0.9, 0.6),
"angle_lower": 0.1,
"angle_upper": 0.95,
"num_flare_circles_lower": 7,
"num_flare_circles_upper": 11,
"src_radius": 300,
"src_color": (200, 200, 200),
},
],
[
A.RandomShadow,
{
"shadow_roi": (0.1, 0.4, 0.9, 0.9),
"num_shadows_lower": 2,
"num_shadows_upper": 4,
"shadow_dimension": 8,
},
],
[
A.PadIfNeeded,
{"min_height": 512, "min_width": 512, "border_mode": cv2.BORDER_CONSTANT, "value": (10, 10, 10)},
],
[
A.Rotate,
{
"limit": 120,
"interpolation": cv2.INTER_CUBIC,
"border_mode": cv2.BORDER_CONSTANT,
"value": (10, 10, 10),
},
],
[
A.SafeRotate,
{
"limit": 120,
"interpolation": cv2.INTER_CUBIC,
"border_mode": cv2.BORDER_CONSTANT,
"value": (10, 10, 10),
},
],
[
A.ShiftScaleRotate,
{
"shift_limit": 0.2,
"scale_limit": 0.2,
"rotate_limit": 70,
"interpolation": cv2.INTER_CUBIC,
"border_mode": cv2.BORDER_CONSTANT,
"value": (10, 10, 10),
},
],
[
A.ShiftScaleRotate,
{
"shift_limit_x": 0.3,
"shift_limit_y": 0.4,
"scale_limit": 0.2,
"rotate_limit": 70,
"interpolation": cv2.INTER_CUBIC,
"border_mode": cv2.BORDER_CONSTANT,
"value": (10, 10, 10),
},
],
[
A.OpticalDistortion,
{
"distort_limit": 0.2,
"shift_limit": 0.2,
"interpolation": cv2.INTER_CUBIC,
"border_mode": cv2.BORDER_CONSTANT,
"value": (10, 10, 10),
},
],
[
A.GridDistortion,
{
"num_steps": 10,
"distort_limit": 0.5,
"interpolation": cv2.INTER_CUBIC,
"border_mode": cv2.BORDER_CONSTANT,
"value": (10, 10, 10),
},
],
[
A.ElasticTransform,
{
"alpha": 2,
"sigma": 25,
"alpha_affine": 40,
"interpolation": cv2.INTER_CUBIC,
"border_mode": cv2.BORDER_CONSTANT,
"value": (10, 10, 10),
},
],
[A.CenterCrop, {"height": 10, "width": 10}],
[A.RandomCrop, {"height": 10, "width": 10}],
[A.CropNonEmptyMaskIfExists, {"height": 10, "width": 10}],
[A.RandomSizedCrop, {"min_max_height": (4, 8), "height": 10, "width": 10}],
[A.Crop, {"x_max": 64, "y_max": 64}],
[A.ToFloat, {"max_value": 16536}],
[A.Normalize, {"mean": (0.385, 0.356, 0.306), "std": (0.129, 0.124, 0.125), "max_pixel_value": 100.0}],
[A.RandomBrightness, {"limit": 0.4}],
[A.RandomContrast, {"limit": 0.4}],
[A.RandomScale, {"scale_limit": 0.2, "interpolation": cv2.INTER_CUBIC}],
[A.Resize, {"height": 64, "width": 64}],
[A.SmallestMaxSize, {"max_size": 64, "interpolation": cv2.INTER_CUBIC}],
[A.LongestMaxSize, {"max_size": 128, "interpolation": cv2.INTER_CUBIC}],
[A.RandomGridShuffle, {"grid": (5, 5)}],
[A.Solarize, {"threshold": 32}],
[A.Posterize, {"num_bits": 1}],
[A.Equalize, {"mode": "pil", "by_channels": False}],
[A.MultiplicativeNoise, {"multiplier": (0.7, 2.3), "per_channel": True, "elementwise": True}],
[
A.ColorJitter,
{"brightness": [0.2, 0.3], "contrast": [0.7, 0.9], "saturation": [1.2, 1.7], "hue": [-0.2, 0.1]},
],
[
A.Perspective,
{
"scale": 0.5,
"keep_size": False,
"pad_mode": cv2.BORDER_REFLECT_101,
"pad_val": 10,
"mask_pad_val": 100,
"fit_output": True,
"interpolation": cv2.INTER_CUBIC,
},
],
[A.Sharpen, {"alpha": [0.2, 0.5], "lightness": [0.5, 1.0]}],
[A.Emboss, {"alpha": [0.2, 0.5], "strength": [0.5, 1.0]}],
[A.RandomToneCurve, {"scale": 0.2}],
[
A.CropAndPad,
{
"px": 10,
"keep_size": False,
"sample_independently": False,
"interpolation": cv2.INTER_CUBIC,
"pad_cval_mask": [10, 20, 30],
"pad_cval": [11, 12, 13],
"pad_mode": cv2.BORDER_REFLECT101,
},
],
[
A.Superpixels,
{"p_replace": (0.5, 0.7), "n_segments": (20, 30), "max_size": 25, "interpolation": cv2.INTER_CUBIC},
],
[
A.Affine,
{
"scale": 0.5,
"translate_percent": 0.7,
"translate_px": None,
"rotate": 33,
"shear": 21,
"interpolation": cv2.INTER_CUBIC,
"cval": 25,
"cval_mask": 1,
"mode": cv2.BORDER_REFLECT,
"fit_output": True,
},
],
[
A.Affine,
{
"scale": {"x": [0.3, 0.5], "y": [0.1, 0.2]},
"translate_percent": None,
"translate_px": {"x": [10, 200], "y": [5, 101]},
"rotate": [333, 360],
"shear": {"x": [31, 38], "y": [41, 48]},
"interpolation": 3,
"cval": [10, 20, 30],
"cval_mask": 1,
"mode": cv2.BORDER_REFLECT,
"fit_output": True,
},
],
[
A.PiecewiseAffine,
{
"scale": 0.33,
"nb_rows": (10, 20),
"nb_cols": 33,
"interpolation": 2,
"mask_interpolation": 1,
"cval": 10,
"cval_mask": 20,
"mode": "edge",
"absolute_scale": True,
"keypoints_threshold": 0.1,
},
],
[A.ChannelDropout, dict(channel_drop_range=(1, 2), fill_value=1)],
[A.ChannelShuffle, {}],
[A.Downscale, dict(scale_min=0.5, scale_max=0.75, interpolation=cv2.INTER_LINEAR)],
[A.Flip, {}],
[A.FromFloat, dict(dtype="uint8", max_value=1)],
[A.HorizontalFlip, {}],
[A.ISONoise, dict(color_shift=(0.2, 0.3), intensity=(0.7, 0.9))],
[A.InvertImg, {}],
[A.MaskDropout, dict(max_objects=2, image_fill_value=10, mask_fill_value=20)],
[A.NoOp, {}],
[A.RandomResizedCrop, dict(height=20, width=30, scale=(0.5, 0.6), ratio=(0.8, 0.9))],
[A.FancyPCA, dict(alpha=0.3)],
[A.RandomRotate90, {}],
[A.ToGray, {}],
[A.ToSepia, {}],
[A.Transpose, {}],
[A.VerticalFlip, {}],
[A.RingingOvershoot, dict(blur_limit=(7, 15), cutoff=(np.pi / 5, np.pi / 2))],
[A.UnsharpMask, {"blur_limit": 3, "sigma_limit": 0.5, "alpha": 0.2, "threshold": 15}],
[A.AdvancedBlur, dict(blur_limit=(3, 5), rotate_limit=(60, 90))],
[A.PixelDropout, {"dropout_prob": 0.1, "per_channel": True, "drop_value": None}],
[A.PixelDropout, {"dropout_prob": 0.1, "per_channel": False, "drop_value": None, "mask_drop_value": 15}],
]
AUGMENTATION_CLS_EXCEPT = {
A.FDA,
A.HistogramMatching,
A.PixelDistributionAdaptation,
A.Lambda,
A.RandomCropNearBBox,
A.RandomSizedBBoxSafeCrop,
A.GridDropout,
A.GlassBlur,
A.TemplateTransform,
}
@pytest.mark.parametrize(
["augmentation_cls", "params"], check_all_augs_exists(AUGMENTATION_CLS_PARAMS, AUGMENTATION_CLS_EXCEPT)
)
@pytest.mark.parametrize("p", [0.5, 1])
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("always_apply", (False, True))
def test_augmentations_serialization_with_custom_parameters(
augmentation_cls, params, p, seed, image, mask, always_apply
):
aug = augmentation_cls(p=p, always_apply=always_apply, **params)
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, mask=mask)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, mask=mask)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["mask"], deserialized_aug_data["mask"])
@pytest.mark.parametrize(
["augmentation_cls", "params"], check_all_augs_exists(AUGMENTATION_CLS_PARAMS, AUGMENTATION_CLS_EXCEPT)
)
@pytest.mark.parametrize("p", [0.5, 1])
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("always_apply", (False, True))
@pytest.mark.parametrize("data_format", ("yaml",))
def test_augmentations_serialization_to_file_with_custom_parameters(
augmentation_cls, params, p, seed, image, mask, always_apply, data_format
):
with patch("builtins.open", OpenMock()):
aug = augmentation_cls(p=p, always_apply=always_apply, **params)
filepath = "serialized.{}".format(data_format)
A.save(aug, filepath, data_format=data_format)
deserialized_aug = A.load(filepath, data_format=data_format)
set_seed(seed)
aug_data = aug(image=image, mask=mask)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, mask=mask)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["mask"], deserialized_aug_data["mask"])
@pytest.mark.parametrize(
["augmentation_cls", "params"],
get_transforms(
custom_arguments={
A.Crop: {"y_min": 0, "y_max": 10, "x_min": 0, "x_max": 10},
A.CenterCrop: {"height": 10, "width": 10},
A.CropNonEmptyMaskIfExists: {"height": 10, "width": 10},
A.RandomCrop: {"height": 10, "width": 10},
A.RandomResizedCrop: {"height": 10, "width": 10},
A.RandomSizedCrop: {"min_max_height": (4, 8), "height": 10, "width": 10},
A.CropAndPad: {"px": 10},
A.Resize: {"height": 10, "width": 10},
A.RandomSizedBBoxSafeCrop: {"height": 10, "width": 10},
},
except_augmentations={
A.RandomCropNearBBox,
A.FDA,
A.HistogramMatching,
A.PixelDistributionAdaptation,
A.Lambda,
A.CoarseDropout,
A.CropNonEmptyMaskIfExists,
A.ElasticTransform,
A.GridDistortion,
A.RandomGridShuffle,
A.GridDropout,
A.MaskDropout,
A.OpticalDistortion,
A.TemplateTransform,
},
),
)
@pytest.mark.parametrize("p", [0.5, 1])
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("always_apply", (False, True))
def test_augmentations_for_bboxes_serialization(
augmentation_cls, params, p, seed, image, albumentations_bboxes, always_apply
):
aug = augmentation_cls(p=p, always_apply=always_apply, **params)
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, bboxes=albumentations_bboxes)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, bboxes=albumentations_bboxes)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["bboxes"], deserialized_aug_data["bboxes"])
@pytest.mark.parametrize(
["augmentation_cls", "params"],
get_transforms(
custom_arguments={
A.Crop: {"y_min": 0, "y_max": 10, "x_min": 0, "x_max": 10},
A.CenterCrop: {"height": 10, "width": 10},
A.CropNonEmptyMaskIfExists: {"height": 10, "width": 10},
A.RandomCrop: {"height": 10, "width": 10},
A.RandomResizedCrop: {"height": 10, "width": 10},
A.RandomSizedCrop: {"min_max_height": (4, 8), "height": 10, "width": 10},
A.CropAndPad: {"px": 10},
A.Resize: {"height": 10, "width": 10},
},
except_augmentations={
A.RandomCropNearBBox,
A.FDA,
A.HistogramMatching,
A.PixelDistributionAdaptation,
A.Lambda,
A.CoarseDropout,
A.CropNonEmptyMaskIfExists,
A.ElasticTransform,
A.GridDistortion,
A.RandomGridShuffle,
A.GridDropout,
A.MaskDropout,
A.OpticalDistortion,
A.RandomSizedBBoxSafeCrop,
A.TemplateTransform,
},
),
)
@pytest.mark.parametrize("p", [0.5, 1])
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("always_apply", (False, True))
def test_augmentations_for_keypoints_serialization(augmentation_cls, params, p, seed, image, keypoints, always_apply):
aug = augmentation_cls(p=p, always_apply=always_apply, **params)
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, keypoints=keypoints)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, keypoints=keypoints)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["keypoints"], deserialized_aug_data["keypoints"])
@pytest.mark.parametrize(
["augmentation_cls", "params", "call_params"],
[[A.RandomCropNearBBox, {"max_part_shift": 0.15}, {"cropping_bbox": [-59, 77, 177, 231]}]],
)
@pytest.mark.parametrize("p", [0.5, 1])
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("always_apply", (False, True))
def test_augmentations_serialization_with_call_params(
augmentation_cls, params, call_params, p, seed, image, always_apply
):
aug = augmentation_cls(p=p, always_apply=always_apply, **params)
annotations = {"image": image, **call_params}
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(**annotations)
set_seed(seed)
deserialized_aug_data = deserialized_aug(**annotations)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
def test_from_float_serialization(float_image):
aug = A.FromFloat(p=1, dtype="uint8")
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
aug_data = aug(image=float_image)
deserialized_aug_data = deserialized_aug(image=float_image)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
@pytest.mark.parametrize("seed", TEST_SEEDS)
def test_transform_pipeline_serialization(seed, image, mask):
aug = A.Compose(
[
A.OneOrOther(
A.Compose(
[
A.Resize(1024, 1024),
A.RandomSizedCrop(min_max_height=(256, 1024), height=512, width=512, p=1),
A.OneOf(
[
A.RandomSizedCrop(min_max_height=(256, 512), height=384, width=384, p=0.5),
A.RandomSizedCrop(min_max_height=(256, 512), height=512, width=512, p=0.5),
]
),
]
),
A.Compose(
[
A.Resize(1024, 1024),
A.RandomSizedCrop(min_max_height=(256, 1025), height=256, width=256, p=1),
A.OneOf([A.HueSaturationValue(p=0.5), A.RGBShift(p=0.7)], p=1),
]
),
),
A.SomeOf(
[
A.HorizontalFlip(p=1),
A.Transpose(p=1),
A.HueSaturationValue(p=0.5),
A.RandomBrightnessContrast(p=0.5),
],
2,
replace=False,
),
]
)
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, mask=mask)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, mask=mask)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["mask"], deserialized_aug_data["mask"])
@pytest.mark.parametrize(
["bboxes", "bbox_format", "labels"],
[
([(20, 30, 40, 50)], "coco", [1]),
([(20, 30, 40, 50, 99), (10, 40, 30, 20, 9)], "coco", [1, 2]),
([(20, 30, 60, 80)], "pascal_voc", [2]),
([(20, 30, 60, 80, 99)], "pascal_voc", [1]),
([(0.2, 0.3, 0.4, 0.5)], "yolo", [2]),
([(0.2, 0.3, 0.4, 0.5, 99)], "yolo", [1]),
],
)
@pytest.mark.parametrize("seed", TEST_SEEDS)
def test_transform_pipeline_serialization_with_bboxes(seed, image, bboxes, bbox_format, labels):
aug = A.Compose(
[
A.OneOrOther(
A.Compose([A.RandomRotate90(), A.OneOf([A.HorizontalFlip(p=0.5), A.VerticalFlip(p=0.5)])]),
A.Compose([A.Rotate(p=0.5), A.OneOf([A.HueSaturationValue(p=0.5), A.RGBShift(p=0.7)], p=1)]),
),
A.SomeOf(
[
A.HorizontalFlip(p=1),
A.Transpose(p=1),
A.HueSaturationValue(p=0.5),
A.RandomBrightnessContrast(p=0.5),
],
n=5,
),
],
bbox_params={"format": bbox_format, "label_fields": ["labels"]},
)
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, bboxes=bboxes, labels=labels)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, bboxes=bboxes, labels=labels)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["bboxes"], deserialized_aug_data["bboxes"])
@pytest.mark.parametrize(
["keypoints", "keypoint_format", "labels"],
[
([(20, 30, 40, 50)], "xyas", [1]),
([(20, 30, 40, 50, 99), (10, 40, 30, 20, 9)], "xy", [1, 2]),
([(20, 30, 60, 80)], "yx", [2]),
([(20, 30, 60, 80, 99)], "xys", [1]),
],
)
@pytest.mark.parametrize("seed", TEST_SEEDS)
def test_transform_pipeline_serialization_with_keypoints(seed, image, keypoints, keypoint_format, labels):
aug = A.Compose(
[
A.OneOrOther(
A.Compose([A.RandomRotate90(), A.OneOf([A.HorizontalFlip(p=0.5), A.VerticalFlip(p=0.5)])]),
A.Compose([A.Rotate(p=0.5), A.OneOf([A.HueSaturationValue(p=0.5), A.RGBShift(p=0.7)], p=1)]),
),
A.SomeOf(
n=2,
transforms=[
A.HorizontalFlip(p=1),
A.Transpose(p=1),
A.HueSaturationValue(p=0.5),
A.RandomBrightnessContrast(p=0.5),
],
replace=False,
),
],
keypoint_params={"format": keypoint_format, "label_fields": ["labels"]},
)
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, keypoints=keypoints, labels=labels)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, keypoints=keypoints, labels=labels)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["keypoints"], deserialized_aug_data["keypoints"])
@pytest.mark.parametrize(
["augmentation_cls", "params"],
get_image_only_transforms(
except_augmentations={A.HistogramMatching, A.FDA, A.PixelDistributionAdaptation, A.TemplateTransform},
),
)
@pytest.mark.parametrize("seed", TEST_SEEDS)
def test_additional_targets_for_image_only_serialization(augmentation_cls, params, image, seed):
aug = A.Compose([augmentation_cls(always_apply=True, **params)], additional_targets={"image2": "image"})
image2 = image.copy()
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug)
set_seed(seed)
aug_data = aug(image=image, image2=image2)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, image2=image2)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["image2"], deserialized_aug_data["image2"])
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("p", [1])
def test_lambda_serialization(image, mask, albumentations_bboxes, keypoints, seed, p):
def vflip_image(image, **kwargs):
return F.vflip(image)
def vflip_mask(mask, **kwargs):
return F.vflip(mask)
def vflip_bbox(bbox, **kwargs):
return F.bbox_vflip(bbox, **kwargs)
def vflip_keypoint(keypoint, **kwargs):
return F.keypoint_vflip(keypoint, **kwargs)
aug = A.Lambda(name="vflip", image=vflip_image, mask=vflip_mask, bbox=vflip_bbox, keypoint=vflip_keypoint, p=p)
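    # Lambda wraps arbitrary Python callables that cannot be rebuilt from the
    # serialized dict alone, so from_dict() is handed the named instance back
    # via lambda_transforms.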
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug, lambda_transforms={"vflip": aug})
set_seed(seed)
aug_data = aug(image=image, mask=mask, bboxes=albumentations_bboxes, keypoints=keypoints)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image, mask=mask, bboxes=albumentations_bboxes, keypoints=keypoints)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
assert np.array_equal(aug_data["mask"], deserialized_aug_data["mask"])
assert np.array_equal(aug_data["bboxes"], deserialized_aug_data["bboxes"])
assert np.array_equal(aug_data["keypoints"], deserialized_aug_data["keypoints"])
def test_serialization_v2_conversion_without_totensor():
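    # Load a pipeline serialized by albumentations v1.1.0 and check that,
    # with fixed numpy/random seeds, it reproduces the recorded reference
    # output exactly.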
current_directory = os.path.dirname(os.path.abspath(__file__))
files_directory = os.path.join(current_directory, "files")
transform_1_1_0 = A.load(os.path.join(files_directory, "transform_v1.1.0_without_totensor.json"))
with open(os.path.join(files_directory, "output_v1.1.0_without_totensor.json")) as f:
output_1_1_0 = json.load(f)
np.random.seed(42)
image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
random.seed(42)
transformed_image = transform_1_1_0(image=image)["image"]
assert transformed_image.tolist() == output_1_1_0
@skipif_no_torch
def test_serialization_v2_conversion_with_totensor():
current_directory = os.path.dirname(os.path.abspath(__file__))
files_directory = os.path.join(current_directory, "files")
transform_1_1_0 = A.load(os.path.join(files_directory, "transform_v1.1.0_with_totensor.json"))
with open(os.path.join(files_directory, "output_v1.1.0_with_totensor.json")) as f:
output_1_1_0 = json.load(f)
np.random.seed(42)
image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
random.seed(42)
transformed_image = transform_1_1_0(image=image)["image"]
assert transformed_image.numpy().tolist() == output_1_1_0
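
# The current (v2) serialization format must reproduce the same v1.1.0
# reference outputs.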
def test_serialization_v2_without_totensor():
current_directory = os.path.dirname(os.path.abspath(__file__))
files_directory = os.path.join(current_directory, "files")
transform = A.load(os.path.join(files_directory, "transform_serialization_v2_without_totensor.json"))
with open(os.path.join(files_directory, "output_v1.1.0_without_totensor.json")) as f:
output_1_1_0 = json.load(f)
np.random.seed(42)
image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
random.seed(42)
transformed_image = transform(image=image)["image"]
assert transformed_image.tolist() == output_1_1_0
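
# v2 serialization format with a ToTensor step (torch required).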
@skipif_no_torch
def test_serialization_v2_with_totensor():
current_directory = os.path.dirname(os.path.abspath(__file__))
files_directory = os.path.join(current_directory, "files")
transform = A.load(os.path.join(files_directory, "transform_serialization_v2_with_totensor.json"))
with open(os.path.join(files_directory, "output_v1.1.0_with_totensor.json")) as f:
output_1_1_0 = json.load(f)
np.random.seed(42)
image = np.random.randint(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)
random.seed(42)
transformed_image = transform(image=image)["image"]
assert transformed_image.numpy().tolist() == output_1_1_0
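
# A user-defined class reusing a built-in transform name is registered under
# its fully qualified name, so both entries stay resolvable.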
def test_custom_transform_with_overlapping_name():
class HorizontalFlip(ImageOnlyTransform):
pass
assert SERIALIZABLE_REGISTRY["HorizontalFlip"] == A.HorizontalFlip
assert SERIALIZABLE_REGISTRY["tests.test_serialization.HorizontalFlip"] == HorizontalFlip
def test_serialization_v2_to_dict():
transform = A.Compose([A.HorizontalFlip()])
transform_dict = A.to_dict(transform)["transform"]
assert transform_dict == {
"__class_fullname__": "Compose",
"p": 1.0,
"transforms": [{"__class_fullname__": "HorizontalFlip", "always_apply": False, "p": 0.5}],
"bbox_params": None,
"keypoint_params": None,
"additional_targets": {},
}
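
# shorten_class_name drops the albumentations package prefix but keeps
# third-party module paths intact.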
@pytest.mark.parametrize(
["class_fullname", "expected_short_class_name"],
[
["albumentations.augmentations.transforms.HorizontalFlip", "HorizontalFlip"],
["HorizontalFlip", "HorizontalFlip"],
["some_module.HorizontalFlip", "some_module.HorizontalFlip"],
],
)
def test_shorten_class_name(class_fullname, expected_short_class_name):
assert shorten_class_name(class_fullname) == expected_short_class_name
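
# Like A.Lambda, TemplateTransform holds reference images and is therefore
# supplied to from_dict through lambda_transforms instead of being rebuilt
# from the serialized dict.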
@pytest.mark.parametrize("seed", TEST_SEEDS)
@pytest.mark.parametrize("p", [1])
def test_template_transform_serialization(image, template, seed, p):
template_transform = A.TemplateTransform(name="template", templates=template, p=p)
aug = A.Compose([A.Flip(), template_transform, A.Blur()])
serialized_aug = A.to_dict(aug)
deserialized_aug = A.from_dict(serialized_aug, lambda_transforms={"template": template_transform})
set_seed(seed)
aug_data = aug(image=image)
set_seed(seed)
deserialized_aug_data = deserialized_aug(image=image)
assert np.array_equal(aug_data["image"], deserialized_aug_data["image"])
|
[
"albumentations.Lambda",
"numpy.random.seed",
"albumentations.Resize",
"albumentations.Flip",
"albumentations.Rotate",
"numpy.random.randint",
"albumentations.TemplateTransform",
"albumentations.load",
"pytest.mark.parametrize",
"albumentations.HorizontalFlip",
"os.path.join",
"albumentations.RGBShift",
"albumentations.from_dict",
"albumentations.Blur",
"os.path.abspath",
"albumentations.save",
"albumentations.core.serialization.shorten_class_name",
"random.seed",
"albumentations.augmentations.functional.bbox_vflip",
"albumentations.to_dict",
"albumentations.augmentations.functional.keypoint_vflip",
"albumentations.RandomSizedCrop",
"albumentations.VerticalFlip",
"json.load",
"albumentations.HueSaturationValue",
"albumentations.RandomBrightnessContrast",
"albumentations.Transpose",
"albumentations.augmentations.functional.vflip",
"albumentations.FromFloat",
"numpy.array_equal",
"albumentations.RandomRotate90"
] |
[((1467, 1505), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[0.5, 1]'], {}), "('p', [0.5, 1])\n", (1490, 1505), False, 'import pytest\n'), ((1507, 1550), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (1530, 1550), False, 'import pytest\n'), ((1552, 1606), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""always_apply"""', '(False, True)'], {}), "('always_apply', (False, True))\n", (1575, 1606), False, 'import pytest\n'), ((11113, 11151), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[0.5, 1]'], {}), "('p', [0.5, 1])\n", (11136, 11151), False, 'import pytest\n'), ((11153, 11196), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (11176, 11196), False, 'import pytest\n'), ((11198, 11252), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""always_apply"""', '(False, True)'], {}), "('always_apply', (False, True))\n", (11221, 11252), False, 'import pytest\n'), ((11979, 12017), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[0.5, 1]'], {}), "('p', [0.5, 1])\n", (12002, 12017), False, 'import pytest\n'), ((12019, 12062), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (12042, 12062), False, 'import pytest\n'), ((12064, 12118), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""always_apply"""', '(False, True)'], {}), "('always_apply', (False, True))\n", (12087, 12118), False, 'import pytest\n'), ((12120, 12169), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""data_format"""', "('yaml',)"], {}), "('data_format', ('yaml',))\n", (12143, 12169), False, 'import pytest\n'), ((14108, 14146), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[0.5, 1]'], {}), "('p', [0.5, 1])\n", (14131, 14146), False, 'import pytest\n'), ((14148, 14191), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (14171, 14191), False, 'import pytest\n'), ((14193, 14247), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""always_apply"""', '(False, True)'], {}), "('always_apply', (False, True))\n", (14216, 14247), False, 'import pytest\n'), ((16018, 16056), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[0.5, 1]'], {}), "('p', [0.5, 1])\n", (16041, 16056), False, 'import pytest\n'), ((16058, 16101), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (16081, 16101), False, 'import pytest\n'), ((16103, 16157), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""always_apply"""', '(False, True)'], {}), "('always_apply', (False, True))\n", (16126, 16157), False, 'import pytest\n'), ((16768, 16940), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['augmentation_cls', 'params', 'call_params']", "[[A.RandomCropNearBBox, {'max_part_shift': 0.15}, {'cropping_bbox': [-59, \n 77, 177, 231]}]]"], {}), "(['augmentation_cls', 'params', 'call_params'], [[A.\n RandomCropNearBBox, {'max_part_shift': 0.15}, {'cropping_bbox': [-59, \n 77, 177, 231]}]])\n", (16791, 16940), False, 'import pytest\n'), ((16943, 16981), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[0.5, 1]'], {}), "('p', [0.5, 1])\n", (16966, 16981), False, 'import pytest\n'), ((16983, 17026), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 
'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (17006, 17026), False, 'import pytest\n'), ((17028, 17082), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""always_apply"""', '(False, True)'], {}), "('always_apply', (False, True))\n", (17051, 17082), False, 'import pytest\n'), ((17989, 18032), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (18012, 18032), False, 'import pytest\n'), ((19778, 20123), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['bboxes', 'bbox_format', 'labels']", "[([(20, 30, 40, 50)], 'coco', [1]), ([(20, 30, 40, 50, 99), (10, 40, 30, 20,\n 9)], 'coco', [1, 2]), ([(20, 30, 60, 80)], 'pascal_voc', [2]), ([(20, \n 30, 60, 80, 99)], 'pascal_voc', [1]), ([(0.2, 0.3, 0.4, 0.5)], 'yolo',\n [2]), ([(0.2, 0.3, 0.4, 0.5, 99)], 'yolo', [1])]"], {}), "(['bboxes', 'bbox_format', 'labels'], [([(20, 30, 40,\n 50)], 'coco', [1]), ([(20, 30, 40, 50, 99), (10, 40, 30, 20, 9)],\n 'coco', [1, 2]), ([(20, 30, 60, 80)], 'pascal_voc', [2]), ([(20, 30, 60,\n 80, 99)], 'pascal_voc', [1]), ([(0.2, 0.3, 0.4, 0.5)], 'yolo', [2]), ([\n (0.2, 0.3, 0.4, 0.5, 99)], 'yolo', [1])])\n", (19801, 20123), False, 'import pytest\n'), ((20174, 20217), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (20197, 20217), False, 'import pytest\n'), ((21409, 21659), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['keypoints', 'keypoint_format', 'labels']", "[([(20, 30, 40, 50)], 'xyas', [1]), ([(20, 30, 40, 50, 99), (10, 40, 30, 20,\n 9)], 'xy', [1, 2]), ([(20, 30, 60, 80)], 'yx', [2]), ([(20, 30, 60, 80,\n 99)], 'xys', [1])]"], {}), "(['keypoints', 'keypoint_format', 'labels'], [([(20,\n 30, 40, 50)], 'xyas', [1]), ([(20, 30, 40, 50, 99), (10, 40, 30, 20, 9)\n ], 'xy', [1, 2]), ([(20, 30, 60, 80)], 'yx', [2]), ([(20, 30, 60, 80, \n 99)], 'xys', [1])])\n", (21432, 21659), False, 'import pytest\n'), ((21697, 21740), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (21720, 21740), False, 'import pytest\n'), ((23223, 23266), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (23246, 23266), False, 'import pytest\n'), ((23904, 23947), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (23927, 23947), False, 'import pytest\n'), ((23949, 23982), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[1]'], {}), "('p', [1])\n", (23972, 23982), False, 'import pytest\n'), ((28545, 28811), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['class_fullname', 'expected_short_class_name']", "[['albumentations.augmentations.transforms.HorizontalFlip',\n 'HorizontalFlip'], ['HorizontalFlip', 'HorizontalFlip'], [\n 'some_module.HorizontalFlip', 'some_module.HorizontalFlip']]"], {}), "(['class_fullname', 'expected_short_class_name'], [[\n 'albumentations.augmentations.transforms.HorizontalFlip',\n 'HorizontalFlip'], ['HorizontalFlip', 'HorizontalFlip'], [\n 'some_module.HorizontalFlip', 'some_module.HorizontalFlip']])\n", (28568, 28811), False, 'import pytest\n'), ((28990, 29033), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed"""', 'TEST_SEEDS'], {}), "('seed', TEST_SEEDS)\n", (29013, 29033), False, 'import pytest\n'), ((29035, 29068), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""p"""', '[1]'], {}), "('p', [1])\n", (29058, 
29068), False, 'import pytest\n'), ((1797, 1811), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (1806, 1811), True, 'import albumentations as A\n'), ((1835, 1862), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (1846, 1862), True, 'import albumentations as A\n'), ((2024, 2089), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (2038, 2089), True, 'import numpy as np\n'), ((2101, 2164), 'numpy.array_equal', 'np.array_equal', (["aug_data['mask']", "deserialized_aug_data['mask']"], {}), "(aug_data['mask'], deserialized_aug_data['mask'])\n", (2115, 2164), True, 'import numpy as np\n'), ((11472, 11486), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (11481, 11486), True, 'import albumentations as A\n'), ((11510, 11537), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (11521, 11537), True, 'import albumentations as A\n'), ((11699, 11764), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (11713, 11764), True, 'import numpy as np\n'), ((11776, 11839), 'numpy.array_equal', 'np.array_equal', (["aug_data['mask']", "deserialized_aug_data['mask']"], {}), "(aug_data['mask'], deserialized_aug_data['mask'])\n", (11790, 11839), True, 'import numpy as np\n'), ((14472, 14486), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (14481, 14486), True, 'import albumentations as A\n'), ((14510, 14537), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (14521, 14537), True, 'import albumentations as A\n'), ((14737, 14802), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (14751, 14802), True, 'import numpy as np\n'), ((14814, 14881), 'numpy.array_equal', 'np.array_equal', (["aug_data['bboxes']", "deserialized_aug_data['bboxes']"], {}), "(aug_data['bboxes'], deserialized_aug_data['bboxes'])\n", (14828, 14881), True, 'import numpy as np\n'), ((16367, 16381), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (16376, 16381), True, 'import albumentations as A\n'), ((16405, 16432), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (16416, 16432), True, 'import albumentations as A\n'), ((16614, 16679), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (16628, 16679), True, 'import numpy as np\n'), ((16691, 16764), 'numpy.array_equal', 'np.array_equal', (["aug_data['keypoints']", "deserialized_aug_data['keypoints']"], {}), "(aug_data['keypoints'], deserialized_aug_data['keypoints'])\n", (16705, 16764), True, 'import numpy as np\n'), ((17353, 17367), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (17362, 17367), True, 'import albumentations as A\n'), ((17391, 17418), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (17402, 17418), True, 'import albumentations as A\n'), ((17562, 17627), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (17576, 17627), True, 'import numpy as np\n'), ((17688, 17719), 
'albumentations.FromFloat', 'A.FromFloat', ([], {'p': '(1)', 'dtype': '"""uint8"""'}), "(p=1, dtype='uint8')\n", (17699, 17719), True, 'import albumentations as A\n'), ((17741, 17755), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (17750, 17755), True, 'import albumentations as A\n'), ((17779, 17806), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (17790, 17806), True, 'import albumentations as A\n'), ((17920, 17985), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (17934, 17985), True, 'import numpy as np\n'), ((19407, 19421), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (19416, 19421), True, 'import albumentations as A\n'), ((19445, 19472), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (19456, 19472), True, 'import albumentations as A\n'), ((19634, 19699), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (19648, 19699), True, 'import numpy as np\n'), ((19711, 19774), 'numpy.array_equal', 'np.array_equal', (["aug_data['mask']", "deserialized_aug_data['mask']"], {}), "(aug_data['mask'], deserialized_aug_data['mask'])\n", (19725, 19774), True, 'import numpy as np\n'), ((20996, 21010), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (21005, 21010), True, 'import albumentations as A\n'), ((21034, 21061), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (21045, 21061), True, 'import albumentations as A\n'), ((21261, 21326), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (21275, 21326), True, 'import numpy as np\n'), ((21338, 21405), 'numpy.array_equal', 'np.array_equal', (["aug_data['bboxes']", "deserialized_aug_data['bboxes']"], {}), "(aug_data['bboxes'], deserialized_aug_data['bboxes'])\n", (21352, 21405), True, 'import numpy as np\n'), ((22579, 22593), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (22588, 22593), True, 'import albumentations as A\n'), ((22617, 22644), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (22628, 22644), True, 'import albumentations as A\n'), ((22856, 22921), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (22870, 22921), True, 'import numpy as np\n'), ((22933, 23006), 'numpy.array_equal', 'np.array_equal', (["aug_data['keypoints']", "deserialized_aug_data['keypoints']"], {}), "(aug_data['keypoints'], deserialized_aug_data['keypoints'])\n", (22947, 23006), True, 'import numpy as np\n'), ((23521, 23535), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (23530, 23535), True, 'import albumentations as A\n'), ((23559, 23586), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {}), '(serialized_aug)\n', (23570, 23586), True, 'import albumentations as A\n'), ((23756, 23821), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (23770, 23821), True, 'import numpy as np\n'), ((23833, 23900), 'numpy.array_equal', 'np.array_equal', (["aug_data['image2']", 
"deserialized_aug_data['image2']"], {}), "(aug_data['image2'], deserialized_aug_data['image2'])\n", (23847, 23900), True, 'import numpy as np\n'), ((24393, 24502), 'albumentations.Lambda', 'A.Lambda', ([], {'name': '"""vflip"""', 'image': 'vflip_image', 'mask': 'vflip_mask', 'bbox': 'vflip_bbox', 'keypoint': 'vflip_keypoint', 'p': 'p'}), "(name='vflip', image=vflip_image, mask=vflip_mask, bbox=vflip_bbox,\n keypoint=vflip_keypoint, p=p)\n", (24401, 24502), True, 'import albumentations as A\n'), ((24521, 24535), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (24530, 24535), True, 'import albumentations as A\n'), ((24559, 24620), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {'lambda_transforms': "{'vflip': aug}"}), "(serialized_aug, lambda_transforms={'vflip': aug})\n", (24570, 24620), True, 'import albumentations as A\n'), ((24884, 24949), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (24898, 24949), True, 'import numpy as np\n'), ((24961, 25024), 'numpy.array_equal', 'np.array_equal', (["aug_data['mask']", "deserialized_aug_data['mask']"], {}), "(aug_data['mask'], deserialized_aug_data['mask'])\n", (24975, 25024), True, 'import numpy as np\n'), ((25036, 25103), 'numpy.array_equal', 'np.array_equal', (["aug_data['bboxes']", "deserialized_aug_data['bboxes']"], {}), "(aug_data['bboxes'], deserialized_aug_data['bboxes'])\n", (25050, 25103), True, 'import numpy as np\n'), ((25115, 25188), 'numpy.array_equal', 'np.array_equal', (["aug_data['keypoints']", "deserialized_aug_data['keypoints']"], {}), "(aug_data['keypoints'], deserialized_aug_data['keypoints'])\n", (25129, 25188), True, 'import numpy as np\n'), ((25337, 25377), 'os.path.join', 'os.path.join', (['current_directory', '"""files"""'], {}), "(current_directory, 'files')\n", (25349, 25377), False, 'import os\n'), ((25610, 25628), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (25624, 25628), True, 'import numpy as np\n'), ((25641, 25711), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(255)', 'size': '(256, 256, 3)', 'dtype': 'np.uint8'}), '(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)\n', (25658, 25711), True, 'import numpy as np\n'), ((25716, 25731), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (25727, 25731), False, 'import random\n'), ((26010, 26050), 'os.path.join', 'os.path.join', (['current_directory', '"""files"""'], {}), "(current_directory, 'files')\n", (26022, 26050), False, 'import os\n'), ((26277, 26295), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (26291, 26295), True, 'import numpy as np\n'), ((26308, 26378), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(255)', 'size': '(256, 256, 3)', 'dtype': 'np.uint8'}), '(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)\n', (26325, 26378), True, 'import numpy as np\n'), ((26383, 26398), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (26394, 26398), False, 'import random\n'), ((26660, 26700), 'os.path.join', 'os.path.join', (['current_directory', '"""files"""'], {}), "(current_directory, 'files')\n", (26672, 26700), False, 'import os\n'), ((26937, 26955), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (26951, 26955), True, 'import numpy as np\n'), ((26968, 27038), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(255)', 'size': '(256, 256, 3)', 'dtype': 'np.uint8'}), 
'(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)\n', (26985, 27038), True, 'import numpy as np\n'), ((27043, 27058), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (27054, 27058), False, 'import random\n'), ((27320, 27360), 'os.path.join', 'os.path.join', (['current_directory', '"""files"""'], {}), "(current_directory, 'files')\n", (27332, 27360), False, 'import os\n'), ((27591, 27609), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (27605, 27609), True, 'import numpy as np\n'), ((27622, 27692), 'numpy.random.randint', 'np.random.randint', ([], {'low': '(0)', 'high': '(255)', 'size': '(256, 256, 3)', 'dtype': 'np.uint8'}), '(low=0, high=255, size=(256, 256, 3), dtype=np.uint8)\n', (27639, 27692), True, 'import numpy as np\n'), ((27697, 27712), 'random.seed', 'random.seed', (['(42)'], {}), '(42)\n', (27708, 27712), False, 'import random\n'), ((29163, 29224), 'albumentations.TemplateTransform', 'A.TemplateTransform', ([], {'name': '"""template"""', 'templates': 'template', 'p': 'p'}), "(name='template', templates=template, p=p)\n", (29182, 29224), True, 'import albumentations as A\n'), ((29310, 29324), 'albumentations.to_dict', 'A.to_dict', (['aug'], {}), '(aug)\n', (29319, 29324), True, 'import albumentations as A\n'), ((29348, 29427), 'albumentations.from_dict', 'A.from_dict', (['serialized_aug'], {'lambda_transforms': "{'template': template_transform}"}), "(serialized_aug, lambda_transforms={'template': template_transform})\n", (29359, 29427), True, 'import albumentations as A\n'), ((29569, 29634), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (29583, 29634), True, 'import numpy as np\n'), ((12501, 12547), 'albumentations.save', 'A.save', (['aug', 'filepath'], {'data_format': 'data_format'}), '(aug, filepath, data_format=data_format)\n', (12507, 12547), True, 'import albumentations as A\n'), ((12575, 12616), 'albumentations.load', 'A.load', (['filepath'], {'data_format': 'data_format'}), '(filepath, data_format=data_format)\n', (12581, 12616), True, 'import albumentations as A\n'), ((12798, 12863), 'numpy.array_equal', 'np.array_equal', (["aug_data['image']", "deserialized_aug_data['image']"], {}), "(aug_data['image'], deserialized_aug_data['image'])\n", (12812, 12863), True, 'import numpy as np\n'), ((12879, 12942), 'numpy.array_equal', 'np.array_equal', (["aug_data['mask']", "deserialized_aug_data['mask']"], {}), "(aug_data['mask'], deserialized_aug_data['mask'])\n", (12893, 12942), True, 'import numpy as np\n'), ((24123, 24137), 'albumentations.augmentations.functional.vflip', 'F.vflip', (['image'], {}), '(image)\n', (24130, 24137), True, 'import albumentations.augmentations.functional as F\n'), ((24190, 24203), 'albumentations.augmentations.functional.vflip', 'F.vflip', (['mask'], {}), '(mask)\n', (24197, 24203), True, 'import albumentations.augmentations.functional as F\n'), ((24256, 24284), 'albumentations.augmentations.functional.bbox_vflip', 'F.bbox_vflip', (['bbox'], {}), '(bbox, **kwargs)\n', (24268, 24284), True, 'import albumentations.augmentations.functional as F\n'), ((24345, 24381), 'albumentations.augmentations.functional.keypoint_vflip', 'F.keypoint_vflip', (['keypoint'], {}), '(keypoint, **kwargs)\n', (24361, 24381), True, 'import albumentations.augmentations.functional as F\n'), ((25288, 25313), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (25303, 25313), False, 'import os\n'), ((25407, 25478), 
'os.path.join', 'os.path.join', (['files_directory', '"""transform_v1.1.0_without_totensor.json"""'], {}), "(files_directory, 'transform_v1.1.0_without_totensor.json')\n", (25419, 25478), False, 'import os\n'), ((25593, 25605), 'json.load', 'json.load', (['f'], {}), '(f)\n', (25602, 25605), False, 'import json\n'), ((25961, 25986), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (25976, 25986), False, 'import os\n'), ((26080, 26148), 'os.path.join', 'os.path.join', (['files_directory', '"""transform_v1.1.0_with_totensor.json"""'], {}), "(files_directory, 'transform_v1.1.0_with_totensor.json')\n", (26092, 26148), False, 'import os\n'), ((26260, 26272), 'json.load', 'json.load', (['f'], {}), '(f)\n', (26269, 26272), False, 'import json\n'), ((26611, 26636), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (26626, 26636), False, 'import os\n'), ((26724, 26809), 'os.path.join', 'os.path.join', (['files_directory', '"""transform_serialization_v2_without_totensor.json"""'], {}), "(files_directory,\n 'transform_serialization_v2_without_totensor.json')\n", (26736, 26809), False, 'import os\n'), ((26920, 26932), 'json.load', 'json.load', (['f'], {}), '(f)\n', (26929, 26932), False, 'import json\n'), ((27271, 27296), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (27286, 27296), False, 'import os\n'), ((27384, 27462), 'os.path.join', 'os.path.join', (['files_directory', '"""transform_serialization_v2_with_totensor.json"""'], {}), "(files_directory, 'transform_serialization_v2_with_totensor.json')\n", (27396, 27462), False, 'import os\n'), ((27574, 27586), 'json.load', 'json.load', (['f'], {}), '(f)\n', (27583, 27586), False, 'import json\n'), ((28217, 28237), 'albumentations.to_dict', 'A.to_dict', (['transform'], {}), '(transform)\n', (28226, 28237), True, 'import albumentations as A\n'), ((28923, 28957), 'albumentations.core.serialization.shorten_class_name', 'shorten_class_name', (['class_fullname'], {}), '(class_fullname)\n', (28941, 28957), False, 'from albumentations.core.serialization import SERIALIZABLE_REGISTRY, shorten_class_name\n'), ((25494, 25562), 'os.path.join', 'os.path.join', (['files_directory', '"""output_v1.1.0_without_totensor.json"""'], {}), "(files_directory, 'output_v1.1.0_without_totensor.json')\n", (25506, 25562), False, 'import os\n'), ((26164, 26229), 'os.path.join', 'os.path.join', (['files_directory', '"""output_v1.1.0_with_totensor.json"""'], {}), "(files_directory, 'output_v1.1.0_with_totensor.json')\n", (26176, 26229), False, 'import os\n'), ((26821, 26889), 'os.path.join', 'os.path.join', (['files_directory', '"""output_v1.1.0_without_totensor.json"""'], {}), "(files_directory, 'output_v1.1.0_without_totensor.json')\n", (26833, 26889), False, 'import os\n'), ((27478, 27543), 'os.path.join', 'os.path.join', (['files_directory', '"""output_v1.1.0_with_totensor.json"""'], {}), "(files_directory, 'output_v1.1.0_with_totensor.json')\n", (27490, 27543), False, 'import os\n'), ((28175, 28193), 'albumentations.HorizontalFlip', 'A.HorizontalFlip', ([], {}), '()\n', (28191, 28193), True, 'import albumentations as A\n'), ((29247, 29255), 'albumentations.Flip', 'A.Flip', ([], {}), '()\n', (29253, 29255), True, 'import albumentations as A\n'), ((29277, 29285), 'albumentations.Blur', 'A.Blur', ([], {}), '()\n', (29283, 29285), True, 'import albumentations as A\n'), ((19121, 19142), 'albumentations.HorizontalFlip', 'A.HorizontalFlip', ([], {'p': '(1)'}), '(p=1)\n', (19137, 19142), True, 'import 
albumentations as A\n'), ((19164, 19180), 'albumentations.Transpose', 'A.Transpose', ([], {'p': '(1)'}), '(p=1)\n', (19175, 19180), True, 'import albumentations as A\n'), ((19202, 19229), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(0.5)'}), '(p=0.5)\n', (19222, 19229), True, 'import albumentations as A\n'), ((19251, 19284), 'albumentations.RandomBrightnessContrast', 'A.RandomBrightnessContrast', ([], {'p': '(0.5)'}), '(p=0.5)\n', (19277, 19284), True, 'import albumentations as A\n'), ((20665, 20686), 'albumentations.HorizontalFlip', 'A.HorizontalFlip', ([], {'p': '(1)'}), '(p=1)\n', (20681, 20686), True, 'import albumentations as A\n'), ((20708, 20724), 'albumentations.Transpose', 'A.Transpose', ([], {'p': '(1)'}), '(p=1)\n', (20719, 20724), True, 'import albumentations as A\n'), ((20746, 20773), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(0.5)'}), '(p=0.5)\n', (20766, 20773), True, 'import albumentations as A\n'), ((20795, 20828), 'albumentations.RandomBrightnessContrast', 'A.RandomBrightnessContrast', ([], {'p': '(0.5)'}), '(p=0.5)\n', (20821, 20828), True, 'import albumentations as A\n'), ((18225, 18245), 'albumentations.Resize', 'A.Resize', (['(1024)', '(1024)'], {}), '(1024, 1024)\n', (18233, 18245), True, 'import albumentations as A\n'), ((18271, 18344), 'albumentations.RandomSizedCrop', 'A.RandomSizedCrop', ([], {'min_max_height': '(256, 1024)', 'height': '(512)', 'width': '(512)', 'p': '(1)'}), '(min_max_height=(256, 1024), height=512, width=512, p=1)\n', (18288, 18344), True, 'import albumentations as A\n'), ((18796, 18816), 'albumentations.Resize', 'A.Resize', (['(1024)', '(1024)'], {}), '(1024, 1024)\n', (18804, 18816), True, 'import albumentations as A\n'), ((18842, 18915), 'albumentations.RandomSizedCrop', 'A.RandomSizedCrop', ([], {'min_max_height': '(256, 1025)', 'height': '(256)', 'width': '(256)', 'p': '(1)'}), '(min_max_height=(256, 1025), height=256, width=256, p=1)\n', (18859, 18915), True, 'import albumentations as A\n'), ((20399, 20417), 'albumentations.RandomRotate90', 'A.RandomRotate90', ([], {}), '()\n', (20415, 20417), True, 'import albumentations as A\n'), ((20507, 20522), 'albumentations.Rotate', 'A.Rotate', ([], {'p': '(0.5)'}), '(p=0.5)\n', (20515, 20522), True, 'import albumentations as A\n'), ((21932, 21950), 'albumentations.RandomRotate90', 'A.RandomRotate90', ([], {}), '()\n', (21948, 21950), True, 'import albumentations as A\n'), ((22040, 22055), 'albumentations.Rotate', 'A.Rotate', ([], {'p': '(0.5)'}), '(p=0.5)\n', (22048, 22055), True, 'import albumentations as A\n'), ((22230, 22251), 'albumentations.HorizontalFlip', 'A.HorizontalFlip', ([], {'p': '(1)'}), '(p=1)\n', (22246, 22251), True, 'import albumentations as A\n'), ((22273, 22289), 'albumentations.Transpose', 'A.Transpose', ([], {'p': '(1)'}), '(p=1)\n', (22284, 22289), True, 'import albumentations as A\n'), ((22311, 22338), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(0.5)'}), '(p=0.5)\n', (22331, 22338), True, 'import albumentations as A\n'), ((22360, 22393), 'albumentations.RandomBrightnessContrast', 'A.RandomBrightnessContrast', ([], {'p': '(0.5)'}), '(p=0.5)\n', (22386, 22393), True, 'import albumentations as A\n'), ((18441, 18515), 'albumentations.RandomSizedCrop', 'A.RandomSizedCrop', ([], {'min_max_height': '(256, 512)', 'height': '(384)', 'width': '(384)', 'p': '(0.5)'}), '(min_max_height=(256, 512), height=384, width=384, p=0.5)\n', (18458, 18515), True, 'import albumentations as A\n'), ((18549, 
18623), 'albumentations.RandomSizedCrop', 'A.RandomSizedCrop', ([], {'min_max_height': '(256, 512)', 'height': '(512)', 'width': '(512)', 'p': '(0.5)'}), '(min_max_height=(256, 512), height=512, width=512, p=0.5)\n', (18566, 18623), True, 'import albumentations as A\n'), ((18950, 18977), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(0.5)'}), '(p=0.5)\n', (18970, 18977), True, 'import albumentations as A\n'), ((18979, 18996), 'albumentations.RGBShift', 'A.RGBShift', ([], {'p': '(0.7)'}), '(p=0.7)\n', (18989, 18996), True, 'import albumentations as A\n'), ((20428, 20451), 'albumentations.HorizontalFlip', 'A.HorizontalFlip', ([], {'p': '(0.5)'}), '(p=0.5)\n', (20444, 20451), True, 'import albumentations as A\n'), ((20453, 20474), 'albumentations.VerticalFlip', 'A.VerticalFlip', ([], {'p': '(0.5)'}), '(p=0.5)\n', (20467, 20474), True, 'import albumentations as A\n'), ((20533, 20560), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(0.5)'}), '(p=0.5)\n', (20553, 20560), True, 'import albumentations as A\n'), ((20562, 20579), 'albumentations.RGBShift', 'A.RGBShift', ([], {'p': '(0.7)'}), '(p=0.7)\n', (20572, 20579), True, 'import albumentations as A\n'), ((21961, 21984), 'albumentations.HorizontalFlip', 'A.HorizontalFlip', ([], {'p': '(0.5)'}), '(p=0.5)\n', (21977, 21984), True, 'import albumentations as A\n'), ((21986, 22007), 'albumentations.VerticalFlip', 'A.VerticalFlip', ([], {'p': '(0.5)'}), '(p=0.5)\n', (22000, 22007), True, 'import albumentations as A\n'), ((22066, 22093), 'albumentations.HueSaturationValue', 'A.HueSaturationValue', ([], {'p': '(0.5)'}), '(p=0.5)\n', (22086, 22093), True, 'import albumentations as A\n'), ((22095, 22112), 'albumentations.RGBShift', 'A.RGBShift', ([], {'p': '(0.7)'}), '(p=0.7)\n', (22105, 22112), True, 'import albumentations as A\n')]
|
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
import shutil # NOQA E402
import gui.config as CONFIG # NOQA E402
from gui.window import Window # NOQA E402
from datetime import datetime # NOQA E402
import gui.gui_components as GUI # NOQA E402
from utils.misc import load_image # NOQA E402
from PyQt5 import QtCore, QtWidgets, QtGui # NOQA E402
from utils.predict_image import predict_image # NOQA E402
from gui.help.simple_window import SimpleWindow # NOQA E402
from gui.help.about_models_window import AboutModelsWindow # NOQA E402
from gui.help.about_author_window import AboutAuthorWindow # NOQA E402
from gui.help.about_datasets_window import AboutDatasetsWindow # NOQA E402
from gui.further_analysis.inspect_conv_window import InspectConvWindow # NOQA E402
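
# Main window of the Histopathologic Cancer Detection GUI: image loading,
# classification, and access to the help and further-analysis windows.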
class MainWindow(Window):
def set_main_window(self, MainWindow, MAIN_CONFIG):
super().set_window(MainWindow, MAIN_CONFIG)
def create_central_widget(self, MainWindow, MAIN_CONFIG):
super().create_central_widget(MainWindow, MAIN_CONFIG)
self.load_image_button = GUI.get_button(self.centralwidget,
*MAIN_CONFIG['LOAD_IMAGE_BUTTON_POSITION'],
CONFIG.FONT,
MAIN_CONFIG['LOAD_IMAGE_BUTTON_NAME'])
self.input_image_label = GUI.get_image_label(self.centralwidget,
*MAIN_CONFIG['INPUT_IMAGE_POSITION'],
CONFIG.FONT,
True,
MAIN_CONFIG['INPUT_IMAGE_NAME'],
None)
self.breast_tissue_radio_button = GUI.get_radio_button(self.centralwidget,
*MAIN_CONFIG['BREAST_TISSUE_RADIO_BUTTON_POSITION'],
CONFIG.FONT,
MAIN_CONFIG['BREAST_TISSUE_RADIO_BUTTON_NAME'])
self.colorectal_tissue_radio_button = GUI.get_radio_button(self.centralwidget,
*MAIN_CONFIG['COLORECTAL_TISSUE_RADIO_BUTTON_POSITION'],
CONFIG.FONT,
MAIN_CONFIG['COLORECTAL_TISSUE_RADIO_BUTTON_NAME'])
self.classify_button = GUI.get_button(self.centralwidget,
*MAIN_CONFIG['CLASSIFY_BUTTON_POSITION'],
CONFIG.FONT,
MAIN_CONFIG['CLASSIFY_BUTTON_NAME'])
self.predicted_class_label = GUI.get_label(self.centralwidget,
*MAIN_CONFIG['PREDICTED_CLASS_LABEL_POSITION'],
CONFIG.FONT,
False,
MAIN_CONFIG['PREDICTED_CLASS_LABEL_NAME'])
self.class_probabilities_plot = GUI.get_image_label(self.centralwidget,
*MAIN_CONFIG['CLASS_PROBABILITIES_PLOT_POSITION'],
CONFIG.FONT,
True,
MAIN_CONFIG['CLASS_PROBABILITIES_PLOT_NAME'],
None)
self.image = None
self.model = None
self.dataset = None
self.image_path = ''
self.plot_path = None
self.heatmap_path = None
self.filter_patterns = False
self.layer_activations = False
MainWindow.setCentralWidget(self.centralwidget)
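
    # Build the menu bar (File, Further Analysis, About with nested
    # Application/Datasets/Models submenus) and all of its actions.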
def create_menu(self, MainWindow, MAIN_CONFIG):
self.menubar = GUI.get_menu_bar(MainWindow,
*MAIN_CONFIG['MENU_BAR_POSITION'],
CONFIG.FONT,
MAIN_CONFIG['MENU_BAR_NAME'])
self.menu_file = GUI.get_menu(self.menubar, MAIN_CONFIG['MENU']['FILE_NAME'])
self.menu_further_analysis = GUI.get_menu(self.menubar, MAIN_CONFIG['MENU']['FURTHER_ANALYSIS_NAME'])
self.menu_about = GUI.get_menu(self.menubar, MAIN_CONFIG['MENU']['ABOUT_NAME'])
self.menu_application = GUI.get_menu(self.menubar, MAIN_CONFIG['MENU']['APPLICATION_NAME'])
self.menu_datasets = GUI.get_menu(self.menubar, MAIN_CONFIG['MENU']['DATASETS_NAME'])
self.menu_models = GUI.get_menu(self.menubar, MAIN_CONFIG['MENU']['MODELS_NAME'])
MainWindow.setMenuBar(self.menubar)
self.action_author = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['AUTHOR_NAME'])
self.action_how_to = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['HOW_TO_NAME'])
self.action_breakhis = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['BREAK_HIS_NAME'])
self.action_nctcrche100k = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['NCT_CRC_HE_100K_NAME'])
self.action_cnnsimple = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['CNN_SIMPLE_NAME'])
self.action_vgg19simple = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['VGG19_SIMPLE_NAME'])
self.action_network_filters = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['MODELS_NAME'])
self.action_intermediate_activations = GUI.get_action(MainWindow,
MAIN_CONFIG['ACTION']['INTERMEDIATE_ACTIVATIONS_NAME'])
self.action_heatmap = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['HEATMAP_NAME'])
self.action_exit = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['EXIT_NAME'])
self.action_save = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['SAVE_NAME'])
self.action_general = GUI.get_action(MainWindow, MAIN_CONFIG['ACTION']['GENERAL_NAME'])
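
    # Attach each action to its menu and the menus to the menu bar.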
def add_actions(self):
self.menu_file.addAction(self.action_save)
self.menu_file.addSeparator()
self.menu_file.addAction(self.action_exit)
self.menu_further_analysis.addAction(self.action_intermediate_activations)
self.menu_further_analysis.addAction(self.action_heatmap)
self.menu_further_analysis.addSeparator()
self.menu_further_analysis.addAction(self.action_network_filters)
self.menu_application.addAction(self.action_general)
self.menu_application.addAction(self.action_how_to)
self.menu_application.addSeparator()
self.menu_application.addAction(self.action_author)
self.menu_datasets.addAction(self.action_breakhis)
self.menu_datasets.addAction(self.action_nctcrche100k)
self.menu_models.addAction(self.action_cnnsimple)
self.menu_models.addAction(self.action_vgg19simple)
self.menu_about.addAction(self.menu_application.menuAction())
self.menu_about.addSeparator()
self.menu_about.addAction(self.menu_datasets.menuAction())
self.menu_about.addAction(self.menu_models.menuAction())
self.menubar.addAction(self.menu_file.menuAction())
self.menubar.addAction(self.menu_further_analysis.menuAction())
self.menubar.addAction(self.menu_about.menuAction())
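
    # Set the translated, user-visible strings from MAIN_CONFIG on all widgets.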
def retranslate(self, MainWindow, MAIN_CONFIG):
super().retranslate(MainWindow, MAIN_CONFIG)
self.load_image_button.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['LOAD_IMAGE_BUTTON_TEXT']))
self.breast_tissue_radio_button.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'],
MAIN_CONFIG['BREAST_TISSUE_RADIO_BUTTON_TEXT']))
self.colorectal_tissue_radio_button.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'],
MAIN_CONFIG['COLORECTAL_TISSUE_RADIO_BUTTON_TEXT']))
self.classify_button.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['CLASSIFY_BUTTON_TEXT']))
self.menu_file.setTitle(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['MENU']['FILE_TEXT']))
self.menu_further_analysis.setTitle(self._translate(MAIN_CONFIG['WINDOW_NAME'],
MAIN_CONFIG['MENU']['FURTHER_ANALYSIS_TEXT']))
self.menu_about.setTitle(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['MENU']['ABOUT_TEXT']))
self.menu_application.setTitle(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['MENU']['APPLICATION_TEXT']))
self.menu_datasets.setTitle(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['MENU']['DATASETS_TEXT']))
self.menu_models.setTitle(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['MENU']['MODELS_TEXT']))
self.action_how_to.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['HOW_TO_TEXT']))
self.action_author.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['AUTHOR_TEXT']))
self.action_breakhis.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['BREAK_HIS_TEXT']))
self.action_nctcrche100k.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'],
MAIN_CONFIG['ACTION']['NCT_CRC_HE_100K_TEXT']))
self.action_cnnsimple.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['CNN_SIMPLE_TEXT']))
self.action_vgg19simple.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'],
MAIN_CONFIG['ACTION']['VGG19_SIMPLE_TEXT']))
self.action_network_filters.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'],
MAIN_CONFIG['ACTION']['NETWORK_FILTERS_TEXT']))
self.action_intermediate_activations.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'],
MAIN_CONFIG['ACTION']['INTERMEDIATE_ACTIVATIONS_TEXT']))
self.action_heatmap.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['HEATMAP_TEXT']))
self.action_exit.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['EXIT_TEXT']))
self.action_save.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['SAVE_TEXT']))
self.action_general.setText(self._translate(MAIN_CONFIG['WINDOW_NAME'], MAIN_CONFIG['ACTION']['GENERAL_TEXT']))
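
    # Further-analysis windows for inspecting intermediate layer activations or
    # filter patterns; only available after an image has been classified.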
def inspect_conv_window_fun(self, INSPECT_CONV_CONFIG):
self.InspectConvWindow = QtWidgets.QMainWindow()
self.inspect_conv_window = InspectConvWindow()
self.inspect_conv_window.input_image = self.image
self.inspect_conv_window.model = self.model
self.inspect_conv_window.dataset = self.dataset
self.inspect_conv_window.setup(self.InspectConvWindow, INSPECT_CONV_CONFIG)
self.InspectConvWindow.show()
def layer_activations_window_fun(self):
if self.layer_activations:
self.inspect_conv_window_fun(CONFIG.INSPECT_CONV_CONFIG['LAYER_ACTIVATIONS'])
def filter_patterns_window_fun(self):
if self.filter_patterns:
self.inspect_conv_window_fun(CONFIG.INSPECT_CONV_CONFIG['FILTER_PATTERNS'])
def heatmap_window_fun(self):
if self.heatmap_path:
np_img = load_image(self.heatmap_path)
CONFIG.SIMPLE_CONFIG['HEATMAP']['WINDOW_X'] = np_img.shape[2]
CONFIG.SIMPLE_CONFIG['HEATMAP']['WINDOW_Y'] = np_img.shape[1]
CONFIG.SIMPLE_CONFIG['HEATMAP']['SIMPLE_INFO_LABEL_POSITION'] = [0, 0, np_img.shape[2], np_img.shape[1]]
CONFIG.SIMPLE_CONFIG['HEATMAP']['SIMPLE_INFO_LABEL_IMAGE_PATH'] = self.heatmap_path
self.simple_window_fun(CONFIG.SIMPLE_CONFIG['HEATMAP'])
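
    # "About" windows for the author, the datasets, and the models.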
def about_author_window_fun(self):
self.AboutAuthorWindow = QtWidgets.QMainWindow()
self.about_author_window = AboutAuthorWindow()
self.about_author_window.setup(self.AboutAuthorWindow, CONFIG.ABOUT_AUTHOR_CONFIG)
self.AboutAuthorWindow.show()
def about_datasets_window_fun(self, DATASETS_CONFIG):
self.AboutDatasetsWindow = QtWidgets.QMainWindow()
self.about_datasets_window = AboutDatasetsWindow()
self.about_datasets_window.setup(self.AboutDatasetsWindow, DATASETS_CONFIG)
self.AboutDatasetsWindow.show()
def about_dataset_nctcrche100k_window_fun(self):
self.about_datasets_window_fun(CONFIG.ABOUT_DATASETS_CONFIG['NCT_CRC_HE_100K'])
def about_dataset_breakhis_window_fun(self):
self.about_datasets_window_fun(CONFIG.ABOUT_DATASETS_CONFIG['BREAK_HIS'])
def about_models_window_fun(self, MODELS_CONFIG):
self.AboutModelsWindow = QtWidgets.QMainWindow()
self.about_models_window = AboutModelsWindow()
self.about_models_window.setup(self.AboutModelsWindow, MODELS_CONFIG)
self.AboutModelsWindow.show()
def about_model_vgg19simple_window_fun(self):
self.about_models_window_fun(CONFIG.ABOUT_MODELS_CONFIG['VGG19_SIMPLE'])
def about_model_cnnsimple_window_fun(self):
self.about_models_window_fun(CONFIG.ABOUT_MODELS_CONFIG['CNN_SIMPLE'])
def simple_window_fun(self, SIMPLE_CONFIG):
self.SimpleWindow = QtWidgets.QMainWindow()
self.simple_window = SimpleWindow()
self.simple_window.setup(self.SimpleWindow, SIMPLE_CONFIG)
self.SimpleWindow.show()
def general_window_fun(self):
self.simple_window_fun(CONFIG.SIMPLE_CONFIG['GENERAL'])
def how_to_window_fun(self):
self.simple_window_fun(CONFIG.SIMPLE_CONFIG['HOWTO'])
def exit(self):
sys.exit()
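
    # Copy all temporary plots into a user-selected, timestamped directory.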
def save(self):
dir_name = QtWidgets.QFileDialog.getExistingDirectory(None, 'Save Images')
current_time = datetime.now()
        # '-' instead of ':' in the time part keeps the directory name valid on
        # Windows as well.
        current_time = current_time.strftime("_%d-%m-%Y_%H-%M-%S")
shutil.copytree(CONFIG.TEMPORARY_PLOTS_DIR, os.path.join(dir_name, 'Histopathologic_Cancer_Detection_' + current_time))
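
    # Wire menu actions, buttons, and mouse events to their handlers.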
def triggers(self):
self.action_network_filters.triggered.connect(self.filter_patterns_window_fun)
self.action_intermediate_activations.triggered.connect(self.layer_activations_window_fun)
self.action_heatmap.triggered.connect(self.heatmap_window_fun)
self.action_author.triggered.connect(self.about_author_window_fun)
self.action_nctcrche100k.triggered.connect(self.about_dataset_nctcrche100k_window_fun)
self.action_breakhis.triggered.connect(self.about_dataset_breakhis_window_fun)
self.action_vgg19simple.triggered.connect(self.about_model_vgg19simple_window_fun)
self.action_cnnsimple.triggered.connect(self.about_model_cnnsimple_window_fun)
self.action_general.triggered.connect(self.general_window_fun)
self.action_how_to.triggered.connect(self.how_to_window_fun)
self.action_exit.triggered.connect(self.exit)
self.action_save.triggered.connect(self.save)
self.load_image_button.clicked.connect(self.load_image_button_event)
self.classify_button.clicked.connect(self.classify_button_event)
self.input_image_label.mousePressEvent = self.input_image_clicked_event
self.class_probabilities_plot.mousePressEvent = self.class_probabilities_plot_clicked_event
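
    # Ask the user for an image file and show it in the input preview label.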
def load_image_button_event(self):
image_path = QtWidgets.QFileDialog.getOpenFileName(None, 'Select Image', '',
"Image File Types(*.jpg *.png *.tif *.tiff)")
self.image_path = image_path[0]
self.input_image_label.setPixmap(QtGui.QPixmap(self.image_path))
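
    # Run the model matching the selected tissue type, then display the
    # predicted class and the probability plot, and enable the
    # further-analysis actions for the layers of the loaded model.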
def classify_button_event(self):
if self.image_path:
if self.breast_tissue_radio_button.isChecked():
self.dataset, model_path = 'break_his', CONFIG.MAIN_CONFIG['VGG19_SIMPLE_MODEL_PATH']
            elif self.colorectal_tissue_radio_button.isChecked():
                self.dataset, model_path = 'nct_crc_he_100k', CONFIG.MAIN_CONFIG['CNN_SIMPLE_MODEL_PATH']
            else:
                # Neither radio button is checked: without this guard, model_path
                # would be undefined and predict_image would raise a NameError.
                return
self.model, self.image, self.image_class, self.plot_path, self.layers = predict_image(self.image_path,
self.dataset,
model_path)
self.predicted_class_label.setText(self._translate(CONFIG.MAIN_CONFIG['WINDOW_NAME'],
self.image_class.replace('_', ' ').title()))
self.class_probabilities_plot.setPixmap(QtGui.QPixmap(self.plot_path))
self.heatmap_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'temporary_plots', 'heatmap.jpg')
self.layer_activations = True
self.filter_patterns = True
CONFIG.INSPECT_CONV_CONFIG['LAYER_ACTIVATIONS']['COMBO_BOX_ITEMS'] = []
CONFIG.INSPECT_CONV_CONFIG['FILTER_PATTERNS']['COMBO_BOX_ITEMS'] = []
for layer in self.layers:
if layer.find('conv') >= 0 or layer.find('pool') >= 0:
CONFIG.INSPECT_CONV_CONFIG['LAYER_ACTIVATIONS']['COMBO_BOX_ITEMS'].append(layer)
if layer.find('conv') >= 0:
CONFIG.INSPECT_CONV_CONFIG['FILTER_PATTERNS']['COMBO_BOX_ITEMS'].append(layer)
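
    # Clicking the input image or the probability plot opens it full-size.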
def input_image_clicked_event(self, event):
if self.image_path:
self.image_clicked_event(self.image_path)
def class_probabilities_plot_clicked_event(self, event):
if self.plot_path:
self.image_clicked_event(self.plot_path)
def image_clicked_event(self, image_path):
np_img = load_image(image_path)
CONFIG.SIMPLE_CONFIG['IMAGE']['WINDOW_X'] = np_img.shape[2]
CONFIG.SIMPLE_CONFIG['IMAGE']['WINDOW_Y'] = np_img.shape[1]
CONFIG.SIMPLE_CONFIG['IMAGE']['SIMPLE_INFO_LABEL_POSITION'] = [0, 0, np_img.shape[2], np_img.shape[1]]
CONFIG.SIMPLE_CONFIG['IMAGE']['SIMPLE_INFO_LABEL_IMAGE_PATH'] = image_path
self.simple_window_fun(CONFIG.SIMPLE_CONFIG['IMAGE'])
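
    # Assemble the full window: widgets, menus, actions, translations, signals.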
def setup(self, MainWindow, MAIN_CONFIG):
self.set_main_window(MainWindow, MAIN_CONFIG)
self.create_central_widget(MainWindow, MAIN_CONFIG)
self.create_menu(MainWindow, MAIN_CONFIG)
self.add_actions()
self.retranslate(MainWindow, MAIN_CONFIG)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
self.triggers()
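
# Build the QApplication and main window, then enter the Qt event loop.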
def main():
app = QtWidgets.QApplication(sys.argv)
HistopathologicCancerDetection = QtWidgets.QMainWindow()
main_window = MainWindow()
main_window.setup(HistopathologicCancerDetection, CONFIG.MAIN_CONFIG)
HistopathologicCancerDetection.show()
sys.exit(app.exec_())
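
# Start from a clean temporary plot directory so plots from a previous run
# are never displayed.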
if __name__ == "__main__":
if os.path.exists(CONFIG.TEMPORARY_PLOTS_DIR):
shutil.rmtree(CONFIG.TEMPORARY_PLOTS_DIR)
os.mkdir(CONFIG.TEMPORARY_PLOTS_DIR)
main()
|
[
"os.mkdir",
"utils.predict_image.predict_image",
"gui.gui_components.get_button",
"gui.gui_components.get_image_label",
"PyQt5.QtWidgets.QFileDialog.getOpenFileName",
"PyQt5.QtWidgets.QApplication",
"shutil.rmtree",
"os.path.join",
"os.path.abspath",
"utils.misc.load_image",
"os.path.exists",
"gui.help.about_datasets_window.AboutDatasetsWindow",
"gui.gui_components.get_label",
"gui.help.simple_window.SimpleWindow",
"gui.gui_components.get_menu",
"datetime.datetime.now",
"PyQt5.QtWidgets.QFileDialog.getExistingDirectory",
"PyQt5.QtWidgets.QMainWindow",
"gui.gui_components.get_menu_bar",
"gui.help.about_author_window.AboutAuthorWindow",
"PyQt5.QtGui.QPixmap",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"sys.exit",
"gui.gui_components.get_radio_button",
"gui.help.about_models_window.AboutModelsWindow",
"gui.further_analysis.inspect_conv_window.InspectConvWindow",
"gui.gui_components.get_action"
] |
[((19461, 19493), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (19483, 19493), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((19531, 19554), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (19552, 19554), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((19764, 19806), 'os.path.exists', 'os.path.exists', (['CONFIG.TEMPORARY_PLOTS_DIR'], {}), '(CONFIG.TEMPORARY_PLOTS_DIR)\n', (19778, 19806), False, 'import os\n'), ((19862, 19898), 'os.mkdir', 'os.mkdir', (['CONFIG.TEMPORARY_PLOTS_DIR'], {}), '(CONFIG.TEMPORARY_PLOTS_DIR)\n', (19870, 19898), False, 'import os\n'), ((1572, 1712), 'gui.gui_components.get_button', 'GUI.get_button', (['self.centralwidget', "*MAIN_CONFIG['LOAD_IMAGE_BUTTON_POSITION']", 'CONFIG.FONT', "MAIN_CONFIG['LOAD_IMAGE_BUTTON_NAME']"], {}), "(self.centralwidget, *MAIN_CONFIG[\n 'LOAD_IMAGE_BUTTON_POSITION'], CONFIG.FONT, MAIN_CONFIG[\n 'LOAD_IMAGE_BUTTON_NAME'])\n", (1586, 1712), True, 'import gui.gui_components as GUI\n'), ((1880, 2020), 'gui.gui_components.get_image_label', 'GUI.get_image_label', (['self.centralwidget', "*MAIN_CONFIG['INPUT_IMAGE_POSITION']", 'CONFIG.FONT', '(True)', "MAIN_CONFIG['INPUT_IMAGE_NAME']", 'None'], {}), "(self.centralwidget, *MAIN_CONFIG['INPUT_IMAGE_POSITION'\n ], CONFIG.FONT, True, MAIN_CONFIG['INPUT_IMAGE_NAME'], None)\n", (1899, 2020), True, 'import gui.gui_components as GUI\n'), ((2323, 2487), 'gui.gui_components.get_radio_button', 'GUI.get_radio_button', (['self.centralwidget', "*MAIN_CONFIG['BREAST_TISSUE_RADIO_BUTTON_POSITION']", 'CONFIG.FONT', "MAIN_CONFIG['BREAST_TISSUE_RADIO_BUTTON_NAME']"], {}), "(self.centralwidget, *MAIN_CONFIG[\n 'BREAST_TISSUE_RADIO_BUTTON_POSITION'], CONFIG.FONT, MAIN_CONFIG[\n 'BREAST_TISSUE_RADIO_BUTTON_NAME'])\n", (2343, 2487), True, 'import gui.gui_components as GUI\n'), ((2713, 2885), 'gui.gui_components.get_radio_button', 'GUI.get_radio_button', (['self.centralwidget', "*MAIN_CONFIG['COLORECTAL_TISSUE_RADIO_BUTTON_POSITION']", 'CONFIG.FONT', "MAIN_CONFIG['COLORECTAL_TISSUE_RADIO_BUTTON_NAME']"], {}), "(self.centralwidget, *MAIN_CONFIG[\n 'COLORECTAL_TISSUE_RADIO_BUTTON_POSITION'], CONFIG.FONT, MAIN_CONFIG[\n 'COLORECTAL_TISSUE_RADIO_BUTTON_NAME'])\n", (2733, 2885), True, 'import gui.gui_components as GUI\n'), ((3108, 3238), 'gui.gui_components.get_button', 'GUI.get_button', (['self.centralwidget', "*MAIN_CONFIG['CLASSIFY_BUTTON_POSITION']", 'CONFIG.FONT', "MAIN_CONFIG['CLASSIFY_BUTTON_NAME']"], {}), "(self.centralwidget, *MAIN_CONFIG['CLASSIFY_BUTTON_POSITION'],\n CONFIG.FONT, MAIN_CONFIG['CLASSIFY_BUTTON_NAME'])\n", (3122, 3238), True, 'import gui.gui_components as GUI\n'), ((3410, 3564), 'gui.gui_components.get_label', 'GUI.get_label', (['self.centralwidget', "*MAIN_CONFIG['PREDICTED_CLASS_LABEL_POSITION']", 'CONFIG.FONT', '(False)', "MAIN_CONFIG['PREDICTED_CLASS_LABEL_NAME']"], {}), "(self.centralwidget, *MAIN_CONFIG[\n 'PREDICTED_CLASS_LABEL_POSITION'], CONFIG.FONT, False, MAIN_CONFIG[\n 'PREDICTED_CLASS_LABEL_NAME'])\n", (3423, 3564), True, 'import gui.gui_components as GUI\n'), ((3799, 3970), 'gui.gui_components.get_image_label', 'GUI.get_image_label', (['self.centralwidget', "*MAIN_CONFIG['CLASS_PROBABILITIES_PLOT_POSITION']", 'CONFIG.FONT', '(True)', "MAIN_CONFIG['CLASS_PROBABILITIES_PLOT_NAME']", 'None'], {}), "(self.centralwidget, *MAIN_CONFIG[\n 'CLASS_PROBABILITIES_PLOT_POSITION'], CONFIG.FONT, True, MAIN_CONFIG[\n 'CLASS_PROBABILITIES_PLOT_NAME'], None)\n", (3818, 3970), True, 'import gui.gui_components as GUI\n'), ((4641, 4751), 'gui.gui_components.get_menu_bar', 'GUI.get_menu_bar', (['MainWindow', "*MAIN_CONFIG['MENU_BAR_POSITION']", 'CONFIG.FONT', "MAIN_CONFIG['MENU_BAR_NAME']"], {}), "(MainWindow, *MAIN_CONFIG['MENU_BAR_POSITION'], CONFIG.FONT,\n MAIN_CONFIG['MENU_BAR_NAME'])\n", (4657, 4751), True, 'import gui.gui_components as GUI\n'), ((4893, 4953), 'gui.gui_components.get_menu', 'GUI.get_menu', (['self.menubar', "MAIN_CONFIG['MENU']['FILE_NAME']"], {}), "(self.menubar, MAIN_CONFIG['MENU']['FILE_NAME'])\n", (4905, 4953), True, 'import gui.gui_components as GUI\n'), ((4991, 5063), 'gui.gui_components.get_menu', 'GUI.get_menu', (['self.menubar', "MAIN_CONFIG['MENU']['FURTHER_ANALYSIS_NAME']"], {}), "(self.menubar, MAIN_CONFIG['MENU']['FURTHER_ANALYSIS_NAME'])\n", (5003, 5063), True, 'import gui.gui_components as GUI\n'), ((5090, 5151), 'gui.gui_components.get_menu', 'GUI.get_menu', (['self.menubar', "MAIN_CONFIG['MENU']['ABOUT_NAME']"], {}), "(self.menubar, MAIN_CONFIG['MENU']['ABOUT_NAME'])\n", (5102, 5151), True, 'import gui.gui_components as GUI\n'), ((5184, 5251), 'gui.gui_components.get_menu', 'GUI.get_menu', (['self.menubar', "MAIN_CONFIG['MENU']['APPLICATION_NAME']"], {}), "(self.menubar, MAIN_CONFIG['MENU']['APPLICATION_NAME'])\n", (5196, 5251), True, 'import gui.gui_components as GUI\n'), ((5281, 5345), 'gui.gui_components.get_menu', 'GUI.get_menu', (['self.menubar', "MAIN_CONFIG['MENU']['DATASETS_NAME']"], {}), "(self.menubar, MAIN_CONFIG['MENU']['DATASETS_NAME'])\n", (5293, 5345), True, 'import gui.gui_components as GUI\n'), ((5373, 5435), 'gui.gui_components.get_menu', 'GUI.get_menu', (['self.menubar', "MAIN_CONFIG['MENU']['MODELS_NAME']"], {}), "(self.menubar, MAIN_CONFIG['MENU']['MODELS_NAME'])\n", (5385, 5435), True, 'import gui.gui_components as GUI\n'), ((5511, 5575), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['AUTHOR_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['AUTHOR_NAME'])\n", (5525, 5575), True, 'import gui.gui_components as GUI\n'), ((5605, 5669), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['HOW_TO_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['HOW_TO_NAME'])\n", (5619, 5669), True, 'import gui.gui_components as GUI\n'), ((5701, 5768), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['BREAK_HIS_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['BREAK_HIS_NAME'])\n", (5715, 5768), True, 'import gui.gui_components as GUI\n'), ((5804, 5877), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['NCT_CRC_HE_100K_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['NCT_CRC_HE_100K_NAME'])\n", (5818, 5877), True, 'import gui.gui_components as GUI\n'), ((5910, 5978), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['CNN_SIMPLE_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['CNN_SIMPLE_NAME'])\n", (5924, 5978), True, 'import gui.gui_components as GUI\n'), ((6013, 6083), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['VGG19_SIMPLE_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['VGG19_SIMPLE_NAME'])\n", (6027, 6083), True, 'import gui.gui_components as GUI\n'), ((6122, 6186), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['MODELS_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['MODELS_NAME'])\n", (6136, 6186), True, 'import gui.gui_components as GUI\n'), ((6234, 6321), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['INTERMEDIATE_ACTIVATIONS_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION'][\n 'INTERMEDIATE_ACTIVATIONS_NAME'])\n", (6248, 6321), True, 'import gui.gui_components as GUI\n'), ((6409, 6474), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['HEATMAP_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['HEATMAP_NAME'])\n", (6423, 6474), True, 'import gui.gui_components as GUI\n'), ((6502, 6564), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['EXIT_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['EXIT_NAME'])\n", (6516, 6564), True, 'import gui.gui_components as GUI\n'), ((6592, 6654), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['SAVE_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['SAVE_NAME'])\n", (6606, 6654), True, 'import gui.gui_components as GUI\n'), ((6685, 6750), 'gui.gui_components.get_action', 'GUI.get_action', (['MainWindow', "MAIN_CONFIG['ACTION']['GENERAL_NAME']"], {}), "(MainWindow, MAIN_CONFIG['ACTION']['GENERAL_NAME'])\n", (6699, 6750), True, 'import gui.gui_components as GUI\n'), ((11480, 11503), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (11501, 11503), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((11539, 11558), 'gui.further_analysis.inspect_conv_window.InspectConvWindow', 'InspectConvWindow', ([], {}), '()\n', (11556, 11558), False, 'from gui.further_analysis.inspect_conv_window import InspectConvWindow\n'), ((12799, 12822), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (12820, 12822), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((12858, 12877), 'gui.help.about_author_window.AboutAuthorWindow', 'AboutAuthorWindow', ([], {}), '()\n', (12875, 12877), False, 'from gui.help.about_author_window import AboutAuthorWindow\n'), ((13101, 13124), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (13122, 13124), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((13162, 13183), 'gui.help.about_datasets_window.AboutDatasetsWindow', 'AboutDatasetsWindow', ([], {}), '()\n', (13181, 13183), False, 'from gui.help.about_datasets_window import AboutDatasetsWindow\n'), ((13670, 13693), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (13691, 13693), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((13729, 13748), 'gui.help.about_models_window.AboutModelsWindow', 'AboutModelsWindow', ([], {}), '()\n', (13746, 13748), False, 'from gui.help.about_models_window import AboutModelsWindow\n'), ((14202, 14225), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (14223, 14225), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((14255, 14269), 'gui.help.simple_window.SimpleWindow', 'SimpleWindow', ([], {}), '()\n', (14267, 14269), False, 'from gui.help.simple_window import SimpleWindow\n'), ((14594, 14604), 'sys.exit', 'sys.exit', ([], {}), '()\n', (14602, 14604), False, 'import sys\n'), ((14645, 14708), 'PyQt5.QtWidgets.QFileDialog.getExistingDirectory', 'QtWidgets.QFileDialog.getExistingDirectory', (['None', '"""Save Images"""'], {}), "(None, 'Save Images')\n", (14687, 14708), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((14732, 14746), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14744, 14746), False, 'from datetime import datetime\n'), ((16298, 16411), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QtWidgets.QFileDialog.getOpenFileName', (['None', '"""Select Image"""', '""""""', '"""Image File Types(*.jpg *.png *.tif *.tiff)"""'], {}), "(None, 'Select Image', '',\n 'Image File Types(*.jpg *.png *.tif *.tiff)')\n", (16335, 16411), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((18652, 18674), 'utils.misc.load_image', 'load_image', (['image_path'], {}), '(image_path)\n', (18662, 18674), False, 'from utils.misc import load_image\n'), ((19363, 19412), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (19400, 19412), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((19816, 19857), 'shutil.rmtree', 'shutil.rmtree', (['CONFIG.TEMPORARY_PLOTS_DIR'], {}), '(CONFIG.TEMPORARY_PLOTS_DIR)\n', (19829, 19857), False, 'import shutil\n'), ((70, 95), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (85, 95), False, 'import os\n'), ((12267, 12296), 'utils.misc.load_image', 'load_image', (['self.heatmap_path'], {}), '(self.heatmap_path)\n', (12277, 12296), False, 'from utils.misc import load_image\n'), ((14866, 14940), 'os.path.join', 'os.path.join', (['dir_name', "('Histopathologic_Cancer_Detection_' + current_time)"], {}), "(dir_name, 'Histopathologic_Cancer_Detection_' + current_time)\n", (14878, 14940), False, 'import os\n'), ((16548, 16578), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['self.image_path'], {}), '(self.image_path)\n', (16561, 16578), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((17064, 17120), 'utils.predict_image.predict_image', 'predict_image', (['self.image_path', 'self.dataset', 'model_path'], {}), '(self.image_path, self.dataset, model_path)\n', (17077, 17120), False, 'from utils.predict_image import predict_image\n'), ((17559, 17588), 'PyQt5.QtGui.QPixmap', 'QtGui.QPixmap', (['self.plot_path'], {}), '(self.plot_path)\n', (17572, 17588), False, 'from PyQt5 import QtCore, QtWidgets, QtGui\n'), ((17651, 17676), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (17666, 17676), False, 'import os\n')]
|
# Natural Language Toolkit: Language Model Unit Tests
#
# Copyright (C) 2001-2020 NLTK Project
# Author: <NAME> <<EMAIL>>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
import unittest
from nltk.lm.preprocessing import padded_everygram_pipeline
class TestPreprocessing(unittest.TestCase):
def test_padded_everygram_pipeline(self):
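        # padded_everygram_pipeline(2, text) pads each sentence with <s>/</s>
        # and yields every 1- and 2-gram of it, plus a flat stream of the padded
        # tokens for building the vocabulary.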
expected_train = [
[
("<s>",),
("<s>", "a"),
("a",),
("a", "b"),
("b",),
("b", "c"),
("c",),
("c", "</s>"),
("</s>",)
]
]
expected_vocab = ["<s>", "a", "b", "c", "</s>"]
train_data, vocab_data = padded_everygram_pipeline(2, [["a", "b", "c"]])
self.assertEqual([list(sent) for sent in train_data], expected_train)
self.assertEqual(list(vocab_data), expected_vocab)
|
[
"nltk.lm.preprocessing.padded_everygram_pipeline"
] |
[((756, 803), 'nltk.lm.preprocessing.padded_everygram_pipeline', 'padded_everygram_pipeline', (['(2)', "[['a', 'b', 'c']]"], {}), "(2, [['a', 'b', 'c']])\n", (781, 803), False, 'from nltk.lm.preprocessing import padded_everygram_pipeline\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from layers import *
from data import voc, coco
import os
class SSD(nn.Module):
"""Single Shot Multibox Architecture
The network is composed of a base VGG network followed by the
added multibox conv layers. Each multibox layer branches into
1) conv2d for class conf scores
2) conv2d for localization predictions
3) associated priorbox layer to produce default bounding
boxes specific to the layer's feature map size.
See: https://arxiv.org/pdf/1512.02325.pdf for more details.
Args:
phase: (string) Can be "test" or "train"
size: input image size
base: VGG16 layers for input, size of either 300 or 500
extras: extra layers that feed to multibox loc and conf layers
head: "multibox head" consists of loc and conf conv layers
"""
def __init__(self, phase, size, base, extras, head, num_classes):
super(SSD, self).__init__()
self.phase = phase
self.num_classes = num_classes
self.cfg = (coco, voc)[num_classes == 21]
# print('cfg line 33:',self.cfg)
self.priorbox = PriorBox(self.cfg)
        # Generate the default boxes for every feature layer: 38*38*4 + 19*19*6 + ... = 8732 boxes in total
self.priors = Variable(self.priorbox.forward(), volatile=True)
self.size = size
# SSD network
self.vgg = nn.ModuleList(base)
# Layer learns to scale the l2 normalized features from conv4_3
self.L2Norm = L2Norm(512, 20)
self.extras = nn.ModuleList(extras)
self.loc = nn.ModuleList(head[0])
self.conf = nn.ModuleList(head[1])
if phase == 'test':
self.softmax = nn.Softmax(dim=-1)
self.detect = Detect(num_classes, 0, 200, 0.01, 0.45)
def forward(self, x):
"""Applies network layers and ops on input image(s) x.
Args:
x: input image or batch of images. Shape: [batch,3,300,300].
Return:
Depending on phase:
test:
Variable(tensor) of output class label predictions,
confidence score, and corresponding location predictions for
each object detected. Shape: [batch,topk,7]
train:
list of concat outputs from:
1: confidence layers, Shape: [batch*num_priors,num_classes]
2: localization layers, Shape: [batch,num_priors*4]
3: priorbox layers, Shape: [2,num_priors*4]
"""
sources = list()
loc = list()
conf = list()
# apply vgg up to conv4_3 relu
for k in range(23):
x = self.vgg[k](x)
s = self.L2Norm(x)
sources.append(s)
# apply vgg up to fc7
for k in range(23, len(self.vgg)):
x = self.vgg[k](x)
sources.append(x)
# apply extra layers and cache source layer outputs
for k, v in enumerate(self.extras):
x = F.relu(v(x), inplace=True)
if k % 2 == 1:
sources.append(x)
# apply multibox head to source layers
for (x, l, c) in zip(sources, self.loc, self.conf):
loc.append(l(x).permute(0, 2, 3, 1).contiguous())
conf.append(c(x).permute(0, 2, 3, 1).contiguous())
loc = torch.cat([o.view(o.size(0), -1) for o in loc], 1)
conf = torch.cat([o.view(o.size(0), -1) for o in conf], 1)
if self.phase == "test":
output = self.detect(
loc.view(loc.size(0), -1, 4), # loc preds
self.softmax(conf.view(conf.size(0), -1,
self.num_classes)), # conf preds
self.priors.type(type(x.data)) # default boxes
)
else:
output = (
loc.view(loc.size(0), -1, 4),
conf.view(conf.size(0), -1, self.num_classes),
self.priors
)
return output
def load_weights(self, base_file):
other, ext = os.path.splitext(base_file)
        if ext == '.pkl' or ext == '.pth':
print('Loading weights into state dict...')
self.load_state_dict(torch.load(base_file,
map_location=lambda storage, loc: storage))
print('Finished!')
else:
print('Sorry only .pth and .pkl files supported.')
# This function is derived from torchvision VGG make_layers()
# https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py
def vgg(cfg, i, batch_norm=False):
layers = []
in_channels = i
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
elif v == 'C':
layers += [nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True)]
else:
conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
if batch_norm:
layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
else:
layers += [conv2d, nn.ReLU(inplace=True)]
in_channels = v
pool5 = nn.MaxPool2d(kernel_size=3, stride=1, padding=1)
conv6 = nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6)
conv7 = nn.Conv2d(1024, 1024, kernel_size=1)
layers += [pool5, conv6,
nn.ReLU(inplace=True), conv7, nn.ReLU(inplace=True)]
# print("vgg layers:",layers)
return layers
def add_extras(cfg, i, batch_norm=False):
# Extra layers added to VGG for feature scaling
layers = []
in_channels = i
flag = False
for k, v in enumerate(cfg):
if in_channels != 'S':
if v == 'S':
layers += [nn.Conv2d(in_channels, cfg[k + 1],
kernel_size=(1, 3)[flag], stride=2, padding=1)]
else:
layers += [nn.Conv2d(in_channels, v, kernel_size=(1, 3)[flag])]
flag = not flag
in_channels = v
# print("add_extras:",layers)
return layers
# cfg: number of default boxes per feature map location [4, 6, 6, 6, 4, 4]
def multibox(vgg, extra_layers, cfg, num_classes):
# print('cfg:',cfg)
# print("num_calsses:",num_classes)
# print("vgg len:",len(vgg))
# print("vgg 21:",vgg[21])
# print("vgg -2:",vgg[-2])
loc_layers = []
conf_layers = []
vgg_source = [21, -2]
for k, v in enumerate(vgg_source):
        loc_layers += [nn.Conv2d(vgg[v].out_channels, # 4*k values: the 4 offsets relative to each default bounding box
                                 cfg[k] * 4, kernel_size=3, padding=1)]
        conf_layers += [nn.Conv2d(vgg[v].out_channels, # C*k class confidence scores
                                 cfg[k] * num_classes, kernel_size=3, padding=1)]
# print("loc_layers:",loc_layers)
# print("con_layers:",conf_layers)
for k, v in enumerate(extra_layers[1::2], 2):
# print('k:',k)
# print('v:',v)
        loc_layers += [nn.Conv2d(v.out_channels, cfg[k] # 3x3 convs over the feature maps produce the box offsets and class scores
* 4, kernel_size=3, padding=1)]
conf_layers += [nn.Conv2d(v.out_channels, cfg[k]
* num_classes, kernel_size=3, padding=1)]
# print('loc_layers:',loc_layers)
# print('conf_layers:',conf_layers)
return vgg, extra_layers, (loc_layers, conf_layers)
base = {
'300': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M',
512, 512, 512],
'512': [],
}
extras = {
'300': [256, 'S', 512, 128, 'S', 256, 128, 256, 128, 256],
'512': [],
}
mbox = {
'300': [4, 6, 6, 6, 4, 4], # number of boxes per feature map location
'512': [],
}
def build_ssd(phase, size=300, num_classes=21):
if phase != "test" and phase != "train":
print("ERROR: Phase: " + phase + " not recognized")
return
if size != 300:
print("ERROR: You specified size " + repr(size) + ". However, " +
"currently only SSD300 (size=300) is supported!")
return
print("build_sgd:",num_classes)
base_, extras_, head_ = multibox(vgg(base[str(size)], 3),
add_extras(extras[str(size)], 1024),
mbox[str(size)], num_classes)
return SSD(phase, size, base_, extras_, head_, num_classes)
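if __name__ == '__main__':
    # Usage sketch (not part of the original repo): build a train-phase SSD300
    # for the 21 VOC classes and push a dummy batch through it. Per the
    # forward() docstring, train mode returns (loc, conf, priors) shaped
    # [1, 8732, 4], [1, 8732, 21] and [8732, 4].
    net = build_ssd('train', size=300, num_classes=21)
    dummy = torch.randn(1, 3, 300, 300)
    loc_preds, conf_preds, priors = net(dummy)
    print(loc_preds.shape, conf_preds.shape, priors.shape)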
|
[
"torch.nn.ReLU",
"torch.nn.ModuleList",
"torch.load",
"torch.nn.Conv2d",
"torch.nn.BatchNorm2d",
"torch.nn.Softmax",
"os.path.splitext",
"torch.nn.MaxPool2d"
] |
[((5234, 5282), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)'}), '(kernel_size=3, stride=1, padding=1)\n', (5246, 5282), True, 'import torch.nn as nn\n'), ((5295, 5353), 'torch.nn.Conv2d', 'nn.Conv2d', (['(512)', '(1024)'], {'kernel_size': '(3)', 'padding': '(6)', 'dilation': '(6)'}), '(512, 1024, kernel_size=3, padding=6, dilation=6)\n', (5304, 5353), True, 'import torch.nn as nn\n'), ((5366, 5402), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1024)', '(1024)'], {'kernel_size': '(1)'}), '(1024, 1024, kernel_size=1)\n', (5375, 5402), True, 'import torch.nn as nn\n'), ((1443, 1462), 'torch.nn.ModuleList', 'nn.ModuleList', (['base'], {}), '(base)\n', (1456, 1462), True, 'import torch.nn as nn\n'), ((1595, 1616), 'torch.nn.ModuleList', 'nn.ModuleList', (['extras'], {}), '(extras)\n', (1608, 1616), True, 'import torch.nn as nn\n'), ((1637, 1659), 'torch.nn.ModuleList', 'nn.ModuleList', (['head[0]'], {}), '(head[0])\n', (1650, 1659), True, 'import torch.nn as nn\n'), ((1680, 1702), 'torch.nn.ModuleList', 'nn.ModuleList', (['head[1]'], {}), '(head[1])\n', (1693, 1702), True, 'import torch.nn as nn\n'), ((4156, 4183), 'os.path.splitext', 'os.path.splitext', (['base_file'], {}), '(base_file)\n', (4172, 4183), False, 'import os\n'), ((5447, 5468), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5454, 5468), True, 'import torch.nn as nn\n'), ((5477, 5498), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5484, 5498), True, 'import torch.nn as nn\n'), ((1759, 1777), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(-1)'}), '(dim=-1)\n', (1769, 1777), True, 'import torch.nn as nn\n'), ((6505, 6573), 'torch.nn.Conv2d', 'nn.Conv2d', (['vgg[v].out_channels', '(cfg[k] * 4)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(vgg[v].out_channels, cfg[k] * 4, kernel_size=3, padding=1)\n', (6514, 6573), True, 'import torch.nn as nn\n'), ((6671, 6749), 'torch.nn.Conv2d', 'nn.Conv2d', (['vgg[v].out_channels', '(cfg[k] * num_classes)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(vgg[v].out_channels, cfg[k] * num_classes, kernel_size=3, padding=1)\n', (6680, 6749), True, 'import torch.nn as nn\n'), ((6995, 7058), 'torch.nn.Conv2d', 'nn.Conv2d', (['v.out_channels', '(cfg[k] * 4)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(v.out_channels, cfg[k] * 4, kernel_size=3, padding=1)\n', (7004, 7058), True, 'import torch.nn as nn\n'), ((7153, 7226), 'torch.nn.Conv2d', 'nn.Conv2d', (['v.out_channels', '(cfg[k] * num_classes)'], {'kernel_size': '(3)', 'padding': '(1)'}), '(v.out_channels, cfg[k] * num_classes, kernel_size=3, padding=1)\n', (7162, 7226), True, 'import torch.nn as nn\n'), ((4309, 4373), 'torch.load', 'torch.load', (['base_file'], {'map_location': '(lambda storage, loc: storage)'}), '(base_file, map_location=lambda storage, loc: storage)\n', (4319, 4373), False, 'import torch\n'), ((4787, 4824), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)'}), '(kernel_size=2, stride=2)\n', (4799, 4824), True, 'import torch.nn as nn\n'), ((4962, 5013), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'v'], {'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels, v, kernel_size=3, padding=1)\n', (4971, 5013), True, 'import torch.nn as nn\n'), ((4872, 4925), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', ([], {'kernel_size': '(2)', 'stride': '(2)', 'ceil_mode': '(True)'}), '(kernel_size=2, stride=2, ceil_mode=True)\n', (4884, 4925), True, 'import torch.nn as nn\n'), ((5817, 5902), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'cfg[k + 1]'], {'kernel_size': '(1, 3)[flag]', 'stride': '(2)', 'padding': '(1)'}), '(in_channels, cfg[k + 1], kernel_size=(1, 3)[flag], stride=2,\n padding=1)\n', (5826, 5902), True, 'import torch.nn as nn\n'), ((5972, 6023), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'v'], {'kernel_size': '(1, 3)[flag]'}), '(in_channels, v, kernel_size=(1, 3)[flag])\n', (5981, 6023), True, 'import torch.nn as nn\n'), ((5076, 5093), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['v'], {}), '(v)\n', (5090, 5093), True, 'import torch.nn as nn\n'), ((5095, 5116), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5102, 5116), True, 'import torch.nn as nn\n'), ((5171, 5192), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (5178, 5192), True, 'import torch.nn as nn\n')]
|
# coding: utf-8
import appex
from markdown2 import markdown
import ui
TEMPLATE = '''
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<title>Preview</title>
<style type="text/css">
body {
font-family: helvetica;
font-size: 15px;
margin: 10px;
}
</style>
</head>
<body>{{CONTENT}}</body>
</html>
'''
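# For reference, markdown() turns Markdown source into an HTML fragment, e.g.
# markdown('# Title') yields roughly '<h1>Title</h1>\n' (exact output may vary
# between markdown2 versions).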
def main():
text = appex.get_text()
if not text:
print('No input text found. Use this script from the share sheet in an app like Notes.')
return
converted = markdown(text)
html = TEMPLATE.replace('{{CONTENT}}', converted)
webview = ui.WebView(name='Markdown Preview')
webview.load_html(html)
webview.present()
if __name__ == '__main__':
main()
|
[
"markdown2.markdown",
"ui.WebView",
"appex.get_text"
] |
[((380, 396), 'appex.get_text', 'appex.get_text', ([], {}), '()\n', (394, 396), False, 'import appex\n'), ((524, 538), 'markdown2.markdown', 'markdown', (['text'], {}), '(text)\n', (532, 538), False, 'from markdown2 import markdown\n'), ((601, 636), 'ui.WebView', 'ui.WebView', ([], {'name': '"""Markdown Preview"""'}), "(name='Markdown Preview')\n", (611, 636), False, 'import ui\n')]
|
#!/usr/bin/env python3
"""
usage: paste.py [-h] [FILE ...]
copy each file into place as a column
positional arguments:
FILE a file to copy (default: stdin)
options:
-h, --help show this help message and exit
quirks:
no implementation
unsurprising quirks:
prompts for stdin, like mac bash "grep -R .", unlike bash "paste -"
accepts the "stty -a" line-editing c0-control's, not also the "bind -p" c0-control's
takes no args as meaning file stdin, like linux "paste", unlike mac "paste"
examples:
Oh no! No examples disclosed!! 💥 💔 💥
"""
# FIXME: left-justify, right-justify, center, with and without sponging
# FIXME: options to add column names and separators
# FIXME: options to take column names as a first row or as a column
import sys
import argdoc
def main():
args = argdoc.parse_args()
sys.stderr.write("{}\n".format(args))
sys.stderr.write("{}\n".format(argdoc.format_usage().rstrip()))
    sys.stderr.write("paste.py: error: not implemented\n")
sys.exit(2) # exit 2 from rejecting usage
if __name__ == "__main__":
main()
# copied from: git clone https://github.com/pelavarre/pybashish.git
|
[
"argdoc.format_usage",
"sys.stderr.write",
"sys.exit",
"argdoc.parse_args"
] |
[((810, 829), 'argdoc.parse_args', 'argdoc.parse_args', ([], {}), '()\n', (827, 829), False, 'import argdoc\n'), ((944, 995), 'sys.stderr.write', 'sys.stderr.write', (['"""paste.py: error: not implemented\n"""'], {}), "('paste.py: error: not implemented\\n')\n", (960, 995), False, 'import sys\n'), ((1000, 1011), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1008, 1011), False, 'import sys\n'), ((907, 928), 'argdoc.format_usage', 'argdoc.format_usage', ([], {}), '()\n', (926, 928), False, 'import argdoc\n')]
|
import json
from collections import OrderedDict
from django.conf import settings
from django.forms import Media, widgets
from django.utils.module_loading import import_string
from wagtail.utils.widgets import WidgetWithScript
from wagtail.admin.edit_handlers import RichTextFieldPanel
from wagtail.admin.rich_text.converters.editor_html import EditorHTMLConverter
from wagtail.core.rich_text import features
class HalloPlugin:
def __init__(self, **kwargs):
self.name = kwargs.get('name', None)
self.options = kwargs.get('options', {})
self.js = kwargs.get('js', None)
self.css = kwargs.get('css', None)
self.order = kwargs.get('order', 100)
def construct_plugins_list(self, plugins):
if self.name is not None:
plugins[self.name] = self.options
@property
def media(self):
return Media(js=self.js, css=self.css)
class HalloFormatPlugin(HalloPlugin):
def __init__(self, **kwargs):
kwargs.setdefault('name', 'halloformat')
kwargs.setdefault('order', 10)
self.format_name = kwargs['format_name']
super().__init__(**kwargs)
def construct_plugins_list(self, plugins):
plugins.setdefault(self.name, {'formattings': {
'bold': False, 'italic': False, 'strikeThrough': False, 'underline': False
}})
plugins[self.name]['formattings'][self.format_name] = True
class HalloHeadingPlugin(HalloPlugin):
def __init__(self, **kwargs):
kwargs.setdefault('name', 'halloheadings')
kwargs.setdefault('order', 20)
self.element = kwargs.pop('element')
super().__init__(**kwargs)
def construct_plugins_list(self, plugins):
plugins.setdefault(self.name, {'formatBlocks': []})
plugins[self.name]['formatBlocks'].append(self.element)
class HalloListPlugin(HalloPlugin):
def __init__(self, **kwargs):
kwargs.setdefault('name', 'hallolists')
kwargs.setdefault('order', 40)
self.list_type = kwargs['list_type']
super().__init__(**kwargs)
def construct_plugins_list(self, plugins):
plugins.setdefault(self.name, {'lists': {
'ordered': False, 'unordered': False
}})
plugins[self.name]['lists'][self.list_type] = True
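# A minimal sketch of how the plugins above cooperate: each one writes its own
# entry into a shared dict, which render_js_init() below serializes to JSON:
#   plugins = {}
#   HalloFormatPlugin(format_name='bold').construct_plugins_list(plugins)
#   HalloListPlugin(list_type='ordered').construct_plugins_list(plugins)
#   # plugins == {'halloformat': {'formattings': {'bold': True, 'italic': False,
#   #                 'strikeThrough': False, 'underline': False}},
#   #             'hallolists': {'lists': {'ordered': True, 'unordered': False}}}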
# Plugins which are always imported, and cannot be enabled/disabled via 'features'
CORE_HALLO_PLUGINS = [
HalloPlugin(name='halloreundo', order=50),
HalloPlugin(name='hallorequireparagraphs', js=[
'wagtailadmin/js/hallo-plugins/hallo-requireparagraphs.js',
]),
HalloHeadingPlugin(element='p')
]
class HalloRichTextArea(WidgetWithScript, widgets.Textarea):
# this class's constructor accepts a 'features' kwarg
accepts_features = True
def get_panel(self):
return RichTextFieldPanel
def __init__(self, *args, **kwargs):
self.options = kwargs.pop('options', None)
self.features = kwargs.pop('features', None)
if self.features is None:
self.features = features.get_default_features()
self.converter = EditorHTMLConverter()
else:
self.converter = EditorHTMLConverter(self.features)
# construct a list of plugin objects, by querying the feature registry
# and keeping the non-null responses from get_editor_plugin
self.plugins = CORE_HALLO_PLUGINS + list(filter(None, [
features.get_editor_plugin('hallo', feature_name)
for feature_name in self.features
]))
self.plugins.sort(key=lambda plugin: plugin.order)
super().__init__(*args, **kwargs)
def render(self, name, value, attrs=None):
if value is None:
translated_value = None
else:
translated_value = self.converter.from_database_format(value)
return super().render(name, translated_value, attrs)
def render_js_init(self, id_, name, value):
if self.options is not None and 'plugins' in self.options:
# explicit 'plugins' config passed in options, so use that
plugin_data = self.options['plugins']
else:
plugin_data = OrderedDict()
for plugin in self.plugins:
plugin.construct_plugins_list(plugin_data)
return "makeHalloRichTextEditable({0}, {1});".format(
json.dumps(id_), json.dumps(plugin_data)
)
def value_from_datadict(self, data, files, name):
original_value = super().value_from_datadict(data, files, name)
if original_value is None:
return None
return self.converter.to_database_format(original_value)
@property
def media(self):
media = Media(js=[
'wagtailadmin/js/vendor/hallo.js',
'wagtailadmin/js/hallo-bootstrap.js',
])
for plugin in self.plugins:
media += plugin.media
return media
DEFAULT_RICH_TEXT_EDITORS = {
'default': {
'WIDGET': 'wagtail.admin.rich_text.HalloRichTextArea'
}
}
def get_rich_text_editor_widget(name='default', features=None):
editor_settings = getattr(settings, 'WAGTAILADMIN_RICH_TEXT_EDITORS', DEFAULT_RICH_TEXT_EDITORS)
editor = editor_settings[name]
options = editor.get('OPTIONS', None)
if features is None and options is not None:
# fall back on 'features' list within OPTIONS, if any
features = options.get('features', None)
cls = import_string(editor['WIDGET'])
kwargs = {}
if options is not None:
kwargs['options'] = options
if getattr(cls, 'accepts_features', False):
kwargs['features'] = features
return cls(**kwargs)
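# A hedged configuration sketch: get_rich_text_editor_widget() resolves the
# widget through WAGTAILADMIN_RICH_TEXT_EDITORS, so a project can override the
# editor and restrict its features from Django settings (the feature names
# below are illustrative):
#   WAGTAILADMIN_RICH_TEXT_EDITORS = {
#       'default': {
#           'WIDGET': 'wagtail.admin.rich_text.HalloRichTextArea',
#           'OPTIONS': {'features': ['bold', 'italic', 'link']},
#       },
#   }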
|
[
"django.utils.module_loading.import_string",
"wagtail.core.rich_text.features.get_editor_plugin",
"json.dumps",
"wagtail.core.rich_text.features.get_default_features",
"wagtail.admin.rich_text.converters.editor_html.EditorHTMLConverter",
"collections.OrderedDict",
"django.forms.Media"
] |
[((5450, 5481), 'django.utils.module_loading.import_string', 'import_string', (["editor['WIDGET']"], {}), "(editor['WIDGET'])\n", (5463, 5481), False, 'from django.utils.module_loading import import_string\n'), ((868, 899), 'django.forms.Media', 'Media', ([], {'js': 'self.js', 'css': 'self.css'}), '(js=self.js, css=self.css)\n', (873, 899), False, 'from django.forms import Media, widgets\n'), ((4702, 4789), 'django.forms.Media', 'Media', ([], {'js': "['wagtailadmin/js/vendor/hallo.js', 'wagtailadmin/js/hallo-bootstrap.js']"}), "(js=['wagtailadmin/js/vendor/hallo.js',\n 'wagtailadmin/js/hallo-bootstrap.js'])\n", (4707, 4789), False, 'from django.forms import Media, widgets\n'), ((3029, 3060), 'wagtail.core.rich_text.features.get_default_features', 'features.get_default_features', ([], {}), '()\n', (3058, 3060), False, 'from wagtail.core.rich_text import features\n'), ((3090, 3111), 'wagtail.admin.rich_text.converters.editor_html.EditorHTMLConverter', 'EditorHTMLConverter', ([], {}), '()\n', (3109, 3111), False, 'from wagtail.admin.rich_text.converters.editor_html import EditorHTMLConverter\n'), ((3155, 3189), 'wagtail.admin.rich_text.converters.editor_html.EditorHTMLConverter', 'EditorHTMLConverter', (['self.features'], {}), '(self.features)\n', (3174, 3189), False, 'from wagtail.admin.rich_text.converters.editor_html import EditorHTMLConverter\n'), ((4160, 4173), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4171, 4173), False, 'from collections import OrderedDict\n'), ((4348, 4363), 'json.dumps', 'json.dumps', (['id_'], {}), '(id_)\n', (4358, 4363), False, 'import json\n'), ((4365, 4388), 'json.dumps', 'json.dumps', (['plugin_data'], {}), '(plugin_data)\n', (4375, 4388), False, 'import json\n'), ((3414, 3463), 'wagtail.core.rich_text.features.get_editor_plugin', 'features.get_editor_plugin', (['"""hallo"""', 'feature_name'], {}), "('hallo', feature_name)\n", (3440, 3463), False, 'from wagtail.core.rich_text import features\n')]
|
from time import perf_counter as clock
import subprocess
import random
import numpy
# Constants
STEP = 1000 * 100 # the size of the buffer to fill the table, in rows
SCALE = 0.1 # standard deviation of the noise compared with actual
# values
NI_NTIMES = 1 # The number of queries for doing a mean (non-idx cols)
# COLDCACHE = 10 # The number of reads where the cache is considered 'cold'
# WARMCACHE = 50 # The number of reads until the cache is considered 'warmed'
# READ_TIMES = WARMCACHE+50 # The number of complete calls to DB.query_db()
# COLDCACHE = 50 # The number of reads where the cache is considered 'cold'
# WARMCACHE = 50 # The number of reads until the cache is considered 'warmed'
# READ_TIMES = WARMCACHE+50 # The number of complete calls to DB.query_db()
MROW = 1000 * 1000
# Test values
COLDCACHE = 5 # The number of reads where the cache is considered 'cold'
WARMCACHE = 5 # The number of reads until the cache is considered 'warmed'
READ_TIMES = 10 # The number of complete calls to DB.query_db()
# global variables
rdm_cod = ['lin', 'rnd']
prec = 6 # precision for printing floats purposes
def get_nrows(nrows_str):
powers = {'k': 3, 'm': 6, 'g': 9}
try:
return int(float(nrows_str[:-1]) * 10 ** powers[nrows_str[-1]])
except KeyError:
raise ValueError(
"value of nrows must end with either 'k', 'm' or 'g' suffixes.")
class DB:
def __init__(self, nrows, rng, userandom):
global step, scale
self.step = STEP
self.scale = SCALE
self.rng = rng
self.userandom = userandom
self.filename = '-'.join([rdm_cod[userandom], nrows])
self.nrows = get_nrows(nrows)
def get_db_size(self):
sout = subprocess.Popen("sync;du -s %s" % self.filename, shell=True,
stdout=subprocess.PIPE).stdout
line = [l for l in sout][0]
return int(line.split()[0])
def print_mtime(self, t1, explain):
mtime = clock() - t1
print(f"{explain}: {mtime:.6f}")
print(f"Krows/s: {self.nrows / 1000 / mtime:.6f}")
def print_qtime(self, colname, ltimes):
qtime1 = ltimes[0] # First measured time
qtime2 = ltimes[-1] # Last measured time
print(f"Query time for {colname}: {qtime1:.6f}")
print(f"Mrows/s: {self.nrows / MROW / qtime1:.6f}")
print(f"Query time for {colname} (cached): {qtime2:.6f}")
print(f"Mrows/s (cached): {self.nrows / MROW / qtime2:.6f}")
def norm_times(self, ltimes):
"Get the mean and stddev of ltimes, avoiding the extreme values."
lmean = ltimes.mean()
lstd = ltimes.std()
ntimes = ltimes[ltimes < lmean + lstd]
nmean = ntimes.mean()
nstd = ntimes.std()
return nmean, nstd
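    # Worked example: for ltimes = array([1.0, 1.1, 0.9, 5.0]) the mean is 2.0
    # and the (population) std is ~1.73, so only values below ~3.73 survive;
    # the 5.0 outlier is dropped and the result is roughly 1.0 +- 0.08.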
def print_qtime_idx(self, colname, ltimes, repeated, verbose):
if repeated:
r = "[REP] "
else:
r = "[NOREP] "
ltimes = numpy.array(ltimes)
ntimes = len(ltimes)
qtime1 = ltimes[0] # First measured time
ctimes = ltimes[1:COLDCACHE]
cmean, cstd = self.norm_times(ctimes)
wtimes = ltimes[WARMCACHE:]
wmean, wstd = self.norm_times(wtimes)
if verbose:
print("Times for cold cache:\n", ctimes)
# print "Times for warm cache:\n", wtimes
hist1, hist2 = numpy.histogram(wtimes)
print(f"Histogram for warm cache: {hist1}\n{hist2}")
print(f"{r}1st query time for {colname}: {qtime1:.{prec}f}")
print(f"{r}Query time for {colname} (cold cache): "
f"{cmean:.{prec}f} +- {cstd:.{prec}f}")
print(f"{r}Query time for {colname} (warm cache): "
f"{wmean:.{prec}f} +- {wstd:.{prec}f}")
def print_db_sizes(self, init, filled, indexed):
table_size = (filled - init) / 1024
indexes_size = (indexed - filled) / 1024
print(f"Table size (MB): {table_size:.3f}")
print(f"Indexes size (MB): {indexes_size:.3f}")
print(f"Full size (MB): {table_size + indexes_size:.3f}")
def fill_arrays(self, start, stop):
arr_f8 = numpy.arange(start, stop, dtype='float64')
arr_i4 = numpy.arange(start, stop, dtype='int32')
if self.userandom:
arr_f8 += numpy.random.normal(0, stop * self.scale,
size=stop - start)
arr_i4 = numpy.array(arr_f8, dtype='int32')
return arr_i4, arr_f8
def create_db(self, dtype, kind, optlevel, verbose):
self.con = self.open_db(remove=1)
self.create_table(self.con)
init_size = self.get_db_size()
t1 = clock()
self.fill_table(self.con)
table_size = self.get_db_size()
self.print_mtime(t1, 'Insert time')
self.index_db(dtype, kind, optlevel, verbose)
indexes_size = self.get_db_size()
self.print_db_sizes(init_size, table_size, indexes_size)
self.close_db(self.con)
def index_db(self, dtype, kind, optlevel, verbose):
if dtype == "int":
idx_cols = ['col2']
elif dtype == "float":
idx_cols = ['col4']
else:
idx_cols = ['col2', 'col4']
for colname in idx_cols:
t1 = clock()
self.index_col(self.con, colname, kind, optlevel, verbose)
self.print_mtime(t1, 'Index time (%s)' % colname)
def query_db(self, niter, dtype, onlyidxquery, onlynonidxquery,
avoidfscache, verbose, inkernel):
self.con = self.open_db()
if dtype == "int":
reg_cols = ['col1']
idx_cols = ['col2']
elif dtype == "float":
reg_cols = ['col3']
idx_cols = ['col4']
else:
reg_cols = ['col1', 'col3']
idx_cols = ['col2', 'col4']
if avoidfscache:
rseed = int(numpy.random.randint(self.nrows))
else:
rseed = 19
# Query for non-indexed columns
numpy.random.seed(rseed)
base = numpy.random.randint(self.nrows)
if not onlyidxquery:
for colname in reg_cols:
ltimes = []
random.seed(rseed)
for i in range(NI_NTIMES):
t1 = clock()
results = self.do_query(self.con, colname, base, inkernel)
ltimes.append(clock() - t1)
if verbose:
print("Results len:", results)
self.print_qtime(colname, ltimes)
# Always reopen the file after *every* query loop.
# Necessary to make the benchmark to run correctly.
self.close_db(self.con)
self.con = self.open_db()
# Query for indexed columns
if not onlynonidxquery:
for colname in idx_cols:
ltimes = []
numpy.random.seed(rseed)
rndbase = numpy.random.randint(self.nrows, size=niter)
# First, non-repeated queries
for i in range(niter):
base = rndbase[i]
t1 = clock()
results = self.do_query(self.con, colname, base, inkernel)
#results, tprof = self.do_query(
# self.con, colname, base, inkernel)
ltimes.append(clock() - t1)
if verbose:
print("Results len:", results)
self.print_qtime_idx(colname, ltimes, False, verbose)
# Always reopen the file after *every* query loop.
# Necessary to make the benchmark to run correctly.
self.close_db(self.con)
self.con = self.open_db()
ltimes = []
# Second, repeated queries
# for i in range(niter):
# t1=time()
# results = self.do_query(
# self.con, colname, base, inkernel)
# results, tprof = self.do_query(self.con, colname, base, inkernel)
# ltimes.append(time()-t1)
# if verbose:
# print "Results len:", results
# self.print_qtime_idx(colname, ltimes, True, verbose)
# Print internal PyTables index tprof statistics
#tprof = numpy.array(tprof)
#tmean, tstd = self.norm_times(tprof)
# print "tprof-->", round(tmean, prec), "+-", round(tstd, prec)
# print "tprof hist-->", \
# numpy.histogram(tprof)
# print "tprof raw-->", tprof
# Always reopen the file after *every* query loop.
# Necessary to make the benchmark to run correctly.
self.close_db(self.con)
self.con = self.open_db()
# Finally, close the file.
self.close_db(self.con)
def close_db(self, con):
con.close()
if __name__ == "__main__":
import sys
import getopt
try:
import psyco
psyco_imported = 1
except:
psyco_imported = 0
usage = """usage: %s [-T] [-P] [-v] [-f] [-k] [-p] [-m] [-c] [-q] [-i] [-I] [-S] [-x] [-z complevel] [-l complib] [-R range] [-N niter] [-n nrows] [-d datadir] [-O level] [-t kind] [-s] col -Q [suplim]
-T use Pytables
-P use Postgres
-v verbose
-f do a profile of the run (only query functionality & Python 2.5)
-k do a profile for kcachegrind use (out file is 'indexed_search.kcg')
-p use "psyco" if available
-m use random values to fill the table
-q do a query (both indexed and non-indexed versions)
-i do a query (just indexed one)
-I do a query (just in-kernel one)
-S do a query (just standard one)
-x choose a different seed for random numbers (i.e. avoid FS cache)
-c create the database
-z compress with zlib (no compression by default)
-l use complib for compression (zlib used by default)
-R select a range in a field in the form "start,stop" (def "0,10")
-N number of iterations for reading
-n sets the number of rows (in krows) in each table
-d directory to save data (default: data.nobackup)
-O set the optimization level for PyTables indexes
-t select the index type: "medium" (default) or "full", "light", "ultralight"
-s select a type column for operations ('int' or 'float'. def all)
    -Q do a repeated query up to 10**value
\n""" % sys.argv[0]
try:
opts, pargs = getopt.getopt(
sys.argv[1:], 'TPvfkpmcqiISxz:l:R:N:n:d:O:t:s:Q:')
except:
sys.stderr.write(usage)
sys.exit(1)
# default options
usepytables = 0
usepostgres = 0
verbose = 0
doprofile = 0
dokprofile = 0
usepsyco = 0
userandom = 0
docreate = 0
optlevel = 0
kind = "medium"
docompress = 0
complib = "zlib"
doquery = False
onlyidxquery = False
onlynonidxquery = False
inkernel = True
avoidfscache = 0
#rng = [-10, 10]
rng = [-1000, -1000]
repeatquery = 0
repeatvalue = 0
krows = '1k'
niter = READ_TIMES
dtype = "all"
datadir = "data.nobackup"
# Get the options
for option in opts:
if option[0] == '-T':
usepytables = 1
elif option[0] == '-P':
usepostgres = 1
elif option[0] == '-v':
verbose = 1
elif option[0] == '-f':
doprofile = 1
elif option[0] == '-k':
dokprofile = 1
elif option[0] == '-p':
usepsyco = 1
elif option[0] == '-m':
userandom = 1
elif option[0] == '-c':
docreate = 1
elif option[0] == '-q':
doquery = True
elif option[0] == '-i':
doquery = True
onlyidxquery = True
elif option[0] == '-I':
doquery = True
onlynonidxquery = True
elif option[0] == '-S':
doquery = True
onlynonidxquery = True
inkernel = False
elif option[0] == '-x':
avoidfscache = 1
elif option[0] == '-z':
docompress = int(option[1])
elif option[0] == '-l':
complib = option[1]
elif option[0] == '-R':
rng = [int(i) for i in option[1].split(",")]
elif option[0] == '-N':
niter = int(option[1])
elif option[0] == '-n':
krows = option[1]
elif option[0] == '-d':
datadir = option[1]
elif option[0] == '-O':
optlevel = int(option[1])
elif option[0] == '-t':
if option[1] in ('full', 'medium', 'light', 'ultralight'):
kind = option[1]
else:
print("kind should be either 'full', 'medium', 'light' or "
"'ultralight'")
sys.exit(1)
elif option[0] == '-s':
if option[1] in ('int', 'float'):
dtype = option[1]
else:
print("column should be either 'int' or 'float'")
sys.exit(1)
elif option[0] == '-Q':
repeatquery = 1
repeatvalue = int(option[1])
# If not database backend selected, abort
if not usepytables and not usepostgres:
print("Please select a backend:")
print("PyTables: -T")
print("Postgres: -P")
sys.exit(1)
# Create the class for the database
if usepytables:
from pytables_backend import PyTables_DB
db = PyTables_DB(krows, rng, userandom, datadir,
docompress, complib, kind, optlevel)
elif usepostgres:
from postgres_backend import Postgres_DB
db = Postgres_DB(krows, rng, userandom)
if not avoidfscache:
# in order to always generate the same random sequence
numpy.random.seed(20)
if verbose:
if userandom:
print("using random values")
if onlyidxquery:
print("doing indexed queries only")
if psyco_imported and usepsyco:
psyco.bind(db.create_db)
psyco.bind(db.query_db)
if docreate:
if verbose:
print("writing %s rows" % krows)
db.create_db(dtype, kind, optlevel, verbose)
if doquery:
print("Calling query_db() %s times" % niter)
if doprofile:
import pstats
import cProfile as prof
prof.run(
'db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, '
'avoidfscache, verbose, inkernel)',
'indexed_search.prof')
stats = pstats.Stats('indexed_search.prof')
stats.strip_dirs()
stats.sort_stats('time', 'calls')
if verbose:
stats.print_stats()
else:
stats.print_stats(20)
elif dokprofile:
from cProfile import Profile
import lsprofcalltree
prof = Profile()
prof.run(
'db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, '
'avoidfscache, verbose, inkernel)')
kcg = lsprofcalltree.KCacheGrind(prof)
ofile = open('indexed_search.kcg', 'w')
kcg.output(ofile)
ofile.close()
elif doprofile:
import hotshot
import hotshot.stats
prof = hotshot.Profile("indexed_search.prof")
benchtime, stones = prof.run(
'db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, '
'avoidfscache, verbose, inkernel)')
prof.close()
stats = hotshot.stats.load("indexed_search.prof")
stats.strip_dirs()
stats.sort_stats('time', 'calls')
stats.print_stats(20)
else:
db.query_db(niter, dtype, onlyidxquery, onlynonidxquery,
avoidfscache, verbose, inkernel)
if repeatquery:
# Start by a range which is almost None
db.rng = [1, 1]
if verbose:
print("range:", db.rng)
db.query_db(niter, dtype, onlyidxquery, onlynonidxquery,
avoidfscache, verbose, inkernel)
for i in range(repeatvalue):
for j in (1, 2, 5):
rng = j * 10 ** i
db.rng = [-rng / 2, rng / 2]
if verbose:
print("range:", db.rng)
# if usepostgres:
# os.system(
# "echo 1 > /proc/sys/vm/drop_caches;"
# " /etc/init.d/postgresql restart")
# else:
# os.system("echo 1 > /proc/sys/vm/drop_caches")
db.query_db(niter, dtype, onlyidxquery, onlynonidxquery,
avoidfscache, verbose, inkernel)
|
[
"numpy.random.seed",
"getopt.getopt",
"cProfile.Profile",
"numpy.histogram",
"numpy.random.randint",
"numpy.arange",
"hotshot.stats.load",
"numpy.random.normal",
"cProfile.close",
"pytables_backend.PyTables_DB",
"pstats.Stats",
"psyco.bind",
"random.seed",
"hotshot.Profile",
"subprocess.Popen",
"postgres_backend.Postgres_DB",
"time.perf_counter",
"sys.exit",
"lsprofcalltree.KCacheGrind",
"numpy.array",
"sys.stderr.write",
"cProfile.run"
] |
[((3019, 3038), 'numpy.array', 'numpy.array', (['ltimes'], {}), '(ltimes)\n', (3030, 3038), False, 'import numpy\n'), ((4202, 4244), 'numpy.arange', 'numpy.arange', (['start', 'stop'], {'dtype': '"""float64"""'}), "(start, stop, dtype='float64')\n", (4214, 4244), False, 'import numpy\n'), ((4262, 4302), 'numpy.arange', 'numpy.arange', (['start', 'stop'], {'dtype': '"""int32"""'}), "(start, stop, dtype='int32')\n", (4274, 4302), False, 'import numpy\n'), ((4729, 4736), 'time.perf_counter', 'clock', ([], {}), '()\n', (4734, 4736), True, 'from time import perf_counter as clock\n'), ((6074, 6098), 'numpy.random.seed', 'numpy.random.seed', (['rseed'], {}), '(rseed)\n', (6091, 6098), False, 'import numpy\n'), ((6114, 6146), 'numpy.random.randint', 'numpy.random.randint', (['self.nrows'], {}), '(self.nrows)\n', (6134, 6146), False, 'import numpy\n'), ((10778, 10842), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '"""TPvfkpmcqiISxz:l:R:N:n:d:O:t:s:Q:"""'], {}), "(sys.argv[1:], 'TPvfkpmcqiISxz:l:R:N:n:d:O:t:s:Q:')\n", (10791, 10842), False, 'import getopt\n'), ((13701, 13712), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (13709, 13712), False, 'import sys\n'), ((13836, 13921), 'pytables_backend.PyTables_DB', 'PyTables_DB', (['krows', 'rng', 'userandom', 'datadir', 'docompress', 'complib', 'kind', 'optlevel'], {}), '(krows, rng, userandom, datadir, docompress, complib, kind, optlevel\n )\n', (13847, 13921), False, 'from pytables_backend import PyTables_DB\n'), ((14158, 14179), 'numpy.random.seed', 'numpy.random.seed', (['(20)'], {}), '(20)\n', (14175, 14179), False, 'import numpy\n'), ((14378, 14402), 'psyco.bind', 'psyco.bind', (['db.create_db'], {}), '(db.create_db)\n', (14388, 14402), False, 'import psyco\n'), ((14411, 14434), 'psyco.bind', 'psyco.bind', (['db.query_db'], {}), '(db.query_db)\n', (14421, 14434), False, 'import psyco\n'), ((1784, 1874), 'subprocess.Popen', 'subprocess.Popen', (["('sync;du -s %s' % self.filename)"], {'shell': '(True)', 'stdout': 'subprocess.PIPE'}), "('sync;du -s %s' % self.filename, shell=True, stdout=\n subprocess.PIPE)\n", (1800, 1874), False, 'import subprocess\n'), ((2038, 2045), 'time.perf_counter', 'clock', ([], {}), '()\n', (2043, 2045), True, 'from time import perf_counter as clock\n'), ((3437, 3460), 'numpy.histogram', 'numpy.histogram', (['wtimes'], {}), '(wtimes)\n', (3452, 3460), False, 'import numpy\n'), ((4352, 4412), 'numpy.random.normal', 'numpy.random.normal', (['(0)', '(stop * self.scale)'], {'size': '(stop - start)'}), '(0, stop * self.scale, size=stop - start)\n', (4371, 4412), False, 'import numpy\n'), ((4476, 4510), 'numpy.array', 'numpy.array', (['arr_f8'], {'dtype': '"""int32"""'}), "(arr_f8, dtype='int32')\n", (4487, 4510), False, 'import numpy\n'), ((5331, 5338), 'time.perf_counter', 'clock', ([], {}), '()\n', (5336, 5338), True, 'from time import perf_counter as clock\n'), ((10876, 10899), 'sys.stderr.write', 'sys.stderr.write', (['usage'], {}), '(usage)\n', (10892, 10899), False, 'import sys\n'), ((10908, 10919), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (10916, 10919), False, 'import sys\n'), ((14026, 14060), 'postgres_backend.Postgres_DB', 'Postgres_DB', (['krows', 'rng', 'userandom'], {}), '(krows, rng, userandom)\n', (14037, 14060), False, 'from postgres_backend import Postgres_DB\n'), ((14737, 14871), 'cProfile.run', 'prof.run', (['"""db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, avoidfscache, verbose, inkernel)"""', '"""indexed_search.prof"""'], {}), "(\n    'db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, avoidfscache, verbose, inkernel)'\n    , 'indexed_search.prof')\n", (14745, 14871), True, 'import cProfile as prof\n'), ((14934, 14969), 'pstats.Stats', 'pstats.Stats', (['"""indexed_search.prof"""'], {}), "('indexed_search.prof')\n", (14946, 14969), False, 'import pstats\n'), ((5955, 5987), 'numpy.random.randint', 'numpy.random.randint', (['self.nrows'], {}), '(self.nrows)\n', (5975, 5987), False, 'import numpy\n'), ((6257, 6275), 'random.seed', 'random.seed', (['rseed'], {}), '(rseed)\n', (6268, 6275), False, 'import random\n'), ((6958, 6982), 'numpy.random.seed', 'numpy.random.seed', (['rseed'], {}), '(rseed)\n', (6975, 6982), False, 'import numpy\n'), ((7009, 7053), 'numpy.random.randint', 'numpy.random.randint', (['self.nrows'], {'size': 'niter'}), '(self.nrows, size=niter)\n', (7029, 7053), False, 'import numpy\n'), ((15282, 15291), 'cProfile.Profile', 'Profile', ([], {}), '()\n', (15289, 15291), False, 'from cProfile import Profile\n'), ((15304, 15415), 'cProfile.run', 'prof.run', (['"""db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, avoidfscache, verbose, inkernel)"""'], {}), "(\n    'db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, avoidfscache, verbose, inkernel)'\n    )\n", (15312, 15415), True, 'import cProfile as prof\n'), ((15460, 15492), 'lsprofcalltree.KCacheGrind', 'lsprofcalltree.KCacheGrind', (['prof'], {}), '(prof)\n', (15486, 15492), False, 'import lsprofcalltree\n'), ((6344, 6351), 'time.perf_counter', 'clock', ([], {}), '()\n', (6349, 6351), True, 'from time import perf_counter as clock\n'), ((7202, 7209), 'time.perf_counter', 'clock', ([], {}), '()\n', (7207, 7209), True, 'from time import perf_counter as clock\n'), ((15704, 15742), 'hotshot.Profile', 'hotshot.Profile', (['"""indexed_search.prof"""'], {}), "('indexed_search.prof')\n", (15719, 15742), False, 'import hotshot\n'), ((15775, 15886), 'cProfile.run', 'prof.run', (['"""db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, avoidfscache, verbose, inkernel)"""'], {}), "(\n    'db.query_db(niter, dtype, onlyidxquery, onlynonidxquery, avoidfscache, verbose, inkernel)'\n    )\n", (15783, 15886), True, 'import cProfile as prof\n'), ((15925, 15937), 'cProfile.close', 'prof.close', ([], {}), '()\n', (15935, 15937), True, 'import cProfile as prof\n'), ((15958, 15999), 'hotshot.stats.load', 'hotshot.stats.load', (['"""indexed_search.prof"""'], {}), "('indexed_search.prof')\n", (15976, 15999), False, 'import hotshot\n'), ((6465, 6472), 'time.perf_counter', 'clock', ([], {}), '()\n', (6470, 6472), True, 'from time import perf_counter as clock\n'), ((7436, 7443), 'time.perf_counter', 'clock', ([], {}), '()\n', (7441, 7443), True, 'from time import perf_counter as clock\n'), ((13163, 13174), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (13171, 13174), False, 'import sys\n'), ((13387, 13398), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (13395, 13398), False, 'import sys\n')]
|
import warnings
import numpy as np
import pandas as pd
from tqdm import tqdm
def bootstrap(x, iter=int(1E6), return_samples=False):
"""
Performs a simple bootstrap resampling method on an array of data.
Parameters
----------
x : numpy array
A one-dimensional numpy array containing values you wish to bootstrap.
If this array is < 10 values long, a warning will be raised that the
sampling distribution is small and the resulting resampled distribution
may not capture the full data generating distribution
iter: integer
Number of iterations to perform. Default is 10^6
return_samples : bool
If true, a pandas DataFrame of the resampled distributions will be
returned.
Returns
-------
statistics : dict
Dictionary of statistics of the resampled distribution. This includes
details about the originally supplied data as well as the mean value,
standard deviation, and confidence intervals.
"""
    # The docstring promises a warning for small samples; raise it here.
    if len(x) < 10:
        warnings.warn('Sampling distribution has fewer than 10 values; the '
                      'resampled distribution may not capture the full data '
                      'generating distribution.')
    means = np.empty(iter)
dfs = []
for i in tqdm(range(iter), desc='Performing bootstrap sampling'):
resamp = np.random.choice(x, size=len(x), replace=True)
means[i] = resamp.mean()
if return_samples:
_df = pd.DataFrame([])
_df['value'] = resamp
_df['iter'] = i + 1
dfs.append(_df)
# Compute confidence intervals of the means.
mean_val = means.mean()
bounds_ci = {'99%': (0.5, 99.5), '95%': (2.5, 97.5), '90%': (5, 95),
'75%': (12.5, 87.5), '50%': (25, 75), '25%': (37.5, 62.5),
'10%': (45, 55), '5%': (47.5, 52.5), '1%': (49.5, 50.5)}
cis = {}
for k, v in bounds_ci.items():
bounds = np.percentile(means, v)
cis[k] = bounds
    statistics = {}
    statistics['original_data'] = x
statistics['resampled_means'] = means
statistics['mean_value'] = mean_val
statistics['confidence_intervals'] = cis
if return_samples:
_df = pd.concat(dfs, sort=False)
return [statistics, _df]
else:
return statistics
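if __name__ == '__main__':
    # Minimal usage sketch on made-up data: resample a small array and print
    # the bootstrap mean and its 95% confidence interval.
    data = np.array([1.2, 0.9, 1.1, 1.0, 1.3, 0.8, 1.4, 1.0])
    stats = bootstrap(data, iter=10000)
    print(stats['mean_value'], stats['confidence_intervals']['95%'])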
|
[
"numpy.percentile",
"numpy.empty",
"pandas.concat",
"pandas.DataFrame"
] |
[((1029, 1043), 'numpy.empty', 'np.empty', (['iter'], {}), '(iter)\n', (1037, 1043), True, 'import numpy as np\n'), ((1739, 1762), 'numpy.percentile', 'np.percentile', (['means', 'v'], {}), '(means, v)\n', (1752, 1762), True, 'import numpy as np\n'), ((1989, 2015), 'pandas.concat', 'pd.concat', (['dfs'], {'sort': '(False)'}), '(dfs, sort=False)\n', (1998, 2015), True, 'import pandas as pd\n'), ((1271, 1287), 'pandas.DataFrame', 'pd.DataFrame', (['[]'], {}), '([])\n', (1283, 1287), True, 'import pandas as pd\n')]
|
# -*- coding: utf-8 -*-
"""@package Methods.Machine.LamSlotWind.plot_winding
Plot the Lamination's Winding Methods
@date Created on Tue Dec 16 16:39:48 2014
@copyright (C) 2014-2015 EOMYS ENGINEERING.
@author pierre_b
@todo Matrix Zs*Qs for display (cf doc database/articles/electrical machines/
analytical modelling of electrical machines/modeling of AC windings)
"""
from matplotlib.lines import Line2D
from matplotlib.pyplot import axis, legend, plot, subplots, title
from numpy import array, linspace, meshgrid
from pyleecan.Functions.Winding.comp_wind_sym import comp_wind_sym
from pyleecan.Functions.Winding.gen_phase_list import gen_color, gen_name
def plot_winding(self, wind_mat=None, all_slot=False):
"""Plot the Winding in a matplotlib fig
Parameters
----------
self : LamSlotWind
A: LamSlotWind object
wind_mat : numpy.ndarray
Winding Matrix, if None will call comp_connection_mat (Default value = None)
all_slot : bool
True if we plot all slot and false when plotting only needed one(sym)
Returns
-------
None
"""
# We compute the wind_mat only if needed
if wind_mat is None:
wind_mat = self.winding.comp_connection_mat(self.slot.Zs)
# Number of point on rad and tan direction
Nrad, Ntan = self.winding.get_dim_wind()
Zs = self.slot.Zs # Number of slot
# Number of Slot to plot
if all_slot: # Every Slot
Nplot = Zs
else: # Only the needed one (sym)
Nperw = comp_wind_sym(wind_mat)[0] # Symmetry of the winding
Nplot = Zs // Nperw
qs = wind_mat.shape[3] # Number of phase
# Symbole for pole
qs_color = gen_color(self.winding.qs)
qs_name = gen_name(self.winding.qs)
# Schematic slot without ratio
Wt = 0.5
W0 = 0.5
H = 1
# Coordinate of the First Slot (center on 0)
Slot_tan = array(
[-Wt / 2 - W0 / 2, -W0 / 2, -W0 / 2, W0 / 2, W0 / 2, W0 / 2 + Wt / 2]
)
Slot_rad = [0, 0, H, H, 0, 0]
# Duplicate the Slot along tan direction (angular abscissa )
x = list()
y = list()
for i in range(0, Nplot):
x.extend((Slot_tan + (Wt + W0) * i).tolist())
y.extend(Slot_rad)
# Plot the Schematics Slots
fig, ax = subplots()
plot(x, y, "r-")
# First Winding Grid (Coordinate of the winding mark)
range_x = linspace(-W0 / 2, W0 / 2, Ntan + 1, endpoint=False)
range_y = linspace(0, H, Nrad + 1, endpoint=False)
# We don't want the first and last point of the linespace
Grid_x, Grid_y = meshgrid(range_x[1:], range_y[1:])
# Plot the Winding Grid point by point by reading wind_mat
for Zs in range(0, Nplot): # For "every" Slot
for q in range(0, qs): # For every phase
for r in range(0, Nrad): # For every rad layer
for theta in range(0, Ntan): # For every tan layer
if wind_mat[r, theta, Zs, q] != 0:
# Add the correct mark at the correct coordinates
if wind_mat[r, theta, Zs, q] > 0:
plot(
Grid_x[r][theta] + Zs * (Wt + W0),
Grid_y[r][theta],
color=qs_color[q],
linewidth=0,
marker="+",
markeredgewidth=3,
markersize=20,
)
else:
plot(
Grid_x[r][theta] + Zs * (Wt + W0),
Grid_y[r][theta],
color=qs_color[q],
linewidth=0,
marker="x",
markeredgewidth=3,
markersize=20,
)
if self.is_stator:
Lam_Name = "Stator"
else:
Lam_Name = "Rotor"
if all_slot or Nperw == 1:
title(Lam_Name + "'s Winding (every slot)")
else:
title(Lam_Name + "'s Winding (periodicity 1/" + str(Nperw) + ")")
axis("equal")
ax.get_yaxis().set_visible(False)
# Legend qs
sym_leg = list() # Symbol
label_leg = list() # Text
for q in range(0, qs): # Positive mark
sym_leg.append(
Line2D(
[],
[],
color=qs_color[q],
linewidth=0,
marker="+",
markeredgewidth=3,
markersize=20,
)
)
label_leg.append(qs_name[q] + "+")
for q in range(0, qs): # Negative mark
sym_leg.append(
Line2D(
[],
[],
color=qs_color[q],
linewidth=0,
marker="x",
markeredgewidth=3,
markersize=20,
)
)
label_leg.append(qs_name[q] + "-")
legend(sym_leg, label_leg, ncol=2)
fig.show()
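# Typical call (sketch; ``lam`` stands for an existing LamSlotWind instance):
# lam.plot_winding(all_slot=True)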
|
[
"matplotlib.pyplot.title",
"numpy.meshgrid",
"matplotlib.pyplot.plot",
"pyleecan.Functions.Winding.gen_phase_list.gen_name",
"pyleecan.Functions.Winding.comp_wind_sym.comp_wind_sym",
"matplotlib.lines.Line2D",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.axis",
"pyleecan.Functions.Winding.gen_phase_list.gen_color",
"numpy.array",
"numpy.linspace",
"matplotlib.pyplot.subplots"
] |
[((1670, 1696), 'pyleecan.Functions.Winding.gen_phase_list.gen_color', 'gen_color', (['self.winding.qs'], {}), '(self.winding.qs)\n', (1679, 1696), False, 'from pyleecan.Functions.Winding.gen_phase_list import gen_color, gen_name\n'), ((1711, 1736), 'pyleecan.Functions.Winding.gen_phase_list.gen_name', 'gen_name', (['self.winding.qs'], {}), '(self.winding.qs)\n', (1719, 1736), False, 'from pyleecan.Functions.Winding.gen_phase_list import gen_color, gen_name\n'), ((1876, 1952), 'numpy.array', 'array', (['[-Wt / 2 - W0 / 2, -W0 / 2, -W0 / 2, W0 / 2, W0 / 2, W0 / 2 + Wt / 2]'], {}), '([-Wt / 2 - W0 / 2, -W0 / 2, -W0 / 2, W0 / 2, W0 / 2, W0 / 2 + Wt / 2])\n', (1881, 1952), False, 'from numpy import array, linspace, meshgrid\n'), ((2255, 2265), 'matplotlib.pyplot.subplots', 'subplots', ([], {}), '()\n', (2263, 2265), False, 'from matplotlib.pyplot import axis, legend, plot, subplots, title\n'), ((2270, 2286), 'matplotlib.pyplot.plot', 'plot', (['x', 'y', '"""r-"""'], {}), "(x, y, 'r-')\n", (2274, 2286), False, 'from matplotlib.pyplot import axis, legend, plot, subplots, title\n'), ((2360, 2411), 'numpy.linspace', 'linspace', (['(-W0 / 2)', '(W0 / 2)', '(Ntan + 1)'], {'endpoint': '(False)'}), '(-W0 / 2, W0 / 2, Ntan + 1, endpoint=False)\n', (2368, 2411), False, 'from numpy import array, linspace, meshgrid\n'), ((2426, 2466), 'numpy.linspace', 'linspace', (['(0)', 'H', '(Nrad + 1)'], {'endpoint': '(False)'}), '(0, H, Nrad + 1, endpoint=False)\n', (2434, 2466), False, 'from numpy import array, linspace, meshgrid\n'), ((2550, 2584), 'numpy.meshgrid', 'meshgrid', (['range_x[1:]', 'range_y[1:]'], {}), '(range_x[1:], range_y[1:])\n', (2558, 2584), False, 'from numpy import array, linspace, meshgrid\n'), ((4193, 4206), 'matplotlib.pyplot.axis', 'axis', (['"""equal"""'], {}), "('equal')\n", (4197, 4206), False, 'from matplotlib.pyplot import axis, legend, plot, subplots, title\n'), ((5035, 5069), 'matplotlib.pyplot.legend', 'legend', (['sym_leg', 'label_leg'], {'ncol': '(2)'}), '(sym_leg, label_leg, ncol=2)\n', (5041, 5069), False, 'from matplotlib.pyplot import axis, legend, plot, subplots, title\n'), ((4060, 4103), 'matplotlib.pyplot.title', 'title', (['(Lam_Name + "\'s Winding (every slot)")'], {}), '(Lam_Name + "\'s Winding (every slot)")\n', (4065, 4103), False, 'from matplotlib.pyplot import axis, legend, plot, subplots, title\n'), ((1502, 1525), 'pyleecan.Functions.Winding.comp_wind_sym.comp_wind_sym', 'comp_wind_sym', (['wind_mat'], {}), '(wind_mat)\n', (1515, 1525), False, 'from pyleecan.Functions.Winding.comp_wind_sym import comp_wind_sym\n'), ((4404, 4501), 'matplotlib.lines.Line2D', 'Line2D', (['[]', '[]'], {'color': 'qs_color[q]', 'linewidth': '(0)', 'marker': '"""+"""', 'markeredgewidth': '(3)', 'markersize': '(20)'}), "([], [], color=qs_color[q], linewidth=0, marker='+', markeredgewidth=\n 3, markersize=20)\n", (4410, 4501), False, 'from matplotlib.lines import Line2D\n'), ((4757, 4854), 'matplotlib.lines.Line2D', 'Line2D', (['[]', '[]'], {'color': 'qs_color[q]', 'linewidth': '(0)', 'marker': '"""x"""', 'markeredgewidth': '(3)', 'markersize': '(20)'}), "([], [], color=qs_color[q], linewidth=0, marker='x', markeredgewidth=\n 3, markersize=20)\n", (4763, 4854), False, 'from matplotlib.lines import Line2D\n'), ((3093, 3232), 'matplotlib.pyplot.plot', 'plot', (['(Grid_x[r][theta] + Zs * (Wt + W0))', 'Grid_y[r][theta]'], {'color': 'qs_color[q]', 'linewidth': '(0)', 'marker': '"""+"""', 'markeredgewidth': '(3)', 'markersize': '(20)'}), "(Grid_x[r][theta] + Zs * (Wt + W0), Grid_y[r][theta], color=qs_color[q],\n linewidth=0, marker='+', markeredgewidth=3, markersize=20)\n", (3097, 3232), False, 'from matplotlib.pyplot import axis, legend, plot, subplots, title\n'), ((3542, 3681), 'matplotlib.pyplot.plot', 'plot', (['(Grid_x[r][theta] + Zs * (Wt + W0))', 'Grid_y[r][theta]'], {'color': 'qs_color[q]', 'linewidth': '(0)', 'marker': '"""x"""', 'markeredgewidth': '(3)', 'markersize': '(20)'}), "(Grid_x[r][theta] + Zs * (Wt + W0), Grid_y[r][theta], color=qs_color[q],\n linewidth=0, marker='x', markeredgewidth=3, markersize=20)\n", (3546, 3681), False, 'from matplotlib.pyplot import axis, legend, plot, subplots, title\n')]
|
# Run unittests with python -m unittest Unittest[.ClassName[.test_func]]
from collections import Counter, defaultdict
import json
import logging
import os
import random
import re
import unittest
from ItemList import item_table
from ItemPool import remove_junk_items, item_groups
from LocationList import location_groups, location_is_viewable
from Main import main, resolve_settings, build_world_graphs
from Settings import Settings, get_preset_files
test_dir = os.path.join(os.path.dirname(__file__), 'tests')
output_dir = os.path.join(test_dir, 'Output')
os.makedirs(output_dir, exist_ok=True)
logging.basicConfig(level=logging.INFO, filename=os.path.join(output_dir, 'LAST_TEST_LOG'), filemode='w')
# items never required:
# refills, maps, compasses, capacity upgrades, masks (not listed in logic)
never_prefix = ['Bombs', 'Arrows', 'Rupee', 'Deku Seeds', 'Map', 'Compass']
never_suffix = ['Capacity']
never = {
'Bunny Hood', 'Recovery Heart', 'Milk', 'Ice Arrows', 'Ice Trap',
'Double Defense', 'Biggoron Sword', 'Giants Knife',
} | {item for item, (t, adv, _, special) in item_table.items() if adv is False
or any(map(item.startswith, never_prefix)) or any(map(item.endswith, never_suffix))}
# items required at most once, specifically things with multiple possible names
# (except bottles)
once = {
'Goron Tunic', 'Zora Tunic',
}
progressive = {
item for item, (_, _, _, special) in item_table.items()
if special and 'progressive' in special
}
bottles = {
item for item, (_, _, _, special) in item_table.items()
if special and 'bottle' in special and item != 'Deliver Letter'
}
junk = set(remove_junk_items)
def make_settings_for_test(settings_dict, seed=None, outfilename=None):
# Some consistent settings for testability
settings_dict.update({
'compress_rom': "None",
'count': 1,
'create_spoiler': True,
'output_file': os.path.join(test_dir, 'Output', outfilename),
})
if seed and 'seed' not in settings_dict:
settings_dict['seed'] = seed
return Settings(settings_dict, strict=True)
def load_settings(settings_file, seed=None, filename=None):
if isinstance(settings_file, dict): # Check if settings_file is a distribution file settings dict
try:
j = settings_file
j.update({
'enable_distribution_file': True,
'distribution_file': os.path.join(test_dir, 'plando', filename + '.json')
})
except TypeError:
raise RuntimeError("Running test with in memory file but did not supply a filename for output file.")
else:
sfile = os.path.join(test_dir, settings_file)
filename = os.path.splitext(settings_file)[0]
with open(sfile) as f:
j = json.load(f)
return make_settings_for_test(j, seed=seed, outfilename=filename)
def load_spoiler(json_file):
with open(json_file) as f:
return json.load(f)
def generate_with_plandomizer(filename, live_copy=False):
distribution_file = load_spoiler(os.path.join(test_dir, 'plando', filename + '.json'))
try:
settings = load_settings(distribution_file['settings'], seed='TESTTESTTEST', filename=filename)
except KeyError: # No settings dict in distribution file, create minimal consistent configuration
settings = Settings({
'enable_distribution_file': True,
'distribution_file': os.path.join(test_dir, 'plando', filename + '.json'),
'compress_rom': "None",
'count': 1,
'create_spoiler': True,
'output_file': os.path.join(test_dir, 'Output', filename),
'seed': 'TESTTESTTEST'
})
spoiler = main(settings)
if not live_copy:
spoiler = load_spoiler('%s_Spoiler.json' % settings.output_file)
return distribution_file, spoiler
def get_actual_pool(spoiler):
"""Retrieves the actual item pool based on items placed in the spoiler log.
:param spoiler: Spoiler log output from generator
:return: dict:
key: Item name
value: count in spoiler
"""
actual_pool = {}
for location, item in spoiler['locations'].items():
if isinstance(item, dict):
test_item = item['item']
else:
test_item = item
try:
actual_pool[test_item] += 1
except KeyError:
actual_pool[test_item] = 1
return actual_pool
class TestPlandomizer(unittest.TestCase):
def test_item_list(self):
filenames = [
"plando-list",
"plando-item-list-implicit",
"plando-item-list-explicit"
]
for filename in filenames:
with self.subTest(filename):
distribution_file, spoiler = generate_with_plandomizer(filename)
for location, item_value in distribution_file['locations'].items():
spoiler_value = spoiler['locations'][location]
if isinstance(item_value, dict):
item_list = item_value['item']
else:
item_list = item_value
if isinstance(spoiler_value, dict):
self.assertIn(spoiler_value['item'], item_list)
else:
self.assertIn(spoiler_value, item_list)
def test_explicit_item_pool(self):
with self.subTest("generate with defined item pool"):
distribution_file, spoiler = generate_with_plandomizer("plando-explicit-item-pool")
for item, value in distribution_file['item_pool'].items():
self.assertEqual(value, spoiler['item_pool'][item])
actual_pool = get_actual_pool(spoiler)
for item in spoiler['item_pool']:
self.assertEqual(actual_pool[item], spoiler['item_pool'][item])
with self.subTest("even if item pool is large"):
generate_with_plandomizer("plando-explicit-item-pool-3")
with self.subTest("except when not enough junk can be added"):
self.assertRaises(RuntimeError, generate_with_plandomizer, "plando-explicit-item-pool-2")
def test_num_limited_items(self):
filenames = [
"plando-num-bottles-fountain-closed-bad",
"plando-num-bottles-fountain-open-bad",
"plando-num-adult-trade-item-bad",
"plando-num-weird-egg-item-bad"
]
for filename in filenames:
with self.subTest(filename):
self.assertRaises(RuntimeError, generate_with_plandomizer, filename)
def test_excess_starting_items(self):
distribution_file, spoiler = generate_with_plandomizer("plando-excess-starting-items")
excess_item = list(distribution_file['starting_items'])[0]
for location, item in spoiler['locations'].items():
if isinstance(item, dict):
test_item = spoiler['locations'][location]['item']
else:
test_item = spoiler['locations'][location]
self.assertNotEqual(excess_item, test_item)
self.assertNotIn(excess_item, spoiler['item_pool'])
def test_ammo_max_out_of_bounds_use_last_list_element(self):
# This issue only appeared while patching
filename = "plando-ammo-max-out-of-bounds"
settings = Settings({
'enable_distribution_file': True,
'distribution_file': os.path.join(test_dir, 'plando', filename + '.json'),
'compress_rom': "Patch",
'count': 1,
'create_spoiler': True,
'create_cosmetics_log': False,
'output_file': os.path.join(test_dir, 'Output', filename),
'seed': 'TESTTESTTEST'
})
main(settings) # Should not crash
def test_ice_traps(self):
filenames = [
"plando-item-pool-matches-items-placed-after-starting-items-replaced",
"plando-new-placed-ice-traps",
"plando-placed-and-added-ice-traps",
"non-standard-visible-ice-traps",
]
for filename in filenames:
with self.subTest(filename):
distribution_file, spoiler = generate_with_plandomizer(filename)
csmc = spoiler['settings'].get('correct_chest_sizes')
for location in spoiler['locations']:
if location_is_viewable(location, csmc):
item = spoiler['locations'][location]
if isinstance(item, dict):
if item['item'] == "Ice Trap":
self.assertIn("model", item)
else:
self.assertNotIn("Ice Trap", item)
if filename == "plando-item-pool-matches-items-placed-after-starting-items-replaced":
with self.subTest("ice traps not junk with junk ice traps off"):
self.assertEqual(spoiler['item_pool']['Ice Trap'], 6)
with self.subTest("ice traps junk with junk ice traps on"):
# This distribution file should set all junk items to 1 except for ice traps so we will reuse it
_, spoiler = generate_with_plandomizer("plando-explicit-item-pool")
self.assertGreater(spoiler['item_pool']['Ice Trap'], 6)
if filename == "non-standard-visible-ice-traps":
with self.subTest("ice trap models in non-standard visible locations"):
for location in distribution_file['locations']:
self.assertIn('model', spoiler['locations'][location])
def test_should_not_throw_exception(self):
filenames = [
"plando-bottles-in-list",
"plando-bottle-item-group",
"plando-bottle-item-group-in-list",
"plando-adult-trade-in-list",
"plando-adult-trade-item-group",
"plando-adult-trade-item-group-in-list",
"plando-weird-egg-in-list",
"plando-shop-items",
"plando-list-case-sensitivity",
"plando-num-adult-trade-item-good",
"plando-num-weird-egg-item-good",
"plando-num-bottles-fountain-closed-good",
"plando-num-bottles-fountain-open-good",
"plando-change-triforce-piece-count",
"plando-use-normal-triforce-piece-count",
"plando-egg-not-shuffled-one-pool",
"plando-egg-not-shuffled-two-pool",
"plando-egg-shuffled-one-pool",
"plando-egg-shuffled-two-pool",
"no-ice-trap-pending-junk",
"disabled-song-location",
]
for filename in filenames:
with self.subTest(filename):
generate_with_plandomizer(filename)
def test_boss_item_list(self):
filenames = [
"plando-boss-list-child",
"plando-boss-list-adult",
"plando-boss-list"]
for filename in filenames:
with self.subTest(filename):
distribution_file, spoiler = generate_with_plandomizer(filename)
for location, item_list in distribution_file['locations'].items():
self.assertIn(spoiler['locations'][location], item_list)
def test_pool_accuracy(self):
filenames = [
"empty",
"plando-list",
"plando-item-pool-matches-items-placed-after-starting-items-replaced",
"plando-change-triforce-piece-count",
"plando-use-normal-triforce-piece-count",
"plando-shop-items",
"no-ice-trap-pending-junk",
]
for filename in filenames:
with self.subTest(filename + " pool accuracy"):
distribution_file, spoiler = generate_with_plandomizer(filename)
actual_pool = get_actual_pool(spoiler)
for item in spoiler['item_pool']:
self.assertEqual(actual_pool[item], spoiler['item_pool'][item],
f"Pool item {item} count mismatch")
filename = "plando-list-exhaustion"
with self.subTest(filename + " pool accuracy"):
distribution_file, spoiler = generate_with_plandomizer(filename)
actual_pool = get_actual_pool(spoiler)
for item in distribution_file['item_pool']:
self.assertEqual(actual_pool[item], distribution_file['item_pool'][item])
filename = "plando-item-pool-matches-items-placed-after-starting-items-replaced"
with self.subTest("starting items not in actual_pool"):
distribution_file, spoiler = generate_with_plandomizer(filename)
actual_pool = get_actual_pool(spoiler)
for item in distribution_file['starting_items']:
self.assertNotIn(item, actual_pool)
def test_weird_egg_in_pool(self):
# Not shuffled, one in pool: Should remove from pool and not place anywhere
not_shuffled_one = "plando-egg-not-shuffled-one-pool"
distribution_file, spoiler = generate_with_plandomizer(not_shuffled_one)
self.assertNotIn('Weird Egg', spoiler['item_pool'])
# Not shuffled, two in pool: Should be the same outcome as previous case
not_shuffled_two = "plando-egg-not-shuffled-two-pool"
distribution_file, spoiler = generate_with_plandomizer(not_shuffled_two)
self.assertNotIn('Weird Egg', spoiler['item_pool'])
# Shuffled, one in pool: Valid config, shouldn't have to make any changes, will end with 1 in pool
shuffled_one = "plando-egg-shuffled-one-pool"
distribution_file, spoiler = generate_with_plandomizer(shuffled_one)
self.assertEqual(spoiler['item_pool']['Weird Egg'], 1)
        # Shuffled, two in pool: shouldn't have more than one; the extra is removed to force the pool back to 1
shuffled_two = "plando-egg-shuffled-two-pool"
distribution_file, spoiler = generate_with_plandomizer(shuffled_two)
self.assertEqual(spoiler['item_pool']['Weird Egg'], 1)
class TestHints(unittest.TestCase):
def test_skip_zelda(self):
        # Song from Impa would be WotH, but instead of relying on random chance
        # to get HC WotH, just exclude all other locations so HC must be barren.
        _, spoiler = generate_with_plandomizer("skip-zelda")
        barren = spoiler[':barren_regions']
        self.assertIn('Hyrule Castle', barren)
def test_ganondorf(self):
filenames = [
"light-arrows-1",
"light-arrows-2",
"light-arrows-3",
]
# Ganondorf should never hint LAs locked behind LAs
for filename in filenames:
with self.subTest(filename):
_, spoiler = generate_with_plandomizer(filename, live_copy=True)
self.assertIsNotNone(spoiler.worlds[0].light_arrow_location)
self.assertNotEqual('Ganons Tower Boss Key Chest', spoiler.worlds[0].light_arrow_location.name)
class TestValidSpoilers(unittest.TestCase):
# Normalizes spoiler dict for single world or multiple worlds
# Single world worlds_dict is a map of key -> value
# Multi world worlds_dict is a map of "World x" -> {map of key -> value} (with x the player/world number)
# Always returns a map of playernum -> {map of key -> value}
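    # e.g. {'World 2': {...}} becomes {2: {...}}, and a bare single-world
    # dict is first wrapped as {'World 1': ...} before conversion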
def normalize_worlds_dict(self, worlds_dict):
if 'World 1' not in worlds_dict:
worlds_dict = {'World 1': worlds_dict}
return {int(key.split()[1]): content for key, content in worlds_dict.items()}
# Collect all the locations and items from the woth or playthrough.
# locmaps is a map of key -> {map of loc -> item}
# woth: key is "World x". modify 1p games to {"World 1": woth} first
# playthrough: key is sphere index (unimportantish), loc has [Wx]
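    # e.g. a multiworld playthrough location "Some Chest [W2]" is attributed
    # to player 2 and recorded under the bare name "Some Chest"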
def loc_item_collection(self, locmaps):
# playernum -> location set
locations = defaultdict(set)
# playernum -> item -> count
items = defaultdict(Counter)
# location name -> item set
locitems = defaultdict(set)
for key, locmap in locmaps.items():
p = 0
if key.startswith('World'):
p = int(key.split()[1])
for loc, item_json in locmap.items():
w = loc.split()[-1]
if w[:2] == '[W':
p = int(w[2:-1])
loc = loc[:loc.rindex(' ')]
elif p == 0:
# Assume single-player playthrough
p = 1
locations[p].add(loc)
if isinstance(item_json, dict):
item = item_json['item']
item_p = item_json.get('player', p)
else:
item = item_json
item_p = p
items[item_p][item] += 1
locitems[loc].add(item)
return locations, items, locitems
def required_checks(self, spoiler, locations, items, locitems):
# Checks to make against woth/playthrough:
expected_none = {p: set() for p in items}
# No 'never' items
self.assertEqual(
expected_none,
{p: never & c.keys() for p, c in items.items()},
'Non-required items deemed required')
# No disabled locations
disables = set(spoiler['settings'].get('disabled_locations', []))
self.assertEqual(
expected_none,
{p: disables & c for p, c in locations.items()},
'Disabled locations deemed required')
# No more than one of any 'once' item
multi = {p: {it for it, ct in c.items() if ct > 1}
for p, c in items.items()}
self.assertEqual(
expected_none,
{p: once & multi[p] for p in items},
'Collected unexpected items more than once')
# Any item more than once is special['progressive']
self.assertEqual(
expected_none,
{p: multi[p] - progressive for p in items},
'Collected unexpected items more than once')
# At most one bottle
self.assertEqual(
{p: 1 for p in items},
{p: max(1, len(bottles & c.keys())) for p, c in items.items()},
'Collected too many bottles')
def verify_woth(self, spoiler):
woth = spoiler[':woth_locations']
if 'World 1' not in woth:
woth = {'World 1': woth}
self.required_checks(spoiler, *self.loc_item_collection(woth))
def verify_playthrough(self, spoiler):
pl = spoiler[':playthrough']
locations, items, locitems = self.loc_item_collection(pl)
self.required_checks(spoiler, locations, items, locitems)
# Everybody reached the win condition in the playthrough
if spoiler['settings'].get('triforce_hunt', False) or spoiler['randomized_settings'].get('triforce_hunt', False):
item_pool = self.normalize_worlds_dict(spoiler['item_pool'])
self.assertEqual(
{p: item_pool[p]['Triforce Piece'] for p in items},
{p: c['Triforce Piece'] for p, c in items.items()},
'Playthrough missing some (or having extra) Triforce Pieces')
else:
self.assertEqual(
{p: 1 for p in items},
{p: c['Triforce'] for p, c in items.items()},
'Playthrough missing some (or having extra) Triforces')
def verify_disables(self, spoiler):
locmap = spoiler['locations']
if 'World 1' not in locmap:
locmap = {'World 1': locmap}
disables = set(spoiler['settings'].get('disabled_locations', []))
dmap = {k: {loc: v[loc] for loc in disables if loc in v}
for k, v in locmap.items()}
locations, items, locitems = self.loc_item_collection(dmap)
# Only junk at disabled locations
self.assertEqual(
{loc: set() for loc in locitems},
{loc: items - junk for loc, items in locitems.items()},
'Disabled locations have non-junk')
def test_testcases(self):
test_files = [filename
for filename in os.listdir(test_dir)
if filename.endswith('.sav')]
for filename in test_files:
with self.subTest(filename=filename):
settings = load_settings(filename, seed='TESTTESTTEST')
main(settings)
# settings.output_file contains the first part of the filename
spoiler = load_spoiler('%s_Spoiler.json' % settings.output_file)
self.verify_woth(spoiler)
self.verify_playthrough(spoiler)
self.verify_disables(spoiler)
def test_presets(self):
presetsFiles = get_preset_files()
for fn in presetsFiles:
with open(fn, encoding='utf-8') as f:
presets = json.load(f)
for name, settings_dict in presets.items():
ofile = 'preset_' + re.sub(r'[^a-zA-Z0-9_-]+', '_', name)
with self.subTest(name, filename=ofile):
settings = make_settings_for_test(
settings_dict, seed='TESTTESTTEST', outfilename=ofile)
main(settings)
spoiler = load_spoiler('%s_Spoiler.json' % settings.output_file)
self.verify_woth(spoiler)
self.verify_playthrough(spoiler)
self.verify_disables(spoiler)
def test_fuzzer(self):
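        # no-arg seed() draws from system entropy, so each run fuzzes a fresh set of settings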
random.seed()
fuzz_settings = [Settings({
'randomize_settings': True,
'compress_rom': "None",
'create_spoiler': True,
'output_file': os.path.join(output_dir, 'fuzz-%d' % i),
}) for i in range(10)]
out_keys = ['randomize_settings', 'compress_rom',
'create_spoiler', 'output_file', 'seed']
for settings in fuzz_settings:
output_file = '%s_Spoiler.json' % settings.output_file
settings_file = '%s_%s_Settings.json' % (settings.output_file, settings.seed)
with self.subTest(out=output_file, settings=settings_file):
try:
main(settings, max_attempts=2)
spoiler = load_spoiler(output_file)
self.verify_woth(spoiler)
self.verify_playthrough(spoiler)
self.verify_disables(spoiler)
except Exception as e:
# output the settings file in case of any failure
with open(settings_file, 'w') as f:
d = {k: settings.__dict__[k] for k in out_keys}
json.dump(d, f, indent=0)
logging.getLogger('').exception(f'Failed to generate with these settings:\n{settings.get_settings_display()}\n')
raise
|
[
"json.dump",
"json.load",
"LocationList.location_is_viewable",
"os.makedirs",
"ItemList.item_table.items",
"os.path.dirname",
"Settings.get_preset_files",
"logging.getLogger",
"collections.defaultdict",
"Main.main",
"Settings.Settings",
"random.seed",
"os.path.splitext",
"os.path.join",
"os.listdir",
"re.sub"
] |
[((526, 558), 'os.path.join', 'os.path.join', (['test_dir', '"""Output"""'], {}), "(test_dir, 'Output')\n", (538, 558), False, 'import os\n'), ((559, 597), 'os.makedirs', 'os.makedirs', (['output_dir'], {'exist_ok': '(True)'}), '(output_dir, exist_ok=True)\n', (570, 597), False, 'import os\n'), ((477, 502), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (492, 502), False, 'import os\n'), ((2057, 2093), 'Settings.Settings', 'Settings', (['settings_dict'], {'strict': '(True)'}), '(settings_dict, strict=True)\n', (2065, 2093), False, 'from Settings import Settings, get_preset_files\n'), ((3715, 3729), 'Main.main', 'main', (['settings'], {}), '(settings)\n', (3719, 3729), False, 'from Main import main, resolve_settings, build_world_graphs\n'), ((648, 689), 'os.path.join', 'os.path.join', (['output_dir', '"""LAST_TEST_LOG"""'], {}), "(output_dir, 'LAST_TEST_LOG')\n", (660, 689), False, 'import os\n'), ((1416, 1434), 'ItemList.item_table.items', 'item_table.items', ([], {}), '()\n', (1432, 1434), False, 'from ItemList import item_table\n'), ((1535, 1553), 'ItemList.item_table.items', 'item_table.items', ([], {}), '()\n', (1551, 1553), False, 'from ItemList import item_table\n'), ((2646, 2683), 'os.path.join', 'os.path.join', (['test_dir', 'settings_file'], {}), '(test_dir, settings_file)\n', (2658, 2683), False, 'import os\n'), ((2945, 2957), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2954, 2957), False, 'import json\n'), ((3055, 3107), 'os.path.join', 'os.path.join', (['test_dir', '"""plando"""', "(filename + '.json')"], {}), "(test_dir, 'plando', filename + '.json')\n", (3067, 3107), False, 'import os\n'), ((7782, 7796), 'Main.main', 'main', (['settings'], {}), '(settings)\n', (7786, 7796), False, 'from Main import main, resolve_settings, build_world_graphs\n'), ((16004, 16020), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (16015, 16020), False, 'from collections import Counter, defaultdict\n'), ((16074, 16094), 'collections.defaultdict', 'defaultdict', (['Counter'], {}), '(Counter)\n', (16085, 16094), False, 'from collections import Counter, defaultdict\n'), ((16150, 16166), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (16161, 16166), False, 'from collections import Counter, defaultdict\n'), ((20901, 20919), 'Settings.get_preset_files', 'get_preset_files', ([], {}), '()\n', (20917, 20919), False, 'from Settings import Settings, get_preset_files\n'), ((21671, 21684), 'random.seed', 'random.seed', ([], {}), '()\n', (21682, 21684), False, 'import random\n'), ((1089, 1107), 'ItemList.item_table.items', 'item_table.items', ([], {}), '()\n', (1105, 1107), False, 'from ItemList import item_table\n'), ((1910, 1955), 'os.path.join', 'os.path.join', (['test_dir', '"""Output"""', 'outfilename'], {}), "(test_dir, 'Output', outfilename)\n", (1922, 1955), False, 'import os\n'), ((2703, 2734), 'os.path.splitext', 'os.path.splitext', (['settings_file'], {}), '(settings_file)\n', (2719, 2734), False, 'import os\n'), ((2785, 2797), 'json.load', 'json.load', (['f'], {}), '(f)\n', (2794, 2797), False, 'import json\n'), ((7463, 7515), 'os.path.join', 'os.path.join', (['test_dir', '"""plando"""', "(filename + '.json')"], {}), "(test_dir, 'plando', filename + '.json')\n", (7475, 7515), False, 'import os\n'), ((7684, 7726), 'os.path.join', 'os.path.join', (['test_dir', '"""Output"""', 'filename'], {}), "(test_dir, 'Output', filename)\n", (7696, 7726), False, 'import os\n'), ((20290, 20310), 'os.listdir', 'os.listdir', (['test_dir'], {}), '(test_dir)\n', (20300, 20310), False, 'import os\n'), ((20537, 20551), 'Main.main', 'main', (['settings'], {}), '(settings)\n', (20541, 20551), False, 'from Main import main, resolve_settings, build_world_graphs\n'), ((21028, 21040), 'json.load', 'json.load', (['f'], {}), '(f)\n', (21037, 21040), False, 'import json\n'), ((2412, 2464), 'os.path.join', 'os.path.join', (['test_dir', '"""plando"""', "(filename + '.json')"], {}), "(test_dir, 'plando', filename + '.json')\n", (2424, 2464), False, 'import os\n'), ((3434, 3486), 'os.path.join', 'os.path.join', (['test_dir', '"""plando"""', "(filename + '.json')"], {}), "(test_dir, 'plando', filename + '.json')\n", (3446, 3486), False, 'import os\n'), ((3611, 3653), 'os.path.join', 'os.path.join', (['test_dir', '"""Output"""', 'filename'], {}), "(test_dir, 'Output', filename)\n", (3623, 3653), False, 'import os\n'), ((8405, 8441), 'LocationList.location_is_viewable', 'location_is_viewable', (['location', 'csmc'], {}), '(location, csmc)\n', (8425, 8441), False, 'from LocationList import location_groups, location_is_viewable\n'), ((21133, 21169), 're.sub', 're.sub', (['"""[^a-zA-Z0-9_-]+"""', '"""_"""', 'name'], {}), "('[^a-zA-Z0-9_-]+', '_', name)\n", (21139, 21169), False, 'import re\n'), ((21386, 21400), 'Main.main', 'main', (['settings'], {}), '(settings)\n', (21390, 21400), False, 'from Main import main, resolve_settings, build_world_graphs\n'), ((21860, 21899), 'os.path.join', 'os.path.join', (['output_dir', "('fuzz-%d' % i)"], {}), "(output_dir, 'fuzz-%d' % i)\n", (21872, 21899), False, 'import os\n'), ((22360, 22390), 'Main.main', 'main', (['settings'], {'max_attempts': '(2)'}), '(settings, max_attempts=2)\n', (22364, 22390), False, 'from Main import main, resolve_settings, build_world_graphs\n'), ((22857, 22882), 'json.dump', 'json.dump', (['d', 'f'], {'indent': '(0)'}), '(d, f, indent=0)\n', (22866, 22882), False, 'import json\n'), ((22903, 22924), 'logging.getLogger', 'logging.getLogger', (['""""""'], {}), "('')\n", (22920, 22924), False, 'import logging\n')]
|
import random
import os
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.urls import reverse
from django.db.models import Q
from ShoeStore.utils import unique_slug_generator
def get_filename_ext(filepath):
base_name = os.path.basename(filepath)
name, ext = os.path.splitext(base_name)
return name, ext
def upload_image_path(instance, filename):
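    # Called by Django's ImageField with the model instance and the original
    # filename; storing under a random directory avoids filename collisions.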
# print(instance)
#print(filename)
new_filename = random.randint(1,3910209312)
name, ext = get_filename_ext(filename)
final_filename = '{new_filename}{ext}'.format(new_filename=new_filename, ext=ext)
return "products/{new_filename}/{final_filename}".format(
new_filename=new_filename,
final_filename=final_filename
)
class ProductQuerySet(models.query.QuerySet):
def active(self):
return self.filter(active=True)
def featured(self):
return self.filter(featured=True, active=True)
def search(self, query):
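        # OR the lookups together; distinct() is needed because joining
        # through tags can return the same product more than once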
lookups = (Q(title__icontains=query) |
# Q(description__icontains=query) |
Q(price__icontains=query) |
Q(tag__title__icontains=query)
)
return self.filter(lookups).distinct()
class ProductManager(models.Manager):
def get_queryset(self):
return ProductQuerySet(self.model, using=self._db)
def all(self):
return self.get_queryset().active()
def featured(self): #Product.objects.featured()
return self.get_queryset().featured()
def get_by_id(self, id):
qs = self.get_queryset().filter(id=id) # Product.objects == self.get_queryset()
if qs.count() == 1:
return qs.first()
return None
def search(self, query):
return self.get_queryset().active().search(query)
# def tag(self, request):
# product_id = request.session.get("product_id", None)
# qs = Tag.objects.filter(products__id = product_id)
# if qs.count() == 1:
# return qs.first();
# return None
# Create your models here.
class Product(models.Model):
title = models.CharField(max_length=120)
slug = models.SlugField(blank=True, unique=True)
description = models.TextField()
price = models.DecimalField(decimal_places=2, max_digits=20, default=39.99)
image = models.ImageField(upload_to=upload_image_path, null=True, blank=True)
featured = models.BooleanField(default=False)
active = models.BooleanField(default=True)
timestamp = models.DateTimeField(auto_now_add=True)
objects = ProductManager()
def get_absolute_url(self):
# return "/products/{slug}".format(slug=self.slug)
return reverse("products:detail", kwargs={"slug": self.slug})
def __str__(self):
return self.title
def __unicode__(self):
return self.title
@property
def name(self):
return self.title
def product_pre_save_receiver(sender, instance, *args, **kwargs):
if not instance.slug:
instance.slug = unique_slug_generator(instance)
pre_save.connect(product_pre_save_receiver, sender=Product)
|
[
"django.db.models.signals.pre_save.connect",
"django.db.models.TextField",
"random.randint",
"os.path.basename",
"django.db.models.CharField",
"ShoeStore.utils.unique_slug_generator",
"django.db.models.Q",
"django.db.models.SlugField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.DecimalField",
"django.urls.reverse",
"os.path.splitext",
"django.db.models.DateTimeField"
] |
[((3163, 3222), 'django.db.models.signals.pre_save.connect', 'pre_save.connect', (['product_pre_save_receiver'], {'sender': 'Product'}), '(product_pre_save_receiver, sender=Product)\n', (3179, 3222), False, 'from django.db.models.signals import pre_save, post_save\n'), ((274, 300), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (290, 300), False, 'import os\n'), ((317, 344), 'os.path.splitext', 'os.path.splitext', (['base_name'], {}), '(base_name)\n', (333, 344), False, 'import os\n'), ((473, 502), 'random.randint', 'random.randint', (['(1)', '(3910209312)'], {}), '(1, 3910209312)\n', (487, 502), False, 'import random\n'), ((2158, 2190), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (2174, 2190), False, 'from django.db import models\n'), ((2213, 2254), 'django.db.models.SlugField', 'models.SlugField', ([], {'blank': '(True)', 'unique': '(True)'}), '(blank=True, unique=True)\n', (2229, 2254), False, 'from django.db import models\n'), ((2277, 2295), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2293, 2295), False, 'from django.db import models\n'), ((2318, 2385), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'decimal_places': '(2)', 'max_digits': '(20)', 'default': '(39.99)'}), '(decimal_places=2, max_digits=20, default=39.99)\n', (2337, 2385), False, 'from django.db import models\n'), ((2408, 2477), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': 'upload_image_path', 'null': '(True)', 'blank': '(True)'}), '(upload_to=upload_image_path, null=True, blank=True)\n', (2425, 2477), False, 'from django.db import models\n'), ((2500, 2534), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2519, 2534), False, 'from django.db import models\n'), ((2557, 2590), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2576, 2590), False, 'from django.db import models\n'), ((2613, 2652), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2633, 2652), False, 'from django.db import models\n'), ((2792, 2846), 'django.urls.reverse', 'reverse', (['"""products:detail"""'], {'kwargs': "{'slug': self.slug}"}), "('products:detail', kwargs={'slug': self.slug})\n", (2799, 2846), False, 'from django.urls import reverse\n'), ((3130, 3161), 'ShoeStore.utils.unique_slug_generator', 'unique_slug_generator', (['instance'], {}), '(instance)\n', (3151, 3161), False, 'from ShoeStore.utils import unique_slug_generator\n'), ((1176, 1206), 'django.db.models.Q', 'Q', ([], {'tag__title__icontains': 'query'}), '(tag__title__icontains=query)\n', (1177, 1206), False, 'from django.db.models import Q\n'), ((1027, 1052), 'django.db.models.Q', 'Q', ([], {'title__icontains': 'query'}), '(title__icontains=query)\n', (1028, 1052), False, 'from django.db.models import Q\n'), ((1129, 1154), 'django.db.models.Q', 'Q', ([], {'price__icontains': 'query'}), '(price__icontains=query)\n', (1130, 1154), False, 'from django.db.models import Q\n')]
|
# -*- coding: utf-8 -
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
# Copyright 2011 Cloudant, Inc.
import six
import sys
import time
import socket
import struct
import logging
try:
import cPickle as pickle
except ImportError:
import pickle
import bucky2.client as client
import bucky2.names as names
if six.PY3:
xrange = range
log = logging.getLogger(__name__)
class DebugSocket(object):
    def sendall(self, data):
        # accept both str and bytes so debug output works on Python 2 and 3
        if isinstance(data, bytes):
            data = data.decode("utf-8", "replace")
        sys.stdout.write(data)
class CarbonClient(client.Client):
def __init__(self, cfg, pipe):
super(CarbonClient, self).__init__(pipe)
self.debug = cfg.debug
self.ip = cfg.graphite_ip
self.port = cfg.graphite_port
self.max_reconnects = cfg.graphite_max_reconnects
self.reconnect_delay = cfg.graphite_reconnect_delay
        if self.max_reconnects <= 0:
            self.max_reconnects = sys.maxsize  # sys.maxint no longer exists on Python 3
self.connect()
def connect(self):
if self.debug:
log.debug("Connected the debug socket.")
self.sock = DebugSocket()
return
for i in xrange(self.max_reconnects):
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.sock.connect((self.ip, self.port))
log.info("Connected to Carbon at %s:%s", self.ip, self.port)
return
except socket.error as e:
if i + 1 >= self.max_reconnects:
raise
log.error("Failed to connect to %s:%s: %s", self.ip, self.port, e)
if self.reconnect_delay > 0:
time.sleep(self.reconnect_delay)
def reconnect(self):
self.close()
self.connect()
def close(self):
try:
self.sock.close()
except:
pass
def send(self, host, name, value, mtime):
        raise NotImplementedError
class PlaintextClient(CarbonClient):
def send(self, host, name, value, mtime):
stat = names.statname(host, name)
mesg = "%s %s %s\n" % (stat, value, mtime)
for i in xrange(self.max_reconnects):
try:
self.sock.sendall(mesg)
return
except socket.error as err:
if i + 1 >= self.max_reconnects:
raise
log.error("Failed to send data to Carbon server: %s", err)
self.reconnect()
class PickleClient(CarbonClient):
def __init__(self, cfg, pipe):
super(PickleClient, self).__init__(cfg, pipe)
self.buffer_size = cfg.graphite_pickle_buffer_size
self.buffer = []
def send(self, host, name, value, mtime):
stat = names.statname(host, name)
self.buffer.append((stat, (mtime, value)))
if len(self.buffer) >= self.buffer_size:
self.transmit()
def transmit(self):
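        # Carbon's pickle listener expects each batch to be prefixed with a
        # 4-byte big-endian length header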
payload = pickle.dumps(self.buffer, protocol=-1)
header = struct.pack("!L", len(payload))
self.buffer = []
for i in xrange(self.max_reconnects):
try:
self.sock.sendall(header + payload)
return
except socket.error as err:
if i + 1 >= self.max_reconnects:
raise
log.error("Failed to send data to Carbon server: %s", err)
self.reconnect()
|
[
"sys.stdout.write",
"socket.socket",
"bucky2.names.statname",
"time.sleep",
"logging.getLogger",
"pickle.dumps"
] |
[((852, 879), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (869, 879), False, 'import logging\n'), ((946, 968), 'sys.stdout.write', 'sys.stdout.write', (['data'], {}), '(data)\n', (962, 968), False, 'import sys\n'), ((2504, 2530), 'bucky2.names.statname', 'names.statname', (['host', 'name'], {}), '(host, name)\n', (2518, 2530), True, 'import bucky2.names as names\n'), ((3202, 3228), 'bucky2.names.statname', 'names.statname', (['host', 'name'], {}), '(host, name)\n', (3216, 3228), True, 'import bucky2.names as names\n'), ((3400, 3438), 'pickle.dumps', 'pickle.dumps', (['self.buffer'], {'protocol': '(-1)'}), '(self.buffer, protocol=-1)\n', (3412, 3438), False, 'import pickle\n'), ((1643, 1692), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (1656, 1692), False, 'import socket\n'), ((2127, 2159), 'time.sleep', 'time.sleep', (['self.reconnect_delay'], {}), '(self.reconnect_delay)\n', (2137, 2159), False, 'import time\n')]
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
DaskExecutor
.. seealso::
For more information on how the DaskExecutor works, take a look at the guide:
:ref:`executor:DaskExecutor`
"""
import subprocess
from typing import Any, Dict, Optional
from distributed import Client, Future, as_completed
from distributed.security import Security
from airflow.configuration import conf
from airflow.exceptions import AirflowException
from airflow.executors.base_executor import NOT_STARTED_MESSAGE, BaseExecutor, CommandType
from airflow.models.taskinstance import TaskInstanceKey
class DaskExecutor(BaseExecutor):
"""
DaskExecutor submits tasks to a Dask Distributed cluster.
"""
def __init__(self, cluster_address=None):
super().__init__(parallelism=0)
if cluster_address is None:
cluster_address = conf.get('dask', 'cluster_address')
if not cluster_address:
raise ValueError('Please provide a Dask cluster address in airflow.cfg')
self.cluster_address = cluster_address
# ssl / tls parameters
self.tls_ca = conf.get('dask', 'tls_ca')
self.tls_key = conf.get('dask', 'tls_key')
self.tls_cert = conf.get('dask', 'tls_cert')
self.client: Optional[Client] = None
self.futures: Optional[Dict[Future, TaskInstanceKey]] = None
def start(self) -> None:
if self.tls_ca or self.tls_key or self.tls_cert:
security = Security(
tls_client_key=self.tls_key,
tls_client_cert=self.tls_cert,
tls_ca_file=self.tls_ca,
require_encryption=True,
)
else:
security = None
self.client = Client(self.cluster_address, security=security)
self.futures = {}
def execute_async(self,
key: TaskInstanceKey,
command: CommandType,
queue: Optional[str] = None,
executor_config: Optional[Any] = None) -> None:
self.validate_command(command)
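        # the wrapped callable is shipped to and executed on a Dask worker,
        # so the airflow CLI must be available in the worker environment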
def airflow_run():
return subprocess.check_call(command, close_fds=True)
if not self.client:
raise AirflowException(NOT_STARTED_MESSAGE)
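        # pure=False gives every submission a unique key, so Dask will not
        # deduplicate tasks that happen to run an identical command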
future = self.client.submit(airflow_run, pure=False)
self.futures[future] = key # type: ignore
def _process_future(self, future: Future) -> None:
if not self.futures:
raise AirflowException(NOT_STARTED_MESSAGE)
if future.done():
key = self.futures[future]
if future.exception():
self.log.error("Failed to execute task: %s", repr(future.exception()))
self.fail(key)
elif future.cancelled():
self.log.error("Failed to execute task")
self.fail(key)
else:
self.success(key)
self.futures.pop(future)
def sync(self) -> None:
if not self.futures:
raise AirflowException(NOT_STARTED_MESSAGE)
# make a copy so futures can be popped during iteration
for future in self.futures.copy():
self._process_future(future)
def end(self) -> None:
if not self.client:
raise AirflowException(NOT_STARTED_MESSAGE)
if not self.futures:
raise AirflowException(NOT_STARTED_MESSAGE)
self.client.cancel(list(self.futures.keys()))
for future in as_completed(self.futures.copy()):
self._process_future(future)
    def terminate(self):
        if not self.client:
            raise AirflowException(NOT_STARTED_MESSAGE)
        if not self.futures:
            raise AirflowException(NOT_STARTED_MESSAGE)
        self.client.cancel(list(self.futures.keys()))
        self.end()
|
[
"distributed.Client",
"airflow.configuration.conf.get",
"distributed.security.Security",
"airflow.exceptions.AirflowException",
"subprocess.check_call"
] |
[((1844, 1870), 'airflow.configuration.conf.get', 'conf.get', (['"""dask"""', '"""tls_ca"""'], {}), "('dask', 'tls_ca')\n", (1852, 1870), False, 'from airflow.configuration import conf\n'), ((1894, 1921), 'airflow.configuration.conf.get', 'conf.get', (['"""dask"""', '"""tls_key"""'], {}), "('dask', 'tls_key')\n", (1902, 1921), False, 'from airflow.configuration import conf\n'), ((1946, 1974), 'airflow.configuration.conf.get', 'conf.get', (['"""dask"""', '"""tls_cert"""'], {}), "('dask', 'tls_cert')\n", (1954, 1974), False, 'from airflow.configuration import conf\n'), ((2462, 2509), 'distributed.Client', 'Client', (['self.cluster_address'], {'security': 'security'}), '(self.cluster_address, security=security)\n', (2468, 2509), False, 'from distributed import Client, Future, as_completed\n'), ((1591, 1626), 'airflow.configuration.conf.get', 'conf.get', (['"""dask"""', '"""cluster_address"""'], {}), "('dask', 'cluster_address')\n", (1599, 1626), False, 'from airflow.configuration import conf\n'), ((2199, 2321), 'distributed.security.Security', 'Security', ([], {'tls_client_key': 'self.tls_key', 'tls_client_cert': 'self.tls_cert', 'tls_ca_file': 'self.tls_ca', 'require_encryption': '(True)'}), '(tls_client_key=self.tls_key, tls_client_cert=self.tls_cert,\n tls_ca_file=self.tls_ca, require_encryption=True)\n', (2207, 2321), False, 'from distributed.security import Security\n'), ((2861, 2907), 'subprocess.check_call', 'subprocess.check_call', (['command'], {'close_fds': '(True)'}), '(command, close_fds=True)\n', (2882, 2907), False, 'import subprocess\n'), ((2955, 2992), 'airflow.exceptions.AirflowException', 'AirflowException', (['NOT_STARTED_MESSAGE'], {}), '(NOT_STARTED_MESSAGE)\n', (2971, 2992), False, 'from airflow.exceptions import AirflowException\n'), ((3209, 3246), 'airflow.exceptions.AirflowException', 'AirflowException', (['NOT_STARTED_MESSAGE'], {}), '(NOT_STARTED_MESSAGE)\n', (3225, 3246), False, 'from airflow.exceptions import AirflowException\n'), ((3755, 3792), 'airflow.exceptions.AirflowException', 'AirflowException', (['NOT_STARTED_MESSAGE'], {}), '(NOT_STARTED_MESSAGE)\n', (3771, 3792), False, 'from airflow.exceptions import AirflowException\n'), ((4015, 4052), 'airflow.exceptions.AirflowException', 'AirflowException', (['NOT_STARTED_MESSAGE'], {}), '(NOT_STARTED_MESSAGE)\n', (4031, 4052), False, 'from airflow.exceptions import AirflowException\n'), ((4100, 4137), 'airflow.exceptions.AirflowException', 'AirflowException', (['NOT_STARTED_MESSAGE'], {}), '(NOT_STARTED_MESSAGE)\n', (4116, 4137), False, 'from airflow.exceptions import AirflowException\n'), ((4363, 4400), 'airflow.exceptions.AirflowException', 'AirflowException', (['NOT_STARTED_MESSAGE'], {}), '(NOT_STARTED_MESSAGE)\n', (4379, 4400), False, 'from airflow.exceptions import AirflowException\n')]
|
#! /usr/bin/env python3
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
#
# Copyright (c) 2014 <NAME>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation;
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# NOTE: Run this script with the Python3 interpreter if the python3 compatibility
# of the ns-3 unit test runner needs to be tested.
# The following options of test.py are being tested for portability by this script.
# To see the options supported by this script, run with the -h option on the command line
#
# -h, --help show this help message and exit
# -b BUILDPATH, --buildpath=BUILDPATH
# specify the path where ns-3 was built (defaults to the
# build directory for the current variant)
# -c KIND, --constrain=KIND
# constrain the test-runner by kind of test
# -d, --duration print the duration of each test suite and example
# -e EXAMPLE, --example=EXAMPLE
# specify a single example to run (no relative path is
# needed)
# -u, --update-data If examples use reference data files, get them to re-
# generate them
# -f FULLNESS, --fullness=FULLNESS
# choose the duration of tests to run: QUICK, EXTENSIVE,
# or TAKES_FOREVER, where EXTENSIVE includes QUICK and
# TAKES_FOREVER includes QUICK and EXTENSIVE (only QUICK
# tests are run by default)
# -g, --grind run the test suites and examples using valgrind
# -k, --kinds print the kinds of tests available
# -l, --list print the list of known tests
# -m, --multiple report multiple failures from test suites and test
# cases
# -n, --nowaf do not run waf before starting testing
# -p PYEXAMPLE, --pyexample=PYEXAMPLE
# specify a single python example to run (with relative
# path)
# -r, --retain retain all temporary files (which are normally
# deleted)
# -s TEST-SUITE, --suite=TEST-SUITE
# specify a single test suite to run
# -t TEXT-FILE, --text=TEXT-FILE
# write detailed test results into TEXT-FILE.txt
# -v, --verbose print progress and informational messages
# -w HTML-FILE, --web=HTML-FILE, --html=HTML-FILE
# write detailed test results into HTML-FILE.html
# -x XML-FILE, --xml=XML-FILE
# write detailed test results into XML-FILE.xml
from __future__ import print_function
from TestBase import TestBaseClass
import sys
def main(argv):
"""
Prepares test cases and executes
"""
test_cases = [
'',
'-h',
'--help',
'-b build/',
'--buildpath=build/',
'-c performance',
'--constrain=performance',
'-d',
'--duration',
'-e socket-options-ipv6',
'--example=socket-options-ipv6',
'-u',
'--update-data',
        '-f EXTENSIVE',
        '--fullness=EXTENSIVE',
'-g',
'--grind',
'-l',
'--list',
'-m',
'--multiple',
'-n',
'--nowaf',
'-p first',
'--pyexample=first',
'-r',
'--retain',
'-s ns3-tcp-interoperability',
'--suite=ns3-tcp-interoperability',
'-t t_opt.txt',
'--text=t_opt.txt && rm -rf t_opt.txt',
'-v',
'--verbose',
'-w t_opt.html && rm -rf t_opt.html',
'--web=t_opt.html && rm -rf t_opt.html',
'--html=t_opt.html && rm -rf t_opt.html',
'-x t_opt.xml && rm -rf t_opt.xml',
'--xml=t_opt.xml && rm -rf t_opt.xml',
]
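    # each case runs the full cycle: waf configure -> test.py <option> -> waf clean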
configure_string = sys.executable + ' waf configure --enable-tests --enable-examples'
clean_string = sys.executable + ' waf clean'
cmd_execute_list = [ '%s && %s test.py %s && %s' % (configure_string, sys.executable, option, clean_string) for option in test_cases]
runner = TestBaseClass(argv[1:], "Test suite for the ns-3 unit test runner" , 'test-py')
return runner.runtests(cmd_execute_list)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
[
"TestBase.TestBaseClass"
] |
[((4650, 4728), 'TestBase.TestBaseClass', 'TestBaseClass', (['argv[1:]', '"""Test suite for the ns-3 unit test runner"""', '"""test-py"""'], {}), "(argv[1:], 'Test suite for the ns-3 unit test runner', 'test-py')\n", (4663, 4728), False, 'from TestBase import TestBaseClass\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import json
import platform
import time
import urllib
import urllib2
import xbmc
import xbmcgui
from resources.lib.utils import notice
class AuthException(Exception):
pass
class AuthPendingException(AuthException):
pass
class AuthExpiredException(AuthException):
pass
class EmptyTokenException(AuthException):
pass
class AuthDialog(object):
def __init__(self, plugin):
self.total = 0
self.plugin = plugin
self._dialog = xbmcgui.DialogProgress()
def close(self, cancel=False):
if self._dialog:
self._dialog.close()
self._dialog = None
xbmc.executebuiltin("Container.Refresh")
if cancel:
self.plugin.routing.redirect("/")
def update(self, step):
position = int(100 * step / float(self.total))
self._dialog.update(position)
def show(self, text):
self._dialog.create("Активация устройства", text)
@property
def iscanceled(self):
return self._dialog.iscanceled() if self._dialog else True
class Auth(object):
CLIENT_ID = "xbmc"
CLIENT_SECRET = "<KEY>"
OAUTH_API_URL = "http://api.service-kp.com/oauth2/device"
def __init__(self, plugin):
self._auth_dialog = AuthDialog(plugin)
self.plugin = plugin
def _make_request(self, payload):
self.plugin.logger.notice("sending payload {} to oauth api".format(payload))
try:
response = urllib2.urlopen(
urllib2.Request(self.OAUTH_API_URL), urllib.urlencode(payload)
).read()
return json.loads(response)
except urllib2.HTTPError as e:
if e.code == 400:
response = json.loads(e.read())
error = response.get("error")
if error and error in ["code_expired", "authorization_expired"]:
raise AuthExpiredException
if error and error == "authorization_pending":
raise AuthPendingException
if error:
notice("Ошибка аутентификации")
raise AuthException(error)
return response
# server can respond with 429 status, so we just wait until it gives a correct response
elif e.code == 429:
                time.sleep(3)
                return self._make_request(payload)
else:
self.plugin.logger.fatal(
"oauth request error; status: {}; message: {}".format(e.code, e.message)
)
notice("Код ответа сервера {}".format(response["status"]), "Неизвестная ошибка")
raise
def _get_device_code(self):
payload = {
"grant_type": "device_code",
"client_id": self.CLIENT_ID,
"client_secret": self.CLIENT_SECRET,
}
resp = self._make_request(payload)
return {
"device_code": resp["code"],
"user_code": resp["user_code"],
"verification_uri": resp["verification_uri"],
"refresh_interval": int(resp["interval"]),
}
def _get_device_token(self, device_code):
self.plugin.logger.notice("getting a new device token")
payload = {
"grant_type": "device_token",
"client_id": self.CLIENT_ID,
"code": device_code,
"client_secret": self.CLIENT_SECRET,
}
resp = self._make_request(payload)
self._update_settings(resp["refresh_token"], resp["access_token"], resp["expires_in"])
def _refresh_token(self):
self.plugin.logger.notice("refreshing token")
payload = {
"grant_type": "refresh_token",
"refresh_token": self.plugin.settings.refresh_token,
"client_id": self.CLIENT_ID,
"client_secret": self.CLIENT_SECRET,
}
try:
resp = self._make_request(payload)
except AuthExpiredException:
self._activate()
return
self._update_settings(resp["refresh_token"], resp["access_token"], resp["expires_in"])
def _update_device_info(self):
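        # Kodi info labels report "Busy" until the GUI has resolved them,
        # so retry until real values come back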
result = {"build_version": "Busy", "friendly_name": "Busy"}
while "Busy" in result.values():
result = {
"build_version": xbmc.getInfoLabel("System.BuildVersion"),
"friendly_name": xbmc.getInfoLabel("System.FriendlyName"),
}
software = "Kodi {}".format(result["build_version"].split()[0])
title = result["friendly_name"] if result["friendly_name"] != "unknown" else platform.node()
self.plugin.client("device/notify").post(
data={"title": title, "hardware": platform.machine(), "software": software}
)
def _verify_device_code(self, interval, device_code):
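        # poll for roughly five minutes at the interval requested by the server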
steps = (5 * 60) // interval
self._auth_dialog.total = steps
for i in range(steps):
if self._auth_dialog.iscanceled:
self._auth_dialog.close(cancel=True)
break
else:
try:
self._get_device_token(device_code)
except AuthPendingException:
self._auth_dialog.update(i)
xbmc.sleep(interval * 1000)
else:
self._update_device_info()
self._auth_dialog.close()
break
else:
self._auth_dialog.close(cancel=True)
def _update_settings(self, refresh_token, access_token, expires_in):
self.plugin.settings.refresh_token = refresh_token
self.plugin.settings.access_token = access_token
self.plugin.settings.access_token_expire = str(expires_in + int(time.time()))
self.plugin.logger.notice(
"refresh token - {}; access token - {}; expires in - {}".format(
refresh_token, access_token, expires_in
)
)
def _activate(self):
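        # OAuth2 device flow: fetch a device/user code pair, show the user the
        # verification URL and code, then poll until the device is authorized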
resp = self._get_device_code()
self._auth_dialog.show(
"\n".join(
[
"Откройте [B]{}[/B]".format(resp["verification_uri"]),
"и введите следующий код: [B]{}[/B]".format(resp["user_code"]),
]
)
)
self._verify_device_code(resp["refresh_interval"], resp["device_code"])
@property
def is_token_expired(self):
return int(self.plugin.settings.access_token_expire) < int(time.time())
def get_token(self):
if not self.plugin.settings.access_token:
self._activate()
else:
self._refresh_token()
|
[
"xbmc.sleep",
"platform.node",
"json.loads",
"resources.lib.utils.notice",
"xbmc.getInfoLabel",
"xbmcgui.DialogProgress",
"urllib2.Request",
"platform.machine",
"time.time",
"time.sleep",
"urllib.urlencode",
"xbmc.executebuiltin"
] |
[((612, 636), 'xbmcgui.DialogProgress', 'xbmcgui.DialogProgress', ([], {}), '()\n', (634, 636), False, 'import xbmcgui\n'), ((775, 815), 'xbmc.executebuiltin', 'xbmc.executebuiltin', (['"""Container.Refresh"""'], {}), "('Container.Refresh')\n", (794, 815), False, 'import xbmc\n'), ((1736, 1756), 'json.loads', 'json.loads', (['response'], {}), '(response)\n', (1746, 1756), False, 'import json\n'), ((4803, 4818), 'platform.node', 'platform.node', ([], {}), '()\n', (4816, 4818), False, 'import platform\n'), ((4515, 4555), 'xbmc.getInfoLabel', 'xbmc.getInfoLabel', (['"""System.BuildVersion"""'], {}), "('System.BuildVersion')\n", (4532, 4555), False, 'import xbmc\n'), ((4590, 4630), 'xbmc.getInfoLabel', 'xbmc.getInfoLabel', (['"""System.FriendlyName"""'], {}), "('System.FriendlyName')\n", (4607, 4630), False, 'import xbmc\n'), ((6695, 6706), 'time.time', 'time.time', ([], {}), '()\n', (6704, 6706), False, 'import time\n'), ((4915, 4933), 'platform.machine', 'platform.machine', ([], {}), '()\n', (4931, 4933), False, 'import platform\n'), ((5956, 5967), 'time.time', 'time.time', ([], {}), '()\n', (5965, 5967), False, 'import time\n'), ((1633, 1668), 'urllib2.Request', 'urllib2.Request', (['self.OAUTH_API_URL'], {}), '(self.OAUTH_API_URL)\n', (1648, 1668), False, 'import urllib2\n'), ((1670, 1695), 'urllib.urlencode', 'urllib.urlencode', (['payload'], {}), '(payload)\n', (1686, 1695), False, 'import urllib\n'), ((2204, 2235), 'resources.lib.utils.notice', 'notice', (['"""Ошибка аутентификации"""'], {}), "('Ошибка аутентификации')\n", (2210, 2235), False, 'from resources.lib.utils import notice\n'), ((5462, 5489), 'xbmc.sleep', 'xbmc.sleep', (['(interval * 1000)'], {}), '(interval * 1000)\n', (5472, 5489), False, 'import xbmc\n'), ((2502, 2515), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2512, 2515), False, 'import time\n')]
|
from collections import Counter, defaultdict
from datetime import datetime
def process_input(lines):
template = lines[0]
rules = {
line.split('->')[0].strip(): line.split('->')[1].strip() for line in lines[2:]
}
return template, rules
def apply_insertion(template, rules, num):
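    # Track pair frequencies instead of the full polymer string: each rule
    # AB -> C turns every AB pair into an AC pair and a CB pair.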
pairs = Counter([template[i : i + 2] for i in range(len(template) - 1)])
for j in range(num):
print(f'{datetime.now()} {j=}')
new_pairs = defaultdict(int)
for pair, middle in rules.items():
char1, char2 = pair
new_pairs[f'{char1}{middle}'] += pairs[pair]
new_pairs[f'{middle}{char2}'] += pairs[pair]
pairs = new_pairs
return pairs
def main(lines, num=40):
template, rules = process_input(lines)
pair_counts = apply_insertion(template, rules, num)
character_counts = defaultdict(int)
for pair, count in pair_counts.items():
for c in pair:
character_counts[c] += count
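    # every character is shared by two adjacent pairs except the template's
    # first and last characters, so halve the counts and add the endpoints back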
actual_character_counts = {k: v//2 for k, v in character_counts.items()}
actual_character_counts[template[0]] += 1
actual_character_counts[template[-1]] += 1
counts = Counter(actual_character_counts)
return counts.most_common()[0][1] - counts.most_common()[-1][1]
if __name__ == '__main__':
with open('../data/input14.txt') as f:
lines = [line.strip() for line in f.readlines()]
print(main(lines))
|
[
"collections.defaultdict",
"collections.Counter",
"datetime.datetime.now"
] |
[((891, 907), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (902, 907), False, 'from collections import Counter, defaultdict\n'), ((1199, 1231), 'collections.Counter', 'Counter', (['actual_character_counts'], {}), '(actual_character_counts)\n', (1206, 1231), False, 'from collections import Counter, defaultdict\n'), ((490, 506), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (501, 506), False, 'from collections import Counter, defaultdict\n'), ((447, 461), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (459, 461), False, 'from datetime import datetime\n')]
|
from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify
from flask_login import login_required, current_user
from server.models.properties import Properties, PropertyImgs
from server.models.history import History, HistoryContent
from server.utils.authority_verification import is_admin
from server.forms.forms import PropertyForm, DeleteForm
from server.utils.query_utils import serialize, pst_time
from server.utils.s3_helpers import *
from server import db
from datetime import datetime
mod = Blueprint('administration_property', __name__)
ALLOWED_EXTENSIONS = ['png','jpeg','jpg']
"""
PROPERTIES
"""
@mod.route('/property-settings', methods=['GET'])
@login_required
def property_settings():
p = Properties.query.order_by(Properties.date_posted.desc()).all()
count = 0
properties = []
for x in p:
prop = serialize(x, Properties)
prop['date_posted'] = pst_time(x.date_posted)
prop['recent_order'] = count
prop['images'] = [i.img_url for i in x.images]
properties.append(prop)
count += 1
return render_template('administration/properties/property_settings.html', properties = properties)
@mod.route('/edit-property/<string:property_id>', methods=['GET'])
@login_required
def edit_property_get(property_id):
form = PropertyForm()
property = Properties.query.get(property_id)
if not property:
abort(404)
form.notes.data = property.notes
return render_template('administration/properties/edit_property.html', property=property, form=form)
@mod.route('/edit-property/<string:property_id>', methods=['POST'])
@login_required
def edit_property(property_id):
form = PropertyForm(request.form)
property = Properties.query.get(property_id)
if not property:
abort(404)
if request.method=='POST' and form.validate():
try:
filelist = request.form.getlist('file-order')
newlist = request.files.getlist('user_file')
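            # 'file-order' encodes the desired image ordering: existing image
            # ids, with the literal string 'new' as a positional placeholder
            # for each freshly uploaded file in 'user_file'.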
if not filelist:
return jsonify({'status':'error', 'msg':'Something went wrong. Please refresh the page and try again.'})
currentimgs = []
imglist = property.images
if imglist:
for i in imglist:
currentimgs.append(i.img_id)
# check for changes in images
change = False
if len(filelist) != len(currentimgs):
change = True
else:
for i,j in zip(currentimgs, filelist):
if j == 'new':
change = True
break
elif i != int(j):
change = True
break
            # figure out which columns changed, apply the new values, and record the history
content = []
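            # Diff the submitted form against the stored model: each changed
            # field is applied via setattr and logged as a history entry.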
for p in property.__dict__:
for f in form.__dict__.keys():
if p == f:
if property.__dict__[p] != form.__dict__[p].data:
content.append({
form.__dict__[p].label.text: "\"{}\" to \"{}\"".format(property.__dict__[p], form.__dict__[p].data)
})
setattr(property, p, form.__dict__[p].data)
address = "{}; {}; {}, {} {}".format(property.address_l1, property.address_l2, property.city, property.state, property.zipcode)
new_history = None
if len(content) >= 1 or change:
#record history if there are changes
new_history = History('edit_property',current_user.id, tgt_prop_id = property.property_id)
db.session.add(new_history)
db.session.flush()
new_content = HistoryContent(new_history.history_id, 'Identifier',address)
db.session.add(new_content)
for c in content:
for k in c:
new_content = HistoryContent(new_history.history_id, k, c[k])
db.session.add(new_content)
else:
return jsonify({'status':'danger', 'msg':'No changes were specified.'})
#add new pictures and redetermine order.
if change:
new_content = HistoryContent(new_history.history_id, 'Images','Images edited')
db.session.add(new_content)
newcount = 0
for filenum in filelist:
if filenum == 'new':
newfile = newlist[newcount]
if newfile.filename == '' or not allowed_file(newfile.filename, ALLOWED_EXTENSIONS):
return jsonify({'msg':'Only {} files are accepted.'.format(', '.join(ALLOWED_EXTENSIONS)),'status':'danger'})
image = PropertyImgs(property.property_id,"")
db.session.add(image)
db.session.flush()
property.add_image(image)
newfile.filename = "property_{}_{}".format(property.property_id, image.img_id)
output = upload_file_to_s3(newfile, app.config['PROPERTY_S3_BUCKET'], app.config["S3_BUCKET"])
if not output:
raise
else:
image.img_url = output
new_content = HistoryContent(new_history.history_id, 'Image', output)
db.session.add(new_content)
newcount += 1
else:
img_id = int(filenum)
image = PropertyImgs.query.get(img_id)
if not image or image.property_id != property.property_id:
raise
image.date_added = datetime.utcnow()
del currentimgs[currentimgs.index(image.img_id)]
            # remove images the user deleted from the ordering
for i in currentimgs:
del_img = PropertyImgs.query.get(int(i))
if not del_img or del_img.property_id != property.property_id:
raise
s = delete_file_from_s3('property_{}_{}'.format(property.property_id, del_img.img_id), app.config['PROPERTY_S3_BUCKET'], app.config["S3_BUCKET"])
if not s:
raise
else:
new_content = HistoryContent(new_history.history_id, 'Image', 'deleted')
db.session.add(new_content)
db.session.delete(del_img)
db.session.commit()
flash('Property was successfully edited', 'success')
return jsonify({'status':'success','msg':'Property was successfully edited','reload':True})
except Exception as e:
print(e)
db.session.rollback()
return jsonify({'status':'error', 'msg':'Something went wrong. Please refresh the page and try again.'})
if form.errors:
return jsonify({'status':'danger','msg':'There were errors in the form.', 'form_errors':form.errors})
else:
return jsonify({'status':'danger','msg':'Something went wrong. Please refresh the page and try again.'})
@mod.route('/add-property', methods=['GET'])
@login_required
def add_property_get():
form = PropertyForm()
return render_template('administration/properties/add_property.html', form=form)
@mod.route('/add-property', methods=['POST'])
@login_required
def add_property():
msg = 'Something went wrong uploading the file. Please refresh the page and try again.'
form = PropertyForm(request.form)
if request.method=='POST' and form.validate():
# make sure file is a valid file
if 'user_file' not in request.files:
return jsonify({'status':'danger','msg':'You must upload an image.'})
uploads = request.files.getlist('user_file')
if not uploads:
return jsonify({'status':'danger','msg':'You must upload an image.'})
for file in uploads:
if file.filename == '':
del uploads[uploads.index(file)]
# return jsonify({'status':'danger','msg':'Please upload an image.'})
if not allowed_file(file.filename, ALLOWED_EXTENSIONS):
return jsonify({'msg':'Only {} files are accepted.'.format(', '.join(ALLOWED_EXTENSIONS)),'status':'danger'})
try:
#initiate property data
property = Properties(form.data)
db.session.add(property)
db.session.flush()
            #initiate history data
address = "{}; {}; {}, {} {}".format(form.address_l1.data, form.address_l2.data, form.city.data, form.state.data, form.zipcode.data)
new_history = History('add_property',current_user.id, tgt_prop_id = property.property_id)
db.session.add(new_history)
db.session.flush()
#record history of new property
content = []
for f in form.__dict__['_fields'].keys():
content.append({form.__dict__[f].label.text: form.__dict__[f].data})
print(content)
new_content = HistoryContent(new_history.history_id, 'Identifier',address)
db.session.add(new_content)
for c in content:
for k in c:
new_content = HistoryContent(new_history.history_id, k, c[k])
db.session.add(new_content)
print('here')
#upload pictures. if successful, record history
for file in uploads:
image = PropertyImgs(property.property_id,"")
db.session.add(image)
db.session.flush()
property.add_image(image)
file.filename = "property_{}_{}".format(property.property_id, image.img_id)
output = upload_file_to_s3(file, app.config['PROPERTY_S3_BUCKET'], app.config["S3_BUCKET"])
if not output:
raise
else:
image.img_url = output
new_content = HistoryContent(new_history.history_id, 'Image', output)
db.session.add(new_content)
db.session.commit()
flash('Property was successfully added!','success')
return jsonify({'status':'success','msg':'Property was successfully added!','reload':True})
except Exception as e:
print(e)
db.session.rollback()
return jsonify({'msg':msg,'status':'danger'})
if form.errors:
return jsonify({'status':'danger','msg':'There were errors in the form.', 'form_errors':form.errors})
else:
return jsonify({'status':'danger','msg':'Something went wrong. Please refresh the page and try again.'})
@mod.route('/delete-property', methods=['POST'])
@login_required
def delete_property():
form = DeleteForm(request.form)
if not form.validate():
flash('Something went wrong. Please refresh the page and try again.','danger')
return redirect(url_for('administration_property.property_settings'))
try:
property_id = int(request.form['id'])
except:
abort(404)
try:
property = Properties.query.get(property_id)
new_history = History('del_property',current_user.id, tgt_prop_id=property.property_id)
db.session.add(new_history)
db.session.flush()
address = "{}; {}; {}, {} {}".format(property.address_l1, property.address_l2, property.city, property.state, property.zipcode)
new_content = HistoryContent(new_history.history_id, 'Identifier', address)
db.session.add(new_content)
images = property.images
if images:
for i in images:
s = delete_file_from_s3('property_{}_{}'.format(property.property_id, i.img_id), app.config['PROPERTY_S3_BUCKET'], app.config["S3_BUCKET"])
if not s:
raise
db.session.delete(property)
db.session.commit()
# return jsonify({'status':'success', 'msg':'Property successfully deleted!', 'reload':'true'})
flash('Property was successfully deleted','success')
return redirect(url_for('administration_property.property_settings'))
except Exception as e:
print(e)
db.session.rollback()
flash('Something went wrong. Please refresh the page and try again.','danger')
return redirect(url_for('administration_property.property_settings'))
|
[
"flask.flash",
"server.db.session.delete",
"datetime.datetime.utcnow",
"flask.jsonify",
"flask.url_for",
"server.forms.forms.DeleteForm",
"server.db.session.rollback",
"flask.request.files.getlist",
"flask.abort",
"server.db.session.commit",
"server.models.properties.PropertyImgs.query.get",
"flask.render_template",
"server.models.properties.Properties",
"flask.Blueprint",
"server.utils.query_utils.pst_time",
"server.models.history.History",
"server.models.history.HistoryContent",
"server.db.session.flush",
"server.models.properties.Properties.date_posted.desc",
"flask.request.form.getlist",
"server.models.properties.Properties.query.get",
"server.db.session.add",
"server.models.properties.PropertyImgs",
"server.forms.forms.PropertyForm",
"server.utils.query_utils.serialize"
] |
[((540, 586), 'flask.Blueprint', 'Blueprint', (['"""administration_property"""', '__name__'], {}), "('administration_property', __name__)\n", (549, 586), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((1060, 1154), 'flask.render_template', 'render_template', (['"""administration/properties/property_settings.html"""'], {'properties': 'properties'}), "('administration/properties/property_settings.html',\n properties=properties)\n", (1075, 1154), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((1281, 1295), 'server.forms.forms.PropertyForm', 'PropertyForm', ([], {}), '()\n', (1293, 1295), False, 'from server.forms.forms import PropertyForm, DeleteForm\n'), ((1308, 1341), 'server.models.properties.Properties.query.get', 'Properties.query.get', (['property_id'], {}), '(property_id)\n', (1328, 1341), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((1416, 1514), 'flask.render_template', 'render_template', (['"""administration/properties/edit_property.html"""'], {'property': 'property', 'form': 'form'}), "('administration/properties/edit_property.html', property=\n property, form=form)\n", (1431, 1514), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((1636, 1662), 'server.forms.forms.PropertyForm', 'PropertyForm', (['request.form'], {}), '(request.form)\n', (1648, 1662), False, 'from server.forms.forms import PropertyForm, DeleteForm\n'), ((1675, 1708), 'server.models.properties.Properties.query.get', 'Properties.query.get', (['property_id'], {}), '(property_id)\n', (1695, 1708), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((6034, 6048), 'server.forms.forms.PropertyForm', 'PropertyForm', ([], {}), '()\n', (6046, 6048), False, 'from server.forms.forms import PropertyForm, DeleteForm\n'), ((6057, 6130), 'flask.render_template', 'render_template', (['"""administration/properties/add_property.html"""'], {'form': 'form'}), "('administration/properties/add_property.html', form=form)\n", (6072, 6130), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((6312, 6338), 'server.forms.forms.PropertyForm', 'PropertyForm', (['request.form'], {}), '(request.form)\n', (6324, 6338), False, 'from server.forms.forms import PropertyForm, DeleteForm\n'), ((9074, 9098), 'server.forms.forms.DeleteForm', 'DeleteForm', (['request.form'], {}), '(request.form)\n', (9084, 9098), False, 'from server.forms.forms import PropertyForm, DeleteForm\n'), ((859, 883), 'server.utils.query_utils.serialize', 'serialize', (['x', 'Properties'], {}), '(x, Properties)\n', (868, 883), False, 'from server.utils.query_utils import serialize, pst_time\n'), ((908, 931), 'server.utils.query_utils.pst_time', 'pst_time', (['x.date_posted'], {}), '(x.date_posted)\n', (916, 931), False, 'from server.utils.query_utils import serialize, pst_time\n'), ((1362, 1372), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1367, 1372), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((1729, 1739), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (1734, 1739), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((5729, 5831), 'flask.jsonify', 'jsonify', (["{'status': 'danger', 'msg': 'There were errors in the form.', 'form_errors':\n form.errors}"], {}), "({'status': 'danger', 'msg': 'There were errors in the form.',\n 'form_errors': form.errors})\n", (5736, 5831), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((5840, 5944), 'flask.jsonify', 'jsonify', (["{'status': 'danger', 'msg':\n 'Something went wrong. Please refresh the page and try again.'}"], {}), "({'status': 'danger', 'msg':\n 'Something went wrong. Please refresh the page and try again.'})\n", (5847, 5944), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((6549, 6583), 'flask.request.files.getlist', 'request.files.getlist', (['"""user_file"""'], {}), "('user_file')\n", (6570, 6583), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((8766, 8868), 'flask.jsonify', 'jsonify', (["{'status': 'danger', 'msg': 'There were errors in the form.', 'form_errors':\n form.errors}"], {}), "({'status': 'danger', 'msg': 'There were errors in the form.',\n 'form_errors': form.errors})\n", (8773, 8868), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((8877, 8981), 'flask.jsonify', 'jsonify', (["{'status': 'danger', 'msg':\n 'Something went wrong. Please refresh the page and try again.'}"], {}), "({'status': 'danger', 'msg':\n 'Something went wrong. Please refresh the page and try again.'})\n", (8884, 8981), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((9126, 9205), 'flask.flash', 'flash', (['"""Something went wrong. Please refresh the page and try again."""', '"""danger"""'], {}), "('Something went wrong. Please refresh the page and try again.', 'danger')\n", (9131, 9205), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((9366, 9399), 'server.models.properties.Properties.query.get', 'Properties.query.get', (['property_id'], {}), '(property_id)\n', (9386, 9399), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((9417, 9491), 'server.models.history.History', 'History', (['"""del_property"""', 'current_user.id'], {'tgt_prop_id': 'property.property_id'}), "('del_property', current_user.id, tgt_prop_id=property.property_id)\n", (9424, 9491), False, 'from server.models.history import History, HistoryContent\n'), ((9493, 9520), 'server.db.session.add', 'db.session.add', (['new_history'], {}), '(new_history)\n', (9507, 9520), False, 'from server import db\n'), ((9523, 9541), 'server.db.session.flush', 'db.session.flush', ([], {}), '()\n', (9539, 9541), False, 'from server import db\n'), ((9689, 9750), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', '"""Identifier"""', 'address'], {}), "(new_history.history_id, 'Identifier', address)\n", (9703, 9750), False, 'from server.models.history import History, HistoryContent\n'), ((9753, 9780), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (9767, 9780), False, 'from server import db\n'), ((10014, 10041), 'server.db.session.delete', 'db.session.delete', (['property'], {}), '(property)\n', (10031, 10041), False, 'from server import db\n'), ((10044, 10063), 'server.db.session.commit', 'db.session.commit', ([], {}), '()\n', (10061, 10063), False, 'from server import db\n'), ((10164, 10217), 'flask.flash', 'flash', (['"""Property was successfully deleted"""', '"""success"""'], {}), "('Property was successfully deleted', 'success')\n", (10169, 10217), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((1809, 1843), 'flask.request.form.getlist', 'request.form.getlist', (['"""file-order"""'], {}), "('file-order')\n", (1829, 1843), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((1857, 1891), 'flask.request.files.getlist', 'request.files.getlist', (['"""user_file"""'], {}), "('user_file')\n", (1878, 1891), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((5361, 5380), 'server.db.session.commit', 'db.session.commit', ([], {}), '()\n', (5378, 5380), False, 'from server import db\n'), ((5384, 5436), 'flask.flash', 'flash', (['"""Property was successfully edited"""', '"""success"""'], {}), "('Property was successfully edited', 'success')\n", (5389, 5436), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((5447, 5540), 'flask.jsonify', 'jsonify', (["{'status': 'success', 'msg': 'Property was successfully edited', 'reload': True\n }"], {}), "({'status': 'success', 'msg': 'Property was successfully edited',\n 'reload': True})\n", (5454, 5540), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((6473, 6538), 'flask.jsonify', 'jsonify', (["{'status': 'danger', 'msg': 'You must upload an image.'}"], {}), "({'status': 'danger', 'msg': 'You must upload an image.'})\n", (6480, 6538), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((6612, 6677), 'flask.jsonify', 'jsonify', (["{'status': 'danger', 'msg': 'You must upload an image.'}"], {}), "({'status': 'danger', 'msg': 'You must upload an image.'})\n", (6619, 6677), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((7060, 7081), 'server.models.properties.Properties', 'Properties', (['form.data'], {}), '(form.data)\n', (7070, 7081), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((7085, 7109), 'server.db.session.add', 'db.session.add', (['property'], {}), '(property)\n', (7099, 7109), False, 'from server import db\n'), ((7113, 7131), 'server.db.session.flush', 'db.session.flush', ([], {}), '()\n', (7129, 7131), False, 'from server import db\n'), ((7316, 7390), 'server.models.history.History', 'History', (['"""add_property"""', 'current_user.id'], {'tgt_prop_id': 'property.property_id'}), "('add_property', current_user.id, tgt_prop_id=property.property_id)\n", (7323, 7390), False, 'from server.models.history import History, HistoryContent\n'), ((7395, 7422), 'server.db.session.add', 'db.session.add', (['new_history'], {}), '(new_history)\n', (7409, 7422), False, 'from server import db\n'), ((7426, 7444), 'server.db.session.flush', 'db.session.flush', ([], {}), '()\n', (7442, 7444), False, 'from server import db\n'), ((7650, 7711), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', '"""Identifier"""', 'address'], {}), "(new_history.history_id, 'Identifier', address)\n", (7664, 7711), False, 'from server.models.history import History, HistoryContent\n'), ((7714, 7741), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (7728, 7741), False, 'from server import db\n'), ((8457, 8476), 'server.db.session.commit', 'db.session.commit', ([], {}), '()\n', (8474, 8476), False, 'from server import db\n'), ((8480, 8532), 'flask.flash', 'flash', (['"""Property was successfully added!"""', '"""success"""'], {}), "('Property was successfully added!', 'success')\n", (8485, 8532), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((8542, 8635), 'flask.jsonify', 'jsonify', (["{'status': 'success', 'msg': 'Property was successfully added!', 'reload': True\n }"], {}), "({'status': 'success', 'msg': 'Property was successfully added!',\n 'reload': True})\n", (8549, 8635), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((9223, 9275), 'flask.url_for', 'url_for', (['"""administration_property.property_settings"""'], {}), "('administration_property.property_settings')\n", (9230, 9275), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((9335, 9345), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (9340, 9345), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((10235, 10287), 'flask.url_for', 'url_for', (['"""administration_property.property_settings"""'], {}), "('administration_property.property_settings')\n", (10242, 10287), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((10326, 10347), 'server.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (10345, 10347), False, 'from server import db\n'), ((10350, 10429), 'flask.flash', 'flash', (['"""Something went wrong. Please refresh the page and try again."""', '"""danger"""'], {}), "('Something went wrong. Please refresh the page and try again.', 'danger')\n", (10355, 10429), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((772, 801), 'server.models.properties.Properties.date_posted.desc', 'Properties.date_posted.desc', ([], {}), '()\n', (799, 801), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((1924, 2027), 'flask.jsonify', 'jsonify', (["{'status': 'error', 'msg':\n 'Something went wrong. Please refresh the page and try again.'}"], {}), "({'status': 'error', 'msg':\n 'Something went wrong. Please refresh the page and try again.'})\n", (1931, 2027), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((3087, 3162), 'server.models.history.History', 'History', (['"""edit_property"""', 'current_user.id'], {'tgt_prop_id': 'property.property_id'}), "('edit_property', current_user.id, tgt_prop_id=property.property_id)\n", (3094, 3162), False, 'from server.models.history import History, HistoryContent\n'), ((3168, 3195), 'server.db.session.add', 'db.session.add', (['new_history'], {}), '(new_history)\n', (3182, 3195), False, 'from server import db\n'), ((3200, 3218), 'server.db.session.flush', 'db.session.flush', ([], {}), '()\n', (3216, 3218), False, 'from server import db\n'), ((3238, 3299), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', '"""Identifier"""', 'address'], {}), "(new_history.history_id, 'Identifier', address)\n", (3252, 3299), False, 'from server.models.history import History, HistoryContent\n'), ((3303, 3330), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (3317, 3330), False, 'from server import db\n'), ((3492, 3558), 'flask.jsonify', 'jsonify', (["{'status': 'danger', 'msg': 'No changes were specified.'}"], {}), "({'status': 'danger', 'msg': 'No changes were specified.'})\n", (3499, 3558), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((3634, 3699), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', '"""Images"""', '"""Images edited"""'], {}), "(new_history.history_id, 'Images', 'Images edited')\n", (3648, 3699), False, 'from server.models.history import History, HistoryContent\n'), ((3703, 3730), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (3717, 3730), False, 'from server import db\n'), ((5572, 5593), 'server.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (5591, 5593), False, 'from server import db\n'), ((5604, 5707), 'flask.jsonify', 'jsonify', (["{'status': 'error', 'msg':\n 'Something went wrong. Please refresh the page and try again.'}"], {}), "({'status': 'error', 'msg':\n 'Something went wrong. Please refresh the page and try again.'})\n", (5611, 5707), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((7984, 8022), 'server.models.properties.PropertyImgs', 'PropertyImgs', (['property.property_id', '""""""'], {}), "(property.property_id, '')\n", (7996, 8022), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((8026, 8047), 'server.db.session.add', 'db.session.add', (['image'], {}), '(image)\n', (8040, 8047), False, 'from server import db\n'), ((8052, 8070), 'server.db.session.flush', 'db.session.flush', ([], {}), '()\n', (8068, 8070), False, 'from server import db\n'), ((8668, 8689), 'server.db.session.rollback', 'db.session.rollback', ([], {}), '()\n', (8687, 8689), False, 'from server import db\n'), ((8700, 8741), 'flask.jsonify', 'jsonify', (["{'msg': msg, 'status': 'danger'}"], {}), "({'msg': msg, 'status': 'danger'})\n", (8707, 8741), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((10447, 10499), 'flask.url_for', 'url_for', (['"""administration_property.property_settings"""'], {}), "('administration_property.property_settings')\n", (10454, 10499), False, 'from flask import Blueprint, render_template, flash, request, redirect, url_for, abort, jsonify\n'), ((7798, 7845), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', 'k', 'c[k]'], {}), '(new_history.history_id, k, c[k])\n', (7812, 7845), False, 'from server.models.history import History, HistoryContent\n'), ((7851, 7878), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (7865, 7878), False, 'from server import db\n'), ((8364, 8419), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', '"""Image"""', 'output'], {}), "(new_history.history_id, 'Image', output)\n", (8378, 8419), False, 'from server.models.history import History, HistoryContent\n'), ((8425, 8452), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (8439, 8452), False, 'from server import db\n'), ((3390, 3437), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', 'k', 'c[k]'], {}), '(new_history.history_id, k, c[k])\n', (3404, 3437), False, 'from server.models.history import History, HistoryContent\n'), ((3444, 3471), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (3458, 3471), False, 'from server import db\n'), ((4063, 4101), 'server.models.properties.PropertyImgs', 'PropertyImgs', (['property.property_id', '""""""'], {}), "(property.property_id, '')\n", (4075, 4101), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((4107, 4128), 'server.db.session.add', 'db.session.add', (['image'], {}), '(image)\n', (4121, 4128), False, 'from server import db\n'), ((4135, 4153), 'server.db.session.flush', 'db.session.flush', ([], {}), '()\n', (4151, 4153), False, 'from server import db\n'), ((4634, 4664), 'server.models.properties.PropertyImgs.query.get', 'PropertyImgs.query.get', (['img_id'], {}), '(img_id)\n', (4656, 4664), False, 'from server.models.properties import Properties, PropertyImgs\n'), ((4768, 4785), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (4783, 4785), False, 'from datetime import datetime\n'), ((5231, 5289), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', '"""Image"""', '"""deleted"""'], {}), "(new_history.history_id, 'Image', 'deleted')\n", (5245, 5289), False, 'from server.models.history import History, HistoryContent\n'), ((5296, 5323), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (5310, 5323), False, 'from server import db\n'), ((5330, 5356), 'server.db.session.delete', 'db.session.delete', (['del_img'], {}), '(del_img)\n', (5347, 5356), False, 'from server import db\n'), ((4469, 4524), 'server.models.history.HistoryContent', 'HistoryContent', (['new_history.history_id', '"""Image"""', 'output'], {}), "(new_history.history_id, 'Image', output)\n", (4483, 4524), False, 'from server.models.history import History, HistoryContent\n'), ((4532, 4559), 'server.db.session.add', 'db.session.add', (['new_content'], {}), '(new_content)\n', (4546, 4559), False, 'from server import db\n')]
|
# Copyright (C) 2019 The Raphielscape Company LLC.
#
# Licensed under the Raphielscape Public License, Version 1.c (the "License");
# you may not use this file except in compliance with the License.
#
""" Userbot module for keeping control who PM you. """
from telethon.tl.functions.contacts import BlockRequest, UnblockRequest
from telethon.tl.functions.messages import ReportSpamRequest
from telethon.tl.types import User
from sqlalchemy.exc import IntegrityError
from userbot import (COUNT_PM, CMD_HELP, BOTLOG, BOTLOG_CHATID, PM_AUTO_BAN,
LASTMSG, LOGS)
from userbot.events import register
# ========================= CONSTANTS ============================
UNAPPROVED_MSG = (
    "`Hello! This is Sultan this side.\n\n`"
    "`Thank you for trusting Shark. I will respond to you soon.`"
    "`Please wait for me to look in; leave your query in my inbox and wait for a reply.\n\n`"
    "`And please, do not send too many texts to my inbox, as I am not always free and it disturbs me.`")
# =================================================================
@register(incoming=True, disable_edited=True, disable_errors=True)
async def permitpm(event):
""" Prohibits people from PMing you without approval. \
Will block retarded nibbas automatically. """
if PM_AUTO_BAN:
self_user = await event.client.get_me()
if event.is_private and event.chat_id != 777000 and event.chat_id != self_user.id and not (
await event.get_sender()).bot:
try:
from userbot.modules.sql_helper.pm_permit_sql import is_approved
from userbot.modules.sql_helper.globals import gvarstatus
except AttributeError:
return
apprv = is_approved(event.chat_id)
notifsoff = gvarstatus("NOTIF_OFF")
            # This is basically a sanity check: if the message sent before
            # was already the unapproved-PM notice, don't send it again,
            # so we avoid hitting Telegram's flood limits.
if not apprv and event.text != UNAPPROVED_MSG:
if event.chat_id in LASTMSG:
prevmsg = LASTMSG[event.chat_id]
                    # If the message isn't the same as the previous one,
                    # send the unapproved-PM notice again
if event.text != prevmsg:
async for message in event.client.iter_messages(
event.chat_id,
from_user='me',
search=UNAPPROVED_MSG):
await message.delete()
await event.reply(UNAPPROVED_MSG)
LASTMSG.update({event.chat_id: event.text})
else:
await event.reply(UNAPPROVED_MSG)
LASTMSG.update({event.chat_id: event.text})
if notifsoff:
await event.client.send_read_acknowledge(event.chat_id)
if event.chat_id not in COUNT_PM:
COUNT_PM.update({event.chat_id: 1})
else:
COUNT_PM[event.chat_id] = COUNT_PM[event.chat_id] + 1
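            # After the fifth unapproved message, block and report the sender.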
if COUNT_PM[event.chat_id] > 4:
await event.respond(
"`You were spamming my PM, which I didn't like.`\n"
"`You have been BLOCKED and reported as SPAM, until further notice.`"
)
try:
del COUNT_PM[event.chat_id]
del LASTMSG[event.chat_id]
except KeyError:
if BOTLOG:
await event.client.send_message(
BOTLOG_CHATID,
"Count PM is seemingly going retard, plis restart bot!",
)
LOGS.info("CountPM wen't rarted boi")
return
await event.client(BlockRequest(event.chat_id))
await event.client(ReportSpamRequest(peer=event.chat_id))
if BOTLOG:
name = await event.client.get_entity(event.chat_id)
name0 = str(name.first_name)
await event.client.send_message(
BOTLOG_CHATID,
"[" + name0 + "](tg://user?id=" +
str(event.chat_id) + ")" +
" was just another retarded nibba",
)
@register(disable_edited=True, outgoing=True, disable_errors=True)
async def auto_accept(event):
""" Will approve automatically if you texted them first. """
if not PM_AUTO_BAN:
return
self_user = await event.client.get_me()
if event.is_private and event.chat_id != 777000 and event.chat_id != self_user.id and not (
await event.get_sender()).bot:
try:
from userbot.modules.sql_helper.pm_permit_sql import is_approved
from userbot.modules.sql_helper.pm_permit_sql import approve
except AttributeError:
return
chat = await event.get_chat()
if isinstance(chat, User):
if is_approved(event.chat_id) or chat.bot:
return
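            # Inspect the oldest message in the chat: if we sent it (and it
            # is not the autoresponder text), auto-approve this chat.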
async for message in event.client.iter_messages(event.chat_id,
reverse=True,
limit=1):
                if message.message != UNAPPROVED_MSG and message.from_id == self_user.id:
try:
approve(event.chat_id)
except IntegrityError:
return
if is_approved(event.chat_id) and BOTLOG:
await event.client.send_message(
BOTLOG_CHATID,
"#AUTO-APPROVED\n" + "User: " +
f"[{chat.first_name}](tg://user?id={chat.id})",
)
@register(outgoing=True, pattern="^.notifoff$")
async def notifoff(noff_event):
""" For .notifoff command, stop getting notifications from unapproved PMs. """
try:
from userbot.modules.sql_helper.globals import addgvar
except AttributeError:
await noff_event.edit("`Running on Non-SQL mode!`")
return
addgvar("NOTIF_OFF", True)
await noff_event.edit("`Notifications from unapproved PM's are silenced!`")
@register(outgoing=True, pattern="^.notifon$")
async def notifon(non_event):
""" For .notifoff command, get notifications from unapproved PMs. """
try:
from userbot.modules.sql_helper.globals import delgvar
except AttributeError:
await non_event.edit("`Running on Non-SQL mode!`")
return
delgvar("NOTIF_OFF")
await non_event.edit("`Notifications from unapproved PM's unmuted!`")
@register(outgoing=True, pattern="^.approve$")
async def approvepm(apprvpm):
""" For .approve command, give someone the permissions to PM you. """
try:
from userbot.modules.sql_helper.pm_permit_sql import approve
except AttributeError:
await apprvpm.edit("`Running on Non-SQL mode!`")
return
if apprvpm.reply_to_msg_id:
reply = await apprvpm.get_reply_message()
replied_user = await apprvpm.client.get_entity(reply.from_id)
aname = replied_user.id
name0 = str(replied_user.first_name)
uid = replied_user.id
else:
aname = await apprvpm.client.get_entity(apprvpm.chat_id)
name0 = str(aname.first_name)
uid = apprvpm.chat_id
try:
approve(uid)
except IntegrityError:
await apprvpm.edit("`User may already be approved.`")
return
await apprvpm.edit(f"[{name0}](tg://user?id={uid}) `approved to PM!`")
async for message in apprvpm.client.iter_messages(apprvpm.chat_id,
from_user='me',
search=UNAPPROVED_MSG):
await message.delete()
if BOTLOG:
await apprvpm.client.send_message(
BOTLOG_CHATID,
"#APPROVED\n" + "User: " + f"[{name0}](tg://user?id={uid})",
)
@register(outgoing=True, pattern="^.disapprove$")
async def disapprovepm(disapprvpm):
try:
from userbot.modules.sql_helper.pm_permit_sql import dissprove
except BaseException:
await disapprvpm.edit("`Running on Non-SQL mode!`")
return
if disapprvpm.reply_to_msg_id:
reply = await disapprvpm.get_reply_message()
replied_user = await disapprvpm.client.get_entity(reply.from_id)
aname = replied_user.id
name0 = str(replied_user.first_name)
dissprove(replied_user.id)
else:
dissprove(disapprvpm.chat_id)
aname = await disapprvpm.client.get_entity(disapprvpm.chat_id)
name0 = str(aname.first_name)
await disapprvpm.edit(
f"[{name0}](tg://user?id={disapprvpm.chat_id}) `Disaproved to PM!`")
if BOTLOG:
await disapprvpm.client.send_message(
BOTLOG_CHATID,
f"[{name0}](tg://user?id={disapprvpm.chat_id})"
" was disapproved to PM you.",
)
@register(outgoing=True, pattern="^.block$")
async def blockpm(block):
""" For .block command, block people from PMing you! """
if block.reply_to_msg_id:
reply = await block.get_reply_message()
replied_user = await block.client.get_entity(reply.from_id)
aname = replied_user.id
name0 = str(replied_user.first_name)
await block.client(BlockRequest(replied_user.id))
await block.edit("`You've been blocked!`")
uid = replied_user.id
else:
await block.client(BlockRequest(block.chat_id))
aname = await block.client.get_entity(block.chat_id)
await block.edit("`You've been blocked!`")
name0 = str(aname.first_name)
uid = block.chat_id
try:
from userbot.modules.sql_helper.pm_permit_sql import dissprove
dissprove(uid)
except AttributeError:
pass
if BOTLOG:
await block.client.send_message(
BOTLOG_CHATID,
"#BLOCKED\n" + "User: " + f"[{name0}](tg://user?id={uid})",
)
@register(outgoing=True, pattern="^.unblock$")
async def unblockpm(unblock):
""" For .unblock command, let people PMing you again! """
if unblock.reply_to_msg_id:
reply = await unblock.get_reply_message()
replied_user = await unblock.client.get_entity(reply.from_id)
name0 = str(replied_user.first_name)
await unblock.client(UnblockRequest(replied_user.id))
await unblock.edit("`You have been unblocked.`")
if BOTLOG:
await unblock.client.send_message(
BOTLOG_CHATID,
f"[{name0}](tg://user?id={replied_user.id})"
" was unblocc'd!.",
)
CMD_HELP.update({
"pmpermit":
"\
.approve\
\nUsage: Approves the mentioned/replied person to PM.\
\n\n.disapprove\
\nUsage: Disapproves the mentioned/replied person to PM.\
\n\n.block\
\nUsage: Blocks the person.\
\n\n.unblock\
\nUsage: Unblocks the person so they can PM you.\
\n\n.notifoff\
\nUsage: Clears/Disables any notifications of unapproved PMs.\
\n\n.notifon\
\nUsage: Allows notifications for unapproved PMs."
})
|
[
"userbot.modules.sql_helper.globals.addgvar",
"userbot.LOGS.info",
"userbot.modules.sql_helper.globals.gvarstatus",
"userbot.LASTMSG.update",
"userbot.events.register",
"userbot.modules.sql_helper.pm_permit_sql.approve",
"telethon.tl.functions.messages.ReportSpamRequest",
"userbot.modules.sql_helper.globals.delgvar",
"telethon.tl.functions.contacts.UnblockRequest",
"userbot.COUNT_PM.update",
"userbot.CMD_HELP.update",
"telethon.tl.functions.contacts.BlockRequest",
"userbot.modules.sql_helper.pm_permit_sql.dissprove",
"userbot.modules.sql_helper.pm_permit_sql.is_approved"
] |
[((1093, 1158), 'userbot.events.register', 'register', ([], {'incoming': '(True)', 'disable_edited': '(True)', 'disable_errors': '(True)'}), '(incoming=True, disable_edited=True, disable_errors=True)\n', (1101, 1158), False, 'from userbot.events import register\n'), ((4614, 4679), 'userbot.events.register', 'register', ([], {'disable_edited': '(True)', 'outgoing': '(True)', 'disable_errors': '(True)'}), '(disable_edited=True, outgoing=True, disable_errors=True)\n', (4622, 4679), False, 'from userbot.events import register\n'), ((6125, 6171), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.notifoff$"""'}), "(outgoing=True, pattern='^.notifoff$')\n", (6133, 6171), False, 'from userbot.events import register\n'), ((6575, 6620), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.notifon$"""'}), "(outgoing=True, pattern='^.notifon$')\n", (6583, 6620), False, 'from userbot.events import register\n'), ((7000, 7045), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.approve$"""'}), "(outgoing=True, pattern='^.approve$')\n", (7008, 7045), False, 'from userbot.events import register\n'), ((8365, 8413), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.disapprove$"""'}), "(outgoing=True, pattern='^.disapprove$')\n", (8373, 8413), False, 'from userbot.events import register\n'), ((9372, 9415), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.block$"""'}), "(outgoing=True, pattern='^.block$')\n", (9380, 9415), False, 'from userbot.events import register\n'), ((10422, 10467), 'userbot.events.register', 'register', ([], {'outgoing': '(True)', 'pattern': '"""^.unblock$"""'}), "(outgoing=True, pattern='^.unblock$')\n", (10430, 10467), False, 'from userbot.events import register\n'), ((11063, 11457), 'userbot.CMD_HELP.update', 'CMD_HELP.update', (['{\'pmpermit\':\n """.approve\nUsage: Approves the mentioned/replied person to PM.\n\n.disapprove\nUsage: Disapproves the mentioned/replied person to PM.\n\n.block\nUsage: Blocks the person.\n\n.unblock\nUsage: Unblocks the person so they can PM you.\n\n.notifoff\nUsage: Clears/Disables any notifications of unapproved PMs.\n\n.notifon\nUsage: Allows notifications for unapproved PMs."""\n }'], {}), '({\'pmpermit\':\n """.approve\nUsage: Approves the mentioned/replied person to PM.\n\n.disapprove\nUsage: Disapproves the mentioned/replied person to PM.\n\n.block\nUsage: Blocks the person.\n\n.unblock\nUsage: Unblocks the person so they can PM you.\n\n.notifoff\nUsage: Clears/Disables any notifications of unapproved PMs.\n\n.notifon\nUsage: Allows notifications for unapproved PMs."""\n })\n', (11078, 11457), False, 'from userbot import COUNT_PM, CMD_HELP, BOTLOG, BOTLOG_CHATID, PM_AUTO_BAN, LASTMSG, LOGS\n'), ((6465, 6491), 'userbot.modules.sql_helper.globals.addgvar', 'addgvar', (['"""NOTIF_OFF"""', '(True)'], {}), "('NOTIF_OFF', True)\n", (6472, 6491), False, 'from userbot.modules.sql_helper.globals import addgvar\n'), ((6902, 6922), 'userbot.modules.sql_helper.globals.delgvar', 'delgvar', (['"""NOTIF_OFF"""'], {}), "('NOTIF_OFF')\n", (6909, 6922), False, 'from userbot.modules.sql_helper.globals import delgvar\n'), ((7749, 7761), 'userbot.modules.sql_helper.pm_permit_sql.approve', 'approve', (['uid'], {}), '(uid)\n', (7756, 7761), False, 'from userbot.modules.sql_helper.pm_permit_sql import approve\n'), ((8878, 8904), 'userbot.modules.sql_helper.pm_permit_sql.dissprove', 'dissprove', (['replied_user.id'], {}), '(replied_user.id)\n', (8887, 8904), False, 'from userbot.modules.sql_helper.pm_permit_sql import dissprove\n'), ((8923, 8952), 'userbot.modules.sql_helper.pm_permit_sql.dissprove', 'dissprove', (['disapprvpm.chat_id'], {}), '(disapprvpm.chat_id)\n', (8932, 8952), False, 'from userbot.modules.sql_helper.pm_permit_sql import dissprove\n'), ((10198, 10212), 'userbot.modules.sql_helper.pm_permit_sql.dissprove', 'dissprove', (['uid'], {}), '(uid)\n', (10207, 10212), False, 'from userbot.modules.sql_helper.pm_permit_sql import dissprove\n'), ((1765, 1791), 'userbot.modules.sql_helper.pm_permit_sql.is_approved', 'is_approved', (['event.chat_id'], {}), '(event.chat_id)\n', (1776, 1791), False, 'from userbot.modules.sql_helper.pm_permit_sql import is_approved\n'), ((1816, 1839), 'userbot.modules.sql_helper.globals.gvarstatus', 'gvarstatus', (['"""NOTIF_OFF"""'], {}), "('NOTIF_OFF')\n", (1826, 1839), False, 'from userbot.modules.sql_helper.globals import gvarstatus\n'), ((5299, 5325), 'userbot.modules.sql_helper.pm_permit_sql.is_approved', 'is_approved', (['event.chat_id'], {}), '(event.chat_id)\n', (5310, 5325), False, 'from userbot.modules.sql_helper.pm_permit_sql import is_approved\n'), ((9753, 9782), 'telethon.tl.functions.contacts.BlockRequest', 'BlockRequest', (['replied_user.id'], {}), '(replied_user.id)\n', (9765, 9782), False, 'from telethon.tl.functions.contacts import BlockRequest, UnblockRequest\n'), ((9902, 9929), 'telethon.tl.functions.contacts.BlockRequest', 'BlockRequest', (['block.chat_id'], {}), '(block.chat_id)\n', (9914, 9929), False, 'from telethon.tl.functions.contacts import BlockRequest, UnblockRequest\n'), ((10786, 10817), 'telethon.tl.functions.contacts.UnblockRequest', 'UnblockRequest', (['replied_user.id'], {}), '(replied_user.id)\n', (10800, 10817), False, 'from telethon.tl.functions.contacts import BlockRequest, UnblockRequest\n'), ((2700, 2743), 'userbot.LASTMSG.update', 'LASTMSG.update', (['{event.chat_id: event.text}'], {}), '({event.chat_id: event.text})\n', (2714, 2743), False, 'from userbot import COUNT_PM, CMD_HELP, BOTLOG, BOTLOG_CHATID, PM_AUTO_BAN, LASTMSG, LOGS\n'), ((2840, 2883), 'userbot.LASTMSG.update', 'LASTMSG.update', (['{event.chat_id: event.text}'], {}), '({event.chat_id: event.text})\n', (2854, 2883), False, 'from userbot import COUNT_PM, CMD_HELP, BOTLOG, BOTLOG_CHATID, PM_AUTO_BAN, LASTMSG, LOGS\n'), ((3061, 3096), 'userbot.COUNT_PM.update', 'COUNT_PM.update', (['{event.chat_id: 1}'], {}), '({event.chat_id: 1})\n', (3076, 3096), False, 'from userbot import COUNT_PM, CMD_HELP, BOTLOG, BOTLOG_CHATID, PM_AUTO_BAN, LASTMSG, LOGS\n'), ((5841, 5867), 'userbot.modules.sql_helper.pm_permit_sql.is_approved', 'is_approved', (['event.chat_id'], {}), '(event.chat_id)\n', (5852, 5867), False, 'from userbot.modules.sql_helper.pm_permit_sql import is_approved\n'), ((5724, 5746), 'userbot.modules.sql_helper.pm_permit_sql.approve', 'approve', (['event.chat_id'], {}), '(event.chat_id)\n', (5731, 5746), False, 'from userbot.modules.sql_helper.pm_permit_sql import approve\n'), ((3927, 3964), 'userbot.LOGS.info', 'LOGS.info', (['"""CountPM wen\'t rarted boi"""'], {}), '("CountPM wen\'t rarted boi")\n', (3936, 3964), False, 'from userbot import COUNT_PM, CMD_HELP, BOTLOG, BOTLOG_CHATID, PM_AUTO_BAN, LASTMSG, LOGS\n'), ((4036, 4063), 'telethon.tl.functions.contacts.BlockRequest', 'BlockRequest', (['event.chat_id'], {}), '(event.chat_id)\n', (4048, 4063), False, 'from telethon.tl.functions.contacts import BlockRequest, UnblockRequest\n'), ((4104, 4141), 'telethon.tl.functions.messages.ReportSpamRequest', 'ReportSpamRequest', ([], {'peer': 'event.chat_id'}), '(peer=event.chat_id)\n', (4121, 4141), False, 'from telethon.tl.functions.messages import ReportSpamRequest\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This is a simple script for generating data."""
import os
from openfermion.chem import make_atomic_ring
from openfermionpyscf import run_pyscf
if __name__ == '__main__':
# Set chemical parameters.
basis = 'sto-3g'
max_electrons = 10
spacing = 0.7414
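    # 0.7414 angstroms is the equilibrium bond length of molecular hydrogen.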
# Select calculations.
force_recompute = 1
run_scf = 1
run_mp2 = 1
run_cisd = 1
run_ccsd = 1
run_fci = 1
verbose = 1
# Generate data.
for n_electrons in range(2, max_electrons + 1):
# Initialize.
molecule = make_atomic_ring(n_electrons, spacing, basis)
if os.path.exists(molecule.filename + '.hdf5'):
molecule.load()
# To run or not to run.
if run_scf and not molecule.hf_energy:
run_job = 1
elif run_mp2 and not molecule.mp2_energy:
run_job = 1
elif run_cisd and not molecule.cisd_energy:
run_job = 1
elif run_ccsd and not molecule.ccsd_energy:
run_job = 1
elif run_fci and not molecule.fci_energy:
run_job = 1
else:
run_job = force_recompute
# Run.
if run_job:
molecule = run_pyscf(molecule,
run_scf=run_scf,
run_mp2=run_mp2,
run_cisd=run_cisd,
run_ccsd=run_ccsd,
run_fci=run_fci,
verbose=verbose)
molecule.save()
|
[
"openfermionpyscf.run_pyscf",
"os.path.exists",
"openfermion.chem.make_atomic_ring"
] |
[((1103, 1148), 'openfermion.chem.make_atomic_ring', 'make_atomic_ring', (['n_electrons', 'spacing', 'basis'], {}), '(n_electrons, spacing, basis)\n', (1119, 1148), False, 'from openfermion.chem import make_atomic_ring\n'), ((1160, 1203), 'os.path.exists', 'os.path.exists', (["(molecule.filename + '.hdf5')"], {}), "(molecule.filename + '.hdf5')\n", (1174, 1203), False, 'import os\n'), ((1748, 1877), 'openfermionpyscf.run_pyscf', 'run_pyscf', (['molecule'], {'run_scf': 'run_scf', 'run_mp2': 'run_mp2', 'run_cisd': 'run_cisd', 'run_ccsd': 'run_ccsd', 'run_fci': 'run_fci', 'verbose': 'verbose'}), '(molecule, run_scf=run_scf, run_mp2=run_mp2, run_cisd=run_cisd,\n run_ccsd=run_ccsd, run_fci=run_fci, verbose=verbose)\n', (1757, 1877), False, 'from openfermionpyscf import run_pyscf\n')]
|
from dagster import List, Nullable, Int, PipelineDefinition
from dagster.core.types.runtime import resolve_to_runtime_type, ALL_RUNTIME_BUILTINS
def inner_type_key_set(runtime_type):
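    # Collect the keys of every type nested inside a composite runtime type,
    # e.g. List(Nullable(Int)) contains both Int and Nullable.Int.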
return {t.key for t in runtime_type.inner_types}
def test_inner_types():
assert resolve_to_runtime_type(Int).inner_types == []
list_int_runtime = resolve_to_runtime_type(List(Int))
assert inner_type_key_set(list_int_runtime) == set(['Int'])
list_list_int_runtime = resolve_to_runtime_type(List(List(Int)))
assert inner_type_key_set(list_list_int_runtime) == set(['Int', 'List.Int'])
list_nullable_int_runtime = resolve_to_runtime_type(List(Nullable(Int)))
assert inner_type_key_set(list_nullable_int_runtime) == set(['Int', 'Nullable.Int'])
def test_display_name():
int_runtime = resolve_to_runtime_type(Int)
assert int_runtime.display_name == 'Int'
list_int_runtime = resolve_to_runtime_type(List(Int))
assert list_int_runtime.display_name == '[Int]'
list_list_int_runtime = resolve_to_runtime_type(List(List(Int)))
assert list_list_int_runtime.display_name == '[[Int]]'
list_nullable_int_runtime = resolve_to_runtime_type(List(Nullable(Int)))
assert list_nullable_int_runtime.display_name == '[Int?]'
def test_builtins_available():
pipeline = PipelineDefinition(name='test_builting_available', solids=[])
for builtin_type in ALL_RUNTIME_BUILTINS:
assert pipeline.has_runtime_type(builtin_type.name)
assert pipeline.runtime_type_named(builtin_type.name).is_builtin
|
[
"dagster.List",
"dagster.PipelineDefinition",
"dagster.core.types.runtime.resolve_to_runtime_type",
"dagster.Nullable"
] |
[((810, 838), 'dagster.core.types.runtime.resolve_to_runtime_type', 'resolve_to_runtime_type', (['Int'], {}), '(Int)\n', (833, 838), False, 'from dagster.core.types.runtime import resolve_to_runtime_type, ALL_RUNTIME_BUILTINS\n'), ((1309, 1370), 'dagster.PipelineDefinition', 'PipelineDefinition', ([], {'name': '"""test_builting_available"""', 'solids': '[]'}), "(name='test_builting_available', solids=[])\n", (1327, 1370), False, 'from dagster import List, Nullable, Int, PipelineDefinition\n'), ((371, 380), 'dagster.List', 'List', (['Int'], {}), '(Int)\n', (375, 380), False, 'from dagster import List, Nullable, Int, PipelineDefinition\n'), ((931, 940), 'dagster.List', 'List', (['Int'], {}), '(Int)\n', (935, 940), False, 'from dagster import List, Nullable, Int, PipelineDefinition\n'), ((276, 304), 'dagster.core.types.runtime.resolve_to_runtime_type', 'resolve_to_runtime_type', (['Int'], {}), '(Int)\n', (299, 304), False, 'from dagster.core.types.runtime import resolve_to_runtime_type, ALL_RUNTIME_BUILTINS\n'), ((504, 513), 'dagster.List', 'List', (['Int'], {}), '(Int)\n', (508, 513), False, 'from dagster import List, Nullable, Int, PipelineDefinition\n'), ((659, 672), 'dagster.Nullable', 'Nullable', (['Int'], {}), '(Int)\n', (667, 672), False, 'from dagster import List, Nullable, Int, PipelineDefinition\n'), ((1051, 1060), 'dagster.List', 'List', (['Int'], {}), '(Int)\n', (1055, 1060), False, 'from dagster import List, Nullable, Int, PipelineDefinition\n'), ((1183, 1196), 'dagster.Nullable', 'Nullable', (['Int'], {}), '(Int)\n', (1191, 1196), False, 'from dagster import List, Nullable, Int, PipelineDefinition\n')]
|
#Python Code Random Option Chooser
#imported random module
import random
print("Hello...! This is random option chooser program. ")
#define a empty list to store options
options = []
#variable for run the infinite while loop
inf=0
#start the while loop
while inf==0:
print("\nChoose a number to continue.")
print(" \t01) Add the options. \n \t02) Choose a random option. \n \t03) How to use. \n \t04) About the program. \n \t05) Exit")
a = input("Your option : ")
#if cond. to store options in the list
if a=="1":
del options[:]
c = int(input("\nEnter the number of options : "))
if c==0:
print("\nNumber of options cannot be zero. Please enter more than one option.")
continue
elif c==1:
print("\nYou have enter number of options as one. Please enter more than one option for better performance.")
continue
else:
for d in range(c):
                options.append( input("\nEnter option "+ str(d+1) +" : ") )
    #if cond. to choose a random option from the list. The list must already hold options.
    if a=="2":
        if not options:
            print("\nPlease add the options first by choosing number one.")
            continue
        e = options[random.randint(0,c-1)]
        print("\nThe randomly chosen option for you is : " + e)
        continue
#How to use print line
if a=="3":
print("\nFirst enter number one in the terminal and add your options to the program.\nThen to get a random option, enter number two in the terminal.\nIf you want to edit the options, you can enter number two again and add the options.\nEnter number four to know about the program.\nTo exit from the program enter number five.")
continue
#About the program print line
if a=="4":
print("\nYou can a choose a random option with this program.\nFor a example if you want to choose a random name from a list of name, you can enter those names first and then get a random name without an infleunce from anyone and anything.")
continue
#exit from the infinite while loop
if a=="5":
print("\nThank you for using this program. Good Bye !")
inf=1
quit()
|
[
"random.randint"
] |
[((1193, 1217), 'random.randint', 'random.randint', (['(0)', '(c - 1)'], {}), '(0, c - 1)\n', (1207, 1217), False, 'import random\n')]
|
import discord
import asyncio
import time
from datetime import datetime
from discord.ext import commands
from discord.ext.commands import Cog
from helpers.robocronp import add_job, get_crontab
class Remind(Cog):
def __init__(self, bot):
self.bot = bot
@commands.cooldown(1, 60, type=commands.BucketType.user)
@commands.command()
async def remindlist(self, ctx):
"""Lists your reminders."""
ctab = get_crontab()
uid = str(ctx.author.id)
embed = discord.Embed(title=f"Active robocronp jobs")
for jobtimestamp in ctab["remind"]:
if uid not in ctab["remind"][jobtimestamp]:
continue
job_details = ctab["remind"][jobtimestamp][uid]
expiry_timestr = datetime.utcfromtimestamp(int(jobtimestamp)).strftime(
"%Y-%m-%d %H:%M:%S (UTC)"
)
embed.add_field(
name=f"Reminder for {expiry_timestr}",
value=f"Added on: {job_details['added']}, "
f"Text: {job_details['text']}",
inline=False,
)
await ctx.send(embed=embed)
@commands.cooldown(1, 60, type=commands.BucketType.user)
@commands.command(aliases=["remindme"])
async def remind(self, ctx, when: str, *, text: str = "something"):
"""Reminds you about something."""
if ctx.guild:
await ctx.message.delete()
current_timestamp = time.time()
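        # parse_time resolves the human-friendly 'when' argument into an
        # absolute UNIX timestamp comparable against time.time().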
expiry_timestamp = self.bot.parse_time(when)
if current_timestamp + 5 > expiry_timestamp:
msg = await ctx.send(
f"{ctx.author.mention}: Minimum remind interval is 5 seconds."
)
await asyncio.sleep(5)
await msg.delete()
return
expiry_datetime = datetime.utcfromtimestamp(expiry_timestamp)
duration_text = self.bot.get_relative_timestamp(
time_to=expiry_datetime, include_to=True, humanized=True
)
safe_text = await commands.clean_content().convert(ctx, str(text))
added_on = datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S (UTC)")
add_job(
"remind",
ctx.author.id,
{"text": safe_text, "added": added_on},
expiry_timestamp,
)
msg = await ctx.send(
f"{ctx.author.mention}: I'll remind you in "
f"DMs about `{safe_text}` in {duration_text}."
)
await asyncio.sleep(5)
await msg.delete()
def setup(bot):
bot.add_cog(Remind(bot))
|
[
"discord.ext.commands.command",
"discord.Embed",
"asyncio.sleep",
"helpers.robocronp.get_crontab",
"time.time",
"discord.ext.commands.cooldown",
"datetime.datetime.utcfromtimestamp",
"datetime.datetime.utcnow",
"discord.ext.commands.clean_content",
"helpers.robocronp.add_job"
] |
[((272, 327), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(60)'], {'type': 'commands.BucketType.user'}), '(1, 60, type=commands.BucketType.user)\n', (289, 327), False, 'from discord.ext import commands\n'), ((333, 351), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (349, 351), False, 'from discord.ext import commands\n'), ((1152, 1207), 'discord.ext.commands.cooldown', 'commands.cooldown', (['(1)', '(60)'], {'type': 'commands.BucketType.user'}), '(1, 60, type=commands.BucketType.user)\n', (1169, 1207), False, 'from discord.ext import commands\n'), ((1213, 1251), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['remindme']"}), "(aliases=['remindme'])\n", (1229, 1251), False, 'from discord.ext import commands\n'), ((440, 453), 'helpers.robocronp.get_crontab', 'get_crontab', ([], {}), '()\n', (451, 453), False, 'from helpers.robocronp import add_job, get_crontab\n'), ((503, 548), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""Active robocronp jobs"""'}), "(title=f'Active robocronp jobs')\n", (516, 548), False, 'import discord\n'), ((1456, 1467), 'time.time', 'time.time', ([], {}), '()\n', (1465, 1467), False, 'import time\n'), ((1814, 1857), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['expiry_timestamp'], {}), '(expiry_timestamp)\n', (1839, 1857), False, 'from datetime import datetime\n'), ((2152, 2246), 'helpers.robocronp.add_job', 'add_job', (['"""remind"""', 'ctx.author.id', "{'text': safe_text, 'added': added_on}", 'expiry_timestamp'], {}), "('remind', ctx.author.id, {'text': safe_text, 'added': added_on},\n expiry_timestamp)\n", (2159, 2246), False, 'from helpers.robocronp import add_job, get_crontab\n'), ((2473, 2489), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (2486, 2489), False, 'import asyncio\n'), ((1720, 1736), 'asyncio.sleep', 'asyncio.sleep', (['(5)'], {}), '(5)\n', (1733, 1736), False, 'import asyncio\n'), ((2089, 2106), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2104, 2106), False, 'from datetime import datetime\n'), ((2021, 2045), 'discord.ext.commands.clean_content', 'commands.clean_content', ([], {}), '()\n', (2043, 2045), False, 'from discord.ext import commands\n')]
|
# MktdataPublisher.py
from __future__ import print_function
from __future__ import absolute_import
import time
from optparse import OptionParser, OptionValueError
import datetime
import threading
import os
import platform as plat
import sys
if sys.version_info >= (3, 8) and plat.system().lower() == "windows":
# pylint: disable=no-member
with os.add_dll_directory(os.getenv('BLPAPI_LIBDIR')):
import blpapi
else:
import blpapi
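# Interned blpapi message/element names this publisher compares against and formats.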
PERMISSION_REQUEST = blpapi.Name("PermissionRequest")
RESOLUTION_SUCCESS = blpapi.Name("ResolutionSuccess")
SESSION_TERMINATED = blpapi.Name("SessionTerminated")
TOPICS = blpapi.Name("topics")
TOPIC_CREATED = blpapi.Name("TopicCreated")
TOPIC_SUBSCRIBED = blpapi.Name("TopicSubscribed")
TOPIC_UNSUBSCRIBED = blpapi.Name("TopicUnsubscribed")
TOPIC_RECAP = blpapi.Name("TopicRecap")
class MyStream(object):
def __init__(self, sid="", fields=None):
self.id = sid
self.fields = fields if fields else []
self.lastValue = 0
self.topic = blpapi.Topic()
self.isSubscribed = False
def fillData(self, eventFormatter, elementDef):
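        # Emit a synthetic, type-appropriate value for every subscribed field.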
for i, f in enumerate(self.fields):
if not elementDef.typeDefinition().hasElementDefinition(f):
print("Invalid field '%s'" % f)
continue
fieldDef = elementDef.typeDefinition().getElementDefinition(f)
fieldType = fieldDef.typeDefinition().datatype()
value = None
if fieldType == blpapi.DataType.BOOL:
value = bool((self.lastValue + i) % 2 == 0)
elif fieldType == blpapi.DataType.CHAR:
value = chr((self.lastValue + i) % 100 + 32)
elif fieldType == blpapi.DataType.INT32 or \
fieldType == blpapi.DataType.INT64:
value = self.lastValue + i
elif fieldType == blpapi.DataType.FLOAT32 or \
fieldType == blpapi.DataType.FLOAT64:
value = (self.lastValue + i) * 1.1
elif fieldType == blpapi.DataType.STRING:
value = "S%d" % (self.lastValue + i)
elif fieldType == blpapi.DataType.DATE or \
fieldType == blpapi.DataType.TIME or \
fieldType == blpapi.DataType.DATETIME:
                value = datetime.datetime.today()
                # replace() returns a new object, so the result must be
                # reassigned; floor division keeps "day" a valid int.
                value = value.replace(day=(self.lastValue // 100) % 28 + 1,
                                      microsecond=i * 1000)
eventFormatter.setElement(f, value)
def fillDataNull(self, eventFormatter, elementDef):
for f in self.fields:
if not elementDef.typeDefinition().hasElementDefinition(f):
print("Invalid field '%s'" % f)
continue
fieldDef = elementDef.typeDefinition().getElementDefinition(f)
if fieldDef.typeDefinition().isSimpleType():
# Publishing NULL value
eventFormatter.setElementNull(f)
def next(self):
self.lastValue += 1
def isAvailable(self):
return self.topic.isValid() and self.isSubscribed
class MyEventHandler(object):
def __init__(self,
serviceName,
messageType,
fields,
eids,
resolveSubServiceCode,
mutex,
stop,
condition):
self.serviceName = serviceName
self.messageType = messageType
self.fields = fields
self.eids = eids
self.resolveSubServiceCode = resolveSubServiceCode
self.mutex = mutex
self.stop = stop
self.condition = condition
self.streams = dict()
self.availableTopicCount = 0
def processEvent(self, event, session):
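        # Dispatch incoming events: track topic lifecycle, serve recaps, answer permission requests.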
if event.eventType() == blpapi.Event.SESSION_STATUS:
for msg in event:
print(msg)
if msg.messageType() == SESSION_TERMINATED:
self.stop.set()
elif event.eventType() == blpapi.Event.TOPIC_STATUS:
topicList = blpapi.TopicList()
for msg in event:
print(msg)
if msg.messageType() == TOPIC_SUBSCRIBED:
topicStr = msg.getElementAsString("topic")
with self.mutex:
if topicStr not in self.streams:
# TopicList knows how to add an entry based on a
# TOPIC_SUBSCRIBED message.
topicList.add(msg)
self.streams[topicStr] = MyStream(topicStr,
self.fields)
stream = self.streams[topicStr]
stream.isSubscribed = True
if stream.isAvailable():
self.availableTopicCount += 1
self.condition.notifyAll()
elif msg.messageType() == TOPIC_UNSUBSCRIBED:
topicStr = msg.getElementAsString("topic")
with self.mutex:
if topicStr not in self.streams:
# We should never be coming here.
# TOPIC_UNSUBSCRIBED can not come before
# a TOPIC_SUBSCRIBED or TOPIC_CREATED
continue
stream = self.streams[topicStr]
if stream.isAvailable():
self.availableTopicCount -= 1
self.condition.notifyAll()
stream.isSubscribed = False
elif msg.messageType() == TOPIC_CREATED:
topicStr = msg.getElementAsString("topic")
with self.mutex:
if topicStr not in self.streams:
self.streams[topicStr] = MyStream(topicStr,
self.fields)
stream = self.streams[topicStr]
try:
stream.topic = session.getTopic(msg)
except blpapi.Exception as e:
print("Exception while processing " \
"TOPIC_CREATED: %s" % e)
continue
if stream.isAvailable():
self.availableTopicCount += 1
self.condition.notifyAll()
elif msg.messageType() == TOPIC_RECAP:
# Here we send a recap in response to a Recap request.
try:
topicStr = msg.getElementAsString("topic")
recapEvent = None
with self.mutex:
if topicStr not in self.streams:
continue
stream = self.streams[topicStr]
if not stream.isAvailable():
continue
topic = session.getTopic(msg)
service = topic.service()
recapCid = msg.correlationIds()[0]
recapEvent = service.createPublishEvent()
elementDef = \
service.getEventDefinition(self.messageType)
eventFormatter = blpapi.EventFormatter(recapEvent)
eventFormatter.appendRecapMessage(topic, recapCid)
stream.fillData(eventFormatter, elementDef)
session.publish(recapEvent)
except blpapi.Exception as e:
print("Exception while processing TOPIC_RECAP: %s" % e)
continue
if topicList.size() > 0:
# createTopicsAsync will result in RESOLUTION_STATUS,
# TOPIC_CREATED events.
session.createTopicsAsync(topicList)
elif event.eventType() == blpapi.Event.RESOLUTION_STATUS:
for msg in event:
print(msg)
elif event.eventType() == blpapi.Event.REQUEST:
service = session.getService(self.serviceName)
for msg in event:
print(msg)
if msg.messageType() == PERMISSION_REQUEST:
# Similar to createPublishEvent. We assume just one
# service - self.serviceName. A responseEvent can only be
# for single request so we can specify the correlationId -
# which establishes context - when we create the Event.
response = \
service.createResponseEvent(msg.correlationIds()[0])
permission = 1 # ALLOWED: 0, DENIED: 1
ef = blpapi.EventFormatter(response)
if msg.hasElement("uuid"):
msg.getElementAsInteger("uuid")
permission = 0
if msg.hasElement("applicationId"):
msg.getElementAsInteger("applicationId")
permission = 0
# In appendResponse the string is the name of the
# operation, the correlationId indicates which request we
# are responding to.
ef.appendResponse("PermissionResponse")
ef.pushElement("topicPermissions")
# For each of the topics in the request, add an entry to
# the response.
topicsElement = msg.getElement(TOPICS).values()
for topic in topicsElement:
ef.appendElement()
ef.setElement("topic", topic)
if self.resolveSubServiceCode:
try:
ef.setElement("subServiceCode",
self.resolveSubServiceCode)
print(("Mapping topic %s to subServiceCode %s" %
(topic, self.resolveSubServiceCode)))
except blpapi.Exception:
print("subServiceCode could not be set."
" Resolving without subServiceCode")
ef.setElement("result", permission)
if permission == 1: # DENIED
ef.pushElement("reason")
ef.setElement("source", "My Publisher Name")
ef.setElement("category", "NOT_AUTHORIZED")
ef.setElement("subcategory",
"Publisher Controlled")
ef.setElement(
"description",
"Permission denied by My Publisher Name")
ef.popElement()
elif self.eids:
ef.pushElement("permissions")
ef.appendElement()
ef.setElement("permissionService", "//blp/blpperm")
ef.pushElement("eids")
for e in self.eids:
ef.appendValue(e)
ef.popElement()
ef.popElement()
ef.popElement()
ef.popElement()
ef.popElement()
# Service is implicit in the Event. sendResponse has a
# second parameter - partialResponse - that defaults to
# false.
session.sendResponse(response)
else:
for msg in event:
print(msg)
return True
def authOptionCallback(_option, _opt, value, parser):
"""Parse authorization options from user input"""
vals = value.split('=', 1)
if value == "user":
authUser = blpapi.AuthUser.createWithLogonName()
authOptions = blpapi.AuthOptions.createWithUser(authUser)
elif value == "none":
authOptions = None
elif vals[0] == "app" and len(vals) == 2:
appName = vals[1]
authOptions = blpapi.AuthOptions.createWithApp(appName)
elif vals[0] == "userapp" and len(vals) == 2:
appName = vals[1]
authUser = blpapi.AuthUser.createWithLogonName()
authOptions = blpapi.AuthOptions\
.createWithUserAndApp(authUser, appName)
elif vals[0] == "dir" and len(vals) == 2:
activeDirectoryProperty = vals[1]
authUser = blpapi.AuthUser\
.createWithActiveDirectoryProperty(activeDirectoryProperty)
authOptions = blpapi.AuthOptions.createWithUser(authUser)
elif vals[0] == "manual":
parts = []
if len(vals) == 2:
parts = vals[1].split(',')
if len(parts) != 3:
raise OptionValueError("Invalid auth option {}".format(value))
appName, ip, userId = parts
authUser = blpapi.AuthUser.createWithManualOptions(userId, ip)
authOptions = blpapi.AuthOptions.createWithUserAndApp(authUser, appName)
else:
raise OptionValueError("Invalid auth option '{}'".format(value))
parser.values.auth = {'option' : authOptions}
def parseCmdLine():
parser = OptionParser(description="Publish market data.")
parser.add_option("-a",
"--ip",
dest="hosts",
help="server name or IP (default: localhost)",
metavar="ipAddress",
action="append",
default=[])
parser.add_option("-p",
dest="port",
type="int",
help="server port (default: %default)",
metavar="tcpPort",
default=8194)
parser.add_option("-s",
dest="service",
help="service name (default: %default)",
metavar="service",
default="//viper/mktdata")
parser.add_option("-f",
dest="fields",
help="field to subscribe to (default: LAST_PRICE)",
metavar="field",
action="append",
default=[])
parser.add_option("-m",
dest="messageType",
help="type of published event (default: %default)",
metavar="messageType",
default="MarketDataEvents")
parser.add_option("-e",
dest="eids",
help="permission eid for all subscriptions",
metavar="EID",
action="append",
default=[])
parser.add_option("-g",
dest="groupId",
help="publisher groupId (defaults to unique value)",
metavar="groupId")
parser.add_option("-r",
"--pri",
type="int",
dest="priority",
help="set publisher priority level (default: %default)",
metavar="priority",
default=10)
parser.add_option("-c",
type="int",
dest="clearInterval",
help="number of events after which cache will be "
"cleared (default: 0 i.e cache never cleared)",
metavar="clearInterval",
default=0)
parser.add_option("--auth",
dest="auth",
help="authentication option: "
"user|none|app=<app>|userapp=<app>|dir=<property>"
"|manual=<app,ip,user>"
" (default: user)\n"
"'none' is applicable to Desktop API product "
"that requires Bloomberg Professional service "
"to be installed locally.",
metavar="option",
action="callback",
callback=authOptionCallback,
type="string",
default={"option" :
blpapi.AuthOptions.createWithUser(
blpapi.AuthUser.createWithLogonName())})
parser.add_option("--ssc",
dest="ssc",
help="active sub-service code option: "
"<begin>,<end>,<priority>",
metavar="ssc",
default="")
parser.add_option("--rssc",
dest="rssc",
help="sub-service code to be used in resolves",
metavar="rssc",
default="")
(options, _) = parser.parse_args()
if not options.hosts:
options.hosts = ["localhost"]
if not options.fields:
options.fields = ["LAST_PRICE"]
return options
def activate(options, session):
if options.ssc:
sscBegin, sscEnd, sscPriority = map(int, options.ssc.split(","))
print(("Activating sub service code range [%s, %s] @ %s" %
(sscBegin, sscEnd, sscPriority)))
session.activateSubServiceCodeRange(options.service,
sscBegin,
sscEnd,
sscPriority)
def deactivate(options, session):
if options.ssc:
sscBegin, sscEnd, sscPriority = map(int, options.ssc.split(","))
print(("DeActivating sub service code range [%s, %s] @ %s" %
(sscBegin, sscEnd, sscPriority)))
session.deactivateSubServiceCodeRange(options.service,
sscBegin,
sscEnd)
def main():
options = parseCmdLine()
# Fill SessionOptions
sessionOptions = blpapi.SessionOptions()
for idx, host in enumerate(options.hosts):
sessionOptions.setServerAddress(host, options.port, idx)
sessionOptions.setSessionIdentityOptions(options.auth['option'])
sessionOptions.setAutoRestartOnDisconnection(True)
# NOTE: If running without a backup server, make many attempts to
# connect/reconnect to give that host a chance to come back up (the
# larger the number, the longer it will take for SessionStartupFailure
# to come on startup, or SessionTerminated due to inability to fail
# over). We don't have to do that in a redundant configuration - it's
# expected at least one server is up and reachable at any given time,
# so only try to connect to each server once.
sessionOptions.setNumStartAttempts(1 if len(options.hosts) > 1 else 1000)
print("Connecting to port %d on %s" % (
options.port, " ".join(options.hosts)))
PUBLISH_MESSAGE_TYPE = blpapi.Name(options.messageType)
mutex = threading.Lock()
stop = threading.Event()
condition = threading.Condition(mutex)
myEventHandler = MyEventHandler(options.service,
PUBLISH_MESSAGE_TYPE,
options.fields,
options.eids,
options.rssc,
mutex,
stop,
condition)
# Create a Session
session = blpapi.ProviderSession(sessionOptions,
myEventHandler.processEvent)
# Start a Session
if not session.start():
print("Failed to start session.")
return
serviceOptions = blpapi.ServiceRegistrationOptions()
if options.groupId is not None:
serviceOptions.setGroupId(options.groupId)
serviceOptions.setServicePriority(options.priority)
if options.ssc:
sscBegin, sscEnd, sscPriority = map(int, options.ssc.split(","))
print(("Adding active sub service code range [%s, %s] @ %s" %
(sscBegin, sscEnd, sscPriority)))
try:
serviceOptions.addActiveSubServiceCodeRange(sscBegin,
sscEnd,
sscPriority)
except blpapi.Exception as e:
print(("FAILED to add active sub service codes."
" Exception %s" % e.description()))
try:
if not session.registerService(options.service,
session.getAuthorizedIdentity(),
serviceOptions):
print("Failed to register '%s'" % options.service)
return
service = session.getService(options.service)
elementDef = service.getEventDefinition(PUBLISH_MESSAGE_TYPE)
eventCount = 0
numPublished = 0
while not stop.is_set():
event = service.createPublishEvent()
with condition:
while myEventHandler.availableTopicCount == 0:
# Set timeout to 1 - give a chance for CTRL-C
condition.wait(1)
if stop.is_set():
return
publishNull = False
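                # Every clearInterval events, publish NULL values once to flush stale cached fields.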
if (options.clearInterval > 0 and
eventCount == options.clearInterval):
eventCount = 0
publishNull = True
eventFormatter = blpapi.EventFormatter(event)
for _,stream in myEventHandler.streams.items():
if not stream.isAvailable():
continue
eventFormatter.appendMessage(PUBLISH_MESSAGE_TYPE,
stream.topic)
if publishNull:
stream.fillDataNull(eventFormatter, elementDef)
else:
eventCount += 1
stream.next()
stream.fillData(eventFormatter, elementDef)
for msg in event:
print(msg)
session.publish(event)
time.sleep(1)
numPublished += 1
if numPublished % 10 == 0:
deactivate(options, session)
time.sleep(30)
activate(options, session)
finally:
# Stop the session
session.stop()
if __name__ == "__main__":
print("MktdataPublisher")
try:
main()
except KeyboardInterrupt:
print("Ctrl+C pressed. Stopping...")
__copyright__ = """
Copyright 2012. Bloomberg Finance L.P.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: The above
copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
|
[
"optparse.OptionParser",
"blpapi.Topic",
"blpapi.AuthOptions.createWithUser",
"blpapi.AuthOptions.createWithApp",
"blpapi.AuthUser.createWithLogonName",
"blpapi.AuthUser.createWithManualOptions",
"threading.Condition",
"threading.Lock",
"blpapi.SessionOptions",
"threading.Event",
"blpapi.ProviderSession",
"blpapi.ServiceRegistrationOptions",
"datetime.datetime.today",
"blpapi.AuthOptions.createWithUserAndApp",
"time.sleep",
"platform.system",
"os.getenv",
"blpapi.EventFormatter",
"blpapi.TopicList",
"blpapi.Name",
"blpapi.AuthUser.createWithActiveDirectoryProperty"
] |
[((472, 504), 'blpapi.Name', 'blpapi.Name', (['"""PermissionRequest"""'], {}), "('PermissionRequest')\n", (483, 504), False, 'import blpapi\n'), ((526, 558), 'blpapi.Name', 'blpapi.Name', (['"""ResolutionSuccess"""'], {}), "('ResolutionSuccess')\n", (537, 558), False, 'import blpapi\n'), ((580, 612), 'blpapi.Name', 'blpapi.Name', (['"""SessionTerminated"""'], {}), "('SessionTerminated')\n", (591, 612), False, 'import blpapi\n'), ((622, 643), 'blpapi.Name', 'blpapi.Name', (['"""topics"""'], {}), "('topics')\n", (633, 643), False, 'import blpapi\n'), ((660, 687), 'blpapi.Name', 'blpapi.Name', (['"""TopicCreated"""'], {}), "('TopicCreated')\n", (671, 687), False, 'import blpapi\n'), ((707, 737), 'blpapi.Name', 'blpapi.Name', (['"""TopicSubscribed"""'], {}), "('TopicSubscribed')\n", (718, 737), False, 'import blpapi\n'), ((759, 791), 'blpapi.Name', 'blpapi.Name', (['"""TopicUnsubscribed"""'], {}), "('TopicUnsubscribed')\n", (770, 791), False, 'import blpapi\n'), ((806, 831), 'blpapi.Name', 'blpapi.Name', (['"""TopicRecap"""'], {}), "('TopicRecap')\n", (817, 831), False, 'import blpapi\n'), ((13662, 13710), 'optparse.OptionParser', 'OptionParser', ([], {'description': '"""Publish market data."""'}), "(description='Publish market data.')\n", (13674, 13710), False, 'from optparse import OptionParser, OptionValueError\n'), ((18452, 18475), 'blpapi.SessionOptions', 'blpapi.SessionOptions', ([], {}), '()\n', (18473, 18475), False, 'import blpapi\n'), ((19400, 19432), 'blpapi.Name', 'blpapi.Name', (['options.messageType'], {}), '(options.messageType)\n', (19411, 19432), False, 'import blpapi\n'), ((19445, 19461), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (19459, 19461), False, 'import threading\n'), ((19473, 19490), 'threading.Event', 'threading.Event', ([], {}), '()\n', (19488, 19490), False, 'import threading\n'), ((19507, 19533), 'threading.Condition', 'threading.Condition', (['mutex'], {}), '(mutex)\n', (19526, 19533), False, 'import threading\n'), ((19968, 20035), 'blpapi.ProviderSession', 'blpapi.ProviderSession', (['sessionOptions', 'myEventHandler.processEvent'], {}), '(sessionOptions, myEventHandler.processEvent)\n', (19990, 20035), False, 'import blpapi\n'), ((20203, 20238), 'blpapi.ServiceRegistrationOptions', 'blpapi.ServiceRegistrationOptions', ([], {}), '()\n', (20236, 20238), False, 'import blpapi\n'), ((1020, 1034), 'blpapi.Topic', 'blpapi.Topic', ([], {}), '()\n', (1032, 1034), False, 'import blpapi\n'), ((12302, 12339), 'blpapi.AuthUser.createWithLogonName', 'blpapi.AuthUser.createWithLogonName', ([], {}), '()\n', (12337, 12339), False, 'import blpapi\n'), ((12362, 12405), 'blpapi.AuthOptions.createWithUser', 'blpapi.AuthOptions.createWithUser', (['authUser'], {}), '(authUser)\n', (12395, 12405), False, 'import blpapi\n'), ((375, 401), 'os.getenv', 'os.getenv', (['"""BLPAPI_LIBDIR"""'], {}), "('BLPAPI_LIBDIR')\n", (384, 401), False, 'import os\n'), ((22704, 22717), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (22714, 22717), False, 'import time\n'), ((277, 290), 'platform.system', 'plat.system', ([], {}), '()\n', (288, 290), True, 'import platform as plat\n'), ((4078, 4096), 'blpapi.TopicList', 'blpapi.TopicList', ([], {}), '()\n', (4094, 4096), False, 'import blpapi\n'), ((12553, 12594), 'blpapi.AuthOptions.createWithApp', 'blpapi.AuthOptions.createWithApp', (['appName'], {}), '(appName)\n', (12585, 12594), False, 'import blpapi\n'), ((22009, 22037), 'blpapi.EventFormatter', 'blpapi.EventFormatter', (['event'], {}), '(event)\n', (22030, 22037), False, 'import blpapi\n'), ((22848, 22862), 'time.sleep', 'time.sleep', (['(30)'], {}), '(30)\n', (22858, 22862), False, 'import time\n'), ((12690, 12727), 'blpapi.AuthUser.createWithLogonName', 'blpapi.AuthUser.createWithLogonName', ([], {}), '()\n', (12725, 12727), False, 'import blpapi\n'), ((12750, 12808), 'blpapi.AuthOptions.createWithUserAndApp', 'blpapi.AuthOptions.createWithUserAndApp', (['authUser', 'appName'], {}), '(authUser, appName)\n', (12789, 12808), False, 'import blpapi\n'), ((16789, 16826), 'blpapi.AuthUser.createWithLogonName', 'blpapi.AuthUser.createWithLogonName', ([], {}), '()\n', (16824, 16826), False, 'import blpapi\n'), ((12930, 13004), 'blpapi.AuthUser.createWithActiveDirectoryProperty', 'blpapi.AuthUser.createWithActiveDirectoryProperty', (['activeDirectoryProperty'], {}), '(activeDirectoryProperty)\n', (12979, 13004), False, 'import blpapi\n'), ((13041, 13084), 'blpapi.AuthOptions.createWithUser', 'blpapi.AuthOptions.createWithUser', (['authUser'], {}), '(authUser)\n', (13074, 13084), False, 'import blpapi\n'), ((13361, 13412), 'blpapi.AuthUser.createWithManualOptions', 'blpapi.AuthUser.createWithManualOptions', (['userId', 'ip'], {}), '(userId, ip)\n', (13400, 13412), False, 'import blpapi\n'), ((13435, 13493), 'blpapi.AuthOptions.createWithUserAndApp', 'blpapi.AuthOptions.createWithUserAndApp', (['authUser', 'appName'], {}), '(authUser, appName)\n', (13474, 13493), False, 'import blpapi\n'), ((9016, 9047), 'blpapi.EventFormatter', 'blpapi.EventFormatter', (['response'], {}), '(response)\n', (9037, 9047), False, 'import blpapi\n'), ((2326, 2351), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (2349, 2351), False, 'import datetime\n'), ((7553, 7586), 'blpapi.EventFormatter', 'blpapi.EventFormatter', (['recapEvent'], {}), '(recapEvent)\n', (7574, 7586), False, 'import blpapi\n')]
|
from __future__ import print_function
import socket
import os
import sys
import pdb
import pprint
import builtins
import threading as mt
from inspect import currentframe, getframeinfo
from importlib import reload
from contextlib import contextmanager
from pprint import PrettyPrinter
from gcd.nix import flock, sh
__all__ = ["reload", "echo", "lecho", "pecho", "trace", "brk", "rbrk", "fbrk", "fixrl"]
def install_builtins():
for attr in __all__:
setattr(builtins, attr, globals()[attr])
def echo(*args, **kwargs):
file = kwargs.pop("file", sys.stderr)
print(*args, file=file, flush=True, **kwargs)
def lecho(*args, **kwargs):
info = getframeinfo(currentframe().f_back)
path = os.path.relpath(info.filename)
echo("[%s:%s]" % (path, info.lineno), *args, **kwargs)
def pecho(obj, classes=None, file=sys.stderr, *args, **kwargs):
with patched_pprint(classes):
pprint.pprint(obj, stream=file, *args, **kwargs)
file.flush()
def trace(fun):
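    # Decorator: echo each call and its result as an indented tree, one "| " per nesting level.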
def prefix(n, m=0):
return "| " * n + " " * m
def pformat(prefix, obj):
with patched_pprint():
text = pprint.pformat(obj, compact=True, width=cols - len(prefix))
return text.replace("\n", "\n" + prefix)
def traced(*args, **kwargs):
level = getattr(trace._local, "level", 0)
col = min(level, 10)
name = fun.__name__
echo(
"%s%s%s"
% (
prefix(col),
name,
pformat(prefix(col + 1, len(name) - 2), (args, kwargs)),
)
)
try:
trace._local.level = level + 1
res = fun(*args, **kwargs)
return res
except Exception as err:
res = err
raise
finally:
echo("%s`> %s" % (prefix(col), pformat(prefix(col, 3), res)))
trace._local.level -= 1
cols = int(sh("stty size|").split()[1])
return traced
trace._local = mt.local()
brk = pdb.set_trace
def rbrk(port=4000, host="localhost"):
rdb = RemotePdb((host, port))
rdb.set_trace(frame=sys._getframe().f_back)
def fbrk():
fdb = ForkablePdb()
fdb.set_trace(frame=sys._getframe().f_back)
def fixrl():
# Workaround until 3.5.2: fix readline notion of current terminal width.
# https://bugs.python.org/issue23735
import ctypes
ctypes.cdll["libreadline.so"].rl_resize_terminal()
print("Readline fixed.")
@contextmanager
def patched_pprint(classes=None):
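    # Temporarily monkey-patch pprint internals so plain objects render as (qualname, vars(obj)).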
def simplify(obj):
if (
obj.__class__.__repr__ not in PrettyPrinter._dispatch
and (not classes or obj.__class__ in classes)
and hasattr(obj, "__dict__")
):
obj = (obj.__class__.__qualname__, vars(obj))
return obj
def new_safe_repr(obj, *args, **kwargs):
return old_safe_repr(simplify(obj), *args, **kwargs)
old_safe_repr, pprint._safe_repr = pprint._safe_repr, new_safe_repr
def new_format(self, obj, *args, **kwargs):
return old_format(self, simplify(obj), *args, **kwargs)
old_format, PrettyPrinter._format = PrettyPrinter._format, new_format
yield
pprint._safe_repr, PrettyPrinter._format = old_safe_repr, old_format
class RemotePdb(pdb.Pdb):
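    # Pdb served over a raw TCP socket; attach with any plain client, e.g. "nc localhost 4000".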
def __init__(self, address):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
sock.bind(address)
with flock("/tmp/rdb_lock"):
echo(">> rdb listening at %s:%s..." % sock.getsockname())
sock.listen(1)
conn, address = sock.accept()
echo(" connection accepted.\n")
self.cfile = conn.makefile("rw")
pdb.Pdb.__init__(self, stdin=self.cfile, stdout=self.cfile)
class ForkablePdb(pdb.Pdb):
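    # Pdb usable after fork(): reattaches stdin in the child and serializes sessions via a file lock.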
pid = None
def __init__(self):
pdb.Pdb.__init__(self, nosigint=True)
def interaction(self, frame, traceback):
with flock("/tmp/fdb_lock"):
if ForkablePdb.pid != os.getpid():
sys.stdin = os.fdopen(0)
ForkablePdb.pid = os.getpid()
pdb.Pdb.interaction(self, frame, traceback)
def _cmdloop(self):
self.cmdloop()
|
[
"os.getpid",
"pdb.Pdb.interaction",
"socket.socket",
"sys._getframe",
"threading.local",
"gcd.nix.flock",
"pprint.pprint",
"os.path.relpath",
"pdb.Pdb.__init__",
"os.fdopen",
"inspect.currentframe",
"gcd.nix.sh"
] |
[((1992, 2002), 'threading.local', 'mt.local', ([], {}), '()\n', (2000, 2002), True, 'import threading as mt\n'), ((716, 746), 'os.path.relpath', 'os.path.relpath', (['info.filename'], {}), '(info.filename)\n', (731, 746), False, 'import os\n'), ((914, 962), 'pprint.pprint', 'pprint.pprint', (['obj', '*args'], {'stream': 'file'}), '(obj, *args, stream=file, **kwargs)\n', (927, 962), False, 'import pprint\n'), ((3340, 3389), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (3353, 3389), False, 'import socket\n'), ((3756, 3815), 'pdb.Pdb.__init__', 'pdb.Pdb.__init__', (['self'], {'stdin': 'self.cfile', 'stdout': 'self.cfile'}), '(self, stdin=self.cfile, stdout=self.cfile)\n', (3772, 3815), False, 'import pdb\n'), ((3895, 3932), 'pdb.Pdb.__init__', 'pdb.Pdb.__init__', (['self'], {'nosigint': '(True)'}), '(self, nosigint=True)\n', (3911, 3932), False, 'import pdb\n'), ((682, 696), 'inspect.currentframe', 'currentframe', ([], {}), '()\n', (694, 696), False, 'from inspect import currentframe, getframeinfo\n'), ((3500, 3522), 'gcd.nix.flock', 'flock', (['"""/tmp/rdb_lock"""'], {}), "('/tmp/rdb_lock')\n", (3505, 3522), False, 'from gcd.nix import flock, sh\n'), ((3992, 4014), 'gcd.nix.flock', 'flock', (['"""/tmp/fdb_lock"""'], {}), "('/tmp/fdb_lock')\n", (3997, 4014), False, 'from gcd.nix import flock, sh\n'), ((4162, 4205), 'pdb.Pdb.interaction', 'pdb.Pdb.interaction', (['self', 'frame', 'traceback'], {}), '(self, frame, traceback)\n', (4181, 4205), False, 'import pdb\n'), ((2124, 2139), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (2137, 2139), False, 'import sys\n'), ((2210, 2225), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (2223, 2225), False, 'import sys\n'), ((4050, 4061), 'os.getpid', 'os.getpid', ([], {}), '()\n', (4059, 4061), False, 'import os\n'), ((4091, 4103), 'os.fdopen', 'os.fdopen', (['(0)'], {}), '(0)\n', (4100, 4103), False, 'import os\n'), ((4138, 4149), 'os.getpid', 'os.getpid', ([], {}), '()\n', (4147, 4149), False, 'import os\n'), ((1928, 1944), 'gcd.nix.sh', 'sh', (['"""stty size|"""'], {}), "('stty size|')\n", (1930, 1944), False, 'from gcd.nix import flock, sh\n')]
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the tests of the http protocol package."""
import sys
from typing import Type
from unittest import mock
import pytest
from aea.common import Address
from aea.exceptions import AEAEnforceError
from aea.mail.base import Envelope
from aea.protocols.base import Message
from aea.protocols.dialogue.base import Dialogue as BaseDialogue
from aea.protocols.dialogue.base import DialogueLabel
import packages
from packages.fetchai.protocols.http.dialogues import HttpDialogue, HttpDialogues
from packages.fetchai.protocols.http.message import HttpMessage
from packages.fetchai.protocols.http.message import (
_default_logger as http_message_logger,
)
from tests.conftest import ROOT_DIR
sys.path.append(ROOT_DIR)
def test_request_serialization():
"""Test the serialization for 'request' speech-act works."""
msg = HttpMessage(
performative=HttpMessage.Performative.REQUEST,
method="some_method",
url="url",
version="some_version",
headers="some_headers",
body=b"some_body",
)
msg.to = "receiver"
envelope = Envelope(to=msg.to, sender="sender", message=msg,)
envelope_bytes = envelope.encode()
actual_envelope = Envelope.decode(envelope_bytes)
expected_envelope = envelope
assert expected_envelope.to == actual_envelope.to
assert expected_envelope.sender == actual_envelope.sender
assert (
expected_envelope.protocol_specification_id
== actual_envelope.protocol_specification_id
)
assert expected_envelope.message != actual_envelope.message
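    # The decoded envelope still carries the serialized message bytes, not a Message object.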
actual_msg = HttpMessage.serializer.decode(actual_envelope.message)
actual_msg.to = actual_envelope.to
actual_msg.sender = actual_envelope.sender
expected_msg = msg
assert expected_msg == actual_msg
def test_response_serialization():
"""Test the serialization for 'response' speech-act works."""
msg = HttpMessage(
message_id=2,
target=1,
performative=HttpMessage.Performative.RESPONSE,
version="some_version",
status_code=1,
status_text="some_status_text",
headers="some_headers",
body=b"some_body",
)
msg.to = "receiver"
envelope = Envelope(to=msg.to, sender="sender", message=msg,)
envelope_bytes = envelope.encode()
actual_envelope = Envelope.decode(envelope_bytes)
expected_envelope = envelope
assert expected_envelope.to == actual_envelope.to
assert expected_envelope.sender == actual_envelope.sender
assert (
expected_envelope.protocol_specification_id
== actual_envelope.protocol_specification_id
)
assert expected_envelope.message != actual_envelope.message
actual_msg = HttpMessage.serializer.decode(actual_envelope.message)
actual_msg.to = actual_envelope.to
actual_msg.sender = actual_envelope.sender
expected_msg = msg
assert expected_msg == actual_msg
def test_performative_string_value():
"""Test the string value of the performatives."""
assert (
str(HttpMessage.Performative.REQUEST) == "request"
), "The str value must be request"
assert (
str(HttpMessage.Performative.RESPONSE) == "response"
), "The str value must be response"
def test_encoding_unknown_performative():
"""Test that we raise an exception when the performative is unknown during encoding."""
msg = HttpMessage(
performative=HttpMessage.Performative.REQUEST,
method="some_method",
url="url",
version="some_version",
headers="some_headers",
body=b"some_body",
)
with pytest.raises(ValueError, match="Performative not valid:"):
with mock.patch.object(HttpMessage.Performative, "__eq__", return_value=False):
HttpMessage.serializer.encode(msg)
def test_decoding_unknown_performative():
"""Test that we raise an exception when the performative is unknown during decoding."""
msg = HttpMessage(
performative=HttpMessage.Performative.REQUEST,
method="some_method",
url="url",
version="some_version",
headers="some_headers",
body=b"some_body",
)
encoded_msg = HttpMessage.serializer.encode(msg)
with pytest.raises(ValueError, match="Performative not valid:"):
with mock.patch.object(HttpMessage.Performative, "__eq__", return_value=False):
HttpMessage.serializer.decode(encoded_msg)
@mock.patch.object(
packages.fetchai.protocols.http.message,
"enforce",
side_effect=AEAEnforceError("some error"),
)
def test_incorrect_message(mocked_enforce):
"""Test that we raise an exception when the message is incorrect."""
with mock.patch.object(http_message_logger, "error") as mock_logger:
HttpMessage(
performative=HttpMessage.Performative.REQUEST,
method="some_method",
url="url",
version="some_version",
headers="some_headers",
body=b"some_body",
)
mock_logger.assert_any_call("some error")
class TestDialogues:
"""Tests http dialogues."""
@classmethod
def setup_class(cls):
"""Set up the test."""
cls.agent_addr = "agent address"
cls.server_addr = "server address"
cls.agent_dialogues = AgentDialogues(cls.agent_addr)
cls.server_dialogues = ServerDialogues(cls.server_addr)
def test_create_self_initiated(self):
"""Test the self initialisation of a dialogue."""
result = self.agent_dialogues._create_self_initiated(
dialogue_opponent_addr=self.server_addr,
dialogue_reference=(str(0), ""),
role=HttpDialogue.Role.CLIENT,
)
assert isinstance(result, HttpDialogue)
assert result.role == HttpDialogue.Role.CLIENT, "The role must be client."
def test_create_opponent_initiated(self):
"""Test the opponent initialisation of a dialogue."""
result = self.agent_dialogues._create_opponent_initiated(
dialogue_opponent_addr=self.server_addr,
dialogue_reference=(str(0), ""),
role=HttpDialogue.Role.CLIENT,
)
assert isinstance(result, HttpDialogue)
assert result.role == HttpDialogue.Role.CLIENT, "The role must be client."
class AgentDialogue(HttpDialogue):
"""The dialogue class maintains state of a dialogue and manages it."""
def __init__(
self,
dialogue_label: DialogueLabel,
self_address: Address,
role: BaseDialogue.Role,
message_class: Type[HttpMessage],
) -> None:
"""
Initialize a dialogue.
:param dialogue_label: the identifier of the dialogue
:param self_address: the address of the entity for whom this dialogue is maintained
:param role: the role of the agent this dialogue is maintained for
:return: None
"""
HttpDialogue.__init__(
self,
dialogue_label=dialogue_label,
self_address=self_address,
role=role,
message_class=message_class,
)
class AgentDialogues(HttpDialogues):
"""The dialogues class keeps track of all dialogues."""
def __init__(self, self_address: Address) -> None:
"""
Initialize dialogues.
:return: None
"""
def role_from_first_message( # pylint: disable=unused-argument
message: Message, receiver_address: Address
) -> BaseDialogue.Role:
"""Infer the role of the agent from an incoming/outgoing first message
:param message: an incoming/outgoing first message
:param receiver_address: the address of the receiving agent
:return: The role of the agent
"""
return HttpDialogue.Role.CLIENT
HttpDialogues.__init__(
self,
self_address=self_address,
role_from_first_message=role_from_first_message,
dialogue_class=AgentDialogue,
)
class ServerDialogue(HttpDialogue):
"""The dialogue class maintains state of a dialogue and manages it."""
def __init__(
self,
dialogue_label: DialogueLabel,
self_address: Address,
role: BaseDialogue.Role,
message_class: Type[HttpMessage],
) -> None:
"""
Initialize a dialogue.
:param dialogue_label: the identifier of the dialogue
:param self_address: the address of the entity for whom this dialogue is maintained
:param role: the role of the agent this dialogue is maintained for
:return: None
"""
HttpDialogue.__init__(
self,
dialogue_label=dialogue_label,
self_address=self_address,
role=role,
message_class=message_class,
)
class ServerDialogues(HttpDialogues):
"""The dialogues class keeps track of all dialogues."""
def __init__(self, self_address: Address) -> None:
"""
Initialize dialogues.
:return: None
"""
def role_from_first_message( # pylint: disable=unused-argument
message: Message, receiver_address: Address
) -> BaseDialogue.Role:
"""Infer the role of the agent from an incoming/outgoing first message
:param message: an incoming/outgoing first message
:param receiver_address: the address of the receiving agent
:return: The role of the agent
"""
return HttpDialogue.Role.SERVER
HttpDialogues.__init__(
self,
self_address=self_address,
role_from_first_message=role_from_first_message,
dialogue_class=ServerDialogue,
)
|
[
"sys.path.append",
"unittest.mock.patch.object",
"aea.mail.base.Envelope.decode",
"packages.fetchai.protocols.http.message.HttpMessage",
"packages.fetchai.protocols.http.dialogues.HttpDialogues.__init__",
"packages.fetchai.protocols.http.message.HttpMessage.serializer.decode",
"aea.mail.base.Envelope",
"pytest.raises",
"packages.fetchai.protocols.http.dialogues.HttpDialogue.__init__",
"packages.fetchai.protocols.http.message.HttpMessage.serializer.encode",
"aea.exceptions.AEAEnforceError"
] |
[((1510, 1535), 'sys.path.append', 'sys.path.append', (['ROOT_DIR'], {}), '(ROOT_DIR)\n', (1525, 1535), False, 'import sys\n'), ((1647, 1815), 'packages.fetchai.protocols.http.message.HttpMessage', 'HttpMessage', ([], {'performative': 'HttpMessage.Performative.REQUEST', 'method': '"""some_method"""', 'url': '"""url"""', 'version': '"""some_version"""', 'headers': '"""some_headers"""', 'body': "b'some_body'"}), "(performative=HttpMessage.Performative.REQUEST, method=\n 'some_method', url='url', version='some_version', headers=\n 'some_headers', body=b'some_body')\n", (1658, 1815), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((1900, 1949), 'aea.mail.base.Envelope', 'Envelope', ([], {'to': 'msg.to', 'sender': '"""sender"""', 'message': 'msg'}), "(to=msg.to, sender='sender', message=msg)\n", (1908, 1949), False, 'from aea.mail.base import Envelope\n'), ((2013, 2044), 'aea.mail.base.Envelope.decode', 'Envelope.decode', (['envelope_bytes'], {}), '(envelope_bytes)\n', (2028, 2044), False, 'from aea.mail.base import Envelope\n'), ((2400, 2454), 'packages.fetchai.protocols.http.message.HttpMessage.serializer.decode', 'HttpMessage.serializer.decode', (['actual_envelope.message'], {}), '(actual_envelope.message)\n', (2429, 2454), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((2715, 2922), 'packages.fetchai.protocols.http.message.HttpMessage', 'HttpMessage', ([], {'message_id': '(2)', 'target': '(1)', 'performative': 'HttpMessage.Performative.RESPONSE', 'version': '"""some_version"""', 'status_code': '(1)', 'status_text': '"""some_status_text"""', 'headers': '"""some_headers"""', 'body': "b'some_body'"}), "(message_id=2, target=1, performative=HttpMessage.Performative.\n RESPONSE, version='some_version', status_code=1, status_text=\n 'some_status_text', headers='some_headers', body=b'some_body')\n", (2726, 2922), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((3023, 3072), 'aea.mail.base.Envelope', 'Envelope', ([], {'to': 'msg.to', 'sender': '"""sender"""', 'message': 'msg'}), "(to=msg.to, sender='sender', message=msg)\n", (3031, 3072), False, 'from aea.mail.base import Envelope\n'), ((3136, 3167), 'aea.mail.base.Envelope.decode', 'Envelope.decode', (['envelope_bytes'], {}), '(envelope_bytes)\n', (3151, 3167), False, 'from aea.mail.base import Envelope\n'), ((3523, 3577), 'packages.fetchai.protocols.http.message.HttpMessage.serializer.decode', 'HttpMessage.serializer.decode', (['actual_envelope.message'], {}), '(actual_envelope.message)\n', (3552, 3577), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((4190, 4358), 'packages.fetchai.protocols.http.message.HttpMessage', 'HttpMessage', ([], {'performative': 'HttpMessage.Performative.REQUEST', 'method': '"""some_method"""', 'url': '"""url"""', 'version': '"""some_version"""', 'headers': '"""some_headers"""', 'body': "b'some_body'"}), "(performative=HttpMessage.Performative.REQUEST, method=\n 'some_method', url='url', version='some_version', headers=\n 'some_headers', body=b'some_body')\n", (4201, 4358), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((4755, 4923), 'packages.fetchai.protocols.http.message.HttpMessage', 'HttpMessage', ([], {'performative': 'HttpMessage.Performative.REQUEST', 'method': '"""some_method"""', 'url': '"""url"""', 'version': '"""some_version"""', 'headers': '"""some_headers"""', 'body': "b'some_body'"}), "(performative=HttpMessage.Performative.REQUEST, method=\n 'some_method', url='url', version='some_version', headers=\n 'some_headers', body=b'some_body')\n", (4766, 4923), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((4988, 5022), 'packages.fetchai.protocols.http.message.HttpMessage.serializer.encode', 'HttpMessage.serializer.encode', (['msg'], {}), '(msg)\n', (5017, 5022), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((4414, 4472), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Performative not valid:"""'}), "(ValueError, match='Performative not valid:')\n", (4427, 4472), False, 'import pytest\n'), ((5032, 5090), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""Performative not valid:"""'}), "(ValueError, match='Performative not valid:')\n", (5045, 5090), False, 'import pytest\n'), ((5492, 5539), 'unittest.mock.patch.object', 'mock.patch.object', (['http_message_logger', '"""error"""'], {}), "(http_message_logger, 'error')\n", (5509, 5539), False, 'from unittest import mock\n'), ((5564, 5732), 'packages.fetchai.protocols.http.message.HttpMessage', 'HttpMessage', ([], {'performative': 'HttpMessage.Performative.REQUEST', 'method': '"""some_method"""', 'url': '"""url"""', 'version': '"""some_version"""', 'headers': '"""some_headers"""', 'body': "b'some_body'"}), "(performative=HttpMessage.Performative.REQUEST, method=\n 'some_method', url='url', version='some_version', headers=\n 'some_headers', body=b'some_body')\n", (5575, 5732), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((5333, 5362), 'aea.exceptions.AEAEnforceError', 'AEAEnforceError', (['"""some error"""'], {}), "('some error')\n", (5348, 5362), False, 'from aea.exceptions import AEAEnforceError\n'), ((7719, 7849), 'packages.fetchai.protocols.http.dialogues.HttpDialogue.__init__', 'HttpDialogue.__init__', (['self'], {'dialogue_label': 'dialogue_label', 'self_address': 'self_address', 'role': 'role', 'message_class': 'message_class'}), '(self, dialogue_label=dialogue_label, self_address=\n self_address, role=role, message_class=message_class)\n', (7740, 7849), False, 'from packages.fetchai.protocols.http.dialogues import HttpDialogue, HttpDialogues\n'), ((8640, 8783), 'packages.fetchai.protocols.http.dialogues.HttpDialogues.__init__', 'HttpDialogues.__init__', (['self'], {'self_address': 'self_address', 'role_from_first_message': 'role_from_first_message', 'dialogue_class': 'AgentDialogue'}), '(self, self_address=self_address,\n role_from_first_message=role_from_first_message, dialogue_class=\n AgentDialogue)\n', (8662, 8783), False, 'from packages.fetchai.protocols.http.dialogues import HttpDialogue, HttpDialogues\n'), ((9456, 9586), 'packages.fetchai.protocols.http.dialogues.HttpDialogue.__init__', 'HttpDialogue.__init__', (['self'], {'dialogue_label': 'dialogue_label', 'self_address': 'self_address', 'role': 'role', 'message_class': 'message_class'}), '(self, dialogue_label=dialogue_label, self_address=\n self_address, role=role, message_class=message_class)\n', (9477, 9586), False, 'from packages.fetchai.protocols.http.dialogues import HttpDialogue, HttpDialogues\n'), ((10378, 10522), 'packages.fetchai.protocols.http.dialogues.HttpDialogues.__init__', 'HttpDialogues.__init__', (['self'], {'self_address': 'self_address', 'role_from_first_message': 'role_from_first_message', 'dialogue_class': 'ServerDialogue'}), '(self, self_address=self_address,\n role_from_first_message=role_from_first_message, dialogue_class=\n ServerDialogue)\n', (10400, 10522), False, 'from packages.fetchai.protocols.http.dialogues import HttpDialogue, HttpDialogues\n'), ((4487, 4560), 'unittest.mock.patch.object', 'mock.patch.object', (['HttpMessage.Performative', '"""__eq__"""'], {'return_value': '(False)'}), "(HttpMessage.Performative, '__eq__', return_value=False)\n", (4504, 4560), False, 'from unittest import mock\n'), ((4574, 4608), 'packages.fetchai.protocols.http.message.HttpMessage.serializer.encode', 'HttpMessage.serializer.encode', (['msg'], {}), '(msg)\n', (4603, 4608), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n'), ((5105, 5178), 'unittest.mock.patch.object', 'mock.patch.object', (['HttpMessage.Performative', '"""__eq__"""'], {'return_value': '(False)'}), "(HttpMessage.Performative, '__eq__', return_value=False)\n", (5122, 5178), False, 'from unittest import mock\n'), ((5192, 5234), 'packages.fetchai.protocols.http.message.HttpMessage.serializer.decode', 'HttpMessage.serializer.decode', (['encoded_msg'], {}), '(encoded_msg)\n', (5221, 5234), False, 'from packages.fetchai.protocols.http.message import HttpMessage\n')]
|
# -*- coding: utf-8 -*-
import os
import sys
import socket
import time
import _strptime
import requests
import xbmc
import xbmcgui
import xbmcaddon
import xbmcvfs
ADDON = xbmcaddon.Addon()
ADDONNAME = ADDON.getAddonInfo('name')
ADDONID = ADDON.getAddonInfo('id')
ADDONVERSION = ADDON.getAddonInfo('version')
CWD = ADDON.getAddonInfo('path').decode("utf-8")
RESOURCE = xbmc.translatePath( os.path.join( CWD, 'resources', 'lib' ).encode("utf-8") ).decode("utf-8")
DATAPATH = xbmc.translatePath(ADDON.getAddonInfo('profile')).decode('utf-8')
WEATHER_WINDOW = xbmcgui.Window(12600)
sys.path.append(RESOURCE)
from utils import *
LCURL = 'https://www.yahoo.com/news/_tdnews/api/resource/WeatherSearch;text=%s'
FCURL = 'https://www.yahoo.com/news/_tdnews/api/resource/WeatherService;woeids=%%5B%s%%5D'
socket.setdefaulttimeout(10)
def convert_datetime(stamp):
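    # stamp[:-5] drops the trailing UTC-offset suffix so strptime can parse the timestamp.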
timestruct = time.strptime(stamp[:-5], "%Y-%m-%dT%H:%M:%S")
if DATEFORMAT[1] == 'd' or DATEFORMAT[0] == 'D':
localdate = time.strftime('%d-%m-%Y', timestruct)
elif DATEFORMAT[1] == 'm' or DATEFORMAT[0] == 'M':
localdate = time.strftime('%m-%d-%Y', timestruct)
else:
localdate = time.strftime('%Y-%m-%d', timestruct)
if TIMEFORMAT != '/':
localtime = time.strftime('%I:%M %p', timestruct)
else:
localtime = time.strftime('%H:%M', timestruct)
return localtime + ' ' + localdate
def get_date(stamp, form):
timestruct = time.strptime(stamp[:-5], "%Y-%m-%dT%H:%M:%S")
month = time.strftime('%m', timestruct)
day = time.strftime('%d', timestruct)
if form == 'short':
if DATEFORMAT[1] == 'd' or DATEFORMAT[0] == 'D':
label = day + ' ' + xbmc.getLocalizedString(MONTHS[month])
else:
label = xbmc.getLocalizedString(MONTHS[month]) + ' ' + day
elif form == 'long':
if DATEFORMAT[1] == 'd' or DATEFORMAT[0] == 'D':
label = day + ' ' + xbmc.getLocalizedString(LMONTHS[month])
else:
label = xbmc.getLocalizedString(LMONTHS[month]) + ' ' + day
return label
def get_time(stamp):
timestruct = time.strptime(stamp[:-5], "%Y-%m-%dT%H:%M:%S")
if TIMEFORMAT != '/':
localtime = time.strftime('%I:%M %p', timestruct)
else:
localtime = time.strftime('%H:%M', timestruct)
return localtime
def convert_temp(temp):
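    # Yahoo reports Fahrenheit; Kodi expects Celsius and converts per user settings.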
celc = (float(temp)-32) * 5/9
return str(int(round(celc)))
def convert_speed(speed):
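    # Wind speed: mph to km/h.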
kmh = float(speed) * 1.609
return str(int(round(kmh)))
def convert_seconds(sec):
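    # Seconds since midnight to a wall-clock string, honouring the 12/24-hour setting.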
m, s = divmod(sec, 60)
h, m = divmod(m, 60)
hm = "%02d:%02d" % (h, m)
if TIMEFORMAT != '/':
timestruct = time.strptime(hm, "%H:%M")
hm = time.strftime('%I:%M %p', timestruct)
return hm
def log(txt):
if ADDON.getSetting('Debug') == 'true':
if isinstance (txt,str):
txt = txt.decode("utf-8")
message = u'%s: %s' % (ADDONID, txt)
xbmc.log(msg=message.encode("utf-8"), level=xbmc.LOGDEBUG)
def set_property(name, value):
WEATHER_WINDOW.setProperty(name, value)
def refresh_locations():
locations = 0
for count in range(1, 6):
loc_name = ADDON.getSetting('Location%s' % count)
if loc_name:
locations += 1
set_property('Location%s' % count, loc_name)
set_property('Locations', str(locations))
log('available locations: %s' % str(locations))
def location(loc):
locs = []
locids = []
log('searching for location: %s' % loc)
data = get_data(LCURL, loc)
log('location data: %s' % data)
if data:
for item in data:
locs.append(item['qualifiedName'])
locids.append(str(item['woeid']))
return locs, locids
def get_data(api, search):
url = api % search
try:
response = requests.get(url)
return response.json()
except:
return
def forecast(loc, locid):
log('weather location: %s' % locid)
retry = 0
while (retry < 10) and (not MONITOR.abortRequested()):
data = get_data(FCURL, locid)
if data:
# response
retry = 10
else:
# no response
retry += 1
xbmc.sleep(10000)
log('weather download failed')
log('forecast data: %s' % data)
if data:
properties(data, loc, locid)
else:
clear()
def clear():
set_property('Current.Condition' , 'N/A')
set_property('Current.Temperature' , '0')
set_property('Current.Wind' , '0')
set_property('Current.WindDirection' , 'N/A')
set_property('Current.Humidity' , '0')
set_property('Current.FeelsLike' , '0')
set_property('Current.UVIndex' , '0')
set_property('Current.DewPoint' , '0')
set_property('Current.OutlookIcon' , 'na.png')
set_property('Current.FanartCode' , 'na')
for count in range (0, MAXDAYS+1):
set_property('Day%i.Title' % count, 'N/A')
set_property('Day%i.HighTemp' % count, '0')
set_property('Day%i.LowTemp' % count, '0')
set_property('Day%i.Outlook' % count, 'N/A')
set_property('Day%i.OutlookIcon' % count, 'na.png')
set_property('Day%i.FanartCode' % count, 'na')
def properties(response, loc, locid):
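    # Map the Yahoo JSON payload onto the window properties Kodi's weather skins read.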
data = response['weathers'][0]
#current - standard
set_property('Location' , loc)
set_property('Updated' , convert_datetime(data['observation']['observationTime']['timestamp']))
set_property('Current.Location' , data['location']['displayName'])
set_property('Current.Condition' , data['observation']['conditionDescription'])
set_property('Current.Temperature' , convert_temp(data['observation']['temperature']['now']))
set_property('Current.UVIndex' , str(data['observation']['uvIndex']))
set_property('Current.OutlookIcon' , '%s.png' % str(data['observation']['conditionCode'])) # Kodi translates it to Current.ConditionIcon
set_property('Current.FanartCode' , str(data['observation']['conditionCode']))
set_property('Current.Wind' , convert_speed(data['observation']['windSpeed']))
set_property('Current.WindDirection' , xbmc.getLocalizedString(WIND_DIR(data['observation']['windDirection'])))
set_property('Current.Humidity' , str(data['observation']['humidity']))
set_property('Current.DewPoint' , dewpoint(int(convert_temp(data['observation']['temperature']['now'])), data['observation']['humidity']))
set_property('Current.FeelsLike' , convert_temp(data['observation']['temperature']['feelsLike']))
#current - extended
set_property('Current.WindChill' , TEMP(windchill(data['observation']['temperature']['now'], data['observation']['windSpeed'])) + TEMPUNIT)
if 'F' in TEMPUNIT:
set_property('Current.Visibility' , str(round(data['observation']['visibility'],2)) + ' mi')
set_property('Current.Pressure' , str(round(data['observation']['barometricPressure'],2)) + ' inHg')
else:
set_property('Current.Visibility' , str(round(1.60934 * data['observation']['visibility'],2)) + ' km')
set_property('Current.Pressure' , str(int(round((33.864 * data['observation']['barometricPressure'])))) + ' mbar')
set_property('Current.Precipitation' , str(data['observation']['precipitationProbability']) + '%')
#forecast - extended
set_property('Forecast.City' , data['location']['displayName'])
set_property('Forecast.Country' , data['location']['countryName'])
set_property('Forecast.Latitude' , str(data['location']['latitude']))
set_property('Forecast.Longitude' , str(data['location']['longitude']))
set_property('Forecast.Updated' , convert_datetime(data['observation']['observationTime']['timestamp']))
#today - extended
set_property('Today.Sunrise' , convert_seconds(data['sunAndMoon']['sunrise']))
set_property('Today.Sunset' , convert_seconds(data['sunAndMoon']['sunset']))
set_property('Today.Moonphase' , MOONPHASE[data['sunAndMoon']['moonPhase']])
#hourly - extended
for count, item in enumerate(data['forecasts']['hourly']):
set_property('Hourly.%i.Time' % (count + 1), get_time(item['observationTime']['timestamp']))
set_property('Hourly.%i.LongDate' % (count + 1), get_date(item['observationTime']['timestamp'], 'long'))
set_property('Hourly.%i.ShortDate' % (count + 1), get_date(item['observationTime']['timestamp'], 'short'))
set_property('Hourly.%i.Temperature' % (count + 1), TEMP(item['temperature']['now']) + TEMPUNIT)
set_property('Hourly.%i.FeelsLike' % (count + 1), TEMP(item['temperature']['feelsLike']) + TEMPUNIT)
set_property('Hourly.%i.Outlook' % (count + 1), str(item['conditionDescription']))
set_property('Hourly.%i.OutlookIcon' % (count + 1), '%s.png' % str(item['conditionCode']))
set_property('Hourly.%i.FanartCode' % (count + 1), str(item['conditionCode']))
set_property('Hourly.%i.Humidity' % (count + 1), str(item['humidity']) + '%')
set_property('Hourly.%i.Precipitation' % (count + 1), str(item['precipitationProbability']) + '%')
set_property('Hourly.%i.WindDirection' % (count + 1), xbmc.getLocalizedString(WIND_DIR(item['windDirection'])))
set_property('Hourly.%i.WindSpeed' % (count + 1), SPEED(item['windSpeed']) + SPEEDUNIT)
set_property('Hourly.%i.WindDegree' % (count + 1), str(item['windDirection']) + u'°')
set_property('Hourly.%i.DewPoint' % (count + 1), TEMP(dewpoint(int(convert_temp(item['temperature']['now'])), item['humidity']), 'C') + TEMPUNIT)
#daily - standard
for count, item in enumerate(data['forecasts']['daily']):
set_property('Day%i.Title' % count, LDAYS[item['observationTime']['weekday']])
set_property('Day%i.HighTemp' % count, convert_temp(item['temperature']['high']))
set_property('Day%i.LowTemp' % count, convert_temp(item['temperature']['low']))
set_property('Day%i.Outlook' % count, item['conditionDescription'])
set_property('Day%i.OutlookIcon' % count, '%s.png' % str(item['conditionCode']))
set_property('Day%i.FanartCode' % count, str(item['conditionCode']))
if count == MAXDAYS:
break
#daily - extended
for count, item in enumerate(data['forecasts']['daily']):
set_property('Daily.%i.ShortDay' % (count + 1), DAYS[item['observationTime']['weekday']])
set_property('Daily.%i.LongDay' % (count + 1), LDAYS[item['observationTime']['weekday']])
set_property('Daily.%i.ShortDate' % (count + 1), get_date(item['observationTime']['timestamp'], 'short'))
        set_property('Daily.%i.LongDate' % (count + 1), get_date(item['observationTime']['timestamp'], 'long'))
set_property('Daily.%i.HighTemperature' % (count + 1), TEMP(item['temperature']['high']) + TEMPUNIT)
set_property('Daily.%i.LowTemperature' % (count + 1), TEMP(item['temperature']['low']) + TEMPUNIT)
set_property('Daily.%i.Outlook' % (count + 1), str(item['conditionDescription']))
set_property('Daily.%i.OutlookIcon' % (count + 1), '%s.png' % str(item['conditionCode']))
set_property('Daily.%i.FanartCode' % (count + 1), str(item['conditionCode']))
set_property('Daily.%i.Humidity' % (count + 1), str(item['humidity']) + '%')
set_property('Daily.%i.Precipitation' % (count + 1), str(item['precipitationProbability']) + '%')
set_property('Daily.%i.DewPoint' % (count + 1), TEMP(dewpoint(int(convert_temp(item['temperature']['low'])), item['humidity']), 'C') + TEMPUNIT)
class MyMonitor(xbmc.Monitor):
def __init__(self, *args, **kwargs):
xbmc.Monitor.__init__(self)
log('version %s started: %s' % (ADDONVERSION, sys.argv))
MONITOR = MyMonitor()
set_property('Forecast.IsFetched' , 'true')
set_property('Current.IsFetched' , 'true')
set_property('Today.IsFetched' , 'true')
set_property('Daily.IsFetched' , 'true')
set_property('Hourly.IsFetched' , 'true')
set_property('WeatherProvider' , ADDONNAME)
set_property('WeatherProviderLogo', xbmc.translatePath(os.path.join(CWD, 'resources', 'banner.png')))
# Create data path if it doesn't exist
if not xbmcvfs.exists(DATAPATH):
xbmcvfs.mkdir(DATAPATH)
if sys.argv[1].startswith('Location'):
keyboard = xbmc.Keyboard('', xbmc.getLocalizedString(14024), False)
keyboard.doModal()
if (keyboard.isConfirmed() and keyboard.getText()):
text = keyboard.getText()
locs, locids = location(text)
dialog = xbmcgui.Dialog()
if locs != []:
selected = dialog.select(xbmc.getLocalizedString(396), locs)
if selected != -1:
ADDON.setSetting(sys.argv[1], locs[selected])
ADDON.setSetting(sys.argv[1] + 'id', locids[selected])
log('selected location: %s' % locs[selected])
else:
log('no locations found')
dialog.ok(ADDONNAME, xbmc.getLocalizedString(284))
else:
location = ADDON.getSetting('Location%s' % sys.argv[1])
locationid = ADDON.getSetting('Location%sid' % sys.argv[1])
if (not locationid) and (sys.argv[1] != '1'):
location = ADDON.getSetting('Location1')
locationid = ADDON.getSetting('Location1id')
log('trying location 1 instead')
if locationid:
forecast(location, locationid)
else:
log('empty location id')
clear()
refresh_locations()
log('finished')
|
[
"sys.path.append",
"xbmc.sleep",
"xbmcgui.Window",
"os.path.join",
"xbmcaddon.Addon",
"xbmcvfs.mkdir",
"time.strftime",
"socket.setdefaulttimeout",
"xbmcvfs.exists",
"xbmc.Monitor.__init__",
"xbmcgui.Dialog",
"requests.get",
"xbmc.getLocalizedString",
"time.strptime"
] |
[((172, 189), 'xbmcaddon.Addon', 'xbmcaddon.Addon', ([], {}), '()\n', (187, 189), False, 'import xbmcaddon\n'), ((557, 578), 'xbmcgui.Window', 'xbmcgui.Window', (['(12600)'], {}), '(12600)\n', (571, 578), False, 'import xbmcgui\n'), ((580, 605), 'sys.path.append', 'sys.path.append', (['RESOURCE'], {}), '(RESOURCE)\n', (595, 605), False, 'import sys\n'), ((800, 828), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(10)'], {}), '(10)\n', (824, 828), False, 'import socket\n'), ((876, 922), 'time.strptime', 'time.strptime', (['stamp[:-5]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(stamp[:-5], '%Y-%m-%dT%H:%M:%S')\n", (889, 922), False, 'import time\n'), ((1449, 1495), 'time.strptime', 'time.strptime', (['stamp[:-5]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(stamp[:-5], '%Y-%m-%dT%H:%M:%S')\n", (1462, 1495), False, 'import time\n'), ((1508, 1539), 'time.strftime', 'time.strftime', (['"""%m"""', 'timestruct'], {}), "('%m', timestruct)\n", (1521, 1539), False, 'import time\n'), ((1550, 1581), 'time.strftime', 'time.strftime', (['"""%d"""', 'timestruct'], {}), "('%d', timestruct)\n", (1563, 1581), False, 'import time\n'), ((2115, 2161), 'time.strptime', 'time.strptime', (['stamp[:-5]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(stamp[:-5], '%Y-%m-%dT%H:%M:%S')\n", (2128, 2161), False, 'import time\n'), ((12541, 12565), 'xbmcvfs.exists', 'xbmcvfs.exists', (['DATAPATH'], {}), '(DATAPATH)\n', (12555, 12565), False, 'import xbmcvfs\n'), ((12571, 12594), 'xbmcvfs.mkdir', 'xbmcvfs.mkdir', (['DATAPATH'], {}), '(DATAPATH)\n', (12584, 12594), False, 'import xbmcvfs\n'), ((996, 1033), 'time.strftime', 'time.strftime', (['"""%d-%m-%Y"""', 'timestruct'], {}), "('%d-%m-%Y', timestruct)\n", (1009, 1033), False, 'import time\n'), ((1261, 1298), 'time.strftime', 'time.strftime', (['"""%I:%M %p"""', 'timestruct'], {}), "('%I:%M %p', timestruct)\n", (1274, 1298), False, 'import time\n'), ((1329, 1363), 'time.strftime', 'time.strftime', (['"""%H:%M"""', 'timestruct'], {}), "('%H:%M', timestruct)\n", (1342, 1363), False, 'import time\n'), ((2208, 2245), 'time.strftime', 'time.strftime', (['"""%I:%M %p"""', 'timestruct'], {}), "('%I:%M %p', timestruct)\n", (2221, 2245), False, 'import time\n'), ((2276, 2310), 'time.strftime', 'time.strftime', (['"""%H:%M"""', 'timestruct'], {}), "('%H:%M', timestruct)\n", (2289, 2310), False, 'import time\n'), ((2670, 2696), 'time.strptime', 'time.strptime', (['hm', '"""%H:%M"""'], {}), "(hm, '%H:%M')\n", (2683, 2696), False, 'import time\n'), ((2710, 2747), 'time.strftime', 'time.strftime', (['"""%I:%M %p"""', 'timestruct'], {}), "('%I:%M %p', timestruct)\n", (2723, 2747), False, 'import time\n'), ((3810, 3827), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (3822, 3827), False, 'import requests\n'), ((12015, 12042), 'xbmc.Monitor.__init__', 'xbmc.Monitor.__init__', (['self'], {}), '(self)\n', (12036, 12042), False, 'import xbmc\n'), ((12447, 12491), 'os.path.join', 'os.path.join', (['CWD', '"""resources"""', '"""banner.png"""'], {}), "(CWD, 'resources', 'banner.png')\n", (12459, 12491), False, 'import os\n'), ((12668, 12698), 'xbmc.getLocalizedString', 'xbmc.getLocalizedString', (['(14024)'], {}), '(14024)\n', (12691, 12698), False, 'import xbmc\n'), ((12875, 12891), 'xbmcgui.Dialog', 'xbmcgui.Dialog', ([], {}), '()\n', (12889, 12891), False, 'import xbmcgui\n'), ((1109, 1146), 'time.strftime', 'time.strftime', (['"""%m-%d-%Y"""', 'timestruct'], {}), "('%m-%d-%Y', timestruct)\n", (1122, 1146), False, 'import time\n'), ((1177, 1214), 'time.strftime', 'time.strftime', 
(['"""%Y-%m-%d"""', 'timestruct'], {}), "('%Y-%m-%d', timestruct)\n", (1190, 1214), False, 'import time\n'), ((4202, 4219), 'xbmc.sleep', 'xbmc.sleep', (['(10000)'], {}), '(10000)\n', (4212, 4219), False, 'import xbmc\n'), ((1695, 1733), 'xbmc.getLocalizedString', 'xbmc.getLocalizedString', (['MONTHS[month]'], {}), '(MONTHS[month])\n', (1718, 1733), False, 'import xbmc\n'), ((12952, 12980), 'xbmc.getLocalizedString', 'xbmc.getLocalizedString', (['(396)'], {}), '(396)\n', (12975, 12980), False, 'import xbmc\n'), ((13299, 13327), 'xbmc.getLocalizedString', 'xbmc.getLocalizedString', (['(284)'], {}), '(284)\n', (13322, 13327), False, 'import xbmc\n'), ((389, 426), 'os.path.join', 'os.path.join', (['CWD', '"""resources"""', '"""lib"""'], {}), "(CWD, 'resources', 'lib')\n", (401, 426), False, 'import os\n'), ((1768, 1806), 'xbmc.getLocalizedString', 'xbmc.getLocalizedString', (['MONTHS[month]'], {}), '(MONTHS[month])\n', (1791, 1806), False, 'import xbmc\n'), ((1933, 1972), 'xbmc.getLocalizedString', 'xbmc.getLocalizedString', (['LMONTHS[month]'], {}), '(LMONTHS[month])\n', (1956, 1972), False, 'import xbmc\n'), ((2007, 2046), 'xbmc.getLocalizedString', 'xbmc.getLocalizedString', (['LMONTHS[month]'], {}), '(LMONTHS[month])\n', (2030, 2046), False, 'import xbmc\n')]
|
#!/usr/bin/env python
import collections
import pyaudio
import snowboydetect
import time
import wave
import os
import logging
from ctypes import *
from contextlib import contextmanager
logging.basicConfig()
logger = logging.getLogger("snowboy")
logger.setLevel(logging.INFO)
TOP_DIR = os.path.dirname(os.path.abspath(__file__))
RESOURCE_FILE = os.path.join(TOP_DIR, "resources/common.res")
DETECT_DING = os.path.join(TOP_DIR, "resources/ding.wav")
DETECT_DONG = os.path.join(TOP_DIR, "resources/dong.wav")
def py_error_handler(filename, line, function, err, fmt):
pass
ERROR_HANDLER_FUNC = CFUNCTYPE(None, c_char_p, c_int, c_char_p, c_int, c_char_p)
c_error_handler = ERROR_HANDLER_FUNC(py_error_handler)
@contextmanager
def no_alsa_error():
try:
asound = cdll.LoadLibrary('libasound.so')
asound.snd_lib_error_set_handler(c_error_handler)
yield
asound.snd_lib_error_set_handler(None)
    except:
        yield
class RingBuffer(object):
"""Ring buffer to hold audio from PortAudio"""
    def __init__(self, size=4096):
self._buf = collections.deque(maxlen=size)
def extend(self, data):
"""Adds data to the end of buffer"""
self._buf.extend(data)
def get(self):
"""Retrieves data from the beginning of buffer and clears it"""
tmp = bytes(bytearray(self._buf))
self._buf.clear()
return tmp
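# A tiny usage sketch (hypothetical values, Python 3 semantics):
#   rb = RingBuffer(size=16)
#   rb.extend(b'\x01\x02\x03')   # append raw audio bytes as they arrive
#   rb.get()                     # -> b'\x01\x02\x03', and the buffer is now empty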
def play_audio_file(fname=DETECT_DING):
"""Simple callback function to play a wave file. By default it plays
a Ding sound.
:param str fname: wave file name
:return: None
"""
ding_wav = wave.open(fname, 'rb')
ding_data = ding_wav.readframes(ding_wav.getnframes())
with no_alsa_error():
audio = pyaudio.PyAudio()
stream_out = audio.open(
format=audio.get_format_from_width(ding_wav.getsampwidth()),
channels=ding_wav.getnchannels(),
rate=ding_wav.getframerate(), input=False, output=True)
stream_out.start_stream()
stream_out.write(ding_data)
time.sleep(0.2)
stream_out.stop_stream()
stream_out.close()
audio.terminate()
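# e.g. play_audio_file() plays the default ding; play_audio_file(DETECT_DONG) plays the dong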
class HotwordDetector(object):
"""
Snowboy decoder to detect whether a keyword specified by `decoder_model`
exists in a microphone input stream.
:param decoder_model: decoder model file path, a string or a list of strings
:param resource: resource file path.
    :param sensitivity: decoder sensitivity, a float or a list of floats.
                        The bigger the value, the more sensitive the
decoder. If an empty list is provided, then the
default sensitivity in the model will be used.
:param audio_gain: multiply input volume by this factor.
:param apply_frontend: applies the frontend processing algorithm if True.
"""
def __init__(self, decoder_model,
resource=RESOURCE_FILE,
sensitivity=[],
audio_gain=1,
apply_frontend=False):
def audio_callback(in_data, frame_count, time_info, status):
self.ring_buffer.extend(in_data)
play_data = chr(0) * len(in_data)
return play_data, pyaudio.paContinue
tm = type(decoder_model)
ts = type(sensitivity)
if tm is not list:
decoder_model = [decoder_model]
if ts is not list:
sensitivity = [sensitivity]
model_str = ",".join(decoder_model)
self.detector = snowboydetect.SnowboyDetect(
resource_filename=resource.encode(), model_str=model_str.encode())
self.detector.SetAudioGain(audio_gain)
self.detector.ApplyFrontend(apply_frontend)
self.num_hotwords = self.detector.NumHotwords()
if len(decoder_model) > 1 and len(sensitivity) == 1:
sensitivity = sensitivity*self.num_hotwords
if len(sensitivity) != 0:
assert self.num_hotwords == len(sensitivity), \
"number of hotwords in decoder_model (%d) and sensitivity " \
"(%d) does not match" % (self.num_hotwords, len(sensitivity))
sensitivity_str = ",".join([str(t) for t in sensitivity])
if len(sensitivity) != 0:
self.detector.SetSensitivity(sensitivity_str.encode())
self.ring_buffer = RingBuffer(
self.detector.NumChannels() * self.detector.SampleRate() * 5)
with no_alsa_error():
self.audio = pyaudio.PyAudio()
print('Audio init start')
self.stream_in = self.audio.open(
input=True, output=False,
format=self.audio.get_format_from_width(
                self.detector.BitsPerSample() // 8),
channels=self.detector.NumChannels(),
rate=self.detector.SampleRate(),
frames_per_buffer=2048,
stream_callback=audio_callback)
print('Audio init finish')
def start(self, detected_callback=play_audio_file,
interrupt_check=lambda: False,
sleep_time=0.03,
audio_recorder_callback=None,
silent_count_threshold=15,
recording_timeout=100):
"""
        Start the voice detector. Every `sleep_time` seconds it checks the
        audio buffer for triggering keywords. If one is detected, it calls the
        corresponding function in `detected_callback`, which can be a single
        function (single model) or a list of callback functions (multiple
        models). Every loop it also calls `interrupt_check` -- if that returns
        True, it breaks from the loop and returns.
:param detected_callback: a function or list of functions. The number of
items must match the number of models in
`decoder_model`.
:param interrupt_check: a function that returns True if the main loop
needs to stop.
        :param float sleep_time: how much time in seconds every loop waits.
:param audio_recorder_callback: if specified, this will be called after
a keyword has been spoken and after the
phrase immediately after the keyword has
been recorded. The function will be
passed the name of the file where the
phrase was recorded.
:param silent_count_threshold: indicates how long silence must be heard
to mark the end of a phrase that is
being recorded.
:param recording_timeout: limits the maximum length of a recording.
:return: None
"""
if interrupt_check():
logger.debug("detect voice return")
return
tc = type(detected_callback)
if tc is not list:
detected_callback = [detected_callback]
if len(detected_callback) == 1 and self.num_hotwords > 1:
detected_callback *= self.num_hotwords
assert self.num_hotwords == len(detected_callback), \
"Error: hotwords in your models (%d) do not match the number of " \
"callbacks (%d)" % (self.num_hotwords, len(detected_callback))
logger.debug("detecting...")
state = "PASSIVE"
while True:
if interrupt_check():
logger.debug("detect voice break")
break
data = self.ring_buffer.get()
if len(data) == 0:
time.sleep(sleep_time)
continue
status = self.detector.RunDetection(data)
if status == -1:
logger.warning("Error initializing streams or reading audio data")
#small state machine to handle recording of phrase after keyword
if state == "PASSIVE":
if status > 0: #key word found
self.recordedData = []
self.recordedData.append(data)
silentCount = 0
recordingCount = 0
message = "Keyword " + str(status) + " detected at time: "
message += time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime(time.time()))
logger.info(message)
callback = detected_callback[status-1]
if callback is not None:
callback()
if audio_recorder_callback is not None:
state = "ACTIVE"
continue
elif state == "ACTIVE":
stopRecording = False
if recordingCount > recording_timeout:
stopRecording = True
elif status == -2: #silence found
if silentCount > silent_count_threshold:
stopRecording = True
else:
silentCount = silentCount + 1
elif status == 0: #voice found
silentCount = 0
                if stopRecording:
fname = self.saveMessage()
audio_recorder_callback(fname)
state = "PASSIVE"
continue
recordingCount = recordingCount + 1
self.recordedData.append(data)
logger.debug("finished.")
def saveMessage(self):
"""
Save the message stored in self.recordedData to a timestamped file.
"""
filename = 'output' + str(int(time.time())) + '.wav'
data = b''.join(self.recordedData)
#use wave to save data
wf = wave.open(filename, 'wb')
wf.setnchannels(1)
wf.setsampwidth(self.audio.get_sample_size(
self.audio.get_format_from_width(
                self.detector.BitsPerSample() // 8)))
wf.setframerate(self.detector.SampleRate())
wf.writeframes(data)
wf.close()
logger.debug("finished saving: " + filename)
return filename
def terminate(self):
"""
Terminate audio stream. Users cannot call start() again to detect.
:return: None
"""
self.stream_in.stop_stream()
self.stream_in.close()
self.audio.terminate()
def restart(self):
def audio_callback(in_data, frame_count, time_info, status):
self.ring_buffer.extend(in_data)
play_data = chr(0) * len(in_data)
return play_data, pyaudio.paContinue
self.audio = pyaudio.PyAudio()
print('Audio init start')
self.stream_in = self.audio.open(
input=True, output=False,
format=self.audio.get_format_from_width(
                self.detector.BitsPerSample() // 8),
channels=self.detector.NumChannels(),
rate=self.detector.SampleRate(),
frames_per_buffer=2048,
stream_callback=audio_callback)
print('Audio init end')
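# A minimal end-to-end usage sketch, kept as comments so the module stays importable.
# The model path is hypothetical -- any trained .pmdl/.umdl file would do; the calls
# mirror the constructor and start()/terminate() APIs defined above:
#
#   detector = HotwordDetector("resources/snowboy.umdl", sensitivity=0.5, audio_gain=1)
#   detector.start(detected_callback=play_audio_file,   # ding on every detection
#                  interrupt_check=lambda: False,       # run until the process is killed
#                  sleep_time=0.03)
#   detector.terminate()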
|
[
"wave.open",
"os.path.abspath",
"logging.basicConfig",
"collections.deque",
"time.sleep",
"time.time",
"pyaudio.PyAudio",
"os.path.join",
"logging.getLogger"
] |
[((187, 208), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (206, 208), False, 'import logging\n'), ((218, 246), 'logging.getLogger', 'logging.getLogger', (['"""snowboy"""'], {}), "('snowboy')\n", (235, 246), False, 'import logging\n'), ((347, 392), 'os.path.join', 'os.path.join', (['TOP_DIR', '"""resources/common.res"""'], {}), "(TOP_DIR, 'resources/common.res')\n", (359, 392), False, 'import os\n'), ((407, 450), 'os.path.join', 'os.path.join', (['TOP_DIR', '"""resources/ding.wav"""'], {}), "(TOP_DIR, 'resources/ding.wav')\n", (419, 450), False, 'import os\n'), ((465, 508), 'os.path.join', 'os.path.join', (['TOP_DIR', '"""resources/dong.wav"""'], {}), "(TOP_DIR, 'resources/dong.wav')\n", (477, 508), False, 'import os\n'), ((303, 328), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (318, 328), False, 'import os\n'), ((1632, 1654), 'wave.open', 'wave.open', (['fname', '"""rb"""'], {}), "(fname, 'rb')\n", (1641, 1654), False, 'import wave\n'), ((2044, 2059), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (2054, 2059), False, 'import time\n'), ((1105, 1135), 'collections.deque', 'collections.deque', ([], {'maxlen': 'size'}), '(maxlen=size)\n', (1122, 1135), False, 'import collections\n'), ((1756, 1773), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (1771, 1773), False, 'import pyaudio\n'), ((9834, 9859), 'wave.open', 'wave.open', (['filename', '"""wb"""'], {}), "(filename, 'wb')\n", (9843, 9859), False, 'import wave\n'), ((10717, 10734), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (10732, 10734), False, 'import pyaudio\n'), ((4498, 4515), 'pyaudio.PyAudio', 'pyaudio.PyAudio', ([], {}), '()\n', (4513, 4515), False, 'import pyaudio\n'), ((7654, 7676), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (7664, 7676), False, 'import time\n'), ((9723, 9734), 'time.time', 'time.time', ([], {}), '()\n', (9732, 9734), False, 'import time\n'), ((8399, 8410), 'time.time', 'time.time', ([], {}), '()\n', (8408, 8410), False, 'import time\n')]
|
import unittest
import pandas as pd
from ta.momentum import (
KAMAIndicator,
PercentagePriceOscillator,
PercentageVolumeOscillator,
ROCIndicator,
RSIIndicator,
StochasticOscillator,
StochRSIIndicator,
TSIIndicator,
UltimateOscillator,
WilliamsRIndicator,
kama,
ppo,
ppo_hist,
ppo_signal,
pvo,
pvo_hist,
pvo_signal,
roc,
rsi,
stoch,
stoch_signal,
stochrsi,
tsi,
ultimate_oscillator,
williams_r,
)
class TestRateOfChangeIndicator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:on_balance_volume_obv
"""
_filename = "test/data/cs-roc.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(close=cls._df["Close"], window=12, fillna=False)
cls._indicator = ROCIndicator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_roc(self):
target = "ROC"
result = roc(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_roc2(self):
target = "ROC"
result = self._indicator.roc()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestRSIIndicator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:relative_strength_index_rsi
    Note: Using a simpler initialization (directly `ewm`; stockcharts uses `sma` + `ewm`)
"""
_filename = "test/data/cs-rsi.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(close=cls._df["Close"], window=14, fillna=False)
cls._indicator = RSIIndicator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_rsi(self):
target = "RSI"
result = self._indicator.rsi()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_rsi2(self):
target = "RSI"
result = rsi(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestStochRSIIndicator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:stochrsi
"""
_filename = "test/data/cs-stochrsi.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
close=cls._df["Close"], window=14, smooth1=3, smooth2=3, fillna=False
)
cls._indicator = StochRSIIndicator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_stochrsi(self):
target = "StochRSI(14)"
result = self._indicator.stochrsi()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_stochrsi2(self):
target = "StochRSI(14)"
result = stochrsi(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestUltimateOscillator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:ultimate_oscillator
"""
_filename = "test/data/cs-ultosc.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
high=cls._df["High"],
low=cls._df["Low"],
close=cls._df["Close"],
window1=7,
window2=14,
window3=28,
weight1=4.0,
weight2=2.0,
weight3=1.0,
fillna=False,
)
cls._indicator = UltimateOscillator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_uo(self):
target = "Ult_Osc"
result = self._indicator.ultimate_oscillator()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_uo2(self):
target = "Ult_Osc"
result = ultimate_oscillator(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestStochasticOscillator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:stochastic_oscillator_fast_slow_and_full
"""
_filename = "test/data/cs-soo.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
high=cls._df["High"],
low=cls._df["Low"],
close=cls._df["Close"],
window=14,
smooth_window=3,
fillna=False,
)
cls._indicator = StochasticOscillator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_so(self):
target = "SO"
result = self._indicator.stoch()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_so_signal(self):
target = "SO_SIG"
result = self._indicator.stoch_signal()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_so2(self):
target = "SO"
result = stoch(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_so_signal2(self):
target = "SO_SIG"
result = stoch_signal(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestWilliamsRIndicator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:williams_r
"""
_filename = "test/data/cs-percentr.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
high=cls._df["High"],
low=cls._df["Low"],
close=cls._df["Close"],
lbp=14,
fillna=False,
)
cls._indicator = WilliamsRIndicator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_wr(self):
target = "Williams_%R"
result = self._indicator.williams_r()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_wr2(self):
target = "Williams_%R"
result = williams_r(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestKAMAIndicator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:kaufman_s_adaptive_moving_average
"""
_filename = "test/data/cs-kama.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
close=cls._df["Close"], window=10, pow1=2, pow2=30, fillna=False
)
cls._indicator = KAMAIndicator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_kama(self):
target = "KAMA"
result = self._indicator.kama()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_kama2(self):
target = "KAMA"
result = kama(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestTSIIndicator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:true_strength_index
"""
_filename = "test/data/cs-tsi.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
close=cls._df["Close"], window_slow=25, window_fast=13, fillna=False
)
cls._indicator = TSIIndicator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_tsi(self):
target = "TSI"
result = self._indicator.tsi()
pd.testing.assert_series_equal(
self._df[target].tail(),
result.tail(),
check_names=False,
check_less_precise=True,
)
def test_tsi2(self):
target = "TSI"
result = tsi(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(),
result.tail(),
check_names=False,
check_less_precise=True,
)
class TestPercentagePriceOscillator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:price_oscillators_ppo
https://docs.google.com/spreadsheets/d/1h9p8_PXU7G8sD-LciydpmH6rveaLwvoL7SMBGmO3kM4/edit#gid=0
"""
_filename = "test/data/cs-ppo.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
close=cls._df["Close"],
window_slow=26,
window_fast=12,
window_sign=9,
fillna=True,
)
cls._indicator = PercentagePriceOscillator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_ppo(self):
target = "PPO"
result = self._indicator.ppo()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_ppo2(self):
target = "PPO"
result = ppo(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_ppo_signal(self):
target = "PPO_Signal_Line"
result = self._indicator.ppo_signal()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_ppo_signal2(self):
target = "PPO_Signal_Line"
result = ppo_signal(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_ppo_hist(self):
target = "PPO_Histogram"
result = self._indicator.ppo_hist()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_ppo_hist2(self):
target = "PPO_Histogram"
result = ppo_hist(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
class TestPercentageVolumeOscillator(unittest.TestCase):
"""
https://school.stockcharts.com/doku.php?id=technical_indicators:percentage_volume_oscillator_pvo
https://docs.google.com/spreadsheets/d/1SyePHvrVBAcmjDiXe877Qrycx6TmajyrZ8UdrwVk9MI/edit#gid=0
"""
_filename = "test/data/cs-pvo.csv"
@classmethod
def setUpClass(cls):
cls._df = pd.read_csv(cls._filename, sep=",")
cls._params = dict(
volume=cls._df["Volume"],
window_slow=26,
window_fast=12,
window_sign=9,
fillna=True,
)
cls._indicator = PercentageVolumeOscillator(**cls._params)
@classmethod
def tearDownClass(cls):
del cls._df
def test_pvo(self):
target = "PVO"
result = self._indicator.pvo()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_pvo2(self):
target = "PVO"
result = pvo(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_pvo_signal(self):
target = "PVO_Signal_Line"
result = self._indicator.pvo_signal()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_pvo_signal2(self):
target = "PVO_Signal_Line"
result = pvo_signal(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_pvo_hist(self):
target = "PVO_Histogram"
result = self._indicator.pvo_hist()
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
def test_pvo_hist2(self):
target = "PVO_Histogram"
result = pvo_hist(**self._params)
pd.testing.assert_series_equal(
self._df[target].tail(), result.tail(), check_names=False
)
if __name__ == "__main__":
unittest.main()
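# Individual suites can also be run selectively, e.g. (module name assumed,
# substitute the actual file name):
#   python -m unittest test_momentum.TestRateOfChangeIndicator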
|
[
"ta.momentum.StochasticOscillator",
"pandas.read_csv",
"ta.momentum.roc",
"ta.momentum.ppo_signal",
"ta.momentum.rsi",
"ta.momentum.PercentagePriceOscillator",
"ta.momentum.ppo_hist",
"unittest.main",
"ta.momentum.kama",
"ta.momentum.WilliamsRIndicator",
"ta.momentum.tsi",
"ta.momentum.williams_r",
"ta.momentum.StochRSIIndicator",
"ta.momentum.pvo",
"ta.momentum.stochrsi",
"ta.momentum.pvo_hist",
"ta.momentum.stoch",
"ta.momentum.TSIIndicator",
"ta.momentum.ultimate_oscillator",
"ta.momentum.ppo",
"ta.momentum.PercentageVolumeOscillator",
"ta.momentum.KAMAIndicator",
"ta.momentum.RSIIndicator",
"ta.momentum.stoch_signal",
"ta.momentum.pvo_signal",
"ta.momentum.ROCIndicator",
"ta.momentum.UltimateOscillator"
] |
[((13277, 13292), 'unittest.main', 'unittest.main', ([], {}), '()\n', (13290, 13292), False, 'import unittest\n'), ((758, 793), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (769, 793), True, 'import pandas as pd\n'), ((895, 922), 'ta.momentum.ROCIndicator', 'ROCIndicator', ([], {}), '(**cls._params)\n', (907, 922), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((1054, 1073), 'ta.momentum.roc', 'roc', ([], {}), '(**self._params)\n', (1057, 1073), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((1753, 1788), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (1764, 1788), True, 'import pandas as pd\n'), ((1890, 1917), 'ta.momentum.RSIIndicator', 'RSIIndicator', ([], {}), '(**cls._params)\n', (1902, 1917), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((2257, 2276), 'ta.momentum.rsi', 'rsi', ([], {}), '(**self._params)\n', (2260, 2276), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((2646, 2681), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (2657, 2681), True, 'import pandas as pd\n'), ((2827, 2859), 'ta.momentum.StochRSIIndicator', 'StochRSIIndicator', ([], {}), '(**cls._params)\n', (2844, 2859), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((3232, 3256), 'ta.momentum.stochrsi', 'stochrsi', ([], {}), '(**self._params)\n', (3240, 3256), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((3636, 3671), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (3647, 3671), True, 'import 
pandas as pd\n'), ((4009, 4042), 'ta.momentum.UltimateOscillator', 'UltimateOscillator', ([], {}), '(**cls._params)\n', (4027, 4042), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((4404, 4439), 'ta.momentum.ultimate_oscillator', 'ultimate_oscillator', ([], {}), '(**self._params)\n', (4423, 4439), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((4839, 4874), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (4850, 4874), True, 'import pandas as pd\n'), ((5118, 5153), 'ta.momentum.StochasticOscillator', 'StochasticOscillator', ([], {}), '(**cls._params)\n', (5138, 5153), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((5716, 5737), 'ta.momentum.stoch', 'stoch', ([], {}), '(**self._params)\n', (5721, 5737), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((5933, 5961), 'ta.momentum.stoch_signal', 'stoch_signal', ([], {}), '(**self._params)\n', (5945, 5961), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((6334, 6369), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (6345, 6369), True, 'import pandas as pd\n'), ((6581, 6614), 'ta.momentum.WilliamsRIndicator', 'WilliamsRIndicator', ([], {}), '(**cls._params)\n', (6599, 6614), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((6975, 7001), 'ta.momentum.williams_r', 'williams_r', ([], {}), '(**self._params)\n', (6985, 7001), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, 
kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((7388, 7423), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (7399, 7423), True, 'import pandas as pd\n'), ((7564, 7592), 'ta.momentum.KAMAIndicator', 'KAMAIndicator', ([], {}), '(**cls._params)\n', (7577, 7592), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((7937, 7957), 'ta.momentum.kama', 'kama', ([], {}), '(**self._params)\n', (7941, 7957), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((8328, 8363), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (8339, 8363), True, 'import pandas as pd\n'), ((8508, 8535), 'ta.momentum.TSIIndicator', 'TSIIndicator', ([], {}), '(**cls._params)\n', (8520, 8535), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((8937, 8956), 'ta.momentum.tsi', 'tsi', ([], {}), '(**self._params)\n', (8940, 8956), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((9503, 9538), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (9514, 9538), True, 'import pandas as pd\n'), ((9746, 9786), 'ta.momentum.PercentagePriceOscillator', 'PercentagePriceOscillator', ([], {}), '(**cls._params)\n', (9771, 9786), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((10126, 10145), 'ta.momentum.ppo', 'ppo', ([], {}), '(**self._params)\n', (10129, 10145), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((10584, 10610), 'ta.momentum.ppo_signal', 'ppo_signal', ([], {}), '(**self._params)\n', (10594, 
10610), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((11039, 11063), 'ta.momentum.ppo_hist', 'ppo_hist', ([], {}), '(**self._params)\n', (11047, 11063), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((11560, 11595), 'pandas.read_csv', 'pd.read_csv', (['cls._filename'], {'sep': '""","""'}), "(cls._filename, sep=',')\n", (11571, 11595), True, 'import pandas as pd\n'), ((11805, 11846), 'ta.momentum.PercentageVolumeOscillator', 'PercentageVolumeOscillator', ([], {}), '(**cls._params)\n', (11831, 11846), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((12186, 12205), 'ta.momentum.pvo', 'pvo', ([], {}), '(**self._params)\n', (12189, 12205), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((12644, 12670), 'ta.momentum.pvo_signal', 'pvo_signal', ([], {}), '(**self._params)\n', (12654, 12670), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n'), ((13099, 13123), 'ta.momentum.pvo_hist', 'pvo_hist', ([], {}), '(**self._params)\n', (13107, 13123), False, 'from ta.momentum import KAMAIndicator, PercentagePriceOscillator, PercentageVolumeOscillator, ROCIndicator, RSIIndicator, StochasticOscillator, StochRSIIndicator, TSIIndicator, UltimateOscillator, WilliamsRIndicator, kama, ppo, ppo_hist, ppo_signal, pvo, pvo_hist, pvo_signal, roc, rsi, stoch, stoch_signal, stochrsi, tsi, ultimate_oscillator, williams_r\n')]
|
# -*- coding: utf-8 -*-
import tensorflow as tf
import yolo_v3
import yolo_v3_tiny
from utils import load_coco_names, load_weights
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string(
'class_names', 'coco.names', 'File with class names')
tf.app.flags.DEFINE_string(
'weights_file', 'yolov3.weights', 'Binary file with detector weights')
tf.app.flags.DEFINE_string(
'data_format', 'NCHW', 'Data format: NCHW (gpu only) / NHWC')
tf.app.flags.DEFINE_bool(
'tiny', False, 'Use tiny version of YOLOv3')
tf.app.flags.DEFINE_string(
    'ckpt_file', './saved_model/model.ckpt', 'Checkpoint file')
def main(argv=None):
if FLAGS.tiny:
model = yolo_v3_tiny.yolo_v3_tiny
else:
model = yolo_v3.yolo_v3
classes = load_coco_names(FLAGS.class_names)
# placeholder for detector inputs
# any size > 320 will work here
inputs = tf.placeholder(tf.float32, [None, 416, 416, 3])
with tf.variable_scope('detector'):
detections = model(inputs, len(classes),
data_format=FLAGS.data_format)
load_ops = load_weights(tf.global_variables(
scope='detector'), FLAGS.weights_file)
saver = tf.train.Saver(tf.global_variables(scope='detector'))
with tf.Session() as sess:
sess.run(load_ops)
save_path = saver.save(sess, save_path=FLAGS.ckpt_file)
print('Model saved in path: {}'.format(save_path))
if __name__ == '__main__':
tf.app.run()
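# Example invocation (the script name is whatever this file is saved as; the flag
# values shown are just the defaults defined above):
#   python this_script.py --weights_file yolov3.weights --ckpt_file ./saved_model/model.ckpt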
|
[
"tensorflow.app.flags.DEFINE_bool",
"tensorflow.Session",
"tensorflow.variable_scope",
"tensorflow.placeholder",
"tensorflow.global_variables",
"tensorflow.app.flags.DEFINE_string",
"utils.load_coco_names",
"tensorflow.app.run"
] |
[((163, 248), 'tensorflow.app.flags.DEFINE_string', 'tf.app.flags.DEFINE_string', (['"""class_names"""', '"""coco.names"""', '"""File with class names"""'], {}), "('class_names', 'coco.names', 'File with class names'\n )\n", (189, 248), True, 'import tensorflow as tf\n'), ((249, 350), 'tensorflow.app.flags.DEFINE_string', 'tf.app.flags.DEFINE_string', (['"""weights_file"""', '"""yolov3.weights"""', '"""Binary file with detector weights"""'], {}), "('weights_file', 'yolov3.weights',\n 'Binary file with detector weights')\n", (275, 350), True, 'import tensorflow as tf\n'), ((352, 444), 'tensorflow.app.flags.DEFINE_string', 'tf.app.flags.DEFINE_string', (['"""data_format"""', '"""NCHW"""', '"""Data format: NCHW (gpu only) / NHWC"""'], {}), "('data_format', 'NCHW',\n 'Data format: NCHW (gpu only) / NHWC')\n", (378, 444), True, 'import tensorflow as tf\n'), ((446, 515), 'tensorflow.app.flags.DEFINE_bool', 'tf.app.flags.DEFINE_bool', (['"""tiny"""', '(False)', '"""Use tiny version of YOLOv3"""'], {}), "('tiny', False, 'Use tiny version of YOLOv3')\n", (470, 515), True, 'import tensorflow as tf\n'), ((521, 612), 'tensorflow.app.flags.DEFINE_string', 'tf.app.flags.DEFINE_string', (['"""ckpt_file"""', '"""./saved_model/model.ckpt"""', '"""Chceckpoint file"""'], {}), "('ckpt_file', './saved_model/model.ckpt',\n 'Chceckpoint file')\n", (547, 612), True, 'import tensorflow as tf\n'), ((755, 789), 'utils.load_coco_names', 'load_coco_names', (['FLAGS.class_names'], {}), '(FLAGS.class_names)\n', (770, 789), False, 'from utils import load_coco_names, load_weights\n'), ((878, 925), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, 416, 416, 3]'], {}), '(tf.float32, [None, 416, 416, 3])\n', (892, 925), True, 'import tensorflow as tf\n'), ((1461, 1473), 'tensorflow.app.run', 'tf.app.run', ([], {}), '()\n', (1471, 1473), True, 'import tensorflow as tf\n'), ((936, 965), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""detector"""'], {}), "('detector')\n", (953, 965), True, 'import tensorflow as tf\n'), ((1206, 1243), 'tensorflow.global_variables', 'tf.global_variables', ([], {'scope': '"""detector"""'}), "(scope='detector')\n", (1225, 1243), True, 'import tensorflow as tf\n'), ((1255, 1267), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (1265, 1267), True, 'import tensorflow as tf\n'), ((1106, 1143), 'tensorflow.global_variables', 'tf.global_variables', ([], {'scope': '"""detector"""'}), "(scope='detector')\n", (1125, 1143), True, 'import tensorflow as tf\n')]
|
# Copyright © 2019 National Institute of Advanced Industrial Science and Technology (AIST). All rights reserved.
# !/usr/bin/env python3.6
# coding=utf-8
import unittest
from qlib.utils.logging import get_logger, log
logger = get_logger()
@log(logger)
def log_test():
print('test')
@log(logger)
def log_test2():
return 'あいうえお'
class TestAITInputGenerator(unittest.TestCase):
def test_log_func(self):
log_test()
log_test2()
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"qlib.utils.logging.log",
"qlib.utils.logging.get_logger"
] |
[((229, 241), 'qlib.utils.logging.get_logger', 'get_logger', ([], {}), '()\n', (239, 241), False, 'from qlib.utils.logging import get_logger, log\n'), ((245, 256), 'qlib.utils.logging.log', 'log', (['logger'], {}), '(logger)\n', (248, 256), False, 'from qlib.utils.logging import get_logger, log\n'), ((294, 305), 'qlib.utils.logging.log', 'log', (['logger'], {}), '(logger)\n', (297, 305), False, 'from qlib.utils.logging import get_logger, log\n'), ((494, 509), 'unittest.main', 'unittest.main', ([], {}), '()\n', (507, 509), False, 'import unittest\n')]
|
# -*- coding: utf-8 -*-
import torch.utils.data as data
from PIL import Image
import torchvision.transforms as transforms
class BaseDataset(data.Dataset):
def __init__(self):
super(BaseDataset, self).__init__()
def name(self):
return 'BaseDataset'
@staticmethod
def modify_commandline_options(parser, is_train):
return parser
def initialize(self, opt):
pass
def __len__(self):
return 0
def get_transform(opt):
transform_list = []
    # Modified transform: specify the input image height and width (or the seg height and width) separately
if opt.resize_or_crop == 'resize_and_crop':
osize = [opt.loadSizeH, opt.loadSizeW]
fsize = [opt.fineSizeH, opt.fineSizeW]
transform_list.append(transforms.Resize(osize, Image.BICUBIC))
transform_list.append(transforms.RandomCrop(fsize))
# Original CycleGAN code
# if opt.resize_or_crop == 'resize_and_crop':
# osize = [opt.loadSize, opt.loadSize]
# transform_list.append(transforms.Resize(osize, Image.BICUBIC))
# transform_list.append(transforms.RandomCrop(opt.fineSize))
# elif opt.resize_or_crop == 'crop':
# transform_list.append(transforms.RandomCrop(opt.fineSize))
# elif opt.resize_or_crop == 'scale_width':
# transform_list.append(transforms.Lambda(
# lambda img: __scale_width(img, opt.fineSize)))
# elif opt.resize_or_crop == 'scale_width_and_crop':
# transform_list.append(transforms.Lambda(
# lambda img: __scale_width(img, opt.loadSize)))
# transform_list.append(transforms.RandomCrop(opt.fineSize))
# elif opt.resize_or_crop == 'none':
# transform_list.append(transforms.Lambda(
# lambda img: __adjust(img)))
else:
raise ValueError('--resize_or_crop %s is not a valid option.' % opt.resize_or_crop)
if opt.isTrain and not opt.no_flip:
transform_list.append(transforms.RandomHorizontalFlip())
transform_list += [transforms.ToTensor(),
                       # transforms.Lambda(lambda x: x.repeat(3, 1, 1)), # failed
transforms.Normalize([0.5],
[0.5])]
return transforms.Compose(transform_list)
# just modify the width and height to be multiple of 4
def __adjust(img):
ow, oh = img.size
# the size needs to be a multiple of this number,
# because going through generator network may change img size
# and eventually cause size mismatch error
mult = 4
if ow % mult == 0 and oh % mult == 0:
return img
w = (ow - 1) // mult
w = (w + 1) * mult
h = (oh - 1) // mult
h = (h + 1) * mult
if ow != w or oh != h:
__print_size_warning(ow, oh, w, h)
return img.resize((w, h), Image.BICUBIC)
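# e.g. for a 255x100 image: w = ((255 - 1) // 4 + 1) * 4 = 256 and h stays 100,
# so after the one-time size warning the image is resized to (256, 100)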
def __scale_width(img, target_width):
ow, oh = img.size
# the size needs to be a multiple of this number,
# because going through generator network may change img size
# and eventually cause size mismatch error
mult = 4
    assert target_width % mult == 0, "the target width needs to be a multiple of %d." % mult
if (ow == target_width and oh % mult == 0):
return img
w = target_width
target_height = int(target_width * oh / ow)
m = (target_height - 1) // mult
h = (m + 1) * mult
if target_height != h:
__print_size_warning(target_width, target_height, w, h)
return img.resize((w, h), Image.BICUBIC)
def __print_size_warning(ow, oh, w, h):
if not hasattr(__print_size_warning, 'has_printed'):
print("The image size needs to be a multiple of 4. "
"The loaded image size was (%d, %d), so it was adjusted to "
"(%d, %d). This adjustment will be done to all images "
"whose sizes are not multiples of 4" % (ow, oh, w, h))
__print_size_warning.has_printed = True
|
[
"torchvision.transforms.RandomHorizontalFlip",
"torchvision.transforms.ToTensor",
"torchvision.transforms.Compose",
"torchvision.transforms.Normalize",
"torchvision.transforms.RandomCrop",
"torchvision.transforms.Resize"
] |
[((2212, 2246), 'torchvision.transforms.Compose', 'transforms.Compose', (['transform_list'], {}), '(transform_list)\n', (2230, 2246), True, 'import torchvision.transforms as transforms\n'), ((1998, 2019), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2017, 2019), True, 'import torchvision.transforms as transforms\n'), ((2120, 2154), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['[0.5]', '[0.5]'], {}), '([0.5], [0.5])\n', (2140, 2154), True, 'import torchvision.transforms as transforms\n'), ((755, 794), 'torchvision.transforms.Resize', 'transforms.Resize', (['osize', 'Image.BICUBIC'], {}), '(osize, Image.BICUBIC)\n', (772, 794), True, 'import torchvision.transforms as transforms\n'), ((826, 854), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['fsize'], {}), '(fsize)\n', (847, 854), True, 'import torchvision.transforms as transforms\n'), ((1939, 1972), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (1970, 1972), True, 'import torchvision.transforms as transforms\n')]
|
import os
from mahoudata.core import *
from pandas_profiling import ProfileReport
import pandas as pd
from flask import Flask, request, render_template, url_for, json
from flask_cors import CORS
from recommender import *
import json
import csv
self = {}
self['cats'] = ['graduacion','lupulo_afrutado_citrico','lupulo_floral_herbal','amargor','color','maltoso','licoroso','afrutado','especias','acidez']
self['MATRIX_FILE'] = './data/dataset-datathon.csv'
self['NAMES_FILE'] = './data/fakeNames.csv'
self['NO_VALUE_DISTANCE'] = 5
app = Flask(__name__)
CORS(app)
# TODO just for dev
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 0
@app.route('/beers')
def beers():
m = json.loads(get_beers())
for i,b in enumerate(m['data']):
b['name'] = self['names'][i]
return m
# https://flask.palletsprojects.com/en/1.1.x/api/#url-route-registrations
@app.route('/beers/<int:beer_id>')
def beer(beer_id):
beer = json.loads(get_beers(beer_id))
# complete beer data with fake name
beer['name'] = self['names'][beer_id]
return beer
@app.route('/beers/<int:beer_id>/recommendations')
def beer_recommendations(beer_id):
return get_recommendations(beer_id)
@app.route('/simpleViz')
def simpleViz():
return render_template('simpleViz.html')
@app.route('/')
def index():
return render_template('index.html', brewing_steps = get_brewing_spec())
def __translate_obj_param__(obj):
'''[{"graduacion":""},{"lupulo_afrutado_citrico":1.6},{"lupulo_floral_herbal":1.3},{"amargor":2.5},{"color":0.3},{"maltoso":0.2},{"licoroso":1.9},{"afrutado":1.7},{"especias":1.7},{"acidez":2}]'''
vector = []
for cat in self['cats']:
for subunit in obj:
if cat in subunit:
vector.append(subunit[cat])
return vector
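# e.g. for the sample payload in the docstring above this returns
# ['', 1.6, 1.3, 2.5, 0.3, 0.2, 1.9, 1.7, 1.7, 2] -- one entry per category,
# in self['cats'] order (missing categories are silently skipped, so a partial
# payload yields a shorter vector)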
@app.route('/guess',methods=['POST'])
def guess():
ref_obj = request.get_json(force=True)
ref_vector = __translate_obj_param__(ref_obj)
print(ref_vector)
    min_dif = 10000
    selected_id = -1
    for i, beer in enumerate(self['matrix']):
        dif = 0
        for j, value in enumerate(beer):
            #print("COMPARE %s - %s" % (value, ref_vector[j]))
            if float(value) != -1 and isinstance(ref_vector[j], (int, float)):
                minidif = abs(float(value) - float(ref_vector[j]))
            else:
                # missing value on either side: fall back to the fixed penalty distance
                minidif = self['NO_VALUE_DISTANCE']
            dif += minidif
            #print("RUNNING TOTAL %s %s" % (minidif, dif))
        if dif < min_dif:
            #print("NEW MINIMUM %s %s" % (dif, min_dif))
            #print("REF %s %s" % (ref_vector, beer))
            min_dif = dif
            selected_id = i
    print("SELECTED %s" % selected_id)
    print("VALUES %s" % (self['matrix'][selected_id]))
    print("QUERY %s" % (ref_vector))
    return json.dumps(selected_id)
if __name__=='__main__':
input_file = csv.DictReader(open(self['MATRIX_FILE'],"r"))
self['matrix'] = []
for row in input_file:
aux = []
for cat in self['cats']:
try:
value = float(row[cat])
except Exception as e:
value = -1.0
aux.append(value)
#aux.append(row[cat])
self['matrix'].append(aux)
input_file_names = csv.DictReader(open(self['NAMES_FILE'],"r"))
self['names'] = []
for row in input_file_names:
self['names'].append(row['Name'])
app.run(debug=True)
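# Example request against /guess (payload shape follows the docstring of
# __translate_obj_param__ above; values are illustrative):
#   curl -X POST http://localhost:5000/guess \
#        -d '[{"graduacion":""},{"lupulo_afrutado_citrico":1.6},{"lupulo_floral_herbal":1.3},{"amargor":2.5},{"color":0.3},{"maltoso":0.2},{"licoroso":1.9},{"afrutado":1.7},{"especias":1.7},{"acidez":2}]'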
|
[
"flask_cors.CORS",
"flask.Flask",
"json.dumps",
"flask.render_template",
"flask.request.get_json"
] |
[((539, 554), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (544, 554), False, 'from flask import Flask, request, render_template, url_for, json\n'), ((555, 564), 'flask_cors.CORS', 'CORS', (['app'], {}), '(app)\n', (559, 564), False, 'from flask_cors import CORS\n'), ((1234, 1267), 'flask.render_template', 'render_template', (['"""simpleViz.html"""'], {}), "('simpleViz.html')\n", (1249, 1267), False, 'from flask import Flask, request, render_template, url_for, json\n'), ((2000, 2028), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (2016, 2028), False, 'from flask import Flask, request, render_template, url_for, json\n'), ((2957, 2979), 'json.dumps', 'json.dumps', (['selectedId'], {}), '(selectedId)\n', (2967, 2979), False, 'import json\n')]
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''base_instance.py: module for base component (base for spout/bolt) and its spec'''
import logging
import traceback
from abc import abstractmethod
from heron.common.src.python.utils.log import Log
from heron.proto import tuple_pb2
from heron.instance.src.python.utils.misc import SerializerHelper
from heron.instance.src.python.utils.misc import OutgoingTupleHelper
from heron.instance.src.python.utils import system_config
import heron.instance.src.python.utils.system_constants as system_constants
import heron.common.src.python.pex_loader as pex_loader
import heronpy.api.global_metrics as global_metrics
import heronpy.api.api_constants as api_constants
from heronpy.api.state.stateful_component import StatefulComponent
# pylint: disable=too-many-instance-attributes
class BaseInstance(object):
"""The base class for heron bolt/spout instance
Implements the following functionality:
1. Basic output collector API and pushing tuples to Out-Stream
2. Run tasks continually
:ivar pplan_helper: Physical Plan Helper for this component
:ivar in_stream: In-Stream Heron Communicator
:ivar output_helper: Outgoing Tuple Helper
:ivar serializer: Implementation of Heron Serializer
"""
make_data_tuple = lambda _: tuple_pb2.HeronDataTuple()
def __init__(self, pplan_helper, in_stream, out_stream, looper):
self.pplan_helper = pplan_helper
self.in_stream = in_stream
self.output_helper = OutgoingTupleHelper(self.pplan_helper, out_stream)
self.looper = looper
self.sys_config = system_config.get_sys_config()
# will set a root logger here
self.logger = logging.getLogger()
context = pplan_helper.context
mode = context.get_cluster_config().get(api_constants.TOPOLOGY_RELIABILITY_MODE,
api_constants.TopologyReliabilityMode.ATMOST_ONCE)
self.is_stateful = bool(mode == api_constants.TopologyReliabilityMode.EFFECTIVELY_ONCE)
self._stateful_state = None
self.serializer = SerializerHelper.get_serializer(pplan_helper.context)
self._initialized_global_metrics = False
def log(self, message, level=None):
"""Log message, optionally providing a logging level
It is compatible with StreamParse API.
:type message: str
:param message: the log message to send
:type level: str
:param level: the logging level,
one of: trace (=debug), debug, info, warn or error (default: info)
"""
if level is None:
_log_level = logging.INFO
else:
if level == "trace" or level == "debug":
_log_level = logging.DEBUG
elif level == "info":
_log_level = logging.INFO
elif level == "warn":
_log_level = logging.WARNING
elif level == "error":
_log_level = logging.ERROR
else:
raise ValueError("%s is not supported as a logging level" % str(level))
self.logger.log(_log_level, message)
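# Illustrative calls from inside a spout/bolt subclass (hypothetical call
# sites, shown only to document the level mapping above):
#   self.log("tuple processed")              # level=None defaults to INFO
#   self.log("queue nearly full", "warn")    # maps to logging.WARNING
#   self.log("verbose details", "trace")     # trace is treated as DEBUG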
def admit_data_tuple(self, stream_id, data_tuple, tuple_size_in_bytes):
self.output_helper.add_data_tuple(stream_id, data_tuple, tuple_size_in_bytes)
def admit_control_tuple(self, control_tuple, tuple_size_in_bytes, is_ack):
self.output_helper.add_control_tuple(control_tuple, tuple_size_in_bytes, is_ack)
def admit_ckpt_state(self, ckpt_id, ckpt_state):
self.output_helper.add_ckpt_state(ckpt_id, self.serializer.serialize(ckpt_state))
def get_total_data_emitted_in_bytes(self):
return self.output_helper.total_data_emitted_in_bytes
def load_py_instance(self, is_spout):
"""Loads user defined component (spout/bolt)"""
try:
if is_spout:
spout_proto = self.pplan_helper.get_my_spout()
py_classpath = spout_proto.comp.class_name
self.logger.info("Loading Spout from: %s", py_classpath)
else:
bolt_proto = self.pplan_helper.get_my_bolt()
py_classpath = bolt_proto.comp.class_name
self.logger.info("Loading Bolt from: %s", py_classpath)
pex_loader.load_pex(self.pplan_helper.topology_pex_abs_path)
spbl_class = pex_loader.import_and_get_class(self.pplan_helper.topology_pex_abs_path,
py_classpath)
except Exception as e:
spbl = "spout" if is_spout else "bolt"
self.logger.error(traceback.format_exc())
raise RuntimeError("Error when loading a %s from pex: %s" % (spbl, str(e)))
return spbl_class
def handle_initiate_stateful_checkpoint(self, ckptmsg, component):
Log.info("Received initiate state checkpoint message for %s" % ckptmsg.checkpoint_id)
if not self.is_stateful:
raise RuntimeError("Received state checkpoint message but we are not stateful topology")
if isinstance(component, StatefulComponent):
component.pre_save(ckptmsg.checkpoint_id)
else:
Log.info("Trying to checkponit a non stateful component. Send empty state")
self.admit_ckpt_state(ckptmsg.checkpoint_id, self._stateful_state)
def clear_collector(self):
self.output_helper.clear()
def start(self, stateful_state):
self._stateful_state = stateful_state
self.start_component(stateful_state)
context = self.pplan_helper.context
context.invoke_hook_prepare()
# prepare global metrics
if not self._initialized_global_metrics:
interval = float(self.sys_config[system_constants.HERON_METRICS_EXPORT_INTERVAL_SEC])
collector = context.get_metrics_collector()
global_metrics.init(collector, interval)
self._initialized_global_metrics = True
# prepare for custom grouping
self.pplan_helper.prepare_custom_grouping(context)
def stop(self):
self.pplan_helper.context.invoke_hook_cleanup()
self.stop_component()
##################################################################
# The followings are to be implemented by Spout/Bolt independently
##################################################################
@abstractmethod
def start_component(self, stateful_state):
"""Do the basic setup for Heron Instance"""
raise NotImplementedError()
@abstractmethod
def stop_component(self):
"""Do the basic clean for Heron Instance
Note that this method is not guaranteed to be invoked
"""
raise NotImplementedError()
@abstractmethod
def process_incoming_tuples(self):
"""Should be called when a tuple was buffered into in_stream"""
raise NotImplementedError()
@abstractmethod
def invoke_activate(self):
"""Activate the instance"""
raise NotImplementedError()
@abstractmethod
def invoke_deactivate(self):
"""Deactivate the instance"""
raise NotImplementedError()
|
[
"heronpy.api.global_metrics.init",
"heron.instance.src.python.utils.misc.SerializerHelper.get_serializer",
"heron.proto.tuple_pb2.HeronDataTuple",
"heron.common.src.python.pex_loader.import_and_get_class",
"heron.common.src.python.pex_loader.load_pex",
"heron.instance.src.python.utils.misc.OutgoingTupleHelper",
"traceback.format_exc",
"heron.instance.src.python.utils.system_config.get_sys_config",
"logging.getLogger",
"heron.common.src.python.utils.log.Log.info"
] |
[((2090, 2116), 'heron.proto.tuple_pb2.HeronDataTuple', 'tuple_pb2.HeronDataTuple', ([], {}), '()\n', (2114, 2116), False, 'from heron.proto import tuple_pb2\n'), ((2278, 2328), 'heron.instance.src.python.utils.misc.OutgoingTupleHelper', 'OutgoingTupleHelper', (['self.pplan_helper', 'out_stream'], {}), '(self.pplan_helper, out_stream)\n', (2297, 2328), False, 'from heron.instance.src.python.utils.misc import OutgoingTupleHelper\n'), ((2376, 2406), 'heron.instance.src.python.utils.system_config.get_sys_config', 'system_config.get_sys_config', ([], {}), '()\n', (2404, 2406), False, 'from heron.instance.src.python.utils import system_config\n'), ((2460, 2479), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (2477, 2479), False, 'import logging\n'), ((2841, 2894), 'heron.instance.src.python.utils.misc.SerializerHelper.get_serializer', 'SerializerHelper.get_serializer', (['pplan_helper.context'], {}), '(pplan_helper.context)\n', (2872, 2894), False, 'from heron.instance.src.python.utils.misc import SerializerHelper\n'), ((5324, 5414), 'heron.common.src.python.utils.log.Log.info', 'Log.info', (["('Received initiate state checkpoint message for %s' % ckptmsg.checkpoint_id)"], {}), "('Received initiate state checkpoint message for %s' % ckptmsg.\n checkpoint_id)\n", (5332, 5414), False, 'from heron.common.src.python.utils.log import Log\n'), ((4808, 4868), 'heron.common.src.python.pex_loader.load_pex', 'pex_loader.load_pex', (['self.pplan_helper.topology_pex_abs_path'], {}), '(self.pplan_helper.topology_pex_abs_path)\n', (4827, 4868), True, 'import heron.common.src.python.pex_loader as pex_loader\n'), ((4888, 4978), 'heron.common.src.python.pex_loader.import_and_get_class', 'pex_loader.import_and_get_class', (['self.pplan_helper.topology_pex_abs_path', 'py_classpath'], {}), '(self.pplan_helper.topology_pex_abs_path,\n py_classpath)\n', (4919, 4978), True, 'import heron.common.src.python.pex_loader as pex_loader\n'), ((5647, 5722), 'heron.common.src.python.utils.log.Log.info', 'Log.info', (['"""Trying to checkponit a non stateful component. Send empty state"""'], {}), "('Trying to checkponit a non stateful component. Send empty state')\n", (5655, 5722), False, 'from heron.common.src.python.utils.log import Log\n'), ((6271, 6311), 'heronpy.api.global_metrics.init', 'global_metrics.init', (['collector', 'interval'], {}), '(collector, interval)\n', (6290, 6311), True, 'import heronpy.api.global_metrics as global_metrics\n'), ((5122, 5144), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (5142, 5144), False, 'import traceback\n')]
|
import time
import tensorflow as tf
import tensorlayer as tl
from tensorlayer.layers import *
def SRGAN_g(t_image, is_train=False, reuse=False):
""" Generator in Photo-Realistic Single Image Super-Resolution Using a Generative Adversarial Network
Layer names encode feature maps (n) and stride (s), e.g. 'n64s1' = 64 feature maps, stride 1.
"""
w_init = tf.random_normal_initializer(stddev=0.02)
b_init = None # tf.constant_initializer(value=0.0)
g_init = tf.random_normal_initializer(1., 0.02)
with tf.variable_scope("SRGAN_g", reuse=reuse) as vs:
# tl.layers.set_name_reuse(reuse) # remove for TL 1.8.0+
n = InputLayer(t_image, name='in')
n = ReshapeLayer(n, [-1, 512, 512, 1], name = 'reshape')
n = Conv2d(n, 64, (3, 3), (1, 1), act=tf.nn.relu, padding='SAME', W_init=w_init, name='n64s1/c')
temp = n
# B residual blocks
for i in range(16):
nn = Conv2d(n, 64, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='n64s1/c1/%s' % i)
nn = BatchNormLayer(nn, act=tf.nn.relu, is_train=is_train, gamma_init=g_init, name='n64s1/b1/%s' % i)
nn = Conv2d(nn, 64, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='n64s1/c2/%s' % i)
nn = BatchNormLayer(nn, is_train=is_train, gamma_init=g_init, name='n64s1/b2/%s' % i)
nn = ElementwiseLayer([n, nn], tf.add, name='b_residual_add/%s' % i)
n = nn
n = Conv2d(n, 64, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='n64s1/c/m')
n = BatchNormLayer(n, is_train=is_train, gamma_init=g_init, name='n64s1/b/m')
n = ElementwiseLayer([n, temp], tf.add, name='add3')
# B residual blocks end
n = Conv2d(n, 3, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, name='n256s1/1')
return n
def SRGAN_d(input_images, is_train=True, reuse=False):
w_init = tf.random_normal_initializer(stddev=0.02)
b_init = None # tf.constant_initializer(value=0.0)
gamma_init = tf.random_normal_initializer(1., 0.02)
df_dim = 64
lrelu = lambda x: tl.act.lrelu(x, 0.2)
with tf.variable_scope("SRGAN_d", reuse=reuse):
tl.layers.set_name_reuse(reuse)
net_in = InputLayer(input_images, name='input/images')
reshape = ReshapeLayer(net_in, [-1, 512, 512, 3], name = 'reshape')
net_h0 = Conv2d(reshape, df_dim, (4, 4), (2, 2), act=lrelu, padding='SAME', W_init=w_init, name='h0/c')
net_h1 = Conv2d(net_h0, df_dim * 2, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h1/c')
net_h1 = BatchNormLayer(net_h1, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h1/bn')
net_h2 = Conv2d(net_h1, df_dim * 4, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h2/c')
net_h2 = BatchNormLayer(net_h2, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h2/bn')
net_h3 = Conv2d(net_h2, df_dim * 8, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h3/c')
net_h3 = BatchNormLayer(net_h3, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h3/bn')
net_h4 = Conv2d(net_h3, df_dim * 16, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h4/c')
net_h4 = BatchNormLayer(net_h4, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h4/bn')
net_h5 = Conv2d(net_h4, df_dim * 32, (4, 4), (2, 2), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h5/c')
net_h5 = BatchNormLayer(net_h5, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h5/bn')
net_h6 = Conv2d(net_h5, df_dim * 16, (1, 1), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h6/c')
net_h6 = BatchNormLayer(net_h6, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='h6/bn')
net_h7 = Conv2d(net_h6, df_dim * 8, (1, 1), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='h7/c')
net_h7 = BatchNormLayer(net_h7, is_train=is_train, gamma_init=gamma_init, name='h7/bn')
net = Conv2d(net_h7, df_dim * 2, (1, 1), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='res/c')
net = BatchNormLayer(net, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='res/bn')
net = Conv2d(net, df_dim * 2, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='res/c2')
net = BatchNormLayer(net, act=lrelu, is_train=is_train, gamma_init=gamma_init, name='res/bn2')
net = Conv2d(net, df_dim * 8, (3, 3), (1, 1), act=None, padding='SAME', W_init=w_init, b_init=b_init, name='res/c3')
net = BatchNormLayer(net, is_train=is_train, gamma_init=gamma_init, name='res/bn3')
net_h8 = ElementwiseLayer([net_h7, net], combine_fn=tf.add, name='res/add')
net_h8.outputs = tl.act.lrelu(net_h8.outputs, 0.2)
net_ho = FlattenLayer(net_h8, name='ho/flatten')
net_ho = DenseLayer(net_ho, n_units=1, act=tf.identity, W_init=w_init, name='ho/dense')
logits = net_ho.outputs
net_ho.outputs = tf.nn.sigmoid(net_ho.outputs)
return net_ho, logits
def Vgg19_simple_api(input, reuse, nchannels, rgb=False):
"""
Build the VGG 19 Model
Parameters
-----------
input : image placeholder [batch, height, width, nchannels], values scaled [0, 1]
rgb : if True, treat the input as scaled RGB and convert it to mean-subtracted BGR
"""
VGG_MEAN = [103.939, 116.779, 123.68]
with tf.variable_scope("VGG19", reuse=reuse) as vs:
start_time = time.time()
print("build model started")
if rgb == True:
rgb_scaled = input * 255.0
# Convert RGB to BGR
if tf.__version__ <= '0.11':
red, green, blue = tf.split(3, 3, rgb_scaled)
else: # TF 1.0
# print(rgb_scaled)
red, green, blue = tf.split(rgb_scaled, 3, 3)
assert red.get_shape().as_list()[1:] == [224, 224, 1]
assert green.get_shape().as_list()[1:] == [224, 224, 1]
assert blue.get_shape().as_list()[1:] == [224, 224, 1]
if tf.__version__ <= '0.11':
bgr = tf.concat(3, [
blue - VGG_MEAN[0],
green - VGG_MEAN[1],
red - VGG_MEAN[2],
])
else:
bgr = tf.concat(
[
blue - VGG_MEAN[0],
green - VGG_MEAN[1],
red - VGG_MEAN[2],
], axis=3)
assert bgr.get_shape().as_list()[1:] == [224, 224, 3]
""" input layer """
net_in = InputLayer(bgr, name='input')
else:
assert input.get_shape().as_list()[1:] == [224, 224, nchannels]
net_in = InputLayer(input, name = 'input')
""" conv1 """
network = Conv2d(net_in, n_filter=64, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv1_1')
network = Conv2d(network, n_filter=64, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv1_2')
network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool1')
""" conv2 """
network = Conv2d(network, n_filter=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv2_1')
network = Conv2d(network, n_filter=128, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv2_2')
network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool2')
""" conv3 """
network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_1')
network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_2')
network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_3')
network = Conv2d(network, n_filter=256, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv3_4')
network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool3')
""" conv4 """
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_1')
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_2')
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_3')
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv4_4')
network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool4') # (batch_size, 14, 14, 512)
conv = network
""" conv5 """
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_1')
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_2')
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_3')
network = Conv2d(network, n_filter=512, filter_size=(3, 3), strides=(1, 1), act=tf.nn.relu, padding='SAME', name='conv5_4')
network = MaxPool2d(network, filter_size=(2, 2), strides=(2, 2), padding='SAME', name='pool5') # (batch_size, 7, 7, 512)
""" fc 6~8 """
network = FlattenLayer(network, name='flatten')
network = DenseLayer(network, n_units=4096, act=tf.nn.relu, name='fc6')
network = DenseLayer(network, n_units=4096, act=tf.nn.relu, name='fc7')
network = DenseLayer(network, n_units=1000, act=tf.identity, name='fc8')
print("build model finished: %fs" % (time.time() - start_time))
return network, conv
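# Usage sketch (comments only; the placeholders are hypothetical, and the
# shapes follow the 512x512 ReshapeLayers hard-coded above):
#   t_image = tf.placeholder('float32', [None, 512, 512, 1])
#   t_target = tf.placeholder('float32', [None, 512, 512, 3])
#   net_g = SRGAN_g(t_image, is_train=True, reuse=False)
#   net_d, logits_real = SRGAN_d(t_target, is_train=True, reuse=False)
#   _, logits_fake = SRGAN_d(net_g.outputs, is_train=True, reuse=True)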
|
[
"tensorflow.split",
"tensorflow.variable_scope",
"tensorflow.concat",
"tensorlayer.act.lrelu",
"time.time",
"tensorlayer.layers.set_name_reuse",
"tensorflow.random_normal_initializer",
"tensorflow.nn.sigmoid"
] |
[((342, 383), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', ([], {'stddev': '(0.02)'}), '(stddev=0.02)\n', (370, 383), True, 'import tensorflow as tf\n'), ((453, 492), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(1.0)', '(0.02)'], {}), '(1.0, 0.02)\n', (481, 492), True, 'import tensorflow as tf\n'), ((1956, 1997), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', ([], {'stddev': '(0.02)'}), '(stddev=0.02)\n', (1984, 1997), True, 'import tensorflow as tf\n'), ((2071, 2110), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', (['(1.0)', '(0.02)'], {}), '(1.0, 0.02)\n', (2099, 2110), True, 'import tensorflow as tf\n'), ((502, 543), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""SRGAN_g"""'], {'reuse': 'reuse'}), "('SRGAN_g', reuse=reuse)\n", (519, 543), True, 'import tensorflow as tf\n'), ((2148, 2168), 'tensorlayer.act.lrelu', 'tl.act.lrelu', (['x', '(0.2)'], {}), '(x, 0.2)\n', (2160, 2168), True, 'import tensorlayer as tl\n'), ((2178, 2219), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""SRGAN_d"""'], {'reuse': 'reuse'}), "('SRGAN_d', reuse=reuse)\n", (2195, 2219), True, 'import tensorflow as tf\n'), ((2229, 2260), 'tensorlayer.layers.set_name_reuse', 'tl.layers.set_name_reuse', (['reuse'], {}), '(reuse)\n', (2253, 2260), True, 'import tensorlayer as tl\n'), ((4940, 4973), 'tensorlayer.act.lrelu', 'tl.act.lrelu', (['net_h8.outputs', '(0.2)'], {}), '(net_h8.outputs, 0.2)\n', (4952, 4973), True, 'import tensorlayer as tl\n'), ((5185, 5214), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['net_ho.outputs'], {}), '(net_ho.outputs)\n', (5198, 5214), True, 'import tensorflow as tf\n'), ((5507, 5546), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""VGG19"""'], {'reuse': 'reuse'}), "('VGG19', reuse=reuse)\n", (5524, 5546), True, 'import tensorflow as tf\n'), ((5575, 5586), 'time.time', 'time.time', ([], {}), '()\n', (5584, 5586), False, 'import time\n'), ((5797, 5823), 'tensorflow.split', 'tf.split', (['(3)', '(3)', 'rgb_scaled'], {}), '(3, 3, rgb_scaled)\n', (5805, 5823), True, 'import tensorflow as tf\n'), ((5923, 5949), 'tensorflow.split', 'tf.split', (['rgb_scaled', '(3)', '(3)'], {}), '(rgb_scaled, 3, 3)\n', (5931, 5949), True, 'import tensorflow as tf\n'), ((6214, 6288), 'tensorflow.concat', 'tf.concat', (['(3)', '[blue - VGG_MEAN[0], green - VGG_MEAN[1], red - VGG_MEAN[2]]'], {}), '(3, [blue - VGG_MEAN[0], green - VGG_MEAN[1], red - VGG_MEAN[2]])\n', (6223, 6288), True, 'import tensorflow as tf\n'), ((6408, 6487), 'tensorflow.concat', 'tf.concat', (['[blue - VGG_MEAN[0], green - VGG_MEAN[1], red - VGG_MEAN[2]]'], {'axis': '(3)'}), '([blue - VGG_MEAN[0], green - VGG_MEAN[1], red - VGG_MEAN[2]], axis=3)\n', (6417, 6487), True, 'import tensorflow as tf\n'), ((10080, 10091), 'time.time', 'time.time', ([], {}), '()\n', (10089, 10091), False, 'import time\n')]
|
import os
import apprise
valid_tokens = {
'base_url': '',
'watch_url': '',
'watch_uuid': '',
'watch_title': '',
'watch_tag': '',
'diff_url': '',
'preview_url': '',
'current_snapshot': ''
}
def process_notification(n_object, datastore):
import logging
log = logging.getLogger('apprise')
log.setLevel('TRACE')
apobj = apprise.Apprise(debug=True)
for url in n_object['notification_urls']:
url = url.strip()
print (">> Process Notification: AppRise notifying {}".format(url))
apobj.add(url)
# Get the notification body from datastore
n_body = n_object['notification_body']
n_title = n_object['notification_title']
# Insert variables into the notification content
notification_parameters = create_notification_parameters(n_object, datastore)
for n_k in notification_parameters:
token = '{' + n_k + '}'
val = notification_parameters[n_k]
n_title = n_title.replace(token, val)
n_body = n_body.replace(token, val)
apobj.notify(
body=n_body,
title=n_title
)
# Notification title + body content parameters get created here.
def create_notification_parameters(n_object, datastore):
from copy import deepcopy
# in the case we send a test notification from the main settings, there is no UUID.
uuid = n_object['uuid'] if 'uuid' in n_object else ''
if uuid != '':
watch_title = datastore.data['watching'][uuid]['title']
watch_tag = datastore.data['watching'][uuid]['tag']
else:
watch_title = 'Change Detection'
watch_tag = ''
# Create URLs to customise the notification with
base_url = datastore.data['settings']['application']['base_url']
watch_url = n_object['watch_url']
# Re #148 - Some people have just {base_url} in the body or title, but this may break some notification services
# like 'Join', so it's always best to at least set something obvious so that they are not broken.
if base_url == '':
base_url = "<base-url-env-var-not-set>"
diff_url = "{}/diff/{}".format(base_url, uuid)
preview_url = "{}/preview/{}".format(base_url, uuid)
# Not sure deepcopy is needed here, but why not
tokens = deepcopy(valid_tokens)
# Valid_tokens also used as a field validator
tokens.update(
{
'base_url': base_url if base_url is not None else '',
'watch_url': watch_url,
'watch_uuid': uuid,
'watch_title': watch_title if watch_title is not None else '',
'watch_tag': watch_tag if watch_tag is not None else '',
'diff_url': diff_url,
'preview_url': preview_url,
'current_snapshot': n_object['current_snapshot'] if 'current_snapshot' in n_object else ''
})
return tokens
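# Illustrative n_object (all values hypothetical) showing how the {tokens}
# above get substituted into the title/body:
#   n_object = {
#       'notification_urls': ['mailto://user:password@example.com'],
#       'notification_title': 'Change detected in {watch_title}',
#       'notification_body': 'See the diff at {diff_url}',
#       'watch_url': 'https://example.com/page',
#       'uuid': '<watch-uuid>',
#   }
#   process_notification(n_object, datastore)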
|
[
"copy.deepcopy",
"apprise.Apprise",
"logging.getLogger"
] |
[((300, 328), 'logging.getLogger', 'logging.getLogger', (['"""apprise"""'], {}), "('apprise')\n", (317, 328), False, 'import logging\n'), ((367, 394), 'apprise.Apprise', 'apprise.Apprise', ([], {'debug': '(True)'}), '(debug=True)\n', (382, 394), False, 'import apprise\n'), ((2268, 2290), 'copy.deepcopy', 'deepcopy', (['valid_tokens'], {}), '(valid_tokens)\n', (2276, 2290), False, 'from copy import deepcopy\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Traffic Module.
# Stat traffic
#
import database, user, config
import re
from utils import get_stdout
# the command that runs iptables
IPTABLES = ('/sbin/iptables',)
CHAIN_NAME = 'SSLAND2'
def stat():
'''
Get the stat data and store it into the database.
'''
update_iptables()
cs, ec = get_stdout(IPTABLES + ('-nxvL', CHAIN_NAME)) # get current stat
if ec == 3: return False # No privilege
get_stdout(IPTABLES + ('-Z',CHAIN_NAME)) # zero the chain's counters
t = {} # dict PORT=>(pkgs, bytes)
for i in re.findall(r"^\s*(\d+)\s+(\d+).+[sd]pt:(\d+)", cs, re.M):
# i = Pkgs Traffic(byte) port
port = int(i[2])
if not port in t: t[port] = [0, 0]
t[port][0] += int(i[0])
t[port][1] += int(i[1])
query = []
users = user.get_all(only_active=True)
for u in users:
port = u.get_port()
if port in t:
ti = t[port]
if ti[0] and ti[1]: query.append((u.id, ti[0], ti[1])) # skip empty record
cursor = database.conn.cursor()
cursor.executemany('INSERT INTO traffic(user,packages,traffic) VALUES (?, ?, ?)', query)
cursor.close()
database.conn.commit()
def update_iptables():
'''
Add IPTABLES rules if necessary.
'''
cs, ec = get_stdout(IPTABLES + ('-nxvL', CHAIN_NAME)) # get current stat
if ec == 3: return False # No privilege
if ec == 1: # chain not found
# create the chain
get_stdout(IPTABLES + ('-N',CHAIN_NAME))
get_stdout(IPTABLES + ('-I','INPUT','1','-j',CHAIN_NAME))
get_stdout(IPTABLES + ('-I','OUTPUT','1','-j',CHAIN_NAME))
sport = set(int(r) for r in re.findall(r"\bspt:(\d+)", cs, re.M))
dport = set(int(r) for r in re.findall(r"\bdpt:(\d+)", cs, re.M))
users = user.get_all(only_active=True)
for u in users:
port = u.get_port()
if not port in sport: get_stdout(IPTABLES + ('-A',CHAIN_NAME,'-p','tcp','--sport',str(port)))
if not port in dport: get_stdout(IPTABLES + ('-A',CHAIN_NAME,'-p','tcp','--dport',str(port)))
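# For reference, the rules created above are equivalent to running
# (port 1234 is illustrative):
#   iptables -N SSLAND2
#   iptables -I INPUT 1 -j SSLAND2
#   iptables -I OUTPUT 1 -j SSLAND2
#   iptables -A SSLAND2 -p tcp --sport 1234
#   iptables -A SSLAND2 -p tcp --dport 1234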
QS_NONE=0
QS_ALL=1
QS_MONTH=2
QS_DAY=3
QS_YEAR=4
QS_HOUR=5
def query(uid=-1, min_time=None, max_time=None, sum=QS_NONE):
'''
Query the database, returning a list of tuples (userID, pkgs, bytes, str_time).
'''
cond = []
if uid>=1: cond.append('user = %d'%uid)
if min_time: cond.append('datetime(time) >= \'%s\''%min_time) # format: 2016-05-10 12:59:00
if max_time: cond.append('datetime(time) <= \'%s\''%max_time)
q_where = (' WHERE '+' AND '.join(cond)) if len(cond) else ''
if sum:
sumfunc = "time" if sum == QS_ALL else \
"strftime('%Y-%m-%d %H',time)" if sum == QS_HOUR else \
"strftime('%Y', time)" if sum == QS_YEAR else \
"strftime('%Y-%m', time)" if sum == QS_MONTH else \
"date(time)" if sum == QS_DAY else \
"time"
query = 'SELECT user, sum(packages), sum(traffic), %s AS t FROM traffic %s GROUP BY user' % (sumfunc, q_where)
if sumfunc != "time": query = query + ', t'
else:
query = 'SELECT user, packages, traffic, time FROM traffic' + q_where
cursor = database.conn.cursor()
cursor.execute(query)
result = cursor.fetchall()
cursor.close()
return result
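# Usage sketch (arguments illustrative): monthly per-user totals for user 3.
#   rows = query(uid=3, min_time='2016-01-01 00:00:00', sum=QS_MONTH)
#   for user_id, pkgs, nbytes, period in rows:
#       print(user_id, period, nbytes)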
|
[
"database.conn.cursor",
"database.conn.commit",
"utils.get_stdout",
"re.findall",
"user.get_all"
] |
[((357, 401), 'utils.get_stdout', 'get_stdout', (["(IPTABLES + ('-nxvL', CHAIN_NAME))"], {}), "(IPTABLES + ('-nxvL', CHAIN_NAME))\n", (367, 401), False, 'from utils import get_stdout\n'), ((471, 512), 'utils.get_stdout', 'get_stdout', (["(IPTABLES + ('-Z', CHAIN_NAME))"], {}), "(IPTABLES + ('-Z', CHAIN_NAME))\n", (481, 512), False, 'from utils import get_stdout\n'), ((592, 652), 're.findall', 're.findall', (['"""^\\\\s*(\\\\d+)\\\\s+(\\\\d+).+[sd]pt:(\\\\d+)"""', 'cs', 're.M'], {}), "('^\\\\s*(\\\\d+)\\\\s+(\\\\d+).+[sd]pt:(\\\\d+)', cs, re.M)\n", (602, 652), False, 'import re\n'), ((860, 890), 'user.get_all', 'user.get_all', ([], {'only_active': '(True)'}), '(only_active=True)\n', (872, 890), False, 'import database, user, config\n'), ((1104, 1126), 'database.conn.cursor', 'database.conn.cursor', ([], {}), '()\n', (1124, 1126), False, 'import database, user, config\n'), ((1243, 1265), 'database.conn.commit', 'database.conn.commit', ([], {}), '()\n', (1263, 1265), False, 'import database, user, config\n'), ((1356, 1400), 'utils.get_stdout', 'get_stdout', (["(IPTABLES + ('-nxvL', CHAIN_NAME))"], {}), "(IPTABLES + ('-nxvL', CHAIN_NAME))\n", (1366, 1400), False, 'from utils import get_stdout\n'), ((1871, 1901), 'user.get_all', 'user.get_all', ([], {'only_active': '(True)'}), '(only_active=True)\n', (1883, 1901), False, 'import database, user, config\n'), ((3361, 3383), 'database.conn.cursor', 'database.conn.cursor', ([], {}), '()\n', (3381, 3383), False, 'import database, user, config\n'), ((1535, 1576), 'utils.get_stdout', 'get_stdout', (["(IPTABLES + ('-N', CHAIN_NAME))"], {}), "(IPTABLES + ('-N', CHAIN_NAME))\n", (1545, 1576), False, 'from utils import get_stdout\n'), ((1584, 1645), 'utils.get_stdout', 'get_stdout', (["(IPTABLES + ('-I', 'INPUT', '1', '-j', CHAIN_NAME))"], {}), "(IPTABLES + ('-I', 'INPUT', '1', '-j', CHAIN_NAME))\n", (1594, 1645), False, 'from utils import get_stdout\n'), ((1650, 1712), 'utils.get_stdout', 'get_stdout', (["(IPTABLES + ('-I', 'OUTPUT', '1', '-j', CHAIN_NAME))"], {}), "(IPTABLES + ('-I', 'OUTPUT', '1', '-j', CHAIN_NAME))\n", (1660, 1712), False, 'from utils import get_stdout\n'), ((1746, 1783), 're.findall', 're.findall', (['"""\\\\bspt:(\\\\d+)"""', 'cs', 're.M'], {}), "('\\\\bspt:(\\\\d+)', cs, re.M)\n", (1756, 1783), False, 'import re\n'), ((1816, 1853), 're.findall', 're.findall', (['"""\\\\bdpt:(\\\\d+)"""', 'cs', 're.M'], {}), "('\\\\bdpt:(\\\\d+)', cs, re.M)\n", (1826, 1853), False, 'import re\n')]
|
import math
import json
# Source: https://janakiev.com/blog/gps-points-distance-python/
def _haversine(gps_1: tuple, gps_2: tuple) -> float:
"""
Calculate distance in meters between two GPS coordinates.
Takes 2x (latitude, longitude), return meters between them.
"""
R = 6372800 # Earth radius in meters
lat1, lon1 = gps_1
lat2, lon2 = gps_2
phi1, phi2 = math.radians(lat1), math.radians(lat2)
dphi = math.radians(lat2 - lat1)
dlambda = math.radians(lon2 - lon1)
a = math.sin(dphi/2)**2 + \
math.cos(phi1)*math.cos(phi2)*math.sin(dlambda/2)**2
return 2*R*math.atan2(math.sqrt(a), math.sqrt(1 - a))
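# Quick sanity check (coordinates assumed, result approximate):
# Prague (50.0755, 14.4378) to Brno (49.1951, 16.6068) is roughly 185 km.
#   >>> round(_haversine((50.0755, 14.4378), (49.1951, 16.6068)) / 1000)
#   185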
class ConnectionsAccess():
"""
Class enables simple access to JSON connection data.
"""
def __init__(self, json_data: dict):
self._json_data = json_data
def get_stops(self) -> list:
return list(self._json_data.keys())
def get_gps(self, stop: str) -> tuple:
return (self._json_data[stop]["latitude"], self._json_data[stop]["longitude"])
def get_distance_gps(self, stop_1: str, stop_2: str) -> float:
(x1, y1) = self.get_gps(stop_1)
(x2, y2) = self.get_gps(stop_2)
return ((x1-x2)**2 + (y1-y2)**2)**0.5
def get_distance_m(self, stop_1: str, stop_2: str) -> float:
return _haversine(self.get_gps(stop_1), self.get_gps(stop_2))
def get_connections(self, stop: str) -> list:
return list(self._json_data[stop]["connections"].keys())
def get_connection_distance_km(self, stop: str, connection: str) -> float:
return self._json_data[stop]["connections"][connection]["distance_km"]
def get_connection_distance_min(self, stop: str, connection: str) -> float:
return self._json_data[stop]["connections"][connection]["distance_min"]
class PathCalculations():
"""
Class calculates shortest paths from a given stop to all other stops.
It can save a JSON file, sorted by total distance, with all the
available connections from that stop.
"""
def __init__(self, connections: ConnectionsAccess, stop: str):
self._connections = connections
self._map = {} # Dictionary of all stops and whether they were processed
self._stops_queue = []
self._result = [] # [(Name, Total Distance, [Path]), ...]
self._start_stop = stop
def find_all_connections(self):
"""
Best-first (Dijkstra-style) search through all the stops:
the queue is kept sorted by total distance, so stops are saved in ascending order.
"""
self._initialize_map()
self._stops_queue = []
# Mark the start stop as processed (False = already handled)
self._map[self._start_stop] = False
# Seed the queue with the start stop at zero distance
# (Name, Total Distance, [Path])
self._stops_queue.append((self._start_stop, 0, [self._start_stop]))
while self._stops_queue:
# Sort
self._stops_queue.sort(key=lambda tup: tup[1], reverse=True)
# Save the closest stop into the result list
self._result.append(self._stops_queue.pop())
# Expand the closest
self._expand()
def get_results(self) -> dict:
return self._result
def save(self, results_json: str = ""):
if results_json == "":
results_json = "output/" + self._start_stop + ".json"
with open(results_json, 'w', encoding="utf8") as f:
json.dump(self._result, f, ensure_ascii=False, sort_keys=True, indent=4)
def load(self, results_json: str = ""):
if results_json == "":
results_json = "output/" + self._start_stop + ".json"
with open(results_json, encoding="utf8") as f:
self._result = json.load(f)
def _initialize_map(self):
"""
Mark all stops as unprocessed (True).
"""
self._map = {}
for stop in self._connections.get_stops():
self._map[stop] = True
def _expand(self):
"""
Expand the last stop from results, add to queue.
"""
current_stop = self._result[-1]
connections_list = self._connections.get_connections(current_stop[0])
for connection in connections_list:
if self._map.get(connection, False):
# (Name, Total Distance, [Path])
self._stops_queue.append((connection, round(current_stop[1] + self._connections.get_connection_distance_min(current_stop[0], connection), 1), current_stop[2] + [connection]))
# Set stops as processed
self._map[connection] = False
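# Minimal usage sketch (file name and stop name are hypothetical):
#   with open("connections.json", encoding="utf8") as f:
#       conns = ConnectionsAccess(json.load(f))
#   paths = PathCalculations(conns, "Main Station")
#   paths.find_all_connections()
#   paths.save()  # defaults to output/Main Station.json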
|
[
"json.dump",
"json.load",
"math.sqrt",
"math.radians",
"math.sin",
"math.cos"
] |
[((441, 466), 'math.radians', 'math.radians', (['(lat2 - lat1)'], {}), '(lat2 - lat1)\n', (453, 466), False, 'import math\n'), ((481, 506), 'math.radians', 'math.radians', (['(lon2 - lon1)'], {}), '(lon2 - lon1)\n', (493, 506), False, 'import math\n'), ((391, 409), 'math.radians', 'math.radians', (['lat1'], {}), '(lat1)\n', (403, 409), False, 'import math\n'), ((411, 429), 'math.radians', 'math.radians', (['lat2'], {}), '(lat2)\n', (423, 429), False, 'import math\n'), ((516, 534), 'math.sin', 'math.sin', (['(dphi / 2)'], {}), '(dphi / 2)\n', (524, 534), False, 'import math\n'), ((628, 640), 'math.sqrt', 'math.sqrt', (['a'], {}), '(a)\n', (637, 640), False, 'import math\n'), ((642, 658), 'math.sqrt', 'math.sqrt', (['(1 - a)'], {}), '(1 - a)\n', (651, 658), False, 'import math\n'), ((3342, 3414), 'json.dump', 'json.dump', (['self._result', 'f'], {'ensure_ascii': '(False)', 'sort_keys': '(True)', 'indent': '(4)'}), '(self._result, f, ensure_ascii=False, sort_keys=True, indent=4)\n', (3351, 3414), False, 'import json\n'), ((3640, 3652), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3649, 3652), False, 'import json\n'), ((548, 562), 'math.cos', 'math.cos', (['phi1'], {}), '(phi1)\n', (556, 562), False, 'import math\n'), ((563, 577), 'math.cos', 'math.cos', (['phi2'], {}), '(phi2)\n', (571, 577), False, 'import math\n'), ((578, 599), 'math.sin', 'math.sin', (['(dlambda / 2)'], {}), '(dlambda / 2)\n', (586, 599), False, 'import math\n')]
|
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient
from main_app.models import Recipe, Tag, Ingredient
from recipe_app.serializers import RecipeSerializer, RecipeDetailSerializer
RECIPES_URL = reverse('recipe_app:recipe-list')
# /api/recipe/recipes
# /api/recipe/recipes/1/
def detail_url(recipe_id):
"""Return recipe detail URL"""
# pass args as a list because a URL can take multiple args
return reverse('recipe_app:recipe-detail', args=[recipe_id])
def sample_tag(user, name='TagName'):
"""Create and return sample tag"""
return Tag.objects.create(user=user, name=name)
def sample_ingredinet(user, name='IngredinetName'):
"""Create and return sample tag"""
return Ingredient.objects.create(user=user, name=name)
def sample_recipe(user, **params):
"""Create sample recipe"""
# keep defaults simple for easier debugging
defaults = {
'title': "Sample recipe",
'time_minutes': 10,
'price': 5.00
}
# whichever keys appear in 'params' override the matching entries in 'defaults'
defaults.update(params)
return Recipe.objects.create(user=user, **defaults)
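# Illustrative override: sample_recipe(user, title='Soup') keeps the default
# time_minutes/price but replaces the title.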
class PublicRecipeTest(TestCase):
"""Test unatheticated recipe API acces"""
def setUp(self):
self.client = APIClient()
def test_auth_required(self):
"""Test that auth is requred to create recipe"""
res = self.client.get(RECIPES_URL)
self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateRecipeApiTest(TestCase):
"""Test untentificated recipe API access"""
def setUp(self):
self.client = APIClient()
self.user = get_user_model().objects.create_user(
'<EMAIL>',
'<PASSWORD>'
)
self.client.force_authenticate(self.user)
def test_recipe_retriving(self):
"""Test that recipe can be created and info is corect"""
sample_recipe(self.user)
sample_recipe(self.user)
res = self.client.get(RECIPES_URL)
# get the recipes we just created, newest first (reverse order by '-id')
recipes = Recipe.objects.all().order_by('-id')
serializer = RecipeSerializer(recipes, many=True)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data, serializer.data)
def test_recipes_limited_to_user(self):
"""Test retriving recipes are for user"""
user2 = get_user_model().objects.create_user(
"<EMAIL>",
"<PASSWORD>word123"
)
sample_recipe(user=self.user)
sample_recipe(user=user2)
res = self.client.get(RECIPES_URL)
recipes = Recipe.objects.filter(user=self.user)
serializer = RecipeSerializer(recipes, many=True)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data, serializer.data)
def test_view_recipe_detail(self):
"""Test viewing a recipe detail"""
recipe = sample_recipe(user=self.user)
recipe.tags.add(sample_tag(user=self.user))
recipe.ingredients.add(sample_ingredinet(user=self.user))
url = detail_url(recipe.id)
res = self.client.get(url)
serializer = RecipeDetailSerializer(recipe)
self.assertEqual(res.data, serializer.data)
def test_create_basic_recipe(self):
"""Test creating recipe"""
payload = {
'title': 'Chocolate cheesecake',
'time_minutes': 30,
'price': 5.00
}
res = self.client.post(RECIPES_URL, payload)
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
recipe = Recipe.objects.get(id=res.data['id'])
for key in payload.keys():
self.assertEqual(payload[key], getattr(recipe, key))
def test_create_tags_recipe(self):
"""Test creating recipes with tags"""
tag1 = sample_tag(user=self.user, name='Vega')
tag2 = sample_tag(user=self.user, name='Desert')
payload = {
'title': 'Avocado cheesecake',
'tags': [tag1.id, tag2.id],
'time_minutes': 30,
'price': 5.00
}
res = self.client.post(RECIPES_URL, payload)
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
recipe = Recipe.objects.get(id=res.data['id'])
tags = recipe.tags.all()
self.assertEqual(tags.count(), 2)
self.assertIn(tag1, tags)
self.assertIn(tag2, tags)
def test_create_recipe_ingrediets(self):
"""Test creating recipe with ingredients"""
ingredient1 = sample_ingredinet(user=self.user, name='Prawns')
ingredient2 = sample_ingredinet(user=self.user, name='Ginger')
payload = {
'title': 'Avocado cheesecake',
'ingredients': [ingredient1.id, ingredient2.id],
'time_minutes': 30,
'price': 5.00
}
res = self.client.post(RECIPES_URL, payload)
self.assertEqual(res.status_code, status.HTTP_201_CREATED)
recipe = Recipe.objects.get(id=res.data['id'])
ingredients = recipe.ingredients.all()
self.assertEqual(ingredients.count(), 2)
self.assertIn(ingredient1, ingredients)
self.assertIn(ingredient2, ingredients)
|
[
"main_app.models.Tag.objects.create",
"recipe_app.serializers.RecipeSerializer",
"django.contrib.auth.get_user_model",
"recipe_app.serializers.RecipeDetailSerializer",
"django.urls.reverse",
"main_app.models.Recipe.objects.filter",
"main_app.models.Recipe.objects.get",
"main_app.models.Recipe.objects.all",
"main_app.models.Ingredient.objects.create",
"rest_framework.test.APIClient",
"main_app.models.Recipe.objects.create"
] |
[((334, 367), 'django.urls.reverse', 'reverse', (['"""recipe_app:recipe-list"""'], {}), "('recipe_app:recipe-list')\n", (341, 367), False, 'from django.urls import reverse\n'), ((548, 601), 'django.urls.reverse', 'reverse', (['"""recipe_app:recipe-detail"""'], {'args': '[recipe_id]'}), "('recipe_app:recipe-detail', args=[recipe_id])\n", (555, 601), False, 'from django.urls import reverse\n'), ((692, 732), 'main_app.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'user', 'name': 'name'}), '(user=user, name=name)\n', (710, 732), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((837, 884), 'main_app.models.Ingredient.objects.create', 'Ingredient.objects.create', ([], {'user': 'user', 'name': 'name'}), '(user=user, name=name)\n', (862, 884), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((1196, 1240), 'main_app.models.Recipe.objects.create', 'Recipe.objects.create', ([], {'user': 'user'}), '(user=user, **defaults)\n', (1217, 1240), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((1367, 1378), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1376, 1378), False, 'from rest_framework.test import APIClient\n'), ((1720, 1731), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1729, 1731), False, 'from rest_framework.test import APIClient\n'), ((2298, 2334), 'recipe_app.serializers.RecipeSerializer', 'RecipeSerializer', (['recipes'], {'many': '(True)'}), '(recipes, many=True)\n', (2314, 2334), False, 'from recipe_app.serializers import RecipeSerializer, RecipeDetailSerializer\n'), ((2799, 2836), 'main_app.models.Recipe.objects.filter', 'Recipe.objects.filter', ([], {'user': 'self.user'}), '(user=self.user)\n', (2820, 2836), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((2858, 2894), 'recipe_app.serializers.RecipeSerializer', 'RecipeSerializer', (['recipes'], {'many': '(True)'}), '(recipes, many=True)\n', (2874, 2894), False, 'from recipe_app.serializers import RecipeSerializer, RecipeDetailSerializer\n'), ((3395, 3425), 'recipe_app.serializers.RecipeDetailSerializer', 'RecipeDetailSerializer', (['recipe'], {}), '(recipe)\n', (3417, 3425), False, 'from recipe_app.serializers import RecipeSerializer, RecipeDetailSerializer\n'), ((3825, 3862), 'main_app.models.Recipe.objects.get', 'Recipe.objects.get', ([], {'id': "res.data['id']"}), "(id=res.data['id'])\n", (3843, 3862), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((4471, 4508), 'main_app.models.Recipe.objects.get', 'Recipe.objects.get', ([], {'id': "res.data['id']"}), "(id=res.data['id'])\n", (4489, 4508), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((5226, 5263), 'main_app.models.Recipe.objects.get', 'Recipe.objects.get', ([], {'id': "res.data['id']"}), "(id=res.data['id'])\n", (5244, 5263), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((2240, 2260), 'main_app.models.Recipe.objects.all', 'Recipe.objects.all', ([], {}), '()\n', (2258, 2260), False, 'from main_app.models import Recipe, Tag, Ingredient\n'), ((1752, 1768), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1766, 1768), False, 'from django.contrib.auth import get_user_model\n'), ((2561, 2577), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (2575, 2577), False, 'from django.contrib.auth import get_user_model\n')]
|
##########################################################
#
# pinout tests
#
# Use a user-defined temporary directory if
# you have problems with multiple harddrives (like I do):
#
# >>> pytest --basetemp=temp
#
##########################################################
import filecmp
import pytest
import re
import shutil
import uuid
from pathlib import Path
from importlib import reload
from pinout import manager
from pinout import config
def re_sub_ids(re_m):
id = re_m.group(0).split("_")
id = "unique_id_replaced-for-testing_" + id[-1]
return id
def mk_test_file(src, dest):
shutil.copyfile(src, dest)
with src.open() as f:
data = f.read()
# sub ids
id = re.compile(r"(?<=id=\").+(?=\")")
data = re.sub(id, re_sub_ids, data)
# sub hrefs
id = re.compile(r"(?<=href=\"#).+(?=\")")
data = re.sub(id, re_sub_ids, data)
# sub clip-path urls
id = re.compile(r"(?<=clip-path=\"url\(#).+(?=\")")
data = re.sub(id, re_sub_ids, data)
# write modified file data to testfile
dest.write_text(data)
return dest
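# Illustrative effect (id value hypothetical): an attribute such as
# id="cp_7f3a9c_2" becomes id="unique_id_replaced-for-testing_2" in both
# files, so randomly generated ids do not cause spurious filecmp diffs.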
@pytest.mark.parametrize(
"module_path, ref_path",
[
(
"../samples/arduino/arduino/uno/arduino_uno.py",
"../samples/arduino/pinout_arduino_uno_rev3.svg",
),
(
"../samples/arduino/arduino/rp2040/arduino_nano_rp2040_connect.py",
"../samples/arduino/pinout_arduino_nano_rp2040_connect.svg",
),
(
"../samples/attiny85/attiny85.py",
"../samples/attiny85/pinout_attiny85.svg",
),
(
"../samples/clip_path/pinout_diagram.py",
"../samples/clip_path/diagram.svg",
),
(
"../samples/full_sample/pinout_diagram.py",
"../samples/full_sample/pinout_diagram.svg",
),
(
"../samples/panel_layout/panel_layout.py",
"../samples/panel_layout/output/panel_layout.svg",
),
(
"../samples/panel_layout/populated_layout.py",
"../samples/panel_layout/output/populated_layout.svg",
),
(
"../samples/pci-express/pinout_x1.py",
"../samples/pci-express/pinout_x1.svg",
),
(
"../samples/section_pullout/pinout_diagram.py",
"../samples/section_pullout/diagram.svg",
),
(
"../samples/teensy_4.0/pinout_diagram.py",
"../samples/teensy_4.0/teensy_4.0_front_pinout_diagram.svg",
),
],
)
def test_output_against_reference(tmp_path, module_path, ref_path):
# Config requires reloading between tests to ensure it
# is in its default state.
reload(config)
module_path = Path(module_path)
ref_path = Path(ref_path)
# Export a temp file in the same location as the reference:
# Required for relative links to be identical.
tempsvg = ref_path.parent / f"temp_pytest_{str(uuid.uuid4())}.svg"
manager.export_diagram(
module_path,
tempsvg,
overwrite=True,
)
# Create files for comparison. Unique ids are converted to match
file1 = mk_test_file(tempsvg, tmp_path / f"test_file.svg")
file2 = mk_test_file(ref_path, tmp_path / f"ref_file.svg")
# Remove temp file
tempsvg.unlink()
# Test files are identical
assert filecmp.cmp(file1, file2, shallow=False)
|
[
"uuid.uuid4",
"importlib.reload",
"pinout.manager.export_diagram",
"pathlib.Path",
"shutil.copyfile",
"pytest.mark.parametrize",
"filecmp.cmp",
"re.sub",
"re.compile"
] |
[((1133, 2235), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""module_path, ref_path"""', "[('../samples/arduino/arduino/uno/arduino_uno.py',\n '../samples/arduino/pinout_arduino_uno_rev3.svg'), (\n '../samples/arduino/arduino/rp2040/arduino_nano_rp2040_connect.py',\n '../samples/arduino/pinout_arduino_nano_rp2040_connect.svg'), (\n '../samples/attiny85/attiny85.py',\n '../samples/attiny85/pinout_attiny85.svg'), (\n '../samples/clip_path/pinout_diagram.py',\n '../samples/clip_path/diagram.svg'), (\n '../samples/full_sample/pinout_diagram.py',\n '../samples/full_sample/pinout_diagram.svg'), (\n '../samples/panel_layout/panel_layout.py',\n '../samples/panel_layout/output/panel_layout.svg'), (\n '../samples/panel_layout/populated_layout.py',\n '../samples/panel_layout/output/populated_layout.svg'), (\n '../samples/pci-express/pinout_x1.py',\n '../samples/pci-express/pinout_x1.svg'), (\n '../samples/section_pullout/pinout_diagram.py',\n '../samples/section_pullout/diagram.svg'), (\n '../samples/teensy_4.0/pinout_diagram.py',\n '../samples/teensy_4.0/teensy_4.0_front_pinout_diagram.svg')]"], {}), "('module_path, ref_path', [(\n '../samples/arduino/arduino/uno/arduino_uno.py',\n '../samples/arduino/pinout_arduino_uno_rev3.svg'), (\n '../samples/arduino/arduino/rp2040/arduino_nano_rp2040_connect.py',\n '../samples/arduino/pinout_arduino_nano_rp2040_connect.svg'), (\n '../samples/attiny85/attiny85.py',\n '../samples/attiny85/pinout_attiny85.svg'), (\n '../samples/clip_path/pinout_diagram.py',\n '../samples/clip_path/diagram.svg'), (\n '../samples/full_sample/pinout_diagram.py',\n '../samples/full_sample/pinout_diagram.svg'), (\n '../samples/panel_layout/panel_layout.py',\n '../samples/panel_layout/output/panel_layout.svg'), (\n '../samples/panel_layout/populated_layout.py',\n '../samples/panel_layout/output/populated_layout.svg'), (\n '../samples/pci-express/pinout_x1.py',\n '../samples/pci-express/pinout_x1.svg'), (\n '../samples/section_pullout/pinout_diagram.py',\n '../samples/section_pullout/diagram.svg'), (\n '../samples/teensy_4.0/pinout_diagram.py',\n '../samples/teensy_4.0/teensy_4.0_front_pinout_diagram.svg')])\n", (1156, 2235), False, 'import pytest\n'), ((604, 630), 'shutil.copyfile', 'shutil.copyfile', (['src', 'dest'], {}), '(src, dest)\n', (619, 630), False, 'import shutil\n'), ((2753, 2767), 'importlib.reload', 'reload', (['config'], {}), '(config)\n', (2759, 2767), False, 'from importlib import reload\n'), ((2787, 2804), 'pathlib.Path', 'Path', (['module_path'], {}), '(module_path)\n', (2791, 2804), False, 'from pathlib import Path\n'), ((2820, 2834), 'pathlib.Path', 'Path', (['ref_path'], {}), '(ref_path)\n', (2824, 2834), False, 'from pathlib import Path\n'), ((3018, 3078), 'pinout.manager.export_diagram', 'manager.export_diagram', (['module_path', 'tempsvg'], {'overwrite': '(True)'}), '(module_path, tempsvg, overwrite=True)\n', (3040, 3078), False, 'from pinout import manager\n'), ((3394, 3434), 'filecmp.cmp', 'filecmp.cmp', (['file1', 'file2'], {'shallow': '(False)'}), '(file1, file2, shallow=False)\n', (3405, 3434), False, 'import filecmp\n'), ((712, 746), 're.compile', 're.compile', (['"""(?<=id=\\\\").+(?=\\\\")"""'], {}), '(\'(?<=id=\\\\").+(?=\\\\")\')\n', (722, 746), False, 'import re\n'), ((761, 789), 're.sub', 're.sub', (['id', 're_sub_ids', 'data'], {}), '(id, re_sub_ids, data)\n', (767, 789), False, 'import re\n'), ((823, 860), 're.compile', 're.compile', (['"""(?<=href=\\\\"#).+(?=\\\\")"""'], {}), '(\'(?<=href=\\\\"#).+(?=\\\\")\')\n', (833, 860), False, 'import re\n'), ((875, 903), 're.sub', 're.sub', (['id', 're_sub_ids', 'data'], {}), '(id, re_sub_ids, data)\n', (881, 903), False, 'import re\n'), ((946, 994), 're.compile', 're.compile', (['"""(?<=clip-path=\\\\"url\\\\(#).+(?=\\\\")"""'], {}), '(\'(?<=clip-path=\\\\"url\\\\(#).+(?=\\\\")\')\n', (956, 994), False, 'import re\n'), ((1008, 1036), 're.sub', 're.sub', (['id', 're_sub_ids', 'data'], {}), '(id, re_sub_ids, data)\n', (1014, 1036), False, 'import re\n'), ((2994, 3006), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (3004, 3006), False, 'import uuid\n')]
|
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from setuptools import find_packages
from setuptools import setup
DEPENDENCIES = (
'pyasn1-modules>=0.2.1',
'rsa>=3.1.4',
'six>=1.9.0',
'cachetools>=2.0.0',
)
with io.open('README.rst', 'r') as fh:
long_description = fh.read()
setup(
name='google-auth',
version='1.3.0',
author='Google Cloud Platform',
author_email='<EMAIL>',
description='Google Authentication Library',
long_description=long_description,
url='https://github.com/GoogleCloudPlatform/google-auth-library-python',
packages=find_packages(exclude=('tests*', 'system_tests*')),
namespace_packages=('google',),
install_requires=DEPENDENCIES,
license='Apache 2.0',
keywords='google auth oauth client',
classifiers=(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
),
)
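# Once published, the package is installable with (illustrative):
#   pip install google-auth==1.3.0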
|
[
"setuptools.find_packages",
"io.open"
] |
[((772, 798), 'io.open', 'io.open', (['"""README.rst"""', '"""r"""'], {}), "('README.rst', 'r')\n", (779, 798), False, 'import io\n'), ((1134, 1184), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "('tests*', 'system_tests*')"}), "(exclude=('tests*', 'system_tests*'))\n", (1147, 1184), False, 'from setuptools import find_packages\n')]
|
#!/usr/bin/env python3
import sys
sys.path.append('./lib')
import argparse
import os
import datetime
import numpy as np
import time
import pickle
import torch
from torch import optim
from param_stamp import get_param_stamp, get_param_stamp_from_args
import evaluate
from lib.encoder import Classifier
from lib.vae_models import AutoEncoder
import lib.callbacks as cb
from lib.train import train_cl
from lib.continual_learner import ContinualLearner
from lib.exemplars import ExemplarHandler
from lib.replayer import Replayer
RESULT_DIR = './results'
parser = argparse.ArgumentParser('./main.py', description='Run individual continual learning experiment.')
parser.add_argument('--get-stamp', action='store_true')
parser.add_argument('--no-gpus', action='store_false', dest='cuda')
parser.add_argument('--gpuID', type=int, nargs='+', default=[0, 1, 2, 3], help='GPU #')
parser.add_argument('--savepath', type=str, default='./results', dest='savepath')
parser.add_argument('--vis-cross-methods', action='store_true', dest='cross_methods', help='draw plots for cross methods')
parser.add_argument('--vis-cross-methods-type', nargs='+', default=['spider'], dest='cross_methods_type', help='alternatives=[\'spider\', \'bar\']')
parser.add_argument('--vis-cross-tasks', action='store_true', dest='cross_tasks', help='draw plots for cross tasks')
parser.add_argument('--matrices', type=str, nargs='+', default=['ACC', 'BWT', 'FWT', 'Overall ACC'])
parser.add_argument('--seed', type=int, default=7)
parser.add_argument('--factor', type=str, default='clutter', dest='factor')
parser.add_argument('--cumulative', type=int, default=0, dest='cul')
parser.add_argument('--bce', action='store_true')
parser.add_argument('--tasks', type=int, default=9)
parser.add_argument('--dataset', type=str, default='OpenLORIS-Object', dest='dataset')
parser.add_argument('--fc-layers', type=int, default=3, dest='fc_lay')
parser.add_argument('--fc-units', type=int, default=400, metavar="N")
parser.add_argument('--fc-drop', type=float, default=0.)
parser.add_argument('--fc-bn', type=str, default="no")
parser.add_argument('--fc-nl', type=str, default="relu", choices=["relu", "leakyrelu"])
parser.add_argument('--iters', type=int, default=3000)
parser.add_argument('--lr', type=float, default=0.0001)
parser.add_argument('--batch', type=int, default=32)
parser.add_argument('--optimizer', type=str, choices=['adam', 'adam_reset', 'sgd'], default='adam')
parser.add_argument('--feedback', action="store_true")
replay_choices = ['offline', 'exact', 'generative', 'none', 'current', 'exemplars']
parser.add_argument('--replay', type=str, default='none', choices=replay_choices)
parser.add_argument('--distill', action='store_true')
parser.add_argument('--temp', type=float, default=2., dest='temp')
parser.add_argument('--z_dim', type=int, default=100)
parser.add_argument('--g-z-dim', type=int, default=100)
parser.add_argument('--g-fc-lay', type=int)
parser.add_argument('--g-fc-uni', type=int)
parser.add_argument('--g-iters', type=int)
parser.add_argument('--lr-gen', type=float)
parser.add_argument('--ewc', action='store_true')
parser.add_argument('--lambda', type=float, default=5240., dest="ewc_lambda")
parser.add_argument('--fisher-n', type=int)
parser.add_argument('--online', action='store_true')
parser.add_argument('--gamma', type=float, default=1.)
parser.add_argument('--emp-fi', action='store_true')
parser.add_argument('--si', action='store_true')
parser.add_argument('--c', type=float, default=0.3, dest="si_c")
parser.add_argument('--epsilon', type=float, default=0.2, dest="epsilon")
parser.add_argument('--icarl', action='store_true')
parser.add_argument('--use-exemplars', action='store_true')
parser.add_argument('--add-exemplars', action='store_true')
parser.add_argument('--budget', type=int, default=2500, dest="budget")
parser.add_argument('--herding', action='store_true')
parser.add_argument('--norm-exemplars', action='store_true')
parser.add_argument('--log-per-task', action='store_true')
parser.add_argument('--loss-log', type=int, default=200, metavar="N")
parser.add_argument('--prec-log', type=int, default=200, metavar="N")
parser.add_argument('--prec-n', type=int, default=1024)
parser.add_argument('--sample-log', type=int, default=500, metavar="N")
parser.add_argument('--sample-n', type=int, default=64)
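# Example invocation (flags defined above; values illustrative):
#   python main.py --factor clutter --replay generative --distill --iters 2000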
def run(args):
result_path = os.path.join('./benchmarks/results', args.savepath)
savepath = result_path + '/' + str(datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')) + '.csv'
if not os.path.exists(result_path):
print('path does not exist, creating it ...')
os.makedirs(result_path, exist_ok=True)
# Set default arguments
args.lr_gen = args.lr if args.lr_gen is None else args.lr_gen
args.g_iters = args.iters if args.g_iters is None else args.g_iters
args.g_fc_lay = args.fc_lay if args.g_fc_lay is None else args.g_fc_lay
args.g_fc_uni = args.fc_units if args.g_fc_uni is None else args.g_fc_uni
# -if [log_per_task], reset all logs
if args.log_per_task:
args.prec_log = args.iters
args.loss_log = args.iters
args.sample_log = args.iters
# -if [iCaRL] is selected, select all accompanying options
if hasattr(args, "icarl") and args.icarl:
args.use_exemplars = True
args.add_exemplars = True
# -if EWC, SI or iCaRL is selected together with 'feedback', give error
if args.feedback and (args.ewc or args.si or args.icarl):
raise NotImplementedError("EWC, SI and iCaRL are not supported with feedback connections.")
# -if binary classification loss is selected together with 'feedback', give error
if args.feedback and args.bce:
raise NotImplementedError("Binary classification loss not supported with feedback connections.")
if not os.path.isdir(RESULT_DIR):
os.mkdir(RESULT_DIR)
# If only want param-stamp, get it printed to screen and exit
if hasattr(args, "get_stamp") and args.get_stamp:
_ = get_param_stamp_from_args(args=args)
exit()
# Use cuda?
cuda = torch.cuda.is_available() and args.cuda
device = "cuda" if cuda else "cpu"
gpu_devices = None
    if args.gpuID is None:
if torch.cuda.device_count() > 1:
gpu_devices = ','.join([str(id) for id in range(torch.cuda.device_count())])
print('==> training with CUDA (GPU id: ' + gpu_devices + ') ... <==')
else:
gpu_devices = ','.join([str(id) for id in args.gpuID])
os.environ['CUDA_VISIBLE_DEVICES'] = gpu_devices
print('==> training with CUDA (GPU id: ' + str(args.gpuID) + ') ... <==')
# Set random seeds
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if cuda:
torch.cuda.manual_seed(args.seed)
if args.factor == 'sequence':
args.tasks = 12
# -------------------------------------------------------------------------------------------------#
# ----------------#
# ----- DATA -----#
# ----------------#
# Prepare data for OpenLORIS-Object
if args.dataset == 'OpenLORIS-Object':
with open('./benchmarks/data/OpenLORIS-Object/' + args.factor + '.pk', 'rb') as f:
((train_datasets, test_datasets), config, classes_per_task) = pickle.load(f)
else:
with open('./benchmarks/data/' + args.dataset + '/' + args.dataset + '.pk', 'rb') as f:
((train_datasets, test_datasets), config, classes_per_task) = pickle.load(f)
if args.cul == 1:
for i in range(1, len(train_datasets)):
train_datasets[i].imgs.extend(train_datasets[i - 1].imgs)
train_datasets[i].labels.extend(train_datasets[i - 1].labels)
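            # e.g. (illustration): with three tasks, task 2's imgs/labels now
            # also contain task 1's, and task 3's contain tasks 1-2 -- the
            # 'Cumulative' baseline named in the visualization section below.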
# -------------------------------------------------------------------------------------------------#
# ------------------------------#
# ----- MODEL (CLASSIFIER) -----#
# ------------------------------#
# Define main model (i.e., classifier, if requested with feedback connections)
if args.feedback:
model = AutoEncoder(
image_size=config['size'], image_channels=config['channels'], classes=config['classes'],
fc_layers=args.fc_lay, fc_units=args.g_fc_uni, z_dim=args.z_dim,
fc_drop=args.fc_drop, fc_bn=True if args.fc_bn == "yes" else False, fc_nl=args.fc_nl,
).to(device)
        model.lamda_pl = 1.  # --> to make sure this VAE is also trained to classify
else:
model = Classifier(
image_size=config['size'], image_channels=config['channels'], classes=config['classes'],
fc_layers=args.fc_lay, fc_units=args.fc_units, fc_drop=args.fc_drop, fc_nl=args.fc_nl,
fc_bn=True if args.fc_bn == "yes" else False, excit_buffer=False,
binaryCE=args.bce
).to(device)
# Define optimizer (only include parameters that "requires_grad")
model.optim_list = [{'params': filter(lambda p: p.requires_grad, model.parameters()), 'lr': args.lr}]
model.optim_type = args.optimizer
if model.optim_type in ("adam", "adam_reset"):
model.optimizer = optim.Adam(model.optim_list, betas=(0.9, 0.999))
elif model.optim_type == "sgd":
model.optimizer = optim.SGD(model.optim_list)
else:
raise ValueError("Unrecognized optimizer, '{}' is not currently a valid option".format(args.optimizer))
# ----------------------------------#
# ----- CL-STRATEGY: EXEMPLARS -----#
# ----------------------------------#
# Store in model whether, how many and in what way to store exemplars
if isinstance(model, ExemplarHandler) and (args.use_exemplars or args.add_exemplars or args.replay == "exemplars"):
model.memory_budget = args.budget
model.norm_exemplars = args.norm_exemplars
model.herding = args.herding
# -----------------------------------#
# ----- CL-STRATEGY: ALLOCATION -----#
# -----------------------------------#
# Elastic Weight Consolidation (EWC)
if isinstance(model, ContinualLearner):
model.ewc_lambda = args.ewc_lambda if args.ewc else 0
if args.ewc:
model.fisher_n = args.fisher_n
model.gamma = args.gamma
model.online = args.online
model.emp_FI = args.emp_fi
    # Synaptic Intelligence (SI)
if isinstance(model, ContinualLearner):
model.si_c = args.si_c if args.si else 0
if args.si:
model.epsilon = args.epsilon
# -------------------------------------------------------------------------------------------------#
# -------------------------------#
# ----- CL-STRATEGY: REPLAY -----#
# -------------------------------#
# Use distillation loss (i.e., soft targets) for replayed data? (and set temperature)
if isinstance(model, Replayer):
model.replay_targets = "soft" if args.distill else "hard"
model.KD_temp = args.temp
# If needed, specify separate model for the generator
train_gen = True if (args.replay == "generative" and not args.feedback) else False
if train_gen:
# -specify architecture
generator = AutoEncoder(
image_size=config['size'], image_channels=config['channels'],
fc_layers=args.g_fc_lay, fc_units=args.g_fc_uni, z_dim=args.z_dim, classes=config['classes'],
fc_drop=args.fc_drop, fc_bn=True if args.fc_bn == "yes" else False, fc_nl=args.fc_nl,
).to(device)
# -set optimizer(s)
generator.optim_list = [
{'params': filter(lambda p: p.requires_grad, generator.parameters()), 'lr': args.lr_gen}]
generator.optim_type = args.optimizer
if generator.optim_type in ("adam", "adam_reset"):
generator.optimizer = optim.Adam(generator.optim_list, betas=(0.9, 0.999))
elif generator.optim_type == "sgd":
generator.optimizer = optim.SGD(generator.optim_list)
else:
generator = None
# ---------------------#
# ----- REPORTING -----#
# ---------------------#
# Get parameter-stamp (and print on screen)
param_stamp = get_param_stamp(
args, model.name, verbose=True, replay=True if (not args.replay == "none") else False,
replay_model_name=generator.name if (args.replay == "generative" and not args.feedback) else None,
)
# -define [precision_dict] to keep track of performance during training for storing and for later plotting in pdf
precision_dict = evaluate.initiate_precision_dict(args.tasks)
precision_dict_exemplars = evaluate.initiate_precision_dict(args.tasks) if args.use_exemplars else None
# ---------------------#
# ----- CALLBACKS -----#
# ---------------------#
# Callbacks for reporting on and visualizing loss
generator_loss_cbs = [
cb._VAE_loss_cb(log=args.loss_log, model=model if args.feedback else generator, tasks=args.tasks,
iters_per_task=args.iters if args.feedback else args.g_iters,
replay=False if args.replay == "none" else True)
] if (train_gen or args.feedback) else [None]
solver_loss_cbs = [
cb._solver_loss_cb(log=args.loss_log, model=model, tasks=args.tasks,
iters_per_task=args.iters, replay=False if args.replay == "none" else True)
] if (not args.feedback) else [None]
# Callbacks for evaluating and plotting generated / reconstructed samples
sample_cbs = [
cb._sample_cb(log=args.sample_log, config=config, test_datasets=test_datasets,
sample_size=args.sample_n, iters_per_task=args.iters if args.feedback else args.g_iters)
] if (train_gen or args.feedback) else [None]
# Callbacks for reporting and visualizing accuracy
eval_cb = cb._eval_cb(
log=args.prec_log, test_datasets=test_datasets, precision_dict=None, iters_per_task=args.iters,
test_size=args.prec_n, classes_per_task=classes_per_task
)
    # -pdf / reporting: summary plots (i.e., only after each task)
eval_cb_full = cb._eval_cb(
log=args.iters, test_datasets=test_datasets, precision_dict=precision_dict,
iters_per_task=args.iters, classes_per_task=classes_per_task
)
eval_cb_exemplars = cb._eval_cb(
log=args.iters, test_datasets=test_datasets, classes_per_task=classes_per_task,
precision_dict=precision_dict_exemplars, iters_per_task=args.iters,
with_exemplars=True,
) if args.use_exemplars else None
# -collect them in <lists>
eval_cbs = [eval_cb, eval_cb_full]
eval_cbs_exemplars = [eval_cb_exemplars]
# --------------------#
# ----- TRAINING -----#
# --------------------#
print("--> Training:")
# Keep track of training-time
start = time.time()
# Train model
train_cl(
model, train_datasets, test_datasets, replay_mode=args.replay,
classes_per_task=classes_per_task,
iters=args.iters, batch_size=args.batch, savepath=savepath,
generator=generator, gen_iters=args.g_iters, gen_loss_cbs=generator_loss_cbs,
sample_cbs=sample_cbs, eval_cbs=eval_cbs, loss_cbs=generator_loss_cbs if args.feedback else solver_loss_cbs,
eval_cbs_exemplars=eval_cbs_exemplars, use_exemplars=args.use_exemplars, add_exemplars=args.add_exemplars,
)
# -------------------------------------------------------------------------------------------------#
# --------------------#
# -- VISUALIZATION ---#
# --------------------#
matrices_names = args.matrices
method_names = []
if args.cul == 1:
method_names.append('Cumulative')
elif args.cul == 0:
method_names.append('Naive')
if args.replay == 'current':
method_names.append('LwF')
if args.online and args.ewc:
method_names.append('Online EWC')
if args.si:
method_names.append('SI')
if args.replay == "generative" and not args.feedback and not args.distill:
method_names.append('DGR')
if args.replay == "generative" and not args.feedback and args.distill:
method_names.append('DGR with distillation')
if args.replay == "generative" and args.feedback and args.distill:
method_names.append('DGR with feedback')
if args.ewc and not args.online:
method_names.append('EWC')
print('The selected methods are:', method_names)
print('The selected performance matrices are:', matrices_names)
if args.cross_methods:
print('==> Drawing results for cross selected-methods ... <==')
if 'spider' in args.cross_methods_type:
spider = True
if 'bar' in args.cross_methods_type:
bar = True
if args.cross_tasks:
print('==> Drawing results for cross tasks ... <==')
if __name__ == '__main__':
args = parser.parse_args()
run(args)
|
[
"os.mkdir",
"numpy.random.seed",
"argparse.ArgumentParser",
"torch.cuda.device_count",
"pickle.load",
"lib.train.train_cl",
"param_stamp.get_param_stamp_from_args",
"os.path.join",
"lib.encoder.Classifier",
"sys.path.append",
"lib.callbacks._VAE_loss_cb",
"os.path.exists",
"datetime.datetime.now",
"lib.vae_models.AutoEncoder",
"torch.manual_seed",
"param_stamp.get_param_stamp",
"torch.cuda.manual_seed",
"lib.callbacks._solver_loss_cb",
"evaluate.initiate_precision_dict",
"torch.optim.Adam",
"torch.cuda.is_available",
"lib.callbacks._sample_cb",
"lib.callbacks._eval_cb",
"os.makedirs",
"os.path.isdir",
"time.time",
"torch.optim.SGD"
] |
[((35, 59), 'sys.path.append', 'sys.path.append', (['"""./lib"""'], {}), "('./lib')\n", (50, 59), False, 'import sys\n'), ((564, 666), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""./main.py"""'], {'description': '"""Run individual continual learning experiment."""'}), "('./main.py', description=\n 'Run individual continual learning experiment.')\n", (587, 666), False, 'import argparse\n'), ((4370, 4421), 'os.path.join', 'os.path.join', (['"""./benchmarks/results"""', 'args.savepath'], {}), "('./benchmarks/results', args.savepath)\n", (4382, 4421), False, 'import os\n'), ((6663, 6688), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (6677, 6688), True, 'import numpy as np\n'), ((6693, 6721), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (6710, 6721), False, 'import torch\n'), ((12078, 12285), 'param_stamp.get_param_stamp', 'get_param_stamp', (['args', 'model.name'], {'verbose': '(True)', 'replay': "(True if not args.replay == 'none' else False)", 'replay_model_name': "(generator.name if args.replay == 'generative' and not args.feedback else None)"}), "(args, model.name, verbose=True, replay=True if not args.\n replay == 'none' else False, replay_model_name=generator.name if args.\n replay == 'generative' and not args.feedback else None)\n", (12093, 12285), False, 'from param_stamp import get_param_stamp, get_param_stamp_from_args\n'), ((12443, 12487), 'evaluate.initiate_precision_dict', 'evaluate.initiate_precision_dict', (['args.tasks'], {}), '(args.tasks)\n', (12475, 12487), False, 'import evaluate\n'), ((13742, 13916), 'lib.callbacks._eval_cb', 'cb._eval_cb', ([], {'log': 'args.prec_log', 'test_datasets': 'test_datasets', 'precision_dict': 'None', 'iters_per_task': 'args.iters', 'test_size': 'args.prec_n', 'classes_per_task': 'classes_per_task'}), '(log=args.prec_log, test_datasets=test_datasets, precision_dict=\n None, iters_per_task=args.iters, test_size=args.prec_n,\n classes_per_task=classes_per_task)\n', (13753, 13916), True, 'import lib.callbacks as cb\n'), ((14015, 14174), 'lib.callbacks._eval_cb', 'cb._eval_cb', ([], {'log': 'args.iters', 'test_datasets': 'test_datasets', 'precision_dict': 'precision_dict', 'iters_per_task': 'args.iters', 'classes_per_task': 'classes_per_task'}), '(log=args.iters, test_datasets=test_datasets, precision_dict=\n precision_dict, iters_per_task=args.iters, classes_per_task=\n classes_per_task)\n', (14026, 14174), True, 'import lib.callbacks as cb\n'), ((14729, 14740), 'time.time', 'time.time', ([], {}), '()\n', (14738, 14740), False, 'import time\n'), ((14763, 15250), 'lib.train.train_cl', 'train_cl', (['model', 'train_datasets', 'test_datasets'], {'replay_mode': 'args.replay', 'classes_per_task': 'classes_per_task', 'iters': 'args.iters', 'batch_size': 'args.batch', 'savepath': 'savepath', 'generator': 'generator', 'gen_iters': 'args.g_iters', 'gen_loss_cbs': 'generator_loss_cbs', 'sample_cbs': 'sample_cbs', 'eval_cbs': 'eval_cbs', 'loss_cbs': '(generator_loss_cbs if args.feedback else solver_loss_cbs)', 'eval_cbs_exemplars': 'eval_cbs_exemplars', 'use_exemplars': 'args.use_exemplars', 'add_exemplars': 'args.add_exemplars'}), '(model, train_datasets, test_datasets, replay_mode=args.replay,\n classes_per_task=classes_per_task, iters=args.iters, batch_size=args.\n batch, savepath=savepath, generator=generator, gen_iters=args.g_iters,\n gen_loss_cbs=generator_loss_cbs, sample_cbs=sample_cbs, eval_cbs=\n eval_cbs, loss_cbs=generator_loss_cbs if args.feedback else\n 
solver_loss_cbs, eval_cbs_exemplars=eval_cbs_exemplars, use_exemplars=\n args.use_exemplars, add_exemplars=args.add_exemplars)\n', (14771, 15250), False, 'from lib.train import train_cl\n'), ((4536, 4563), 'os.path.exists', 'os.path.exists', (['result_path'], {}), '(result_path)\n', (4550, 4563), False, 'import os\n'), ((4627, 4666), 'os.makedirs', 'os.makedirs', (['result_path'], {'exist_ok': '(True)'}), '(result_path, exist_ok=True)\n', (4638, 4666), False, 'import os\n'), ((5809, 5834), 'os.path.isdir', 'os.path.isdir', (['RESULT_DIR'], {}), '(RESULT_DIR)\n', (5822, 5834), False, 'import os\n'), ((5844, 5864), 'os.mkdir', 'os.mkdir', (['RESULT_DIR'], {}), '(RESULT_DIR)\n', (5852, 5864), False, 'import os\n'), ((5998, 6034), 'param_stamp.get_param_stamp_from_args', 'get_param_stamp_from_args', ([], {'args': 'args'}), '(args=args)\n', (6023, 6034), False, 'from param_stamp import get_param_stamp, get_param_stamp_from_args\n'), ((6078, 6103), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6101, 6103), False, 'import torch\n'), ((6743, 6776), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['args.seed'], {}), '(args.seed)\n', (6765, 6776), False, 'import torch\n'), ((9086, 9134), 'torch.optim.Adam', 'optim.Adam', (['model.optim_list'], {'betas': '(0.9, 0.999)'}), '(model.optim_list, betas=(0.9, 0.999))\n', (9096, 9134), False, 'from torch import optim\n'), ((12519, 12563), 'evaluate.initiate_precision_dict', 'evaluate.initiate_precision_dict', (['args.tasks'], {}), '(args.tasks)\n', (12551, 12563), False, 'import evaluate\n'), ((14211, 14400), 'lib.callbacks._eval_cb', 'cb._eval_cb', ([], {'log': 'args.iters', 'test_datasets': 'test_datasets', 'classes_per_task': 'classes_per_task', 'precision_dict': 'precision_dict_exemplars', 'iters_per_task': 'args.iters', 'with_exemplars': '(True)'}), '(log=args.iters, test_datasets=test_datasets, classes_per_task=\n classes_per_task, precision_dict=precision_dict_exemplars,\n iters_per_task=args.iters, with_exemplars=True)\n', (14222, 14400), True, 'import lib.callbacks as cb\n'), ((6219, 6244), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (6242, 6244), False, 'import torch\n'), ((7265, 7279), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (7276, 7279), False, 'import pickle\n'), ((7460, 7474), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (7471, 7474), False, 'import pickle\n'), ((9197, 9224), 'torch.optim.SGD', 'optim.SGD', (['model.optim_list'], {}), '(model.optim_list)\n', (9206, 9224), False, 'from torch import optim\n'), ((11725, 11777), 'torch.optim.Adam', 'optim.Adam', (['generator.optim_list'], {'betas': '(0.9, 0.999)'}), '(generator.optim_list, betas=(0.9, 0.999))\n', (11735, 11777), False, 'from torch import optim\n'), ((12774, 12991), 'lib.callbacks._VAE_loss_cb', 'cb._VAE_loss_cb', ([], {'log': 'args.loss_log', 'model': '(model if args.feedback else generator)', 'tasks': 'args.tasks', 'iters_per_task': '(args.iters if args.feedback else args.g_iters)', 'replay': "(False if args.replay == 'none' else True)"}), "(log=args.loss_log, model=model if args.feedback else\n generator, tasks=args.tasks, iters_per_task=args.iters if args.feedback\n else args.g_iters, replay=False if args.replay == 'none' else True)\n", (12789, 12991), True, 'import lib.callbacks as cb\n'), ((13113, 13261), 'lib.callbacks._solver_loss_cb', 'cb._solver_loss_cb', ([], {'log': 'args.loss_log', 'model': 'model', 'tasks': 'args.tasks', 'iters_per_task': 'args.iters', 'replay': "(False if args.replay == 
'none' else True)"}), "(log=args.loss_log, model=model, tasks=args.tasks,\n iters_per_task=args.iters, replay=False if args.replay == 'none' else True)\n", (13131, 13261), True, 'import lib.callbacks as cb\n'), ((13432, 13608), 'lib.callbacks._sample_cb', 'cb._sample_cb', ([], {'log': 'args.sample_log', 'config': 'config', 'test_datasets': 'test_datasets', 'sample_size': 'args.sample_n', 'iters_per_task': '(args.iters if args.feedback else args.g_iters)'}), '(log=args.sample_log, config=config, test_datasets=\n test_datasets, sample_size=args.sample_n, iters_per_task=args.iters if\n args.feedback else args.g_iters)\n', (13445, 13608), True, 'import lib.callbacks as cb\n'), ((8032, 8297), 'lib.vae_models.AutoEncoder', 'AutoEncoder', ([], {'image_size': "config['size']", 'image_channels': "config['channels']", 'classes': "config['classes']", 'fc_layers': 'args.fc_lay', 'fc_units': 'args.g_fc_uni', 'z_dim': 'args.z_dim', 'fc_drop': 'args.fc_drop', 'fc_bn': "(True if args.fc_bn == 'yes' else False)", 'fc_nl': 'args.fc_nl'}), "(image_size=config['size'], image_channels=config['channels'],\n classes=config['classes'], fc_layers=args.fc_lay, fc_units=args.\n g_fc_uni, z_dim=args.z_dim, fc_drop=args.fc_drop, fc_bn=True if args.\n fc_bn == 'yes' else False, fc_nl=args.fc_nl)\n", (8043, 8297), False, 'from lib.vae_models import AutoEncoder\n'), ((8453, 8738), 'lib.encoder.Classifier', 'Classifier', ([], {'image_size': "config['size']", 'image_channels': "config['channels']", 'classes': "config['classes']", 'fc_layers': 'args.fc_lay', 'fc_units': 'args.fc_units', 'fc_drop': 'args.fc_drop', 'fc_nl': 'args.fc_nl', 'fc_bn': "(True if args.fc_bn == 'yes' else False)", 'excit_buffer': '(False)', 'binaryCE': 'args.bce'}), "(image_size=config['size'], image_channels=config['channels'],\n classes=config['classes'], fc_layers=args.fc_lay, fc_units=args.\n fc_units, fc_drop=args.fc_drop, fc_nl=args.fc_nl, fc_bn=True if args.\n fc_bn == 'yes' else False, excit_buffer=False, binaryCE=args.bce)\n", (8463, 8738), False, 'from lib.encoder import Classifier\n'), ((11111, 11377), 'lib.vae_models.AutoEncoder', 'AutoEncoder', ([], {'image_size': "config['size']", 'image_channels': "config['channels']", 'fc_layers': 'args.g_fc_lay', 'fc_units': 'args.g_fc_uni', 'z_dim': 'args.z_dim', 'classes': "config['classes']", 'fc_drop': 'args.fc_drop', 'fc_bn': "(True if args.fc_bn == 'yes' else False)", 'fc_nl': 'args.fc_nl'}), "(image_size=config['size'], image_channels=config['channels'],\n fc_layers=args.g_fc_lay, fc_units=args.g_fc_uni, z_dim=args.z_dim,\n classes=config['classes'], fc_drop=args.fc_drop, fc_bn=True if args.\n fc_bn == 'yes' else False, fc_nl=args.fc_nl)\n", (11122, 11377), False, 'from lib.vae_models import AutoEncoder\n'), ((11856, 11887), 'torch.optim.SGD', 'optim.SGD', (['generator.optim_list'], {}), '(generator.optim_list)\n', (11865, 11887), False, 'from torch import optim\n'), ((4461, 4484), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4482, 4484), False, 'import datetime\n'), ((6310, 6335), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (6333, 6335), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
import numpy as np
from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector
class Driver(BaseDriver):
support_models = ['IT6302']
quants = [
QReal('CH1 Voltage', unit='V',
#set_cmd='SYST:REM;INST CH1;VOLT %(value).13e'
set_cmd='INST CH1;VOLT %(value).13e',
get_cmd='MEAS? CH1'),
QReal('CH1 Current', unit='A',
#set_cmd='SYST:REM;INST CH1;CURR %(value).13e'
set_cmd='INST CH1;CURR %(value).13e',
get_cmd='MEAS:CURR? CH1'),
QReal('CH2 Voltage', unit='V',
#set_cmd='SYST:REM;INST CH2;VOLT %(value).13e'
set_cmd='INST CH2;VOLT %(value).13e',
get_cmd='MEAS? CH2'),
QReal('CH2 Current', unit='A',
#set_cmd='SYST:REM;INST CH2;CURR %(value).13e'
set_cmd='INST CH2;CURR %(value).13e',
get_cmd='MEAS:CURR? CH2'),
QReal('CH3 Voltage', unit='V',
#set_cmd='SYST:REM;INST CH3;VOLT %(value).13e'
set_cmd='INST CH3;VOLT %(value).13e',
get_cmd='MEAS? CH3'),
QReal('CH3 Current', unit='A',
#set_cmd='SYST:REM;INST CH3;CURR %(value).13e'
set_cmd='INST CH3;CURR %(value).13e',
get_cmd='MEAS:CURR? CH3'),
QOption('Output',
set_cmd='OUTP %(option)s', options=[('OFF', 'OFF'), ('ON', 'ON')]),
]
def performOpen(self):
self.write('SYST:REM')
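        # SYST:REM puts the IT6302 into remote mode so the INST/VOLT/CURR
        # commands above are accepted. A hypothetical session that sets
        # 'CH1 Voltage' to 1.5 would send "INST CH1;VOLT 1.5000000000000e+00".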
|
[
"lab.device.QOption",
"lab.device.QReal"
] |
[((216, 309), 'lab.device.QReal', 'QReal', (['"""CH1 Voltage"""'], {'unit': '"""V"""', 'set_cmd': '"""INST CH1;VOLT %(value).13e"""', 'get_cmd': '"""MEAS? CH1"""'}), "('CH1 Voltage', unit='V', set_cmd='INST CH1;VOLT %(value).13e',\n get_cmd='MEAS? CH1')\n", (221, 309), False, 'from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((398, 496), 'lab.device.QReal', 'QReal', (['"""CH1 Current"""'], {'unit': '"""A"""', 'set_cmd': '"""INST CH1;CURR %(value).13e"""', 'get_cmd': '"""MEAS:CURR? CH1"""'}), "('CH1 Current', unit='A', set_cmd='INST CH1;CURR %(value).13e',\n get_cmd='MEAS:CURR? CH1')\n", (403, 496), False, 'from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((585, 678), 'lab.device.QReal', 'QReal', (['"""CH2 Voltage"""'], {'unit': '"""V"""', 'set_cmd': '"""INST CH2;VOLT %(value).13e"""', 'get_cmd': '"""MEAS? CH2"""'}), "('CH2 Voltage', unit='V', set_cmd='INST CH2;VOLT %(value).13e',\n get_cmd='MEAS? CH2')\n", (590, 678), False, 'from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((767, 865), 'lab.device.QReal', 'QReal', (['"""CH2 Current"""'], {'unit': '"""A"""', 'set_cmd': '"""INST CH2;CURR %(value).13e"""', 'get_cmd': '"""MEAS:CURR? CH2"""'}), "('CH2 Current', unit='A', set_cmd='INST CH2;CURR %(value).13e',\n get_cmd='MEAS:CURR? CH2')\n", (772, 865), False, 'from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((954, 1047), 'lab.device.QReal', 'QReal', (['"""CH3 Voltage"""'], {'unit': '"""V"""', 'set_cmd': '"""INST CH3;VOLT %(value).13e"""', 'get_cmd': '"""MEAS? CH3"""'}), "('CH3 Voltage', unit='V', set_cmd='INST CH3;VOLT %(value).13e',\n get_cmd='MEAS? CH3')\n", (959, 1047), False, 'from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1136, 1234), 'lab.device.QReal', 'QReal', (['"""CH3 Current"""'], {'unit': '"""A"""', 'set_cmd': '"""INST CH3;CURR %(value).13e"""', 'get_cmd': '"""MEAS:CURR? CH3"""'}), "('CH3 Current', unit='A', set_cmd='INST CH3;CURR %(value).13e',\n get_cmd='MEAS:CURR? CH3')\n", (1141, 1234), False, 'from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n'), ((1323, 1411), 'lab.device.QOption', 'QOption', (['"""Output"""'], {'set_cmd': '"""OUTP %(option)s"""', 'options': "[('OFF', 'OFF'), ('ON', 'ON')]"}), "('Output', set_cmd='OUTP %(option)s', options=[('OFF', 'OFF'), ('ON',\n 'ON')])\n", (1330, 1411), False, 'from lab.device import BaseDriver, QInteger, QOption, QReal, QString, QVector\n')]
|
#!/usr/bin/python3
import asyncio
from src.estimator import estimator
import audits.test_utils as test_utils
cases = [
["days", "ch-3"],
["weeks", "ch-3"],
["months", "ch-3"]
]
def test_challenge3():
for [period_type, challenge] in cases:
loop = asyncio.get_event_loop()
input = loop.run_until_complete(
test_utils.mock_estimation_for(period_type))
# nodes from end point
data = input["data"]
estimate = input["estimate"]
output = estimator(data)
values = test_utils.value_on_fields(output, estimate, challenge)
for [produced, expected] in values:
assert test_utils.format_float(produced) == \
test_utils.format_float(expected)
|
[
"asyncio.get_event_loop",
"src.estimator.estimator",
"audits.test_utils.format_float",
"audits.test_utils.mock_estimation_for",
"audits.test_utils.value_on_fields"
] |
[((267, 291), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (289, 291), False, 'import asyncio\n'), ((482, 497), 'src.estimator.estimator', 'estimator', (['data'], {}), '(data)\n', (491, 497), False, 'from src.estimator import estimator\n'), ((511, 566), 'audits.test_utils.value_on_fields', 'test_utils.value_on_fields', (['output', 'estimate', 'challenge'], {}), '(output, estimate, challenge)\n', (537, 566), True, 'import audits.test_utils as test_utils\n'), ((337, 380), 'audits.test_utils.mock_estimation_for', 'test_utils.mock_estimation_for', (['period_type'], {}), '(period_type)\n', (367, 380), True, 'import audits.test_utils as test_utils\n'), ((620, 653), 'audits.test_utils.format_float', 'test_utils.format_float', (['produced'], {}), '(produced)\n', (643, 653), True, 'import audits.test_utils as test_utils\n'), ((669, 702), 'audits.test_utils.format_float', 'test_utils.format_float', (['expected'], {}), '(expected)\n', (692, 702), True, 'import audits.test_utils as test_utils\n')]
|
import pytest
from wagtail.core.models import Site
from tests.testapp.models import BlogPage, LivePageMixin
@pytest.mark.django_db
def test_home_fixture(home):
assert home == Site.objects.first().root_page
@pytest.mark.django_db
def test_blog_page_factory_instance(blog_page):
assert BlogPage.objects.count() == 1
assert isinstance(blog_page, BlogPage)
assert isinstance(blog_page, LivePageMixin)
assert blog_page.title.startswith("Page ")
assert blog_page.channel_id == ""
@pytest.mark.django_db
def test_blog_page_factory_factory(blog_page_factory):
assert BlogPage.objects.count() == 0
blog_page = blog_page_factory(title="Some Title", channel_id="some-id")
assert isinstance(blog_page, BlogPage)
assert isinstance(blog_page, LivePageMixin)
assert blog_page.title == "Some Title"
assert blog_page.channel_id == "some-id"
assert BlogPage.objects.count() == 1
|
[
"wagtail.core.models.Site.objects.first",
"tests.testapp.models.BlogPage.objects.count"
] |
[((297, 321), 'tests.testapp.models.BlogPage.objects.count', 'BlogPage.objects.count', ([], {}), '()\n', (319, 321), False, 'from tests.testapp.models import BlogPage, LivePageMixin\n'), ((594, 618), 'tests.testapp.models.BlogPage.objects.count', 'BlogPage.objects.count', ([], {}), '()\n', (616, 618), False, 'from tests.testapp.models import BlogPage, LivePageMixin\n'), ((890, 914), 'tests.testapp.models.BlogPage.objects.count', 'BlogPage.objects.count', ([], {}), '()\n', (912, 914), False, 'from tests.testapp.models import BlogPage, LivePageMixin\n'), ((182, 202), 'wagtail.core.models.Site.objects.first', 'Site.objects.first', ([], {}), '()\n', (200, 202), False, 'from wagtail.core.models import Site\n')]
|
import json
import logging
from typing import Tuple
import redis
from intervaltree import Interval, IntervalTree
from dispatch.config import PLANNER_SERVER_ROLE, MAX_MINUTES_PER_TECH, MAX_TRANSACTION_RETRY
from dispatch import config
from dispatch.plugins.kandbox_planner.env.env_enums import *
# RTree and Intervaltree, similar?
from dispatch.plugins.kandbox_planner.env.env_enums import (
JobPlanningStatus,
JobType,
TimeSlotType,
TimeSlotOperationType,
ActionType,
KafkaMessageType,
)
from dispatch.plugins.kandbox_planner.env.env_models import (
ActionDict,
BaseJob,
LocationTuple,
TimeSlotJSONEncoder,
WorkingTimeSlot,
JobLocationBase,
)
log = logging.getLogger("rl_env.working_time_slot")
# Done (2020-10-16 10:05:06), replaced array of "working_time_slots"?
# https://stackoverflow.com/questions/2646157/what-is-the-fastest-to-access-struct-like-object-in-python
# https://medium.com/@jacktator/dataclass-vs-namedtuple-vs-object-for-performance-optimization-in-python-691e234253b9
# namedtuples are optimised for access as tuples. If you change your accessor to be a[2] instead of a.c, you'll see similar performance to the tuples. The reason is that the name accessors are effectively translating into calls to self[idx], so pay both the indexing and the name lookup price.
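# A minimal sketch of that cost difference (standard library only):
#   from collections import namedtuple
#   P = namedtuple('P', 'a b c')
#   p = P(1, 2, 3)
#   p[2]   # plain tuple indexing
#   p.c    # named accessor: attribute lookup that resolves to self[2]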
SLOT_CODE_SET_FOR_DEBUG = [] # "env_MY_2/CT02_01950_02520_F"
class MissingSlotException(Exception):
def __init__(self, slot_code, message):
self.slot_code = slot_code
self.message = message
# https://code.activestate.com/recipes/389916-example-setattr-getattr-overloading/
class TestingSlotDict(dict):
"""Example of overloading __getatr__ and __setattr__
This example creates a dictionary where members can be accessed as attributes
"""
def __setitem__(self, key, value):
"""Maps attributes to values.
Only if we are initialised
"""
if key in SLOT_CODE_SET_FOR_DEBUG:
log.debug(f"pause SLOT_CODE_SET_FOR_DEBUG={SLOT_CODE_SET_FOR_DEBUG}")
super(TestingSlotDict, self).__setitem__(key, value)
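# Usage sketch: append a slot code string to SLOT_CODE_SET_FOR_DEBUG above and
# every write of that key into a TestingSlotDict logs first -- a convenient
# hook for a conditional breakpoint on one specific slot.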
class WorkingTimeSlotServer:
def __init__(self, env, redis_conn): # team_id is included in worker_code
self.env = env
self.r = redis_conn
self.time_slot_dict = TestingSlotDict()
self.time_slot_tree = IntervalTree()
def get_time_slot_key(self, slot: WorkingTimeSlot) -> str:
# worker_id, start_minutes, end_minutes, slot_type, worker_id,start_minutes, end_minutes, slot_type
start_minutes_str = str(int(slot.start_minutes)).zfill(5)
end_minutes_str = str(int(slot.end_minutes)).zfill(5)
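        # e.g. (assuming team_env_key == "env_MY_2" and the slot_type formats
        # as "J"): worker "MY|D|3|CT02" from minute 3638 to 3668 yields
        # "env_MY_2/s/MY|D|3|CT02_03638_03668_J", matching the sample key in
        # _decode_slot_code_info below.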
return f"{self.env.team_env_key}/s/{slot.worker_id}_{start_minutes_str }_{ end_minutes_str}_{ slot.slot_type}"
def reload_from_redis_server(self):
for slot_code in self.r.scan_iter(f"{self.env.team_env_key}/s/*"):
slot_code_str = slot_code.decode("utf-8")
#
if slot_code_str in config.DEBUGGING_SLOT_CODE_SET:
log.debug(f"debug {slot_code_str}")
slot = self.get_from_redis_to_internal_cache(slot_code_str)
if slot is None:
continue
net_overtime_minutes = 0 + slot.start_overtime_minutes + slot.end_overtime_minutes
if net_overtime_minutes != 0:
if not self.env.mutate_worker_add_overtime_minutes(
slot.worker_id,
int(slot.start_minutes / 1440),
net_overtime_minutes,
post_changes_flag=False,
):
log.warn(
f"env reaload, failed to env.mutate_worker_add_overtime_minutes, slot_code={slot_code}, worker={slot.worker_id}, net_overtime_minutes = {net_overtime_minutes}"
)
def reset(self):
log.error("real slot server should NOT be reset-ed.")
# exit(1)
def get_from_redis_to_internal_cache(self, slot_code) -> WorkingTimeSlot:
try:
slot = self.get_slot(redis_handler=self.r, slot_code=slot_code)
return slot
except MissingSlotException as mse:
log.error(
f"Failed to read slot_code={mse.slot_code}. During get_from_redis_to_internal_cache"
)
return None
except KeyError as ke:
log.error(f"Failed to read slot_code={slot_code}, Reason: {ke}")
return None
def get_slot(self, redis_handler, slot_code, raise_exception=True) -> WorkingTimeSlot:
redis_handler.watch(slot_code)
slot_on_redis = redis_handler.get(slot_code)
if slot_on_redis is None:
log.error(f"Failed to read slot_code={slot_code} from redis. Initiated remove process")
try:
self._remove_slot_code_from_internal_cache(slot_code=slot_code)
except Exception as mse:
log.error(f"failed to remove slot {str(slot_code)} from cache, error: {str(mse)}")
if raise_exception:
raise MissingSlotException(
slot_code=slot_code, message=f"Failed to read slot_code={slot_code} from redis"
)
else:
return None
current_slot_as_list = json.loads(slot_on_redis)
slot = WorkingTimeSlot(*current_slot_as_list)
self._set_into_internal_cache(slot_code, slot)
return slot
def set_slot(self, slot):
slot_code = self.get_time_slot_key(slot)
if slot_code in SLOT_CODE_SET_FOR_DEBUG:
log.debug(f"pause SLOT_CODE_SET_FOR_DEBUG={SLOT_CODE_SET_FOR_DEBUG}")
if (
(slot.worker_id == 1145)
& (int(slot.start_minutes) == 10910)
& (int(slot.end_minutes) == 11880)
):
print("pause 1145/10910/11880/F ")
self._set_into_internal_cache(slot_code, slot)
return self.r.set(slot_code, json.dumps(slot, cls=TimeSlotJSONEncoder))
def add_single_working_time_slot(
self, worker_code: str, start_minutes: int, end_minutes: int, start_location, end_location
) -> WorkingTimeSlot:
if end_minutes - start_minutes < 1:
return None
        # Rather than keeping fake start/end markers for every day, None in
        # prev_slot_code/next_slot_code denotes the start/end of a worker's
        # chain of time slots. 2020-10-14 08:53:33
start_location_base = JobLocationBase(*start_location[0:4])
end_location_base = JobLocationBase(*end_location[0:4])
slot = WorkingTimeSlot(
slot_type=TimeSlotType.FLOATING,
start_minutes=start_minutes,
end_minutes=end_minutes,
prev_slot_code=None,
next_slot_code=None,
start_location=start_location_base,
end_location=end_location_base,
worker_id=worker_code,
available_free_minutes=end_minutes - start_minutes,
assigned_job_codes=[],
)
a = self.set_slot(slot)
self.env.kafka_server.post_changed_slot_codes(
message_type=KafkaMessageType.UPDATE_WORKING_TIME_SLOTS,
changed_slot_codes_list=[self.get_time_slot_key(slot)],
)
return slot
def delete_slot__TODEL(
self, slot_code: str
): # worker_id, start_minutes, end_minutes, slot_type,
if slot_code in self.time_slot_dict.keys():
slot = self.time_slot_dict[slot_code]
log.debug(f"pause SLOT_CODE_SET_FOR_DEBUG={SLOT_CODE_SET_FOR_DEBUG}")
self.time_slot_dict[slot_code] = slot
if slot_code in SLOT_CODE_SET_FOR_DEBUG:
log.debug(f"pause SLOT_CODE_SET_FOR_DEBUG={SLOT_CODE_SET_FOR_DEBUG}")
if (
(slot.worker_id == 1145)
& (int(slot.start_minutes) == 10910)
& (int(slot.end_minutes) == 11880)
):
print("pause 1145/10910/11880/F ")
self._set_into_internal_cache(slot_code, slot)
return self.r.set(slot_code, json.dumps(slot, cls=TimeSlotJSONEncoder))
def delete_slots_from_worker(self, worker_code: str, start_minutes: int, end_minutes: int):
(interval_begin, interval_end) = self.get_interval_begin_end_by_worker_code(
worker_code, start_minutes, end_minutes
)
existing_slots = self.time_slot_tree[interval_begin:interval_end]
# slots_to_delete = []
all_slot_codes_to_delete = {
self.get_time_slot_key(an_existing_slot[2]) for an_existing_slot in existing_slots
}
for an_existing_slot in existing_slots:
slot_code = self.get_time_slot_key(an_existing_slot[2])
# if slot_code in self.time_slot_dict.keys():
if self.time_slot_dict[slot_code].next_slot_code is not None:
if self.time_slot_dict[slot_code].next_slot_code not in all_slot_codes_to_delete:
log.error(
f"The slot {slot_code} is deleted with dangling next slot: {self.time_slot_dict[slot_code].next_slot_code}. It shouldnot happend? I am fixing it anyway."
)
next_slot_as_list_str = self.r.get(
self.time_slot_dict[slot_code].next_slot_code
)
if next_slot_as_list_str is not None:
next_slot = WorkingTimeSlot(*json.loads(next_slot_as_list_str))
next_slot.prev_slot_code = None
with self.r.pipeline() as pipe:
self.atomic_slot_delete_and_add_back(
redis_handler=pipe,
slots_to_delete=[],
slots_to_add_back={
self.time_slot_dict[slot_code].next_slot_code: next_slot
},
)
else:
log.error(
f"The next slot: {self.time_slot_dict[slot_code].next_slot_code} is lost."
)
for an_existing_slot in existing_slots:
slot_code = self.get_time_slot_key(an_existing_slot[2])
if slot_code in self.time_slot_dict.keys():
del self.time_slot_dict[slot_code]
else:
log.error(f"Slot mismatch from time_slot_tree to time_slot_dict {slot_code} ")
            # TODO, @duan, does this really work?
self.time_slot_tree.remove(an_existing_slot)
if slot_code in config.DEBUGGING_SLOT_CODE_SET:
log.debug("config ")
self.r.delete(slot_code)
# slots_to_delete.append(slot_code)
self.env.kafka_server.post_changed_slot_codes(
message_type=KafkaMessageType.DELETE_WORKING_TIME_SLOTS,
changed_slot_codes_list=all_slot_codes_to_delete,
)
# def add_new_day_working_time_slot(self):
# for w in self.env.workers_dict.keys():
# # This time I make free time same as assigned job, all linearly flattened for all days.
# # self.workers_dict[w]["worker_time_slots"] = set()
# for slot_i in range(len(self.env.workers_dict[w].linear_working_slots)):
# slot = self.env.workers_dict[w].linear_working_slots[slot_i]
# self.add_single_working_time_slot(w, slot)
# def get_worker_index_by_code(self, worker_code):
# return self.env.workers_dict[worker_code].worker_index
def get_interval_begin_end_by_worker_code(
self, worker_code, start_minutes, end_minutes
) -> (int, int):
if worker_code == "MY|D|3|CT06":
log.debug("MY|D|3|CT06")
worker_index = self.env.workers_dict[worker_code].worker_index
interval_begin = worker_index * MAX_MINUTES_PER_TECH + int(start_minutes)
interval_end = worker_index * MAX_MINUTES_PER_TECH + int(end_minutes)
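        # Worked example (hypothetical numbers): with worker_index == 2 and
        # MAX_MINUTES_PER_TECH == 100000, minutes 3638..3668 map to the band
        # 203638..203668, so every worker owns a disjoint range of the one
        # shared interval tree.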
return (interval_begin, interval_end)
def _decode_slot_code_info(self, slot_code):
# env_MY_2/s/MY|D|3|CT02_03638_03668_J
s = slot_code.split("/")[2].split("_")
return s[0], int(s[1]), int(s[2])
def _remove_slot_code_from_internal_cache(self, slot_code):
slot_on_redis = self.r.get(slot_code)
if slot_on_redis is not None:
log.error(f"The slot is still on redis with slot_code={slot_code}. Removal aborted.")
return
if slot_code in self.time_slot_dict.keys():
del self.time_slot_dict[slot_code]
worker_code, start_minutes, end_minutes = self._decode_slot_code_info(slot_code)
(interval_begin, interval_end) = self.get_interval_begin_end_by_worker_code(
worker_code, start_minutes, end_minutes + 1
)
intervals = self.time_slot_tree[interval_begin:interval_end]
removed_flag = False
for iv in intervals:
# if (iv.begin == interval_begin) and (iv.end == interval_end):
if (iv.data.start_minutes == start_minutes) and (iv.data.end_minutes == end_minutes):
# Delete only by absolute match. Not the overlapped ones
self.time_slot_tree.remove(iv)
log.info(
f"slot removed from interval_tree: {worker_code}_{start_minutes}_{end_minutes}"
)
removed_flag = True
if not removed_flag:
log.warn(
f"Failed to remove slot from interval_tree: {worker_code}_{start_minutes}_{end_minutes}"
)
def _set_into_internal_cache(self, slot_code, slot):
if slot_code in SLOT_CODE_SET_FOR_DEBUG:
log.debug(f"pause SLOT_CODE_SET_FOR_DEBUG={SLOT_CODE_SET_FOR_DEBUG}")
self.time_slot_dict[slot_code] = slot
(interval_begin, interval_end) = self.get_interval_begin_end_by_worker_code(
slot.worker_id,
slot.start_minutes - slot.start_overtime_minutes,
slot.end_minutes + slot.end_overtime_minutes,
)
if interval_begin == 230007710:
log.debug("debug time_slot_tree.add")
existing_slots = self.time_slot_tree[interval_begin:interval_end]
for an_existing_slot in existing_slots:
if self.get_time_slot_key(an_existing_slot[2]) == slot_code:
# If not removed, time_slot_tree will have two entries.
self.time_slot_tree.remove(an_existing_slot)
# self.time_slot_tree[interval_begin:interval_end] = slot
# if slot.start_minutes < 0:
# print("debug when slot begin == 06300")
self.time_slot_tree.add(Interval(interval_begin, interval_end, slot))
# fmt:off
def release_job_time_slots(
self, job: BaseJob
) -> Tuple[
bool, dict
]: # slot_codes: list, start_minutes, end_minutes, current_job_location, action_type:str
"""
action_dict -> action_type: 'JOB_FS' , 'JOB_N', JobType.ABSENCE
"""
# action_type = action_dict["action_type"]
job_code = job.job_code
start_minutes = job.scheduled_start_minutes
duration_minutes = job.scheduled_duration_minutes
end_minutes = start_minutes + duration_minutes
action_worker_ids = job.scheduled_worker_codes
if len(action_worker_ids) < 1:
return False, {}
with self.r.pipeline() as pipe:
try:
# put a WATCH on each job_code
error_occurred = False
error_messages = []
for worker_id in action_worker_ids:
the_slots = self.get_overlapped_slots(
worker_id=worker_id, start_minutes=start_minutes, end_minutes=end_minutes,
)
if len(the_slots) < 1:
log.error(
f"release_job_time_slots: no slots to release, worker={worker_id}, start={start_minutes}, job_code={job_code}"
)
return (
False,
{ "messages": [ {"message": f"no slots to release, worker_id={worker_id}"} ] },
)
elif len(the_slots) > 1:
                        # there are multiple slots matching this job duration. It means conflicts among several jobs or absence_events.
                        # For long jobs, a small urgent request may interrupt them, putting two jobs in parallel for a single worker.
                        # The logic would be:
                        # - Loop through all slots and find the one to be released
                        # - Release the selected one
                        #   - and then link this one to prev + next
                        # - Loop through the rest of the slots with conflicted jobs, cutting them off from this new free/movable slot again.
log.error( f"spanning across slots, not possible to release, worker_id={worker_id}, start={start_minutes}, job_code={job_code}, slots = {[self.get_time_slot_key(s) for s in the_slots ]}" )
return (
False,
{"messages": [{"message": f"spanning across slots, not possible to release, worker_id={worker_id}, start={start_minutes}" }]},
)
local_slot = the_slots.pop()
slot_code = self.get_time_slot_key(local_slot)
pipe.watch(slot_code)
slot = self.get_slot(redis_handler=pipe, slot_code = slot_code)
if slot.slot_type == TimeSlotType.FLOATING:
try:
slot.assigned_job_codes.remove(job_code)
except ValueError:
log.error(
f"JOB:{job_code}:release_job_time_slots: {job_code} is not in slot. slot_code={slot_code} "
)
return (
True,
{ "messages": [{ "message": f"error, {job_code} is not in slot. slot_code={slot_code} ", "deleted": 0, "added": 0, } ] },
)
new_slot = slot
# now we can put the pipeline back into buffered mode with MULTI
self.atomic_slot_delete_and_add_back(
redis_handler=pipe,
slots_to_delete=[ ],
slots_to_add_back={slot_code:new_slot},
)
# if a WatchError wasn't raised during execution, everything
# we just did happened atomically.
return (
True,
{ "messages": [ { "message": "success", "deleted": 0, "added": 1, } ] },
)
                        # This updates an existing floating slot; removing one job does not change the recommendation.
                        # I skip updating the recommendation slot.
else:
if job.job_code not in slot.assigned_job_codes:
return ( True, { "messages": [ { "message": "The ", "deleted": -1, "added": -1 , } ] }, )
if len(slot.assigned_job_codes)> 1:
slot.assigned_job_codes.remove(job_code)
self.atomic_slot_delete_and_add_back(
redis_handler=pipe,
slots_to_delete=[ ],
slots_to_add_back={slot_code:slot},
)
log.warn(f"Multiple job in one JOB_FIXED, and only one {job_code} is removed. slot={str(slot)}")
return ( True, { "messages": [ { "message": "The ", "deleted": -1, "added": -1 , } ] }, )
# Now only one job is in this job_fixed slot, I remove this slot completely.
result, result_dict = self._release_slot_job_fixed(pipe, slot)
if not result:
return result, result_dict
return ( True, { "messages": [ { "message": "success", "deleted": -1, "added": -1 , } ] }, )
except redis.WatchError:
                # another client must have changed one of the watched slot keys between
                # the time we started WATCHing them and the pipeline's execution.
                # our best bet is to just retry.
                print("WatchError: Transaction interrupted.")
return (
False,
{"messages": [{"message": "WatchError: Transaction interruppted"}]},
)
except MissingSlotException as mse:
log.error(f"Failed to read slot_code={mse.slot_code}. During release_job_time_slots for {job.job_code}")
return ( False, { "messages": [ {"message": f"Internal Error, while release_job_time_slots for {job.job_code}, failed to read slot_code={mse.slot_code}" }]}, )
def _release_slot_job_fixed(self, pipe, slot) -> Tuple[bool, dict]:
slots_to_delete = []
slots_to_add_back = {}
slot_code=self.get_time_slot_key(slot)
# TODO, How to release one planned job from multiple conflicted job slot. @duan
slots_to_delete.append(slot_code)
new_job_list = []
new_start_minutes = slot.start_minutes
new_end_minutes = slot.end_minutes
new_slot_prev_slot_code = slot.prev_slot_code
new_slot_next_slot_code = slot.next_slot_code
new_slot_start_location = slot.start_location
new_slot_end_location = slot.end_location
if slot.prev_slot_code is not None:
prev_slot = self.get_slot(redis_handler=pipe, slot_code = slot.prev_slot_code, raise_exception=False)
if prev_slot is None:
slot.prev_slot_code = None
else:
# if prev_slot is None:
# log.error(f"Failed to read prev_slot_code={slot.prev_slot_code}. During release_job_time_slots for {job.job_code}")
# return ( False, { "messages": [ {"message": f"Internal Error, release_job_time_slots for {job.job_code}, failed to read slot_code={slot.prev_slot_code}" }]}, )
# prev_2nd_slot_code = prev_slot.prev_slot_code
if prev_slot.slot_type == TimeSlotType.FLOATING:
# maximum i merge only one free on left, same as right
slots_to_delete.append(slot.prev_slot_code)
new_job_list += prev_slot.assigned_job_codes
new_start_minutes = prev_slot.start_minutes
new_slot_prev_slot_code = prev_slot.prev_slot_code
new_slot_start_location = prev_slot.start_location
if slot.next_slot_code is not None:
next_slot = self.get_slot(redis_handler=pipe, slot_code = slot.next_slot_code, raise_exception=False)
# if next_slot is None:
# log.error(f"Failed to read next_slot_code={slot.next_slot_code}. During release_job_time_slots for {job.job_code}")
# return ( False, { "messages": [ {"message": f"Internal Error, release_job_time_slots for {job.job_code}, failed to read slot_code={slot.next_slot_code}" }]}, )
if next_slot is None:
slot.next_slot_code = None
else:
if next_slot.slot_type == TimeSlotType.FLOATING:
# maximum i merge only one free on right
slots_to_delete.append(slot.next_slot_code)
new_job_list += next_slot.assigned_job_codes
new_end_minutes = next_slot.end_minutes
new_slot_next_slot_code = next_slot.next_slot_code
new_slot_end_location = next_slot.end_location
new_slot = WorkingTimeSlot(
slot_type=TimeSlotType.FLOATING,
start_minutes=new_start_minutes,
end_minutes=new_end_minutes,
prev_slot_code=new_slot_prev_slot_code,
next_slot_code=new_slot_next_slot_code,
start_location=new_slot_start_location,
end_location=new_slot_end_location,
worker_id=slot.worker_id,
available_free_minutes=new_end_minutes - new_start_minutes + 1,
assigned_job_codes=new_job_list,
)
new_slot_code = self.get_time_slot_key(new_slot)
# Link up the new slot to its previous and next slot
# This should happen after we have merged its prev and next floating slots.
# prev_2nd_slot_as_list could be same as prev_slot_code if the previous one is job slot. I still have to update its next_node code.
if new_slot.prev_slot_code is not None: # Not none, link further to prev.next_slot_code.
affect_prev_2nd_slot = self.get_slot(redis_handler=pipe, slot_code = new_slot.prev_slot_code, raise_exception=False)
if affect_prev_2nd_slot is None:
new_slot.prev_slot_code = None
else:
affect_prev_2nd_slot.next_slot_code = new_slot_code
slots_to_add_back[new_slot.prev_slot_code] = affect_prev_2nd_slot
if new_slot.next_slot_code is not None:
affect_next_2nd_slot = self.get_slot(redis_handler=pipe, slot_code = new_slot.next_slot_code, raise_exception=False)
if affect_next_2nd_slot is None:
new_slot.next_slot_code = None
else:
affect_next_2nd_slot.prev_slot_code = new_slot_code
slots_to_add_back[new_slot.next_slot_code] = affect_next_2nd_slot
if new_slot.start_minutes >= new_slot.end_minutes:
log.info(f"Empty slot generated after releasing and it is skippped., slot_code = {new_slot_code}, slot = {new_slot}")
else:
slots_to_add_back[new_slot_code] = new_slot
        # This updates an existing floating slot; removing one job does not change the recommendation.
        # I skip updating the recommendation slot.
# now we can put the pipeline back into buffered mode with MULTI
self.atomic_slot_delete_and_add_back(
redis_handler=pipe,
slots_to_delete=slots_to_delete,
slots_to_add_back=slots_to_add_back,
)
# if a WatchError wasn't raised during execution, everything
# we just did happened atomically.
return (
True,
{ "messages": [ { "message": "success", "deleted": len(slots_to_delete), "added": len(slots_to_add_back), } ] },
)
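    # Merge sketch for the method above: releasing a JOB_FIXED slot J that sits
    # between floating neighbours F1 and F2 deletes F1, J and F2, then adds
    # back a single floating slot spanning F1.start_minutes..F2.end_minutes
    # that inherits F1's start location, F2's end location, and both job lists.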
def cut_off_time_slots(self, action_dict: ActionDict, probe_only=False) -> Tuple[bool, dict]:
"""
action_dict -> action_type: 'JOB_FS' , 'JOB_N',
"""
job_code = action_dict.job_code
start_minutes = action_dict.scheduled_start_minutes
duration_minutes = action_dict.scheduled_duration_minutes
end_minutes = start_minutes + duration_minutes
action_worker_ids = action_dict.scheduled_worker_codes
the_job = self.env.jobs_dict[action_dict.job_code]
# the_job_type = the_job.job_type
if job_code in config.DEBUGGING_JOB_CODE_SET:
log.debug(f"Debug: cut off {config.DEBUGGING_JOB_CODE_SET} ")
trx_i = 0
# https://redis-py-doc.readthedocs.io/en/2.7.0/README.html#pipelines
with self.r.pipeline() as pipe:
while trx_i < MAX_TRANSACTION_RETRY:
# recommended_slot_delete_list = []
# recommended_slot_update_list = []
try:
slots_to_delete = []
slots_to_add_back = {}
# put a WATCH on each job_code
error_occurred = False
error_messages = []
for worker_id in action_worker_ids:
the_slots = self.get_overlapped_slots(
worker_id=worker_id,
start_minutes=start_minutes,
end_minutes=end_minutes,
)
if len(the_slots) < 1:
if action_dict.is_forced_action:
                                # Very likely it is a weekend/night overtime; I will create a J- slot out of nowhere... like "MY|D|03004640|1|PESTS|1|113"
self.create_JOB_FIXED_time_slot_for_forced_action( worker_code=worker_id, action_dict=action_dict )
continue
else:
log.warn(f"JOB:{job_code}:cut_off_time_slots: no slots to cut from, worker={worker_id}, start={start_minutes}")
return (
False,
{"messages": [{"message": f"JOB:{job_code}:WORKER:{worker_id}: No working slot matched this request. Rejected as not forced action."}]},
)
elif len(the_slots) > 1:
                            # very likely, multiple jobs conflict in certain timeslots. I should report this to the business.
                            # Maybe report it via one kafka message? KafkaEnvMessage.
if action_dict.is_forced_action:
# At least Two visits are overlapping, Here i try to merge them into one visit.
if (the_job.job_type in (JobType.APPOINTMENT, JobType.ABSENCE,)) | (the_job.planning_status == JobPlanningStatus.PLANNED) :
# - Find the overlapping Working visit and merge into it.
# - I assume there should be only 1 job_fixed.
# self.create_JOB_FIXED_time_slot_for_forced_action(
# worker_code=worker_id, action_dict=action_dict
# )
error_occurred = error_occurred or ( self._cut_off_fixed_job_from_multiple_slots(
the_slots=the_slots, the_job=the_job, action_dict=action_dict,
slots_to_delete=slots_to_delete, slots_to_add_back=slots_to_add_back,
pipe=pipe, error_messages = error_messages
))
log.warn(f"ACTION:{str(action_dict)}:One job request is spanning across slots. It is enforced for now since is_forced_action == True." )
continue # To next worker
# Even if in forced action, in-planning jobs are also rejected for now. # TODO, @duan 2020-11-23 15:49:23
log.error(f"One job request is spanning across slots. Most likely this is due to conflictions. action = {str(action_dict)}, slots={str([(s.slot_type, s.start_minutes, s.assigned_job_codes, s.end_minutes) for s in the_slots])}")
# matched slot jobs:{s.assigned_job_codes}:
return (
False,
{"messages": [{"message": f"One job request is spanning across slots. action = {str(action_dict)}, slots={str([(s.slot_type, s.start_minutes, s.assigned_job_codes, s.end_minutes) for s in the_slots])}"}]},
)
else: # len(the_slots) == 1:
local_slot = the_slots.pop()
slot_code = self.get_time_slot_key(local_slot)
if slot_code in config.DEBUGGING_SLOT_CODE_SET:
log.debug("debug atomic_slot_delete_and_add_back")
# Check that this slot is not locked by appointment recomemndation.
the_slot_lock_code = self.env.get_recommened_locked_slot_redis_key(slot_code)
pipe.watch(the_slot_lock_code)
the_slot_lock = pipe.get(the_slot_lock_code)
if (the_slot_lock is not None) & (the_job.job_type != JobType.APPOINTMENT):
log.warn(f"cut_off_time_slots:JOB:{job_code}:WORKER:{worker_id}: The target slot ({slot_code}) is locked by recommendation while trying cut off slot. ACTION = {str(action_dict)} ")
                            # 2021-01-14 14:26:12, after blocking batch returns, I now allow all changes to slots.
# return (
# False,
# {"messages": [{"message": f"JOB:{job_code}:WORKER:{worker_id}: Rejected because the target slot ({slot_code}) is locked by recommendation while trying cut off slot"}]},
# )
# Now read from redis and check the slot is still valid/alive
pipe.watch(slot_code)
slot_redis = pipe.get(slot_code)
if slot_redis is None:
log.error(f"slot_redis ({slot_code}) is None while trying cut off")
return (
False,
{"messages": [{"message": f"JOB:{job_code}:WORKER:{worker_id}: Rejected because of internal error: slot on redis ({slot_code}) is None while trying cut off"}]},
)
current_slot_as_list = json.loads(slot_redis)
slot = WorkingTimeSlot(*current_slot_as_list)
if job_code in slot.assigned_job_codes:
log.warn(f"cut_off_time_slots: while trying to add floating job, the slot={slot_code} already contains this job code={job_code}")
return (
True, # I consider this as OK, then the_job.schedule_start_minutes will indicate true start value
{"messages": [{"message": f"cut_off_time_slots: the slot={slot_code} already contains this job code={job_code}"} ]},
)
if slot.slot_type == TimeSlotType.FLOATING:
error_occurred = error_occurred or (self._cut_off_fixed_job_from_single_floating_slot(
slot=slot, the_job=the_job, action_dict=action_dict,
slots_to_delete=slots_to_delete, slots_to_add_back=slots_to_add_back,
pipe=pipe, error_messages = error_messages
))
else:
# Because I matched only 1 job_fixed slot, the new job is a subset of the matched timeslot. I simply attach the job code to current period.
# The start time is also later than the existing job_code.
if action_dict.is_forced_action:
log.warn(f"The only matched slot is not free/movable, but is_forced_action == True. Current job_code={job_code}, matched slot has {slot.assigned_job_codes}, matched slot_code={slot_code}, worker={worker_id}, start={start_minutes}. " )
slot.assigned_job_codes.append(the_job.job_code)
slots_to_add_back[slot_code] = slot
continue
else:
log.error(f"The only matched slot is not free/movable. This normally means another conflicted visit. Current job_code={job_code}, matched slot has {slot.assigned_job_codes}, matched slot_code={slot_code}, worker={worker_id}, start={start_minutes}, " )
return (
False,
{"messages": [{"message": f"no slots, worker_id={worker_id}"}]},
)
if error_occurred:
if ( action_dict.is_forced_action):
log.warn(f"REPLAY_ERROR: {str(error_messages)}")
else:
return False, {"messages": error_messages}
# now we can put the pipeline back into buffered mode with MULTI
if probe_only:
return (not error_occurred,error_messages )
self.atomic_slot_delete_and_add_back(
redis_handler=pipe,
slots_to_delete=slots_to_delete,
slots_to_add_back=slots_to_add_back,
)
return (
True,
{"messages": [{
"message": "success",
"deleted": len(slots_to_delete),
"added": len(slots_to_add_back),
}]
},
)
except redis.WatchError:
# another client must have changed 'slot' between the time we started WATCHing it and the pipeline's execution.
log.warn(f"WatchError: Transaction interruppted. Counter: {trx_i}, I will retry")
# continue
trx_i += 1
if trx_i >= MAX_TRANSACTION_RETRY:
log.error("failed to add slot, trx_i >= MAX_TRANSACTION_RETRY")
return (False,{"messages":"Internal error trx_i >= MAX_TRANSACTION_RETRY"} )
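    # The method above follows redis-py's optimistic-locking recipe: WATCH the
    # slot keys, read them, queue the writes after MULTI (inside
    # atomic_slot_delete_and_add_back), then EXEC; if any watched key changed
    # in between, EXEC raises WatchError and the loop retries, up to
    # MAX_TRANSACTION_RETRY times.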
def get_overlapped_job_codes(self, worker_id, start_minutes, end_minutes):
# (interval_begin, interval_end) = self.get_interval_begin_end_by_worker_code(
# worker_id, start_minutes, end_minutes
# )
# slots = self.time_slot_tree[interval_begin:interval_end]
over_slots = self.get_overlapped_slots( worker_id, start_minutes, end_minutes)
overlapped_jobs = []
for slot in over_slots:
if slot.slot_type != TimeSlotType.ABSENCE: # F or J
                # TODO, this is duplicated with job_code_sanity_check 2020-12-18 15:57:55
for job_code in slot.assigned_job_codes:
if job_code not in self.env.jobs_dict.keys():
# TODO @duan
log.error(
f"Non existing job = {job_code} identified. Please consider deleting it"
)
# raise ValueError("TEMP")
continue
overlapped_jobs.append(job_code)
else:
pass
# overlapped_jobs.append(slot.referred_object_code)
return overlapped_jobs
def get_overlapped_slots(self, worker_id, start_minutes, end_minutes):
(interval_begin, interval_end) = self.get_interval_begin_end_by_worker_code(
worker_id, start_minutes, end_minutes
)
slots = self.time_slot_tree[interval_begin:interval_end]
        # This is a temp fix:
        # do a sanity check over all slots and their job codes to make sure none of them are dangling.
all_slots = []
for slot in slots:
a_slot = self.job_code_sanity_check(slot.data)
if a_slot is not None:
all_slots.append(a_slot)
return all_slots
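    # A sketch of the per-worker interval keying that get_interval_begin_end_by_worker_code
    # presumably implements (based on the commented-out formula kept further below:
    # worker_id * MAX_MINUTES_PER_TECH + minutes). Each worker owns a disjoint band on a
    # single shared number line, so one IntervalTree can index all workers at once.
    def _sketch_interval_begin_end(self, worker_id, start_minutes, end_minutes):
        interval_begin = worker_id * MAX_MINUTES_PER_TECH + int(start_minutes)
        interval_end = worker_id * MAX_MINUTES_PER_TECH + int(end_minutes)
        return interval_begin, interval_end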
def job_code_sanity_check(self, slot):
if PLANNER_SERVER_ROLE != "recommendation":
return slot
new_jobs_codes = []
found_dangling_job = False
for job_code in slot.assigned_job_codes:
if job_code not in self.env.jobs_dict.keys():
log.error(f"job_code_sanity_check: Found dangling job_code = {job_code}, trying to remove. key={self.get_time_slot_key(slot)}, slot={str(slot)} ")
found_dangling_job = True
else:
new_jobs_codes.append(job_code)
        if (slot.slot_type == TimeSlotType.JOB_FIXED) and (len(new_jobs_codes) < 1):
log.error(f"slot_code = {self.get_time_slot_key(slot)}, len(a_slot.assigned_job_codes) < 1. I will release this slot")
with self.r.pipeline() as p:
                self._release_slot_job_fixed(pipe=p, slot=slot)
return None
if found_dangling_job:
slot.assigned_job_codes = new_jobs_codes
            self.atomic_slot_delete_and_add_back(redis_handler=self.r.pipeline(), slots_to_delete=[], slots_to_add_back={self.get_time_slot_key(slot): slot})
            log.info(f"job_code_sanity_check: Removed dangling job codes; remaining codes={new_jobs_codes}. key={self.get_time_slot_key(slot)}.")
return slot
def remove_overlapped_slots(self, original_slots: set, rejected_slots: set) -> set:
if len(rejected_slots) < 1:
return original_slots
new_slot_tree = IntervalTree([Interval(sl[0], sl[1], sl) for sl in original_slots])
for r_slot in rejected_slots:
for r_interval in new_slot_tree[r_slot[0] : r_slot[1]]:
new_slot_tree.remove(r_interval) # Interval(r_slot[0], r_slot[1], r_slot)
return set(new_slot_tree)
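    # Usage sketch for remove_overlapped_slots (hypothetical data; a slot tuple is
    # (start_minutes, end_minutes, payload)). Note the return value is a set of
    # intervaltree.Interval objects whose .data carries the original tuple.
    @staticmethod
    def _sketch_remove_overlapped_slots_usage():
        original = {(0, 60, 'a'), (120, 180, 'b')}
        rejected = {(50, 70, 'x')}
        tree = IntervalTree([Interval(sl[0], sl[1], sl) for sl in original])
        for r_slot in rejected:
            for r_interval in tree[r_slot[0]:r_slot[1]]:
                tree.remove(r_interval)
        return {iv.data for iv in tree}  # == {(120, 180, 'b')}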
def atomic_slot_delete_and_add_back(self, redis_handler, slots_to_delete, slots_to_add_back):
redis_handler.multi()
for slot_code in slots_to_add_back:
slot_to_add = slots_to_add_back[slot_code]
if slot_code in config.DEBUGGING_SLOT_CODE_SET:
log.debug("debug atomic_slot_delete_and_add_back")
            if slot_to_add.next_slot_code == 'env_MY_2/s/MY|D|3|CT29_08245_08277_F':
                log.debug("debug creation of slot with this next_slot_code")
            if slot_to_add.end_minutes - slot_to_add.start_minutes < 1:
                log.error("atomic_slot_delete_and_add_back debug error: slot is shorter than 1 minute")
                pass
for slot_code in slots_to_delete:
#
if slot_code in config.DEBUGGING_SLOT_CODE_SET:
log.debug("debug atomic_slot_delete_and_add_back")
redis_handler.delete(slot_code)
self._remove_slot_code_from_internal_cache(slot_code)
for slot_code in slots_to_add_back:
# Sort all job codes by job start time
slot_to_add = slots_to_add_back[slot_code]
slot_to_add.assigned_job_codes = sorted(slot_to_add.assigned_job_codes, key=lambda x: self.env.jobs_dict[x].scheduled_start_minutes)
redis_handler.set(
slot_code, json.dumps( slot_to_add , cls=TimeSlotJSONEncoder)
)
if slot_code in config.DEBUGGING_SLOT_CODE_SET:
log.debug("debug atomic_slot_delete_and_add_back")
redis_handler.execute()
self.env.kafka_server.post_changed_slot_codes(
message_type=KafkaMessageType.DELETE_WORKING_TIME_SLOTS,
changed_slot_codes_list=slots_to_delete,
)
self.env.kafka_server.post_changed_slot_codes(
message_type=KafkaMessageType.UPDATE_WORKING_TIME_SLOTS,
changed_slot_codes_list=list(slots_to_add_back.keys()),
)
for slot_code in slots_to_delete:
if slot_code in self.time_slot_dict.keys():
slot = self.time_slot_dict[slot_code]
net_overtime_minutes = 0 - slot.start_overtime_minutes - slot.end_overtime_minutes
if net_overtime_minutes != 0:
day_seq = int(slot.start_minutes / 1440)
                    add_overtime_success = self.env.mutate_worker_add_overtime_minutes(slot.worker_id, day_seq, net_overtime_minutes, post_changes_flag=True)
                    if not add_overtime_success:
                        log.warn(f"While deleting slot {slot_code}, failed to remove by mutate_worker_add_overtime_minutes, slot_code={slot_code}, worker={slot.worker_id}, net_overtime_minutes={net_overtime_minutes}")
(interval_begin, interval_end) = self.get_interval_begin_end_by_worker_code(
slot.worker_id, slot.start_minutes, slot.end_minutes
)
try:
self.time_slot_tree.remove(Interval(interval_begin, interval_end, slot))
except ValueError as ve:
print(ve)
log.error(f"failed to remove from local time_slot_tree. missing slot_code={slot_code}")
del self.time_slot_dict[slot_code]
else:
# TODO, duan. temp fix.
log.error(f"Failed to delete from local dict cache, slot_code = {slot_code}. Instead, i remove overlapped with ")
for s_code, s in slots_to_add_back.items():
(interval_begin, interval_end) = self.get_interval_begin_end_by_worker_code(
s.worker_id, s.start_minutes, s.end_minutes
)
for inter_s in self.time_slot_tree[interval_begin: interval_end]:
self.time_slot_tree.remove(inter_s)
for slot_code in slots_to_add_back:
if slot_code in SLOT_CODE_SET_FOR_DEBUG:
log.debug(f"pause SLOT_CODE_SET_FOR_DEBUG={SLOT_CODE_SET_FOR_DEBUG}")
slot_to_add = slots_to_add_back[slot_code]
net_overtime_minutes = 0 + slot_to_add.start_overtime_minutes + slot_to_add.end_overtime_minutes
if net_overtime_minutes != 0:
day_seq = int(slot_to_add.start_minutes / 1440)
                add_overtime_success = self.env.mutate_worker_add_overtime_minutes(slot_to_add.worker_id, day_seq, net_overtime_minutes, post_changes_flag=True)
                if not add_overtime_success:
                    log.warn(f"While adding slot {slot_code}, failed to mutate_worker_add_overtime_minutes, slot_code={slot_code}, worker={slot_to_add.worker_id}, net_overtime_minutes={net_overtime_minutes}")
self._set_into_internal_cache(slot_code, slot_to_add)
# if self.time_slot_dict[slot_code].slot_type == TimeSlotType.FLOATING:
# # recommended_slot_delete_list.append(slot_code)
# self.env.recommendation_server.update_recommendation_for_slot_change_deprecated(
# slot_code, TimeSlotOperationType.DELETE
# )
"""
interval_begin = slot_to_add.worker_id * MAX_MINUTES_PER_TECH + int(
slot_to_add.start_minutes
)
interval_end = slot_to_add.worker_id * MAX_MINUTES_PER_TECH + int(
slot_to_add.end_minutes
)
# self.time_slot_tree[interval_begin:interval_end] = slot
self.time_slot_tree.add(Interval(interval_begin, interval_end, slot_to_add))
self.time_slot_dict[slot_code] = slots_to_add_back[slot_code]
"""
def _cut_off_fixed_job_from_single_floating_slot(
self, slot, the_job, action_dict: ActionDict, slots_to_delete, slots_to_add_back, pipe, error_messages
) -> bool:
start_minutes = action_dict.scheduled_start_minutes
duration_minutes = action_dict.scheduled_duration_minutes
end_minutes = start_minutes + duration_minutes
start_available_overtime_minutes = 0
end_available_overtime_minutes = 0
        if slot.prev_slot_code is None:
            start_available_overtime_minutes = self.env.get_worker_available_overtime_minutes(
                worker_code=slot.worker_id,
                day_seq=int(slot.start_minutes / 1440))
        if slot.next_slot_code is None:
            # end-side overtime applies when this is the last slot in the chain
            end_available_overtime_minutes = self.env.get_worker_available_overtime_minutes(
                worker_code=slot.worker_id,
                day_seq=int(slot.start_minutes / 1440))
slot_code = self.get_time_slot_key(slot)
if slot_code in config.DEBUGGING_SLOT_CODE_SET:
log.debug("debug _cut_off_fixed_job_from_single_floating_slot_")
if the_job.job_code in config.DEBUGGING_JOB_CODE_SET:
log.debug("debug _cut_off_fixed_job_from_single_floating_slot__ DEBUGGING_JOB_CODE_SET" )
current_job_location = JobLocationBase(*the_job.location[0:4]) # the_job.location
if the_job.job_type == JobType.ABSENCE:
new_slot_type = TimeSlotType.ABSENCE
else:
new_slot_type = TimeSlotType.JOB_FIXED
new_slot_prev_slot_code = slot.prev_slot_code
new_slot_next_slot_code = slot.next_slot_code
new_slot_start_location = slot.start_location
new_slot_end_location = slot.end_location
if the_job.job_type == JobType.ABSENCE:
new_action_location = (
the_job.location.geo_longitude,
the_job.location.geo_latitude,
TimeSlotType.ABSENCE,
)
else:
new_action_location = (
the_job.location.geo_longitude,
the_job.location.geo_latitude,
LocationType.JOB,
)
prev_travel_minutes = self.env.travel_router.get_travel_minutes_2locations(
new_action_location, slot.start_location,
)
next_travel_minutes = self.env.travel_router.get_travel_minutes_2locations(
new_action_location, slot.end_location,
)
error_occurred = False
        # TODO, urgent: mixing job type and action type!
        # 2020-10-28 16:21:46
if (the_job.job_type in (JobType.APPOINTMENT, JobType.ABSENCE,)) or (
action_dict.action_type == ActionType.JOB_FIXED
):
has_slot_1 = False
has_slot_3 = False
slots_to_delete.append(slot_code)
front_jobs = []
back_jobs = []
for job_code in slot.assigned_job_codes:
if (
self.env.jobs_dict[job_code].scheduled_start_minutes
<= start_minutes
):
front_jobs.append(job_code)
else:
back_jobs.append(job_code)
# slot_3_key = slot.next_slot_code
slot_2 = WorkingTimeSlot(
start_minutes=start_minutes,
end_minutes=end_minutes,
prev_slot_code=slot.prev_slot_code,
next_slot_code=slot.next_slot_code,
slot_type=new_slot_type,
start_location=current_job_location,
end_location=current_job_location, # current_slot.start_location,
assigned_job_codes=[the_job.job_code],
worker_id=slot.worker_id,
referred_object_code=None,
)
slot_2_key = self.get_time_slot_key(slot_2)
if slot_2_key == "<KEY>":
log.debug("Debug releasing 'env_MY_2/s/CT36_07810_07820_J' ")
if (
start_minutes + duration_minutes
< slot.end_minutes - 1
):
            # - next_travel_minutes  # skip a 1-minute leftover.
            # Even if the remaining minutes are less than the travel time, keep the free slot record; otherwise we lose track of it.
            # 3rd, next free slot. Optional
has_slot_3 = True
slot_3 = WorkingTimeSlot(
start_minutes=end_minutes,
end_minutes=slot.end_minutes,
prev_slot_code=slot_2_key,
next_slot_code=slot.next_slot_code,
slot_type=TimeSlotType.FLOATING,
start_location=current_job_location,
end_location=slot.end_location,
assigned_job_codes=list(back_jobs),
worker_id=slot.worker_id,
referred_object_code=None,
)
if len(back_jobs) > 0:
(
prev_travel,
next_travel,
inside_travel,
) = self.env.get_travel_time_jobs_in_slot(slot_3, back_jobs)
demanded_minutes = int(prev_travel + next_travel + sum(inside_travel)) + sum([self.env.jobs_dict[jc].scheduled_duration_minutes for jc in back_jobs])
if (
demanded_minutes > slot_3.end_minutes - slot_3.start_minutes + end_available_overtime_minutes
):
error_occurred = True
error_messages.append(
{
"message": f"not enough travel in back_jobs={back_jobs}, demanding={demanded_minutes} minutes, slot {self.get_time_slot_key(slot_3)}"
}
)
elif (
demanded_minutes > slot_3.end_minutes - slot_3.start_minutes
):
                        slot_3.end_overtime_minutes = demanded_minutes - (slot_3.end_minutes - slot_3.start_minutes)
                        log.warn(f"{slot_3.end_overtime_minutes} overtime minutes used for back_jobs={back_jobs}, demanding={demanded_minutes} minutes, slot {self.get_time_slot_key(slot_3)}")
else:
pass
# return False,
# else skipped.
# slot_3.assigned_job_codes = back_jobs
# slot_3.worker_id = worker_id
slot_3_key = self.get_time_slot_key(slot_3)
slots_to_add_back[slot_3_key] = slot_3
new_slot_next_slot_code = slot_3_key
else:
if len(back_jobs) > 0:
error_occurred = True
error_messages.append(
{
"message": f"back_jobs={back_jobs} are pushed out without slot_3 in slot {slot_code}"
}
)
            # - prev_travel_minutes: even if there is not sufficient travel time, the free slot is created to keep track of the place.
if start_minutes > slot.start_minutes + 1:
has_slot_1 = True
slot_1 = WorkingTimeSlot(
start_minutes=slot.start_minutes,
end_minutes=start_minutes,
prev_slot_code=slot.prev_slot_code,
next_slot_code=slot_2_key,
slot_type=TimeSlotType.FLOATING,
start_location=slot.start_location,
end_location=current_job_location,
assigned_job_codes=front_jobs,
worker_id=slot.worker_id,
)
if len(front_jobs) > 0:
(
prev_travel,
next_travel,
inside_travel,
) = self.env.get_travel_time_jobs_in_slot(slot_1, front_jobs)
demanded_minutes = int(prev_travel + next_travel + sum(inside_travel)) + sum([self.env.jobs_dict[jc].scheduled_duration_minutes for jc in front_jobs])
if demanded_minutes > slot_1.end_minutes - slot_1.start_minutes + start_available_overtime_minutes:
error_occurred = True
error_messages.append(
{"message": f"not enough travel time for front_jobs={front_jobs}, demanding={demanded_minutes} minutes, in slot {self.get_time_slot_key(slot_1)}"}
)
elif (
demanded_minutes > slot_1.end_minutes - slot_1.start_minutes
):
                        slot_1.start_overtime_minutes = demanded_minutes - (slot_1.end_minutes - slot_1.start_minutes)
                        log.warn(f"{slot_1.start_overtime_minutes} overtime minutes used for front_jobs={front_jobs}, demanding={demanded_minutes} minutes, slot {self.get_time_slot_key(slot_1)}")
else:
pass
# front_jobs.available_free_minutes =
slot_1_key = self.get_time_slot_key(slot_1)
slots_to_add_back[slot_1_key] = slot_1
new_slot_prev_slot_code = slot_1_key
else:
if len(front_jobs) > 0:
error_occurred = True
error_messages.append(
{
"message": f"Front jobs={front_jobs} are pushed out in slot {slot_code}"
}
)
# Now we are ready to link them up, with prev & next
# slot_2_list = list(slot_2)
new_slot_code_for_prev_slot = slot_2_key
new_slot_code_for_next_slot = slot_2_key
if has_slot_1:
new_slot_code_for_prev_slot = slot_1_key
slot_2.prev_slot_code = slot_1_key
if has_slot_3:
new_slot_code_for_next_slot = slot_3_key
slot_2.next_slot_code = slot_3_key
# slot_2 = WorkingTimeSlot(*slot_2_list)
slots_to_add_back[slot_2_key] = slot_2
try:
if slot.prev_slot_code is not None:
                    affect_prev_slot = self.get_slot(redis_handler=pipe, slot_code=slot.prev_slot_code)
affect_prev_slot.next_slot_code = new_slot_code_for_prev_slot
slots_to_add_back[slot.prev_slot_code] = affect_prev_slot
if slot.next_slot_code is not None:
                    affect_next_slot = self.get_slot(redis_handler=pipe, slot_code=slot.next_slot_code)
affect_next_slot.prev_slot_code = new_slot_code_for_next_slot
slots_to_add_back[slot.next_slot_code] = affect_next_slot
except MissingSlotException as mse:
log.error(f"Failed to read slot_code={mse.slot_code}. During linking prev next in _cut_off_fixed_job_from_single_floating_slot for {the_job.job_code}")
pass # For now
        else:  # not JobType.ABSENCE/APPOINTMENT and not ActionType.JOB_FIXED: the job is flexible (including in-planning jobs) and is arranged into the existing floating slot
if prev_travel_minutes <= next_travel_minutes:
all_jobs = [the_job.job_code] + slot.assigned_job_codes
else:
all_jobs = slot.assigned_job_codes + [the_job.job_code]
all_durations = [self.env.jobs_dict[jc].scheduled_duration_minutes for jc in all_jobs ]
max_overtime_minutes = max([start_available_overtime_minutes, end_available_overtime_minutes])
( prev_travel,
next_travel,
inside_travel,
) = self.env.get_travel_time_jobs_in_slot(slot, all_jobs)
demanded_minutes = prev_travel + next_travel + sum(inside_travel) + sum(all_durations)
if (
demanded_minutes
> slot.end_minutes - slot.start_minutes + max_overtime_minutes
):
error_occurred = True
error_messages.append(
{"message": f"not enough travel in slot {slot_code}"}
)
            elif (
                demanded_minutes
                > slot.end_minutes - slot.start_minutes
            ):
                if demanded_minutes < slot.end_minutes - slot.start_minutes + end_available_overtime_minutes:
                    slot.end_overtime_minutes = demanded_minutes - (slot.end_minutes - slot.start_minutes)
                elif demanded_minutes < slot.end_minutes - slot.start_minutes + start_available_overtime_minutes:
                    slot.start_overtime_minutes = demanded_minutes - (slot.end_minutes - slot.start_minutes)
                else:
                    log.error(f"job_code={the_job.job_code}, demanded_minutes={demanded_minutes}, failed to fit even with overtime, but why?")
else:
# Regular working hour
pass
slot.assigned_job_codes = all_jobs
# slot.assigned_job_codes = all_jobs
# new_slot = WorkingTimeSlot(*current_slot_as_list)
slots_to_add_back[slot_code] = slot
# This is updating existing floating slot, so I re-evaluate the recommendation
# recommended_slot_update_list.append(slot_code)
# self.env.recommendation_server.update_recommendation_for_slot_change_deprecated(
# slot_code, TimeSlotOperationType.UPDATE
# )
return error_occurred
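    # The capacity test used repeatedly above, factored out as a sketch (hypothetical
    # helper): a set of jobs fits a slot when travel into, out of, and between the jobs,
    # plus the job durations, does not exceed the slot span plus available overtime.
    def _sketch_jobs_fit_in_slot(self, slot, job_codes, available_overtime_minutes=0):
        prev_travel, next_travel, inside_travel = self.env.get_travel_time_jobs_in_slot(slot, job_codes)
        demanded_minutes = int(prev_travel + next_travel + sum(inside_travel)) + sum(
            self.env.jobs_dict[jc].scheduled_duration_minutes for jc in job_codes)
        return demanded_minutes <= slot.end_minutes - slot.start_minutes + available_overtime_minutes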
def _cut_off_fixed_job_from_multiple_slots(
self, the_slots, the_job, action_dict: ActionDict, slots_to_delete, slots_to_add_back, pipe, error_messages
) -> bool:
if the_job.job_type == JobType.ABSENCE:
new_slot_type = TimeSlotType.ABSENCE
else:
new_slot_type = TimeSlotType.JOB_FIXED
all_redis_slots_temp = []
for local_slot in the_slots: # .data
slot_code = self.get_time_slot_key(local_slot)
# pipe.watch(slot_code)
# slot_redis = pipe.get(slot_code)
# current_slot_as_list = json.loads(slot_redis)
# slot__ = WorkingTimeSlot(*current_slot_as_list)
            slot__ = self.get_slot(redis_handler=pipe, slot_code=slot_code, raise_exception=False)
if slot__ is not None:
all_redis_slots_temp.append(slot__)
else:
return True
        all_redis_slots = sorted(all_redis_slots_temp, key=lambda x: x.start_minutes)
# working_slots = [s for s in the_slots if s.slot_type != TimeSlotType.FLOATING]
# floating_slots = [s for s in the_slots if s.slot_type == TimeSlotType.FLOATING]
has_slot_1 = False
has_slot_3 = False
prev_slot_slot_for_all = all_redis_slots[0].prev_slot_code
next_slot_slot_for_all = all_redis_slots[-1].next_slot_code
slot_2 = WorkingTimeSlot(
start_minutes=action_dict.scheduled_start_minutes,
end_minutes=action_dict.scheduled_start_minutes + action_dict.scheduled_duration_minutes,
prev_slot_code=prev_slot_slot_for_all,
next_slot_code=next_slot_slot_for_all,
slot_type=new_slot_type,
start_location=the_job.location,
end_location=the_job.location, # current_slot.start_location,
assigned_job_codes=[the_job.job_code],
worker_id=all_redis_slots[0].worker_id,
referred_object_code=None,
)
        if (all_redis_slots[0].slot_type == TimeSlotType.FLOATING) and (
all_redis_slots[0].start_minutes < action_dict.scheduled_start_minutes
):
has_slot_1 = True
slot_1 = all_redis_slots[0]
slot_1_key = self.get_time_slot_key(slot_1)
# Must be true
if slot_1.end_minutes < action_dict.scheduled_start_minutes:
log.error("Not possible: slot_1.end_minutes < action_dict.scheduled_start_minutes")
slots_to_delete.append(slot_1_key)
            slot_1.end_minutes = action_dict.scheduled_start_minutes
slot_1_key = self.get_time_slot_key(slot_1)
slot_2.prev_slot_code = slot_1_key
all_redis_slots.pop(0)
        if (all_redis_slots[-1].slot_type == TimeSlotType.FLOATING) and (
all_redis_slots[-1].end_minutes > action_dict.scheduled_start_minutes + action_dict.scheduled_duration_minutes
):
has_slot_3 = True
slot_3 = all_redis_slots[-1]
slot_3_key = self.get_time_slot_key(slot_3)
if slot_3.start_minutes > action_dict.scheduled_start_minutes + action_dict.scheduled_duration_minutes:
log.error("Not possible: slot_3.start_minutes > action_dict.scheduled_start_minutes + action_dict.scheduled_duration_minutes")
slots_to_delete.append(slot_3_key)
            slot_3.start_minutes = action_dict.scheduled_start_minutes + action_dict.scheduled_duration_minutes
slot_3_key = self.get_time_slot_key(slot_3)
slot_2.next_slot_code = slot_3_key
all_redis_slots.pop(-1)
for curr_slot in all_redis_slots:
if curr_slot.start_minutes < action_dict.scheduled_start_minutes :
slot_2.start_minutes = curr_slot.start_minutes
slot_2.assigned_job_codes = curr_slot.assigned_job_codes + slot_2.assigned_job_codes
else:
slot_2.assigned_job_codes = slot_2.assigned_job_codes + curr_slot.assigned_job_codes
if curr_slot.end_minutes > action_dict.scheduled_start_minutes + action_dict.scheduled_duration_minutes:
slot_2.end_minutes = curr_slot.end_minutes
curr_slot_code = self.get_time_slot_key(curr_slot)
slots_to_delete.append(curr_slot_code)
# Now the start and end minutes are fixed.
slot_2_key = self.get_time_slot_key(slot_2)
slots_to_add_back[slot_2_key] = slot_2
# if (slot_2.start_minutes < 0):
# log.debug("Error slot_21.start_minutes < 0")
# if slot_2_key == "<KEY>":
# log.debug("Debug cut off 'env_MY_2/s/CT36_07810_07820_J' ")
# Now we are ready to link them up, with prev & next
# slot_2_list = list(slot_2)
new_slot_code_for_prev_slot = slot_2_key
new_slot_code_for_next_slot = slot_2_key
if has_slot_1:
new_slot_code_for_prev_slot = slot_1_key
slot_2.prev_slot_code = slot_1_key
slot_1.next_slot_code = slot_2_key
slots_to_add_back[slot_1_key] = slot_1
# if (slot_1.start_minutes < 0):
# log.debug("Error slot_1.start_minutes < 0")
if has_slot_3:
new_slot_code_for_next_slot = slot_3_key
slot_2.next_slot_code = slot_3_key
slot_3.prev_slot_code = slot_2_key
slots_to_add_back[slot_3_key] = slot_3
# if (slot_3.start_minutes < 0):
# log.debug("Error slot_1.start_minutes < 0")
try:
if prev_slot_slot_for_all:
# prev_slot_redis = pipe.get(prev_slot_slot_for_all)
# affect_prev_slot_as_list = json.loads(prev_slot_redis)
# affect_prev_slot_as_list[3] = new_slot_code_for_prev_slot
# affect_prev_slot = WorkingTimeSlot(*affect_prev_slot_as_list)
                affect_prev_slot = self.get_slot(redis_handler=pipe, slot_code=prev_slot_slot_for_all)
affect_prev_slot.next_slot_code = new_slot_code_for_prev_slot
slots_to_add_back[prev_slot_slot_for_all] = affect_prev_slot
if next_slot_slot_for_all:
# next_slot_redis = pipe.get(next_slot_slot_for_all)
# affect_next_slot_as_list = json.loads(next_slot_redis)
# affect_next_slot_as_list[2] = new_slot_code_for_next_slot
# affect_next_slot = WorkingTimeSlot(*affect_next_slot_as_list)
                affect_next_slot = self.get_slot(redis_handler=pipe, slot_code=next_slot_slot_for_all)
affect_next_slot.prev_slot_code = new_slot_code_for_next_slot
slots_to_add_back[next_slot_slot_for_all] = affect_next_slot
except MissingSlotException as mse:
log.error(f"Failed to read slot_code={mse.slot_code}. During linking prev next for {the_job.job_code}")
pass # For now
return False # error_occurred
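    # Both cut-off methods finish by re-pointing the doubly linked chain of slot codes:
    # the slot that preceded the replaced span must point at the new head, and the slot
    # that followed it must point back at the new tail. A compact sketch of that
    # relinking (hypothetical helper):
    def _sketch_relink_neighbors(self, pipe, prev_code, next_code, new_head_code, new_tail_code, slots_to_add_back):
        if prev_code is not None:
            prev_slot = self.get_slot(redis_handler=pipe, slot_code=prev_code)
            prev_slot.next_slot_code = new_head_code
            slots_to_add_back[prev_code] = prev_slot
        if next_code is not None:
            next_slot = self.get_slot(redis_handler=pipe, slot_code=next_code)
            next_slot.prev_slot_code = new_tail_code
            slots_to_add_back[next_code] = next_slot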
def create_JOB_FIXED_time_slot_for_forced_action(
self, worker_code: str, action_dict: ActionDict
) -> Tuple[bool, dict]:
start_minutes = action_dict.scheduled_start_minutes
duration_minutes = action_dict.scheduled_duration_minutes
used_overtime_minutes = duration_minutes
end_minutes = start_minutes
if self.env.jobs_dict[action_dict.job_code].job_type == JobType.ABSENCE:
            # Absence events should not occur outside of working hours.
            # If it is outside, it is treated as creating a new working slot.
end_minutes = start_minutes + duration_minutes
used_overtime_minutes = 0
the_job = self.env.jobs_dict[action_dict.job_code]
current_job_location_full = self.env.jobs_dict[action_dict.job_code].location
current_job_location = JobLocationBase(*current_job_location_full[0:4])
if the_job.job_type == JobType.ABSENCE:
new_slot_type = TimeSlotType.ABSENCE
else:
new_slot_type = TimeSlotType.JOB_FIXED
slot_2 = WorkingTimeSlot(
start_minutes=start_minutes,
end_minutes=end_minutes,
prev_slot_code=None,
next_slot_code=None,
slot_type=new_slot_type,
start_location=current_job_location,
end_location=current_job_location, # current_slot.start_location,
assigned_job_codes=[action_dict.job_code],
worker_id=worker_code,
referred_object_code=None,
end_overtime_minutes=used_overtime_minutes,
is_in_working_hour=False
)
slot_2_key = self.get_time_slot_key(slot_2)
slots_to_add_back = {slot_2_key: slot_2}
# .mset(slots_to_add_back)
with self.r.pipeline() as pipe:
self.atomic_slot_delete_and_add_back(
redis_handler=pipe, slots_to_delete=[], slots_to_add_back=slots_to_add_back,
)
return (
True,
{"messages": [{"message": f"no working slots at worker_id={worker_code}, but added as job_fixed"}]},
)
|
[
"dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot",
"json.loads",
"dispatch.plugins.kandbox_planner.env.env_models.JobLocationBase",
"json.dumps",
"intervaltree.IntervalTree",
"intervaltree.Interval",
"logging.getLogger"
] |
[((700, 745), 'logging.getLogger', 'logging.getLogger', (['"""rl_env.working_time_slot"""'], {}), "('rl_env.working_time_slot')\n", (717, 745), False, 'import logging\n'), ((2363, 2377), 'intervaltree.IntervalTree', 'IntervalTree', ([], {}), '()\n', (2375, 2377), False, 'from intervaltree import Interval, IntervalTree\n'), ((5320, 5345), 'json.loads', 'json.loads', (['slot_on_redis'], {}), '(slot_on_redis)\n', (5330, 5345), False, 'import json\n'), ((5361, 5399), 'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', (['*current_slot_as_list'], {}), '(*current_slot_as_list)\n', (5376, 5399), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((6460, 6497), 'dispatch.plugins.kandbox_planner.env.env_models.JobLocationBase', 'JobLocationBase', (['*start_location[0:4]'], {}), '(*start_location[0:4])\n', (6475, 6497), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((6526, 6561), 'dispatch.plugins.kandbox_planner.env.env_models.JobLocationBase', 'JobLocationBase', (['*end_location[0:4]'], {}), '(*end_location[0:4])\n', (6541, 6561), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((6577, 6906), 'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', ([], {'slot_type': 'TimeSlotType.FLOATING', 'start_minutes': 'start_minutes', 'end_minutes': 'end_minutes', 'prev_slot_code': 'None', 'next_slot_code': 'None', 'start_location': 'start_location_base', 'end_location': 'end_location_base', 'worker_id': 'worker_code', 'available_free_minutes': '(end_minutes - start_minutes)', 'assigned_job_codes': '[]'}), '(slot_type=TimeSlotType.FLOATING, start_minutes=\n start_minutes, end_minutes=end_minutes, prev_slot_code=None,\n next_slot_code=None, start_location=start_location_base, end_location=\n end_location_base, worker_id=worker_code, available_free_minutes=\n end_minutes - start_minutes, assigned_job_codes=[])\n', (6592, 6906), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((24177, 24590), 'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', ([], {'slot_type': 'TimeSlotType.FLOATING', 'start_minutes': 'new_start_minutes', 'end_minutes': 'new_end_minutes', 'prev_slot_code': 'new_slot_prev_slot_code', 'next_slot_code': 'new_slot_next_slot_code', 'start_location': 'new_slot_start_location', 'end_location': 'new_slot_end_location', 'worker_id': 'slot.worker_id', 'available_free_minutes': '(new_end_minutes - new_start_minutes + 1)', 'assigned_job_codes': 'new_job_list'}), '(slot_type=TimeSlotType.FLOATING, start_minutes=\n new_start_minutes, end_minutes=new_end_minutes, prev_slot_code=\n new_slot_prev_slot_code, next_slot_code=new_slot_next_slot_code,\n start_location=new_slot_start_location, end_location=\n new_slot_end_location, worker_id=slot.worker_id, available_free_minutes\n =new_end_minutes - new_start_minutes + 1, assigned_job_codes=new_job_list)\n', (24192, 24590), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((48842, 48881), 
'dispatch.plugins.kandbox_planner.env.env_models.JobLocationBase', 'JobLocationBase', (['*the_job.location[0:4]'], {}), '(*the_job.location[0:4])\n', (48857, 48881), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((62679, 63134), 'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', ([], {'start_minutes': 'action_dict.scheduled_start_minutes', 'end_minutes': '(action_dict.scheduled_start_minutes + action_dict.scheduled_duration_minutes)', 'prev_slot_code': 'prev_slot_slot_for_all', 'next_slot_code': 'next_slot_slot_for_all', 'slot_type': 'new_slot_type', 'start_location': 'the_job.location', 'end_location': 'the_job.location', 'assigned_job_codes': '[the_job.job_code]', 'worker_id': 'all_redis_slots[0].worker_id', 'referred_object_code': 'None'}), '(start_minutes=action_dict.scheduled_start_minutes,\n end_minutes=action_dict.scheduled_start_minutes + action_dict.\n scheduled_duration_minutes, prev_slot_code=prev_slot_slot_for_all,\n next_slot_code=next_slot_slot_for_all, slot_type=new_slot_type,\n start_location=the_job.location, end_location=the_job.location,\n assigned_job_codes=[the_job.job_code], worker_id=all_redis_slots[0].\n worker_id, referred_object_code=None)\n', (62694, 63134), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((69117, 69165), 'dispatch.plugins.kandbox_planner.env.env_models.JobLocationBase', 'JobLocationBase', (['*current_job_location_full[0:4]'], {}), '(*current_job_location_full[0:4])\n', (69132, 69165), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((69347, 69738), 'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', ([], {'start_minutes': 'start_minutes', 'end_minutes': 'end_minutes', 'prev_slot_code': 'None', 'next_slot_code': 'None', 'slot_type': 'new_slot_type', 'start_location': 'current_job_location', 'end_location': 'current_job_location', 'assigned_job_codes': '[action_dict.job_code]', 'worker_id': 'worker_code', 'referred_object_code': 'None', 'end_overtime_minutes': 'used_overtime_minutes', 'is_in_working_hour': '(False)'}), '(start_minutes=start_minutes, end_minutes=end_minutes,\n prev_slot_code=None, next_slot_code=None, slot_type=new_slot_type,\n start_location=current_job_location, end_location=current_job_location,\n assigned_job_codes=[action_dict.job_code], worker_id=worker_code,\n referred_object_code=None, end_overtime_minutes=used_overtime_minutes,\n is_in_working_hour=False)\n', (69362, 69738), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((5984, 6025), 'json.dumps', 'json.dumps', (['slot'], {'cls': 'TimeSlotJSONEncoder'}), '(slot, cls=TimeSlotJSONEncoder)\n', (5994, 6025), False, 'import json\n'), ((8054, 8095), 'json.dumps', 'json.dumps', (['slot'], {'cls': 'TimeSlotJSONEncoder'}), '(slot, cls=TimeSlotJSONEncoder)\n', (8064, 8095), False, 'import json\n'), ((14703, 14747), 'intervaltree.Interval', 'Interval', (['interval_begin', 'interval_end', 'slot'], {}), '(interval_begin, interval_end, slot)\n', (14711, 14747), False, 'from intervaltree import Interval, IntervalTree\n'), ((50822, 51169), 
'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', ([], {'start_minutes': 'start_minutes', 'end_minutes': 'end_minutes', 'prev_slot_code': 'slot.prev_slot_code', 'next_slot_code': 'slot.next_slot_code', 'slot_type': 'new_slot_type', 'start_location': 'current_job_location', 'end_location': 'current_job_location', 'assigned_job_codes': '[the_job.job_code]', 'worker_id': 'slot.worker_id', 'referred_object_code': 'None'}), '(start_minutes=start_minutes, end_minutes=end_minutes,\n prev_slot_code=slot.prev_slot_code, next_slot_code=slot.next_slot_code,\n slot_type=new_slot_type, start_location=current_job_location,\n end_location=current_job_location, assigned_job_codes=[the_job.job_code\n ], worker_id=slot.worker_id, referred_object_code=None)\n', (50837, 51169), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((41602, 41628), 'intervaltree.Interval', 'Interval', (['sl[0]', 'sl[1]', 'sl'], {}), '(sl[0], sl[1], sl)\n', (41610, 41628), False, 'from intervaltree import Interval, IntervalTree\n'), ((43194, 43242), 'json.dumps', 'json.dumps', (['slot_to_add'], {'cls': 'TimeSlotJSONEncoder'}), '(slot_to_add, cls=TimeSlotJSONEncoder)\n', (43204, 43242), False, 'import json\n'), ((54859, 55175), 'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', ([], {'start_minutes': 'slot.start_minutes', 'end_minutes': 'start_minutes', 'prev_slot_code': 'slot.prev_slot_code', 'next_slot_code': 'slot_2_key', 'slot_type': 'TimeSlotType.FLOATING', 'start_location': 'slot.start_location', 'end_location': 'current_job_location', 'assigned_job_codes': 'front_jobs', 'worker_id': 'slot.worker_id'}), '(start_minutes=slot.start_minutes, end_minutes=start_minutes,\n prev_slot_code=slot.prev_slot_code, next_slot_code=slot_2_key,\n slot_type=TimeSlotType.FLOATING, start_location=slot.start_location,\n end_location=current_job_location, assigned_job_codes=front_jobs,\n worker_id=slot.worker_id)\n', (54874, 55175), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n'), ((44853, 44897), 'intervaltree.Interval', 'Interval', (['interval_begin', 'interval_end', 'slot'], {}), '(interval_begin, interval_end, slot)\n', (44861, 44897), False, 'from intervaltree import Interval, IntervalTree\n'), ((9425, 9458), 'json.loads', 'json.loads', (['next_slot_as_list_str'], {}), '(next_slot_as_list_str)\n', (9435, 9458), False, 'import json\n'), ((34025, 34047), 'json.loads', 'json.loads', (['slot_redis'], {}), '(slot_redis)\n', (34035, 34047), False, 'import json\n'), ((34083, 34121), 'dispatch.plugins.kandbox_planner.env.env_models.WorkingTimeSlot', 'WorkingTimeSlot', (['*current_slot_as_list'], {}), '(*current_slot_as_list)\n', (34098, 34121), False, 'from dispatch.plugins.kandbox_planner.env.env_models import ActionDict, BaseJob, LocationTuple, TimeSlotJSONEncoder, WorkingTimeSlot, JobLocationBase\n')]
|
import argparse
import logging
from twitter_scraper import write_tweets_to_csv, enable_logging
def run():
"""twitter-scraper entry point when used as a script"""
parser = argparse.ArgumentParser(
prog='twitter-scraper', description="Scrape twitter public pages without an API key",
)
parser.add_argument('account', type=str, help="twitter account")
parser.add_argument('-f', '--filename', type=str, help="Output filename")
parser.add_argument('-p', '--pages', type=int, help="Number of pages to download", default=10)
parser.add_argument('-v', '--verbose', action='count', help="Enable logging", default=0)
args = parser.parse_args()
# Enable logging
if args.verbose > 0:
args.verbose = min(args.verbose, 3)
level = {1: logging.WARNING, 2: logging.INFO, 3: logging.DEBUG}[args.verbose]
enable_logging(level)
write_tweets_to_csv(account=args.account, filename=args.filename, page_limit=args.pages)
if __name__ == '__main__':
run()
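# Example invocation, assuming the package is installed with a `twitter-scraper`
# console script (account name hypothetical):
#
#     twitter-scraper nasa -f nasa_tweets.csv -p 5 -vv
#
# -vv maps to logging.INFO in the table above; -vvv enables DEBUG.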
|
[
"twitter_scraper.write_tweets_to_csv",
"argparse.ArgumentParser",
"twitter_scraper.enable_logging"
] |
[((182, 296), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""twitter-scraper"""', 'description': '"""Scrape twitter public pages without an API key"""'}), "(prog='twitter-scraper', description=\n 'Scrape twitter public pages without an API key')\n", (205, 296), False, 'import argparse\n'), ((889, 981), 'twitter_scraper.write_tweets_to_csv', 'write_tweets_to_csv', ([], {'account': 'args.account', 'filename': 'args.filename', 'page_limit': 'args.pages'}), '(account=args.account, filename=args.filename,\n page_limit=args.pages)\n', (908, 981), False, 'from twitter_scraper import write_tweets_to_csv, enable_logging\n'), ((862, 883), 'twitter_scraper.enable_logging', 'enable_logging', (['level'], {}), '(level)\n', (876, 883), False, 'from twitter_scraper import write_tweets_to_csv, enable_logging\n')]
|
import os
import pandas as pd
from pandas import DataFrame
def load_file(file_base: str, filename: str) -> DataFrame:
"""
Load data from a csv.gz file.
Parameters
----------
file_base : str
        The directory to use as the root.
filename : str
Name of csv.gz to load
Returns
-------
DataFrame
Dataframe containing the loaded data.
"""
curr_dir = os.path.split(os.path.abspath(file_base))[0]
data = pd.read_csv(os.path.join(curr_dir, filename))
if "Date" in data:
data.Date = pd.to_datetime(data.Date)
data = data.set_index("Date")
for col in data:
data[col] = pd.to_numeric(data[col], errors="coerce")
return data
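# A minimal usage sketch (file name hypothetical): the loader resolves the path
# relative to the module passed as file_base, indexes by "Date" when present, and
# coerces every column to numeric.
if __name__ == "__main__":
    frame = load_file(__file__, "prices.csv.gz")
    print(frame.head())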
|
[
"os.path.abspath",
"pandas.to_datetime",
"os.path.join",
"pandas.to_numeric"
] |
[((481, 513), 'os.path.join', 'os.path.join', (['curr_dir', 'filename'], {}), '(curr_dir, filename)\n', (493, 513), False, 'import os\n'), ((558, 583), 'pandas.to_datetime', 'pd.to_datetime', (['data.Date'], {}), '(data.Date)\n', (572, 583), True, 'import pandas as pd\n'), ((663, 704), 'pandas.to_numeric', 'pd.to_numeric', (['data[col]'], {'errors': '"""coerce"""'}), "(data[col], errors='coerce')\n", (676, 704), True, 'import pandas as pd\n'), ((427, 453), 'os.path.abspath', 'os.path.abspath', (['file_base'], {}), '(file_base)\n', (442, 453), False, 'import os\n')]
|
#! /usr/bin/env python
import unittest
import os
from gerenuk import utility
from gerenuk.test import TESTS_DATA_DIR
CONFIG_DATA_DIR = os.path.join(TESTS_DATA_DIR, "configuration-files")
class Fsc2DataExtractionTestCase(unittest.TestCase):
def test_parse_configuration(self):
config_filepath = os.path.join(CONFIG_DATA_DIR, "sample1.cfg")
config_d = utility.parse_legacy_configuration(config_filepath)
self.assertIn("params", config_d)
expected_params = {
"concentrationShape": 1000.0,
"concentrationScale": 0.00437,
"thetaShape": 4.0,
"thetaScale": 0.001,
"ancestralThetaShape": 0,
"ancestralThetaScale": 0,
"thetaParameters": "000",
"tauShape": 1.0,
"tauScale": 0.02,
"timeInSubsPerSite": 1,
"bottleProportionShapeA": 0,
"bottleProportionShapeB": 0,
"bottleProportionShared": 0,
"migrationShape": 0,
"migrationScale": 0,
"numTauClasses": 0,}
self.assertEqual(len(expected_params), len(config_d["params"]))
for key in expected_params:
self.assertIn(key, config_d["params"])
self.assertEqual(expected_params[key], config_d["params"][key])
for key in config_d["params"]:
self.assertIn(key, expected_params)
self.assertIn("locus_info", config_d)
expected_locus_info = [
{'taxon_label': 'species1', 'locus_label': 'locus1', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 10, 'num_genes_deme1': 8, 'ti_tv_rate_ratio': 32.42, 'num_sites': 389, 'freq_a': 0.27, 'freq_c': 0.24, 'freq_g': 0.26, 'alignment_filepath': 'species1locus1.fasta', },
{'taxon_label': 'species1', 'locus_label': 'locus2', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 8, 'num_genes_deme1': 6, 'ti_tv_rate_ratio': 5.51, 'num_sites': 500, 'freq_a': 0.25, 'freq_c': 0.22, 'freq_g': 0.24, 'alignment_filepath': 'species1locus2.fasta', },
{'taxon_label': 'species1', 'locus_label': 'locus3', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 6, 'num_genes_deme1': 8, 'ti_tv_rate_ratio': 8.38, 'num_sites': 524, 'freq_a': 0.26, 'freq_c': 0.23, 'freq_g': 0.26, 'alignment_filepath': 'species1locus3.fasta', },
{'taxon_label': 'species1', 'locus_label': 'locus4', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 8, 'num_genes_deme1': 10, 'ti_tv_rate_ratio': 5.20, 'num_sites': 345, 'freq_a': 0.25, 'freq_c': 0.23, 'freq_g': 0.24, 'alignment_filepath': 'species1locus4.fasta', },
{'taxon_label': 'species1', 'locus_label': 'locus5', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 8, 'num_genes_deme1': 8, 'ti_tv_rate_ratio': 29.59, 'num_sites': 417, 'freq_a': 0.27, 'freq_c': 0.23, 'freq_g': 0.21, 'alignment_filepath': 'species1locus5.fasta', },
{'taxon_label': 'species1', 'locus_label': 'mito1', 'ploidy_factor': 0.25, 'mutation_rate_factor': 4.0, 'num_genes_deme0': 5, 'num_genes_deme1': 5, 'ti_tv_rate_ratio': 8.15, 'num_sites': 600, 'freq_a': 0.22, 'freq_c': 0.24, 'freq_g': 0.27, 'alignment_filepath': 'species1mito1.fasta', },
{'taxon_label': 'species2', 'locus_label': 'locus1', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 6, 'num_genes_deme1': 10, 'ti_tv_rate_ratio': 7.53, 'num_sites': 400, 'freq_a': 0.25, 'freq_c': 0.24, 'freq_g': 0.26, 'alignment_filepath': 'species2locus1.fasta', },
{'taxon_label': 'species2', 'locus_label': 'locus3', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 10, 'num_genes_deme1': 8, 'ti_tv_rate_ratio': 11.14, 'num_sites': 550, 'freq_a': 0.27, 'freq_c': 0.22, 'freq_g': 0.24, 'alignment_filepath': 'species2locus3.fasta', },
{'taxon_label': 'species2', 'locus_label': 'locus4', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 8, 'num_genes_deme1': 8, 'ti_tv_rate_ratio': 9.39, 'num_sites': 350, 'freq_a': 0.24, 'freq_c': 0.24, 'freq_g': 0.23, 'alignment_filepath': 'species2locus4.fasta', },
{'taxon_label': 'species2', 'locus_label': 'locus5', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 10, 'num_genes_deme1': 10, 'ti_tv_rate_ratio': 13.32, 'num_sites': 450, 'freq_a': 0.26, 'freq_c': 0.24, 'freq_g': 0.22, 'alignment_filepath': 'species2locus5.fasta', },
{'taxon_label': 'species2', 'locus_label': 'mito1', 'ploidy_factor': 0.25, 'mutation_rate_factor': 4.0, 'num_genes_deme0': 4, 'num_genes_deme1': 5, 'ti_tv_rate_ratio': 7.59, 'num_sites': 549, 'freq_a': 0.23, 'freq_c': 0.26, 'freq_g': 0.23, 'alignment_filepath': 'species2mito1.fasta', },
{'taxon_label': 'species3', 'locus_label': 'locus1', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 10, 'num_genes_deme1': 6, 'ti_tv_rate_ratio': 17.03, 'num_sites': 367, 'freq_a': 0.25, 'freq_c': 0.23, 'freq_g': 0.27, 'alignment_filepath': 'species3locus1.fasta', },
{'taxon_label': 'species3', 'locus_label': 'locus3', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 8, 'num_genes_deme1': 10, 'ti_tv_rate_ratio': 59.17, 'num_sites': 541, 'freq_a': 0.26, 'freq_c': 0.22, 'freq_g': 0.25, 'alignment_filepath': 'species3locus3.fasta', },
{'taxon_label': 'species3', 'locus_label': 'locus4', 'ploidy_factor': 1.0, 'mutation_rate_factor': 1.0, 'num_genes_deme0': 6, 'num_genes_deme1': 8, 'ti_tv_rate_ratio': 6.90, 'num_sites': 333, 'freq_a': 0.28, 'freq_c': 0.23, 'freq_g': 0.21, 'alignment_filepath': 'species3locus4.fasta', },
{'taxon_label': 'species3', 'locus_label': 'mito1', 'ploidy_factor': 0.25, 'mutation_rate_factor': 4.0, 'num_genes_deme0': 5, 'num_genes_deme1': 4, 'ti_tv_rate_ratio': 11.42, 'num_sites': 587, 'freq_a': 0.22, 'freq_c': 0.22, 'freq_g': 0.25, 'alignment_filepath': 'species3mito1.fasta', },
]
self.assertEqual(len(expected_locus_info), len(config_d["locus_info"]))
for exp_locus_definition, obs_locus_definition in zip(expected_locus_info, config_d["locus_info"]):
self.assertEqual(len(exp_locus_definition), len(obs_locus_definition))
for key in exp_locus_definition:
self.assertIn(key, obs_locus_definition)
self.assertEqual(exp_locus_definition[key], obs_locus_definition[key])
for key in obs_locus_definition:
self.assertIn(key, exp_locus_definition)
if __name__ == "__main__":
unittest.main()
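# Run note (module path assumed): from the project root the suite can also be run
# through discovery, e.g. `python -m unittest discover gerenuk/test`.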
|
[
"unittest.main",
"os.path.join",
"gerenuk.utility.parse_legacy_configuration"
] |
[((136, 187), 'os.path.join', 'os.path.join', (['TESTS_DATA_DIR', '"""configuration-files"""'], {}), "(TESTS_DATA_DIR, 'configuration-files')\n", (148, 187), False, 'import os\n'), ((6606, 6621), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6619, 6621), False, 'import unittest\n'), ((309, 353), 'os.path.join', 'os.path.join', (['CONFIG_DATA_DIR', '"""sample1.cfg"""'], {}), "(CONFIG_DATA_DIR, 'sample1.cfg')\n", (321, 353), False, 'import os\n'), ((373, 424), 'gerenuk.utility.parse_legacy_configuration', 'utility.parse_legacy_configuration', (['config_filepath'], {}), '(config_filepath)\n', (407, 424), False, 'from gerenuk import utility\n')]
|
"""Read and write Jupyter notebooks as text files"""
import os
import io
import sys
import logging
import warnings
from copy import copy, deepcopy
from nbformat.v4.rwbase import NotebookReader, NotebookWriter
from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode
import nbformat
from .formats import _VALID_FORMAT_OPTIONS
from .formats import read_format_from_metadata, update_jupytext_formats_metadata, rearrange_jupytext_metadata
from .formats import format_name_for_ext, guess_format, divine_format, get_format_implementation, long_form_one_format
from .header import header_to_metadata_and_cell, metadata_and_cell_to_header, insert_jupytext_info_and_filter_metadata
from .header import encoding_and_executable, insert_or_test_version_number
from .metadata_filter import update_metadata_filters, filter_metadata
from .cell_metadata import _IGNORE_CELL_METADATA
from .languages import default_language_from_metadata_and_ext, set_main_and_cell_language
from .pep8 import pep8_lines_between_cells
from .pandoc import md_to_notebook, notebook_to_md
from .myst import myst_extensions, myst_to_notebook, notebook_to_myst, MYST_FORMAT_NAME
class TextNotebookConverter(NotebookReader, NotebookWriter):
"""A class that can read or write a Jupyter notebook as text"""
def __init__(self, fmt):
self.fmt = copy(long_form_one_format(fmt))
self.ext = self.fmt['extension']
self.implementation = get_format_implementation(self.ext, self.fmt.get('format_name'))
def update_fmt_with_notebook_options(self, metadata):
"""Update format options with the values in the notebook metadata, and record those
options in the notebook metadata"""
# format options in notebook have precedence over that in fmt
for opt in _VALID_FORMAT_OPTIONS:
if opt in metadata.get('jupytext', {}):
self.fmt.setdefault(opt, metadata['jupytext'][opt])
if opt in self.fmt:
metadata.setdefault('jupytext', {}).setdefault(opt, self.fmt[opt])
# Is this format the same as that documented in the YAML header? If so, we want to know the format version
file_fmt = metadata.get('jupytext', {}).get('text_representation', {})
if self.fmt.get('extension') == file_fmt.get('extension') and \
self.fmt.get('format_name') == file_fmt.get('format_name'):
self.fmt.update(file_fmt)
# rST to md conversion should happen only once
if metadata.get('jupytext', {}).get('rst2md') is True:
metadata['jupytext']['rst2md'] = False
def reads(self, s, **_):
"""Read a notebook represented as text"""
if self.fmt.get('format_name') == 'pandoc':
return md_to_notebook(s)
if self.fmt.get('format_name') == MYST_FORMAT_NAME:
return myst_to_notebook(s)
lines = s.splitlines()
cells = []
metadata, jupyter_md, header_cell, pos = header_to_metadata_and_cell(lines,
self.implementation.header_prefix,
self.implementation.extension)
default_language = default_language_from_metadata_and_ext(metadata, self.implementation.extension)
self.update_fmt_with_notebook_options(metadata)
if header_cell:
cells.append(header_cell)
lines = lines[pos:]
if self.implementation.format_name and self.implementation.format_name.startswith('sphinx'):
cells.append(new_code_cell(source='%matplotlib inline'))
cell_metadata_json = False
while lines:
reader = self.implementation.cell_reader_class(self.fmt, default_language)
cell, pos = reader.read(lines)
cells.append(cell)
cell_metadata_json = cell_metadata_json or reader.cell_metadata_json
if pos <= 0:
raise Exception('Blocked at lines ' + '\n'.join(lines[:6])) # pragma: no cover
lines = lines[pos:]
set_main_and_cell_language(metadata, cells, self.implementation.extension)
cell_metadata = set()
for cell in cells:
cell_metadata.update(cell.metadata.keys())
update_metadata_filters(metadata, jupyter_md, cell_metadata)
if cell_metadata_json:
metadata.setdefault('jupytext', {}).setdefault('cell_metadata_json', True)
if self.implementation.format_name and self.implementation.format_name.startswith('sphinx'):
filtered_cells = []
for i, cell in enumerate(cells):
if cell.source == '' and i > 0 and i + 1 < len(cells) \
and cells[i - 1].cell_type != 'markdown' and cells[i + 1].cell_type != 'markdown':
continue
filtered_cells.append(cell)
cells = filtered_cells
return new_notebook(cells=cells, metadata=metadata)
def writes(self, nb, metadata=None, **kwargs):
"""Return the text representation of the notebook"""
if self.fmt.get('format_name') == 'pandoc':
metadata = insert_jupytext_info_and_filter_metadata(metadata, self.ext, self.implementation)
cells = []
for cell in nb.cells:
cell_metadata = filter_metadata(cell.metadata,
self.fmt.get('cell_metadata_filter'),
_IGNORE_CELL_METADATA)
if cell.cell_type == 'code':
cells.append(new_code_cell(source=cell.source, metadata=cell_metadata))
else:
cells.append(NotebookNode(source=cell.source, metadata=cell_metadata, cell_type=cell.cell_type))
return notebook_to_md(NotebookNode(
nbformat=nb.nbformat,
nbformat_minor=nb.nbformat_minor,
metadata=metadata,
cells=cells))
if self.fmt.get('format_name') == MYST_FORMAT_NAME or self.ext in myst_extensions(no_md=True):
pygments_lexer = metadata.get("language_info", {}).get("pygments_lexer", None)
metadata = insert_jupytext_info_and_filter_metadata(metadata, self.ext, self.implementation)
cells = []
for cell in nb.cells:
cell_metadata = filter_metadata(cell.metadata,
self.fmt.get('cell_metadata_filter'),
_IGNORE_CELL_METADATA)
if cell.cell_type == 'code':
cells.append(new_code_cell(source=cell.source, metadata=cell_metadata))
else:
cells.append(NotebookNode(source=cell.source, metadata=cell_metadata, cell_type=cell.cell_type))
return notebook_to_myst(NotebookNode(
nbformat=nb.nbformat,
nbformat_minor=nb.nbformat_minor,
metadata=metadata,
cells=cells),
default_lexer=pygments_lexer)
# Copy the notebook, in order to be sure we do not modify the original notebook
nb = NotebookNode(
nbformat=nb.nbformat,
nbformat_minor=nb.nbformat_minor,
metadata=deepcopy(metadata or nb.metadata),
cells=nb.cells)
metadata = nb.metadata
default_language = default_language_from_metadata_and_ext(metadata,
self.implementation.extension,
True) or 'python'
self.update_fmt_with_notebook_options(nb.metadata)
if 'use_runtools' not in self.fmt:
for cell in nb.cells:
if cell.metadata.get('hide_input', False) or cell.metadata.get('hide_output', False):
self.fmt['use_runtools'] = True
break
header = encoding_and_executable(nb, metadata, self.ext)
header_content, header_lines_to_next_cell = metadata_and_cell_to_header(nb, metadata,
self.implementation, self.ext)
header.extend(header_content)
cell_exporters = []
looking_for_first_markdown_cell = (self.implementation.format_name and
self.implementation.format_name.startswith('sphinx'))
split_at_heading = self.fmt.get('split_at_heading', False)
for cell in nb.cells:
if looking_for_first_markdown_cell and cell.cell_type == 'markdown':
cell.metadata.setdefault('cell_marker', '"""')
looking_for_first_markdown_cell = False
cell_exporters.append(self.implementation.cell_exporter_class(cell, default_language, self.fmt))
texts = [cell.cell_to_text() for cell in cell_exporters]
lines = []
# concatenate cells in reverse order to determine how many blank lines (pep8)
for i, cell in reversed(list(enumerate(cell_exporters))):
text = cell.remove_eoc_marker(texts[i], lines)
if i == 0 and self.implementation.format_name and \
self.implementation.format_name.startswith('sphinx') and \
(text in [['%matplotlib inline'], ['# %matplotlib inline']]):
continue
lines_to_next_cell = cell.lines_to_next_cell
if lines_to_next_cell is None:
lines_to_next_cell = pep8_lines_between_cells(text, lines, self.implementation.extension)
text.extend([''] * lines_to_next_cell)
# two blank lines between markdown cells in Rmd when those do not have explicit region markers
if self.ext in ['.md', '.markdown', '.Rmd'] and not cell.is_code():
if (i + 1 < len(cell_exporters) and not cell_exporters[i + 1].is_code() and
not texts[i][0].startswith('<!-- #') and
not texts[i + 1][0].startswith('<!-- #') and
(not split_at_heading or not (texts[i + 1] and texts[i + 1][0].startswith('#')))):
text.append('')
# "" between two consecutive code cells in sphinx
if self.implementation.format_name.startswith('sphinx') and cell.is_code():
if i + 1 < len(cell_exporters) and cell_exporters[i + 1].is_code():
text.append('""')
lines = text + lines
if header_lines_to_next_cell is None:
header_lines_to_next_cell = pep8_lines_between_cells(header_content, lines, self.implementation.extension)
header.extend([''] * header_lines_to_next_cell)
return '\n'.join(header + lines)
def reads(text, fmt, as_version=nbformat.NO_CONVERT, **kwargs):
"""
Read a notebook from a string
:param text: the text representation of the notebook
:param fmt: (optional) the jupytext format like `md`, `py:percent`, ...
:param as_version: see nbformat.reads
:param kwargs: (not used) additional parameters for nbformat.reads
:return: the notebook
"""
fmt = copy(fmt) if fmt else divine_format(text)
fmt = long_form_one_format(fmt)
ext = fmt['extension']
if ext == '.ipynb':
return nbformat.reads(text, as_version, **kwargs)
format_name = read_format_from_metadata(text, ext) or fmt.get('format_name')
if format_name:
format_options = {}
else:
format_name, format_options = guess_format(text, ext)
if format_name:
fmt['format_name'] = format_name
fmt.update(format_options)
reader = TextNotebookConverter(fmt)
notebook = reader.reads(text, **kwargs)
rearrange_jupytext_metadata(notebook.metadata)
if format_name and insert_or_test_version_number():
notebook.metadata.setdefault('jupytext', {}).setdefault('text_representation', {}).update(
{'extension': ext, 'format_name': format_name})
return notebook
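def _sketch_reads_usage():
    # Illustrative sketch (hypothetical helper, not part of jupytext): parse a
    # percent-format script into a notebook object.
    text = "# %%\nprint('hello')\n"
    nb = reads(text, fmt='py:percent')
    return nb.cells[0].cell_type  # expected: 'code'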
def read(fp, as_version=nbformat.NO_CONVERT, fmt=None, **kwargs):
""""
Read a notebook from a file name or a file object
:param fp: a file name or a file object
:param as_version: see nbformat.read
:param fmt: (optional) the jupytext format like `md`, `py:percent`, ...
:param kwargs: (not used) additional parameters for nbformat.read
:return: the notebook
"""
if as_version != nbformat.NO_CONVERT and not isinstance(as_version, int):
raise TypeError("Second argument 'as_version' should be either nbformat.NO_CONVERT, or an integer.")
if fp == '-':
text = sys.stdin.read()
return reads(text, fmt)
if not hasattr(fp, 'read'):
# Treat fp as a file name
fp = str(fp)
_, ext = os.path.splitext(fp)
fmt = copy(fmt or {})
if not isinstance(fmt, dict):
fmt = long_form_one_format(fmt)
fmt.update({'extension': ext})
with io.open(fp, encoding='utf-8') as stream:
return read(stream, as_version=as_version, fmt=fmt, **kwargs)
if fmt is not None:
fmt = long_form_one_format(fmt)
if fmt['extension'] == '.ipynb':
notebook = nbformat.read(fp, as_version, **kwargs)
rearrange_jupytext_metadata(notebook.metadata)
return notebook
return reads(fp.read(), fmt, **kwargs)
def writes(notebook, fmt, version=nbformat.NO_CONVERT, **kwargs):
""""
Write a notebook to a file name or a file object
:param notebook: the notebook
:param fmt: the jupytext format like `md`, `py:percent`, ...
:param version: see nbformat.writes
:param kwargs: (not used) additional parameters for nbformat.writes
:return: the text representation of the notebook
"""
metadata = deepcopy(notebook.metadata)
rearrange_jupytext_metadata(metadata)
fmt = copy(fmt)
fmt = long_form_one_format(fmt, metadata)
ext = fmt['extension']
format_name = fmt.get('format_name')
jupytext_metadata = metadata.get('jupytext', {})
if ext == '.ipynb':
# Remove jupytext section if empty
jupytext_metadata.pop('text_representation', {})
if not jupytext_metadata:
metadata.pop('jupytext', {})
return nbformat.writes(
NotebookNode(
nbformat=notebook.nbformat,
nbformat_minor=notebook.nbformat_minor,
metadata=metadata,
cells=notebook.cells), version, **kwargs)
if not format_name:
format_name = format_name_for_ext(metadata, ext, explicit_default=False)
if format_name:
fmt['format_name'] = format_name
update_jupytext_formats_metadata(metadata, fmt)
writer = TextNotebookConverter(fmt)
return writer.writes(notebook, metadata)
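def _sketch_writes_usage():
    # Illustrative sketch (hypothetical helper, not part of jupytext): serialize a
    # one-cell notebook to the py:light text format.
    nb = new_notebook(cells=[new_code_cell(source='1 + 1')])
    return writes(nb, fmt='py:light')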
def write(nb, fp, version=nbformat.NO_CONVERT, fmt=None, **kwargs):
""""
Write a notebook to a file name or a file object
:param nb: the notebook
:param fp: a file name or a file object
:param version: see nbformat.write
:param fmt: (optional if fp is a file name) the jupytext format like `md`, `py:percent`, ...
:param kwargs: (not used) additional parameters for nbformat.write
"""
if fp == '-':
# Use sys.stdout.buffer when possible, and explicit utf-8 encoding, cf. #331
content = writes(nb, version=version, fmt=fmt, **kwargs)
try:
# Python 3
sys.stdout.buffer.write(content.encode('utf-8'))
except AttributeError:
sys.stdout.write(content.encode('utf-8'))
return
if not hasattr(fp, 'write'):
# Treat fp as a file name
fp = str(fp)
_, ext = os.path.splitext(fp)
fmt = copy(fmt or {})
fmt = long_form_one_format(fmt, update={'extension': ext})
create_prefix_dir(fp, fmt)
with io.open(fp, 'w', encoding='utf-8') as stream:
write(nb, stream, version=version, fmt=fmt, **kwargs)
return
else:
assert fmt is not None, "'fmt' argument in jupytext.write is mandatory unless fp is a file name"
content = writes(nb, version=version, fmt=fmt, **kwargs)
if isinstance(content, bytes):
content = content.decode('utf8')
fp.write(content)
if not content.endswith(u'\n'):
fp.write(u'\n')
def create_prefix_dir(nb_file, fmt):
"""Create directory if fmt has a prefix"""
if 'prefix' in fmt:
nb_dir = os.path.dirname(nb_file) + os.path.sep
if not os.path.isdir(nb_dir):
logging.log(logging.WARNING, "[jupytext] creating missing directory %s", nb_dir)
os.makedirs(nb_dir)

def readf(nb_file, fmt=None):  # pragma: no cover
    """Read a notebook from the file with given name"""
    warnings.warn(
        "readf is deprecated. Please use read instead, and pass the fmt "
        "argument with an explicit fmt=... (see https://github.com/mwouts/jupytext/issues/262)",
        DeprecationWarning)
    return read(fp=nb_file, fmt=fmt)


def writef(notebook, nb_file, fmt=None):  # pragma: no cover
    """Write a notebook to the file with given name"""
    warnings.warn(
        'writef is deprecated. Please use write instead, and pass the fmt '
        'argument with an explicit fmt=... (see https://github.com/mwouts/jupytext/issues/262)',
        DeprecationWarning)
    return write(nb=notebook, fp=nb_file, fmt=fmt)
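
# Illustrative usage sketch (added, not part of the original module): a minimal
# round trip through read/writes/write above, assuming the nbformat.v4 helpers
# imported at the top of this module. 'py:percent' is a documented jupytext
# format string; 'notebook.py' is a hypothetical file name.
def _example_round_trip():
    nb = new_notebook(cells=[new_code_cell(source='1 + 1')])
    text = writes(nb, fmt='py:percent')         # notebook -> script text
    write(nb, 'notebook.py', fmt='py:percent')   # same content, to a file ('-' = stdout)
    return text, read('notebook.py')            # fmt inferred from the .py extension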
|
[
"copy.deepcopy",
"sys.stdin.read",
"os.makedirs",
"os.path.isdir",
"os.path.dirname",
"copy.copy",
"logging.log",
"nbformat.reads",
"os.path.splitext",
"io.open",
"nbformat.read",
"warnings.warn",
"nbformat.v4.nbbase.NotebookNode",
"nbformat.v4.nbbase.new_code_cell",
"nbformat.v4.nbbase.new_notebook"
] |
[((13930, 13957), 'copy.deepcopy', 'deepcopy', (['notebook.metadata'], {}), '(notebook.metadata)\n', (13938, 13957), False, 'from copy import copy, deepcopy\n'), ((14010, 14019), 'copy.copy', 'copy', (['fmt'], {}), '(fmt)\n', (14014, 14019), False, 'from copy import copy, deepcopy\n'), ((16914, 17109), 'warnings.warn', 'warnings.warn', (['"""readf is deprecated. Please use read instead, and pass the fmt argument with an explicit fmt=... (see https://github.com/mwouts/jupytext/issues/262)"""', 'DeprecationWarning'], {}), "(\n 'readf is deprecated. Please use read instead, and pass the fmt argument with an explicit fmt=... (see https://github.com/mwouts/jupytext/issues/262)'\n , DeprecationWarning)\n", (16927, 17109), False, 'import warnings\n'), ((17288, 17485), 'warnings.warn', 'warnings.warn', (['"""writef is deprecated. Please use write instead, and pass the fmt argument with an explicit fmt=... (see https://github.com/mwouts/jupytext/issues/262)"""', 'DeprecationWarning'], {}), "(\n 'writef is deprecated. Please use write instead, and pass the fmt argument with an explicit fmt=... (see https://github.com/mwouts/jupytext/issues/262)'\n , DeprecationWarning)\n", (17301, 17485), False, 'import warnings\n'), ((4967, 5011), 'nbformat.v4.nbbase.new_notebook', 'new_notebook', ([], {'cells': 'cells', 'metadata': 'metadata'}), '(cells=cells, metadata=metadata)\n', (4979, 5011), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((11283, 11292), 'copy.copy', 'copy', (['fmt'], {}), '(fmt)\n', (11287, 11292), False, 'from copy import copy, deepcopy\n'), ((11428, 11470), 'nbformat.reads', 'nbformat.reads', (['text', 'as_version'], {}), '(text, as_version, **kwargs)\n', (11442, 11470), False, 'import nbformat\n'), ((12758, 12774), 'sys.stdin.read', 'sys.stdin.read', ([], {}), '()\n', (12772, 12774), False, 'import sys\n'), ((12912, 12932), 'os.path.splitext', 'os.path.splitext', (['fp'], {}), '(fp)\n', (12928, 12932), False, 'import os\n'), ((12947, 12962), 'copy.copy', 'copy', (['(fmt or {})'], {}), '(fmt or {})\n', (12951, 12962), False, 'from copy import copy, deepcopy\n'), ((15840, 15860), 'os.path.splitext', 'os.path.splitext', (['fp'], {}), '(fp)\n', (15856, 15860), False, 'import os\n'), ((15875, 15890), 'copy.copy', 'copy', (['(fmt or {})'], {}), '(fmt or {})\n', (15879, 15890), False, 'from copy import copy, deepcopy\n'), ((13097, 13126), 'io.open', 'io.open', (['fp'], {'encoding': '"""utf-8"""'}), "(fp, encoding='utf-8')\n", (13104, 13126), False, 'import io\n'), ((13341, 13380), 'nbformat.read', 'nbformat.read', (['fp', 'as_version'], {}), '(fp, as_version, **kwargs)\n', (13354, 13380), False, 'import nbformat\n'), ((14432, 14558), 'nbformat.v4.nbbase.NotebookNode', 'NotebookNode', ([], {'nbformat': 'notebook.nbformat', 'nbformat_minor': 'notebook.nbformat_minor', 'metadata': 'metadata', 'cells': 'notebook.cells'}), '(nbformat=notebook.nbformat, nbformat_minor=notebook.\n nbformat_minor, metadata=metadata, cells=notebook.cells)\n', (14444, 14558), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((16007, 16041), 'io.open', 'io.open', (['fp', '"""w"""'], {'encoding': '"""utf-8"""'}), "(fp, 'w', encoding='utf-8')\n", (16014, 16041), False, 'import io\n'), ((16600, 16624), 'os.path.dirname', 'os.path.dirname', (['nb_file'], {}), '(nb_file)\n', (16615, 16624), False, 'import os\n'), ((16654, 16675), 'os.path.isdir', 'os.path.isdir', (['nb_dir'], {}), '(nb_dir)\n', (16667, 16675), False, 'import os\n'), ((16689, 16774), 
'logging.log', 'logging.log', (['logging.WARNING', '"""[jupytext] creating missing directory %s"""', 'nb_dir'], {}), "(logging.WARNING, '[jupytext] creating missing directory %s', nb_dir\n )\n", (16700, 16774), False, 'import logging\n'), ((16782, 16801), 'os.makedirs', 'os.makedirs', (['nb_dir'], {}), '(nb_dir)\n', (16793, 16801), False, 'import os\n'), ((3604, 3646), 'nbformat.v4.nbbase.new_code_cell', 'new_code_cell', ([], {'source': '"""%matplotlib inline"""'}), "(source='%matplotlib inline')\n", (3617, 3646), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((5871, 5975), 'nbformat.v4.nbbase.NotebookNode', 'NotebookNode', ([], {'nbformat': 'nb.nbformat', 'nbformat_minor': 'nb.nbformat_minor', 'metadata': 'metadata', 'cells': 'cells'}), '(nbformat=nb.nbformat, nbformat_minor=nb.nbformat_minor,\n metadata=metadata, cells=cells)\n', (5883, 5975), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((6928, 7032), 'nbformat.v4.nbbase.NotebookNode', 'NotebookNode', ([], {'nbformat': 'nb.nbformat', 'nbformat_minor': 'nb.nbformat_minor', 'metadata': 'metadata', 'cells': 'cells'}), '(nbformat=nb.nbformat, nbformat_minor=nb.nbformat_minor,\n metadata=metadata, cells=cells)\n', (6940, 7032), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((7358, 7391), 'copy.deepcopy', 'deepcopy', (['(metadata or nb.metadata)'], {}), '(metadata or nb.metadata)\n', (7366, 7391), False, 'from copy import copy, deepcopy\n'), ((5638, 5695), 'nbformat.v4.nbbase.new_code_cell', 'new_code_cell', ([], {'source': 'cell.source', 'metadata': 'cell_metadata'}), '(source=cell.source, metadata=cell_metadata)\n', (5651, 5695), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((5752, 5839), 'nbformat.v4.nbbase.NotebookNode', 'NotebookNode', ([], {'source': 'cell.source', 'metadata': 'cell_metadata', 'cell_type': 'cell.cell_type'}), '(source=cell.source, metadata=cell_metadata, cell_type=cell.\n cell_type)\n', (5764, 5839), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((6694, 6751), 'nbformat.v4.nbbase.new_code_cell', 'new_code_cell', ([], {'source': 'cell.source', 'metadata': 'cell_metadata'}), '(source=cell.source, metadata=cell_metadata)\n', (6707, 6751), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n'), ((6808, 6895), 'nbformat.v4.nbbase.NotebookNode', 'NotebookNode', ([], {'source': 'cell.source', 'metadata': 'cell_metadata', 'cell_type': 'cell.cell_type'}), '(source=cell.source, metadata=cell_metadata, cell_type=cell.\n cell_type)\n', (6820, 6895), False, 'from nbformat.v4.nbbase import new_notebook, new_code_cell, NotebookNode\n')]
|
# UI.Flow 1.2 and earlier: buttonA, buttonB, buttonC
# UI.Flow 1.3 and later: btnA, btnB, btnC
from m5stack import lcd, btnA, btnB, btnC
from dht12 import DHT12
from bmp280 import BMP280
import i2c_bus
import machine
import time
import math
import gc
import ambient

# Channel ID and write key of the Ambient channel to send data to
AMBIENT_CHANNEL_ID = '26938'
AMBIENT_WRITE_KEY = '<KEY>'

# MH-Z19B command: read the CO2 concentration
READ_CO2_CONCENTRATION = bytearray(b'\xff\x01\x86\x00\x00\x00\x00\x00\x79')
# MH-Z19B command: zero point calibration
ZERO_POINT_CALIBRATION = bytearray(b'\xff\x01\x87\x00\x00\x00\x00\x00\x78')
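
# Illustrative note (added): the last byte of each MH-Z19B frame is a checksum
# over bytes 1..7, computed as (0xff - (sum & 0xff) + 1) & 0xff, the same rule
# implemented by mhz19_checksum() below. For the read command, 0x01 + 0x86 = 0x87
# and (0xff - 0x87 + 1) & 0xff == 0x79, matching the trailing byte above.
assert ((0xff - (sum(READ_CO2_CONCENTRATION[1:8]) & 0xff) + 1) & 0xff) == READ_CO2_CONCENTRATION[8]
assert ((0xff - (sum(ZERO_POINT_CALIBRATION[1:8]) & 0xff) + 1) & 0xff) == ZERO_POINT_CALIBRATION[8]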

class Meter:
    def __init__(self, x, y, w, h, tick_s, tick_e, color, title, value_format):
        self.x = x                        # meter position (left)
        self.y = y                        # meter position (top)
        self.w = w                        # meter width
        self.h = h                        # meter height
        self.tick_s = tick_s              # minimum tick value
        self.tick_e = tick_e              # maximum tick value
        self.title = title
        self.value_format = value_format  # format used for the text readout
        self.center_x = x + w // 2        # needle origin
        self.center_y = y + int(h * 0.9)  # needle origin
        self.prev_value = tick_s
        self.prev_angle = None
        lcd.roundrect(x, y, w, h, h // 10, lcd.BLACK, lcd.WHITE)
        lcd.arc(self.center_x, self.center_y, int(h * 0.67), int(h * 0.07), -50, 50, color, color)
        lcd.arc(self.center_x, self.center_y, int(h * 0.6), 2, -50, 50, lcd.BLACK)
        # Font for the tick labels
        if self.w == win_w:
            lcd.font(lcd.FONT_Default, transparent=False)
        else:
            lcd.font(lcd.FONT_DefaultSmall, transparent=True)
        fw, fh = lcd.fontSize()
        tick = tick_s
        tick_i = (tick_e - tick_s) // 4
        for r in range(-50, 51, 5):
            if r % 25 == 0:
                # Split the scale from min to max into four sections and label them
                lcd.lineByAngle(self.center_x - 1, self.center_y, int(h * 0.6), int(h * 0.1), r, lcd.BLACK)
                lcd.lineByAngle(self.center_x, self.center_y, int(h * 0.6), int(h * 0.1), r, lcd.BLACK)
                tick_text = str(tick)
                text_width = lcd.textWidth(tick_text)
                lcd.print(tick_text, self.center_x + int(math.sin(math.radians(r)) * h * 0.7) - text_width // 2,
                          self.center_y - int(math.cos(math.radians(r)) * h * 0.7) - fh,
                          lcd.BLACK)
                tick += tick_i
            else:
                # Minor tick marks
                lcd.lineByAngle(self.center_x, self.center_y, int(h * 0.6), int(h * 0.05), r, lcd.BLACK)

    def update(self, value):
        # Also show the measured value as text
        if self.w == win_w:
            lcd.font(lcd.FONT_DejaVu24, transparent=False)
        else:
            lcd.font(lcd.FONT_DejaVu18, transparent=False)
        fw, fh = lcd.fontSize()
        if value is not None:
            angle = int((value - self.tick_s) / (self.tick_e - self.tick_s) * 100 - 50)
            if angle != self.prev_angle:
                # Erase the needle drawn for the previous value
                if self.prev_angle is not None:
                    for i in range(-1, 2):
                        lcd.lineByAngle(self.center_x + i, self.center_y, int(self.h * 0.15), int(self.h * 0.42),
                                        self.prev_angle, lcd.WHITE)
                # Draw the needle for the current value
                for i in range(-1, 2):
                    lcd.lineByAngle(self.center_x + i, self.center_y, int(self.h * 0.15), int(self.h * 0.42),
                                    angle, lcd.RED)
                if self.title != '':
                    lcd.print(self.title, self.center_x - lcd.textWidth(self.title) // 2, self.y + self.h - int(fh * 2.4), lcd.BLACK)
                self.prev_angle = angle
            if value != self.prev_value:
                text = self.value_format.format(value)
                lcd.print(text, self.center_x - lcd.textWidth(text) // 2, self.y + self.h - int(fh * 1.2), lcd.BLACK)
                self.prev_value = value
        else:
            text = self.value_format.format(self.prev_value)
            lcd.print(text, self.center_x - lcd.textWidth(text) // 2, self.y + self.h - int(fh * 1.2), lcd.RED)
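
# Illustrative helper (added): the value-to-angle mapping used in Meter.update()
# projects the tick range linearly onto the -50..+50 degree arc drawn in __init__.
def _meter_angle(value, tick_s, tick_e):
    return int((value - tick_s) / (tick_e - tick_s) * 100 - 50)

# For a hypothetical CO2 meter with tick_s=400 and tick_e=1600:
# _meter_angle(400, 400, 1600) == -50, _meter_angle(1000, 400, 1600) == 0 (mid-scale),
# _meter_angle(1600, 400, 1600) == 50.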

class Clock_digital:
    def __init__(self, x, y):
        self.x = x  # clock position (left)
        self.y = y  # clock position (top)

    def update(self):
        lcd.font(lcd.FONT_DejaVu18, transparent=False)
        lcd.print('{:02d}:{:02d}:{:02d}'.format(*time.localtime()[3:6]), self.x, self.y, lcd.BLACK)

class Clock:
    def __init__(self, x, y, w, h, color):
        self.x = x  # clock position (left)
        self.y = y  # clock position (top)
        self.w = w  # clock width
        self.h = h  # clock height
        self.center_x = x + w // 2  # hand origin
        self.center_y = y + h // 2  # hand origin
        self.hour_deg = 0
        self.minute_deg = 0
        self.second_deg = 0
        lcd.roundrect(x, y, w, h, h // 10, lcd.BLACK, lcd.WHITE)
        # lcd.arc cannot draw 0 to 360 in one call, so combine two half arcs
        lcd.arc(self.center_x, self.center_y, int(h * 0.39), int(h * 0.08), 0, 180, color, color)
        lcd.arc(self.center_x, self.center_y, int(h * 0.39), int(h * 0.08), 180, 360, color, color)
        if self.w == win_w:
            lcd.font(lcd.FONT_Default, transparent=False)
        else:
            lcd.font(lcd.FONT_DefaultSmall, transparent=True)
        fw, fh = lcd.fontSize()
        hour = 12
        for r in range(0, 360, 360 // 60):
            if r % (360 // 12) == 0:
                # Draw a black dot and a numeral at the 1-12 positions
                lcd.circle(self.center_x + int(math.sin(math.radians(r)) * h / 2 * 0.7),
                           self.center_y - int(math.cos(math.radians(r)) * h / 2 * 0.7), 2, lcd.BLACK, lcd.BLACK)
                hour_text = str(hour)
                text_width = lcd.textWidth(hour_text)
                lcd.print(hour_text, self.center_x + int(math.sin(math.radians(r)) * h / 2 * 0.85) - text_width // 2,
                          self.center_y - int(math.cos(math.radians(r)) * h / 2 * 0.85) - fh // 2,
                          lcd.BLACK)
                hour = (hour + 1) % 12
            else:
                # Minute dots
                lcd.pixel(self.center_x + int(math.sin(math.radians(r)) * h / 2 * 0.7),
                          self.center_y - int(math.cos(math.radians(r)) * h / 2 * 0.7), lcd.BLACK)

    def update(self):
        def needle(n, m, deg, l, color):
            # Draw (or erase) a hand as m parallel one-pixel lines, offset
            # perpendicular to the rough direction the hand points in
            for i in range(n, n + m):
                if deg >= 315 or deg < 45 or deg >= 135 and deg < 225:
                    x, y = i, 0
                else:
                    x, y = 0, i
                lcd.lineByAngle(self.center_x + x, self.center_y + y,
                                0, l, deg, color)
        # Compute the angle of the hour, minute and second hands
        (year, month, mday, hour, minute, second, weekday, yearday) = time.localtime()
        second_deg = second * 6
        minute_deg = minute * 6 + second_deg // 60
        hour_deg = hour % 12 * 30 + minute_deg // 12
        # Erase the hour hand (skip if its angle is unchanged)
        if hour_deg != self.hour_deg:
            needle(-2, 4, self.hour_deg, int(self.h / 2 * 0.3), lcd.WHITE)
        # Erase the minute hand (skip if its angle is unchanged)
        if minute_deg != self.minute_deg:
            needle(-1, 2, self.minute_deg, int(self.h / 2 * 0.45), lcd.WHITE)
        # Erase the second hand
        needle(0, 1, self.second_deg, int(self.h / 2 * 0.6), lcd.WHITE)
        self.second_deg = second_deg
        self.minute_deg = minute_deg
        self.hour_deg = hour_deg
        # Draw the hour hand (4 lines)
        needle(-2, 4, hour_deg, int(self.h / 2 * 0.3), lcd.BLACK)
        # Draw the minute hand (2 lines)
        needle(-1, 2, minute_deg, int(self.h / 2 * 0.45), lcd.BLACK)
        # Draw the second hand (1 line)
        needle(0, 1, self.second_deg, int(self.h / 2 * 0.6), lcd.RED)
        # Red dot at the center
        lcd.circle(self.center_x, self.center_y, 3, lcd.RED, lcd.RED)
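
# Illustrative helper (added): the hand angles computed in Clock.update().
# The minute hand creeps with the seconds and the hour hand with the minutes.
def _hand_degrees(hour, minute, second):
    second_deg = second * 6                       # 360 deg / 60 s
    minute_deg = minute * 6 + second_deg // 60
    hour_deg = hour % 12 * 30 + minute_deg // 12
    return hour_deg, minute_deg, second_deg

# _hand_degrees(3, 30, 0) == (105, 180, 0): at 3:30 the hour hand sits halfway
# between 3 o'clock (90 deg) and 4 o'clock (120 deg).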

def mhz19_checksum(value):
    # Checksum over bytes 1..7 of an MH-Z19B frame
    s = 0
    for v in value[1:8]:
        s = (s + v) & 0xff
    return (0xff - s + 1) & 0xff


def mhz19_response(mhz19_value, checksum):
    # Log an unexpected MH-Z19B response together with its expected checksum
    print('{}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}'.format(*time.localtime()[:6]), ' Script Name: ', __name__)
    print('{} [x{:02x}]'.format(' '.join('x{:02x}'.format(v) for v in mhz19_value), checksum))

def env_meter_update(meter_mode):
    while True:
        # Sleep for the remaining milliseconds until the next full second
        time.sleep_ms(1000 - int(time.time() % 1 * 1000))
        # Update each meter and the clock
        localtime = time.localtime()
        localtime_str = '{}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}'.format(*localtime[:6])
        # Refresh the clock display
        clock.update()
        try:
            # Read the humidity from the DHT12
            dht12.measure()
            h = dht12.humidity()
            # Read temperature and pressure from the BMP280
            t, p = bmp280.values
        except Exception as e:
            print('{}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}'.format(*time.localtime()[:6]), ' Script Name: ', __name__)
            print('Exception: ', e)
            t, h, p = None, None, None
        # Read the CO2 concentration and temperature from the MH-Z19B
        mhz19_value = bytearray(9)
        mhz19.write(READ_CO2_CONCENTRATION)
        mhz19.readinto(mhz19_value, len(mhz19_value))
        checksum = mhz19_checksum(mhz19_value)
        c = None
        if mhz19_value[0] == 0xff and mhz19_value[1] == 0x86 and mhz19_value[8] == checksum:
            # CO2 concentration
            c = mhz19_value[2] * 256 + mhz19_value[3]
            # Temperature
            mhz19_t = mhz19_value[4] - 40
        else:
            mhz19_response(mhz19_value, checksum)
        # Show each reading on its meter
        if meter_mode == 0 or meter_mode == 1:
            t_meter.update(t)
        if meter_mode == 0 or meter_mode == 2:
            h_meter.update(h)
        if meter_mode == 0 or meter_mode == 3:
            p_meter.update(p)
        if meter_mode == 0 or meter_mode == 4:
            c_meter.update(c)
        # Send to Ambient once per minute,
        # i.e. when localtime[5] (the seconds field) == 0
        if localtime[5] == 0 and t is not None:
            try:
                if c is None:
                    am.send({'created': localtime_str,
                             'd1': dht12.temperature(), 'd2': h, 'd3': t, 'd4': p})
                else:
                    am.send({'created': localtime_str,
                             'd1': dht12.temperature(), 'd2': h, 'd3': t, 'd4': p, 'd6': c, 'd7': mhz19_t})
            except Exception as e:
                print('{}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}'.format(*time.localtime()[:6]), ' Script Name: ', __name__)
                print('Ambient send error: ', e)
        gc.collect()
        # Button A switches the display mode
        if btnA.wasPressed():
            return
        # Button C triggers an MH-Z19B zero point calibration
        if btnC.wasPressed():
            mhz19.write(ZERO_POINT_CALIBRATION)
            mhz19.readinto(mhz19_value, len(mhz19_value))
            checksum = mhz19_checksum(mhz19_value)
            mhz19_response(mhz19_value, checksum)

# When running from APP.LIST instead of from M5 UI.Flow, the program has to
# establish the network connection itself.
# Connect network
#
# UI.Flow 1.2 and earlier:
# import wifisetup
# wifisetup.auto_connect()
#
# UI.Flow 1.3 and later:
import wifiCfg
wifiCfg.autoConnect(lcdShow=True)

# Synchronize the clock to Japan time (JST)
# UI.Flow 1.2 and earlier:
'''
rtc = machine.RTC()
rtc.ntp_sync('ntp.nict.jp', tz='JST-9')
# On M5GO firmware v0.11, passing tz to ntp_sync raises an error;
# work around it as follows:
# rtc.ntp_sync('ntp.nict.jp')
# sys.tz('JST-9')
# Synchronization takes about 100 ms to complete
for i in range(100):
    if rtc.synced():
        print('synced.')
        break
    print(i, end=' ')
    time.sleep_ms(10)
'''
# UI.Flow 1.3 and later:
# ntptime_custom is a revised version of the module below (an offset argument added):
# https://github.com/micropython/micropython/blob/master/ports/esp8266/modules/ntptime.py
import ntptime_custom
ntptime_custom.settime(9*60*60)  # +09:00:00 for JST

# UI.Flow 1.2 and earlier used time.strftime for timestamps, as below,
# print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()), ' Script Name: ', __name__)
# but time.strftime is unavailable in UI.Flow 1.3 and later, so format manually:
print('{}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}'.format(*time.localtime()[:6]), ' Script Name: ', __name__)
am = ambient.Ambient(AMBIENT_CHANNEL_ID, AMBIENT_WRITE_KEY)
i2c = i2c_bus.get(i2c_bus.M_BUS)
dht12 = DHT12(i2c)
bmp280 = BMP280(i2c)
mhz19 = machine.UART(2, tx=17, rx=16)
mhz19.init(9600, bits=8, parity=None, stop=1)
lcd.setColor(lcd.BLACK, lcd.WHITE)
lcd.setTextColor(lcd.BLACK, lcd.WHITE)
lcd.clear(lcd.BLACK)
win_w, win_h = lcd.winsize() # (320, 240)
meter_mode = 0
while True:
    lcd.clear(lcd.BLACK)
    if meter_mode == 0:
        # Split the screen into quadrants: thermometer, hygrometer, barometer, CO2 meter.
        # The value formats are padded with spaces so that the previous text is erased
        # when the number of digits shrinks (e.g. pressure dropping from 1000 to 999).
        t_meter = Meter(0, 0, win_w // 2, win_h // 2, 0, 40, lcd.ORANGE, 'Temp', ' {:.1f}C ')
        h_meter = Meter(win_w // 2, 0, win_w // 2, win_h // 2, 20, 100, lcd.CYAN, 'Hum', ' {:.1f}% ')
        p_meter = Meter(0, win_h // 2, win_w // 2, win_h // 2, 960, 1040, lcd.YELLOW, 'Baro', ' {:.1f}hPa ')
        c_meter = Meter(win_w // 2, win_h // 2, win_w // 2, win_h // 2, 400, 1600, lcd.GREENYELLOW, 'CO2', ' {:.0f}ppm ')
        # Small analog clock in the center
        clock = Clock(win_w // 2 - win_w // 8, win_h // 2 - win_h // 8 - 10, win_w // 4, win_h // 4, lcd.LIGHTGREY)
        # clock = Clock_digital(win_w // 2 - 43, win_h // 2 - 8)
    elif meter_mode == 1:
        # Full-screen thermometer
        t_meter = Meter(0, 0, win_w, win_h, 0, 40, lcd.ORANGE, 'Temp', ' {:.1f}C ')
        clock = Clock_digital(win_w // 2 - 43, 10)
    elif meter_mode == 2:
        # Full-screen hygrometer
        h_meter = Meter(0, 0, win_w, win_h, 20, 100, lcd.CYAN, 'Hum', ' {:.1f}% ')
        clock = Clock_digital(win_w // 2 - 43, 10)
    elif meter_mode == 3:
        # Full-screen barometer
        p_meter = Meter(0, 0, win_w, win_h, 960, 1040, lcd.YELLOW, 'Baro', ' {:.1f}hPa ')
        clock = Clock_digital(win_w // 2 - 43, 10)
    elif meter_mode == 4:
        # Full-screen CO2 meter
        c_meter = Meter(0, 0, win_w, win_h, 400, 1600, lcd.GREENYELLOW, 'CO2', ' {:.0f}ppm ')
        clock = Clock_digital(win_w // 2 - 43, 10)
    elif meter_mode == 5:
        # Full-screen analog clock
        clock = Clock(0, 0, win_w, win_h, lcd.LIGHTGREY)
    env_meter_update(meter_mode)
    meter_mode = (meter_mode + 1) % 6
|
[
"m5stack.lcd.setTextColor",
"m5stack.btnA.wasPressed",
"gc.collect",
"ntptime_custom.settime",
"machine.UART",
"m5stack.lcd.setColor",
"m5stack.lcd.winsize",
"m5stack.lcd.roundrect",
"math.radians",
"ambient.Ambient",
"time.localtime",
"bmp280.BMP280",
"i2c_bus.get",
"m5stack.lcd.font",
"m5stack.btnC.wasPressed",
"m5stack.lcd.textWidth",
"m5stack.lcd.circle",
"m5stack.lcd.fontSize",
"time.time",
"m5stack.lcd.clear",
"m5stack.lcd.lineByAngle",
"dht12.DHT12",
"wifiCfg.autoConnect"
] |
[((10912, 10945), 'wifiCfg.autoConnect', 'wifiCfg.autoConnect', ([], {'lcdShow': '(True)'}), '(lcdShow=True)\n', (10931, 10945), False, 'import wifiCfg\n'), ((11463, 11498), 'ntptime_custom.settime', 'ntptime_custom.settime', (['(9 * 60 * 60)'], {}), '(9 * 60 * 60)\n', (11485, 11498), False, 'import ntptime_custom\n'), ((11819, 11873), 'ambient.Ambient', 'ambient.Ambient', (['AMBIENT_CHANNEL_ID', 'AMBIENT_WRITE_KEY'], {}), '(AMBIENT_CHANNEL_ID, AMBIENT_WRITE_KEY)\n', (11834, 11873), False, 'import ambient\n'), ((11881, 11907), 'i2c_bus.get', 'i2c_bus.get', (['i2c_bus.M_BUS'], {}), '(i2c_bus.M_BUS)\n', (11892, 11907), False, 'import i2c_bus\n'), ((11916, 11926), 'dht12.DHT12', 'DHT12', (['i2c'], {}), '(i2c)\n', (11921, 11926), False, 'from dht12 import DHT12\n'), ((11936, 11947), 'bmp280.BMP280', 'BMP280', (['i2c'], {}), '(i2c)\n', (11942, 11947), False, 'from bmp280 import BMP280\n'), ((11957, 11986), 'machine.UART', 'machine.UART', (['(2)'], {'tx': '(17)', 'rx': '(16)'}), '(2, tx=17, rx=16)\n', (11969, 11986), False, 'import machine\n'), ((12034, 12068), 'm5stack.lcd.setColor', 'lcd.setColor', (['lcd.BLACK', 'lcd.WHITE'], {}), '(lcd.BLACK, lcd.WHITE)\n', (12046, 12068), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((12069, 12107), 'm5stack.lcd.setTextColor', 'lcd.setTextColor', (['lcd.BLACK', 'lcd.WHITE'], {}), '(lcd.BLACK, lcd.WHITE)\n', (12085, 12107), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((12108, 12128), 'm5stack.lcd.clear', 'lcd.clear', (['lcd.BLACK'], {}), '(lcd.BLACK)\n', (12117, 12128), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((12145, 12158), 'm5stack.lcd.winsize', 'lcd.winsize', ([], {}), '()\n', (12156, 12158), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((12204, 12224), 'm5stack.lcd.clear', 'lcd.clear', (['lcd.BLACK'], {}), '(lcd.BLACK)\n', (12213, 12224), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((1133, 1189), 'm5stack.lcd.roundrect', 'lcd.roundrect', (['x', 'y', 'w', 'h', '(h // 10)', 'lcd.BLACK', 'lcd.WHITE'], {}), '(x, y, w, h, h // 10, lcd.BLACK, lcd.WHITE)\n', (1146, 1189), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((1577, 1591), 'm5stack.lcd.fontSize', 'lcd.fontSize', ([], {}), '()\n', (1589, 1591), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((2743, 2757), 'm5stack.lcd.fontSize', 'lcd.fontSize', ([], {}), '()\n', (2755, 2757), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((4246, 4292), 'm5stack.lcd.font', 'lcd.font', (['lcd.FONT_DejaVu18'], {'transparent': '(False)'}), '(lcd.FONT_DejaVu18, transparent=False)\n', (4254, 4292), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((4740, 4796), 'm5stack.lcd.roundrect', 'lcd.roundrect', (['x', 'y', 'w', 'h', '(h // 10)', 'lcd.BLACK', 'lcd.WHITE'], {}), '(x, y, w, h, h // 10, lcd.BLACK, lcd.WHITE)\n', (4753, 4796), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((5217, 5231), 'm5stack.lcd.fontSize', 'lcd.fontSize', ([], {}), '()\n', (5229, 5231), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((6655, 6671), 'time.localtime', 'time.localtime', ([], {}), '()\n', (6669, 6671), False, 'import time\n'), ((7603, 7664), 'm5stack.lcd.circle', 'lcd.circle', (['self.center_x', 'self.center_y', '(3)', 'lcd.RED', 'lcd.RED'], {}), '(self.center_x, self.center_y, 3, lcd.RED, lcd.RED)\n', (7613, 7664), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((8225, 8241), 'time.localtime', 'time.localtime', ([], {}), '()\n', (8239, 8241), False, 'import time\n'), ((10326, 10338), 'gc.collect', 'gc.collect', ([], {}), 
'()\n', (10336, 10338), False, 'import gc\n'), ((10386, 10403), 'm5stack.btnA.wasPressed', 'btnA.wasPressed', ([], {}), '()\n', (10401, 10403), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((10487, 10504), 'm5stack.btnC.wasPressed', 'btnC.wasPressed', ([], {}), '()\n', (10502, 10504), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((1437, 1482), 'm5stack.lcd.font', 'lcd.font', (['lcd.FONT_Default'], {'transparent': '(False)'}), '(lcd.FONT_Default, transparent=False)\n', (1445, 1482), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((1509, 1558), 'm5stack.lcd.font', 'lcd.font', (['lcd.FONT_DefaultSmall'], {'transparent': '(True)'}), '(lcd.FONT_DefaultSmall, transparent=True)\n', (1517, 1558), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((2605, 2651), 'm5stack.lcd.font', 'lcd.font', (['lcd.FONT_DejaVu24'], {'transparent': '(False)'}), '(lcd.FONT_DejaVu24, transparent=False)\n', (2613, 2651), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((2678, 2724), 'm5stack.lcd.font', 'lcd.font', (['lcd.FONT_DejaVu18'], {'transparent': '(False)'}), '(lcd.FONT_DejaVu18, transparent=False)\n', (2686, 2724), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((5077, 5122), 'm5stack.lcd.font', 'lcd.font', (['lcd.FONT_Default'], {'transparent': '(False)'}), '(lcd.FONT_Default, transparent=False)\n', (5085, 5122), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((5149, 5198), 'm5stack.lcd.font', 'lcd.font', (['lcd.FONT_DefaultSmall'], {'transparent': '(True)'}), '(lcd.FONT_DefaultSmall, transparent=True)\n', (5157, 5198), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((2044, 2068), 'm5stack.lcd.textWidth', 'lcd.textWidth', (['tick_text'], {}), '(tick_text)\n', (2057, 2068), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((5637, 5661), 'm5stack.lcd.textWidth', 'lcd.textWidth', (['hour_text'], {}), '(hour_text)\n', (5650, 5661), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((6457, 6528), 'm5stack.lcd.lineByAngle', 'lcd.lineByAngle', (['(self.center_x + x)', '(self.center_y + y)', '(0)', 'l', 'deg', 'color'], {}), '(self.center_x + x, self.center_y + y, 0, l, deg, color)\n', (6472, 6528), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((11762, 11778), 'time.localtime', 'time.localtime', ([], {}), '()\n', (11776, 11778), False, 'import time\n'), ((7892, 7908), 'time.localtime', 'time.localtime', ([], {}), '()\n', (7906, 7908), False, 'import time\n'), ((4036, 4055), 'm5stack.lcd.textWidth', 'lcd.textWidth', (['text'], {}), '(text)\n', (4049, 4055), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((4342, 4358), 'time.localtime', 'time.localtime', ([], {}), '()\n', (4356, 4358), False, 'import time\n'), ((3807, 3826), 'm5stack.lcd.textWidth', 'lcd.textWidth', (['text'], {}), '(text)\n', (3820, 3826), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((8155, 8166), 'time.time', 'time.time', ([], {}), '()\n', (8164, 8166), False, 'import time\n'), ((3546, 3571), 'm5stack.lcd.textWidth', 'lcd.textWidth', (['self.title'], {}), '(self.title)\n', (3559, 3571), False, 'from m5stack import lcd, btnA, btnB, btnC\n'), ((8638, 8654), 'time.localtime', 'time.localtime', ([], {}), '()\n', (8652, 8654), False, 'import time\n'), ((10217, 10233), 'time.localtime', 'time.localtime', ([], {}), '()\n', (10231, 10233), False, 'import time\n'), ((2135, 2150), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (2147, 2150), False, 'import math\n'), ((2248, 2263), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (2260, 2263), 
False, 'import math\n'), ((5423, 5438), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (5435, 5438), False, 'import math\n'), ((5512, 5527), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (5524, 5527), False, 'import math\n'), ((6050, 6065), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (6062, 6065), False, 'import math\n'), ((6138, 6153), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (6150, 6153), False, 'import math\n'), ((5728, 5743), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (5740, 5743), False, 'import math\n'), ((5846, 5861), 'math.radians', 'math.radians', (['r'], {}), '(r)\n', (5858, 5861), False, 'import math\n')]
|