code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from typing import TYPE_CHECKING, Any, Dict, List
from aiopoke.utils.minimal_resources import MinimalResource
from aiopoke.utils.resource import Resource
if TYPE_CHECKING:
from aiopoke.objects.resources import EncounterConditionValue, EncounterMethod
class Encounter(Resource):
    """An encounter entry: level bounds, trigger conditions, odds and method.

    Built from a raw API payload; nested dicts are wrapped in
    ``MinimalResource`` handles.
    """

    min_level: int
    max_level: int
    condition_values: List[MinimalResource["EncounterConditionValue"]]
    chance: int
    method: MinimalResource["EncounterMethod"]

    def __init__(
        self,
        *,
        min_level: int,
        max_level: int,
        condition_values: List[Dict[str, Any]],
        chance: int,
        method: Dict[str, Any],
    ):
        self.min_level = min_level
        self.max_level = max_level
        # Wrap each raw condition dict in a lightweight resource handle.
        self.condition_values = list(
            MinimalResource(**raw) for raw in condition_values
        )
        self.chance = chance
        self.method = MinimalResource(**method)
| [
"aiopoke.utils.minimal_resources.MinimalResource"
] | [((910, 935), 'aiopoke.utils.minimal_resources.MinimalResource', 'MinimalResource', ([], {}), '(**method)\n', (925, 935), False, 'from aiopoke.utils.minimal_resources import MinimalResource\n'), ((774, 808), 'aiopoke.utils.minimal_resources.MinimalResource', 'MinimalResource', ([], {}), '(**condition_value)\n', (789, 808), False, 'from aiopoke.utils.minimal_resources import MinimalResource\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup

# Package metadata, gathered in one place before invoking setup().
PACKAGE_INFO = dict(
    name='starsearch',
    version='0.3',
    description='Package to dig into the ESO archives',
    author='<NAME>',
    author_email='<EMAIL>',
    license='MIT',
    url='https://github.com/jdavidrcamacho/starsearch',
    packages=['starsearch'],
    install_requires=[
        'numpy',
        'astroquery',
        'astropy',
    ],
)

setup(**PACKAGE_INFO)
| [
"setuptools.setup"
] | [((77, 373), 'setuptools.setup', 'setup', ([], {'name': '"""starsearch"""', 'version': '"""0.3"""', 'description': '"""Package to dig into the ESO archives"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'url': '"""https://github.com/jdavidrcamacho/starsearch"""', 'packages': "['starsearch']", 'install_requires': "['numpy', 'astroquery', 'astropy']"}), "(name='starsearch', version='0.3', description=\n 'Package to dig into the ESO archives', author='<NAME>', author_email=\n '<EMAIL>', license='MIT', url=\n 'https://github.com/jdavidrcamacho/starsearch', packages=['starsearch'],\n install_requires=['numpy', 'astroquery', 'astropy'])\n", (82, 373), False, 'from setuptools import setup\n')] |
import os
import tempfile
from unittest.mock import patch
from galaxy.exceptions import (
ObjectNotFound,
ReferenceDataError,
)
from galaxy_test.driver import integration_util
# Reference-build fixture entries, one per genome: "<build key>\t<description>".
BUILDS_DATA = (
    "?\tunspecified (?)",
    "hg_test\tdescription of hg_test",
    "hg_test_nolen\tdescription of hg_test_nolen",
)
# Chromosome-length fixture entries: "<chrom>\t<length>".
LEN_DATA = (
    "chr1\t248956422",
    "chr2\t242193529",
    "chr3\t198295559",
)


def get_key(has_len_file=True):
    """Return the build key of the test genome with (or without) a len file."""
    index = 1 if has_len_file else 2
    return BUILDS_DATA[index].partition("\t")[0]
class GenomesTestCase(integration_util.IntegrationTestCase):
    """Integration tests for the genomes API endpoints.

    Small builds/len fixture files are written at configuration time and the
    Galaxy config is pointed at them.
    """

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        # Write the fixture files into a per-class temp dir and wire the
        # relevant config options to them.
        genomes_dir = cls.temp_config_dir("test_genomes")
        os.makedirs(genomes_dir)
        cls._setup_builds_file(config, genomes_dir)
        cls._setup_len_file(config, genomes_dir)

    @classmethod
    def _setup_builds_file(cls, config, genomes_dir):
        """Create builds file + set config option."""
        builds_file_path = os.path.join(genomes_dir, "builds.txt")
        config["builds_file_path"] = builds_file_path
        with open(builds_file_path, "w") as f:
            f.write("\n".join(BUILDS_DATA))

    @classmethod
    def _setup_len_file(cls, config, genomes_dir):
        """Create len file + set config option."""
        config["len_file_path"] = genomes_dir  # the config option is a dir
        key = get_key()
        len_file_path = os.path.join(genomes_dir, f"{key}.len")
        with open(len_file_path, "w") as f:
            f.write("\n".join(LEN_DATA))

    def test_index(self):
        # The index lists [description, key] pairs, i.e. the builds-file
        # columns reversed.
        response = self._get("genomes")
        self._assert_status_code_is(response, 200)
        rval = response.json()
        expected_data = [item.split("\t")[::-1] for item in BUILDS_DATA]
        assert rval == expected_data

    def test_show_valid(self):
        key = get_key()
        response = self._get(f"genomes/{key}")
        self._assert_status_code_is(response, 200)
        rval = response.json()
        assert rval["id"] == key
        assert len(rval["chrom_info"]) == len(LEN_DATA)

    def test_show_valid_no_refdata(self):
        # A known build without a len file should surface ReferenceDataError.
        key = get_key(has_len_file=False)
        response = self._get(f"genomes/{key}")
        self._assert_status_code_is(response, 500)
        assert response.json()["err_code"] == ReferenceDataError.err_code.code

    def test_show_invalid(self):
        # An unknown build key yields a 404 with ObjectNotFound's err_code.
        response = self._get("genomes/invalid")
        self._assert_status_code_is(response, 404)
        assert response.json()["err_code"] == ObjectNotFound.err_code.code

    def test_sequences(self):
        # Mock out reference-data access so the endpoint returns a known
        # sequence without any real genome data on disk.
        class RefDataMock:
            sequence = "test-value"

        key = get_key()
        with patch.object(self._app.genomes, "has_reference_data", return_value=True), patch.object(
            self._app.genomes, "_get_reference_data", return_value=RefDataMock()
        ):
            response = self._get(f"genomes/{key}/sequences")
        self._assert_status_code_is(response, 200)
        assert response.content == bytes(RefDataMock.sequence, "utf-8")

    def test_sequences_no_data(self):
        key = get_key()
        with patch.object(self._app.genomes, "has_reference_data", return_value=False):
            response = self._get(f"genomes/{key}/sequences")
        self._assert_status_code_is(response, 500)
        assert response.json()["err_code"] == ReferenceDataError.err_code.code

    def test_indexes(self):
        mock_key, mock_content, index_type, suffix = "mykey", "mydata", "fasta_indexes", ".fai"
        # write some data to a tempfile
        with tempfile.NamedTemporaryFile(dir=self._tempdir, suffix=suffix, mode="w", delete=False) as tf:
            tf.write(mock_content)
        # make a mock containing the path to the tempfile
        tmpfile_path = tf.name[: -len(suffix)]  # chop off the extension
        mock_data = [[mock_key, tmpfile_path]]
        with patch.object(self._app.tool_data_tables.data_tables[index_type], "data", new=mock_data):
            response = self._get(f"genomes/{mock_key}/indexes?type={index_type}")
        self._assert_status_code_is(response, 200)
        assert response.content == bytes(mock_content, "utf-8")
| [
"tempfile.NamedTemporaryFile",
"os.path.join",
"os.makedirs",
"unittest.mock.patch.object"
] | [((711, 735), 'os.makedirs', 'os.makedirs', (['genomes_dir'], {}), '(genomes_dir)\n', (722, 735), False, 'import os\n'), ((990, 1029), 'os.path.join', 'os.path.join', (['genomes_dir', '"""builds.txt"""'], {}), "(genomes_dir, 'builds.txt')\n", (1002, 1029), False, 'import os\n'), ((1419, 1458), 'os.path.join', 'os.path.join', (['genomes_dir', 'f"""{key}.len"""'], {}), "(genomes_dir, f'{key}.len')\n", (1431, 1458), False, 'import os\n'), ((2679, 2751), 'unittest.mock.patch.object', 'patch.object', (['self._app.genomes', '"""has_reference_data"""'], {'return_value': '(True)'}), "(self._app.genomes, 'has_reference_data', return_value=True)\n", (2691, 2751), False, 'from unittest.mock import patch\n'), ((3127, 3200), 'unittest.mock.patch.object', 'patch.object', (['self._app.genomes', '"""has_reference_data"""'], {'return_value': '(False)'}), "(self._app.genomes, 'has_reference_data', return_value=False)\n", (3139, 3200), False, 'from unittest.mock import patch\n'), ((3579, 3668), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'dir': 'self._tempdir', 'suffix': 'suffix', 'mode': '"""w"""', 'delete': '(False)'}), "(dir=self._tempdir, suffix=suffix, mode='w',\n delete=False)\n", (3606, 3668), False, 'import tempfile\n'), ((3898, 3989), 'unittest.mock.patch.object', 'patch.object', (['self._app.tool_data_tables.data_tables[index_type]', '"""data"""'], {'new': 'mock_data'}), "(self._app.tool_data_tables.data_tables[index_type], 'data',\n new=mock_data)\n", (3910, 3989), False, 'from unittest.mock import patch\n')] |
import numpy as np
import pandas as pd
import os
from tqdm import tqdm
import pacmap
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
import umap
def darius1(numberDirectory):
    """Load challenge PSV files and visualize them with PaCMAP and t-SNE.

    Args:
        numberDirectory: which data to load — 1 for training set A,
            2 for training set B, 3 for both sets.

    Raises:
        ValueError: if ``numberDirectory`` is not 1, 2 or 3.
    """
    path = ""
    set_a = ['training_setA/training/', 'p0']
    set_b = ['training_setB/training/', 'p1']
    if numberDirectory == 1:
        directorys = [set_a]
    elif numberDirectory == 2:
        directorys = [set_b]
    elif numberDirectory == 3:
        directorys = [set_a, set_b]
    else:
        # Previously an unrecognized value fell through to a NameError on
        # `directorys`; fail fast with a clear message instead.
        raise ValueError(
            f"numberDirectory must be 1, 2 or 3, got {numberDirectory!r}")

    # Read every pipe-separated file of every selected directory.
    dfs = []
    for directory, _file_head in directorys:
        for filename in tqdm(os.listdir(path + directory)):
            df_temp = pd.read_csv(path + directory + filename, skiprows=0, sep='|')
            dfs.append(df_temp)
    df = pd.concat(dfs)

    # Several NaN-handling variants (kept for experimentation).
    df_nan_zwero = df.replace(np.NaN, 0)
    df_nan_zwero.head(n=50)
    df_nan_none = df.replace(np.NaN, None)
    df_nan_none.head(n=50)
    df_nan_mean = df.fillna(df.mean())
    df_nan_mean.head(n=50)
    df_nan_none_2 = df.where(pd.notnull(df), None)
    df_nan_mean.head(n=50)

    ############################################################
    # PaCMAP embedding; n_neighbors=None selects the library's default.
    embedding = pacmap.PaCMAP(n_dims=2, n_neighbors=None, MN_ratio=0.5, FP_ratio=2.0)
    # fit the data (the index of transformed data matches the original data)
    X_transformed = embedding.fit_transform(df_nan_none_2.values, init="pca")
    plt.scatter(X_transformed[:, 0], X_transformed[:, 1], cmap="Spectral")
    plt.show()

    #############################################################
    # t-SNE embedding for comparison.
    X_embedded = TSNE(n_components=2, learning_rate='auto', init='random').fit_transform(df.values)
    fig, ax = plt.subplots(1, 1, figsize=(6, 6))
    # NOTE(review): c=list(df.columns) colors by column labels — presumably a
    # per-point coloring was intended; kept as-is pending confirmation.
    ax.scatter(X_transformed[:, 0], X_embedded[:, 1], cmap="Spectral", c=list(df.columns), s=0.6)
############################################################# | [
"pacmap.PaCMAP",
"os.listdir",
"pandas.read_csv",
"sklearn.manifold.TSNE",
"matplotlib.pyplot.scatter",
"pandas.notnull",
"pandas.concat",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((845, 859), 'pandas.concat', 'pd.concat', (['dfs'], {}), '(dfs)\n', (854, 859), True, 'import pandas as pd\n'), ((1428, 1497), 'pacmap.PaCMAP', 'pacmap.PaCMAP', ([], {'n_dims': '(2)', 'n_neighbors': 'None', 'MN_ratio': '(0.5)', 'FP_ratio': '(2.0)'}), '(n_dims=2, n_neighbors=None, MN_ratio=0.5, FP_ratio=2.0)\n', (1441, 1497), False, 'import pacmap\n'), ((1844, 1914), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X_transformed[:, 0]', 'X_transformed[:, 1]'], {'cmap': '"""Spectral"""'}), "(X_transformed[:, 0], X_transformed[:, 1], cmap='Spectral')\n", (1855, 1914), True, 'import matplotlib.pyplot as plt\n'), ((1919, 1929), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1927, 1929), True, 'import matplotlib.pyplot as plt\n'), ((2111, 2145), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(6, 6)'}), '(1, 1, figsize=(6, 6))\n', (2123, 2145), True, 'import matplotlib.pyplot as plt\n'), ((1127, 1141), 'pandas.notnull', 'pd.notnull', (['df'], {}), '(df)\n', (1137, 1141), True, 'import pandas as pd\n'), ((741, 802), 'pandas.read_csv', 'pd.read_csv', (['(path + directory + filename)'], {'skiprows': '(0)', 'sep': '"""|"""'}), "(path + directory + filename, skiprows=0, sep='|')\n", (752, 802), True, 'import pandas as pd\n'), ((2014, 2071), 'sklearn.manifold.TSNE', 'TSNE', ([], {'n_components': '(2)', 'learning_rate': '"""auto"""', 'init': '"""random"""'}), "(n_components=2, learning_rate='auto', init='random')\n", (2018, 2071), False, 'from sklearn.manifold import TSNE\n'), ((687, 715), 'os.listdir', 'os.listdir', (['(path + directory)'], {}), '(path + directory)\n', (697, 715), False, 'import os\n')] |
import torch
from torch import nn
from torch.autograd import Variable
import config
def init_weights(self):
    """Initialize every Conv2d in the module tree: Xavier-normal weights, zero bias."""
    for module in self.modules():
        if not isinstance(module, nn.Conv2d):
            continue
        nn.init.xavier_normal_(module.weight)
        if module.bias is not None:
            nn.init.constant_(module.bias, 0)
def conv(in_channels, out_channels, kernel_size, stride):
    """Return a Conv2d -> BatchNorm2d -> ReLU block (the conv carries no bias)."""
    layers = [
        nn.Conv2d(in_channels, out_channels, kernel_size, stride, bias=False),
        nn.BatchNorm2d(out_channels),
        nn.ReLU(inplace=True),
    ]
    return nn.Sequential(*layers)
def channel_shuffle(x, num_groups):
    """Interleave channels across groups (the ShuffleNet channel shuffle)."""
    batch, channels, height, width = x.size()
    grouped = x.reshape(batch, num_groups, channels // num_groups, height, width)
    transposed = grouped.permute(0, 2, 1, 3, 4)
    return transposed.reshape(batch, channels, height, width)
class BasicUnit(nn.Module):
    """ShuffleNetV2 basic unit: split channels, transform the right half, shuffle."""

    def __init__(self, in_channels, splits=2, groups=2):
        super(BasicUnit, self).__init__()
        self.in_channels = in_channels
        self.splits = splits
        self.groups = groups
        branch_channels = int(in_channels / self.splits)
        # Right branch: 1x1 conv -> 3x3 depthwise conv -> 1x1 conv.
        right_layers = [
            nn.Conv2d(branch_channels, branch_channels, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(branch_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(branch_channels, branch_channels, kernel_size=3, stride=1,
                      padding=1, bias=False, groups=branch_channels),
            nn.BatchNorm2d(branch_channels),
            nn.Conv2d(branch_channels, branch_channels, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(branch_channels),
            nn.ReLU(inplace=True),
        ]
        self.right = nn.Sequential(*right_layers)
        init_weights(self)

    def forward(self, x):
        # Left half passes through untouched; right half is transformed.
        x_left, x_right = torch.split(x, int(self.in_channels / self.splits), dim=1)
        merged = torch.cat([x_left, self.right(x_right)], dim=1)
        # print("Basic Unit", merged.size())
        return channel_shuffle(merged, self.groups)
class DownUnit(nn.Module):
    """ShuffleNetV2 downsampling unit: two stride-2 branches, concatenated then shuffled."""

    def __init__(self, in_channels, out_channels, groups=2):
        super(DownUnit, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.groups = groups
        half_out = self.out_channels // 2
        # Left branch: depthwise 3x3 (stride 2) then pointwise 1x1.
        left_layers = [
            nn.Conv2d(self.in_channels, self.in_channels, kernel_size=3, stride=2,
                      bias=False, groups=self.in_channels),
            nn.BatchNorm2d(self.in_channels),
            nn.Conv2d(self.in_channels, half_out, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(half_out),
            nn.ReLU(inplace=True),
        ]
        self.left = nn.Sequential(*left_layers)
        # Right branch: 1x1 -> depthwise 3x3 (stride 2) -> 1x1.
        right_layers = [
            nn.Conv2d(self.in_channels, self.in_channels, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(self.in_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(self.in_channels, self.in_channels, kernel_size=3, stride=2,
                      bias=False, groups=self.in_channels),
            nn.BatchNorm2d(self.in_channels),
            nn.Conv2d(self.in_channels, half_out, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(half_out),
            nn.ReLU(inplace=True),
        ]
        self.right = nn.Sequential(*right_layers)
        init_weights(self)

    def forward(self, x):
        merged = torch.cat([self.left(x), self.right(x)], dim=1)
        # print("Down Unit", merged.size())
        return channel_shuffle(merged, self.groups)
class ShuffleNetV2(nn.Module):
    """ShuffleNetV2 classifier (stage 4 disabled; sizes come from the config module)."""

    def __init__(self, n_class, net_size):
        super(ShuffleNetV2, self).__init__()
        channels = config.net_size[net_size]
        blocks = config.net_blocks
        self.conv1 = conv(in_channels=3, out_channels=channels[0],
                          kernel_size=config.conv1_kernel_size,
                          stride=config.conv1_stride)
        self.in_channels = channels[0]
        self.stage2 = self._make_stage(channels[1], blocks[0])
        self.stage3 = self._make_stage(channels[2], blocks[1])
        # self.stage4 = self._make_stage(channels[3], blocks[2])
        self.conv5 = conv(in_channels=channels[2],
                          out_channels=channels[3],
                          kernel_size=config.conv5_kernel_size,
                          stride=config.conv5_stride)
        self.global_pool = nn.AvgPool2d(kernel_size=config.global_pool_kernel_size)
        self.fc = nn.Linear(channels[3], n_class)

    def _make_stage(self, out_channels, num_blocks):
        """Build one stage: a DownUnit followed by num_blocks BasicUnits."""
        layers = [DownUnit(self.in_channels, out_channels)]
        layers.extend(BasicUnit(out_channels) for _ in range(num_blocks))
        self.in_channels = out_channels  # next stage starts from this width
        return nn.Sequential(*layers)

    def forward(self, x):
        out = self.conv1(x)
        out = self.stage2(out)
        out = self.stage3(out)
        # out = self.stage4(out)
        out = self.conv5(out)
        out = self.global_pool(out)
        out = out.view(out.size(0), -1)  # flatten to (batch, features)
        return self.fc(out)
def test():
    """Smoke-test: push one tiny batch through the network and print its output size."""
    net = ShuffleNetV2(2300, 2)
    batch = Variable(torch.randn(3, 3, 32, 32))
    out = net(batch)
    print("end", out.size())


if __name__ == '__main__':
    test()
| [
"torch.nn.BatchNorm2d",
"torch.nn.ReLU",
"torch.nn.init.constant_",
"torch.nn.Sequential",
"torch.nn.Conv2d",
"torch.nn.init.xavier_normal_",
"torch.nn.Linear",
"torch.nn.AvgPool2d",
"torch.randn",
"torch.cat"
] | [((395, 464), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels', 'kernel_size', 'stride'], {'bias': '(False)'}), '(in_channels, out_channels, kernel_size, stride, bias=False)\n', (404, 464), False, 'from torch import nn\n'), ((474, 502), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (488, 502), False, 'from torch import nn\n'), ((512, 533), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (519, 533), False, 'from torch import nn\n'), ((1791, 1826), 'torch.cat', 'torch.cat', (['[x_left, x_right]'], {'dim': '(1)'}), '([x_left, x_right], dim=1)\n', (1800, 1826), False, 'import torch\n'), ((3288, 3323), 'torch.cat', 'torch.cat', (['[x_left, x_right]'], {'dim': '(1)'}), '([x_left, x_right], dim=1)\n', (3297, 3323), False, 'import torch\n'), ((4342, 4398), 'torch.nn.AvgPool2d', 'nn.AvgPool2d', ([], {'kernel_size': 'config.global_pool_kernel_size'}), '(kernel_size=config.global_pool_kernel_size)\n', (4354, 4398), False, 'from torch import nn\n'), ((4417, 4452), 'torch.nn.Linear', 'nn.Linear', (['out_channels[3]', 'n_class'], {}), '(out_channels[3], n_class)\n', (4426, 4452), False, 'from torch import nn\n'), ((4766, 4787), 'torch.nn.Sequential', 'nn.Sequential', (['*stage'], {}), '(*stage)\n', (4779, 4787), False, 'from torch import nn\n'), ((5164, 5189), 'torch.randn', 'torch.randn', (['(3)', '(3)', '(32)', '(32)'], {}), '(3, 3, 32, 32)\n', (5175, 5189), False, 'import torch\n'), ((188, 220), 'torch.nn.init.xavier_normal_', 'nn.init.xavier_normal_', (['m.weight'], {}), '(m.weight)\n', (210, 220), False, 'from torch import nn\n'), ((272, 300), 'torch.nn.init.constant_', 'nn.init.constant_', (['m.bias', '(0)'], {}), '(m.bias, 0)\n', (289, 300), False, 'from torch import nn\n'), ((1099, 1171), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'in_channels'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(in_channels, in_channels, kernel_size=1, stride=1, bias=False)\n', 
(1108, 1171), False, 'from torch import nn\n'), ((1185, 1212), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channels'], {}), '(in_channels)\n', (1199, 1212), False, 'from torch import nn\n'), ((1226, 1247), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1233, 1247), False, 'from torch import nn\n'), ((1261, 1368), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'in_channels'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)', 'groups': 'in_channels'}), '(in_channels, in_channels, kernel_size=3, stride=1, padding=1,\n bias=False, groups=in_channels)\n', (1270, 1368), False, 'from torch import nn\n'), ((1378, 1405), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channels'], {}), '(in_channels)\n', (1392, 1405), False, 'from torch import nn\n'), ((1419, 1491), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'in_channels'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(in_channels, in_channels, kernel_size=1, stride=1, bias=False)\n', (1428, 1491), False, 'from torch import nn\n'), ((1505, 1532), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['in_channels'], {}), '(in_channels)\n', (1519, 1532), False, 'from torch import nn\n'), ((1546, 1567), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1553, 1567), False, 'from torch import nn\n'), ((2224, 2336), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.in_channels', 'self.in_channels'], {'kernel_size': '(3)', 'stride': '(2)', 'bias': '(False)', 'groups': 'self.in_channels'}), '(self.in_channels, self.in_channels, kernel_size=3, stride=2, bias\n =False, groups=self.in_channels)\n', (2233, 2336), False, 'from torch import nn\n'), ((2345, 2377), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['self.in_channels'], {}), '(self.in_channels)\n', (2359, 2377), False, 'from torch import nn\n'), ((2391, 2483), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.in_channels', '(self.out_channels // 2)'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': 
'(False)'}), '(self.in_channels, self.out_channels // 2, kernel_size=1, stride=1,\n bias=False)\n', (2400, 2483), False, 'from torch import nn\n'), ((2493, 2531), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(self.out_channels // 2)'], {}), '(self.out_channels // 2)\n', (2507, 2531), False, 'from torch import nn\n'), ((2545, 2566), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2552, 2566), False, 'from torch import nn\n'), ((2628, 2715), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.in_channels', 'self.in_channels'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(self.in_channels, self.in_channels, kernel_size=1, stride=1, bias\n =False)\n', (2637, 2715), False, 'from torch import nn\n'), ((2724, 2756), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['self.in_channels'], {}), '(self.in_channels)\n', (2738, 2756), False, 'from torch import nn\n'), ((2770, 2791), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (2777, 2791), False, 'from torch import nn\n'), ((2805, 2917), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.in_channels', 'self.in_channels'], {'kernel_size': '(3)', 'stride': '(2)', 'bias': '(False)', 'groups': 'self.in_channels'}), '(self.in_channels, self.in_channels, kernel_size=3, stride=2, bias\n =False, groups=self.in_channels)\n', (2814, 2917), False, 'from torch import nn\n'), ((2926, 2958), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['self.in_channels'], {}), '(self.in_channels)\n', (2940, 2958), False, 'from torch import nn\n'), ((2972, 3064), 'torch.nn.Conv2d', 'nn.Conv2d', (['self.in_channels', '(self.out_channels // 2)'], {'kernel_size': '(1)', 'stride': '(1)', 'bias': '(False)'}), '(self.in_channels, self.out_channels // 2, kernel_size=1, stride=1,\n bias=False)\n', (2981, 3064), False, 'from torch import nn\n'), ((3074, 3112), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(self.out_channels // 2)'], {}), '(self.out_channels // 2)\n', (3088, 3112), False, 'from torch import nn\n'), 
((3126, 3147), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3133, 3147), False, 'from torch import nn\n')] |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Different model implementation plus a general port for all the models."""
import functools
from typing import Any, Callable
from flax import linen as nn
import gin
from internal import mip, utils # pylint: disable=g-multiple-import
import jax
from jax import random
import jax.numpy as jnp
@gin.configurable
class MipNerfModel(nn.Module):
  """Nerf NN Model with both coarse and fine MLPs."""
  config: Any = None  # A Config class, must be set upon construction.
  num_samples: int = 128  # The number of samples per level.
  num_levels: int = 2  # The number of sampling levels.
  stop_level_grad: bool = True  # If True, don't backprop across levels.
  use_viewdirs: bool = True  # If True, use view directions as input.
  genspace_fn: Callable[Ellipsis, Any] = None  # The genspace() curve function.
  ray_shape: str = 'cone'  # The shape of cast rays ('cone' or 'cylinder').
  disable_integration: bool = False  # If True, use PE instead of IPE.
  single_jitter: bool = False  # If True, jitter whole rays instead of samples.

  @nn.compact
  def __call__(
      self,
      rng,
      rays,
      resample_padding,
      compute_extras,
  ):
    """The mip-NeRF Model.

    Args:
      rng: random number generator (or None for deterministic output).
      rays: util.Rays, a pytree of ray origins, directions, and viewdirs.
      resample_padding: float, the histogram padding to use when resampling.
      compute_extras: bool, if True, compute extra quantities besides color.

    Returns:
      ret: list, [*(rgb, distance, acc)]
    """
    # Construct the MLP.
    mlp = MLP()

    renderings = []
    for i_level in range(self.num_levels):
      if rng is None:
        key = None
      else:
        key, rng = random.split(rng)

      if i_level == 0:
        # Stratified sampling along rays
        t_vals, samples = mip.sample_along_rays(
            key,
            rays.origins,
            rays.directions,
            rays.radii,
            self.num_samples,
            rays.near,
            rays.far,
            self.genspace_fn,
            self.ray_shape,
            self.single_jitter,
        )
      else:
        # Hierarchical sampling: later levels resample according to the
        # previous level's weights (`weights` is bound by the first pass
        # through the loop below).
        t_vals, samples = mip.resample_along_rays(
            key,
            rays.origins,
            rays.directions,
            rays.radii,
            t_vals,
            weights,
            self.ray_shape,
            self.stop_level_grad,
            resample_padding,
            self.single_jitter,
        )
      if self.disable_integration:
        # Zeroing the sample covariances makes the IPE degenerate to a
        # plain positional encoding.
        samples = (samples[0], jnp.zeros_like(samples[1]))

      # Point attribute predictions
      if self.use_viewdirs:
        (rgb, density, normals) = mlp(rng, samples, rays.viewdirs)
      else:
        (rgb, density, normals) = mlp(rng, samples, None)

      # Volumetric rendering.
      weights, _, _, delta = mip.compute_alpha_weights(
          density, t_vals, rays.directions)

      rendering = mip.volumetric_rendering(
          rgb,
          weights,
          normals,
          t_vals,
          self.config.white_background,
          self.config.vis_num_rays,
          compute_extras,
          delta,
      )

      renderings.append(rendering)

    return renderings
def construct_mipnerf(rng, rays, config):
  """Construct a Neural Radiance Field.

  Args:
    rng: jnp.ndarray. Random number generator.
    rays: an example of input Rays.
    config: A Config class.

  Returns:
    model: nn.Model. Nerf model with parameters.
    state: flax.Module.state. Nerf model state for stateful parameters.
  """
  # Only a handful of rays are needed to trace shapes during init; slicing
  # to 10 keeps memory overhead minimal during construction.
  def _take_ten(x):
    return jnp.reshape(x, [-1, x.shape[-1]])[:10]

  example_rays = jax.tree_map(_take_ten, rays)
  model = MipNerfModel(config=config)
  init_variables = model.init(
      rng, rng=None, rays=example_rays, resample_padding=0.,
      compute_extras=False)
  return model, init_variables
def cosine_easing_window(alpha, min_freq_log2=0, max_freq_log2=16):
  """Eases in each frequency one by one with a cosine.

  This is equivalent to taking a Tukey window and sliding it to the right
  along the frequency spectrum.

  Args:
    alpha: will ease in each frequency as alpha goes from 0.0 to num_freqs.
    min_freq_log2: the lower frequency band.
    max_freq_log2: the upper frequency band.

  Returns:
    A 1-d numpy array with num_sample elements containing the window.
  """
  num_bands = max_freq_log2 - min_freq_log2
  bands = jnp.linspace(min_freq_log2, max_freq_log2, num_bands)
  clipped = jnp.clip(alpha - bands, 0.0, 1.0)
  window = 0.5 * (1 + jnp.cos(jnp.pi * clipped + jnp.pi))
  window = window.reshape(-1)
  # The first four frequencies are always fully enabled.
  window = jnp.concatenate([jnp.ones_like(window[:4]), window[4:]])
  # Replicate each per-frequency value for the 3 spatial dimensions.
  window = jnp.repeat(window.reshape(-1, 1), 3, axis=1).reshape(-1)
  return jnp.stack([window, window])
@gin.configurable
class MLP(nn.Module):
  """A simple MLP."""
  net_depth: int = 8  # The depth of the first part of MLP.
  net_width: int = 256  # The width of the first part of MLP.
  net_depth_viewdirs: int = 1  # The depth of the second part of MLP.
  net_width_viewdirs: int = 128  # The width of the second part of MLP.
  net_activation: Callable[Ellipsis, Any] = nn.relu  # The activation function.
  # Initializer for the weights of the MLP.
  weight_init: Callable[Ellipsis, Any] = jax.nn.initializers.glorot_uniform()
  skip_layer: int = 4  # Add a skip connection to the output of every N layers.
  num_rgb_channels: int = 3  # The number of RGB channels.
  min_deg_point: int = 0  # Min degree of positional encoding for 3D points.
  max_deg_point: int = 16  # Max degree of positional encoding for 3D points.
  deg_view: int = 4  # Degree of positional encoding for viewdirs.
  density_activation: Callable[Ellipsis, Any] = nn.softplus  # Density activation.
  density_noise: float = 0.  # Standard deviation of noise added to raw density.
  density_bias: float = -1.  # The shift added to raw densities pre-activation.
  rgb_activation: Callable[Ellipsis, Any] = nn.sigmoid  # The RGB activation.
  rgb_padding: float = 0.001  # Padding added to the RGB outputs.
  disable_normals: bool = False  # If True, don't bother computing normals.

  @nn.compact
  def __call__(self, rng, samples, viewdirs=None):
    """Evaluate the MLP.

    Args:
      rng: random number generator (or None for deterministic output).
      samples: a tuple containing:
        - mean: [..., num_samples, 3], coordinate means, and
        - cov: [..., num_samples, 3{, 3}], coordinate covariance matrices.
      viewdirs: jnp.ndarray(float32), [batch, 3], if not None, this variable
        will be part of the input to the second part of the MLP concatenated
        with the output vector of the first part of the MLP. If None, only the
        first part of the MLP will be used with input x. In the original paper,
        this variable is the view direction.

    Returns:
      rgb: jnp.ndarray(float32), with a shape of [..., num_rgb_channels].
      density: jnp.ndarray(float32), with a shape of [...].
      normals: jnp.ndarray(float32), with a shape of [..., 3].
    """
    dense_layer = functools.partial(nn.Dense, kernel_init=self.weight_init)

    def predict_density(rng, means, covs):
      """Helper function to output density."""
      # Encode input positions
      inputs = mip.integrated_pos_enc(
          (means, covs), self.min_deg_point, self.max_deg_point)
      # Evaluate network to output density
      x = inputs
      for i in range(self.net_depth):
        x = dense_layer(self.net_width)(x)
        x = self.net_activation(x)
        if i % self.skip_layer == 0 and i > 0:
          # Skip connection: re-concatenate the encoded inputs.
          x = jnp.concatenate([x, inputs], axis=-1)
      raw_density = dense_layer(1)(x)[Ellipsis, 0]  # Hardcoded to a single channel.

      # Add noise to regularize the density predictions if needed.
      if (rng is not None) and (self.density_noise > 0):
        key, rng = random.split(rng)
        raw_density += self.density_noise * random.normal(
            key, raw_density.shape, dtype=raw_density.dtype)

      # Apply bias and activation to raw density
      density = self.density_activation(raw_density + self.density_bias)
      return density, x

    means, covs = samples
    if self.disable_normals:
      density, x = predict_density(rng, means, covs)
      # NaN normals signal "not computed" to downstream consumers.
      normals = jnp.full_like(means, fill_value=jnp.nan)
    else:
      # Flatten the input so value_and_grad can be vmap'ed.
      means_flat = means.reshape([-1, means.shape[-1]])
      covs_flat = covs.reshape([-1] + list(covs.shape[len(means.shape) - 1:]))

      # Evaluate the network and its gradient on the flattened input.
      predict_density_and_grad_fn = jax.vmap(
          jax.value_and_grad(predict_density, argnums=1, has_aux=True),
          in_axes=(None, 0, 0))
      (density_flat, x_flat), density_grad_flat = (
          predict_density_and_grad_fn(rng, means_flat, covs_flat))

      # Unflatten the output.
      density = density_flat.reshape(means.shape[:-1])
      x = x_flat.reshape(list(means.shape[:-1]) + [x_flat.shape[-1]])
      density_grad = density_grad_flat.reshape(means.shape)

      # Compute surface normals as negative normalized density gradient
      eps = jnp.finfo(jnp.float32).eps
      normals = -density_grad / jnp.sqrt(
          jnp.maximum(jnp.sum(density_grad**2, axis=-1, keepdims=True), eps))

    if viewdirs is not None:
      viewdirs_enc = mip.pos_enc(
          viewdirs, min_deg=0, max_deg=self.deg_view, append_identity=True)
      # Output of the first part of MLP.
      bottleneck = dense_layer(self.net_width)(x)
      # Broadcast the view encoding across the samples dimension.
      viewdirs_enc = jnp.broadcast_to(
          viewdirs_enc[Ellipsis, None, :],
          list(bottleneck.shape[:-1]) + [viewdirs_enc.shape[-1]])
      x = jnp.concatenate([bottleneck, viewdirs_enc], axis=-1)
      # Here use 1 extra layer to align with the original nerf model.
      for _ in range(self.net_depth_viewdirs):
        x = dense_layer(self.net_width_viewdirs)(x)
        x = self.net_activation(x)

    rgb = self.rgb_activation(dense_layer(self.num_rgb_channels)(x))
    rgb = rgb * (1 + 2 * self.rgb_padding) - self.rgb_padding

    return (rgb, density, normals)
def render_image(render_fn, rays, rng, config):
  """Render all the pixels of an image (in test mode).

  Args:
    render_fn: function, jit-ed render function.
    rays: a `Rays` pytree, the rays to be rendered.
    rng: jnp.ndarray, random number generator (used in training mode only).
    config: A Config class.

  Returns:
    rendering: a dict mapping each output name produced by `render_fn` to its
      full-image value.  Non-list outputs are reshaped to (height, width, ...);
      'ray_*' entries are lists of per-ray diagnostics subsampled to
      `config.vis_num_rays` rays.
  """
  # Flatten the (height, width) image into a single batch of rays.
  height, width = rays.origins.shape[:2]
  num_rays = height * width
  rays = jax.tree_map(lambda r: r.reshape((num_rays, -1)), rays)
  # NOTE(review): jax.host_id()/host_count() are deprecated aliases of
  # process_index()/process_count() in newer JAX — confirm the pinned version.
  host_id = jax.host_id()
  chunks = []
  idx0s = range(0, num_rays, config.render_chunk_size)
  for i_chunk, idx0 in enumerate(idx0s):
    # pylint: disable=cell-var-from-loop
    # Log roughly 10 progress messages over the whole render.
    if i_chunk % max(1, len(idx0s) // 10) == 0:
      print(f'Rendering chunk {i_chunk}/{len(idx0s)-1}')
    chunk_rays = (
        jax.tree_map(lambda r: r[idx0:idx0 + config.render_chunk_size], rays))
    actual_chunk_size = chunk_rays.origins.shape[0]
    rays_remaining = actual_chunk_size % jax.device_count()
    if rays_remaining != 0:
      # Pad the chunk so it divides evenly across all devices.
      padding = jax.device_count() - rays_remaining
      chunk_rays = jax.tree_map(
          lambda r: jnp.pad(r, ((0, padding), (0, 0)), mode='edge'), chunk_rays)
    else:
      padding = 0
    # After padding the number of chunk_rays is always divisible by host_count.
    rays_per_host = chunk_rays.origins.shape[0] // jax.host_count()
    start, stop = host_id * rays_per_host, (host_id + 1) * rays_per_host
    chunk_rays = jax.tree_map(lambda r: utils.shard(r[start:stop]), chunk_rays)
    chunk_renderings = render_fn(rng, chunk_rays)
    # Unshard the renderings
    chunk_renderings = [{k: utils.unshard(v[0], padding)
                         for k, v in r.items()}
                        for r in chunk_renderings]
    # Keep the final rendering; 'ray_*' diagnostics are collected per level.
    chunk_rendering = chunk_renderings[-1]
    keys = [k for k in chunk_renderings[0] if k.find('ray_') == 0]
    for k in keys:
      chunk_rendering[k] = [r[k] for r in chunk_renderings]
    chunks.append(chunk_rendering)
  rendering = {}
  for k in chunks[0]:
    if isinstance(chunks[0][k], list):
      # Per-level 'ray_*' lists: concatenate chunk-wise at each level.
      rendering[k] = [r[k] for r in chunks]
      ds = range(len(rendering[k][0]))
      rendering[k] = [jnp.concatenate([r[d] for r in rendering[k]]) for d in ds]
    else:
      rendering[k] = jnp.concatenate([r[k] for r in chunks])
      rendering[k] = (
          rendering[k].reshape((height, width) + chunks[0][k].shape[1:]))
  # After all of the ray bundles have been concatenated together, extract a
  # new random bundle (deterministically) from the concatenation that is the
  # same size as one of the individual bundles.
  keys = [k for k in rendering if k.find('ray_') == 0]
  if keys:
    ray_idx = random.permutation(
        random.PRNGKey(0), rendering[keys[0]][0].shape[0])[:config.vis_num_rays]
    for k in keys:
      rendering[k] = [r[ray_idx] for r in rendering[k]]
  return rendering
| [
"jax.nn.initializers.glorot_uniform",
"internal.mip.integrated_pos_enc",
"jax.numpy.pad",
"jax.tree_map",
"internal.utils.shard",
"internal.mip.volumetric_rendering",
"jax.random.split",
"internal.mip.pos_enc",
"jax.random.PRNGKey",
"jax.device_count",
"jax.numpy.concatenate",
"jax.random.norm... | [((5039, 5092), 'jax.numpy.linspace', 'jnp.linspace', (['min_freq_log2', 'max_freq_log2', 'num_bands'], {}), '(min_freq_log2, max_freq_log2, num_bands)\n', (5051, 5092), True, 'import jax.numpy as jnp\n'), ((5099, 5132), 'jax.numpy.clip', 'jnp.clip', (['(alpha - bands)', '(0.0)', '(1.0)'], {}), '(alpha - bands, 0.0, 1.0)\n', (5107, 5132), True, 'import jax.numpy as jnp\n'), ((5396, 5423), 'jax.numpy.stack', 'jnp.stack', (['[values, values]'], {}), '([values, values])\n', (5405, 5423), True, 'import jax.numpy as jnp\n'), ((5917, 5953), 'jax.nn.initializers.glorot_uniform', 'jax.nn.initializers.glorot_uniform', ([], {}), '()\n', (5951, 5953), False, 'import jax\n'), ((11400, 11413), 'jax.host_id', 'jax.host_id', ([], {}), '()\n', (11411, 11413), False, 'import jax\n'), ((7719, 7776), 'functools.partial', 'functools.partial', (['nn.Dense'], {'kernel_init': 'self.weight_init'}), '(nn.Dense, kernel_init=self.weight_init)\n', (7736, 7776), False, 'import functools\n'), ((11697, 11766), 'jax.tree_map', 'jax.tree_map', (['(lambda r: r[idx0:idx0 + config.render_chunk_size])', 'rays'], {}), '(lambda r: r[idx0:idx0 + config.render_chunk_size], rays)\n', (11709, 11766), False, 'import jax\n'), ((3454, 3513), 'internal.mip.compute_alpha_weights', 'mip.compute_alpha_weights', (['density', 't_vals', 'rays.directions'], {}), '(density, t_vals, rays.directions)\n', (3479, 3513), False, 'from internal import mip, utils\n'), ((3543, 3682), 'internal.mip.volumetric_rendering', 'mip.volumetric_rendering', (['rgb', 'weights', 'normals', 't_vals', 'self.config.white_background', 'self.config.vis_num_rays', 'compute_extras', 'delta'], {}), '(rgb, weights, normals, t_vals, self.config.\n white_background, self.config.vis_num_rays, compute_extras, delta)\n', (3567, 3682), False, 'from internal import mip, utils\n'), ((5155, 5183), 'jax.numpy.cos', 'jnp.cos', (['(jnp.pi * x + jnp.pi)'], {}), '(jnp.pi * x + jnp.pi)\n', (5162, 5183), True, 'import jax.numpy as jnp\n'), 
((5278, 5303), 'jax.numpy.ones_like', 'jnp.ones_like', (['values[:4]'], {}), '(values[:4])\n', (5291, 5303), True, 'import jax.numpy as jnp\n'), ((7914, 7991), 'internal.mip.integrated_pos_enc', 'mip.integrated_pos_enc', (['(means, covs)', 'self.min_deg_point', 'self.max_deg_point'], {}), '((means, covs), self.min_deg_point, self.max_deg_point)\n', (7936, 7991), False, 'from internal import mip, utils\n'), ((8915, 8955), 'jax.numpy.full_like', 'jnp.full_like', (['means'], {'fill_value': 'jnp.nan'}), '(means, fill_value=jnp.nan)\n', (8928, 8955), True, 'import jax.numpy as jnp\n'), ((9999, 10076), 'internal.mip.pos_enc', 'mip.pos_enc', (['viewdirs'], {'min_deg': '(0)', 'max_deg': 'self.deg_view', 'append_identity': '(True)'}), '(viewdirs, min_deg=0, max_deg=self.deg_view, append_identity=True)\n', (10010, 10076), False, 'from internal import mip, utils\n'), ((10337, 10389), 'jax.numpy.concatenate', 'jnp.concatenate', (['[bottleneck, viewdirs_enc]'], {'axis': '(-1)'}), '([bottleneck, viewdirs_enc], axis=-1)\n', (10352, 10389), True, 'import jax.numpy as jnp\n'), ((11861, 11879), 'jax.device_count', 'jax.device_count', ([], {}), '()\n', (11877, 11879), False, 'import jax\n'), ((12233, 12249), 'jax.host_count', 'jax.host_count', ([], {}), '()\n', (12247, 12249), False, 'import jax\n'), ((13139, 13178), 'jax.numpy.concatenate', 'jnp.concatenate', (['[r[k] for r in chunks]'], {}), '([r[k] for r in chunks])\n', (13154, 13178), True, 'import jax.numpy as jnp\n'), ((2360, 2377), 'jax.random.split', 'random.split', (['rng'], {}), '(rng)\n', (2372, 2377), False, 'from jax import random\n'), ((2469, 2640), 'internal.mip.sample_along_rays', 'mip.sample_along_rays', (['key', 'rays.origins', 'rays.directions', 'rays.radii', 'self.num_samples', 'rays.near', 'rays.far', 'self.genspace_fn', 'self.ray_shape', 'self.single_jitter'], {}), '(key, rays.origins, rays.directions, rays.radii, self.\n num_samples, rays.near, rays.far, self.genspace_fn, self.ray_shape,\n 
self.single_jitter)\n', (2490, 2640), False, 'from internal import mip, utils\n'), ((2801, 2973), 'internal.mip.resample_along_rays', 'mip.resample_along_rays', (['key', 'rays.origins', 'rays.directions', 'rays.radii', 't_vals', 'weights', 'self.ray_shape', 'self.stop_level_grad', 'resample_padding', 'self.single_jitter'], {}), '(key, rays.origins, rays.directions, rays.radii,\n t_vals, weights, self.ray_shape, self.stop_level_grad, resample_padding,\n self.single_jitter)\n', (2824, 2973), False, 'from internal import mip, utils\n'), ((4270, 4303), 'jax.numpy.reshape', 'jnp.reshape', (['x', '[-1, x.shape[-1]]'], {}), '(x, [-1, x.shape[-1]])\n', (4281, 4303), True, 'import jax.numpy as jnp\n'), ((8506, 8523), 'jax.random.split', 'random.split', (['rng'], {}), '(rng)\n', (8518, 8523), False, 'from jax import random\n'), ((9287, 9347), 'jax.value_and_grad', 'jax.value_and_grad', (['predict_density'], {'argnums': '(1)', 'has_aux': '(True)'}), '(predict_density, argnums=1, has_aux=True)\n', (9305, 9347), False, 'import jax\n'), ((9801, 9823), 'jax.numpy.finfo', 'jnp.finfo', (['jnp.float32'], {}), '(jnp.float32)\n', (9810, 9823), True, 'import jax.numpy as jnp\n'), ((11924, 11942), 'jax.device_count', 'jax.device_count', ([], {}), '()\n', (11940, 11942), False, 'import jax\n'), ((12363, 12389), 'internal.utils.shard', 'utils.shard', (['r[start:stop]'], {}), '(r[start:stop])\n', (12374, 12389), False, 'from internal import mip, utils\n'), ((12511, 12539), 'internal.utils.unshard', 'utils.unshard', (['v[0]', 'padding'], {}), '(v[0], padding)\n', (12524, 12539), False, 'from internal import mip, utils\n'), ((13049, 13094), 'jax.numpy.concatenate', 'jnp.concatenate', (['[r[d] for r in rendering[k]]'], {}), '([r[d] for r in rendering[k]])\n', (13064, 13094), True, 'import jax.numpy as jnp\n'), ((13586, 13603), 'jax.random.PRNGKey', 'random.PRNGKey', (['(0)'], {}), '(0)\n', (13600, 13603), False, 'from jax import random\n'), ((3164, 3190), 'jax.numpy.zeros_like', 
'jnp.zeros_like', (['samples[1]'], {}), '(samples[1])\n', (3178, 3190), True, 'import jax.numpy as jnp\n'), ((8240, 8277), 'jax.numpy.concatenate', 'jnp.concatenate', (['[x, inputs]'], {'axis': '(-1)'}), '([x, inputs], axis=-1)\n', (8255, 8277), True, 'import jax.numpy as jnp\n'), ((8568, 8630), 'jax.random.normal', 'random.normal', (['key', 'raw_density.shape'], {'dtype': 'raw_density.dtype'}), '(key, raw_density.shape, dtype=raw_density.dtype)\n', (8581, 8630), False, 'from jax import random\n'), ((12013, 12060), 'jax.numpy.pad', 'jnp.pad', (['r', '((0, padding), (0, 0))'], {'mode': '"""edge"""'}), "(r, ((0, padding), (0, 0)), mode='edge')\n", (12020, 12060), True, 'import jax.numpy as jnp\n'), ((9892, 9942), 'jax.numpy.sum', 'jnp.sum', (['(density_grad ** 2)'], {'axis': '(-1)', 'keepdims': '(True)'}), '(density_grad ** 2, axis=-1, keepdims=True)\n', (9899, 9942), True, 'import jax.numpy as jnp\n')] |
# -*- encoding: utf-8 -*-
try:
import unittest2 as unittest
except ImportError:
import unittest
from multiprocessing import Manager
from random import randint
import logging
import sys
import os
import copy
import shutil
# add kamma path
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')))
import kamma
# Directory that backs kamma's on-disk task queue during the tests.
TEST_PATH = "test_queue"

# One shared stream handler so all loggers below format records identically.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)-8s] [%(name)-10s] [%(lineno)-4d] %(message)s'))

logger_kamma = logging.getLogger('kamma.app')
logger_kamma.handlers = [handler]
# logger_kamma.setLevel(logging.DEBUG)

logger_fqueue = logging.getLogger('kamma.queue')
logger_fqueue.handlers = [handler]
# logger_fqueue.setLevel(logging.DEBUG)

logger_task = logging.getLogger('kamma.task')
logger_task.handlers = [handler]
# logger_task.setLevel(logging.DEBUG)

# Only this test module's own logger is verbose by default.
logger = logging.getLogger('test')
logger.handlers = [handler]
logger.setLevel(logging.DEBUG)
def _clear_queue():
    """Remove the on-disk task queue used by the tests.

    A missing directory (first run, or already cleaned) is not an error, so
    removal failures are ignored.  Using ``ignore_errors=True`` replaces the
    original broad ``except Exception: pass``, which also hid real bugs such
    as NameError or KeyboardInterrupt.
    """
    shutil.rmtree(TEST_PATH, ignore_errors=True)
# This must stay at module scope: if it lived inside the test class, Python
# would try to pickle the whole class together with its Manager, and that
# serialization would fail.
the_manager = None
class KammaTestsCheckOrder(unittest.TestCase):
    """Checks that kamma executes queued tasks strictly in push (FIFO) order.

    A random sequence of 100 callback indices is generated in setUp; each
    executed task pops the head of that shared list and asserts it matches
    the task that was expected to run next.
    """

    def setUp(self):
        _clear_queue()
        self.callbacks = [self.task0, self.task1, self.task2, self.task3, self.task4, self.task5]
        # Manager is necessary because it is modified from different threads
        # NOTE(review): this assigns a *local* `the_manager`, shadowing the
        # module-level one — presumably intentional, confirm.
        the_manager = Manager()
        self.cb_indexs = the_manager.list()
        for i in range(0, 100):
            self.cb_indexs.append(randint(0, 5))

    def tearDown(self):
        _clear_queue()

    def _taskx(self, task_id, data):
        # Every task asserts it is the one at the head of the expected
        # sequence, then consumes that head entry.
        logger.debug("running '{}', remaining {} tasks".format(task_id, len(self.cb_indexs)))
        self.assertEqual(task_id, data['id'], "{} data: {}, tasks: {}".format(task_id, data, self.cb_indexs))
        self.assertEqual(task_id, self.callbacks[self.cb_indexs[0]].__name__)
        self.cb_indexs.pop(0)

    def task0(self, data):
        self._taskx('task0', data)

    def task1(self, data):
        self._taskx('task1', data)

    def task2(self, data):
        self._taskx('task2', data)

    def task3(self, data):
        self._taskx('task3', data)

    def task4(self, data):
        self._taskx('task4', data)

    def task5(self, data):
        self._taskx('task5', data)

    def test_usual_case(self):
        """Push 100 tasks in random order and verify they run in that order."""
        worker = kamma.Worker(queue_path=TEST_PATH)
        # No retries: each task must succeed (and assert) on the first try.
        worker.add_task_callback(callback=self.task0, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(1))
        worker.add_task_callback(callback=self.task1, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(1))
        worker.add_task_callback(callback=self.task2, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(1))
        worker.add_task_callback(callback=self.task3, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(1))
        worker.add_task_callback(callback=self.task4, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(1))
        worker.add_task_callback(callback=self.task5, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(1))
        # Copy before the tasks start consuming the shared list.
        cloned_cb_indexs = copy.deepcopy(self.cb_indexs)
        worker.run_async()
        for i in cloned_cb_indexs:
            worker.push_task(callback=self.callbacks[i], data={'id': self.callbacks[i].__name__})
        worker.wait_empty_event()
        self.assertEqual(0, worker.pending())
        worker.stop()
        # All expected entries must have been consumed by _taskx.
        self.assertEqual(0, len(self.cb_indexs))
class KammaTestsExceptionsInKamma(unittest.TestCase):
    """Verifies kamma's own error reporting at the public API boundary."""

    def setUp(self):
        _clear_queue()

    def tearDown(self):
        _clear_queue()

    def task(self):
        pass

    def test_exception_pushtask_TaskNotRegistered(self):
        """Pushing a callback that was never registered must raise."""
        worker = kamma.Worker(queue_path=TEST_PATH)
        with self.assertRaises(kamma.TaskNotRegistered):
            worker.push_task(callback=self.task)
        worker.stop()
class KammaTestsExceptionsInTask(unittest.TestCase):
    """Checks that a task raising exceptions is retried until it succeeds."""

    def setUp(self):
        _clear_queue()
        # NOTE(review): this assigns a *local* `the_manager`, shadowing the
        # module-level one — presumably intentional, confirm.
        the_manager = Manager()
        # Shared single-element counter: how many times task0 has run.
        self.count = the_manager.list()
        self.count.append(0)
        self.num_failures = 3

    def tearDown(self):
        _clear_queue()

    def task0(self):
        # Fails on the first (num_failures - 1) attempts, succeeds after.
        self.count[0] = self.count[0] + 1
        if self.count[0] < self.num_failures:
            raise Exception('I don\'t want to work, try {}'.format(self.count[0]))

    def test_exception_in_task(self):
        """The worker retries until the task stops raising."""
        worker = kamma.Worker(queue_path=TEST_PATH)
        # Allow one more attempt than the number of expected failures.
        worker.add_task_callback(callback=self.task0, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(self.num_failures+1))
        worker.push_task(callback=self.task0)
        worker.run_async()
        worker.wait_empty_event()
        worker.stop()
        # task0 ran exactly num_failures times (last run succeeded).
        self.assertEqual(self.num_failures, self.count[0])
class KammaTestsOnAbortion(unittest.TestCase):
    """Checks the worker's abortion and failure notification hooks."""

    def setUp(self):
        _clear_queue()
        # Flags flipped by the hooks below; asserted at the end of each test.
        self.abortion_called = False
        self.failure_called = False

    def tearDown(self):
        _clear_queue()

    def task_abort(self):
        # Raising AbortTask signals a deliberate, non-retryable abort.
        raise kamma.AbortTask("I'm indisposed")

    def task_failure(self):
        # A plain exception counts as an ordinary failure (retryable).
        raise Exception("Boom")

    def on_abortion(self, json_task, reason):
        self.abortion_called = True

    def on_failure(self, json_task, retry_stopped):
        self.failure_called = True

    def test_on_abortion(self):
        """An AbortTask raise must invoke the on_abortion hook."""
        worker = kamma.Worker(queue_path=TEST_PATH)
        worker.add_on_abortion(self.on_abortion)
        worker.add_task_callback(self.task_abort)
        worker.run_async()
        worker.push_task(self.task_abort)
        worker.wait_empty_event()
        worker.stop()
        self.assertTrue(self.abortion_called)

    def test_on_failure(self):
        """Exhausting retries on a failing task must invoke on_failure."""
        worker = kamma.Worker(queue_path=TEST_PATH)
        worker.add_on_failure(self.on_failure)
        # Single attempt, no wait: fail fast so on_failure fires immediately.
        worker.add_task_callback(self.task_failure, retry_wait=kamma.wait_fixed(0), retry_stop=kamma.stop_after_attempt(1))
        worker.run_async()
        worker.push_task(self.task_failure)
        worker.wait_empty_event()
        worker.stop()
        self.assertTrue(self.failure_called)
# Run this module's test cases when executed directly as a script.
if __name__ == '__main__':
    unittest.main()
| [
"logging.getLogger",
"kamma.wait_fixed",
"logging.StreamHandler",
"copy.deepcopy",
"logging.Formatter",
"os.path.dirname",
"kamma.Worker",
"kamma.AbortTask",
"kamma.stop_after_attempt",
"shutil.rmtree",
"unittest.main",
"random.randint",
"multiprocessing.Manager"
] | [((407, 430), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (428, 430), False, 'import logging\n'), ((562, 592), 'logging.getLogger', 'logging.getLogger', (['"""kamma.app"""'], {}), "('kamma.app')\n", (579, 592), False, 'import logging\n'), ((685, 717), 'logging.getLogger', 'logging.getLogger', (['"""kamma.queue"""'], {}), "('kamma.queue')\n", (702, 717), False, 'import logging\n'), ((810, 841), 'logging.getLogger', 'logging.getLogger', (['"""kamma.task"""'], {}), "('kamma.task')\n", (827, 841), False, 'import logging\n'), ((925, 950), 'logging.getLogger', 'logging.getLogger', (['"""test"""'], {}), "('test')\n", (942, 950), False, 'import logging\n'), ((453, 549), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s [%(levelname)-8s] [%(name)-10s] [%(lineno)-4d] %(message)s"""'], {}), "(\n '%(asctime)s [%(levelname)-8s] [%(name)-10s] [%(lineno)-4d] %(message)s')\n", (470, 549), False, 'import logging\n'), ((6502, 6517), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6515, 6517), False, 'import unittest\n'), ((1056, 1080), 'shutil.rmtree', 'shutil.rmtree', (['TEST_PATH'], {}), '(TEST_PATH)\n', (1069, 1080), False, 'import shutil\n'), ((1582, 1591), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (1589, 1591), False, 'from multiprocessing import Manager\n'), ((2575, 2609), 'kamma.Worker', 'kamma.Worker', ([], {'queue_path': 'TEST_PATH'}), '(queue_path=TEST_PATH)\n', (2587, 2609), False, 'import kamma\n'), ((3400, 3429), 'copy.deepcopy', 'copy.deepcopy', (['self.cb_indexs'], {}), '(self.cb_indexs)\n', (3413, 3429), False, 'import copy\n'), ((4019, 4053), 'kamma.Worker', 'kamma.Worker', ([], {'queue_path': 'TEST_PATH'}), '(queue_path=TEST_PATH)\n', (4031, 4053), False, 'import kamma\n'), ((4327, 4336), 'multiprocessing.Manager', 'Manager', ([], {}), '()\n', (4334, 4336), False, 'from multiprocessing import Manager\n'), ((4747, 4781), 'kamma.Worker', 'kamma.Worker', ([], {'queue_path': 'TEST_PATH'}), 
'(queue_path=TEST_PATH)\n', (4759, 4781), False, 'import kamma\n'), ((5388, 5421), 'kamma.AbortTask', 'kamma.AbortTask', (['"""I\'m indisposed"""'], {}), '("I\'m indisposed")\n', (5403, 5421), False, 'import kamma\n'), ((5716, 5750), 'kamma.Worker', 'kamma.Worker', ([], {'queue_path': 'TEST_PATH'}), '(queue_path=TEST_PATH)\n', (5728, 5750), False, 'import kamma\n'), ((6080, 6114), 'kamma.Worker', 'kamma.Worker', ([], {'queue_path': 'TEST_PATH'}), '(queue_path=TEST_PATH)\n', (6092, 6114), False, 'import kamma\n'), ((309, 334), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (324, 334), False, 'import os\n'), ((1705, 1718), 'random.randint', 'randint', (['(0)', '(5)'], {}), '(0, 5)\n', (1712, 1718), False, 'from random import randint\n'), ((2676, 2695), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (2692, 2695), False, 'import kamma\n'), ((2708, 2735), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(1)'], {}), '(1)\n', (2732, 2735), False, 'import kamma\n'), ((2803, 2822), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (2819, 2822), False, 'import kamma\n'), ((2835, 2862), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(1)'], {}), '(1)\n', (2859, 2862), False, 'import kamma\n'), ((2930, 2949), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (2946, 2949), False, 'import kamma\n'), ((2962, 2989), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(1)'], {}), '(1)\n', (2986, 2989), False, 'import kamma\n'), ((3057, 3076), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (3073, 3076), False, 'import kamma\n'), ((3089, 3116), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(1)'], {}), '(1)\n', (3113, 3116), False, 'import kamma\n'), ((3184, 3203), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (3200, 3203), False, 'import kamma\n'), ((3216, 3243), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(1)'], {}), 
'(1)\n', (3240, 3243), False, 'import kamma\n'), ((3311, 3330), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (3327, 3330), False, 'import kamma\n'), ((3343, 3370), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(1)'], {}), '(1)\n', (3367, 3370), False, 'import kamma\n'), ((4848, 4867), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (4864, 4867), False, 'import kamma\n'), ((4880, 4927), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(self.num_failures + 1)'], {}), '(self.num_failures + 1)\n', (4904, 4927), False, 'import kamma\n'), ((6227, 6246), 'kamma.wait_fixed', 'kamma.wait_fixed', (['(0)'], {}), '(0)\n', (6243, 6246), False, 'import kamma\n'), ((6259, 6286), 'kamma.stop_after_attempt', 'kamma.stop_after_attempt', (['(1)'], {}), '(1)\n', (6283, 6286), False, 'import kamma\n')] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
    """Enum metaclass that makes member lookup by name case-insensitive.

    Both subscription (``MyEnum["name"]``) and attribute access
    (``MyEnum.name``) are normalized to upper case before the lookup.
    """

    def __getitem__(cls, name):
        # Subscription: normalize to the canonical upper-case member name.
        return super().__getitem__(name.upper())

    def __getattr__(cls, name):
        """Return the enum member matching `name`, ignoring case.

        __getattr__ (rather than descriptors or entries inserted into the
        enum class' __dict__) is used so that `name` and `value` keep
        working both as member properties and as member names.  It is only
        reached when regular attribute lookup fails.
        """
        member = cls._member_map_.get(name.upper())
        if member is None:
            raise AttributeError(name)
        return member
class Enum100(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated property names — likely OData $select values for a column-definition-like resource; confirm against the operation that uses it."""

    ID = "id"
    DESCRIPTION = "description"
    GROUP = "group"
    HIDDEN = "hidden"
    INHERITED_FROM = "inheritedFrom"
    NAME = "name"
    ORDER = "order"
    PARENT_ID = "parentId"
    READ_ONLY = "readOnly"
    SEALED = "sealed"
    COLUMN_LINKS = "columnLinks"


class Enum101(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets; confirm against the operation that uses it."""

    ASTERISK = "*"
    COLUMN_LINKS = "columnLinks"


class Enum102(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated sort keys (each field plus its `... desc` variant) — likely OData $orderby values."""

    ID = "id"
    ID_DESC = "id desc"
    NAME = "name"
    NAME_DESC = "name desc"


class Enum103(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated property names — likely OData $select values; confirm against the operation that uses it."""

    ID = "id"
    NAME = "name"


class Enum104(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated property names — likely OData $select values; confirm against the operation that uses it."""

    ID = "id"
    NAME = "name"
class Enum105(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated drive-like property and navigation names — likely OData $select values; confirm against the operation that uses it."""

    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DRIVE_TYPE = "driveType"
    OWNER = "owner"
    QUOTA = "quota"
    SHARE_POINT_IDS = "sharePointIds"
    SYSTEM = "system"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"


class Enum106(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets for a drive-like resource."""

    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"


class Enum107(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated sort keys (each field plus its `... desc` variant) — likely OData $orderby values."""

    ID = "id"
    ID_DESC = "id desc"
    CREATED_BY = "createdBy"
    CREATED_BY_DESC = "createdBy desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    E_TAG = "eTag"
    E_TAG_DESC = "eTag desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    PARENT_REFERENCE = "parentReference"
    PARENT_REFERENCE_DESC = "parentReference desc"
    WEB_URL = "webUrl"
    WEB_URL_DESC = "webUrl desc"
    CONTENT_TYPE = "contentType"
    CONTENT_TYPE_DESC = "contentType desc"
    SHAREPOINT_IDS = "sharepointIds"
    SHAREPOINT_IDS_DESC = "sharepointIds desc"


class Enum108(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated list-item-like property and navigation names — likely OData $select values."""

    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    CONTENT_TYPE = "contentType"
    SHAREPOINT_IDS = "sharepointIds"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    DRIVE_ITEM = "driveItem"
    FIELDS = "fields"
    VERSIONS = "versions"


class Enum109(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets for a list-item-like resource."""

    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    DRIVE_ITEM = "driveItem"
    FIELDS = "fields"
    VERSIONS = "versions"


class Enum110(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated list-item-like property and navigation names — likely OData $select values (same set as Enum108, for a different operation)."""

    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    CONTENT_TYPE = "contentType"
    SHAREPOINT_IDS = "sharepointIds"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    DRIVE_ITEM = "driveItem"
    FIELDS = "fields"
    VERSIONS = "versions"


class Enum111(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets (same set as Enum109, for a different operation)."""

    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    DRIVE_ITEM = "driveItem"
    FIELDS = "fields"
    VERSIONS = "versions"
class Enum112(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated analytics-like property names — likely OData $select values; confirm against the operation that uses it."""

    ID = "id"
    ALL_TIME = "allTime"
    ITEM_ACTIVITY_STATS = "itemActivityStats"
    LAST_SEVEN_DAYS = "lastSevenDays"


class Enum113(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets for an analytics-like resource."""

    ASTERISK = "*"
    ALL_TIME = "allTime"
    ITEM_ACTIVITY_STATS = "itemActivityStats"
    LAST_SEVEN_DAYS = "lastSevenDays"


class Enum114(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated drive-item-like property and navigation names — likely OData $select values."""

    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    AUDIO = "audio"
    CONTENT = "content"
    C_TAG = "cTag"
    DELETED = "deleted"
    FILE = "file"
    FILE_SYSTEM_INFO = "fileSystemInfo"
    FOLDER = "folder"
    IMAGE = "image"
    LOCATION = "location"
    PACKAGE = "package"
    PENDING_OPERATIONS = "pendingOperations"
    PHOTO = "photo"
    PUBLICATION = "publication"
    REMOTE_ITEM = "remoteItem"
    ROOT = "root"
    SEARCH_RESULT = "searchResult"
    SHARED = "shared"
    SHAREPOINT_IDS = "sharepointIds"
    SIZE = "size"
    SPECIAL_FOLDER = "specialFolder"
    VIDEO = "video"
    WEB_DAV_URL = "webDavUrl"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    WORKBOOK = "workbook"
    ANALYTICS = "analytics"
    CHILDREN = "children"
    LIST_ITEM = "listItem"
    PERMISSIONS = "permissions"
    SUBSCRIPTIONS = "subscriptions"
    THUMBNAILS = "thumbnails"
    VERSIONS = "versions"


class Enum115(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets for a drive-item-like resource."""

    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    WORKBOOK = "workbook"
    ANALYTICS = "analytics"
    CHILDREN = "children"
    LIST_ITEM = "listItem"
    PERMISSIONS = "permissions"
    SUBSCRIPTIONS = "subscriptions"
    THUMBNAILS = "thumbnails"
    VERSIONS = "versions"


class Enum116(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated sort keys (each field plus its `... desc` variant) — likely OData $orderby values for a version-like resource."""

    ID = "id"
    ID_DESC = "id desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    PUBLICATION = "publication"
    PUBLICATION_DESC = "publication desc"


class Enum117(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated version-like property names — likely OData $select values; confirm against the operation that uses it."""

    ID = "id"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    PUBLICATION = "publication"
    FIELDS = "fields"


class Enum118(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets; confirm against the operation that uses it."""

    ASTERISK = "*"
    FIELDS = "fields"


class Enum119(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated version-like property names — likely OData $select values (same set as Enum117, for a different operation)."""

    ID = "id"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    PUBLICATION = "publication"
    FIELDS = "fields"


class Enum120(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated values including `*` — likely OData $expand targets (same set as Enum118, for a different operation)."""

    ASTERISK = "*"
    FIELDS = "fields"
class Enum121(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated sort keys (each field plus its `... desc` variant) — likely OData $orderby values for a subscription-like resource."""

    ID = "id"
    ID_DESC = "id desc"
    APPLICATION_ID = "applicationId"
    APPLICATION_ID_DESC = "applicationId desc"
    CHANGE_TYPE = "changeType"
    CHANGE_TYPE_DESC = "changeType desc"
    CLIENT_STATE = "clientState"
    CLIENT_STATE_DESC = "clientState desc"
    CREATOR_ID = "creatorId"
    CREATOR_ID_DESC = "creatorId desc"
    ENCRYPTION_CERTIFICATE = "encryptionCertificate"
    ENCRYPTION_CERTIFICATE_DESC = "encryptionCertificate desc"
    ENCRYPTION_CERTIFICATE_ID = "encryptionCertificateId"
    ENCRYPTION_CERTIFICATE_ID_DESC = "encryptionCertificateId desc"
    EXPIRATION_DATE_TIME = "expirationDateTime"
    EXPIRATION_DATE_TIME_DESC = "expirationDateTime desc"
    INCLUDE_RESOURCE_DATA = "includeResourceData"
    INCLUDE_RESOURCE_DATA_DESC = "includeResourceData desc"
    LATEST_SUPPORTED_TLS_VERSION = "latestSupportedTlsVersion"
    LATEST_SUPPORTED_TLS_VERSION_DESC = "latestSupportedTlsVersion desc"
    LIFECYCLE_NOTIFICATION_URL = "lifecycleNotificationUrl"
    LIFECYCLE_NOTIFICATION_URL_DESC = "lifecycleNotificationUrl desc"
    NOTIFICATION_URL = "notificationUrl"
    NOTIFICATION_URL_DESC = "notificationUrl desc"
    RESOURCE = "resource"
    RESOURCE_DESC = "resource desc"


class Enum122(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated subscription-like property names — likely OData $select values; confirm against the operation that uses it."""

    ID = "id"
    APPLICATION_ID = "applicationId"
    CHANGE_TYPE = "changeType"
    CLIENT_STATE = "clientState"
    CREATOR_ID = "creatorId"
    ENCRYPTION_CERTIFICATE = "encryptionCertificate"
    ENCRYPTION_CERTIFICATE_ID = "encryptionCertificateId"
    EXPIRATION_DATE_TIME = "expirationDateTime"
    INCLUDE_RESOURCE_DATA = "includeResourceData"
    LATEST_SUPPORTED_TLS_VERSION = "latestSupportedTlsVersion"
    LIFECYCLE_NOTIFICATION_URL = "lifecycleNotificationUrl"
    NOTIFICATION_URL = "notificationUrl"
    RESOURCE = "resource"


class Enum123(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated subscription-like property names — likely OData $select values (same set as Enum122, for a different operation)."""

    ID = "id"
    APPLICATION_ID = "applicationId"
    CHANGE_TYPE = "changeType"
    CLIENT_STATE = "clientState"
    CREATOR_ID = "creatorId"
    ENCRYPTION_CERTIFICATE = "encryptionCertificate"
    ENCRYPTION_CERTIFICATE_ID = "encryptionCertificateId"
    EXPIRATION_DATE_TIME = "expirationDateTime"
    INCLUDE_RESOURCE_DATA = "includeResourceData"
    LATEST_SUPPORTED_TLS_VERSION = "latestSupportedTlsVersion"
    LIFECYCLE_NOTIFICATION_URL = "lifecycleNotificationUrl"
    NOTIFICATION_URL = "notificationUrl"
    RESOURCE = "resource"
class Enum127(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
ID = "id"
ID_DESC = "id desc"
CREATED_BY = "createdBy"
CREATED_BY_DESC = "createdBy desc"
CREATED_DATE_TIME = "createdDateTime"
CREATED_DATE_TIME_DESC = "createdDateTime desc"
DESCRIPTION = "description"
DESCRIPTION_DESC = "description desc"
E_TAG = "eTag"
E_TAG_DESC = "eTag desc"
LAST_MODIFIED_BY = "lastModifiedBy"
LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
NAME = "name"
NAME_DESC = "name desc"
PARENT_REFERENCE = "parentReference"
PARENT_REFERENCE_DESC = "parentReference desc"
WEB_URL = "webUrl"
WEB_URL_DESC = "webUrl desc"
DISPLAY_NAME = "displayName"
DISPLAY_NAME_DESC = "displayName desc"
ERROR = "error"
ERROR_DESC = "error desc"
ROOT = "root"
ROOT_DESC = "root desc"
SHAREPOINT_IDS = "sharepointIds"
SHAREPOINT_IDS_DESC = "sharepointIds desc"
SITE_COLLECTION = "siteCollection"
SITE_COLLECTION_DESC = "siteCollection desc"
class Enum128(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site property and navigation names (presumably OData $select
    values — confirm against the client operations)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    ERROR = "error"
    ROOT = "root"
    SHAREPOINT_IDS = "sharepointIds"
    SITE_COLLECTION = "siteCollection"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum129(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site navigation/relationship names; ``"*"`` presumably means
    "all" (OData $expand-style values — confirm against the client operations)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum130(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site property and navigation names (presumably OData $select
    values; same member set as Enum128)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    ERROR = "error"
    ROOT = "root"
    SHAREPOINT_IDS = "sharepointIds"
    SITE_COLLECTION = "siteCollection"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum131(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site navigation/relationship names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum132(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable site property names; each field also has a
    ``"<field> desc"`` member for descending order (same member set as Enum127)."""
    ID = "id"
    ID_DESC = "id desc"
    CREATED_BY = "createdBy"
    CREATED_BY_DESC = "createdBy desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    E_TAG = "eTag"
    E_TAG_DESC = "eTag desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    PARENT_REFERENCE = "parentReference"
    PARENT_REFERENCE_DESC = "parentReference desc"
    WEB_URL = "webUrl"
    WEB_URL_DESC = "webUrl desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    ERROR = "error"
    ERROR_DESC = "error desc"
    ROOT = "root"
    ROOT_DESC = "root desc"
    SHAREPOINT_IDS = "sharepointIds"
    SHAREPOINT_IDS_DESC = "sharepointIds desc"
    SITE_COLLECTION = "siteCollection"
    SITE_COLLECTION_DESC = "siteCollection desc"
class Enum133(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site property and navigation names (presumably OData $select
    values; same member set as Enum128)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    ERROR = "error"
    ROOT = "root"
    SHAREPOINT_IDS = "sharepointIds"
    SITE_COLLECTION = "siteCollection"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum134(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site navigation/relationship names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum135(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable site property names; each field also has a
    ``"<field> desc"`` member for descending order (same member set as Enum127)."""
    ID = "id"
    ID_DESC = "id desc"
    CREATED_BY = "createdBy"
    CREATED_BY_DESC = "createdBy desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    E_TAG = "eTag"
    E_TAG_DESC = "eTag desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    PARENT_REFERENCE = "parentReference"
    PARENT_REFERENCE_DESC = "parentReference desc"
    WEB_URL = "webUrl"
    WEB_URL_DESC = "webUrl desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    ERROR = "error"
    ERROR_DESC = "error desc"
    ROOT = "root"
    ROOT_DESC = "root desc"
    SHAREPOINT_IDS = "sharepointIds"
    SHAREPOINT_IDS_DESC = "sharepointIds desc"
    SITE_COLLECTION = "siteCollection"
    SITE_COLLECTION_DESC = "siteCollection desc"
class Enum65(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site property and navigation names (presumably OData $select
    values; same member set as Enum128)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    ERROR = "error"
    ROOT = "root"
    SHAREPOINT_IDS = "sharepointIds"
    SITE_COLLECTION = "siteCollection"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum66(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site navigation/relationship names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum68(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site navigation/relationship names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Enum69(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of itemAnalytics property/navigation names (presumably OData
    $select values — confirm against the client operations)."""
    ID = "id"
    ALL_TIME = "allTime"
    ITEM_ACTIVITY_STATS = "itemActivityStats"
    LAST_SEVEN_DAYS = "lastSevenDays"
class Enum70(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of itemAnalytics navigation names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    ALL_TIME = "allTime"
    ITEM_ACTIVITY_STATS = "itemActivityStats"
    LAST_SEVEN_DAYS = "lastSevenDays"
class Enum71(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable columnDefinition property names; each field also has a
    ``"<field> desc"`` member for descending order (presumably OData $orderby values)."""
    ID = "id"
    ID_DESC = "id desc"
    BOOLEAN = "boolean"
    BOOLEAN_DESC = "boolean desc"
    CALCULATED = "calculated"
    CALCULATED_DESC = "calculated desc"
    CHOICE = "choice"
    CHOICE_DESC = "choice desc"
    COLUMN_GROUP = "columnGroup"
    COLUMN_GROUP_DESC = "columnGroup desc"
    CURRENCY = "currency"
    CURRENCY_DESC = "currency desc"
    DATE_TIME = "dateTime"
    DATE_TIME_DESC = "dateTime desc"
    DEFAULT_VALUE = "defaultValue"
    DEFAULT_VALUE_DESC = "defaultValue desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    ENFORCE_UNIQUE_VALUES = "enforceUniqueValues"
    ENFORCE_UNIQUE_VALUES_DESC = "enforceUniqueValues desc"
    GEOLOCATION = "geolocation"
    GEOLOCATION_DESC = "geolocation desc"
    HIDDEN = "hidden"
    HIDDEN_DESC = "hidden desc"
    INDEXED = "indexed"
    INDEXED_DESC = "indexed desc"
    LOOKUP = "lookup"
    LOOKUP_DESC = "lookup desc"
    NAME = "name"
    NAME_DESC = "name desc"
    NUMBER = "number"
    NUMBER_DESC = "number desc"
    PERSON_OR_GROUP = "personOrGroup"
    PERSON_OR_GROUP_DESC = "personOrGroup desc"
    READ_ONLY = "readOnly"
    READ_ONLY_DESC = "readOnly desc"
    REQUIRED = "required"
    REQUIRED_DESC = "required desc"
    TEXT = "text"
    TEXT_DESC = "text desc"
class Enum72(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of columnDefinition property names (presumably OData $select
    values — confirm against the client operations)."""
    ID = "id"
    BOOLEAN = "boolean"
    CALCULATED = "calculated"
    CHOICE = "choice"
    COLUMN_GROUP = "columnGroup"
    CURRENCY = "currency"
    DATE_TIME = "dateTime"
    DEFAULT_VALUE = "defaultValue"
    DESCRIPTION = "description"
    DISPLAY_NAME = "displayName"
    ENFORCE_UNIQUE_VALUES = "enforceUniqueValues"
    GEOLOCATION = "geolocation"
    HIDDEN = "hidden"
    INDEXED = "indexed"
    LOOKUP = "lookup"
    NAME = "name"
    NUMBER = "number"
    PERSON_OR_GROUP = "personOrGroup"
    READ_ONLY = "readOnly"
    REQUIRED = "required"
    TEXT = "text"
class Enum73(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of columnDefinition property names (same member set as
    Enum72)."""
    ID = "id"
    BOOLEAN = "boolean"
    CALCULATED = "calculated"
    CHOICE = "choice"
    COLUMN_GROUP = "columnGroup"
    CURRENCY = "currency"
    DATE_TIME = "dateTime"
    DEFAULT_VALUE = "defaultValue"
    DESCRIPTION = "description"
    DISPLAY_NAME = "displayName"
    ENFORCE_UNIQUE_VALUES = "enforceUniqueValues"
    GEOLOCATION = "geolocation"
    HIDDEN = "hidden"
    INDEXED = "indexed"
    LOOKUP = "lookup"
    NAME = "name"
    NUMBER = "number"
    PERSON_OR_GROUP = "personOrGroup"
    READ_ONLY = "readOnly"
    REQUIRED = "required"
    TEXT = "text"
class Enum74(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable contentType property names; each field also has a
    ``"<field> desc"`` member for descending order (presumably OData $orderby values)."""
    ID = "id"
    ID_DESC = "id desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    GROUP = "group"
    GROUP_DESC = "group desc"
    HIDDEN = "hidden"
    HIDDEN_DESC = "hidden desc"
    INHERITED_FROM = "inheritedFrom"
    INHERITED_FROM_DESC = "inheritedFrom desc"
    NAME = "name"
    NAME_DESC = "name desc"
    ORDER = "order"
    ORDER_DESC = "order desc"
    PARENT_ID = "parentId"
    PARENT_ID_DESC = "parentId desc"
    READ_ONLY = "readOnly"
    READ_ONLY_DESC = "readOnly desc"
    SEALED = "sealed"
    SEALED_DESC = "sealed desc"
class Enum75(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of contentType property and navigation names (presumably OData
    $select values — confirm against the client operations)."""
    ID = "id"
    DESCRIPTION = "description"
    GROUP = "group"
    HIDDEN = "hidden"
    INHERITED_FROM = "inheritedFrom"
    NAME = "name"
    ORDER = "order"
    PARENT_ID = "parentId"
    READ_ONLY = "readOnly"
    SEALED = "sealed"
    COLUMN_LINKS = "columnLinks"
class Enum76(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of contentType navigation names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    COLUMN_LINKS = "columnLinks"
class Enum77(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of contentType property and navigation names (same member set as
    Enum75)."""
    ID = "id"
    DESCRIPTION = "description"
    GROUP = "group"
    HIDDEN = "hidden"
    INHERITED_FROM = "inheritedFrom"
    NAME = "name"
    ORDER = "order"
    PARENT_ID = "parentId"
    READ_ONLY = "readOnly"
    SEALED = "sealed"
    COLUMN_LINKS = "columnLinks"
class Enum78(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of contentType navigation names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    COLUMN_LINKS = "columnLinks"
class Enum79(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable columnLink property names; each field also has a
    ``"<field> desc"`` member for descending order."""
    ID = "id"
    ID_DESC = "id desc"
    NAME = "name"
    NAME_DESC = "name desc"
class Enum80(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of columnLink property names (presumably OData $select
    values)."""
    ID = "id"
    NAME = "name"
class Enum81(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of columnLink property names (same member set as Enum80)."""
    ID = "id"
    NAME = "name"
class Enum82(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of drive property and navigation names (presumably OData $select
    values — confirm against the client operations)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DRIVE_TYPE = "driveType"
    OWNER = "owner"
    QUOTA = "quota"
    SHARE_POINT_IDS = "sharePointIds"
    SYSTEM = "system"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"
class Enum83(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of drive navigation names; ``"*"`` presumably means "all"
    (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"
class Enum84(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable drive property names; each field also has a
    ``"<field> desc"`` member for descending order (presumably OData $orderby values)."""
    ID = "id"
    ID_DESC = "id desc"
    CREATED_BY = "createdBy"
    CREATED_BY_DESC = "createdBy desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    E_TAG = "eTag"
    E_TAG_DESC = "eTag desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    PARENT_REFERENCE = "parentReference"
    PARENT_REFERENCE_DESC = "parentReference desc"
    WEB_URL = "webUrl"
    WEB_URL_DESC = "webUrl desc"
    DRIVE_TYPE = "driveType"
    DRIVE_TYPE_DESC = "driveType desc"
    OWNER = "owner"
    OWNER_DESC = "owner desc"
    QUOTA = "quota"
    QUOTA_DESC = "quota desc"
    SHARE_POINT_IDS = "sharePointIds"
    SHARE_POINT_IDS_DESC = "sharePointIds desc"
    SYSTEM = "system"
    SYSTEM_DESC = "system desc"
class Enum85(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of drive property and navigation names (same member set as
    Enum82)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DRIVE_TYPE = "driveType"
    OWNER = "owner"
    QUOTA = "quota"
    SHARE_POINT_IDS = "sharePointIds"
    SYSTEM = "system"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"
class Enum86(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of drive navigation names; ``"*"`` presumably means "all"
    (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"
class Enum87(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of drive property and navigation names (same member set as
    Enum82)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DRIVE_TYPE = "driveType"
    OWNER = "owner"
    QUOTA = "quota"
    SHARE_POINT_IDS = "sharePointIds"
    SYSTEM = "system"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"
class Enum88(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of drive navigation names; ``"*"`` presumably means "all"
    (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    FOLLOWING = "following"
    ITEMS = "items"
    LIST = "list"
    ROOT = "root"
    SPECIAL = "special"
class Enum89(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable list property names; each field also has a
    ``"<field> desc"`` member for descending order (presumably OData $orderby values)."""
    ID = "id"
    ID_DESC = "id desc"
    CREATED_BY = "createdBy"
    CREATED_BY_DESC = "createdBy desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    E_TAG = "eTag"
    E_TAG_DESC = "eTag desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    PARENT_REFERENCE = "parentReference"
    PARENT_REFERENCE_DESC = "parentReference desc"
    WEB_URL = "webUrl"
    WEB_URL_DESC = "webUrl desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    LIST = "list"
    LIST_DESC = "list desc"
    SHAREPOINT_IDS = "sharepointIds"
    SHAREPOINT_IDS_DESC = "sharepointIds desc"
    SYSTEM = "system"
    SYSTEM_DESC = "system desc"
class Enum90(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of list property and navigation names (presumably OData $select
    values — confirm against the client operations)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    LIST = "list"
    SHAREPOINT_IDS = "sharepointIds"
    SYSTEM = "system"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    ITEMS = "items"
    SUBSCRIPTIONS = "subscriptions"
class Enum91(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of list navigation names; ``"*"`` presumably means "all"
    (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    ITEMS = "items"
    SUBSCRIPTIONS = "subscriptions"
class Enum92(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of list property and navigation names (same member set as
    Enum90)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    LIST = "list"
    SHAREPOINT_IDS = "sharepointIds"
    SYSTEM = "system"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    ITEMS = "items"
    SUBSCRIPTIONS = "subscriptions"
class Enum93(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of list navigation names; ``"*"`` presumably means "all"
    (OData $expand-style values)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    ITEMS = "items"
    SUBSCRIPTIONS = "subscriptions"
class Enum94(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable columnDefinition property names; each field also has a
    ``"<field> desc"`` member for descending order (same member set as Enum71)."""
    ID = "id"
    ID_DESC = "id desc"
    BOOLEAN = "boolean"
    BOOLEAN_DESC = "boolean desc"
    CALCULATED = "calculated"
    CALCULATED_DESC = "calculated desc"
    CHOICE = "choice"
    CHOICE_DESC = "choice desc"
    COLUMN_GROUP = "columnGroup"
    COLUMN_GROUP_DESC = "columnGroup desc"
    CURRENCY = "currency"
    CURRENCY_DESC = "currency desc"
    DATE_TIME = "dateTime"
    DATE_TIME_DESC = "dateTime desc"
    DEFAULT_VALUE = "defaultValue"
    DEFAULT_VALUE_DESC = "defaultValue desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    ENFORCE_UNIQUE_VALUES = "enforceUniqueValues"
    ENFORCE_UNIQUE_VALUES_DESC = "enforceUniqueValues desc"
    GEOLOCATION = "geolocation"
    GEOLOCATION_DESC = "geolocation desc"
    HIDDEN = "hidden"
    HIDDEN_DESC = "hidden desc"
    INDEXED = "indexed"
    INDEXED_DESC = "indexed desc"
    LOOKUP = "lookup"
    LOOKUP_DESC = "lookup desc"
    NAME = "name"
    NAME_DESC = "name desc"
    NUMBER = "number"
    NUMBER_DESC = "number desc"
    PERSON_OR_GROUP = "personOrGroup"
    PERSON_OR_GROUP_DESC = "personOrGroup desc"
    READ_ONLY = "readOnly"
    READ_ONLY_DESC = "readOnly desc"
    REQUIRED = "required"
    REQUIRED_DESC = "required desc"
    TEXT = "text"
    TEXT_DESC = "text desc"
class Enum95(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of columnDefinition property names (same member set as
    Enum72)."""
    ID = "id"
    BOOLEAN = "boolean"
    CALCULATED = "calculated"
    CHOICE = "choice"
    COLUMN_GROUP = "columnGroup"
    CURRENCY = "currency"
    DATE_TIME = "dateTime"
    DEFAULT_VALUE = "defaultValue"
    DESCRIPTION = "description"
    DISPLAY_NAME = "displayName"
    ENFORCE_UNIQUE_VALUES = "enforceUniqueValues"
    GEOLOCATION = "geolocation"
    HIDDEN = "hidden"
    INDEXED = "indexed"
    LOOKUP = "lookup"
    NAME = "name"
    NUMBER = "number"
    PERSON_OR_GROUP = "personOrGroup"
    READ_ONLY = "readOnly"
    REQUIRED = "required"
    TEXT = "text"
class Enum96(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of columnDefinition property names (same member set as
    Enum72)."""
    ID = "id"
    BOOLEAN = "boolean"
    CALCULATED = "calculated"
    CHOICE = "choice"
    COLUMN_GROUP = "columnGroup"
    CURRENCY = "currency"
    DATE_TIME = "dateTime"
    DEFAULT_VALUE = "defaultValue"
    DESCRIPTION = "description"
    DISPLAY_NAME = "displayName"
    ENFORCE_UNIQUE_VALUES = "enforceUniqueValues"
    GEOLOCATION = "geolocation"
    HIDDEN = "hidden"
    INDEXED = "indexed"
    LOOKUP = "lookup"
    NAME = "name"
    NUMBER = "number"
    PERSON_OR_GROUP = "personOrGroup"
    READ_ONLY = "readOnly"
    REQUIRED = "required"
    TEXT = "text"
class Enum97(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable contentType property names; each field also has a
    ``"<field> desc"`` member for descending order (same member set as Enum74)."""
    ID = "id"
    ID_DESC = "id desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    GROUP = "group"
    GROUP_DESC = "group desc"
    HIDDEN = "hidden"
    HIDDEN_DESC = "hidden desc"
    INHERITED_FROM = "inheritedFrom"
    INHERITED_FROM_DESC = "inheritedFrom desc"
    NAME = "name"
    NAME_DESC = "name desc"
    ORDER = "order"
    ORDER_DESC = "order desc"
    PARENT_ID = "parentId"
    PARENT_ID_DESC = "parentId desc"
    READ_ONLY = "readOnly"
    READ_ONLY_DESC = "readOnly desc"
    SEALED = "sealed"
    SEALED_DESC = "sealed desc"
class Enum98(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of contentType property and navigation names (same member set as
    Enum75)."""
    ID = "id"
    DESCRIPTION = "description"
    GROUP = "group"
    HIDDEN = "hidden"
    INHERITED_FROM = "inheritedFrom"
    NAME = "name"
    ORDER = "order"
    PARENT_ID = "parentId"
    READ_ONLY = "readOnly"
    SEALED = "sealed"
    COLUMN_LINKS = "columnLinks"
class Enum99(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of contentType navigation names; ``"*"`` presumably means
    "all" (OData $expand-style values)."""
    ASTERISK = "*"
    COLUMN_LINKS = "columnLinks"
class Get1ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site property and navigation names (presumably OData $select
    values for a "get" operation; same member set as Enum128)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    ERROR = "error"
    ROOT = "root"
    SHAREPOINT_IDS = "sharepointIds"
    SITE_COLLECTION = "siteCollection"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Get2ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site property and navigation names (presumably OData $select
    values for a "get" operation; same member set as Enum128)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    ERROR = "error"
    ROOT = "root"
    SHAREPOINT_IDS = "sharepointIds"
    SITE_COLLECTION = "siteCollection"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Get3ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site navigation names; ``"*"`` presumably means "all"
    (OData $expand-style values for a "get" operation)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Get5ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable site property names; each field also has a
    ``"<field> desc"`` member for descending order (same member set as Enum127)."""
    ID = "id"
    ID_DESC = "id desc"
    CREATED_BY = "createdBy"
    CREATED_BY_DESC = "createdBy desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    E_TAG = "eTag"
    E_TAG_DESC = "eTag desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    PARENT_REFERENCE = "parentReference"
    PARENT_REFERENCE_DESC = "parentReference desc"
    WEB_URL = "webUrl"
    WEB_URL_DESC = "webUrl desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    ERROR = "error"
    ERROR_DESC = "error desc"
    ROOT = "root"
    ROOT_DESC = "root desc"
    SHAREPOINT_IDS = "sharepointIds"
    SHAREPOINT_IDS_DESC = "sharepointIds desc"
    SITE_COLLECTION = "siteCollection"
    SITE_COLLECTION_DESC = "siteCollection desc"
class Get6ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of sortable site property names; each field also has a
    ``"<field> desc"`` member for descending order (same member set as Enum127)."""
    ID = "id"
    ID_DESC = "id desc"
    CREATED_BY = "createdBy"
    CREATED_BY_DESC = "createdBy desc"
    CREATED_DATE_TIME = "createdDateTime"
    CREATED_DATE_TIME_DESC = "createdDateTime desc"
    DESCRIPTION = "description"
    DESCRIPTION_DESC = "description desc"
    E_TAG = "eTag"
    E_TAG_DESC = "eTag desc"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_BY_DESC = "lastModifiedBy desc"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    LAST_MODIFIED_DATE_TIME_DESC = "lastModifiedDateTime desc"
    NAME = "name"
    NAME_DESC = "name desc"
    PARENT_REFERENCE = "parentReference"
    PARENT_REFERENCE_DESC = "parentReference desc"
    WEB_URL = "webUrl"
    WEB_URL_DESC = "webUrl desc"
    DISPLAY_NAME = "displayName"
    DISPLAY_NAME_DESC = "displayName desc"
    ERROR = "error"
    ERROR_DESC = "error desc"
    ROOT = "root"
    ROOT_DESC = "root desc"
    SHAREPOINT_IDS = "sharepointIds"
    SHAREPOINT_IDS_DESC = "sharepointIds desc"
    SITE_COLLECTION = "siteCollection"
    SITE_COLLECTION_DESC = "siteCollection desc"
class Get7ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site property and navigation names (presumably OData $select
    values for a "get" operation; same member set as Enum128)."""
    ID = "id"
    CREATED_BY = "createdBy"
    CREATED_DATE_TIME = "createdDateTime"
    DESCRIPTION = "description"
    E_TAG = "eTag"
    LAST_MODIFIED_BY = "lastModifiedBy"
    LAST_MODIFIED_DATE_TIME = "lastModifiedDateTime"
    NAME = "name"
    PARENT_REFERENCE = "parentReference"
    WEB_URL = "webUrl"
    DISPLAY_NAME = "displayName"
    ERROR = "error"
    ROOT = "root"
    SHAREPOINT_IDS = "sharepointIds"
    SITE_COLLECTION = "siteCollection"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class Get8ItemsItem(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generated string enum of site navigation names; ``"*"`` presumably means "all"
    (OData $expand-style values for a "get" operation)."""
    ASTERISK = "*"
    CREATED_BY_USER = "createdByUser"
    LAST_MODIFIED_BY_USER = "lastModifiedByUser"
    ANALYTICS = "analytics"
    COLUMNS = "columns"
    CONTENT_TYPES = "contentTypes"
    DRIVE = "drive"
    DRIVES = "drives"
    ITEMS = "items"
    LISTS = "lists"
    SITES = "sites"
    ONENOTE = "onenote"
class MicrosoftGraphActionState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Lifecycle states of an action: none/pending/canceled/active/done/failed/notSupported."""
    NONE = "none"
    PENDING = "pending"
    CANCELED = "canceled"
    ACTIVE = "active"
    DONE = "done"
    FAILED = "failed"
    NOT_SUPPORTED = "notSupported"
class MicrosoftGraphAttendeeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Kind of meeting attendee: required, optional, or a resource (e.g. a room)."""
    REQUIRED = "required"
    OPTIONAL = "optional"
    RESOURCE = "resource"
class MicrosoftGraphAutomaticRepliesStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Automatic-replies (out-of-office) status: disabled, alwaysEnabled, or scheduled."""
    DISABLED = "disabled"
    ALWAYS_ENABLED = "alwaysEnabled"
    SCHEDULED = "scheduled"
class MicrosoftGraphBodyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Content type of a message/item body: plain text or HTML."""
    TEXT = "text"
    HTML = "html"
class MicrosoftGraphCalendarColor(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Theme color choices for a calendar; ``auto`` defers to the client default."""
    LIGHT_BLUE = "lightBlue"
    LIGHT_GREEN = "lightGreen"
    AUTO = "auto"
    LIGHT_ORANGE = "lightOrange"
    LIGHT_GRAY = "lightGray"
    LIGHT_YELLOW = "lightYellow"
    LIGHT_TEAL = "lightTeal"
    LIGHT_PINK = "lightPink"
    LIGHT_BROWN = "lightBrown"
    LIGHT_RED = "lightRed"
    MAX_COLOR = "maxColor"
class MicrosoftGraphCalendarRoleType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Permission roles on a shared calendar, from none up to delegate access (with or
    without visibility of private events) and custom."""
    NONE = "none"
    FREE_BUSY_READ = "freeBusyRead"
    LIMITED_READ = "limitedRead"
    READ = "read"
    WRITE = "write"
    DELEGATE_WITHOUT_PRIVATE_EVENT_ACCESS = "delegateWithoutPrivateEventAccess"
    DELEGATE_WITH_PRIVATE_EVENT_ACCESS = "delegateWithPrivateEventAccess"
    CUSTOM = "custom"
class MicrosoftGraphCategoryColor(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Preset color slots (preset0–preset24, plus none) assignable to an Outlook category."""
    PRESET0 = "preset0"
    PRESET1 = "preset1"
    NONE = "none"
    PRESET2 = "preset2"
    PRESET3 = "preset3"
    PRESET4 = "preset4"
    PRESET5 = "preset5"
    PRESET6 = "preset6"
    PRESET7 = "preset7"
    PRESET8 = "preset8"
    PRESET9 = "preset9"
    PRESET10 = "preset10"
    PRESET11 = "preset11"
    PRESET12 = "preset12"
    PRESET13 = "preset13"
    PRESET14 = "preset14"
    PRESET15 = "preset15"
    PRESET16 = "preset16"
    PRESET17 = "preset17"
    PRESET18 = "preset18"
    PRESET19 = "preset19"
    PRESET20 = "preset20"
    PRESET21 = "preset21"
    PRESET22 = "preset22"
    PRESET23 = "preset23"
    PRESET24 = "preset24"
class MicrosoftGraphChannelMembershipType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Membership type of a Teams channel: standard or private; ``unknownFutureValue`` is the
    evolvable-enum sentinel for values added later by the service."""
    STANDARD = "standard"
    PRIVATE = "private"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphChatMessageImportance(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Importance of a chat message: normal, high, or urgent; ``unknownFutureValue`` is the
    evolvable-enum sentinel."""
    NORMAL = "normal"
    HIGH = "high"
    URGENT = "urgent"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphChatMessagePolicyViolationDlpActionTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """DLP actions applied to a chat message that violated policy: notify the sender and/or
    block access (internally or externally)."""
    NONE = "none"
    NOTIFY_SENDER = "notifySender"
    BLOCK_ACCESS = "blockAccess"
    BLOCK_ACCESS_EXTERNAL = "blockAccessExternal"
class MicrosoftGraphChatMessagePolicyViolationUserActionTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Actions a user may take on a policy-violating chat message: override the block or
    report a false positive."""
    NONE = "none"
    OVERRIDE = "override"
    REPORT_FALSE_POSITIVE = "reportFalsePositive"
class MicrosoftGraphChatMessagePolicyViolationVerdictDetailsTypes(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """What the policy verdict permits the sender to do: report a false positive and/or
    override, with or without a justification."""
    NONE = "none"
    ALLOW_FALSE_POSITIVE_OVERRIDE = "allowFalsePositiveOverride"
    ALLOW_OVERRIDE_WITHOUT_JUSTIFICATION = "allowOverrideWithoutJustification"
    ALLOW_OVERRIDE_WITH_JUSTIFICATION = "allowOverrideWithJustification"
class MicrosoftGraphChatMessageType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Kind of a chat message: a user message, a chat event, or a typing indicator;
    ``unknownFutureValue`` is the evolvable-enum sentinel."""
    MESSAGE = "message"
    CHAT_EVENT = "chatEvent"
    TYPING = "typing"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphComplianceState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Device compliance state as reported by device management (Intune/ConfigManager)."""
    UNKNOWN = "unknown"
    COMPLIANT = "compliant"
    NONCOMPLIANT = "noncompliant"
    CONFLICT = "conflict"
    ERROR = "error"
    IN_GRACE_PERIOD = "inGracePeriod"
    CONFIG_MANAGER = "configManager"
class MicrosoftGraphComplianceStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Per-setting/policy compliance status, including remediated, conflict, and
    notAssigned outcomes."""
    UNKNOWN = "unknown"
    NOT_APPLICABLE = "notApplicable"
    COMPLIANT = "compliant"
    REMEDIATED = "remediated"
    NON_COMPLIANT = "nonCompliant"
    ERROR = "error"
    CONFLICT = "conflict"
    NOT_ASSIGNED = "notAssigned"
class MicrosoftGraphDayOfWeek(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Days of the week, Sunday through Saturday (camelCase string values)."""
    SUNDAY = "sunday"
    MONDAY = "monday"
    TUESDAY = "tuesday"
    WEDNESDAY = "wednesday"
    THURSDAY = "thursday"
    FRIDAY = "friday"
    SATURDAY = "saturday"
class MicrosoftGraphDelegateMeetingMessageDeliveryOptions(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """How meeting messages are routed between a mailbox principal and their delegate(s)."""
    SEND_TO_DELEGATE_AND_INFORMATION_TO_PRINCIPAL = "sendToDelegateAndInformationToPrincipal"
    SEND_TO_DELEGATE_AND_PRINCIPAL = "sendToDelegateAndPrincipal"
    SEND_TO_DELEGATE_ONLY = "sendToDelegateOnly"
class MicrosoftGraphDeviceEnrollmentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """How a managed device was enrolled (user, DEM, Apple bulk, various Windows flows)."""
    UNKNOWN = "unknown"
    USER_ENROLLMENT = "userEnrollment"
    DEVICE_ENROLLMENT_MANAGER = "deviceEnrollmentManager"
    APPLE_BULK_WITH_USER = "appleBulkWithUser"
    APPLE_BULK_WITHOUT_USER = "appleBulkWithoutUser"
    WINDOWS_AZURE_AD_JOIN = "windowsAzureADJoin"
    WINDOWS_BULK_USERLESS = "windowsBulkUserless"
    WINDOWS_AUTO_ENROLLMENT = "windowsAutoEnrollment"
    WINDOWS_BULK_AZURE_DOMAIN_JOIN = "windowsBulkAzureDomainJoin"
    WINDOWS_CO_MANAGEMENT = "windowsCoManagement"
class MicrosoftGraphDeviceManagementExchangeAccessState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """A device's Exchange access state: allowed, blocked, or quarantined."""
    NONE = "none"
    UNKNOWN = "unknown"
    ALLOWED = "allowed"
    BLOCKED = "blocked"
    QUARANTINED = "quarantined"
class MicrosoftGraphDeviceManagementExchangeAccessStateReason(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Why a device has its current Exchange access state (rule type, compliance,
    enrollment, location, MFA, Azure AD policy, credentials)."""
    NONE = "none"
    UNKNOWN = "unknown"
    EXCHANGE_GLOBAL_RULE = "exchangeGlobalRule"
    EXCHANGE_INDIVIDUAL_RULE = "exchangeIndividualRule"
    EXCHANGE_DEVICE_RULE = "exchangeDeviceRule"
    EXCHANGE_UPGRADE = "exchangeUpgrade"
    EXCHANGE_MAILBOX_POLICY = "exchangeMailboxPolicy"
    OTHER = "other"
    COMPLIANT = "compliant"
    NOT_COMPLIANT = "notCompliant"
    NOT_ENROLLED = "notEnrolled"
    UNKNOWN_LOCATION = "unknownLocation"
    MFA_REQUIRED = "mfaRequired"
    AZURE_AD_BLOCK_DUE_TO_ACCESS_POLICY = "azureADBlockDueToAccessPolicy"
    COMPROMISED_PASSWORD = "compromisedPassword"
    DEVICE_NOT_KNOWN_WITH_MANAGED_APP = "deviceNotKnownWithManagedApp"
class MicrosoftGraphDeviceRegistrationState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Registration state of a managed device (registered, revoked, pending states, etc.)."""
    NOT_REGISTERED = "notRegistered"
    REGISTERED = "registered"
    REVOKED = "revoked"
    KEY_CONFLICT = "keyConflict"
    APPROVAL_PENDING = "approvalPending"
    CERTIFICATE_RESET = "certificateReset"
    NOT_REGISTERED_PENDING_ENROLLMENT = "notRegisteredPendingEnrollment"
    UNKNOWN = "unknown"
class MicrosoftGraphEventType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Calendar event type, e.g. a single instance vs. one occurrence of a series (case-insensitive string enum)."""
    SINGLE_INSTANCE = "singleInstance"
    OCCURRENCE = "occurrence"
    EXCEPTION = "exception"
    SERIES_MASTER = "seriesMaster"
class MicrosoftGraphExternalAudienceScope(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Which external audience is addressed, e.g. for automatic replies (case-insensitive string enum)."""
    NONE = "none"
    CONTACTS_ONLY = "contactsOnly"
    ALL = "all"
class MicrosoftGraphFollowupFlagStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Follow-up flag status of an item (case-insensitive string enum)."""
    NOT_FLAGGED = "notFlagged"
    COMPLETE = "complete"
    FLAGGED = "flagged"
class MicrosoftGraphFreeBusyStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Free/busy availability status (case-insensitive string enum)."""
    FREE = "free"
    TENTATIVE = "tentative"
    UNKNOWN = "unknown"
    BUSY = "busy"
    OOF = "oof"
    WORKING_ELSEWHERE = "workingElsewhere"
class MicrosoftGraphGiphyRatingType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Giphy content-rating setting (case-insensitive string enum)."""
    STRICT = "strict"
    MODERATE = "moderate"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphImportance(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Importance level of an item (case-insensitive string enum)."""
    LOW = "low"
    NORMAL = "normal"
    HIGH = "high"
class MicrosoftGraphInferenceClassificationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Inference classification of a message: focused or other (case-insensitive string enum)."""
    FOCUSED = "focused"
    OTHER = "other"
class MicrosoftGraphLocationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Kind of location a place represents (case-insensitive string enum)."""
    DEFAULT = "default"
    CONFERENCE_ROOM = "conferenceRoom"
    HOME_ADDRESS = "homeAddress"
    BUSINESS_ADDRESS = "businessAddress"
    GEO_COORDINATES = "geoCoordinates"
    STREET_ADDRESS = "streetAddress"
    HOTEL = "hotel"
    RESTAURANT = "restaurant"
    LOCAL_BUSINESS = "localBusiness"
    POSTAL_ADDRESS = "postalAddress"
class MicrosoftGraphLocationUniqueIdType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Source that provided a location's unique identifier (case-insensitive string enum)."""
    UNKNOWN = "unknown"
    LOCATION_STORE = "locationStore"
    DIRECTORY = "directory"
    PRIVATE = "private"
    BING = "bing"
class MicrosoftGraphManagedAppFlaggedReason(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Reason a managed-app registration was flagged (case-insensitive string enum)."""
    NONE = "none"
    ROOTED_DEVICE = "rootedDevice"
class MicrosoftGraphManagedDeviceOwnerType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Ownership category of a managed device: company-owned or personal (case-insensitive string enum)."""
    UNKNOWN = "unknown"
    COMPANY = "company"
    PERSONAL = "personal"
class MicrosoftGraphManagedDevicePartnerReportedHealthState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Device health state as reported by a management partner (case-insensitive string enum)."""
    UNKNOWN = "unknown"
    ACTIVATED = "activated"
    DEACTIVATED = "deactivated"
    SECURED = "secured"
    LOW_SEVERITY = "lowSeverity"
    MEDIUM_SEVERITY = "mediumSeverity"
    HIGH_SEVERITY = "highSeverity"
    UNRESPONSIVE = "unresponsive"
    COMPROMISED = "compromised"
    MISCONFIGURED = "misconfigured"
class MicrosoftGraphManagementAgentType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Agent/channel through which a device is managed (case-insensitive string enum)."""
    EAS = "eas"
    MDM = "mdm"
    EAS_MDM = "easMdm"
    INTUNE_CLIENT = "intuneClient"
    EAS_INTUNE_CLIENT = "easIntuneClient"
    CONFIGURATION_MANAGER_CLIENT = "configurationManagerClient"
    CONFIGURATION_MANAGER_CLIENT_MDM = "configurationManagerClientMdm"
    CONFIGURATION_MANAGER_CLIENT_MDM_EAS = "configurationManagerClientMdmEas"
    UNKNOWN = "unknown"
    JAMF = "jamf"
    GOOGLE_CLOUD_DEVICE_POLICY_CONTROLLER = "googleCloudDevicePolicyController"
class MicrosoftGraphMessageActionFlag(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Requested follow-up action on a message (case-insensitive string enum)."""
    ANY = "any"
    CALL = "call"
    DO_NOT_FORWARD = "doNotForward"
    FOLLOW_UP = "followUp"
    FYI = "fyi"
    FORWARD = "forward"
    NO_RESPONSE_NECESSARY = "noResponseNecessary"
    READ = "read"
    REPLY = "reply"
    REPLY_TO_ALL = "replyToAll"
    REVIEW = "review"
class MicrosoftGraphOnenotePatchActionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """OneNote page patch action; note the wire values are capitalized (case-insensitive string enum)."""
    REPLACE = "Replace"
    APPEND = "Append"
    DELETE = "Delete"
    INSERT = "Insert"
    PREPEND = "Prepend"
class MicrosoftGraphOnenotePatchInsertPosition(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Insert position for a OneNote patch operation (case-insensitive string enum)."""
    AFTER = "After"
    BEFORE = "Before"
class MicrosoftGraphOnenoteSourceService(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Service hosting a OneNote resource (case-insensitive string enum)."""
    UNKNOWN = "Unknown"
    ONE_DRIVE = "OneDrive"
    ONE_DRIVE_FOR_BUSINESS = "OneDriveForBusiness"
    ON_PREM_ONE_DRIVE_FOR_BUSINESS = "OnPremOneDriveForBusiness"
class MicrosoftGraphOnenoteUserRole(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """User's role on a OneNote resource (case-insensitive string enum)."""
    OWNER = "Owner"
    CONTRIBUTOR = "Contributor"
    NONE = "None"
    READER = "Reader"
class MicrosoftGraphOnlineMeetingProviderType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Online meeting provider for an event (case-insensitive string enum)."""
    UNKNOWN = "unknown"
    SKYPE_FOR_BUSINESS = "skypeForBusiness"
    SKYPE_FOR_CONSUMER = "skypeForConsumer"
    TEAMS_FOR_BUSINESS = "teamsForBusiness"
class MicrosoftGraphOperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Lifecycle status of a long-running operation (case-insensitive string enum)."""
    NOT_STARTED = "NotStarted"
    RUNNING = "Running"
    COMPLETED = "Completed"
    FAILED = "Failed"
class MicrosoftGraphPhoneType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Category of a phone number (case-insensitive string enum)."""
    HOME = "home"
    BUSINESS = "business"
    MOBILE = "mobile"
    OTHER = "other"
    ASSISTANT = "assistant"
    HOME_FAX = "homeFax"
    BUSINESS_FAX = "businessFax"
    OTHER_FAX = "otherFax"
    PAGER = "pager"
    RADIO = "radio"
class MicrosoftGraphPlannerPreviewType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Preview shown on a Planner task card (case-insensitive string enum)."""
    AUTOMATIC = "automatic"
    NO_PREVIEW = "noPreview"
    CHECKLIST = "checklist"
    DESCRIPTION = "description"
    REFERENCE = "reference"
class MicrosoftGraphPolicyPlatformType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Device platform a policy applies to (case-insensitive string enum)."""
    ANDROID = "android"
    I_OS = "iOS"
    MAC_OS = "macOS"
    WINDOWS_PHONE81 = "windowsPhone81"
    WINDOWS81_AND_LATER = "windows81AndLater"
    WINDOWS10_AND_LATER = "windows10AndLater"
    ANDROID_WORK_PROFILE = "androidWorkProfile"
    ALL = "all"
class MicrosoftGraphRecurrencePatternType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Recurrence pattern of a repeating event (case-insensitive string enum)."""
    DAILY = "daily"
    WEEKLY = "weekly"
    ABSOLUTE_MONTHLY = "absoluteMonthly"
    RELATIVE_MONTHLY = "relativeMonthly"
    ABSOLUTE_YEARLY = "absoluteYearly"
    RELATIVE_YEARLY = "relativeYearly"
class MicrosoftGraphRecurrenceRangeType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """How a recurrence range ends: on a date, never, or after N occurrences (case-insensitive string enum)."""
    END_DATE = "endDate"
    NO_END = "noEnd"
    NUMBERED = "numbered"
class MicrosoftGraphResponseType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Attendee's response to a meeting invitation (case-insensitive string enum)."""
    NONE = "none"
    ORGANIZER = "organizer"
    TENTATIVELY_ACCEPTED = "tentativelyAccepted"
    ACCEPTED = "accepted"
    DECLINED = "declined"
    NOT_RESPONDED = "notResponded"
class MicrosoftGraphScheduleChangeRequestActor(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Party acting on a schedule-change request (case-insensitive string enum)."""
    SENDER = "sender"
    RECIPIENT = "recipient"
    MANAGER = "manager"
    SYSTEM = "system"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphScheduleChangeState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Approval state of a schedule-change request (case-insensitive string enum)."""
    PENDING = "pending"
    APPROVED = "approved"
    DECLINED = "declined"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphScheduleEntityTheme(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Color theme applied to a schedule entity (case-insensitive string enum)."""
    WHITE = "white"
    BLUE = "blue"
    GREEN = "green"
    PURPLE = "purple"
    PINK = "pink"
    YELLOW = "yellow"
    GRAY = "gray"
    DARK_BLUE = "darkBlue"
    DARK_GREEN = "darkGreen"
    DARK_PURPLE = "darkPurple"
    DARK_PINK = "darkPink"
    DARK_YELLOW = "darkYellow"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphSelectionLikelihoodInfo(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Selection likelihood hint (case-insensitive string enum)."""
    NOT_SPECIFIED = "notSpecified"
    HIGH = "high"
class MicrosoftGraphSensitivity(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Sensitivity level of an item (case-insensitive string enum)."""
    NORMAL = "normal"
    PERSONAL = "personal"
    PRIVATE = "private"
    CONFIDENTIAL = "confidential"
class MicrosoftGraphStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Generic entity status (case-insensitive string enum)."""
    ACTIVE = "active"
    UPDATED = "updated"
    DELETED = "deleted"
    IGNORED = "ignored"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphTeamsAppDistributionMethod(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """How a Teams app is distributed (case-insensitive string enum)."""
    STORE = "store"
    ORGANIZATION = "organization"
    SIDELOADED = "sideloaded"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphTeamsAsyncOperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Status of a Teams asynchronous operation (case-insensitive string enum)."""
    INVALID = "invalid"
    NOT_STARTED = "notStarted"
    IN_PROGRESS = "inProgress"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphTeamsAsyncOperationType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Kind of Teams asynchronous operation (case-insensitive string enum)."""
    INVALID = "invalid"
    CLONE_TEAM = "cloneTeam"
    ARCHIVE_TEAM = "archiveTeam"
    UNARCHIVE_TEAM = "unarchiveTeam"
    CREATE_TEAM = "createTeam"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphTeamSpecialization(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Specialization of a team, e.g. education or healthcare variants (case-insensitive string enum)."""
    NONE = "none"
    EDUCATION_STANDARD = "educationStandard"
    EDUCATION_CLASS = "educationClass"
    EDUCATION_PROFESSIONAL_LEARNING_COMMUNITY = "educationProfessionalLearningCommunity"
    EDUCATION_STAFF = "educationStaff"
    HEALTHCARE_STANDARD = "healthcareStandard"
    HEALTHCARE_CARE_COORDINATION = "healthcareCareCoordination"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphTeamVisibilityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Visibility of a team (case-insensitive string enum)."""
    PRIVATE = "private"
    PUBLIC = "public"
    HIDDEN_MEMBERSHIP = "hiddenMembership"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphTimeOffReasonIconType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Icon displayed for a time-off reason (case-insensitive string enum)."""
    NONE = "none"
    CAR = "car"
    CALENDAR = "calendar"
    RUNNING = "running"
    PLANE = "plane"
    FIRST_AID = "firstAid"
    DOCTOR = "doctor"
    NOT_WORKING = "notWorking"
    CLOCK = "clock"
    JURY_DUTY = "juryDuty"
    GLOBE = "globe"
    CUP = "cup"
    PHONE = "phone"
    WEATHER = "weather"
    UMBRELLA = "umbrella"
    PIGGY_BANK = "piggyBank"
    DOG = "dog"
    CAKE = "cake"
    TRAFFIC_CONE = "trafficCone"
    PIN = "pin"
    SUNNY = "sunny"
    UNKNOWN_FUTURE_VALUE = "unknownFutureValue"
class MicrosoftGraphWebsiteType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Category of a website associated with a contact (case-insensitive string enum)."""
    OTHER = "other"
    HOME = "home"
    WORK = "work"
    BLOG = "blog"
    PROFILE = "profile"
class MicrosoftGraphWeekIndex(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Which week of the month a relative recurrence falls on (case-insensitive string enum)."""
    FIRST = "first"
    SECOND = "second"
    THIRD = "third"
    FOURTH = "fourth"
    LAST = "last"
class MicrosoftGraphWorkbookOperationStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
    """Status of an Excel workbook operation.

    A ``str``-valued, case-insensitive enum (via ``_CaseInsensitiveEnumMeta``).
    """

    NOT_STARTED = "notStarted"
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
| [
"six.with_metaclass"
] | [((1154, 1205), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (1168, 1205), False, 'from six import with_metaclass\n'), ((1496, 1547), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (1510, 1547), False, 'from six import with_metaclass\n'), ((1618, 1669), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (1632, 1669), False, 'from six import with_metaclass\n'), ((1772, 1823), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (1786, 1823), False, 'from six import with_metaclass\n'), ((1874, 1925), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (1888, 1925), False, 'from six import with_metaclass\n'), ((1976, 2027), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (1990, 2027), False, 'from six import with_metaclass\n'), ((2681, 2732), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (2695, 2732), False, 'from six import with_metaclass\n'), ((2965, 3016), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (2979, 3016), False, 'from six import with_metaclass\n'), ((3917, 3968), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (3931, 3968), False, 'from six import with_metaclass\n'), ((4560, 4611), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), 
'(_CaseInsensitiveEnumMeta, str, Enum)\n', (4574, 4611), False, 'from six import with_metaclass\n'), ((4841, 4892), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (4855, 4892), False, 'from six import with_metaclass\n'), ((5484, 5535), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (5498, 5535), False, 'from six import with_metaclass\n'), ((5765, 5816), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (5779, 5816), False, 'from six import with_metaclass\n'), ((5958, 6009), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (5972, 6009), False, 'from six import with_metaclass\n'), ((6156, 6207), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (6170, 6207), False, 'from six import with_metaclass\n'), ((7437, 7488), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (7451, 7488), False, 'from six import with_metaclass\n'), ((7844, 7895), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (7858, 7895), False, 'from six import with_metaclass\n'), ((8232, 8283), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (8246, 8283), False, 'from six import with_metaclass\n'), ((8463, 8514), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (8477, 8514), False, 'from six import with_metaclass\n'), ((8574, 8625), 
'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (8588, 8625), False, 'from six import with_metaclass\n'), ((8805, 8856), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (8819, 8856), False, 'from six import with_metaclass\n'), ((8916, 8967), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (8930, 8967), False, 'from six import with_metaclass\n'), ((10202, 10253), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (10216, 10253), False, 'from six import with_metaclass\n'), ((10815, 10866), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (10829, 10866), False, 'from six import with_metaclass\n'), ((11428, 11479), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (11442, 11479), False, 'from six import with_metaclass\n'), ((12564, 12615), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (12578, 12615), False, 'from six import with_metaclass\n'), ((13392, 13443), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (13406, 13443), False, 'from six import with_metaclass\n'), ((13781, 13832), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (13795, 13832), False, 'from six import with_metaclass\n'), ((14609, 14660), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), 
'(_CaseInsensitiveEnumMeta, str, Enum)\n', (14623, 14660), False, 'from six import with_metaclass\n'), ((14998, 15049), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (15012, 15049), False, 'from six import with_metaclass\n'), ((16134, 16185), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (16148, 16185), False, 'from six import with_metaclass\n'), ((16962, 17013), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (16976, 17013), False, 'from six import with_metaclass\n'), ((17351, 17402), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (17365, 17402), False, 'from six import with_metaclass\n'), ((18486, 18537), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (18500, 18537), False, 'from six import with_metaclass\n'), ((19313, 19364), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (19327, 19364), False, 'from six import with_metaclass\n'), ((19701, 19752), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (19715, 19752), False, 'from six import with_metaclass\n'), ((20089, 20140), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (20103, 20140), False, 'from six import with_metaclass\n'), ((20281, 20332), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (20295, 20332), False, 'from six import 
with_metaclass\n'), ((20478, 20529), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (20492, 20529), False, 'from six import with_metaclass\n'), ((21907, 21958), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (21921, 21958), False, 'from six import with_metaclass\n'), ((22551, 22602), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (22565, 22602), False, 'from six import with_metaclass\n'), ((23195, 23246), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (23209, 23246), False, 'from six import with_metaclass\n'), ((23842, 23893), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (23856, 23893), False, 'from six import with_metaclass\n'), ((24183, 24234), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (24197, 24234), False, 'from six import with_metaclass\n'), ((24304, 24355), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (24318, 24355), False, 'from six import with_metaclass\n'), ((24645, 24696), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (24659, 24696), False, 'from six import with_metaclass\n'), ((24766, 24817), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (24780, 24817), False, 'from six import with_metaclass\n'), ((24919, 24970), 'six.with_metaclass', 'with_metaclass', 
(['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (24933, 24970), False, 'from six import with_metaclass\n'), ((25020, 25071), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (25034, 25071), False, 'from six import with_metaclass\n'), ((25121, 25172), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (25135, 25172), False, 'from six import with_metaclass\n'), ((25825, 25876), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (25839, 25876), False, 'from six import with_metaclass\n'), ((26108, 26159), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (26122, 26159), False, 'from six import with_metaclass\n'), ((27207, 27258), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (27221, 27258), False, 'from six import with_metaclass\n'), ((27911, 27962), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (27925, 27962), False, 'from six import with_metaclass\n'), ((28194, 28245), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (28208, 28245), False, 'from six import with_metaclass\n'), ((28898, 28949), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (28912, 28949), False, 'from six import with_metaclass\n'), ((29181, 29232), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', 
(29195, 29232), False, 'from six import with_metaclass\n'), ((30232, 30283), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (30246, 30283), False, 'from six import with_metaclass\n'), ((30944, 30995), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (30958, 30995), False, 'from six import with_metaclass\n'), ((31254, 31305), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (31268, 31305), False, 'from six import with_metaclass\n'), ((31966, 32017), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (31980, 32017), False, 'from six import with_metaclass\n'), ((32276, 32327), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (32290, 32327), False, 'from six import with_metaclass\n'), ((33705, 33756), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (33719, 33756), False, 'from six import with_metaclass\n'), ((34349, 34400), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (34363, 34400), False, 'from six import with_metaclass\n'), ((34993, 35044), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (35007, 35044), False, 'from six import with_metaclass\n'), ((35640, 35691), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (35654, 35691), False, 'from six import with_metaclass\n'), ((35981, 36032), 'six.with_metaclass', 
'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (35995, 36032), False, 'from six import with_metaclass\n'), ((36109, 36160), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (36123, 36160), False, 'from six import with_metaclass\n'), ((36943, 36994), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (36957, 36994), False, 'from six import with_metaclass\n'), ((37777, 37828), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (37791, 37828), False, 'from six import with_metaclass\n'), ((38172, 38223), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (38186, 38223), False, 'from six import with_metaclass\n'), ((39314, 39365), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (39328, 39365), False, 'from six import with_metaclass\n'), ((40456, 40507), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (40470, 40507), False, 'from six import with_metaclass\n'), ((41290, 41341), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (41304, 41341), False, 'from six import with_metaclass\n'), ((41697, 41748), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (41711, 41748), False, 'from six import with_metaclass\n'), ((41951, 42002), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, 
str, Enum)\n', (41965, 42002), False, 'from six import with_metaclass\n'), ((42128, 42179), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (42142, 42179), False, 'from six import with_metaclass\n'), ((42304, 42355), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (42318, 42355), False, 'from six import with_metaclass\n'), ((42430, 42481), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (42444, 42481), False, 'from six import with_metaclass\n'), ((42839, 42890), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (42853, 42890), False, 'from six import with_metaclass\n'), ((43230, 43281), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (43244, 43281), False, 'from six import with_metaclass\n'), ((43976, 44027), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (43990, 44027), False, 'from six import with_metaclass\n'), ((44172, 44223), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (44186, 44223), False, 'from six import with_metaclass\n'), ((44399, 44450), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (44413, 44450), False, 'from six import with_metaclass\n'), ((44653, 44704), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (44667, 44704), False, 'from six import with_metaclass\n'), ((44869, 44920), 
'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (44883, 44920), False, 'from six import with_metaclass\n'), ((45196, 45247), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (45210, 45247), False, 'from six import with_metaclass\n'), ((45411, 45462), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (45425, 45462), False, 'from six import with_metaclass\n'), ((45711, 45762), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (45725, 45762), False, 'from six import with_metaclass\n'), ((46030, 46081), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (46044, 46081), False, 'from six import with_metaclass\n'), ((46314, 46365), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (46328, 46365), False, 'from six import with_metaclass\n'), ((46620, 46671), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (46634, 46671), False, 'from six import with_metaclass\n'), ((47222, 47273), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (47236, 47273), False, 'from six import with_metaclass\n'), ((47462, 47513), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (47476, 47513), False, 'from six import with_metaclass\n'), ((48235, 48286), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), 
'(_CaseInsensitiveEnumMeta, str, Enum)\n', (48249, 48286), False, 'from six import with_metaclass\n'), ((48626, 48677), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (48640, 48677), False, 'from six import with_metaclass\n'), ((48856, 48907), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (48870, 48907), False, 'from six import with_metaclass\n'), ((49020, 49071), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (49034, 49071), False, 'from six import with_metaclass\n'), ((49192, 49243), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (49206, 49243), False, 'from six import with_metaclass\n'), ((49431, 49482), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (49445, 49482), False, 'from six import with_metaclass\n'), ((49614, 49665), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (49628, 49665), False, 'from six import with_metaclass\n'), ((49774, 49825), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (49788, 49825), False, 'from six import with_metaclass\n'), ((49907, 49958), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (49921, 49958), False, 'from six import with_metaclass\n'), ((50341, 50392), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (50355, 50392), False, 'from six import 
with_metaclass\n'), ((50572, 50623), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (50586, 50623), False, 'from six import with_metaclass\n'), ((50724, 50775), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (50738, 50775), False, 'from six import with_metaclass\n'), ((50914, 50965), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (50928, 50965), False, 'from six import with_metaclass\n'), ((51327, 51378), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (51341, 51378), False, 'from six import with_metaclass\n'), ((51888, 51939), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (51902, 51939), False, 'from six import with_metaclass\n'), ((52266, 52317), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (52280, 52317), False, 'from six import with_metaclass\n'), ((52483, 52534), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (52497, 52534), False, 'from six import with_metaclass\n'), ((52622, 52673), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (52636, 52673), False, 'from six import with_metaclass\n'), ((52881, 52932), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (52895, 52932), False, 'from six import with_metaclass\n'), ((53075, 53126), 'six.with_metaclass', 'with_metaclass', 
(['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (53089, 53126), False, 'from six import with_metaclass\n'), ((53323, 53374), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (53337, 53374), False, 'from six import with_metaclass\n'), ((53514, 53565), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (53528, 53565), False, 'from six import with_metaclass\n'), ((53848, 53899), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (53862, 53899), False, 'from six import with_metaclass\n'), ((54088, 54139), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (54102, 54139), False, 'from six import with_metaclass\n'), ((54443, 54494), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (54457, 54494), False, 'from six import with_metaclass\n'), ((54741, 54792), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (54755, 54792), False, 'from six import with_metaclass\n'), ((54902, 54953), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (54916, 54953), False, 'from six import with_metaclass\n'), ((55187, 55238), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (55201, 55238), False, 'from six import with_metaclass\n'), ((55427, 55478), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', 
(55441, 55478), False, 'from six import with_metaclass\n'), ((55647, 55698), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (55661, 55698), False, 'from six import with_metaclass\n'), ((56078, 56129), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (56092, 56129), False, 'from six import with_metaclass\n'), ((56219, 56270), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (56233, 56270), False, 'from six import with_metaclass\n'), ((56408, 56459), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (56422, 56459), False, 'from six import with_metaclass\n'), ((56653, 56704), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (56667, 56704), False, 'from six import with_metaclass\n'), ((56887, 56938), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (56901, 56938), False, 'from six import with_metaclass\n'), ((57171, 57222), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (57185, 57222), False, 'from six import with_metaclass\n'), ((57468, 57519), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (57482, 57519), False, 'from six import with_metaclass\n'), ((57952, 58003), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (57966, 58003), False, 'from six import with_metaclass\n'), ((58187, 58238), 'six.with_metaclass', 
'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (58201, 58238), False, 'from six import with_metaclass\n'), ((58792, 58843), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (58806, 58843), False, 'from six import with_metaclass\n'), ((58976, 59027), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (58990, 59027), False, 'from six import with_metaclass\n'), ((59178, 59229), 'six.with_metaclass', 'with_metaclass', (['_CaseInsensitiveEnumMeta', 'str', 'Enum'], {}), '(_CaseInsensitiveEnumMeta, str, Enum)\n', (59192, 59229), False, 'from six import with_metaclass\n')] |
#!/usr/bin/env python2
# -*- mode: python -*-
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2016 The Electrum developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import threading
from electrumsv.i18n import _
from electrumsv.logs import logs
from electrumsv.util import versiontuple
from .cmdline import CmdLineHandler
class HW_PluginBase(object):
    """Common base class for hardware-wallet plugins.

    Concrete plugins provide a ``keystore_class`` attribute plus the version
    bounds ``minimum_library`` (and optionally ``maximum_library``), and
    implement the device-specific hooks below.
    """

    # Serialises access to the HID layer across all plugin instances.
    hid_lock = threading.Lock()

    def __init__(self, device_kind):
        """Bind this plugin to a device kind and give it a dedicated logger."""
        self.device = self.keystore_class.device
        self.name = device_kind
        self.logger = logs.get_logger(device_kind)

    def create_keystore(self, d):
        """Build a keystore of this plugin's class from *d* and wire it up."""
        ks = self.keystore_class(d)
        ks.plugin = self
        # A GUI replaces this handler as soon as a window is opened.
        ks.gui_handler = CmdLineHandler()
        return ks

    def is_enabled(self):
        """Plugins are enabled by default; subclasses may override."""
        return True

    def get_library_version(self) -> str:
        """Return the version of the 3rd-party python library for the hw wallet.

        For example '0.9.0'.  Returns 'unknown' if the library is found but the
        version cannot be determined.  Raises ImportError if the library is not
        found, and LibraryFoundButUnusable if it is found but there was a
        problem (the exception carries the version number).
        """
        raise NotImplementedError()

    def check_libraries_available(self) -> bool:
        """Return True iff the wallet library imports and its version is usable.

        On failure returns False; for a version mismatch a human-readable
        explanation is also stored in ``self.libraries_available_message``
        and logged as a warning.
        """
        def dotted(parts):
            # e.g. (1, 2, 3) -> "1.2.3"
            return ".".join(str(p) for p in parts)

        try:
            # May itself raise ImportError or LibraryFoundButUnusable.
            found = self.get_library_version()
            # Guard clauses preserve the short-circuit order: an 'unknown'
            # version string is never fed to versiontuple().
            if found == 'unknown':
                raise LibraryFoundButUnusable(library_version=found)
            if versiontuple(found) < self.minimum_library:
                raise LibraryFoundButUnusable(library_version=found)
            if hasattr(self, "maximum_library") and \
                    versiontuple(found) >= self.maximum_library:
                raise LibraryFoundButUnusable(library_version=found)
        except ImportError:
            return False
        except LibraryFoundButUnusable as exc:
            found = exc.library_version
            upper = (dotted(self.maximum_library)
                     if hasattr(self, "maximum_library") else "inf")
            self.libraries_available_message = (
                _("Library version for '{}' is incompatible.").format(self.name)
                + '\nInstalled: {}, Needed: {} <= x < {}'
                .format(found, dotted(self.minimum_library), upper))
            self.logger.warning(self.libraries_available_message)
            return False

        return True

    def get_library_not_available_message(self) -> str:
        """Return the message explaining why the library is missing/unusable."""
        try:
            message = self.libraries_available_message
        except AttributeError:
            message = _("Missing libraries for {}.").format(self.name)
        return message + '\n' + _("Make sure you install it with python3")

    def enumerate_devices(self):
        """List currently attached matching devices; device specific."""
        raise NotImplementedError
class LibraryFoundButUnusable(Exception):
def __init__(self, library_version='unknown'):
super().__init__()
self.library_version = library_version
| [
"threading.Lock",
"electrumsv.i18n._",
"electrumsv.logs.logs.get_logger",
"electrumsv.util.versiontuple"
] | [((1401, 1417), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (1415, 1417), False, 'import threading\n'), ((1559, 1587), 'electrumsv.logs.logs.get_logger', 'logs.get_logger', (['device_kind'], {}), '(device_kind)\n', (1574, 1587), False, 'from electrumsv.logs import logs\n'), ((3975, 4017), 'electrumsv.i18n._', '_', (['"""Make sure you install it with python3"""'], {}), "('Make sure you install it with python3')\n", (3976, 4017), False, 'from electrumsv.i18n import _\n'), ((2731, 2760), 'electrumsv.util.versiontuple', 'versiontuple', (['library_version'], {}), '(library_version)\n', (2743, 2760), False, 'from electrumsv.util import versiontuple\n'), ((3900, 3930), 'electrumsv.i18n._', '_', (['"""Missing libraries for {}."""'], {}), "('Missing libraries for {}.')\n", (3901, 3930), False, 'from electrumsv.i18n import _\n'), ((2864, 2893), 'electrumsv.util.versiontuple', 'versiontuple', (['library_version'], {}), '(library_version)\n', (2876, 2893), False, 'from electrumsv.util import versiontuple\n'), ((3358, 3404), 'electrumsv.i18n._', '_', (['"""Library version for \'{}\' is incompatible."""'], {}), '("Library version for \'{}\' is incompatible.")\n', (3359, 3404), False, 'from electrumsv.i18n import _\n')] |
import csv
input = open('MovieI.csv', 'rb')
output = open('MovieO.csv', 'wb')
writer = csv.writer(output)
for row in csv.reader(input):
for i in range(len(row)):
if(row[0]==''):
break
elif(row[1]==''):
break
elif(row[2]==''):
break
elif(row[3]==''):
break
elif(row[4]==''):
break
else :writer.writerow(row)
input.close()
output.close() | [
"csv.writer",
"csv.reader"
] | [((88, 106), 'csv.writer', 'csv.writer', (['output'], {}), '(output)\n', (98, 106), False, 'import csv\n'), ((118, 135), 'csv.reader', 'csv.reader', (['input'], {}), '(input)\n', (128, 135), False, 'import csv\n')] |
### ----------- Python Code ------------###
import csv
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
import pandas as pd
### ------------- Start Alexa Stuff ---------###
app = Flask(__name__)
ask = Ask(app, "/")
#logging.getLogger("flask_ask").setLevel(logging.DEBUG)
### ----------- Switch Function --------------###
def setSwitch(Switchboard, switch, new_state):
switch_df = pd.read_csv(Switchboard + ".csv")
switch_df = switch_df.set_index('switch')
switch_df.set_value(switch,'state',new_state)
switch_df.to_csv(Switchboard + ".csv")
### ----------- Switch Function --------------###
def ReadInfo(Switchboard):
info_df = pd.read_csv(Switchboard + ".csv")
count = info_df.loc[0, 'Count']
return count
### ----------- Launch Skill --------------###
@ask.launch
def start_skill():
welcome_message = 'Hello, what would you like to ask the architect'
return question(welcome_message)
### -------------- Say Hello --------------- ####
@ask.intent("hello")
def hello():
setSwitch('C:\\sfdug\\Alexa','switch00', '1')
msg = "Hello San Francisco Dynamo user group"
return statement(msg)
### -------------- Create Points --------------- ####
@ask.intent("CreatePoints")
def CreatePoints():
setSwitch('C:\\sfdug\\Alexa','switch01', '1')
msg = "I am creating the points for the Janet Echelman sculptor"
return statement(msg)
### -------------- Create Connection --------------- ####
@ask.intent("CreateConnection")
def CreateConnection():
setSwitch('C:\\sfdug\\Alexa','switch02', '1')
msg = "I am creating a connection between the points"
return statement(msg)
### -------------- Create Framing --------------- ####
@ask.intent("CreateFraming")
def CreateFraming():
setSwitch('C:\\sfdug\\Alexa','switch03', '1')
msg = "I am creating the framing for the Janet Echelman sculptor"
return statement(msg)
### -------------- Reset --------------- ####
@ask.intent("Reset")
def Reset():
setSwitch('C:\\sfdug\\Alexa','switch01', '0')
setSwitch('C:\\sfdug\\Alexa','switch02', '0')
setSwitch('C:\\sfdug\\Alexa','switch03', '0')
msg = "I have reset Revvit"
return statement(msg)
### -------------- Count Framing --------------- ####
@ask.intent("CountFraming")
def CountFraming():
info = ReadInfo('C:\\sfdug\\AlexaRead')
msg = "I have counted: {}".format(info)
return statement(msg)
### --------------- Port for Ngrok -------------##
if __name__ == '__main__':
port = 9000 #the custom port you want
app.run(host='0.0.0.0', port=port)
app.run(debug=True) | [
"flask_ask.Ask",
"pandas.read_csv",
"flask.Flask",
"flask_ask.statement",
"flask_ask.question"
] | [((233, 248), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (238, 248), False, 'from flask import Flask, render_template\n'), ((255, 268), 'flask_ask.Ask', 'Ask', (['app', '"""/"""'], {}), "(app, '/')\n", (258, 268), False, 'from flask_ask import Ask, statement, question, session\n'), ((443, 476), 'pandas.read_csv', 'pd.read_csv', (["(Switchboard + '.csv')"], {}), "(Switchboard + '.csv')\n", (454, 476), True, 'import pandas as pd\n'), ((712, 745), 'pandas.read_csv', 'pd.read_csv', (["(Switchboard + '.csv')"], {}), "(Switchboard + '.csv')\n", (723, 745), True, 'import pandas as pd\n'), ((962, 987), 'flask_ask.question', 'question', (['welcome_message'], {}), '(welcome_message)\n', (970, 987), False, 'from flask_ask import Ask, statement, question, session\n'), ((1186, 1200), 'flask_ask.statement', 'statement', (['msg'], {}), '(msg)\n', (1195, 1200), False, 'from flask_ask import Ask, statement, question, session\n'), ((1435, 1449), 'flask_ask.statement', 'statement', (['msg'], {}), '(msg)\n', (1444, 1449), False, 'from flask_ask import Ask, statement, question, session\n'), ((1686, 1700), 'flask_ask.statement', 'statement', (['msg'], {}), '(msg)\n', (1695, 1700), False, 'from flask_ask import Ask, statement, question, session\n'), ((1939, 1953), 'flask_ask.statement', 'statement', (['msg'], {}), '(msg)\n', (1948, 1953), False, 'from flask_ask import Ask, statement, question, session\n'), ((2216, 2230), 'flask_ask.statement', 'statement', (['msg'], {}), '(msg)\n', (2225, 2230), False, 'from flask_ask import Ask, statement, question, session\n'), ((2436, 2450), 'flask_ask.statement', 'statement', (['msg'], {}), '(msg)\n', (2445, 2450), False, 'from flask_ask import Ask, statement, question, session\n')] |
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
# and limitations under the License.
import collections
import numpy as np
import scipy.sparse as smat
def cs_matrix(arg1, mat_type, shape=None, dtype=None, copy=False, check_contents=False):
    """Custom compressed sparse matrix constructor that allows indices and indptr to be stored in different types.

    Args:
        arg1 (tuple): (data, indices, indptr) to construct compressed sparse matrix
        mat_type (type): the matrix type to construct, one of [scipy.sparse.csr_matrix | scipy.sparse.csc_matrix]
        shape (tuple, optional): shape of the matrix, default None to infer from arg1
        dtype (type, optional): type of values in the matrix, default None to infer from data
        copy (bool, optional): whether to copy the input arrays, defaults to False
        check_contents (bool, optional): whether to check array contents to determine dtype, defaults to False

    Returns:
        compressed sparse matrix in mat_type

    Raises:
        ValueError: if shape is None and cannot be inferred from the arrays.
    """
    (data, indices, indptr) = arg1
    # SciPy >= 1.8 removed the public `scipy.sparse.sputils` alias; fall back
    # to the private module that still hosts the helper.
    try:
        get_index_dtype = smat.sputils.get_index_dtype
    except AttributeError:
        from scipy.sparse._sputils import get_index_dtype
    indices_dtype = get_index_dtype(indices, check_contents=check_contents)
    indptr_dtype = get_index_dtype(indptr, check_contents=check_contents)
    if shape is None:
        # BUGFIX: the shape used to be "inferred" from a freshly constructed
        # empty matrix (whose indptr/indices are not the input arrays), and
        # mat_type(None) crashed before that. Let SciPy infer the shape from
        # the raw arrays instead.
        try:
            shape = mat_type((data, indices, indptr)).shape
        except Exception:
            raise ValueError("unable to infer matrix dimensions")
    ret = mat_type(shape, dtype=dtype)
    ret.indices = np.array(indices, copy=copy, dtype=indices_dtype)
    ret.indptr = np.array(indptr, copy=copy, dtype=indptr_dtype)
    ret.data = np.array(data, copy=copy, dtype=dtype)
    return ret
def csr_matrix(arg1, shape=None, dtype=None, copy=False):
    """Custom csr_matrix constructor that allows indices and indptr to be stored in different types.

    Thin wrapper forwarding to `cs_matrix` with the CSR container type.

    Args:
        arg1 (tuple): (data, indices, indptr) to construct csr_matrix
        shape (tuple, optional): shape of the matrix, default None to infer from arg1
        dtype (type, optional): type of values in the matrix, default None to infer from data
        copy (bool, optional): whether to copy the input arrays, defaults to False

    Returns:
        csr_matrix
    """
    return cs_matrix(arg1, smat.csr_matrix, shape=shape, dtype=dtype, copy=copy)
def csc_matrix(arg1, shape=None, dtype=None, copy=False):
    """Custom csc_matrix constructor that allows indices and indptr to be stored in different types.

    Thin wrapper forwarding to `cs_matrix` with the CSC container type.

    Args:
        arg1 (tuple): (data, indices, indptr) to construct csc_matrix
        shape (tuple, optional): shape of the matrix, default None to infer from arg1
        dtype (type, optional): type of values in the matrix, default None to infer from data
        copy (bool, optional): whether to copy the input arrays, defaults to False

    Returns:
        csc_matrix
    """
    return cs_matrix(arg1, smat.csc_matrix, shape=shape, dtype=dtype, copy=copy)
def save_matrix(tgt, mat):
    """Save dense or sparse matrix to file.

    Dense arrays are written with `numpy.save` (.npy layout); sparse matrices
    with `scipy.sparse.save_npz` (uncompressed .npz layout).

    Args:
        tgt (str): path to save the matrix
        mat (numpy.ndarray or scipy.sparse.spmatrix): target matrix to save

    Raises:
        ValueError: if tgt is not a str.
        NotImplementedError: if mat is neither an ndarray nor a spmatrix.
    """
    # Raise ValueError instead of using `assert`: asserts are stripped under
    # `python -O`, and this mirrors load_matrix's input validation.
    if not isinstance(tgt, str):
        raise ValueError("tgt for save_matrix must be a str, but got {}".format(type(tgt)))
    with open(tgt, "wb") as tgt_file:
        if isinstance(mat, np.ndarray):
            np.save(tgt_file, mat, allow_pickle=False)
        elif isinstance(mat, smat.spmatrix):
            smat.save_npz(tgt_file, mat, compressed=False)
        else:
            raise NotImplementedError("Save not implemented for matrix type {}".format(type(mat)))
def load_matrix(src, dtype=None):
    """Load dense or sparse matrix from file.

    Args:
        src (str): path to load the matrix.
        dtype (numpy.dtype, optional): if given, convert matrix dtype. otherwise use default type.

    Returns:
        mat (numpy.ndarray or scipy.sparse.spmatrix): loaded matrix

    Notes:
        If underlying matrix is {"csc", "csr", "bsr"}, indices will be sorted.
    """
    if not isinstance(src, str):
        raise ValueError("src for load_matrix must be a str")
    loaded = np.load(src)
    if isinstance(loaded, np.ndarray):
        mat = loaded
    elif isinstance(loaded, np.lib.npyio.NpzFile):
        # Ref code: https://github.com/scipy/scipy/blob/v1.4.1/scipy/sparse/_matrix_io.py#L19-L80
        fmt = loaded["format"].item()
        if not isinstance(fmt, str):
            # files saved with SciPy < 1.0.0 may contain unicode or bytes.
            fmt = fmt.decode("ascii")
        try:
            cls = getattr(smat, "{}_matrix".format(fmt))
        except AttributeError:
            raise ValueError("Unknown matrix format {}".format(fmt))
        if fmt in ("csc", "csr", "bsr"):
            mat = cls((loaded["data"], loaded["indices"], loaded["indptr"]), shape=loaded["shape"])
            # in-place: guarantee sorted indices for compressed formats
            mat.sort_indices()
        elif fmt == "dia":
            mat = cls((loaded["data"], loaded["offsets"]), shape=loaded["shape"])
        elif fmt == "coo":
            mat = cls((loaded["data"], (loaded["row"], loaded["col"])), shape=loaded["shape"])
        else:
            raise NotImplementedError(
                "Load is not implemented for sparse matrix of format {}.".format(fmt)
            )
    else:
        raise TypeError(
            "load_feature_matrix encountered unknown input format {}".format(type(loaded))
        )
    return mat if dtype is None else mat.astype(dtype)
def transpose(mat):
    """Transpose a sparse matrix without copying its data arrays.

    CSR/CSC inputs are transposed by reinterpreting the same
    (data, indices, indptr) arrays under the opposite compressed format;
    other sparse formats fall back to `.T`.

    Args:
        mat (scipy.sparse.spmatrix): input sparse matrix to be transposed.

    Returns:
        transposed matrix

    Raises:
        ValueError: if mat is not a scipy sparse matrix.
    """
    if not isinstance(mat, smat.spmatrix):
        raise ValueError("mat must be a smat.spmatrix type")
    flipped_shape = (mat.shape[1], mat.shape[0])
    if isinstance(mat, smat.csr_matrix):
        return csc_matrix((mat.data, mat.indices, mat.indptr), shape=flipped_shape)
    if isinstance(mat, smat.csc_matrix):
        return csr_matrix((mat.data, mat.indices, mat.indptr), shape=flipped_shape)
    return mat.T
def sorted_csr_from_coo(shape, row_idx, col_idx, val, only_topk=None):
    """Return a row-sorted CSR matrix from a COO sparse matrix.

    Nonzero elements in each row of the returned CSR matrix are sorted in
    descending order of value. If only_topk is given, only the topk largest
    elements of each row are kept.

    Args:
        shape (tuple): the shape of the input COO matrix
        row_idx (ndarray): row indices of the input COO matrix
        col_idx (ndarray): col indices of the input COO matrix
        val (ndarray): values of the input COO matrix
        only_topk (int, optional): keep only topk elements per row. Default None to ignore

    Returns:
        csr_matrix
    """
    csr = smat.csr_matrix((val, (row_idx, col_idx)), shape=shape)
    csr.sort_indices()
    for i in range(shape[0]):
        rng = slice(csr.indptr[i], csr.indptr[i + 1])
        # mergesort is stable, so ties keep their (column-sorted) order
        sorted_idx = np.argsort(-csr.data[rng], kind="mergesort")
        csr.indices[rng] = csr.indices[rng][sorted_idx]
        csr.data[rng] = csr.data[rng][sorted_idx]
    if only_topk is not None:
        assert isinstance(only_topk, int), f"Wrong type: type(only_topk) = {type(only_topk)}"
        # BUGFIX: clamp to at least 1. The previous expression
        # max(min(1, only_topk), only_topk) always evaluated to only_topk,
        # so non-positive values silently produced an all-empty matrix.
        only_topk = max(only_topk, 1)
        nnz_of_insts = csr.indptr[1:] - csr.indptr[:-1]
        row_idx = np.repeat(np.arange(shape[0], dtype=csr.indices.dtype), nnz_of_insts)
        # keep an entry iff its rank within its (already sorted) row is < only_topk
        selected_idx = (np.arange(len(csr.data)) - csr.indptr[row_idx]) < only_topk
        row_idx = row_idx[selected_idx]
        col_idx = csr.indices[selected_idx]
        val = csr.data[selected_idx]
        indptr = np.cumsum(np.bincount(row_idx + 1, minlength=(shape[0] + 1)))
        csr = csr_matrix((val, col_idx, indptr), shape=shape, dtype=val.dtype)
    return csr
def sorted_csc_from_coo(shape, row_idx, col_idx, val, only_topk=None):
    """Return a column-sorted CSC matrix from a COO sparse matrix.

    Nonzero elements in each col of the returned CSC matrix are sorted in
    descending order of value. If only_topk is given, only the topk largest
    elements of each column are kept.

    Args:
        shape (tuple): the shape of the input COO matrix
        row_idx (ndarray): row indices of the input COO matrix
        col_idx (ndarray): col indices of the input COO matrix
        val (ndarray): values of the input COO matrix
        only_topk (int, optional): keep only topk elements per col. Default None to ignore

    Returns:
        csc_matrix
    """
    # Build the transposed problem as a row-sorted CSR, then flip back.
    # BUGFIX: forward only_topk -- it used to be hard-coded to None, so the
    # documented per-column top-k truncation was silently ignored.
    csr = sorted_csr_from_coo(shape[::-1], col_idx, row_idx, val, only_topk=only_topk)
    return transpose(csr)
def binarized(X, inplace=False):
    """Binarize a dense/sparse matrix. All nonzero elements become 1.

    Args:
        X (np.ndarray, spmatrix): input matrix to binarize
        inplace (bool, optional): if True do the binarization in-place, else return a copy. Default False

    Returns:
        binarized X

    Raises:
        NotImplementedError: if X is neither an ndarray nor a spmatrix.
    """
    if not isinstance(X, (np.ndarray, smat.spmatrix)):
        raise NotImplementedError(
            "this function only support X being np.ndarray or scipy.sparse.spmatrix."
        )
    if not inplace:
        X = X.copy()
    if isinstance(X, smat.spmatrix):
        # sparse: the stored entries are exactly the nonzeros
        X.data[:] = 1
    else:
        # BUGFIX: only overwrite nonzero entries. `X[:] = 1` also turned the
        # zeros into ones, contradicting the documented behavior.
        X[X != 0] = 1
    return X
def sorted_csr(csr, only_topk=None):
    """Return a copy of `csr` with each row's nonzeros sorted by descending value.

    If `only_topk` is given, only the top-k largest elements of each row are kept.

    Args:
        csr (csr_matrix): input csr_matrix to sort
        only_topk (int, optional): keep only topk elements per row. Default None to ignore

    Returns:
        csr_matrix

    Raises:
        ValueError: if csr is not a csr_matrix.
    """
    if not isinstance(csr, smat.csr_matrix):
        raise ValueError("the input matrix must be a csr_matrix.")
    # expand the row id of every stored nonzero, then delegate to the COO path
    nnz_per_row = csr.indptr[1:] - csr.indptr[:-1]
    row_ids = np.repeat(np.arange(csr.shape[0], dtype=np.uint32), nnz_per_row)
    return sorted_csr_from_coo(csr.shape, row_ids, csr.indices, csr.data, only_topk)
def sorted_csc(csc, only_topk=None):
    """Return a copy of input CSC matrix where nonzero elements in each column are sorted in descending order of value.

    If `only_topk` is given, only top-k largest elements will be kept.

    Args:
        csc (csc_matrix): input csc_matrix to sort
        only_topk (int, optional): keep only topk elements per col. Default None to ignore

    Returns:
        csc_matrix

    Raises:
        ValueError: if csc is not a csc_matrix.
    """
    if not isinstance(csc, smat.csc_matrix):
        raise ValueError("the input matrix must be a csc_matrix.")
    # BUGFIX: forward only_topk -- it used to be dropped, so the documented
    # per-column top-k truncation never happened.
    return transpose(sorted_csr(transpose(csc), only_topk=only_topk))
def dense_to_csr(dense, topk=None, batch=None):
    """Memory efficient method to construct a csr_matrix from a dense matrix.

    The dense matrix is processed in row chunks so that only one chunk's worth
    of intermediate index arrays is alive at a time. Note the result stores
    exactly `topk` entries per row (when topk is given), including explicit
    zeros if a row has fewer than `topk` nonzero values.

    Args:
        dense (ndarray): 2-D dense matrix to convert.
        topk (int or None, optional): keep topk non-zeros with largest abs value for each row.
            Default None to keep everything.
        batch (int or None, optional): the batch size for construction.
            Default None to use min(dense.shape[0], 10 ** 5).

    Returns:
        csr_matrix that has topk nnz each row with the same shape as dense.
    """
    BATCH_LIMIT = 10 ** 5  # default cap on the number of rows per chunk
    if topk is None:
        keep_topk = dense.shape[1]
    else:
        # clamp the requested top-k into [1, n_cols]
        keep_topk = min(dense.shape[1], max(1, int(topk)))
    # if batch is given, use input batch size even if input batch > BATCH_LIMIT
    if batch is None:
        chunk_size = min(dense.shape[0], BATCH_LIMIT)
    else:
        chunk_size = min(dense.shape[0], max(1, int(batch)))
    max_nnz = keep_topk * dense.shape[0]
    # use 32-bit index arrays whenever they are guaranteed to fit
    indptr_dtype = np.int32 if max_nnz < np.iinfo(np.int32).max else np.int64
    indices_dtype = np.int32 if dense.shape[1] < np.iinfo(np.int32).max else np.int64
    data = np.empty((keep_topk * dense.shape[0],), dtype=dense.dtype)
    indices = np.empty((keep_topk * dense.shape[0],), dtype=indices_dtype)
    for i in range(0, dense.shape[0], chunk_size):
        cur_chunk = dense[i : i + chunk_size, :]
        chunk_len = cur_chunk.shape[0]
        if keep_topk < dense.shape[1]:
            # argpartition places the keep_topk largest-|value| columns of each
            # row in the last keep_topk slots (in no particular order)
            col_indices = np.argpartition(abs(cur_chunk), keep_topk, axis=1)[:, -keep_topk:]
        else:
            # keep every column: indices 0..keep_topk-1 for each row
            col_indices = np.repeat(np.arange(keep_topk)[np.newaxis, :], chunk_len, axis=0)
        row_indices = np.repeat(np.arange(chunk_len)[:, np.newaxis], keep_topk, axis=1)
        chunk_data = cur_chunk[row_indices, col_indices]
        # every row contributes exactly keep_topk entries, so this chunk's
        # flat destination starts at offset i * keep_topk
        data[i * keep_topk : i * keep_topk + chunk_data.size] = chunk_data.flatten()
        indices[i * keep_topk : i * keep_topk + col_indices.size] = col_indices.flatten()
    indptr = np.arange(0, dense.shape[0] * keep_topk + 1, keep_topk, dtype=indptr_dtype)
    # Bypass scipy constructor to allow different indices and indptr types
    return csr_matrix((data, indices, indptr), shape=dense.shape)
def vstack_csr(matrices, dtype=None):
    """Memory efficient method to stack csr_matrices vertically.

    The returned matrix will retain the indices order. The output arrays are
    allocated once at the final size and each input's data/indices are copied
    in place, so no intermediate COO representation is built.

    Args:
        matrices (list or tuple of csr_matrix): the matrices to stack in order, with shape (M1 x N), (M2 x N), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csr_matrix with shape (M1 + M2 + ..., N)

    Raises:
        ValueError: if matrices is not a list/tuple, an element is not a
            csr_matrix, or the column counts disagree.
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if any(not isinstance(X, smat.csr_matrix) for X in matrices):
        raise ValueError("all matrix in matrices need to be csr_matrix!")
    # trivial cases: a single matrix is returned as-is; an empty list gives None
    if len(matrices) <= 1:
        return matrices[0] if len(matrices) == 1 else None
    nr_cols = matrices[0].shape[1]
    if any(mat.shape[1] != nr_cols for mat in matrices):
        raise ValueError("Second dim not match")
    # int() converts numpy scalars to Python ints so the sums cannot overflow
    total_nnz = sum([int(mat.nnz) for mat in matrices])
    total_rows = sum([int(mat.shape[0]) for mat in matrices])
    # infer result dtypes from inputs
    int32max = np.iinfo(np.int32).max
    if dtype is None:
        # NOTE(review): smat.sputils was removed in SciPy >= 1.8; this call
        # assumes an older SciPy -- confirm against pinned requirements.
        dtype = smat.sputils.upcast(*[mat.dtype for mat in matrices])
    # column indices are bounded by nr_cols; indptr entries by total_nnz
    indices_dtype = np.int64 if nr_cols > int32max else np.int32
    indptr_dtype = np.int64 if total_nnz > int32max else np.int32
    indptr = np.empty(total_rows + 1, dtype=indptr_dtype)
    indices = np.empty(total_nnz, dtype=indices_dtype)
    data = np.empty(total_nnz, dtype=dtype)
    indptr[0], cur_nnz, cur_row = 0, 0, 0
    for mat in matrices:
        # bulk-copy this matrix's nonzeros into the preallocated arrays
        indices[cur_nnz : cur_nnz + mat.nnz] = mat.indices
        data[cur_nnz : cur_nnz + mat.nnz] = mat.data
        # can not merge the following two lines because
        # mat.indptr[1:] + cur_nnz may overflow!
        indptr[cur_row + 1 : cur_row + mat.shape[0] + 1] = mat.indptr[1:]
        indptr[cur_row + 1 : cur_row + mat.shape[0] + 1] += cur_nnz
        cur_nnz += mat.nnz
        cur_row += mat.shape[0]
    return csr_matrix((data, indices, indptr), shape=(total_rows, nr_cols))
def hstack_csr(matrices, dtype=None):
    """Memory efficient method to stack csr_matrices horizontally.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csr_matrix): the matrices to stack in order, with shape (M x N1), (M x N2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csr_matrix with shape (M, N1 + N2 + ...)

    Raises:
        ValueError: if matrices is not a list/tuple, an element is not a
            csr_matrix, or the row counts disagree.
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if any(not isinstance(X, smat.csr_matrix) for X in matrices):
        raise ValueError("all matrix in matrices need to be csr_matrix!")
    if len(matrices) <= 1:
        return matrices[0] if len(matrices) == 1 else None
    nr_rows = matrices[0].shape[0]
    if any(mat.shape[0] != nr_rows for mat in matrices):
        raise ValueError("First dim not match")
    total_nnz = sum([int(mat.nnz) for mat in matrices])
    total_cols = sum([int(mat.shape[1]) for mat in matrices])
    # infer result dtypes from inputs
    int32max = np.iinfo(np.int32).max
    if dtype is None:
        dtype = smat.sputils.upcast(*[mat.dtype for mat in matrices])
    # BUGFIX: CSR indices are *column* indices, bounded by total_cols (the
    # previous code widened based on nr_rows, the wrong dimension).
    indices_dtype = np.int64 if total_cols > int32max else np.int32
    indptr_dtype = np.int64 if total_nnz > int32max else np.int32
    indptr = np.empty(nr_rows + 1, dtype=indptr_dtype)
    indices = np.empty(total_nnz, dtype=indices_dtype)
    data = np.empty(total_nnz, dtype=dtype)
    indptr[0], cur_ptr = 0, 0
    for i in range(nr_rows):  # for every row
        # walk the matrices left-to-right, shifting their column indices
        start_col = 0
        for mat in matrices:
            cur_nnz = mat.indptr[i + 1] - mat.indptr[i]
            indices[cur_ptr : cur_ptr + cur_nnz] = (
                mat.indices[mat.indptr[i] : mat.indptr[i + 1]] + start_col
            )
            data[cur_ptr : cur_ptr + cur_nnz] = mat.data[mat.indptr[i] : mat.indptr[i + 1]]
            cur_ptr += cur_nnz
            start_col += mat.shape[1]
        indptr[i + 1] = cur_ptr
    return csr_matrix((data, indices, indptr), shape=(nr_rows, total_cols))
def block_diag_csr(matrices, dtype=None):
    """Memory efficient method to stack csr_matrices block diagonally.

    The returned matrix will retain the indices order.

    Args:
        matrices (list or tuple of csr_matrix): the matrices to stack in order, with shape (NR1 x NC1), (NR2 x NC2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csr_matrix with shape (NR1 + NR2 + ..., NC1 + NC2 + ...)

    Raises:
        ValueError: if matrices is not a list/tuple or an element is not a csr_matrix.
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if any(not isinstance(X, smat.csr_matrix) for X in matrices):
        raise ValueError("all matrix in matrices need to be csr_matrix!")
    if len(matrices) <= 1:
        return matrices[0] if len(matrices) == 1 else None
    total_nnz = sum([int(mat.nnz) for mat in matrices])
    total_rows = sum([int(mat.shape[0]) for mat in matrices])
    total_cols = sum([int(mat.shape[1]) for mat in matrices])
    # infer result dtypes from inputs
    int32max = np.iinfo(np.int32).max
    if dtype is None:
        dtype = smat.sputils.upcast(*[mat.dtype for mat in matrices])
    # BUGFIX: CSR indices are *column* indices, bounded by total_cols (the
    # previous code widened based on total_rows, the wrong dimension).
    indices_dtype = np.int64 if total_cols > int32max else np.int32
    indptr_dtype = np.int64 if total_nnz > int32max else np.int32
    indptr = np.empty(total_rows + 1, dtype=indptr_dtype)
    indices = np.empty(total_nnz, dtype=indices_dtype)
    data = np.empty(total_nnz, dtype=dtype)
    cur_row, cur_col, cur_nnz = 0, 0, 0
    indptr[0] = 0
    for mat in matrices:
        # place this block at (cur_row, cur_col) by shifting its indices
        data[cur_nnz : cur_nnz + mat.nnz] = mat.data
        indices[cur_nnz : cur_nnz + mat.nnz] = mat.indices + cur_col
        # NOTE(review): mat.indptr may be int32; pre-NumPy-2 value-based
        # casting could keep the sum in int32 and overflow for huge stacks.
        indptr[1 + cur_row : 1 + cur_row + mat.shape[0]] = mat.indptr[1:] + indptr[cur_row]
        cur_col += mat.shape[1]
        cur_row += mat.shape[0]
        cur_nnz += mat.nnz
    return csr_matrix((data, indices, indptr), shape=(total_rows, total_cols))
def vstack_csc(matrices, dtype=None):
    """Memory efficient method to stack csc_matrices vertically.

    The returned matrix will retain the indices order. Implemented by
    transposing the inputs, stacking them horizontally as CSR, and
    transposing back.

    Args:
        matrices (list or tuple of csc_matrix): the matrices to stack in order, with shape (M1 x N), (M2 x N), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csc_matrix with shape (M1 + M2 + ..., N)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    for X in matrices:
        if not isinstance(X, smat.csc_matrix):
            raise ValueError("all matrix in matrices need to be csc_matrix!")
    if not matrices:
        return None
    if len(matrices) == 1:
        return matrices[0]
    transposed = [transpose(mat) for mat in matrices]
    return transpose(hstack_csr(transposed, dtype=dtype))
def hstack_csc(matrices, dtype=None):
    """Memory efficient method to stack csc_matrices horizontally.

    The returned matrix will retain the indices order. Implemented by
    transposing the inputs, stacking them vertically as CSR, and
    transposing back.

    Args:
        matrices (list or tuple of csc_matrix): the matrices to stack in order, with shape (M x N1), (M x N2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csc_matrix with shape (M, N1 + N2 + ...)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    for X in matrices:
        if not isinstance(X, smat.csc_matrix):
            raise ValueError("all matrix in matrices need to be csc_matrix!")
    if not matrices:
        return None
    if len(matrices) == 1:
        return matrices[0]
    transposed = [transpose(mat) for mat in matrices]
    return transpose(vstack_csr(transposed, dtype=dtype))
def block_diag_csc(matrices, dtype=None):
    """Memory efficient method to stack csc_matrices block diagonally.

    The returned matrix will retain the indices order. Implemented by
    transposing the inputs, block-stacking them as CSR, and transposing back.

    Args:
        matrices (list or tuple of csc_matrix): the matrices to stack in order, with shape (NR1 x NC1), (NR2 x NC2), ...
        dtype (dtype, optional): The data-type of the output matrix. Default None to infer from matrices

    Returns:
        csc_matrix with shape (NR1 + NR2 + ..., NC1 + NC2 + ...)
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    for X in matrices:
        if not isinstance(X, smat.csc_matrix):
            raise ValueError("all matrix in matrices need to be csc_matrix!")
    if not matrices:
        return None
    if len(matrices) == 1:
        return matrices[0]
    transposed = [transpose(mat) for mat in matrices]
    return transpose(block_diag_csr(transposed, dtype=dtype))
def get_csc_col_nonzero(matrix):
    """Given a CSC matrix, return the nonzero row ids of each column.

    The returned arrays retain the stored indices order.

    Args:
        matrix (csc_matrix): the matrix to operate on, with shape (N x M)

    Returns:
        list of ndarray [a_1, ..., a_M], where a_j holds the nonzero row ids of col j

    Raises:
        ValueError: if matrix is not a csc_matrix.
    """
    if not isinstance(matrix, smat.csc_matrix):
        raise ValueError("matrix need to be csc_matrix!")
    # each column j occupies indices[indptr[j]:indptr[j+1]]
    return [
        matrix.indices[start:end]
        for start, end in zip(matrix.indptr[:-1], matrix.indptr[1:])
    ]
def get_csr_row_nonzero(matrix):
    """Given a CSR matrix, return the nonzero col ids of each row.

    The returned arrays retain the stored indices order.

    Args:
        matrix (csr_matrix): the matrix to operate on, with shape (N x M)

    Returns:
        list of ndarray [a_1, ..., a_N], where a_i holds the nonzero col ids of row i

    Raises:
        ValueError: if matrix is not a csr_matrix.
    """
    if not isinstance(matrix, smat.csr_matrix):
        raise ValueError("matrix need to be csr_matrix!")
    # each row i occupies indices[indptr[i]:indptr[i+1]]
    return [
        matrix.indices[start:end]
        for start, end in zip(matrix.indptr[:-1], matrix.indptr[1:])
    ]
def get_row_submatrices(matrices, row_indices):
    """Get the sub-matrices of given matrices by selecting the rows given in row_indices.

    Args:
        matrices (list of csr_matrix or ndarray): the matrices [mat_1, mat_2, ...] to operate on, with shape (M x N1), (M x N2), ...
        row_indices (list or ndarray): the row indices to select

    Returns:
        list of csr_matrix or ndarray

    Raises:
        ValueError: on invalid container/matrix types, mismatched first
            dimensions, or out-of-range row indices.
    """
    if not isinstance(matrices, (list, tuple)):
        raise ValueError("matrices should be either list or tuple")
    if len(matrices) == 0:
        raise ValueError("At least one matrix required as input")
    if any(not isinstance(X, (smat.csr_matrix, np.ndarray)) for X in matrices):
        raise ValueError("all matrix in matrices need to be csr_matrix or ndarray!")
    nr_rows = matrices[0].shape[0]
    if any(mat.shape[0] != nr_rows for mat in matrices):
        raise ValueError("First dim not match")
    if any(idx >= nr_rows or idx < 0 for idx in row_indices):
        raise ValueError("row indices should be positive and do not exceed matrix first dimension")
    selected = []
    for mat in matrices:
        sub = mat[row_indices, :]
        if isinstance(mat, smat.csr_matrix):
            # fancy row indexing may leave indices unsorted; normalize in place
            sub.sort_indices()
        selected.append(sub)
    return selected
def dense_to_coo(dense):
"""Convert a dense matrix to COO format.
Args:
dense (ndarray): input dense matrix
Returns:
coo_matrix
"""
rows = np.arange(dense.shape[0], dtype=np.uint32)
cols = np.arange(dense.shape[1], dtype=np.uint32)
row_idx = np.repeat(rows, np.ones_like(rows) * len(cols)).astype(np.uint32)
col_idx = np.ones((len(rows), 1), dtype=np.uint32).dot(cols.reshape(1, -1)).ravel()
return smat.coo_matrix((dense.ravel(), (row_idx, col_idx)), shape=dense.shape)
def get_relevance_csr(csr, mm=None, dtype=np.float64):
    """Return the csr matrix containing relevance scores based on a given prediction csr matrix.

    Relevance of a stored entry is ``mm - local_rank`` where local_rank is the
    entry's 0-based position within its row (so a full row gets mm..1).

    Args:
        csr (csr_matrix): input CSR matrix, row indices are sorted in descending order
        mm (int, optional): max rank, will be inferred from csr if not given
        dtype (type, optional): datatype for the returned relevance matrix. Default float64.

    Returns:
        csr_matrix of relevance scores
    """
    if mm is None:
        mm = (csr.indptr[1:] - csr.indptr[:-1]).max()
    # row id of every stored nonzero, then its 0-based rank within that row
    row_of_nnz = np.repeat(np.arange(csr.shape[0]), np.diff(csr.indptr))
    local_rank = np.arange(len(csr.data)) - csr.indptr[row_of_nnz]
    rel = np.asarray(mm - local_rank, dtype=dtype)
    return smat.csr_matrix((rel, csr.indices, csr.indptr), csr.shape)
def get_sparsified_coo(coo, selected_rows, selected_columns):
    """Zero out every entry outside the selected rows and columns.

    Args:
        coo (coo_matrix): input coo matrix
        selected_rows (list of int or np.array(int)): rows to keep
        selected_columns (list of int or np.array(int)): columns to keep

    Returns:
        coo matrix of the same shape with unwanted rows and columns zeroed out.
    """
    keep_row = np.zeros(coo.shape[0], dtype=bool)
    keep_col = np.zeros(coo.shape[1], dtype=bool)
    keep_row[selected_rows] = True
    keep_col[selected_columns] = True
    # an entry survives only if both its row AND its column are selected
    mask = keep_row[coo.row] & keep_col[coo.col]
    return smat.coo_matrix(
        (coo.data[mask], (coo.row[mask], coo.col[mask])), shape=coo.shape
    )
def csr_rowwise_mul(A, v):
    """Row-wise multiplication between sparse csr matrix A and dense array v.

    Each row of A is multiplied by the corresponding element in v, i.e.
    Z[i, j] = A[i, j] * v[i]. The number of rows of A must equal len(v).

    Args:
        A (csr_matrix): The matrix to be multiplied.
        v (ndarray): The multiplying vector.

    Returns:
        Z (csr_matrix): The product of row-wise multiplication of A and v.

    Raises:
        ValueError: on wrong input types or mismatched dimensions.
    """
    if not isinstance(A, smat.csr_matrix):
        raise ValueError("A must be scipy.sparse.csr_matrix")
    if not isinstance(v, np.ndarray):
        raise ValueError("v must be a numpy ndarray")
    if v.ndim != 1:
        raise ValueError("v should be an 1-d array")
    if v.shape[0] != A.shape[0]:
        raise ValueError("The dimension of v should be the same as the number of rows of A")
    Z = A.copy()
    # np.diff(indptr) is the per-row nnz count; expanding v to one factor per
    # stored nonzero replaces the former Python-level row loop with a single
    # vectorized O(nnz) multiply.
    Z.data *= np.repeat(v, np.diff(A.indptr))
    return Z
def csc_colwise_mul(A, v):
"""Column-wise multiplication between sparse csc matrix A and dense array v, where each column of A is multiplied by the corresponding element in v (The number of columns of A is same as the length of v).
Args:
A (csc_matrix): The matrix to be multiplied.
v (ndarray): The multiplying vector.
Returns:
Z (csc_matrix): The product of column-wise multiplication of A and v.
"""
if not isinstance(A, smat.csc_matrix):
raise ValueError(f"A must be scipy.sparse.csc_matrix")
if not isinstance(v, np.ndarray):
raise ValueError(f"v must be a numpy ndarray")
if v.ndim != 1:
raise ValueError(f"v should be an 1-d array")
if v.shape[0] != A.shape[1]:
raise ValueError(f"The dimension of v should be the same as the number of columns of A")
Z = A.copy()
for i in range(v.shape[0]):
Z.data[Z.indptr[i] : Z.indptr[i + 1]] *= v[i]
return Z
def get_cocluster_spectral_embeddings(A, dim=24):
"""Obtain the co-cluster spectral embeddings for the given bipartite graph described in [1]
* [1] `<NAME>, 2001. Co-clustering documents and words using
bipartite spectral graph partition`
Args:
A (csr_matrix or csc_matrix): bipartite graph matrix
dim (int, optional): the dimension of the returned embeddings. Default 24
Returns:
(row_embedding, col_embedding): a tuple of embeddings for rows and columns respectively
row_embedding: numpy.ndarray of shape (A.shape[0], dim).
col_embedding: numpy.ndarray of shape (A.shape[1], dim).
"""
assert A.min() >= 0.0, "A must be nonnegative"
from sklearn.utils.extmath import randomized_svd
# Obtain An, the normalized adjacency bipartite matrix described in Eq (10) of [1]
# A_n = D_1^{-1/2} A D_2^{-1/2}
# row_diag = diagonal of D_1^{-1/2}
# col_diag = diagonal of D_2^{-1/2}
row_diag = np.asarray(np.sqrt(A.sum(axis=1))).squeeze()
col_diag = np.asarray(np.sqrt(A.sum(axis=0))).squeeze()
row_diag[row_diag == 0] = 1.0
col_diag[col_diag == 0] = 1.0
row_diag = 1.0 / row_diag
col_diag = 1.0 / col_diag
if smat.issparse(A):
n_rows, n_cols = A.shape
r = smat.dia_matrix((row_diag, [0]), shape=(n_rows, n_rows))
c = smat.dia_matrix((col_diag, [0]), shape=(n_cols, n_cols))
An = r * A * c
else:
An = row_diag[:, np.newaxis] * A * col_diag
# run SVD on An
nr_discards = 1 # discarding the first component
U, Sigma, VT = randomized_svd(An, dim + nr_discards, random_state=0)
# Normalized the singular vectors based on Eq (24) of [1]
row_embedding = np.ascontiguousarray(row_diag[:, np.newaxis] * U[:, nr_discards:])
col_embedding = np.ascontiguousarray(col_diag[:, np.newaxis] * VT[nr_discards:].T)
return row_embedding, col_embedding
class CsrEnsembler(object):
"""A class implementing several ensemblers for a list sorted CSR predictions"""
@staticmethod
def check_validlity(*args):
"""Check whether input CSR matrices are valid
Args:
args (iterable over csr_matrix): input CSR matrices
"""
for x in args:
assert isinstance(x, smat.csr_matrix), type(x)
assert all(x.shape == args[0].shape for x in args)
@staticmethod
def average(*args):
"""Ensemble predictions by averaging prediction values
Args:
args (iterable over csr_matrix): input CSR matrices
Returns:
ret (csr_matrix): ensembled prediction CSR matrix
"""
CsrEnsembler.check_validlity(*args)
ret = sum(args)
ret = sorted_csr(ret)
ret.data /= len(args)
return ret
@staticmethod
def rank_average(*args):
"""Ensemble predictions by averaging prediction ranks
Args:
args (iterable over csr_matrix): input CSR matrices
Returns:
ret (csr_matrix): ensembled prediction CSR matrix
"""
CsrEnsembler.check_validlity(*args)
mm = max((x.indptr[1:] - x.indptr[:-1]).max() for x in args)
ret = sum(get_relevance_csr(csr, mm) for csr in args)
ret = sorted_csr(ret)
ret.data /= len(args)
return ret
@staticmethod
def round_robin(*args):
"""Ensemble predictions by round robin
Args:
args (iterable over csr_matrix): input CSR matrices
Returns:
ret (csr_matrix): ensembled prediction CSR matrix
"""
CsrEnsembler.check_validlity(*args)
base = 1.0 / (len(args) + 1.0)
mm = max((x.indptr[1:] - x.indptr[:-1]).max() for x in args)
ret = get_relevance_csr(args[0], mm)
ret.data[:] += len(args) * base
for i, x in enumerate(args[1:], 1):
tmp = get_relevance_csr(x, mm)
tmp.data[:] += (len(args) - i) * base
ret = ret.maximum(tmp)
ret = sorted_csr(ret)
ret.data /= len(args)
return ret
@staticmethod
def print_ens(Ytrue, pred_set, param_set, topk=10):
"""Print matrices before and after ensemble
Args:
Ytrue (csr_matrix): ground truth label matrix
pred_set (iterable over csr_matrix): prediction matrices to ensemble
param_set (iterable): parameters or model names associated with pred_set
"""
for param, pred in zip(param_set, pred_set):
print("param: {}".format(param))
print(Metrics.generate(Ytrue, pred, topk=topk))
for ens in [CsrEnsembler.average, CsrEnsembler.rank_average, CsrEnsembler.round_robin]:
print("ens: {}".format(ens.__name__))
print(Metrics.generate(Ytrue, ens(*pred_set), topk=topk))
class Metrics(collections.namedtuple("Metrics", ["prec", "recall"])):
"""The metrics (precision, recall) for multi-label classification problems."""
__slots__ = ()
def __str__(self):
"""Format printing"""
def fmt(key):
return " ".join("{:4.2f}".format(100 * v) for v in getattr(self, key)[:])
return "\n".join("{:7}= {}".format(key, fmt(key)) for key in self._fields)
@classmethod
def default(cls):
"""Default dummy metric"""
return cls(prec=[], recall=[])
@classmethod
def generate(cls, tY, pY, topk=10):
"""Compute the metrics with given prediction and ground truth.
Args:
tY (csr_matrix): ground truth label matrix
pY (csr_matrix): predicted logits
topk (int, optional): only generate topk prediction. Default 10
Returns:
Metrics
"""
assert isinstance(tY, smat.csr_matrix), type(tY)
assert isinstance(pY, smat.csr_matrix), type(pY)
assert tY.shape == pY.shape, "tY.shape = {}, pY.shape = {}".format(tY.shape, pY.shape)
pY = sorted_csr(pY)
total_matched = np.zeros(topk, dtype=np.uint64)
recall = np.zeros(topk, dtype=np.float64)
for i in range(tY.shape[0]):
truth = tY.indices[tY.indptr[i] : tY.indptr[i + 1]]
matched = np.isin(pY.indices[pY.indptr[i] : pY.indptr[i + 1]][:topk], truth)
cum_matched = np.cumsum(matched, dtype=np.uint64)
total_matched[: len(cum_matched)] += cum_matched
recall[: len(cum_matched)] += cum_matched / max(len(truth), 1)
if len(cum_matched) != 0:
total_matched[len(cum_matched) :] += cum_matched[-1]
recall[len(cum_matched) :] += cum_matched[-1] / max(len(truth), 1)
prec = total_matched / tY.shape[0] / np.arange(1, topk + 1)
recall = recall / tY.shape[0]
return cls(prec=prec, recall=recall)
| [
"numpy.iinfo",
"numpy.isin",
"numpy.ascontiguousarray",
"numpy.argsort",
"numpy.array",
"scipy.sparse.sputils.get_index_dtype",
"numpy.save",
"numpy.arange",
"scipy.sparse.sputils.upcast",
"numpy.empty",
"scipy.sparse.coo_matrix",
"scipy.sparse.csr_matrix",
"sklearn.utils.extmath.randomized_... | [((33975, 34028), 'collections.namedtuple', 'collections.namedtuple', (['"""Metrics"""', "['prec', 'recall']"], {}), "('Metrics', ['prec', 'recall'])\n", (33997, 34028), False, 'import collections\n'), ((1542, 1610), 'scipy.sparse.sputils.get_index_dtype', 'smat.sputils.get_index_dtype', (['indices'], {'check_contents': 'check_contents'}), '(indices, check_contents=check_contents)\n', (1570, 1610), True, 'import scipy.sparse as smat\n'), ((1630, 1697), 'scipy.sparse.sputils.get_index_dtype', 'smat.sputils.get_index_dtype', (['indptr'], {'check_contents': 'check_contents'}), '(indptr, check_contents=check_contents)\n', (1658, 1697), True, 'import scipy.sparse as smat\n'), ((2142, 2191), 'numpy.array', 'np.array', (['indices'], {'copy': 'copy', 'dtype': 'indices_dtype'}), '(indices, copy=copy, dtype=indices_dtype)\n', (2150, 2191), True, 'import numpy as np\n'), ((2209, 2256), 'numpy.array', 'np.array', (['indptr'], {'copy': 'copy', 'dtype': 'indptr_dtype'}), '(indptr, copy=copy, dtype=indptr_dtype)\n', (2217, 2256), True, 'import numpy as np\n'), ((2272, 2310), 'numpy.array', 'np.array', (['data'], {'copy': 'copy', 'dtype': 'dtype'}), '(data, copy=copy, dtype=dtype)\n', (2280, 2310), True, 'import numpy as np\n'), ((4764, 4776), 'numpy.load', 'np.load', (['src'], {}), '(src)\n', (4771, 4776), True, 'import numpy as np\n'), ((7535, 7590), 'scipy.sparse.csr_matrix', 'smat.csr_matrix', (['(val, (row_idx, col_idx))'], {'shape': 'shape'}), '((val, (row_idx, col_idx)), shape=shape)\n', (7550, 7590), True, 'import scipy.sparse as smat\n'), ((12526, 12584), 'numpy.empty', 'np.empty', (['(keep_topk * dense.shape[0],)'], {'dtype': 'dense.dtype'}), '((keep_topk * dense.shape[0],), dtype=dense.dtype)\n', (12534, 12584), True, 'import numpy as np\n'), ((12599, 12659), 'numpy.empty', 'np.empty', (['(keep_topk * dense.shape[0],)'], {'dtype': 'indices_dtype'}), '((keep_topk * dense.shape[0],), dtype=indices_dtype)\n', (12607, 12659), True, 
'import numpy as np\n'), ((13371, 13446), 'numpy.arange', 'np.arange', (['(0)', '(dense.shape[0] * keep_topk + 1)', 'keep_topk'], {'dtype': 'indptr_dtype'}), '(0, dense.shape[0] * keep_topk + 1, keep_topk, dtype=indptr_dtype)\n', (13380, 13446), True, 'import numpy as np\n'), ((14967, 15011), 'numpy.empty', 'np.empty', (['(total_rows + 1)'], {'dtype': 'indptr_dtype'}), '(total_rows + 1, dtype=indptr_dtype)\n', (14975, 15011), True, 'import numpy as np\n'), ((15026, 15066), 'numpy.empty', 'np.empty', (['total_nnz'], {'dtype': 'indices_dtype'}), '(total_nnz, dtype=indices_dtype)\n', (15034, 15066), True, 'import numpy as np\n'), ((15078, 15110), 'numpy.empty', 'np.empty', (['total_nnz'], {'dtype': 'dtype'}), '(total_nnz, dtype=dtype)\n', (15086, 15110), True, 'import numpy as np\n'), ((17053, 17094), 'numpy.empty', 'np.empty', (['(nr_rows + 1)'], {'dtype': 'indptr_dtype'}), '(nr_rows + 1, dtype=indptr_dtype)\n', (17061, 17094), True, 'import numpy as np\n'), ((17109, 17149), 'numpy.empty', 'np.empty', (['total_nnz'], {'dtype': 'indices_dtype'}), '(total_nnz, dtype=indices_dtype)\n', (17117, 17149), True, 'import numpy as np\n'), ((17161, 17193), 'numpy.empty', 'np.empty', (['total_nnz'], {'dtype': 'dtype'}), '(total_nnz, dtype=dtype)\n', (17169, 17193), True, 'import numpy as np\n'), ((19123, 19167), 'numpy.empty', 'np.empty', (['(total_rows + 1)'], {'dtype': 'indptr_dtype'}), '(total_rows + 1, dtype=indptr_dtype)\n', (19131, 19167), True, 'import numpy as np\n'), ((19182, 19222), 'numpy.empty', 'np.empty', (['total_nnz'], {'dtype': 'indices_dtype'}), '(total_nnz, dtype=indices_dtype)\n', (19190, 19222), True, 'import numpy as np\n'), ((19234, 19266), 'numpy.empty', 'np.empty', (['total_nnz'], {'dtype': 'dtype'}), '(total_nnz, dtype=dtype)\n', (19242, 19266), True, 'import numpy as np\n'), ((25049, 25091), 'numpy.arange', 'np.arange', (['dense.shape[0]'], {'dtype': 'np.uint32'}), '(dense.shape[0], dtype=np.uint32)\n', (25058, 25091), True, 'import numpy as np\n'), 
((25103, 25145), 'numpy.arange', 'np.arange', (['dense.shape[1]'], {'dtype': 'np.uint32'}), '(dense.shape[1], dtype=np.uint32)\n', (25112, 25145), True, 'import numpy as np\n'), ((26289, 26347), 'scipy.sparse.csr_matrix', 'smat.csr_matrix', (['(rel, csr.indices, csr.indptr)', 'csr.shape'], {}), '((rel, csr.indices, csr.indptr), csr.shape)\n', (26304, 26347), True, 'import scipy.sparse as smat\n'), ((26815, 26849), 'numpy.zeros', 'np.zeros', (['coo.shape[0]'], {'dtype': 'bool'}), '(coo.shape[0], dtype=bool)\n', (26823, 26849), True, 'import numpy as np\n'), ((26867, 26901), 'numpy.zeros', 'np.zeros', (['coo.shape[1]'], {'dtype': 'bool'}), '(coo.shape[1], dtype=bool)\n', (26875, 26901), True, 'import numpy as np\n'), ((27047, 27149), 'scipy.sparse.coo_matrix', 'smat.coo_matrix', (['(coo.data[valid_idx], (coo.row[valid_idx], coo.col[valid_idx]))'], {'shape': 'coo.shape'}), '((coo.data[valid_idx], (coo.row[valid_idx], coo.col[\n valid_idx])), shape=coo.shape)\n', (27062, 27149), True, 'import scipy.sparse as smat\n'), ((30343, 30359), 'scipy.sparse.issparse', 'smat.issparse', (['A'], {}), '(A)\n', (30356, 30359), True, 'import scipy.sparse as smat\n'), ((30711, 30764), 'sklearn.utils.extmath.randomized_svd', 'randomized_svd', (['An', '(dim + nr_discards)'], {'random_state': '(0)'}), '(An, dim + nr_discards, random_state=0)\n', (30725, 30764), False, 'from sklearn.utils.extmath import randomized_svd\n'), ((30848, 30914), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['(row_diag[:, np.newaxis] * U[:, nr_discards:])'], {}), '(row_diag[:, np.newaxis] * U[:, nr_discards:])\n', (30868, 30914), True, 'import numpy as np\n'), ((30935, 31001), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['(col_diag[:, np.newaxis] * VT[nr_discards:].T)'], {}), '(col_diag[:, np.newaxis] * VT[nr_discards:].T)\n', (30955, 31001), True, 'import numpy as np\n'), ((7719, 7763), 'numpy.argsort', 'np.argsort', (['(-csr.data[rng])'], {'kind': '"""mergesort"""'}), "(-csr.data[rng], 
kind='mergesort')\n", (7729, 7763), True, 'import numpy as np\n'), ((10600, 10640), 'numpy.arange', 'np.arange', (['csr.shape[0]'], {'dtype': 'np.uint32'}), '(csr.shape[0], dtype=np.uint32)\n', (10609, 10640), True, 'import numpy as np\n'), ((14707, 14725), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (14715, 14725), True, 'import numpy as np\n'), ((14768, 14821), 'scipy.sparse.sputils.upcast', 'smat.sputils.upcast', (['*[mat.dtype for mat in matrices]'], {}), '(*[mat.dtype for mat in matrices])\n', (14787, 14821), True, 'import scipy.sparse as smat\n'), ((16793, 16811), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (16801, 16811), True, 'import numpy as np\n'), ((16854, 16907), 'scipy.sparse.sputils.upcast', 'smat.sputils.upcast', (['*[mat.dtype for mat in matrices]'], {}), '(*[mat.dtype for mat in matrices])\n', (16873, 16907), True, 'import scipy.sparse as smat\n'), ((18860, 18878), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (18868, 18878), True, 'import numpy as np\n'), ((18921, 18974), 'scipy.sparse.sputils.upcast', 'smat.sputils.upcast', (['*[mat.dtype for mat in matrices]'], {}), '(*[mat.dtype for mat in matrices])\n', (18940, 18974), True, 'import scipy.sparse as smat\n'), ((26112, 26135), 'numpy.arange', 'np.arange', (['csr.shape[0]'], {}), '(csr.shape[0])\n', (26121, 26135), True, 'import numpy as np\n'), ((30406, 30462), 'scipy.sparse.dia_matrix', 'smat.dia_matrix', (['(row_diag, [0])'], {'shape': '(n_rows, n_rows)'}), '((row_diag, [0]), shape=(n_rows, n_rows))\n', (30421, 30462), True, 'import scipy.sparse as smat\n'), ((30475, 30531), 'scipy.sparse.dia_matrix', 'smat.dia_matrix', (['(col_diag, [0])'], {'shape': '(n_cols, n_cols)'}), '((col_diag, [0]), shape=(n_cols, n_cols))\n', (30490, 30531), True, 'import scipy.sparse as smat\n'), ((35127, 35158), 'numpy.zeros', 'np.zeros', (['topk'], {'dtype': 'np.uint64'}), '(topk, dtype=np.uint64)\n', (35135, 35158), True, 'import numpy as np\n'), ((35176, 
35208), 'numpy.zeros', 'np.zeros', (['topk'], {'dtype': 'np.float64'}), '(topk, dtype=np.float64)\n', (35184, 35208), True, 'import numpy as np\n'), ((3981, 4023), 'numpy.save', 'np.save', (['tgt_file', 'mat'], {'allow_pickle': '(False)'}), '(tgt_file, mat, allow_pickle=False)\n', (3988, 4023), True, 'import numpy as np\n'), ((8132, 8176), 'numpy.arange', 'np.arange', (['shape[0]'], {'dtype': 'csr.indices.dtype'}), '(shape[0], dtype=csr.indices.dtype)\n', (8141, 8176), True, 'import numpy as np\n'), ((8424, 8472), 'numpy.bincount', 'np.bincount', (['(row_idx + 1)'], {'minlength': '(shape[0] + 1)'}), '(row_idx + 1, minlength=shape[0] + 1)\n', (8435, 8472), True, 'import numpy as np\n'), ((35332, 35396), 'numpy.isin', 'np.isin', (['pY.indices[pY.indptr[i]:pY.indptr[i + 1]][:topk]', 'truth'], {}), '(pY.indices[pY.indptr[i]:pY.indptr[i + 1]][:topk], truth)\n', (35339, 35396), True, 'import numpy as np\n'), ((35425, 35460), 'numpy.cumsum', 'np.cumsum', (['matched'], {'dtype': 'np.uint64'}), '(matched, dtype=np.uint64)\n', (35434, 35460), True, 'import numpy as np\n'), ((35832, 35854), 'numpy.arange', 'np.arange', (['(1)', '(topk + 1)'], {}), '(1, topk + 1)\n', (35841, 35854), True, 'import numpy as np\n'), ((4081, 4127), 'scipy.sparse.save_npz', 'smat.save_npz', (['tgt_file', 'mat'], {'compressed': '(False)'}), '(tgt_file, mat, compressed=False)\n', (4094, 4127), True, 'import scipy.sparse as smat\n'), ((12391, 12409), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (12399, 12409), True, 'import numpy as np\n'), ((12477, 12495), 'numpy.iinfo', 'np.iinfo', (['np.int32'], {}), '(np.int32)\n', (12485, 12495), True, 'import numpy as np\n'), ((13069, 13089), 'numpy.arange', 'np.arange', (['chunk_len'], {}), '(chunk_len)\n', (13078, 13089), True, 'import numpy as np\n'), ((26184, 26198), 'numpy.arange', 'np.arange', (['nnz'], {}), '(nnz)\n', (26193, 26198), True, 'import numpy as np\n'), ((12981, 13001), 'numpy.arange', 'np.arange', (['keep_topk'], {}), 
'(keep_topk)\n', (12990, 13001), True, 'import numpy as np\n'), ((25176, 25194), 'numpy.ones_like', 'np.ones_like', (['rows'], {}), '(rows)\n', (25188, 25194), True, 'import numpy as np\n')] |
#!/usr/bin/env python
import os
import sys
def midi_to_freq(num):
""" Takes a MIDI number and returns a frequency in Hz for corresponding note. """
num_a = num - 69
freq = 440 * 2**(num_a / 12.0)
return freq
def fp(relative):
#if hasattr(sys, "_MEIPASS"):
# return os.path.join(sys._MEIPASS, relative)
return os.path.join(relative)
if __name__ == '__main__':
print(midi_to_freq(69))
print(midi_to_freq(60))
print(midi_to_freq(105))
| [
"os.path.join"
] | [((342, 364), 'os.path.join', 'os.path.join', (['relative'], {}), '(relative)\n', (354, 364), False, 'import os\n')] |
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from builtins import *
import os
import sys
import shutil
import inspect
from ferenda import TextReader, util
from ferenda.testutil import RepoTester, file_parametrize
from ferenda.compat import unittest
# SUT
from ferenda.sources.legal.se import myndfskr
class Parse(RepoTester):
repoclass = myndfskr.MyndFskrBase # in some cases we might need to get a
# specific one like SOSFS, see below
aliases = {} # setUpClass fills this in
@classmethod
def setUpClass(cls):
super(Parse, cls).setUpClass()
# enumerate all classes defined in the module where
# MyndFskrBase is defined, check their static property 'alias'
# and use it to add to cls.aliases
for name, obj in inspect.getmembers(myndfskr):
if inspect.isclass(obj) and hasattr(obj, 'alias'):
cls.aliases[obj.alias] = obj
def parse_filename(self, filename):
# a few of the subclasses have specialized rules. make sure we
# instantiate the correct class
alias = os.path.basename(filename).split("-")[0]
basefile = os.path.splitext(
os.path.basename(filename))[0].replace("-",
"/", 1).replace("-", ":")
repoclass = self.aliases[alias]
repo = repoclass(datadir=self.datadir,
storelocation=self.datadir + "/ferenda.sqlite",
indexlocation=self.datadir + "/whoosh",)
return repo, basefile
def parametric_test(self, filename):
# these options adjusts the constructed URIs. by default, the
# official rpubl URIs are minted.
#
# self.repo.config.localizeuri = True
# self.repo.config.url = "http://example.org/"
# self.repo.config.urlpath = ''
# a few of the subclasses have specialized rules. make sure we
# instantiate the correct class
repo, basefile = self.parse_filename(filename)
doc = repo.make_document(basefile)
text = repo.sanitize_text(util.readfile(filename), basefile)
reader = TextReader(string=text, encoding='utf-8')
props = repo.extract_metadata(reader, basefile)
props = repo.sanitize_metadata(props, basefile)
resource = repo.polish_metadata(props, basefile)
repo.infer_metadata(resource, basefile)
wantfile = filename.replace(".txt", ".n3")
if os.path.exists(wantfile):
self.assertEqualGraphs(wantfile, resource.graph, exact=False)
else:
self.fail("Expected a %s with the following content:\n\n%s" %
(wantfile, doc.meta.serialize(format="n3").decode("utf-8")))
file_parametrize(Parse, "test/files/myndfskr", ".txt")
| [
"os.path.exists",
"inspect.getmembers",
"ferenda.TextReader",
"os.path.basename",
"ferenda.util.readfile",
"inspect.isclass",
"ferenda.testutil.file_parametrize"
] | [((2841, 2895), 'ferenda.testutil.file_parametrize', 'file_parametrize', (['Parse', '"""test/files/myndfskr"""', '""".txt"""'], {}), "(Parse, 'test/files/myndfskr', '.txt')\n", (2857, 2895), False, 'from ferenda.testutil import RepoTester, file_parametrize\n'), ((864, 892), 'inspect.getmembers', 'inspect.getmembers', (['myndfskr'], {}), '(myndfskr)\n', (882, 892), False, 'import inspect\n'), ((2238, 2279), 'ferenda.TextReader', 'TextReader', ([], {'string': 'text', 'encoding': '"""utf-8"""'}), "(string=text, encoding='utf-8')\n", (2248, 2279), False, 'from ferenda import TextReader, util\n'), ((2568, 2592), 'os.path.exists', 'os.path.exists', (['wantfile'], {}), '(wantfile)\n', (2582, 2592), False, 'import os\n'), ((2186, 2209), 'ferenda.util.readfile', 'util.readfile', (['filename'], {}), '(filename)\n', (2199, 2209), False, 'from ferenda import TextReader, util\n'), ((909, 929), 'inspect.isclass', 'inspect.isclass', (['obj'], {}), '(obj)\n', (924, 929), False, 'import inspect\n'), ((1170, 1196), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (1186, 1196), False, 'import os\n'), ((1260, 1286), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (1276, 1286), False, 'import os\n')] |
'''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p, arrays
# Code generation uses this
from OpenGL.raw.GL import _types as _cs
# End users want this...
from OpenGL.raw.GL._types import *
from OpenGL.raw.GL import _errors
from OpenGL.constant import Constant as _C
import ctypes
_EXTENSION_NAME = 'GL_NV_geometry_program4'
def _f( function ):
return _p.createFunction( function,_p.PLATFORM.GL,'GL_NV_geometry_program4',error_checker=_errors._error_checker)
GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT=_C('GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT',0x8DA7)
GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT=_C('GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT',0x8CD4)
GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT=_C('GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT',0x8DA9)
GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT=_C('GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT',0x8DA8)
GL_GEOMETRY_INPUT_TYPE_EXT=_C('GL_GEOMETRY_INPUT_TYPE_EXT',0x8DDB)
GL_GEOMETRY_OUTPUT_TYPE_EXT=_C('GL_GEOMETRY_OUTPUT_TYPE_EXT',0x8DDC)
GL_GEOMETRY_PROGRAM_NV=_C('GL_GEOMETRY_PROGRAM_NV',0x8C26)
GL_GEOMETRY_VERTICES_OUT_EXT=_C('GL_GEOMETRY_VERTICES_OUT_EXT',0x8DDA)
GL_LINES_ADJACENCY_EXT=_C('GL_LINES_ADJACENCY_EXT',0x000A)
GL_LINE_STRIP_ADJACENCY_EXT=_C('GL_LINE_STRIP_ADJACENCY_EXT',0x000B)
GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT=_C('GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT',0x8C29)
GL_MAX_PROGRAM_OUTPUT_VERTICES_NV=_C('GL_MAX_PROGRAM_OUTPUT_VERTICES_NV',0x8C27)
GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV=_C('GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV',0x8C28)
GL_PROGRAM_POINT_SIZE_EXT=_C('GL_PROGRAM_POINT_SIZE_EXT',0x8642)
GL_TRIANGLES_ADJACENCY_EXT=_C('GL_TRIANGLES_ADJACENCY_EXT',0x000C)
GL_TRIANGLE_STRIP_ADJACENCY_EXT=_C('GL_TRIANGLE_STRIP_ADJACENCY_EXT',0x000D)
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLuint,_cs.GLint)
def glFramebufferTextureEXT(target,attachment,texture,level):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLuint,_cs.GLint,_cs.GLenum)
def glFramebufferTextureFaceEXT(target,attachment,texture,level,face):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLenum,_cs.GLuint,_cs.GLint,_cs.GLint)
def glFramebufferTextureLayerEXT(target,attachment,texture,level,layer):pass
@_f
@_p.types(None,_cs.GLenum,_cs.GLint)
def glProgramVertexLimitNV(target,limit):pass
| [
"OpenGL.platform.types",
"OpenGL.constant.Constant",
"OpenGL.platform.createFunction"
] | [((552, 602), 'OpenGL.constant.Constant', '_C', (['"""GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT"""', '(36263)'], {}), "('GL_FRAMEBUFFER_ATTACHMENT_LAYERED_EXT', 36263)\n", (554, 602), True, 'from OpenGL.constant import Constant as _C\n'), ((648, 704), 'OpenGL.constant.Constant', '_C', (['"""GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT"""', '(36052)'], {}), "('GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LAYER_EXT', 36052)\n", (650, 704), True, 'from OpenGL.constant import Constant as _C\n'), ((748, 802), 'OpenGL.constant.Constant', '_C', (['"""GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT"""', '(36265)'], {}), "('GL_FRAMEBUFFER_INCOMPLETE_LAYER_COUNT_EXT', 36265)\n", (750, 802), True, 'from OpenGL.constant import Constant as _C\n'), ((848, 904), 'OpenGL.constant.Constant', '_C', (['"""GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT"""', '(36264)'], {}), "('GL_FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS_EXT', 36264)\n", (850, 904), True, 'from OpenGL.constant import Constant as _C\n'), ((933, 972), 'OpenGL.constant.Constant', '_C', (['"""GL_GEOMETRY_INPUT_TYPE_EXT"""', '(36315)'], {}), "('GL_GEOMETRY_INPUT_TYPE_EXT', 36315)\n", (935, 972), True, 'from OpenGL.constant import Constant as _C\n'), ((1002, 1042), 'OpenGL.constant.Constant', '_C', (['"""GL_GEOMETRY_OUTPUT_TYPE_EXT"""', '(36316)'], {}), "('GL_GEOMETRY_OUTPUT_TYPE_EXT', 36316)\n", (1004, 1042), True, 'from OpenGL.constant import Constant as _C\n'), ((1067, 1102), 'OpenGL.constant.Constant', '_C', (['"""GL_GEOMETRY_PROGRAM_NV"""', '(35878)'], {}), "('GL_GEOMETRY_PROGRAM_NV', 35878)\n", (1069, 1102), True, 'from OpenGL.constant import Constant as _C\n'), ((1133, 1174), 'OpenGL.constant.Constant', '_C', (['"""GL_GEOMETRY_VERTICES_OUT_EXT"""', '(36314)'], {}), "('GL_GEOMETRY_VERTICES_OUT_EXT', 36314)\n", (1135, 1174), True, 'from OpenGL.constant import Constant as _C\n'), ((1199, 1231), 'OpenGL.constant.Constant', '_C', (['"""GL_LINES_ADJACENCY_EXT"""', '(10)'], {}), "('GL_LINES_ADJACENCY_EXT', 10)\n", (1201, 1231), True, 'from 
OpenGL.constant import Constant as _C\n'), ((1264, 1301), 'OpenGL.constant.Constant', '_C', (['"""GL_LINE_STRIP_ADJACENCY_EXT"""', '(11)'], {}), "('GL_LINE_STRIP_ADJACENCY_EXT', 11)\n", (1266, 1301), True, 'from OpenGL.constant import Constant as _C\n'), ((1346, 1398), 'OpenGL.constant.Constant', '_C', (['"""GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT"""', '(35881)'], {}), "('GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS_EXT', 35881)\n", (1348, 1398), True, 'from OpenGL.constant import Constant as _C\n'), ((1434, 1480), 'OpenGL.constant.Constant', '_C', (['"""GL_MAX_PROGRAM_OUTPUT_VERTICES_NV"""', '(35879)'], {}), "('GL_MAX_PROGRAM_OUTPUT_VERTICES_NV', 35879)\n", (1436, 1480), True, 'from OpenGL.constant import Constant as _C\n'), ((1524, 1578), 'OpenGL.constant.Constant', '_C', (['"""GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV"""', '(35880)'], {}), "('GL_MAX_PROGRAM_TOTAL_OUTPUT_COMPONENTS_NV', 35880)\n", (1526, 1578), True, 'from OpenGL.constant import Constant as _C\n'), ((1606, 1644), 'OpenGL.constant.Constant', '_C', (['"""GL_PROGRAM_POINT_SIZE_EXT"""', '(34370)'], {}), "('GL_PROGRAM_POINT_SIZE_EXT', 34370)\n", (1608, 1644), True, 'from OpenGL.constant import Constant as _C\n'), ((1673, 1709), 'OpenGL.constant.Constant', '_C', (['"""GL_TRIANGLES_ADJACENCY_EXT"""', '(12)'], {}), "('GL_TRIANGLES_ADJACENCY_EXT', 12)\n", (1675, 1709), True, 'from OpenGL.constant import Constant as _C\n'), ((1746, 1787), 'OpenGL.constant.Constant', '_C', (['"""GL_TRIANGLE_STRIP_ADJACENCY_EXT"""', '(13)'], {}), "('GL_TRIANGLE_STRIP_ADJACENCY_EXT', 13)\n", (1748, 1787), True, 'from OpenGL.constant import Constant as _C\n'), ((1798, 1859), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLenum', '_cs.GLenum', '_cs.GLuint', '_cs.GLint'], {}), '(None, _cs.GLenum, _cs.GLenum, _cs.GLuint, _cs.GLint)\n', (1806, 1859), True, 'from OpenGL import platform as _p, arrays\n'), ((1930, 2003), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLenum', '_cs.GLenum', '_cs.GLuint', '_cs.GLint', '_cs.GLenum'], 
{}), '(None, _cs.GLenum, _cs.GLenum, _cs.GLuint, _cs.GLint, _cs.GLenum)\n', (1938, 2003), True, 'from OpenGL import platform as _p, arrays\n'), ((2082, 2154), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLenum', '_cs.GLenum', '_cs.GLuint', '_cs.GLint', '_cs.GLint'], {}), '(None, _cs.GLenum, _cs.GLenum, _cs.GLuint, _cs.GLint, _cs.GLint)\n', (2090, 2154), True, 'from OpenGL import platform as _p, arrays\n'), ((2235, 2272), 'OpenGL.platform.types', '_p.types', (['None', '_cs.GLenum', '_cs.GLint'], {}), '(None, _cs.GLenum, _cs.GLint)\n', (2243, 2272), True, 'from OpenGL import platform as _p, arrays\n'), ((406, 518), 'OpenGL.platform.createFunction', '_p.createFunction', (['function', '_p.PLATFORM.GL', '"""GL_NV_geometry_program4"""'], {'error_checker': '_errors._error_checker'}), "(function, _p.PLATFORM.GL, 'GL_NV_geometry_program4',\n error_checker=_errors._error_checker)\n", (423, 518), True, 'from OpenGL import platform as _p, arrays\n')] |
from plumbum import local
import benchbuild as bb
from benchbuild.environments.domain.declarative import ContainerImage
from benchbuild.source import HTTP
from benchbuild.utils.cmd import make, tar
class XZ(bb.Project):
""" XZ """
VERSION = '5.2.1'
NAME = 'xz'
DOMAIN = 'compression'
GROUP = 'benchbuild'
SOURCE = [
HTTP(
remote={'5.2.1': 'http://tukaani.org/xz/xz-5.2.1.tar.gz'},
local='xz.tar.gz'
),
HTTP(
remote={'1.0': 'http://lairosiel.de/dist/compression.tar.gz'},
local='compression.tar.gz'
)
]
CONTAINER = ContainerImage().from_('benchbuild:alpine')
def compile(self):
xz_source = local.path(self.source_of('xz.tar.gz'))
xz_version = self.version_of('xz.tar.gz')
compression_source = local.path(self.source_of('compression.tar.gz'))
tar('xf', xz_source)
tar('xf', compression_source)
unpack_dir = local.path(f'xz-{xz_version}')
clang = bb.compiler.cc(self)
with local.cwd(unpack_dir):
configure = local["./configure"]
_configure = bb.watch(configure)
with local.env(CC=str(clang)):
_configure(
"--enable-threads=no", "--with-gnu-ld=yes",
"--disable-shared", "--disable-dependency-tracking",
"--disable-xzdec", "--disable-lzmadec",
"--disable-lzmainfo", "--disable-lzma-links",
"--disable-scripts", "--disable-doc"
)
_make = bb.watch(make)
_make("CC=" + str(clang), "clean", "all")
def run_tests(self):
xz_version = self.version_of('xz.tar.gz')
unpack_dir = local.path(f'xz-{xz_version}')
xz = bb.wrap(unpack_dir / "src" / "xz" / "xz", self)
_xz = bb.watch(xz)
# Compress
_xz("--compress", "-f", "-k", "-e", "-9", "compression/text.html")
_xz("--compress", "-f", "-k", "-e", "-9", "compression/chicken.jpg")
_xz("--compress", "-f", "-k", "-e", "-9", "compression/control")
_xz("--compress", "-f", "-k", "-e", "-9", "compression/input.source")
_xz("--compress", "-f", "-k", "-e", "-9", "compression/liberty.jpg")
# Decompress
_xz("--decompress", "-f", "-k", "compression/text.html.xz")
_xz("--decompress", "-f", "-k", "compression/chicken.jpg.xz")
_xz("--decompress", "-f", "-k", "compression/control.xz")
_xz("--decompress", "-f", "-k", "compression/input.source.xz")
_xz("--decompress", "-f", "-k", "compression/liberty.jpg.xz")
| [
"benchbuild.environments.domain.declarative.ContainerImage",
"benchbuild.source.HTTP",
"benchbuild.compiler.cc",
"benchbuild.utils.cmd.tar",
"plumbum.local.path",
"benchbuild.wrap",
"benchbuild.watch",
"plumbum.local.cwd"
] | [((352, 439), 'benchbuild.source.HTTP', 'HTTP', ([], {'remote': "{'5.2.1': 'http://tukaani.org/xz/xz-5.2.1.tar.gz'}", 'local': '"""xz.tar.gz"""'}), "(remote={'5.2.1': 'http://tukaani.org/xz/xz-5.2.1.tar.gz'}, local=\n 'xz.tar.gz')\n", (356, 439), False, 'from benchbuild.source import HTTP\n'), ((478, 578), 'benchbuild.source.HTTP', 'HTTP', ([], {'remote': "{'1.0': 'http://lairosiel.de/dist/compression.tar.gz'}", 'local': '"""compression.tar.gz"""'}), "(remote={'1.0': 'http://lairosiel.de/dist/compression.tar.gz'}, local=\n 'compression.tar.gz')\n", (482, 578), False, 'from benchbuild.source import HTTP\n'), ((895, 915), 'benchbuild.utils.cmd.tar', 'tar', (['"""xf"""', 'xz_source'], {}), "('xf', xz_source)\n", (898, 915), False, 'from benchbuild.utils.cmd import make, tar\n'), ((924, 953), 'benchbuild.utils.cmd.tar', 'tar', (['"""xf"""', 'compression_source'], {}), "('xf', compression_source)\n", (927, 953), False, 'from benchbuild.utils.cmd import make, tar\n'), ((976, 1006), 'plumbum.local.path', 'local.path', (['f"""xz-{xz_version}"""'], {}), "(f'xz-{xz_version}')\n", (986, 1006), False, 'from plumbum import local\n'), ((1023, 1043), 'benchbuild.compiler.cc', 'bb.compiler.cc', (['self'], {}), '(self)\n', (1037, 1043), True, 'import benchbuild as bb\n'), ((1766, 1796), 'plumbum.local.path', 'local.path', (['f"""xz-{xz_version}"""'], {}), "(f'xz-{xz_version}')\n", (1776, 1796), False, 'from plumbum import local\n'), ((1810, 1857), 'benchbuild.wrap', 'bb.wrap', (["(unpack_dir / 'src' / 'xz' / 'xz')", 'self'], {}), "(unpack_dir / 'src' / 'xz' / 'xz', self)\n", (1817, 1857), True, 'import benchbuild as bb\n'), ((1872, 1884), 'benchbuild.watch', 'bb.watch', (['xz'], {}), '(xz)\n', (1880, 1884), True, 'import benchbuild as bb\n'), ((630, 646), 'benchbuild.environments.domain.declarative.ContainerImage', 'ContainerImage', ([], {}), '()\n', (644, 646), False, 'from benchbuild.environments.domain.declarative import ContainerImage\n'), ((1057, 1078), 
'plumbum.local.cwd', 'local.cwd', (['unpack_dir'], {}), '(unpack_dir)\n', (1066, 1078), False, 'from plumbum import local\n'), ((1150, 1169), 'benchbuild.watch', 'bb.watch', (['configure'], {}), '(configure)\n', (1158, 1169), True, 'import benchbuild as bb\n'), ((1600, 1614), 'benchbuild.watch', 'bb.watch', (['make'], {}), '(make)\n', (1608, 1614), True, 'import benchbuild as bb\n')] |
import numpy as np
import csv
import cv2
from keras.models import Sequential
from keras.layers import Dense, Flatten
def load_data():
lines = []
with open('Data/driving_log.csv') as csvfile:
reader = csv.reader(csvfile)
for line in reader:
lines.append(line)
images = []
measurements = []
for line in lines:
source_path = line[0]
filename = source_path.split('/')[-1]
current_path = 'Data/IMG/'+filename
image = cv2.imread(current_path)
images.append(image)
measurement = float(line[3])
measurements.append(measurement)
X_train = np.array(images)
y_train = np.array(measurements)
return X_train, y_train
def train(X_train, y_train):
model = Sequential()
model.add(Flatten(input_shape=(160, 320, 3)))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam')
model.fit(X_train, y_train, validation_split=0.2, shuffle=True, nb_epoch=10)
model.save('model.h5')
if __name__ == "__main__":
X_train, y_train = load_data()
train(X_train, y_train) | [
"keras.layers.Flatten",
"keras.models.Sequential",
"numpy.array",
"csv.reader",
"keras.layers.Dense",
"cv2.imread"
] | [((645, 661), 'numpy.array', 'np.array', (['images'], {}), '(images)\n', (653, 661), True, 'import numpy as np\n'), ((676, 698), 'numpy.array', 'np.array', (['measurements'], {}), '(measurements)\n', (684, 698), True, 'import numpy as np\n'), ((769, 781), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (779, 781), False, 'from keras.models import Sequential\n'), ((218, 237), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (228, 237), False, 'import csv\n'), ((499, 523), 'cv2.imread', 'cv2.imread', (['current_path'], {}), '(current_path)\n', (509, 523), False, 'import cv2\n'), ((796, 830), 'keras.layers.Flatten', 'Flatten', ([], {'input_shape': '(160, 320, 3)'}), '(input_shape=(160, 320, 3))\n', (803, 830), False, 'from keras.layers import Dense, Flatten\n'), ((846, 854), 'keras.layers.Dense', 'Dense', (['(1)'], {}), '(1)\n', (851, 854), False, 'from keras.layers import Dense, Flatten\n')] |
from abc import get_cache_token
from collections import OrderedDict
from torch import nn
class ResidualBlock(nn.Module):
def __init__(self, in_size, out_size):
super().__init__()
self.in_size, self.out_size = in_size, out_size
self.blocks = nn.Identity()
self.shortcut = nn.Identity()
def forward(self, x):
residual = x
if self.should_apply_shortcut:
residual = self.shortcut(x)
x = self.blocks(x)
x += residual
return x
@property
def should_apply_shortcut(self):
return self.in_size != self.out_size
# 用来处理short cut
class ResNetResidualBlock(ResidualBlock):
def __init__(self, in_size, out_size):
super().__init__(in_size, out_size)
self.shortcut = nn.Sequential(OrderedDict(
{
'dense': nn.Linear(self.in_size, self.out_size),
# 'bn': nn.BatchNorm1d(self.out_size)
})) if self.should_apply_shortcut else None
@property
def should_apply_shortcut(self):
return self.in_size != self.out_size
# 来定义一个block
class ResNetBasicBlock(ResNetResidualBlock):
def __init__(self, in_size, out_size, activation=nn.ReLU):
super().__init__(in_size, out_size)
self.blocks = nn.Sequential(
nn.Linear(self.in_size, self.out_size),
activation(),
nn.Linear(self.out_size, self.out_size),
)
# 定义一个resnet层,里面会有多个block
class ResNetLayer(nn.Module):
def __init__(self, in_size, out_size, block=ResNetBasicBlock, n=1, activation=nn.ReLU):
super().__init__()
self.blocks = nn.Sequential(
block(in_size, out_size, activation),
*[block(out_size,
out_size, activation) for _ in range(n-1)]
)
def forward(self, x):
x = self.blocks(x)
return x
# 由多个resnet layer组成encoder
class ResNetEncoder(nn.Module):
"""
ResNet encoder composed by decreasing different layers with increasing features.
"""
def __init__(self, in_size=128, blocks_sizes=[64, 32, 16], deepths=[2, 2, 2],
activation=nn.ReLU, block=ResNetBasicBlock):
super().__init__()
self.blocks_sizes = blocks_sizes
self.gate = nn.Sequential(
nn.Linear(in_size, self.blocks_sizes[0]),
# nn.BatchNorm1d(self.blocks_sizes[0]),
activation(),
)
self.in_out_block_sizes = list(zip(blocks_sizes, blocks_sizes[1:]))
self.blocks = nn.ModuleList([
*[ResNetLayer(in_size, out_size, n=n, activation=activation, block=block)
for (in_size, out_size), n in zip(self.in_out_block_sizes, deepths)]
])
def forward(self, x):
x = self.gate(x)
for block in self.blocks:
x = block(x)
return x
if __name__ == "__main__":
m = ResNetEncoder()
def get_parameter_number(net):
total_num = sum(p.numel() for p in net.parameters())
trainable_num = sum(p.numel()
for p in net.parameters() if p.requires_grad)
return {'Total': total_num, 'Trainable': trainable_num}
print(get_parameter_number(m))
| [
"torch.nn.Identity",
"torch.nn.Linear"
] | [((272, 285), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (283, 285), False, 'from torch import nn\n'), ((310, 323), 'torch.nn.Identity', 'nn.Identity', ([], {}), '()\n', (321, 323), False, 'from torch import nn\n'), ((1317, 1355), 'torch.nn.Linear', 'nn.Linear', (['self.in_size', 'self.out_size'], {}), '(self.in_size, self.out_size)\n', (1326, 1355), False, 'from torch import nn\n'), ((1395, 1434), 'torch.nn.Linear', 'nn.Linear', (['self.out_size', 'self.out_size'], {}), '(self.out_size, self.out_size)\n', (1404, 1434), False, 'from torch import nn\n'), ((2308, 2348), 'torch.nn.Linear', 'nn.Linear', (['in_size', 'self.blocks_sizes[0]'], {}), '(in_size, self.blocks_sizes[0])\n', (2317, 2348), False, 'from torch import nn\n'), ((852, 890), 'torch.nn.Linear', 'nn.Linear', (['self.in_size', 'self.out_size'], {}), '(self.in_size, self.out_size)\n', (861, 890), False, 'from torch import nn\n')] |
"""
Tests asserting that ModelTypes convert to and from json when working
with ModelDatas
"""
# Allow inspection of private class members
# pylint: disable=protected-access
from mock import Mock
from xblock.core import XBlock
from xblock.fields import Field, Scope, ScopeIds
from xblock.field_data import DictFieldData
from xblock.test.tools import TestRuntime
class TestJSONConversionField(Field):
"""Field for testing json conversion"""
__test__ = False
def from_json(self, value):
assert value['$type'] == 'set'
return set(value['$vals'])
def to_json(self, value):
return {
'$type': 'set',
'$vals': sorted(value)
}
class TestBlock(XBlock):
"""XBlock for testing json conversion"""
__test__ = False
field_a = TestJSONConversionField(scope=Scope.content)
field_b = TestJSONConversionField(scope=Scope.content)
class TestModel(DictFieldData):
"""ModelData for testing json conversion"""
__test__ = False
def default(self, block, name):
return {'$type': 'set', '$vals': [0, 1]}
class TestJsonConversion:
"""
Verify that all ModelType operations correctly convert
the json that comes out of the ModelData to python objects
"""
def setup_method(self):
"""
Setup for each test method in this class.
"""
field_data = TestModel({
'field_a': {'$type': 'set', '$vals': [1, 2, 3]}
})
runtime = TestRuntime(services={'field-data': field_data})
self.block = TestBlock(runtime, scope_ids=Mock(spec=ScopeIds)) # pylint: disable=attribute-defined-outside-init
def test_get(self):
# Test field with a value
assert isinstance(self.block.field_a, set)
# Test ModelData default
assert isinstance(self.block.field_b, set)
def test_set(self):
self.block.field_b = set([5, 6, 5])
self.block.save()
assert isinstance(self.block.field_b, set)
assert {'$type': 'set', '$vals': [5, 6]} == \
self.block._field_data.get(self.block, 'field_b')
| [
"mock.Mock",
"xblock.test.tools.TestRuntime"
] | [((1487, 1535), 'xblock.test.tools.TestRuntime', 'TestRuntime', ([], {'services': "{'field-data': field_data}"}), "(services={'field-data': field_data})\n", (1498, 1535), False, 'from xblock.test.tools import TestRuntime\n'), ((1586, 1605), 'mock.Mock', 'Mock', ([], {'spec': 'ScopeIds'}), '(spec=ScopeIds)\n', (1590, 1605), False, 'from mock import Mock\n')] |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities to help set up and run experiments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os.path
from absl import logging
import numpy as np
import scipy.special
from six.moves import range
from six.moves import zip
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
gfile = tf.io.gfile
class _SimpleJsonEncoder(json.JSONEncoder):
def default(self, o):
return o.__dict__
def json_dumps(x):
return json.dumps(x, indent=2, cls=_SimpleJsonEncoder)
def record_config(config, path):
out = json_dumps(config)
logging.info('Recording config to %s\n %s', path, out)
gfile.makedirs(os.path.dirname(path))
with gfile.GFile(path, 'w') as fh:
fh.write(out)
def load_config(path):
logging.info('Loading config from %s', path)
with gfile.GFile(path) as fh:
return json.loads(fh.read())
def save_model(model, output_dir):
"""Save Keras model weights and architecture as HDF5 file."""
save_path = '%s/model.hdf5' % output_dir
logging.info('Saving model to %s', save_path)
model.save(save_path, include_optimizer=False)
return save_path
def load_model(path):
logging.info('Loading model from %s', path)
return tf.keras.models.load_model(path)
def metrics_from_stats(stats):
"""Compute metrics to report to hyperparameter tuner."""
labels, probs = stats['labels'], stats['probs']
# Reshape binary predictions to 2-class.
if len(probs.shape) == 1:
probs = np.stack([1-probs, probs], axis=-1)
assert len(probs.shape) == 2
predictions = np.argmax(probs, axis=-1)
accuracy = np.equal(labels, predictions)
label_probs = probs[np.arange(len(labels)), labels]
log_probs = np.maximum(-1e10, np.log(label_probs))
brier_scores = np.square(probs).sum(-1) - 2 * label_probs
return {'accuracy': accuracy.mean(0),
'brier_score': brier_scores.mean(0),
'log_prob': log_probs.mean(0)}
def make_predictions(
model, batched_dataset, predictions_per_example=1, writers=None,
predictions_are_logits=True, record_image_samples=True, max_batches=1e6):
"""Build a dictionary of predictions for examples from a dataset.
Args:
model: Trained Keras model.
batched_dataset: tf.data.Dataset that yields batches of image, label pairs.
predictions_per_example: Number of predictions to generate per example.
writers: `dict` with keys 'small' and 'full', containing
array_utils.StatsWriter instances for full prediction results and small
prediction results (omitting logits).
predictions_are_logits: Indicates whether model outputs are logits or
probabilities.
record_image_samples: `bool` Record one batch of input examples.
max_batches: `int`, maximum number of batches.
Returns:
Dictionary containing:
labels: Labels copied from the dataset (shape=[N]).
logits_samples: Samples of model predict outputs for each example
(shape=[N, M, K]).
probs: Probabilities after averaging over samples (shape=[N, K]).
image_samples: One batch of input images (for sanity checking).
"""
if predictions_are_logits:
samples_key = 'logits_samples'
avg_probs_fn = lambda x: scipy.special.softmax(x, axis=-1).mean(-2)
else:
samples_key = 'probs_samples'
avg_probs_fn = lambda x: x.mean(-2)
labels, outputs = [], []
predict_fn = model.predict if hasattr(model, 'predict') else model
for i, (inputs_i, labels_i) in enumerate(tfds.as_numpy(batched_dataset)):
logging.info('iteration: %d', i)
outputs_i = np.stack(
[predict_fn(inputs_i) for _ in range(predictions_per_example)], axis=1)
if writers is None:
labels.extend(labels_i)
outputs.append(outputs_i)
else:
avg_probs_i = avg_probs_fn(outputs_i)
prediction_batch = dict(labels=labels_i, probs=avg_probs_i)
if i == 0 and record_image_samples:
prediction_batch['image_samples'] = inputs_i
writers['small'].write_batch(prediction_batch)
prediction_batch[samples_key] = outputs_i
writers['full'].write_batch(prediction_batch)
# Don't predict whole ImageNet training set
if i > max_batches:
break
if writers is None:
image_samples = inputs_i # pylint: disable=undefined-loop-variable
labels = np.stack(labels, axis=0)
outputs = np.concatenate(outputs, axis=0)
stats = {'labels': labels, 'image_samples': image_samples,
samples_key: outputs, 'probs': avg_probs_fn(outputs)}
if record_image_samples:
stats['image_samples'] = image_samples
return stats
def download_dataset(dataset, batch_size_for_dl=1024):
logging.info('Starting dataset download...')
tup = list(zip(*tfds.as_numpy(dataset.batch(batch_size_for_dl))))
logging.info('dataset download complete.')
return tuple(np.concatenate(x, axis=0) for x in tup)
def get_distribution_strategy(distribution_strategy='default',
num_gpus=0,
num_workers=1,
all_reduce_alg=None,
num_packs=1):
"""Return a DistributionStrategy for running the model.
Args:
distribution_strategy: a string specifying which distribution strategy to
use. Accepted values are 'off', 'default', 'one_device', 'mirrored',
'parameter_server', 'multi_worker_mirrored', case insensitive. 'off' means
not to use Distribution Strategy; 'default' means to choose from
`MirroredStrategy`, `MultiWorkerMirroredStrategy`, or `OneDeviceStrategy`
according to the number of GPUs and number of workers.
num_gpus: Number of GPUs to run this model.
num_workers: Number of workers to run this model.
all_reduce_alg: Optional. Specifies which algorithm to use when performing
all-reduce. For `MirroredStrategy`, valid values are 'nccl' and
'hierarchical_copy'. For `MultiWorkerMirroredStrategy`, valid values are
'ring' and 'nccl'. If None, DistributionStrategy will choose based on
device topology.
num_packs: Optional. Sets the `num_packs` in `tf.distribute.NcclAllReduce`
or `tf.distribute.HierarchicalCopyAllReduce` for `MirroredStrategy`.
Returns:
tf.distribute.DistibutionStrategy object.
Raises:
ValueError: if `distribution_strategy` is 'off' or 'one_device' and
`num_gpus` is larger than 1; or `num_gpus` is negative.
"""
if num_gpus < 0:
raise ValueError('`num_gpus` can not be negative.')
distribution_strategy = distribution_strategy.lower()
if distribution_strategy == 'off':
if num_gpus > 1:
raise ValueError(
'When {} GPUs and {} workers are specified, distribution_strategy '
'flag cannot be set to "off".'.format(num_gpus, num_workers))
return None
if distribution_strategy == 'multi_worker_mirrored':
return tf.distribute.experimental.MultiWorkerMirroredStrategy(
communication=_collective_communication(all_reduce_alg))
if (distribution_strategy == 'one_device' or
(distribution_strategy == 'default' and num_gpus <= 1)):
if num_gpus == 0:
return tf.distribute.OneDeviceStrategy('device:CPU:0')
else:
if num_gpus > 1:
raise ValueError('`OneDeviceStrategy` can not be used for more than '
'one device.')
return tf.distribute.OneDeviceStrategy('device:GPU:0')
if distribution_strategy in ('mirrored', 'default'):
if num_gpus == 0:
assert distribution_strategy == 'mirrored'
devices = ['device:CPU:0']
else:
devices = ['device:GPU:%d' % i for i in range(num_gpus)]
return tf.distribute.MirroredStrategy(
devices=devices,
cross_device_ops=_mirrored_cross_device_ops(all_reduce_alg, num_packs))
if distribution_strategy == 'parameter_server':
return tf.compat.v1.distribute.experimental.ParameterServerStrategy()
raise ValueError(
'Unrecognized Distribution Strategy: %r' % distribution_strategy)
def _collective_communication(all_reduce_alg):
"""Return a CollectiveCommunication based on all_reduce_alg.
Args:
all_reduce_alg: a string specifying which collective communication to pick,
or None.
Returns:
tf.distribute.experimental.CollectiveCommunication object
Raises:
ValueError: if `all_reduce_alg` not in [None, 'ring', 'nccl']
"""
collective_communication_options = {
None: tf.distribute.experimental.CollectiveCommunication.AUTO,
'ring': tf.distribute.experimental.CollectiveCommunication.RING,
'nccl': tf.distribute.experimental.CollectiveCommunication.NCCL
}
if all_reduce_alg not in collective_communication_options:
raise ValueError(
'When used with `multi_worker_mirrored`, valid values for '
'all_reduce_alg are ["ring", "nccl"]. Supplied value: {}'.format(
all_reduce_alg))
return collective_communication_options[all_reduce_alg]
def _mirrored_cross_device_ops(all_reduce_alg, num_packs):
"""Return a CrossDeviceOps based on all_reduce_alg and num_packs.
Args:
all_reduce_alg: a string specifying which cross device op to pick, or None.
num_packs: an integer specifying number of packs for the cross device op.
Returns:
tf.distribute.CrossDeviceOps object or None.
Raises:
ValueError: if `all_reduce_alg` not in [None, 'nccl', 'hierarchical_copy'].
"""
if all_reduce_alg is None:
return None
mirrored_all_reduce_options = {
'nccl': tf.distribute.NcclAllReduce,
'hierarchical_copy': tf.distribute.HierarchicalCopyAllReduce
}
if all_reduce_alg not in mirrored_all_reduce_options:
raise ValueError(
'When used with `mirrored`, valid values for all_reduce_alg are '
'["nccl", "hierarchical_copy"]. Supplied value: {}'.format(
all_reduce_alg))
cross_device_ops_class = mirrored_all_reduce_options[all_reduce_alg]
return cross_device_ops_class(num_packs=num_packs)
| [
"tensorflow.compat.v2.distribute.OneDeviceStrategy",
"tensorflow.compat.v2.compat.v1.distribute.experimental.ParameterServerStrategy",
"six.moves.range",
"json.dumps",
"numpy.log",
"numpy.argmax",
"numpy.equal",
"absl.logging.info",
"numpy.square",
"numpy.stack",
"tensorflow.compat.v2.keras.mode... | [((1132, 1179), 'json.dumps', 'json.dumps', (['x'], {'indent': '(2)', 'cls': '_SimpleJsonEncoder'}), '(x, indent=2, cls=_SimpleJsonEncoder)\n', (1142, 1179), False, 'import json\n'), ((1244, 1301), 'absl.logging.info', 'logging.info', (['"""Recording config to %s\n %s"""', 'path', 'out'], {}), '("""Recording config to %s\n %s""", path, out)\n', (1256, 1301), False, 'from absl import logging\n'), ((1421, 1465), 'absl.logging.info', 'logging.info', (['"""Loading config from %s"""', 'path'], {}), "('Loading config from %s', path)\n", (1433, 1465), False, 'from absl import logging\n'), ((1677, 1722), 'absl.logging.info', 'logging.info', (['"""Saving model to %s"""', 'save_path'], {}), "('Saving model to %s', save_path)\n", (1689, 1722), False, 'from absl import logging\n'), ((1817, 1860), 'absl.logging.info', 'logging.info', (['"""Loading model from %s"""', 'path'], {}), "('Loading model from %s', path)\n", (1829, 1860), False, 'from absl import logging\n'), ((1870, 1902), 'tensorflow.compat.v2.keras.models.load_model', 'tf.keras.models.load_model', (['path'], {}), '(path)\n', (1896, 1902), True, 'import tensorflow.compat.v2 as tf\n'), ((2212, 2237), 'numpy.argmax', 'np.argmax', (['probs'], {'axis': '(-1)'}), '(probs, axis=-1)\n', (2221, 2237), True, 'import numpy as np\n'), ((2251, 2280), 'numpy.equal', 'np.equal', (['labels', 'predictions'], {}), '(labels, predictions)\n', (2259, 2280), True, 'import numpy as np\n'), ((5292, 5336), 'absl.logging.info', 'logging.info', (['"""Starting dataset download..."""'], {}), "('Starting dataset download...')\n", (5304, 5336), False, 'from absl import logging\n'), ((5407, 5449), 'absl.logging.info', 'logging.info', (['"""dataset download complete."""'], {}), "('dataset download complete.')\n", (5419, 5449), False, 'from absl import logging\n'), ((2128, 2165), 'numpy.stack', 'np.stack', (['[1 - probs, probs]'], {'axis': '(-1)'}), '([1 - probs, probs], axis=-1)\n', (2136, 2165), True, 'import 
numpy as np\n'), ((2368, 2387), 'numpy.log', 'np.log', (['label_probs'], {}), '(label_probs)\n', (2374, 2387), True, 'import numpy as np\n'), ((4115, 4145), 'tensorflow_datasets.as_numpy', 'tfds.as_numpy', (['batched_dataset'], {}), '(batched_dataset)\n', (4128, 4145), True, 'import tensorflow_datasets as tfds\n'), ((4152, 4184), 'absl.logging.info', 'logging.info', (['"""iteration: %d"""', 'i'], {}), "('iteration: %d', i)\n", (4164, 4184), False, 'from absl import logging\n'), ((4940, 4964), 'numpy.stack', 'np.stack', (['labels'], {'axis': '(0)'}), '(labels, axis=0)\n', (4948, 4964), True, 'import numpy as np\n'), ((4979, 5010), 'numpy.concatenate', 'np.concatenate', (['outputs'], {'axis': '(0)'}), '(outputs, axis=0)\n', (4993, 5010), True, 'import numpy as np\n'), ((8476, 8538), 'tensorflow.compat.v2.compat.v1.distribute.experimental.ParameterServerStrategy', 'tf.compat.v1.distribute.experimental.ParameterServerStrategy', ([], {}), '()\n', (8536, 8538), True, 'import tensorflow.compat.v2 as tf\n'), ((5465, 5490), 'numpy.concatenate', 'np.concatenate', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (5479, 5490), True, 'import numpy as np\n'), ((7773, 7820), 'tensorflow.compat.v2.distribute.OneDeviceStrategy', 'tf.distribute.OneDeviceStrategy', (['"""device:CPU:0"""'], {}), "('device:CPU:0')\n", (7804, 7820), True, 'import tensorflow.compat.v2 as tf\n'), ((7985, 8032), 'tensorflow.compat.v2.distribute.OneDeviceStrategy', 'tf.distribute.OneDeviceStrategy', (['"""device:GPU:0"""'], {}), "('device:GPU:0')\n", (8016, 8032), True, 'import tensorflow.compat.v2 as tf\n'), ((2406, 2422), 'numpy.square', 'np.square', (['probs'], {}), '(probs)\n', (2415, 2422), True, 'import numpy as np\n'), ((4250, 4280), 'six.moves.range', 'range', (['predictions_per_example'], {}), '(predictions_per_example)\n', (4255, 4280), False, 'from six.moves import range\n'), ((8249, 8264), 'six.moves.range', 'range', (['num_gpus'], {}), '(num_gpus)\n', (8254, 8264), False, 'from six.moves import 
range\n')] |
from __future__ import division, print_function, absolute_import
from .core import SeqletCoordinates
from modisco import util
import numpy as np
from collections import defaultdict, Counter, OrderedDict
import itertools
import sys
import time
from .value_provider import (
AbstractValTransformer, AbsPercentileValTransformer,
SignedPercentileValTransformer, PrecisionValTransformer)
import scipy
from sklearn.isotonic import IsotonicRegression
SUBSAMPLE_CAP = 1000000
#The only parts of TransformAndThresholdResults that are used in
# TfModiscoWorkflow are the transformed_pos/neg_thresholds and the
# val_transformer (used in metaclustering with multiple tasks)
#TransformAndThresholdResults are also used to be
# able to replicate the same procedure used for identifying coordinates as
# when TfMoDisco was first run; the information needed in that case would
# be specific to the type of Coordproducer used
class AbstractTransformAndThresholdResults(object):
def __init__(self, transformed_neg_threshold, transformed_pos_threshold,
val_transformer):
self.transformed_neg_threshold = transformed_neg_threshold
self.transformed_pos_threshold = transformed_pos_threshold
self.val_transformer = val_transformer
@classmethod
def from_hdf5(cls, grp):
if "class" not in grp.attrs:
the_class = FWACTransformAndThresholdResults
else:
the_class = eval(grp.attrs["class"])
if (the_class.__name__ != cls.__name__):
return the_class.from_hdf5(grp)
class BasicTransformAndThresholdResults(AbstractTransformAndThresholdResults):
def save_hdf5(self, grp):
grp.attrs["class"] = type(self).__name__
grp.attrs["transformed_neg_threshold"] = self.transformed_neg_threshold
grp.attrs["transformed_pos_threshold"] = self.transformed_pos_threshold
self.val_transformer.save_hdf5(grp.create_group("val_transformer"))
@classmethod
def load_basic_attrs_from_hdf5(cls, grp):
transformed_neg_threshold = grp.attrs['transformed_neg_threshold']
transformed_pos_threshold = grp.attrs['transformed_pos_threshold']
val_transformer = AbstractValTransformer.from_hdf5(
grp["val_transformer"])
return (transformed_neg_threshold, transformed_pos_threshold,
val_transformer)
@classmethod
def from_hdf5(cls, grp):
the_class = eval(grp.attrs["class"])
(transformed_neg_threshold,
transformed_pos_threshold,
val_transformer) = cls.load_basic_attrs_from_hdf5(grp)
return cls(transformed_neg_threshold=transformed_neg_threshold,
transformed_pos_threshold=transformed_pos_threshold,
val_transformer=val_transformer)
#FWAC = FixedWindowAroundChunks; this TransformAndThresholdResults object
# is specific to the type of info needed in that case.
class FWACTransformAndThresholdResults(
BasicTransformAndThresholdResults):
def __init__(self, neg_threshold,
transformed_neg_threshold,
pos_threshold,
transformed_pos_threshold,
val_transformer):
#both 'transformed_neg_threshold' and 'transformed_pos_threshold'
# should be positive, i.e. they should be relative to the
# transformed distribution used to set the threshold, e.g. a
# cdf value
self.neg_threshold = neg_threshold
self.pos_threshold = pos_threshold
super(FWACTransformAndThresholdResults, self).__init__(
transformed_neg_threshold=transformed_neg_threshold,
transformed_pos_threshold=transformed_pos_threshold,
val_transformer=val_transformer)
def save_hdf5(self, grp):
super(FWACTransformAndThresholdResults, self).save_hdf5(grp)
grp.attrs["neg_threshold"] = self.neg_threshold
grp.attrs["pos_threshold"] = self.pos_threshold
@classmethod
def from_hdf5(cls, grp):
(transformed_neg_threshold, transformed_pos_threshold,
val_transformer) = cls.load_basic_attrs_from_hdf5(grp)
neg_threshold = grp.attrs['neg_threshold']
pos_threshold = grp.attrs['pos_threshold']
return cls(neg_threshold=neg_threshold,
transformed_neg_threshold=transformed_neg_threshold,
pos_threshold=pos_threshold,
transformed_pos_threshold=transformed_pos_threshold,
val_transformer=val_transformer)
class AbstractCoordProducer(object):
def __call__(self):
raise NotImplementedError()
@classmethod
def from_hdf5(cls, grp):
the_class = eval(grp.attrs["class"])
return the_class.from_hdf5(grp)
class SeqletCoordsFWAP(SeqletCoordinates):
"""
Coordinates for the FixedWindowAroundChunks CoordProducer
"""
def __init__(self, example_idx, start, end, score, other_info={}):
self.score = score
self.other_info = other_info
super(SeqletCoordsFWAP, self).__init__(
example_idx=example_idx,
start=start, end=end,
is_revcomp=False)
class CoordProducerResults(object):
def __init__(self, coords, tnt_results):
self.coords = coords
self.tnt_results = tnt_results
@classmethod
def from_hdf5(cls, grp):
coord_strings = util.load_string_list(dset_name="coords",
grp=grp)
coords = [SeqletCoordinates.from_string(x) for x in coord_strings]
tnt_results = AbstractTransformAndThresholdResults.from_hdf5(
grp["tnt_results"])
return CoordProducerResults(coords=coords,
tnt_results=tnt_results)
def save_hdf5(self, grp):
util.save_string_list(
string_list=[str(x) for x in self.coords],
dset_name="coords",
grp=grp)
self.tnt_results.save_hdf5(
grp=grp.create_group("tnt_results"))
def get_simple_window_sum_function(window_size):
def window_sum_function(arrs):
to_return = []
for arr in arrs:
cumsum = np.cumsum(arr)
cumsum = np.array([0]+list(cumsum))
to_return.append(cumsum[window_size:]-cumsum[:-window_size])
return to_return
return window_sum_function
class GenerateNullDist(object):
def __call__(self, score_track):
raise NotImplementedError()
class TakeSign(GenerateNullDist):
@classmethod
def from_hdf5(cls, grp):
raise NotImplementedError()
def save_hdf(cls, grp):
raise NotImplementedError()
def __call__(self, score_track):
null_tracks = [np.sign(x) for x in score_track]
return null_tracks
class TakeAbs(GenerateNullDist):
@classmethod
def from_hdf5(cls, grp):
raise NotImplementedError()
def save_hdf(cls, grp):
raise NotImplementedError()
def __call__(self, score_track):
null_tracks = [np.abs(x) for x in score_track]
return null_tracks
class LaplaceNullDist(GenerateNullDist):
def __init__(self, num_to_samp, verbose=True,
percentiles_to_use=[5*(x+1) for x in range(19)],
random_seed=1234):
self.num_to_samp = num_to_samp
self.verbose = verbose
self.percentiles_to_use = np.array(percentiles_to_use)
self.random_seed = random_seed
self.rng = np.random.RandomState()
@classmethod
def from_hdf5(cls, grp):
num_to_samp = grp.attrs["num_to_samp"]
verbose = grp.attrs["verbose"]
percentiles_to_use = np.array(grp["percentiles_to_use"][:])
return cls(num_to_samp=num_to_samp, verbose=verbose)
def save_hdf5(self, grp):
grp.attrs["class"] = type(self).__name__
grp.attrs["num_to_samp"] = self.num_to_samp
grp.attrs["verbose"] = self.verbose
grp.create_dataset('percentiles_to_use',
data=self.percentiles_to_use)
def __call__(self, score_track, window_size, original_summed_score_track):
#original_summed_score_track is supplied to avoid recomputing it
if (original_summed_score_track is None):
window_sum_function = get_simple_window_sum_function(window_size)
original_summed_score_track = window_sum_function(arrs=score_track)
values = np.concatenate(original_summed_score_track, axis=0)
# first estimate mu, using two level histogram to get to 1e-6
hist1, bin_edges1 = np.histogram(values, bins=1000)
peak1 = np.argmax(hist1)
l_edge = bin_edges1[peak1]
r_edge = bin_edges1[peak1+1]
top_values = values[ (l_edge < values) & (values < r_edge) ]
hist2, bin_edges2 = np.histogram(top_values, bins=1000)
peak2 = np.argmax(hist2)
l_edge = bin_edges2[peak2]
r_edge = bin_edges2[peak2+1]
mu = (l_edge + r_edge) / 2
if (self.verbose):
print("peak(mu)=", mu)
pos_values = [x for x in values if x >= mu]
neg_values = [x for x in values if x <= mu]
#for an exponential distribution:
# cdf = 1 - exp(-lambda*x)
# exp(-lambda*x) = 1-cdf
# -lambda*x = log(1-cdf)
# lambda = -log(1-cdf)/x
# x = -log(1-cdf)/lambda
#Take the most aggressive lambda over all percentiles
pos_laplace_lambda = np.max(
-np.log(1-(self.percentiles_to_use/100.0))/
(np.percentile(a=pos_values, q=self.percentiles_to_use)-mu))
neg_laplace_lambda = np.max(
-np.log(1-(self.percentiles_to_use/100.0))/
(np.abs(np.percentile(a=neg_values,
q=100-self.percentiles_to_use)-mu)))
self.rng.seed(self.random_seed)
prob_pos = float(len(pos_values))/(len(pos_values)+len(neg_values))
sampled_vals = []
for i in range(self.num_to_samp):
sign = 1 if (self.rng.uniform() < prob_pos) else -1
if (sign == 1):
sampled_cdf = self.rng.uniform()
val = -np.log(1-sampled_cdf)/pos_laplace_lambda + mu
else:
sampled_cdf = self.rng.uniform()
val = mu + np.log(1-sampled_cdf)/neg_laplace_lambda
sampled_vals.append(val)
return np.array(sampled_vals)
class FlipSignNullDist(GenerateNullDist):
    """Generate a null distribution of windowed importance sums by sampling
    stretches of 'background' (mid-scoring) positions from the real tracks
    and randomly flipping the sign of each value.

    The sign flips preserve the empirical positive/negative balance of the
    background values while destroying any real motif structure, so the
    resulting window sums serve as a null distribution.
    """
    def __init__(self, num_seq_to_samp, shuffle_pos=False,
                       seed=1234, num_breaks=100,
                       lower_null_percentile=20,
                       upper_null_percentile=80):
        """
        Args:
            num_seq_to_samp: number of null tracks to generate
            shuffle_pos: if True, additionally shuffle positions within
                each sampled track
            seed: seed for the random number generator
            num_breaks: granularity parameter (stored but not used here)
            lower_null_percentile: windows scoring below this percentile
                of the summed scores are considered background
            upper_null_percentile: windows scoring above this percentile
                are excluded from the background
        """
        self.num_seq_to_samp = num_seq_to_samp
        self.shuffle_pos = shuffle_pos
        self.seed = seed
        self.rng = np.random.RandomState()
        self.num_breaks = num_breaks
        self.lower_null_percentile = lower_null_percentile
        self.upper_null_percentile = upper_null_percentile

    @classmethod
    def from_hdf5(cls, grp):
        raise NotImplementedError()

    def save_hdf(cls, grp):
        # NOTE(review): sibling classes call this method save_hdf5; name kept
        # for backward compatibility since it is unimplemented anyway.
        raise NotImplementedError()

    def __call__(self, score_track, window_size, original_summed_score_track):
        """Return a 1d array of null window sums.

        Args:
            score_track: list of per-example 1d score arrays
            window_size: sliding window size used for the summed scores
                (parameter renamed from 'windowsize' to match the keyword
                used by all in-file callers)
            original_summed_score_track: precomputed window sums of
                score_track, or None to have them computed here
        """
        window_sum_function = get_simple_window_sum_function(window_size)
        # Fix: only compute the summed track when it was NOT supplied.
        # The original condition was inverted, which both defeated the
        # purpose of passing the track in (it was recomputed) and crashed
        # with a TypeError further down when None was passed.
        if (original_summed_score_track is None):
            original_summed_score_track = window_sum_function(arrs=score_track)
        all_orig_summed_scores = np.concatenate(
            original_summed_score_track, axis=0)
        pos_threshold = np.percentile(a=all_orig_summed_scores,
                                      q=self.upper_null_percentile)
        neg_threshold = np.percentile(a=all_orig_summed_scores,
                                      q=self.lower_null_percentile)
        #retain only the portions of the tracks whose window sums fall
        # strictly between the two thresholds (the 'background')
        retained_track_portions = []
        num_pos_vals = 0
        num_neg_vals = 0
        for (single_score_track, single_summed_score_track)\
            in zip(score_track, original_summed_score_track):
            window_passing_track = [
                (1.0 if (x > neg_threshold and x < pos_threshold) else 0)
                for x in single_summed_score_track]
            padded_window_passing_track = [0.0]*int(window_size-1)
            padded_window_passing_track.extend(window_passing_track)
            padded_window_passing_track.extend([0.0]*int(window_size-1))
            #pos_in_passing_window[i] > 0 iff position i falls inside at
            # least one passing window
            pos_in_passing_window = window_sum_function(
                                     [padded_window_passing_track])[0]
            assert len(single_score_track)==len(pos_in_passing_window)
            single_retained_track = []
            for (val, pos_passing) in zip(single_score_track,
                                          pos_in_passing_window):
                if (pos_passing > 0):
                    single_retained_track.append(val)
                    num_pos_vals += (1 if val > 0 else 0)
                    num_neg_vals += (1 if val < 0 else 0)
            retained_track_portions.append(single_retained_track)

        print("Fraction of positions retained:",
              sum(len(x) for x in retained_track_portions)/
              sum(len(x) for x in score_track))

        prob_pos = num_pos_vals/float(num_pos_vals + num_neg_vals)
        self.rng.seed(self.seed)
        null_tracks = []
        for i in range(self.num_seq_to_samp):
            #sample a retained stretch uniformly at random, then flip the
            # sign of each value with probability (1-prob_pos)
            random_track = retained_track_portions[
                            int(self.rng.randint(0,len(retained_track_portions)))]
            track_with_sign_flips = np.array([
                abs(x)*(1 if self.rng.uniform() < prob_pos else -1)
                for x in random_track])
            if (self.shuffle_pos):
                self.rng.shuffle(track_with_sign_flips)
            null_tracks.append(track_with_sign_flips)
        return np.concatenate(window_sum_function(null_tracks), axis=0)
def get_null_vals(null_track, score_track, window_size,
                  original_summed_score_track):
    """Return null-distribution window-sum values.

    If null_track is callable, it is treated as a null-distribution
    generator and invoked directly; otherwise it is assumed to be a list
    of per-example score arrays whose sliding-window sums form the null.
    """
    if callable(null_track):
        return null_track(
            score_track=score_track,
            window_size=window_size,
            original_summed_score_track=original_summed_score_track)
    #null_track is raw per-example tracks: compute the window sums
    window_sum_function = get_simple_window_sum_function(window_size)
    summed_null_track = window_sum_function(arrs=null_track)
    return list(np.concatenate(summed_null_track, axis=0))
def subsample_if_large(arr):
    """Return arr, randomly subsampled (without replacement, fixed seed)
    down to SUBSAMPLE_CAP elements if it exceeds that cap."""
    if len(arr) <= SUBSAMPLE_CAP:
        return arr
    print("Subsampling!")
    sys.stdout.flush()
    rng = np.random.RandomState(1234)
    return rng.choice(a=arr, size=SUBSAMPLE_CAP, replace=False)
def irval_to_probpos(irval, frac_neg):
    """Convert isotonic-regression outputs to estimated probability
    that a value is a true positive, given the fraction of negatives.

    Derivation:
      n(x) := pdf of the null distribution (negatives)
      p(x) := pdf of the positive distribution
      f_p  := fraction of positives; f_n := 1 - f_p
      o(x) := pdf of observed distribution = n(x)f_n + p(x)f_p
    The isotonic regression yields a(x) = o(x)/[o(x) + n(x)]; solving
    for the odds ratio r(x) = (p(x)f_p)/(n(x)f_n) and then for
    p_pos = 1/(1 + 1/r(x)) gives
      p_pos = 1 + f_n*(1 - 1/a(x))
    which is clipped to [0, 1] below.  As f_n -> 100%,
    p_pos -> 2 - 1/a(x) (this assumes max(a(x)) = 0.5).
    """
    #guard against division by zero for a(x) ~ 0
    safe_irval = np.maximum(irval, 1e-7)
    prob_pos = 1 + frac_neg*(1 - (1/safe_irval))
    return np.clip(prob_pos, 0.0, 1.0)
class SavableIsotonicRegression(object):
    """Isotonic regression fit on foreground ('origvals', label 1) vs null
    ('nullvals', label 0) scores; the fitted curve estimates the precision
    (probability of foreground) at each score.  The raw value arrays are
    stored so the fit can be reproduced after hdf5 round-tripping."""
    def __init__(self, origvals, nullvals, increasing, min_frac_neg=0.95):
        # origvals: scores observed in the data (treated as positives)
        # nullvals: scores drawn from the null distribution (negatives)
        # increasing: True for the positive-score side (precision grows with
        #  score), False for the negative side
        # min_frac_neg: lower bound on the inferred fraction of negatives,
        #  keeping the precision estimates conservative
        self.origvals = origvals
        self.nullvals = nullvals
        self.increasing = increasing
        self.min_frac_neg = min_frac_neg
        #Fit IR on orig (y=1) + null (y=0); null values are up-weighted so
        # the two groups contribute equal total sample weight.
        self.ir = IsotonicRegression(out_of_bounds='clip',
                    increasing=increasing).fit(
            X=np.concatenate([self.origvals, self.nullvals], axis=0),
            y=([1.0 for x in self.origvals] + [0.0 for x in self.nullvals]),
            sample_weight=([1.0 for x in self.origvals]
                           +[float(len(self.origvals))/len(self.nullvals)
                             for x in self.nullvals]))
        #Infer frac_pos based on the minimum value of the ir probs
        #See derivation in irval_to_probpos function
        min_prec_x = self.ir.X_min_ if self.increasing else self.ir.X_max_
        min_precision = self.ir.transform([min_prec_x])[0]
        implied_frac_neg = -1/(1-(1/max(min_precision,1e-7)))
        print("For increasing =",increasing,", the minimum IR precision was",
              min_precision,"occurring at",min_prec_x,
              "implying a frac_neg",
              "of",implied_frac_neg)
        #Clamp the implied fraction of negatives to [min_frac_neg, 1.0]
        if (implied_frac_neg > 1.0 or implied_frac_neg < self.min_frac_neg):
            implied_frac_neg = max(min(1.0,implied_frac_neg),
                                   self.min_frac_neg)
            print("To be conservative, adjusted frac neg is",implied_frac_neg)
        self.implied_frac_neg = implied_frac_neg

    def transform(self, vals):
        """Map raw scores to estimated probability-of-foreground in [0,1]."""
        return irval_to_probpos(self.ir.transform(vals),
                                frac_neg=self.implied_frac_neg)

    def save_hdf5(self, grp):
        """Persist the values needed to reconstruct this object via from_hdf5."""
        grp.attrs['increasing'] = self.increasing
        grp.attrs['min_frac_neg'] = self.min_frac_neg
        grp.create_dataset('origvals', data=self.origvals)
        grp.create_dataset('nullvals', data=self.nullvals)

    @classmethod
    def from_hdf5(cls, grp):
        """Reconstruct (re-fit) from a group written by save_hdf5."""
        increasing = grp.attrs['increasing']
        min_frac_neg = grp.attrs['min_frac_neg']
        origvals = np.array(grp['origvals'])
        nullvals = np.array(grp['nullvals'])
        return cls(origvals=origvals, nullvals=nullvals,
                   increasing=increasing, min_frac_neg=min_frac_neg)
def get_isotonic_regression_classifier(orig_vals, null_vals):
    """Fit isotonic-regression precision estimators on the positive and
    negative halves of the score distribution.

    Values are subsampled first if very large.  Negative values are
    ordered by magnitude and fitted with a decreasing regression; if
    there are no negative foreground values, neg_ir is None.

    Returns:
        (pos_ir, neg_ir, subsampled_orig_vals, subsampled_null_vals)
    """
    orig_vals = subsample_if_large(orig_vals)
    null_vals = subsample_if_large(null_vals)
    #Partition foreground and null values by sign
    pos_orig = np.array(sorted(v for v in orig_vals if v >= 0))
    neg_orig = np.array(sorted((v for v in orig_vals if v < 0), key=abs))
    pos_null = [v for v in null_vals if v >= 0]
    neg_null = [v for v in null_vals if v < 0]
    pos_ir = SavableIsotonicRegression(
        origvals=pos_orig, nullvals=pos_null, increasing=True)
    if len(neg_orig) > 0:
        neg_ir = SavableIsotonicRegression(
            origvals=neg_orig, nullvals=neg_null, increasing=False)
    else:
        neg_ir = None
    return pos_ir, neg_ir, orig_vals, null_vals
#sliding in this case would be a list of values
class VariableWindowAroundChunks(AbstractCoordProducer):
    """Coordinate producer that scans each example with SEVERAL sliding
    window sizes, converts raw window sums to isotonic-regression
    precision values (comparable across window sizes), and then greedily
    picks high-precision windows as seqlet coordinates."""
    #class-level counter used to give saved figures unique filenames
    count = 0
    def __init__(self, sliding, flank, suppress, target_fdr,
                       min_passing_windows_frac, max_passing_windows_frac,
                       separate_pos_neg_thresholds,
                       max_seqlets_total,
                       progress_update=5000,
                       verbose=True, plot_save_dir="figures"):
        # sliding: list of sliding window sizes to try
        # flank: flank size added on each side of an identified window
        # suppress: half-width of the suppression zone around each pick
        # target_fdr: target false discovery rate for the thresholds
        self.sliding = sliding
        self.flank = flank
        self.suppress = suppress
        self.target_fdr = target_fdr
        assert max_passing_windows_frac >= min_passing_windows_frac
        self.min_passing_windows_frac = min_passing_windows_frac
        self.max_passing_windows_frac = max_passing_windows_frac
        self.separate_pos_neg_thresholds = separate_pos_neg_thresholds
        # NOTE(review): the max_seqlets_total parameter is accepted but
        # discarded here (attribute is always None), so the seqlet cap is
        # never applied and from_hdf5/save_hdf5 round-trips are lossy.
        # Likely should be `= max_seqlets_total` — confirm intent.
        self.max_seqlets_total = None
        self.progress_update = progress_update
        self.verbose = verbose
        self.plot_save_dir = plot_save_dir

    @classmethod
    def from_hdf5(cls, grp):
        """Reconstruct from an hdf5 group written by save_hdf5."""
        sliding = np.array(grp["sliding"]).astype("int")
        flank = grp.attrs["flank"]
        suppress = grp.attrs["suppress"]
        target_fdr = grp.attrs["target_fdr"]
        min_passing_windows_frac = grp.attrs["min_passing_windows_frac"]
        max_passing_windows_frac = grp.attrs["max_passing_windows_frac"]
        separate_pos_neg_thresholds = grp.attrs["separate_pos_neg_thresholds"]
        if ("max_seqlets_total" in grp.attrs):
            max_seqlets_total = grp.attrs["max_seqlets_total"]
        else:
            max_seqlets_total = None
        progress_update = grp.attrs["progress_update"]
        verbose = grp.attrs["verbose"]
        return cls(sliding=sliding, flank=flank, suppress=suppress,
                    target_fdr=target_fdr,
                    min_passing_windows_frac=min_passing_windows_frac,
                    max_passing_windows_frac=max_passing_windows_frac,
                    separate_pos_neg_thresholds=separate_pos_neg_thresholds,
                    max_seqlets_total=max_seqlets_total,
                    progress_update=progress_update, verbose=verbose)

    def save_hdf5(self, grp):
        """Persist configuration to an hdf5 group."""
        grp.attrs["class"] = type(self).__name__
        grp.create_dataset("sliding", data=np.array(self.sliding))
        grp.attrs["flank"] = self.flank
        grp.attrs["suppress"] = self.suppress
        grp.attrs["target_fdr"] = self.target_fdr
        grp.attrs["min_passing_windows_frac"] = self.min_passing_windows_frac
        grp.attrs["max_passing_windows_frac"] = self.max_passing_windows_frac
        grp.attrs["separate_pos_neg_thresholds"] =\
            self.separate_pos_neg_thresholds
        if (self.max_seqlets_total is not None):
            grp.attrs["max_seqlets_total"] = self.max_seqlets_total
        grp.attrs["progress_update"] = self.progress_update
        grp.attrs["verbose"] = self.verbose

    def fit_pos_and_neg_irs(self, score_track, null_track):
        """For each sliding window size, fit positive- and negative-side
        isotonic regressions of foreground vs null window sums.

        Returns (pos_irs, neg_irs), parallel to self.sliding."""
        pos_irs = []
        neg_irs = []
        for sliding_window_size in self.sliding:
            window_sum_function = get_simple_window_sum_function(
                                   sliding_window_size)
            print("Fitting - on window size",sliding_window_size)
            #null_track may be a callable null generator or raw tracks
            if (hasattr(null_track, '__call__')):
                null_vals = null_track(
                    score_track=score_track,
                    window_size=sliding_window_size,
                    original_summed_score_track=None)
            else:
                null_summed_score_track = window_sum_function(arrs=null_track)
                null_vals = np.concatenate(null_summed_score_track,
                                            axis=0)
            print("Computing window sums")
            sys.stdout.flush()
            window_sums_rows = window_sum_function(arrs=score_track)
            print("Done computing window sums")
            sys.stdout.flush()
            #NOTE: orig_vals is assigned but the classifier call below
            # recomputes the same concatenation (harmless redundancy)
            orig_vals = np.concatenate(window_sums_rows, axis=0)
            pos_ir, neg_ir, subsampled_orig_vals, subsampled_null_vals =\
                get_isotonic_regression_classifier(
                    orig_vals=np.concatenate(window_sums_rows, axis=0),
                    null_vals=null_vals)
            make_nulldist_figure(orig_vals=subsampled_orig_vals,
                                 null_vals=subsampled_null_vals,
                                 pos_ir=pos_ir, neg_ir=neg_ir,
                                 pos_threshold=None,
                                 neg_threshold=None)
            util.show_or_savefig(plot_save_dir=self.plot_save_dir,
                                 filename="scoredist_window"
                                          +str(sliding_window_size)+"_"
                                          +str(VariableWindowAroundChunks.count)+".png")
            pos_irs.append(pos_ir)
            neg_irs.append(neg_ir)
        return pos_irs, neg_irs

    def __call__(self, score_track, null_track, tnt_results=None):
        """Identify seqlet coordinates.

        If tnt_results is None, fits the precision transformer and
        thresholds; otherwise reuses the supplied transformer/thresholds
        (e.g. when reapplying a saved run to new data).  Returns a
        CoordProducerResults with the coords and the (possibly new)
        tnt_results."""
        if (tnt_results is None):
            pos_irs, neg_irs = self.fit_pos_and_neg_irs(
                score_track=score_track,
                null_track=null_track)
            precision_transformer = PrecisionValTransformer(
                sliding_window_sizes=self.sliding,
                pos_irs=pos_irs,
                neg_irs=neg_irs)
            (precisiontransformed_score_track,
             precisiontransformed_bestwindowsizeidxs) =\
                precision_transformer.transform_score_track(
                    score_track=score_track)
            subsampled_prec_vals = subsample_if_large(
                np.concatenate(precisiontransformed_score_track, axis=0))
            #plot the CDF of the transformed precision values
            from matplotlib import pyplot as plt
            plt.plot(sorted(subsampled_prec_vals),
                     (np.arange(len(subsampled_prec_vals))/
                      len(subsampled_prec_vals)))
            plt.xlabel("Tranformed IR precision value")
            plt.ylabel("CDF")
            util.show_or_savefig(plot_save_dir=self.plot_save_dir,
                                 filename="final_prec_vals_cdf_dist"
                                          +str(VariableWindowAroundChunks.count)+".png")
            #Pick a threshold according to the precisiontransformed score track
            pos_threshold = (1-self.target_fdr)
            neg_threshold = -(1-self.target_fdr)
            pos_threshold, neg_threshold =\
                refine_thresholds_based_on_frac_passing(
                    vals=subsampled_prec_vals,
                    pos_threshold=pos_threshold,
                    neg_threshold=neg_threshold,
                    min_passing_windows_frac=self.min_passing_windows_frac,
                    max_passing_windows_frac=self.max_passing_windows_frac,
                    separate_pos_neg_thresholds=self.separate_pos_neg_thresholds,
                    verbose=self.verbose)
            tnt_results = BasicTransformAndThresholdResults(
                transformed_neg_threshold=neg_threshold,
                transformed_pos_threshold=pos_threshold,
                val_transformer=precision_transformer)
        else:
            precision_transformer = tnt_results.val_transformer
            (precisiontransformed_score_track,
             precisiontransformed_bestwindowsizeidxs) =\
                precision_transformer.transform_score_track(
                    score_track=score_track)
        #Need to remove padding because identify_coords is assumed to
        # operate on a scoretrack that has already been processed with
        # a sliding window of window_size (and assumes that partial windows
        # were not included)
        left_padding_to_remove = int((max(self.sliding)-1)/2)
        right_padding_to_remove = (max(self.sliding)-1)-left_padding_to_remove
        coords = identify_coords(
            score_track=[x[left_padding_to_remove:-right_padding_to_remove]
                         for x in precisiontransformed_score_track],
            pos_threshold=tnt_results.transformed_pos_threshold,
            neg_threshold=tnt_results.transformed_neg_threshold,
            window_size=max(self.sliding),
            flank=self.flank,
            suppress=self.suppress,
            max_seqlets_total=self.max_seqlets_total,
            verbose=self.verbose,
            other_info_tracks={'best_window_idx':
             [x[left_padding_to_remove:-right_padding_to_remove] for x in
              precisiontransformed_bestwindowsizeidxs]})
        VariableWindowAroundChunks.count += 1
        return CoordProducerResults(
                    coords=coords,
                    tnt_results=tnt_results)
#identify_coords is expecting something that has already been processed
# with sliding windows of size window_size
def identify_coords(score_track, pos_threshold, neg_threshold,
                    window_size, flank, suppress,
                    max_seqlets_total, verbose, other_info_tracks={}):
    """Greedily extract seqlet coordinates from windowed score tracks.

    Repeatedly takes the highest-|score| window passing the thresholds,
    records a SeqletCoordsFWAP expanded by 'flank' on each side, and
    suppresses positions within +/- 'suppress' of the pick, until no
    passing window remains.  If max_seqlets_total is given, only the
    top-|score| coords are kept.  Note: other_info_tracks uses a mutable
    default argument, but it is only read here, never mutated.
    """
    for other_info_track in other_info_tracks.values():
        assert all([x.shape==y.shape for x,y
                    in zip(other_info_track,score_track)])
    #cp_score_track = 'copy' of the score track, which can be modified as
    # coordinates are identified
    cp_score_track = [np.array(x) for x in score_track]
    #if a position is less than the threshold, set it to -np.inf
    #Note that the threshold comparisons need to be >= and not just > for
    # cases where there are lots of ties at the high end (e.g. with an IR
    # tranformation that gives a lot of values that have a precision of 1.0)
    cp_score_track = [
        np.array([np.abs(y) if (y >= pos_threshold
                        or y <= neg_threshold)
                       else -np.inf for y in x])
        for x in cp_score_track]

    coords = []
    for example_idx,single_score_track in enumerate(cp_score_track):
        #set the stuff near the flanks to -np.inf so that we
        # don't pick it up during argmax
        single_score_track[0:flank] = -np.inf
        single_score_track[len(single_score_track)-(flank):
                           len(single_score_track)] = -np.inf
        while True:
            argmax = np.argmax(single_score_track,axis=0)
            max_val = single_score_track[argmax]

            #bail if exhausted everything that passed the threshold
            #and was not suppressed
            if (max_val == -np.inf):
                break

            #need to be able to expand without going off the edge
            if ((argmax >= flank) and
                (argmax < (len(single_score_track)-flank))):

                #record the coordinate with the ORIGINAL (signed) score
                coord = SeqletCoordsFWAP(
                    example_idx=example_idx,
                    start=argmax-flank,
                    end=argmax+window_size+flank,
                    score=score_track[example_idx][argmax],
                    other_info = dict([
                     (track_name, track[example_idx][argmax])
                     for (track_name, track) in other_info_tracks.items()]))
                assert (coord.score >= pos_threshold
                        or coord.score <= neg_threshold)
                coords.append(coord)
            else:
                assert False,\
                 ("This shouldn't happen because I set stuff near the"
                  "border to -np.inf early on")
            #suppress the chunks within +- suppress
            left_supp_idx = int(max(np.floor(argmax+0.5-suppress),0))
            right_supp_idx = int(min(np.ceil(argmax+0.5+suppress),
                                     len(single_score_track)))
            single_score_track[left_supp_idx:right_supp_idx] = -np.inf

    if (verbose):
        print("Got "+str(len(coords))+" coords")
        sys.stdout.flush()

    if ((max_seqlets_total is not None) and
        len(coords) > max_seqlets_total):
        if (verbose):
            print("Limiting to top "+str(max_seqlets_total))
            sys.stdout.flush()
        #keep the coords with the largest absolute scores
        coords = sorted(coords, key=lambda x: -np.abs(x.score))\
                       [:max_seqlets_total]
    return coords
def refine_thresholds_based_on_frac_passing(
    vals, pos_threshold, neg_threshold,
    min_passing_windows_frac, max_passing_windows_frac,
    separate_pos_neg_thresholds, verbose):
    """Adjust thresholds so that the fraction of values passing them lies
    within [min_passing_windows_frac, max_passing_windows_frac].

    If the initial passing fraction falls outside the band, thresholds
    are recomputed from percentiles of the data: either separately on
    the positive/negative halves, or symmetrically on |vals|.
    Returns the (possibly adjusted) (pos_threshold, neg_threshold).
    """
    num_passing = sum(vals >= pos_threshold) + sum(vals <= neg_threshold)
    frac_passing_windows = num_passing/float(len(vals))
    if (verbose):
        print("Thresholds from null dist were",
              neg_threshold," and ",pos_threshold,
              "with frac passing", frac_passing_windows)

    pos_vals = [x for x in vals if x >= 0]
    neg_vals = [x for x in vals if x < 0]
    #guard against an empty positive or negative partition
    if (len(pos_vals) == 0):
        pos_vals = [0]
    if (len(neg_vals) == 0):
        neg_vals = [0]

    def thresholds_for_frac(target_frac):
        #recompute thresholds admitting roughly target_frac of windows
        if (separate_pos_neg_thresholds):
            pos = np.percentile(a=pos_vals, q=100*(1-target_frac))
            neg = np.percentile(a=neg_vals, q=100*(target_frac))
        else:
            pos = np.percentile(a=np.abs(vals), q=100*(1-target_frac))
            neg = -pos
        return pos, neg

    #note: both checks use the ORIGINAL passing fraction
    if (frac_passing_windows < min_passing_windows_frac):
        if (verbose):
            print("Passing windows frac was",
                  frac_passing_windows,", which is below ",
                  min_passing_windows_frac,"; adjusting")
        pos_threshold, neg_threshold =\
            thresholds_for_frac(min_passing_windows_frac)
    if (frac_passing_windows > max_passing_windows_frac):
        if (verbose):
            print("Passing windows frac was",
                  frac_passing_windows,", which is above ",
                  max_passing_windows_frac,"; adjusting")
        pos_threshold, neg_threshold =\
            thresholds_for_frac(max_passing_windows_frac)

    if (verbose):
        print("New thresholds are",pos_threshold,"and",neg_threshold)

    return pos_threshold, neg_threshold
def make_nulldist_figure(orig_vals, null_vals, pos_ir, neg_ir,
                         pos_threshold, neg_threshold):
    """Plot overlaid density histograms of the foreground and null score
    distributions, together with the estimated foreground-precision curve
    and (when supplied) vertical lines at the chosen thresholds."""
    from matplotlib import pyplot as plt
    fig, density_ax = plt.subplots()
    sorted_orig_vals = np.array(sorted(orig_vals))
    #overlay the two normalized histograms (blue=foreground, orange=null)
    density_ax.hist(sorted_orig_vals, bins=100, density=True, alpha=0.5)
    density_ax.hist(null_vals, bins=100, density=True, alpha=0.5)
    density_ax.set_ylabel("Probability density\n(blue=foreground, orange=null)")
    density_ax.set_xlabel("Total importance in window")
    #precision curve: max over the positive- and negative-side regressions
    precisions = pos_ir.transform(sorted_orig_vals)
    if neg_ir is not None:
        precisions = np.maximum(precisions, neg_ir.transform(sorted_orig_vals))
    precision_ax = density_ax.twinx()
    precision_ax.plot(sorted_orig_vals, precisions)
    for threshold in (pos_threshold, neg_threshold):
        if threshold is not None:
            precision_ax.plot([threshold, threshold], [0.0, 1.0], color="red")
    precision_ax.set_ylabel("Estimated foreground precision")
    precision_ax.set_ylim(0.0, 1.02)
class FixedWindowAroundChunks(AbstractCoordProducer):
    """Coordinate producer using a SINGLE fixed sliding window size.

    Window sums of the score track are compared against a null
    distribution via isotonic regression to pick FDR-controlled
    thresholds, then coordinates are identified greedily."""
    #class-level counter used to give saved figures unique filenames
    count = 0
    def __init__(self, sliding,
                       flank,
                       suppress, #flanks to suppress
                       target_fdr,
                       min_passing_windows_frac,
                       max_passing_windows_frac,
                       separate_pos_neg_thresholds=False,
                       max_seqlets_total=None,
                       progress_update=5000,
                       verbose=True,
                       plot_save_dir="figures"):
        # sliding: the (single) sliding window size
        # flank: flank size added on each side of an identified window
        # target_fdr: target false discovery rate for the thresholds
        self.sliding = sliding
        self.flank = flank
        self.suppress = suppress
        self.target_fdr = target_fdr
        assert max_passing_windows_frac >= min_passing_windows_frac
        self.min_passing_windows_frac = min_passing_windows_frac
        self.max_passing_windows_frac = max_passing_windows_frac
        self.separate_pos_neg_thresholds = separate_pos_neg_thresholds
        # NOTE(review): the max_seqlets_total parameter is accepted but
        # discarded here (attribute is always None), so the seqlet cap is
        # never applied and from_hdf5/save_hdf5 round-trips are lossy.
        # Likely should be `= max_seqlets_total` — confirm intent.
        self.max_seqlets_total = None
        self.progress_update = progress_update
        self.verbose = verbose
        self.plot_save_dir = plot_save_dir

    @classmethod
    def from_hdf5(cls, grp):
        """Reconstruct from an hdf5 group written by save_hdf5."""
        sliding = grp.attrs["sliding"]
        flank = grp.attrs["flank"]
        suppress = grp.attrs["suppress"]
        target_fdr = grp.attrs["target_fdr"]
        min_passing_windows_frac = grp.attrs["min_passing_windows_frac"]
        max_passing_windows_frac = grp.attrs["max_passing_windows_frac"]
        separate_pos_neg_thresholds = grp.attrs["separate_pos_neg_thresholds"]
        if ("max_seqlets_total" in grp.attrs):
            max_seqlets_total = grp.attrs["max_seqlets_total"]
        else:
            max_seqlets_total = None
        progress_update = grp.attrs["progress_update"]
        verbose = grp.attrs["verbose"]
        return cls(sliding=sliding, flank=flank, suppress=suppress,
                    target_fdr=target_fdr,
                    min_passing_windows_frac=min_passing_windows_frac,
                    max_passing_windows_frac=max_passing_windows_frac,
                    separate_pos_neg_thresholds=separate_pos_neg_thresholds,
                    max_seqlets_total=max_seqlets_total,
                    progress_update=progress_update, verbose=verbose)

    def save_hdf5(self, grp):
        """Persist configuration to an hdf5 group."""
        grp.attrs["class"] = type(self).__name__
        grp.attrs["sliding"] = self.sliding
        grp.attrs["flank"] = self.flank
        grp.attrs["suppress"] = self.suppress
        grp.attrs["target_fdr"] = self.target_fdr
        grp.attrs["min_passing_windows_frac"] = self.min_passing_windows_frac
        grp.attrs["max_passing_windows_frac"] = self.max_passing_windows_frac
        grp.attrs["separate_pos_neg_thresholds"] =\
            self.separate_pos_neg_thresholds
        if (self.max_seqlets_total is not None):
            grp.attrs["max_seqlets_total"] = self.max_seqlets_total
        grp.attrs["progress_update"] = self.progress_update
        grp.attrs["verbose"] = self.verbose

    def __call__(self, score_track, null_track, tnt_results=None):
        """Identify seqlet coordinates.

        If tnt_results is None, thresholds are fitted here from the null
        distribution; otherwise the supplied thresholds/transformer are
        reused.  Returns a CoordProducerResults with the coords and the
        (possibly new) tnt_results."""
        # score_track now can be a list of arrays,
        assert all([len(x.shape)==1 for x in score_track])
        window_sum_function = get_simple_window_sum_function(self.sliding)

        if (self.verbose):
            print("Computing windowed sums on original")
            sys.stdout.flush()
        original_summed_score_track = window_sum_function(arrs=score_track)

        #Determine the window thresholds
        if (tnt_results is None):
            if (self.verbose):
                print("Generating null dist")
                sys.stdout.flush()
            null_vals = get_null_vals(
                null_track=null_track,
                score_track=score_track,
                window_size=self.sliding,
                original_summed_score_track=original_summed_score_track)

            if (self.verbose):
                print("Computing threshold")
                sys.stdout.flush()
            orig_vals = list(
                np.concatenate(original_summed_score_track, axis=0))
            #Note that orig_vals may have been subsampled at this point
            pos_ir, neg_ir, subsampled_orig_vals, subsampled_null_vals =\
                get_isotonic_regression_classifier(
                    orig_vals=orig_vals,
                    null_vals=null_vals)
            #sort the subsampled values by sign (negatives by magnitude)
            # so we can scan precision as a function of score
            subsampled_pos_orig_vals = (
                np.array(sorted([x for x in subsampled_orig_vals if x >= 0])))
            subsampled_neg_orig_vals = (
                np.array(sorted([x for x in subsampled_orig_vals if x < 0],
                                key=lambda x: abs(x))))
            subsampled_pos_val_precisions =\
                pos_ir.transform(subsampled_pos_orig_vals)
            if (len(subsampled_neg_orig_vals) > 0):
                subsampled_neg_val_precisions =\
                    neg_ir.transform(subsampled_neg_orig_vals)
            #first value whose precision reaches (1-target_fdr); falls back
            # to the largest value if none qualifies
            pos_threshold = ([x[1] for x in
                zip(subsampled_pos_val_precisions,
                    subsampled_pos_orig_vals) if x[0]
                 >= (1-self.target_fdr)]+[subsampled_pos_orig_vals[-1]])[0]
            if (len(subsampled_neg_orig_vals) > 0):
                neg_threshold = ([x[1] for x in
                    zip(subsampled_neg_val_precisions,
                        subsampled_neg_orig_vals) if x[0]
                     >= (1-self.target_fdr)]+[subsampled_neg_orig_vals[-1]])[0]
            else:
                neg_threshold = -np.inf

            pos_threshold, neg_threshold =\
                refine_thresholds_based_on_frac_passing(
                    vals=subsampled_orig_vals,
                    pos_threshold=pos_threshold,
                    neg_threshold=neg_threshold,
                    min_passing_windows_frac=self.min_passing_windows_frac,
                    max_passing_windows_frac=self.max_passing_windows_frac,
                    separate_pos_neg_thresholds=self.separate_pos_neg_thresholds,
                    verbose=self.verbose)

            if (self.separate_pos_neg_thresholds):
                val_transformer = SignedPercentileValTransformer(
                                    distribution=orig_vals)
            else:
                val_transformer = AbsPercentileValTransformer(
                                    distribution=orig_vals)

            if (self.verbose):
                print("Final raw thresholds are",
                      neg_threshold," and ",pos_threshold)
                print("Final transformed thresholds are",
                      val_transformer(neg_threshold)," and ",
                      val_transformer(pos_threshold))

            make_nulldist_figure(orig_vals=subsampled_orig_vals,
                                 null_vals=subsampled_null_vals,
                                 pos_ir=pos_ir, neg_ir=neg_ir,
                                 pos_threshold=pos_threshold,
                                 neg_threshold=neg_threshold)
            util.show_or_savefig(plot_save_dir=self.plot_save_dir,
                                 filename="scoredist_"
                                          +str(FixedWindowAroundChunks.count)+".png")
            FixedWindowAroundChunks.count += 1

            tnt_results = FWACTransformAndThresholdResults(
                            neg_threshold=neg_threshold,
                            transformed_neg_threshold=val_transformer(neg_threshold),
                            pos_threshold=pos_threshold,
                            transformed_pos_threshold=val_transformer(pos_threshold),
                            val_transformer=val_transformer)

        coords = identify_coords(
            score_track=original_summed_score_track,
            pos_threshold=tnt_results.pos_threshold,
            neg_threshold=tnt_results.neg_threshold,
            window_size=self.sliding,
            flank=self.flank,
            suppress=self.suppress,
            max_seqlets_total=self.max_seqlets_total,
            verbose=self.verbose)

        return CoordProducerResults(
                    coords=coords,
                    tnt_results=tnt_results)
| [
"matplotlib.pyplot.ylabel",
"numpy.log",
"numpy.array",
"numpy.percentile",
"numpy.random.RandomState",
"numpy.histogram",
"matplotlib.pyplot.xlabel",
"numpy.concatenate",
"numpy.maximum",
"sys.stdout.flush",
"modisco.util.load_string_list",
"numpy.abs",
"numpy.ceil",
"numpy.floor",
"num... | [((33997, 34011), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (34009, 34011), True, 'from matplotlib import pyplot as plt\n'), ((5463, 5513), 'modisco.util.load_string_list', 'util.load_string_list', ([], {'dset_name': '"""coords"""', 'grp': 'grp'}), "(dset_name='coords', grp=grp)\n", (5484, 5513), False, 'from modisco import util\n'), ((7506, 7534), 'numpy.array', 'np.array', (['percentiles_to_use'], {}), '(percentiles_to_use)\n', (7514, 7534), True, 'import numpy as np\n'), ((7593, 7616), 'numpy.random.RandomState', 'np.random.RandomState', ([], {}), '()\n', (7614, 7616), True, 'import numpy as np\n'), ((7779, 7817), 'numpy.array', 'np.array', (["grp['percentiles_to_use'][:]"], {}), "(grp['percentiles_to_use'][:])\n", (7787, 7817), True, 'import numpy as np\n'), ((8544, 8595), 'numpy.concatenate', 'np.concatenate', (['original_summed_score_track'], {'axis': '(0)'}), '(original_summed_score_track, axis=0)\n', (8558, 8595), True, 'import numpy as np\n'), ((8702, 8733), 'numpy.histogram', 'np.histogram', (['values'], {'bins': '(1000)'}), '(values, bins=1000)\n', (8714, 8733), True, 'import numpy as np\n'), ((8750, 8766), 'numpy.argmax', 'np.argmax', (['hist1'], {}), '(hist1)\n', (8759, 8766), True, 'import numpy as np\n'), ((8936, 8971), 'numpy.histogram', 'np.histogram', (['top_values'], {'bins': '(1000)'}), '(top_values, bins=1000)\n', (8948, 8971), True, 'import numpy as np\n'), ((8988, 9004), 'numpy.argmax', 'np.argmax', (['hist2'], {}), '(hist2)\n', (8997, 9004), True, 'import numpy as np\n'), ((10514, 10536), 'numpy.array', 'np.array', (['sampled_vals'], {}), '(sampled_vals)\n', (10522, 10536), True, 'import numpy as np\n'), ((10928, 10951), 'numpy.random.RandomState', 'np.random.RandomState', ([], {}), '()\n', (10949, 10951), True, 'import numpy as np\n'), ((11642, 11693), 'numpy.concatenate', 'np.concatenate', (['original_summed_score_track'], {'axis': '(0)'}), '(original_summed_score_track, axis=0)\n', (11656, 11693), True, 
'import numpy as np\n'), ((11731, 11800), 'numpy.percentile', 'np.percentile', ([], {'a': 'all_orig_summed_scores', 'q': 'self.upper_null_percentile'}), '(a=all_orig_summed_scores, q=self.upper_null_percentile)\n', (11744, 11800), True, 'import numpy as np\n'), ((11863, 11932), 'numpy.percentile', 'np.percentile', ([], {'a': 'all_orig_summed_scores', 'q': 'self.lower_null_percentile'}), '(a=all_orig_summed_scores, q=self.lower_null_percentile)\n', (11876, 11932), True, 'import numpy as np\n'), ((14793, 14811), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (14809, 14811), False, 'import sys\n'), ((18212, 18237), 'numpy.array', 'np.array', (["grp['origvals']"], {}), "(grp['origvals'])\n", (18220, 18237), True, 'import numpy as np\n'), ((18257, 18282), 'numpy.array', 'np.array', (["grp['nullvals']"], {}), "(grp['nullvals'])\n", (18265, 18282), True, 'import numpy as np\n'), ((28639, 28650), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (28647, 28650), True, 'import numpy as np\n'), ((31109, 31127), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (31125, 31127), False, 'import sys\n'), ((6285, 6299), 'numpy.cumsum', 'np.cumsum', (['arr'], {}), '(arr)\n', (6294, 6299), True, 'import numpy as np\n'), ((6831, 6841), 'numpy.sign', 'np.sign', (['x'], {}), '(x)\n', (6838, 6841), True, 'import numpy as np\n'), ((7135, 7144), 'numpy.abs', 'np.abs', (['x'], {}), '(x)\n', (7141, 7144), True, 'import numpy as np\n'), ((14619, 14666), 'numpy.concatenate', 'np.concatenate', (['null_summed_score_track'], {'axis': '(0)'}), '(null_summed_score_track, axis=0)\n', (14633, 14666), True, 'import numpy as np\n'), ((23049, 23067), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (23065, 23067), False, 'import sys\n'), ((23197, 23215), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (23213, 23215), False, 'import sys\n'), ((23241, 23281), 'numpy.concatenate', 'np.concatenate', (['window_sums_rows'], {'axis': '(0)'}), '(window_sums_rows, axis=0)\n', 
(23255, 23281), True, 'import numpy as np\n'), ((25291, 25334), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Tranformed IR precision value"""'], {}), "('Tranformed IR precision value')\n", (25301, 25334), True, 'from matplotlib import pyplot as plt\n'), ((25347, 25364), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""CDF"""'], {}), "('CDF')\n", (25357, 25364), True, 'from matplotlib import pyplot as plt\n'), ((29563, 29600), 'numpy.argmax', 'np.argmax', (['single_score_track'], {'axis': '(0)'}), '(single_score_track, axis=0)\n', (29572, 29600), True, 'import numpy as np\n'), ((31310, 31328), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (31326, 31328), False, 'import sys\n'), ((32567, 32632), 'numpy.percentile', 'np.percentile', ([], {'a': 'pos_vals', 'q': '(100 * (1 - min_passing_windows_frac))'}), '(a=pos_vals, q=100 * (1 - min_passing_windows_frac))\n', (32580, 32632), True, 'import numpy as np\n'), ((32690, 32749), 'numpy.percentile', 'np.percentile', ([], {'a': 'neg_vals', 'q': '(100 * min_passing_windows_frac)'}), '(a=neg_vals, q=100 * min_passing_windows_frac)\n', (32703, 32749), True, 'import numpy as np\n'), ((33283, 33348), 'numpy.percentile', 'np.percentile', ([], {'a': 'pos_vals', 'q': '(100 * (1 - max_passing_windows_frac))'}), '(a=pos_vals, q=100 * (1 - max_passing_windows_frac))\n', (33296, 33348), True, 'import numpy as np\n'), ((33406, 33465), 'numpy.percentile', 'np.percentile', ([], {'a': 'neg_vals', 'q': '(100 * max_passing_windows_frac)'}), '(a=neg_vals, q=100 * max_passing_windows_frac)\n', (33419, 33465), True, 'import numpy as np\n'), ((38174, 38192), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (38190, 38192), False, 'import sys\n'), ((14826, 14853), 'numpy.random.RandomState', 'np.random.RandomState', (['(1234)'], {}), '(1234)\n', (14847, 14853), True, 'import numpy as np\n'), ((16353, 16416), 'sklearn.isotonic.IsotonicRegression', 'IsotonicRegression', ([], {'out_of_bounds': '"""clip"""', 'increasing': 
'increasing'}), "(out_of_bounds='clip', increasing=increasing)\n", (16371, 16416), False, 'from sklearn.isotonic import IsotonicRegression\n'), ((16473, 16527), 'numpy.concatenate', 'np.concatenate', (['[self.origvals, self.nullvals]'], {'axis': '(0)'}), '([self.origvals, self.nullvals], axis=0)\n', (16487, 16527), True, 'import numpy as np\n'), ((20328, 20352), 'numpy.array', 'np.array', (["grp['sliding']"], {}), "(grp['sliding'])\n", (20336, 20352), True, 'import numpy as np\n'), ((21550, 21572), 'numpy.array', 'np.array', (['self.sliding'], {}), '(self.sliding)\n', (21558, 21572), True, 'import numpy as np\n'), ((22898, 22945), 'numpy.concatenate', 'np.concatenate', (['null_summed_score_track'], {'axis': '(0)'}), '(null_summed_score_track, axis=0)\n', (22912, 22945), True, 'import numpy as np\n'), ((25008, 25064), 'numpy.concatenate', 'np.concatenate', (['precisiontransformed_score_track'], {'axis': '(0)'}), '(precisiontransformed_score_track, axis=0)\n', (25022, 25064), True, 'import numpy as np\n'), ((38440, 38458), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (38456, 38458), False, 'import sys\n'), ((38799, 38817), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (38815, 38817), False, 'import sys\n'), ((38864, 38915), 'numpy.concatenate', 'np.concatenate', (['original_summed_score_track'], {'axis': '(0)'}), '(original_summed_score_track, axis=0)\n', (38878, 38915), True, 'import numpy as np\n'), ((9601, 9644), 'numpy.log', 'np.log', (['(1 - self.percentiles_to_use / 100.0)'], {}), '(1 - self.percentiles_to_use / 100.0)\n', (9607, 9644), True, 'import numpy as np\n'), ((9657, 9711), 'numpy.percentile', 'np.percentile', ([], {'a': 'pos_values', 'q': 'self.percentiles_to_use'}), '(a=pos_values, q=self.percentiles_to_use)\n', (9670, 9711), True, 'import numpy as np\n'), ((9767, 9810), 'numpy.log', 'np.log', (['(1 - self.percentiles_to_use / 100.0)'], {}), '(1 - self.percentiles_to_use / 100.0)\n', (9773, 9810), True, 'import numpy as 
np\n'), ((23439, 23479), 'numpy.concatenate', 'np.concatenate', (['window_sums_rows'], {'axis': '(0)'}), '(window_sums_rows, axis=0)\n', (23453, 23479), True, 'import numpy as np\n'), ((29004, 29013), 'numpy.abs', 'np.abs', (['y'], {}), '(y)\n', (29010, 29013), True, 'import numpy as np\n'), ((30801, 30834), 'numpy.floor', 'np.floor', (['(argmax + 0.5 - suppress)'], {}), '(argmax + 0.5 - suppress)\n', (30809, 30834), True, 'import numpy as np\n'), ((30872, 30904), 'numpy.ceil', 'np.ceil', (['(argmax + 0.5 + suppress)'], {}), '(argmax + 0.5 + suppress)\n', (30879, 30904), True, 'import numpy as np\n'), ((32858, 32870), 'numpy.abs', 'np.abs', (['vals'], {}), '(vals)\n', (32864, 32870), True, 'import numpy as np\n'), ((33574, 33586), 'numpy.abs', 'np.abs', (['vals'], {}), '(vals)\n', (33580, 33586), True, 'import numpy as np\n'), ((9830, 9890), 'numpy.percentile', 'np.percentile', ([], {'a': 'neg_values', 'q': '(100 - self.percentiles_to_use)'}), '(a=neg_values, q=100 - self.percentiles_to_use)\n', (9843, 9890), True, 'import numpy as np\n'), ((10421, 10444), 'numpy.log', 'np.log', (['(1 - sampled_cdf)'], {}), '(1 - sampled_cdf)\n', (10427, 10444), True, 'import numpy as np\n'), ((10279, 10302), 'numpy.log', 'np.log', (['(1 - sampled_cdf)'], {}), '(1 - sampled_cdf)\n', (10285, 10302), True, 'import numpy as np\n'), ((16034, 16058), 'numpy.maximum', 'np.maximum', (['irval', '(1e-07)'], {}), '(irval, 1e-07)\n', (16044, 16058), True, 'import numpy as np\n'), ((31376, 31391), 'numpy.abs', 'np.abs', (['x.score'], {}), '(x.score)\n', (31382, 31391), True, 'import numpy as np\n')] |
# =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
import typing
from pydantic import Field
from .address import Address
from .authorization_state import AuthorizationState
from .background import Background
from .basic_group import BasicGroup
from .basic_group_full_info import BasicGroupFullInfo
from .call import Call
from .callback_query_payload import CallbackQueryPayload
from .chat import Chat
from .chat_action import ChatAction
from .chat_action_bar import ChatActionBar
from .chat_filter_info import ChatFilterInfo
from .chat_invite_link import ChatInviteLink
from .chat_join_request import ChatJoinRequest
from .chat_join_requests_info import ChatJoinRequestsInfo
from .chat_list import ChatList
from .chat_member import ChatMember
from .chat_nearby import ChatNearby
from .chat_notification_settings import ChatNotificationSettings
from .chat_permissions import ChatPermissions
from .chat_photo_info import ChatPhotoInfo
from .chat_position import ChatPosition
from .chat_theme import ChatTheme
from .chat_type import ChatType
from .connection_state import ConnectionState
from .draft_message import DraftMessage
from .file import File
from .group_call import GroupCall
from .group_call_participant import GroupCallParticipant
from .language_pack_string import LanguagePackString
from .location import Location
from .message import Message
from .message_content import MessageContent
from .message_interaction_info import MessageInteractionInfo
from .message_sender import MessageSender
from .notification import Notification
from .notification_group import NotificationGroup
from .notification_group_type import NotificationGroupType
from .notification_settings_scope import NotificationSettingsScope
from .option_value import OptionValue
from .order_info import OrderInfo
from .poll import Poll
from .reply_markup import ReplyMarkup
from .scope_notification_settings import ScopeNotificationSettings
from .secret_chat import SecretChat
from .sticker import Sticker
from .sticker_set import StickerSet
from .sticker_sets import StickerSets
from .suggested_action import SuggestedAction
from .supergroup import Supergroup
from .supergroup_full_info import SupergroupFullInfo
from .terms_of_service import TermsOfService
from .user import User
from .user_full_info import UserFullInfo
from .user_privacy_setting import UserPrivacySetting
from .user_privacy_setting_rules import UserPrivacySettingRules
from .user_status import UserStatus
from .video_chat import VideoChat
from ..base_object import BaseObject
class Update(BaseObject):
    """
    Contains notifications about data changes
    """
    # TDLib type discriminator; mapped to/from the "@type" JSON key by pydantic
    ID: str = Field("update", alias="@type")
class UpdateActiveNotifications(Update):
    """
    Contains active notifications that was shown on previous application launches. This update is sent only if the message database is used. In that case it comes once before any updateNotification and updateNotificationGroup update
    :param groups: Lists of active notification groups
    :type groups: :class:`list[NotificationGroup]`
    """
    ID: str = Field("updateActiveNotifications", alias="@type")
    groups: list[NotificationGroup]
    @staticmethod
    def read(q: dict) -> UpdateActiveNotifications:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateActiveNotifications.construct(**q)
class UpdateAnimatedEmojiMessageClicked(Update):
    """
    Some animated emoji message was clicked and a big animated sticker must be played if the message is visible on the screen. chatActionWatchingAnimations with the text of the message needs to be sent if the sticker is played
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_id: Message identifier
    :type message_id: :class:`int`
    :param sticker: The animated sticker to be played
    :type sticker: :class:`Sticker`
    """
    ID: str = Field("updateAnimatedEmojiMessageClicked", alias="@type")
    chat_id: int
    message_id: int
    sticker: Sticker
    @staticmethod
    def read(q: dict) -> UpdateAnimatedEmojiMessageClicked:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateAnimatedEmojiMessageClicked.construct(**q)
class UpdateAnimationSearchParameters(Update):
    """
    The parameters of animation search through GetOption("animation_search_bot_username") bot has changed
    :param provider: Name of the animation search provider
    :type provider: :class:`str`
    :param emojis: The new list of emojis suggested for searching
    :type emojis: :class:`list[str]`
    """
    ID: str = Field("updateAnimationSearchParameters", alias="@type")
    provider: str
    emojis: list[str]
    @staticmethod
    def read(q: dict) -> UpdateAnimationSearchParameters:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateAnimationSearchParameters.construct(**q)
class UpdateAuthorizationState(Update):
    """
    The user authorization state has changed
    :param authorization_state: New authorization state
    :type authorization_state: :class:`AuthorizationState`
    """
    ID: str = Field("updateAuthorizationState", alias="@type")
    authorization_state: AuthorizationState
    @staticmethod
    def read(q: dict) -> UpdateAuthorizationState:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateAuthorizationState.construct(**q)
class UpdateBasicGroup(Update):
    """
    Some data of a basic group has changed. This update is guaranteed to come before the basic group identifier is returned to the application
    :param basic_group: New data about the group
    :type basic_group: :class:`BasicGroup`
    """
    ID: str = Field("updateBasicGroup", alias="@type")
    basic_group: BasicGroup
    @staticmethod
    def read(q: dict) -> UpdateBasicGroup:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateBasicGroup.construct(**q)
class UpdateBasicGroupFullInfo(Update):
    """
    Some data in basicGroupFullInfo has been changed
    :param basic_group_id: Identifier of a basic group
    :type basic_group_id: :class:`int`
    :param basic_group_full_info: New full information about the group
    :type basic_group_full_info: :class:`BasicGroupFullInfo`
    """
    ID: str = Field("updateBasicGroupFullInfo", alias="@type")
    basic_group_id: int
    basic_group_full_info: BasicGroupFullInfo
    @staticmethod
    def read(q: dict) -> UpdateBasicGroupFullInfo:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateBasicGroupFullInfo.construct(**q)
class UpdateCall(Update):
    """
    New call was created or information about a call was updated
    :param call: New data about a call
    :type call: :class:`Call`
    """
    ID: str = Field("updateCall", alias="@type")
    call: Call
    @staticmethod
    def read(q: dict) -> UpdateCall:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateCall.construct(**q)
class UpdateChatAction(Update):
    """
    A message sender activity in the chat has changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_thread_id: If not 0, a message thread identifier in which the action was performed
    :type message_thread_id: :class:`int`
    :param sender_id: Identifier of a message sender performing the action
    :type sender_id: :class:`MessageSender`
    :param action: The action
    :type action: :class:`ChatAction`
    """
    ID: str = Field("updateChatAction", alias="@type")
    chat_id: int
    message_thread_id: int
    sender_id: MessageSender
    action: ChatAction
    @staticmethod
    def read(q: dict) -> UpdateChatAction:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatAction.construct(**q)
class UpdateChatActionBar(Update):
    """
    The chat action bar was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param action_bar: The new value of the action bar; may be null, defaults to None
    :type action_bar: :class:`ChatActionBar`, optional
    """
    ID: str = Field("updateChatActionBar", alias="@type")
    chat_id: int
    action_bar: typing.Optional[ChatActionBar] = None
    @staticmethod
    def read(q: dict) -> UpdateChatActionBar:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatActionBar.construct(**q)
class UpdateChatDefaultDisableNotification(Update):
    """
    The value of the default disable_notification parameter, used when a message is sent to the chat, was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param default_disable_notification: The new default_disable_notification value
    :type default_disable_notification: :class:`bool`
    """
    ID: str = Field("updateChatDefaultDisableNotification", alias="@type")
    chat_id: int
    default_disable_notification: bool
    @staticmethod
    def read(q: dict) -> UpdateChatDefaultDisableNotification:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatDefaultDisableNotification.construct(**q)
class UpdateChatDraftMessage(Update):
    """
    A chat draft has changed. Be aware that the update may come in the currently opened chat but with old content of the draft. If the user has changed the content of the draft, this update mustn't be applied
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param draft_message: The new draft message; may be null, defaults to None
    :type draft_message: :class:`DraftMessage`, optional
    :param positions: The new chat positions in the chat lists
    :type positions: :class:`list[ChatPosition]`
    """
    ID: str = Field("updateChatDraftMessage", alias="@type")
    chat_id: int
    draft_message: typing.Optional[DraftMessage] = None
    positions: list[ChatPosition]
    @staticmethod
    def read(q: dict) -> UpdateChatDraftMessage:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatDraftMessage.construct(**q)
class UpdateChatFilters(Update):
    """
    The list of chat filters or a chat filter has changed
    :param chat_filters: The new list of chat filters
    :type chat_filters: :class:`list[ChatFilterInfo]`
    """
    ID: str = Field("updateChatFilters", alias="@type")
    chat_filters: list[ChatFilterInfo]
    @staticmethod
    def read(q: dict) -> UpdateChatFilters:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatFilters.construct(**q)
class UpdateChatHasProtectedContent(Update):
    """
    A chat content was allowed or restricted for saving
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param has_protected_content: New value of has_protected_content
    :type has_protected_content: :class:`bool`
    """
    ID: str = Field("updateChatHasProtectedContent", alias="@type")
    chat_id: int
    has_protected_content: bool
    @staticmethod
    def read(q: dict) -> UpdateChatHasProtectedContent:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatHasProtectedContent.construct(**q)
class UpdateChatHasScheduledMessages(Update):
    """
    A chat's has_scheduled_messages field has changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param has_scheduled_messages: New value of has_scheduled_messages
    :type has_scheduled_messages: :class:`bool`
    """
    ID: str = Field("updateChatHasScheduledMessages", alias="@type")
    chat_id: int
    has_scheduled_messages: bool
    @staticmethod
    def read(q: dict) -> UpdateChatHasScheduledMessages:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatHasScheduledMessages.construct(**q)
class UpdateChatIsBlocked(Update):
    """
    A chat was blocked or unblocked
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param is_blocked: New value of is_blocked
    :type is_blocked: :class:`bool`
    """
    ID: str = Field("updateChatIsBlocked", alias="@type")
    chat_id: int
    is_blocked: bool
    @staticmethod
    def read(q: dict) -> UpdateChatIsBlocked:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatIsBlocked.construct(**q)
class UpdateChatIsMarkedAsUnread(Update):
    """
    A chat was marked as unread or was read
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param is_marked_as_unread: New value of is_marked_as_unread
    :type is_marked_as_unread: :class:`bool`
    """
    ID: str = Field("updateChatIsMarkedAsUnread", alias="@type")
    chat_id: int
    is_marked_as_unread: bool
    @staticmethod
    def read(q: dict) -> UpdateChatIsMarkedAsUnread:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatIsMarkedAsUnread.construct(**q)
class UpdateChatLastMessage(Update):
    """
    The last message of a chat was changed. If last_message is null, then the last message in the chat became unknown. Some new unknown messages might be added to the chat in this case
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param last_message: The new last message in the chat; may be null, defaults to None
    :type last_message: :class:`Message`, optional
    :param positions: The new chat positions in the chat lists
    :type positions: :class:`list[ChatPosition]`
    """
    ID: str = Field("updateChatLastMessage", alias="@type")
    chat_id: int
    last_message: typing.Optional[Message] = None
    positions: list[ChatPosition]
    @staticmethod
    def read(q: dict) -> UpdateChatLastMessage:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatLastMessage.construct(**q)
class UpdateChatMember(Update):
    """
    User rights changed in a chat; for bots only
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param actor_user_id: Identifier of the user, changing the rights
    :type actor_user_id: :class:`int`
    :param date: Point in time (Unix timestamp) when the user rights was changed
    :type date: :class:`int`
    :param invite_link: If user has joined the chat using an invite link, the invite link; may be null, defaults to None
    :type invite_link: :class:`ChatInviteLink`, optional
    :param old_chat_member: Previous chat member
    :type old_chat_member: :class:`ChatMember`
    :param new_chat_member: New chat member
    :type new_chat_member: :class:`ChatMember`
    """
    ID: str = Field("updateChatMember", alias="@type")
    chat_id: int
    actor_user_id: int
    date: int
    invite_link: typing.Optional[ChatInviteLink] = None
    old_chat_member: ChatMember
    new_chat_member: ChatMember
    @staticmethod
    def read(q: dict) -> UpdateChatMember:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatMember.construct(**q)
class UpdateChatMessageSender(Update):
    """
    The message sender that is selected to send messages in a chat has changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_sender_id: New value of message_sender_id; may be null if the user can't change message sender, defaults to None
    :type message_sender_id: :class:`MessageSender`, optional
    """
    ID: str = Field("updateChatMessageSender", alias="@type")
    chat_id: int
    message_sender_id: typing.Optional[MessageSender] = None
    @staticmethod
    def read(q: dict) -> UpdateChatMessageSender:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatMessageSender.construct(**q)
class UpdateChatMessageTtl(Update):
    """
    The message Time To Live setting for a chat was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_ttl: New value of message_ttl
    :type message_ttl: :class:`int`
    """
    ID: str = Field("updateChatMessageTtl", alias="@type")
    chat_id: int
    message_ttl: int
    @staticmethod
    def read(q: dict) -> UpdateChatMessageTtl:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatMessageTtl.construct(**q)
class UpdateChatNotificationSettings(Update):
    """
    Notification settings for a chat were changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param notification_settings: The new notification settings
    :type notification_settings: :class:`ChatNotificationSettings`
    """
    ID: str = Field("updateChatNotificationSettings", alias="@type")
    chat_id: int
    notification_settings: ChatNotificationSettings
    @staticmethod
    def read(q: dict) -> UpdateChatNotificationSettings:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatNotificationSettings.construct(**q)
class UpdateChatOnlineMemberCount(Update):
    """
    The number of online group members has changed. This update with non-zero count is sent only for currently opened chats. There is no guarantee that it will be sent just after the count has changed
    :param chat_id: Identifier of the chat
    :type chat_id: :class:`int`
    :param online_member_count: New number of online members in the chat, or 0 if unknown
    :type online_member_count: :class:`int`
    """
    ID: str = Field("updateChatOnlineMemberCount", alias="@type")
    chat_id: int
    online_member_count: int
    @staticmethod
    def read(q: dict) -> UpdateChatOnlineMemberCount:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatOnlineMemberCount.construct(**q)
class UpdateChatPendingJoinRequests(Update):
    """
    The chat pending join requests were changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param pending_join_requests: The new data about pending join requests; may be null, defaults to None
    :type pending_join_requests: :class:`ChatJoinRequestsInfo`, optional
    """
    ID: str = Field("updateChatPendingJoinRequests", alias="@type")
    chat_id: int
    pending_join_requests: typing.Optional[ChatJoinRequestsInfo] = None
    @staticmethod
    def read(q: dict) -> UpdateChatPendingJoinRequests:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatPendingJoinRequests.construct(**q)
class UpdateChatPermissions(Update):
    """
    Chat permissions was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param permissions: The new chat permissions
    :type permissions: :class:`ChatPermissions`
    """
    ID: str = Field("updateChatPermissions", alias="@type")
    chat_id: int
    permissions: ChatPermissions
    @staticmethod
    def read(q: dict) -> UpdateChatPermissions:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatPermissions.construct(**q)
class UpdateChatPhoto(Update):
    """
    A chat photo was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param photo: The new chat photo; may be null, defaults to None
    :type photo: :class:`ChatPhotoInfo`, optional
    """
    ID: str = Field("updateChatPhoto", alias="@type")
    chat_id: int
    photo: typing.Optional[ChatPhotoInfo] = None
    @staticmethod
    def read(q: dict) -> UpdateChatPhoto:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatPhoto.construct(**q)
class UpdateChatPosition(Update):
    """
    The position of a chat in a chat list has changed. Instead of this update updateChatLastMessage or updateChatDraftMessage might be sent
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param position: New chat position. If new order is 0, then the chat needs to be removed from the list
    :type position: :class:`ChatPosition`
    """
    ID: str = Field("updateChatPosition", alias="@type")
    chat_id: int
    position: ChatPosition
    @staticmethod
    def read(q: dict) -> UpdateChatPosition:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatPosition.construct(**q)
class UpdateChatReadInbox(Update):
    """
    Incoming messages were read or the number of unread messages has been changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param last_read_inbox_message_id: Identifier of the last read incoming message
    :type last_read_inbox_message_id: :class:`int`
    :param unread_count: The number of unread messages left in the chat
    :type unread_count: :class:`int`
    """
    ID: str = Field("updateChatReadInbox", alias="@type")
    chat_id: int
    last_read_inbox_message_id: int
    unread_count: int
    @staticmethod
    def read(q: dict) -> UpdateChatReadInbox:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatReadInbox.construct(**q)
class UpdateChatReadOutbox(Update):
    """
    Outgoing messages were read
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param last_read_outbox_message_id: Identifier of last read outgoing message
    :type last_read_outbox_message_id: :class:`int`
    """
    ID: str = Field("updateChatReadOutbox", alias="@type")
    chat_id: int
    last_read_outbox_message_id: int
    @staticmethod
    def read(q: dict) -> UpdateChatReadOutbox:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatReadOutbox.construct(**q)
class UpdateChatReplyMarkup(Update):
    """
    The default chat reply markup was changed. Can occur because new messages with reply markup were received or because an old reply markup was hidden by the user
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param reply_markup_message_id: Identifier of the message from which reply markup needs to be used; 0 if there is no default custom reply markup in the chat
    :type reply_markup_message_id: :class:`int`
    """
    ID: str = Field("updateChatReplyMarkup", alias="@type")
    chat_id: int
    reply_markup_message_id: int
    @staticmethod
    def read(q: dict) -> UpdateChatReplyMarkup:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatReplyMarkup.construct(**q)
class UpdateChatTheme(Update):
    """
    The chat theme was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param theme_name: The new name of the chat theme; may be empty if theme was reset to default
    :type theme_name: :class:`str`
    """
    ID: str = Field("updateChatTheme", alias="@type")
    chat_id: int
    theme_name: str
    @staticmethod
    def read(q: dict) -> UpdateChatTheme:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatTheme.construct(**q)
class UpdateChatThemes(Update):
    """
    The list of available chat themes has changed
    :param chat_themes: The new list of chat themes
    :type chat_themes: :class:`list[ChatTheme]`
    """
    ID: str = Field("updateChatThemes", alias="@type")
    chat_themes: list[ChatTheme]
    @staticmethod
    def read(q: dict) -> UpdateChatThemes:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatThemes.construct(**q)
class UpdateChatTitle(Update):
    """
    The title of a chat was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param title: The new chat title
    :type title: :class:`str`
    """
    ID: str = Field("updateChatTitle", alias="@type")
    chat_id: int
    title: str
    @staticmethod
    def read(q: dict) -> UpdateChatTitle:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatTitle.construct(**q)
class UpdateChatUnreadMentionCount(Update):
    """
    The chat unread_mention_count has changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param unread_mention_count: The number of unread mention messages left in the chat
    :type unread_mention_count: :class:`int`
    """
    ID: str = Field("updateChatUnreadMentionCount", alias="@type")
    chat_id: int
    unread_mention_count: int
    @staticmethod
    def read(q: dict) -> UpdateChatUnreadMentionCount:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatUnreadMentionCount.construct(**q)
class UpdateChatVideoChat(Update):
    """
    A chat video chat state has changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param video_chat: New value of video_chat
    :type video_chat: :class:`VideoChat`
    """
    ID: str = Field("updateChatVideoChat", alias="@type")
    chat_id: int
    video_chat: VideoChat
    @staticmethod
    def read(q: dict) -> UpdateChatVideoChat:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateChatVideoChat.construct(**q)
class UpdateConnectionState(Update):
    """
    The connection state has changed. This update must be used only to show a human-readable description of the connection state
    :param state: The new connection state
    :type state: :class:`ConnectionState`
    """
    ID: str = Field("updateConnectionState", alias="@type")
    state: ConnectionState
    @staticmethod
    def read(q: dict) -> UpdateConnectionState:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateConnectionState.construct(**q)
class UpdateDeleteMessages(Update):
    """
    Some messages were deleted
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_ids: Identifiers of the deleted messages
    :type message_ids: :class:`list[int]`
    :param is_permanent: True, if the messages are permanently deleted by a user (as opposed to just becoming inaccessible)
    :type is_permanent: :class:`bool`
    :param from_cache: True, if the messages are deleted only from the cache and can possibly be retrieved again in the future
    :type from_cache: :class:`bool`
    """
    ID: str = Field("updateDeleteMessages", alias="@type")
    chat_id: int
    message_ids: list[int]
    is_permanent: bool
    from_cache: bool
    @staticmethod
    def read(q: dict) -> UpdateDeleteMessages:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateDeleteMessages.construct(**q)
class UpdateDiceEmojis(Update):
    """
    The list of supported dice emojis has changed
    :param emojis: The new list of supported dice emojis
    :type emojis: :class:`list[str]`
    """
    ID: str = Field("updateDiceEmojis", alias="@type")
    emojis: list[str]
    @staticmethod
    def read(q: dict) -> UpdateDiceEmojis:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateDiceEmojis.construct(**q)
class UpdateFavoriteStickers(Update):
    """
    The list of favorite stickers was updated
    :param sticker_ids: The new list of file identifiers of favorite stickers
    :type sticker_ids: :class:`list[int]`
    """
    ID: str = Field("updateFavoriteStickers", alias="@type")
    sticker_ids: list[int]
    @staticmethod
    def read(q: dict) -> UpdateFavoriteStickers:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateFavoriteStickers.construct(**q)
class UpdateFile(Update):
    """
    Information about a file was updated
    :param file: New data about the file
    :type file: :class:`File`
    """
    ID: str = Field("updateFile", alias="@type")
    file: File
    @staticmethod
    def read(q: dict) -> UpdateFile:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateFile.construct(**q)
class UpdateFileGenerationStart(Update):
    """
    The file generation process needs to be started by the application
    :param generation_id: Unique identifier for the generation process
    :type generation_id: :class:`int`
    :param original_path: The path to a file from which a new file is generated; may be empty
    :type original_path: :class:`str`
    :param destination_path: The path to a file that must be created and where the new file is generated
    :type destination_path: :class:`str`
    :param conversion: String specifying the conversion applied to the original file. If conversion is "#url#" than original_path contains an HTTP/HTTPS URL of a file, which must be downloaded by the application
    :type conversion: :class:`str`
    """
    ID: str = Field("updateFileGenerationStart", alias="@type")
    generation_id: int
    original_path: str
    destination_path: str
    conversion: str
    @staticmethod
    def read(q: dict) -> UpdateFileGenerationStart:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateFileGenerationStart.construct(**q)
class UpdateFileGenerationStop(Update):
    """
    File generation is no longer needed
    :param generation_id: Unique identifier for the generation process
    :type generation_id: :class:`int`
    """
    ID: str = Field("updateFileGenerationStop", alias="@type")
    generation_id: int
    @staticmethod
    def read(q: dict) -> UpdateFileGenerationStop:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateFileGenerationStop.construct(**q)
class UpdateGroupCall(Update):
    """
    Information about a group call was updated
    :param group_call: New data about a group call
    :type group_call: :class:`GroupCall`
    """
    ID: str = Field("updateGroupCall", alias="@type")
    group_call: GroupCall
    @staticmethod
    def read(q: dict) -> UpdateGroupCall:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateGroupCall.construct(**q)
class UpdateGroupCallParticipant(Update):
    """
    Information about a group call participant was changed. The updates are sent only after the group call is received through getGroupCall and only if the call is joined or being joined
    :param group_call_id: Identifier of group call
    :type group_call_id: :class:`int`
    :param participant: New data about a participant
    :type participant: :class:`GroupCallParticipant`
    """
    ID: str = Field("updateGroupCallParticipant", alias="@type")
    group_call_id: int
    participant: GroupCallParticipant
    @staticmethod
    def read(q: dict) -> UpdateGroupCallParticipant:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateGroupCallParticipant.construct(**q)
class UpdateHavePendingNotifications(Update):
    """
    Describes whether there are some pending notification updates. Can be used to prevent application from killing, while there are some pending notifications
    :param have_delayed_notifications: True, if there are some delayed notification updates, which will be sent soon
    :type have_delayed_notifications: :class:`bool`
    :param have_unreceived_notifications: True, if there can be some yet unreceived notifications, which are being fetched from the server
    :type have_unreceived_notifications: :class:`bool`
    """
    ID: str = Field("updateHavePendingNotifications", alias="@type")
    have_delayed_notifications: bool
    have_unreceived_notifications: bool
    @staticmethod
    def read(q: dict) -> UpdateHavePendingNotifications:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateHavePendingNotifications.construct(**q)
class UpdateInstalledStickerSets(Update):
    """
    The list of installed sticker sets was updated
    :param is_masks: True, if the list of installed mask sticker sets was updated
    :type is_masks: :class:`bool`
    :param sticker_set_ids: The new list of installed ordinary sticker sets
    :type sticker_set_ids: :class:`list[int]`
    """
    ID: str = Field("updateInstalledStickerSets", alias="@type")
    is_masks: bool
    sticker_set_ids: list[int]
    @staticmethod
    def read(q: dict) -> UpdateInstalledStickerSets:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateInstalledStickerSets.construct(**q)
class UpdateLanguagePackStrings(Update):
    """
    Some language pack strings have been updated
    :param localization_target: Localization target to which the language pack belongs
    :type localization_target: :class:`str`
    :param language_pack_id: Identifier of the updated language pack
    :type language_pack_id: :class:`str`
    :param strings: List of changed language pack strings
    :type strings: :class:`list[LanguagePackString]`
    """
    ID: str = Field("updateLanguagePackStrings", alias="@type")
    localization_target: str
    language_pack_id: str
    strings: list[LanguagePackString]
    @staticmethod
    def read(q: dict) -> UpdateLanguagePackStrings:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateLanguagePackStrings.construct(**q)
class UpdateMessageContent(Update):
    """
    The message content has changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_id: Message identifier
    :type message_id: :class:`int`
    :param new_content: New message content
    :type new_content: :class:`MessageContent`
    """
    ID: str = Field("updateMessageContent", alias="@type")
    chat_id: int
    message_id: int
    new_content: MessageContent
    @staticmethod
    def read(q: dict) -> UpdateMessageContent:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageContent.construct(**q)
class UpdateMessageContentOpened(Update):
    """
    The message content was opened. Updates voice note messages to "listened", video note messages to "viewed" and starts the TTL timer for self-destructing messages
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_id: Message identifier
    :type message_id: :class:`int`
    """
    ID: str = Field("updateMessageContentOpened", alias="@type")
    chat_id: int
    message_id: int
    @staticmethod
    def read(q: dict) -> UpdateMessageContentOpened:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageContentOpened.construct(**q)
class UpdateMessageEdited(Update):
    """
    A message was edited. Changes in the message content will come in a separate updateMessageContent
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_id: Message identifier
    :type message_id: :class:`int`
    :param edit_date: Point in time (Unix timestamp) when the message was edited
    :type edit_date: :class:`int`
    :param reply_markup: New message reply markup; may be null, defaults to None
    :type reply_markup: :class:`ReplyMarkup`, optional
    """
    ID: str = Field("updateMessageEdited", alias="@type")
    chat_id: int
    message_id: int
    edit_date: int
    reply_markup: typing.Optional[ReplyMarkup] = None
    @staticmethod
    def read(q: dict) -> UpdateMessageEdited:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageEdited.construct(**q)
class UpdateMessageInteractionInfo(Update):
    """
    The information about interactions with a message has changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_id: Message identifier
    :type message_id: :class:`int`
    :param interaction_info: New information about interactions with the message; may be null, defaults to None
    :type interaction_info: :class:`MessageInteractionInfo`, optional
    """
    ID: str = Field("updateMessageInteractionInfo", alias="@type")
    chat_id: int
    message_id: int
    interaction_info: typing.Optional[MessageInteractionInfo] = None
    @staticmethod
    def read(q: dict) -> UpdateMessageInteractionInfo:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageInteractionInfo.construct(**q)
class UpdateMessageIsPinned(Update):
    """
    The message pinned state was changed
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_id: The message identifier
    :type message_id: :class:`int`
    :param is_pinned: True, if the message is pinned
    :type is_pinned: :class:`bool`
    """
    ID: str = Field("updateMessageIsPinned", alias="@type")
    chat_id: int
    message_id: int
    is_pinned: bool
    @staticmethod
    def read(q: dict) -> UpdateMessageIsPinned:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageIsPinned.construct(**q)
class UpdateMessageLiveLocationViewed(Update):
    """
    A message with a live location was viewed. When the update is received, the application is supposed to update the live location
    :param chat_id: Identifier of the chat with the live location message
    :type chat_id: :class:`int`
    :param message_id: Identifier of the message with live location
    :type message_id: :class:`int`
    """
    ID: str = Field("updateMessageLiveLocationViewed", alias="@type")
    chat_id: int
    message_id: int
    @staticmethod
    def read(q: dict) -> UpdateMessageLiveLocationViewed:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageLiveLocationViewed.construct(**q)
class UpdateMessageMentionRead(Update):
    """
    A message with an unread mention was read
    :param chat_id: Chat identifier
    :type chat_id: :class:`int`
    :param message_id: Message identifier
    :type message_id: :class:`int`
    :param unread_mention_count: The new number of unread mention messages left in the chat
    :type unread_mention_count: :class:`int`
    """
    ID: str = Field("updateMessageMentionRead", alias="@type")
    chat_id: int
    message_id: int
    unread_mention_count: int
    @staticmethod
    def read(q: dict) -> UpdateMessageMentionRead:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageMentionRead.construct(**q)
class UpdateMessageSendAcknowledged(Update):
    """
    A request to send a message has reached the Telegram server. This doesn't mean that the message will be sent successfully or even that the send message request will be processed. This update will be sent only if the option "use_quick_ack" is set to true. This update may be sent multiple times for the same message
    :param chat_id: The chat identifier of the sent message
    :type chat_id: :class:`int`
    :param message_id: A temporary message identifier
    :type message_id: :class:`int`
    """
    ID: str = Field("updateMessageSendAcknowledged", alias="@type")
    chat_id: int
    message_id: int
    @staticmethod
    def read(q: dict) -> UpdateMessageSendAcknowledged:
        """Build the object from a raw TDLib result dict without field validation."""
        return UpdateMessageSendAcknowledged.construct(**q)
class UpdateMessageSendFailed(Update):
    """A message failed to send.

    Be aware that some messages being sent can be irrecoverably deleted, in
    which case updateDeleteMessages will be received instead of this update.

    Attributes:
        message (:class:`Message`): The failed to send message.
        old_message_id (:class:`int`): The previous temporary message identifier.
        error_code (:class:`int`): An error code.
        error_message (:class:`str`): Error message.
    """

    ID: str = Field("updateMessageSendFailed", alias="@type")
    message: Message
    old_message_id: int
    error_code: int
    error_message: str

    @staticmethod
    def read(q: dict) -> UpdateMessageSendFailed:
        parsed = UpdateMessageSendFailed.construct(**q)
        return parsed
class UpdateMessageSendSucceeded(Update):
    """A message has been successfully sent.

    Attributes:
        message (:class:`Message`): The sent message. Usually only the message
            identifier, date, and content are changed, but almost all other
            fields can also change.
        old_message_id (:class:`int`): The previous temporary message identifier.
    """

    ID: str = Field("updateMessageSendSucceeded", alias="@type")
    message: Message
    old_message_id: int

    @staticmethod
    def read(q: dict) -> UpdateMessageSendSucceeded:
        parsed = UpdateMessageSendSucceeded.construct(**q)
        return parsed
class UpdateNewCallSignalingData(Update):
    """New call signaling data arrived.

    Attributes:
        call_id (:class:`int`): The call identifier.
        data (:class:`str`): The data.
    """

    ID: str = Field("updateNewCallSignalingData", alias="@type")
    call_id: int
    data: str

    @staticmethod
    def read(q: dict) -> UpdateNewCallSignalingData:
        parsed = UpdateNewCallSignalingData.construct(**q)
        return parsed
class UpdateNewCallbackQuery(Update):
    """A new incoming callback query; for bots only.

    Attributes:
        id (:class:`int`): Unique query identifier.
        sender_user_id (:class:`int`): Identifier of the user who sent the query.
        chat_id (:class:`int`): Identifier of the chat where the query was sent.
        message_id (:class:`int`): Identifier of the message, from which the
            query originated.
        chat_instance (:class:`int`): Identifier that uniquely corresponds to
            the chat to which the message was sent.
        payload (:class:`CallbackQueryPayload`): Query payload.
    """

    ID: str = Field("updateNewCallbackQuery", alias="@type")
    id: int
    sender_user_id: int
    chat_id: int
    message_id: int
    chat_instance: int
    payload: CallbackQueryPayload

    @staticmethod
    def read(q: dict) -> UpdateNewCallbackQuery:
        parsed = UpdateNewCallbackQuery.construct(**q)
        return parsed
class UpdateNewChat(Update):
    """A new chat has been loaded/created.

    Guaranteed to come before the chat identifier is returned to the
    application; subsequent chat field changes are reported through separate
    updates.

    Attributes:
        chat (:class:`Chat`): The chat.
    """

    ID: str = Field("updateNewChat", alias="@type")
    chat: Chat

    @staticmethod
    def read(q: dict) -> UpdateNewChat:
        parsed = UpdateNewChat.construct(**q)
        return parsed
class UpdateNewChatJoinRequest(Update):
    """A user sent a join request to a chat; for bots only.

    Attributes:
        chat_id (:class:`int`): Chat identifier.
        request (:class:`ChatJoinRequest`): Join request.
        invite_link (:class:`ChatInviteLink`, optional): The invite link,
            which was used to send join request; may be null. Defaults to None.
    """

    ID: str = Field("updateNewChatJoinRequest", alias="@type")
    chat_id: int
    request: ChatJoinRequest
    invite_link: typing.Optional[ChatInviteLink] = None

    @staticmethod
    def read(q: dict) -> UpdateNewChatJoinRequest:
        parsed = UpdateNewChatJoinRequest.construct(**q)
        return parsed
class UpdateNewChosenInlineResult(Update):
    """The user has chosen a result of an inline query; for bots only.

    Attributes:
        sender_user_id (:class:`int`): Identifier of the user who sent the query.
        user_location (:class:`Location`, optional): User location; may be
            null. Defaults to None.
        query (:class:`str`): Text of the query.
        result_id (:class:`str`): Identifier of the chosen result.
        inline_message_id (:class:`str`): Identifier of the sent inline
            message, if known.
    """

    ID: str = Field("updateNewChosenInlineResult", alias="@type")
    sender_user_id: int
    user_location: typing.Optional[Location] = None
    query: str
    result_id: str
    inline_message_id: str

    @staticmethod
    def read(q: dict) -> UpdateNewChosenInlineResult:
        parsed = UpdateNewChosenInlineResult.construct(**q)
        return parsed
class UpdateNewCustomEvent(Update):
    """A new incoming event; for bots only.

    Attributes:
        event (:class:`str`): A JSON-serialized event.
    """

    ID: str = Field("updateNewCustomEvent", alias="@type")
    event: str

    @staticmethod
    def read(q: dict) -> UpdateNewCustomEvent:
        parsed = UpdateNewCustomEvent.construct(**q)
        return parsed
class UpdateNewCustomQuery(Update):
    """A new incoming query; for bots only.

    Attributes:
        id (:class:`int`): The query identifier.
        data (:class:`str`): JSON-serialized query data.
        timeout (:class:`int`): Query timeout.
    """

    ID: str = Field("updateNewCustomQuery", alias="@type")
    id: int
    data: str
    timeout: int

    @staticmethod
    def read(q: dict) -> UpdateNewCustomQuery:
        parsed = UpdateNewCustomQuery.construct(**q)
        return parsed
class UpdateNewInlineCallbackQuery(Update):
    """A new incoming callback query from a message sent via a bot; for bots only.

    Attributes:
        id (:class:`int`): Unique query identifier.
        sender_user_id (:class:`int`): Identifier of the user who sent the query.
        inline_message_id (:class:`str`): Identifier of the inline message,
            from which the query originated.
        chat_instance (:class:`int`): An identifier uniquely corresponding to
            the chat a message was sent to.
        payload (:class:`CallbackQueryPayload`): Query payload.
    """

    ID: str = Field("updateNewInlineCallbackQuery", alias="@type")
    id: int
    sender_user_id: int
    inline_message_id: str
    chat_instance: int
    payload: CallbackQueryPayload

    @staticmethod
    def read(q: dict) -> UpdateNewInlineCallbackQuery:
        parsed = UpdateNewInlineCallbackQuery.construct(**q)
        return parsed
class UpdateNewInlineQuery(Update):
    """A new incoming inline query; for bots only.

    Attributes:
        id (:class:`int`): Unique query identifier.
        sender_user_id (:class:`int`): Identifier of the user who sent the query.
        user_location (:class:`Location`, optional): User location; may be
            null. Defaults to None.
        chat_type (:class:`ChatType`, optional): The type of the chat, from
            which the query originated; may be null if unknown. Defaults to None.
        query (:class:`str`): Text of the query.
        offset (:class:`str`): Offset of the first entry to return.
    """

    ID: str = Field("updateNewInlineQuery", alias="@type")
    id: int
    sender_user_id: int
    user_location: typing.Optional[Location] = None
    chat_type: typing.Optional[ChatType] = None
    query: str
    offset: str

    @staticmethod
    def read(q: dict) -> UpdateNewInlineQuery:
        parsed = UpdateNewInlineQuery.construct(**q)
        return parsed
class UpdateNewMessage(Update):
    """A new message was received; can also be an outgoing message.

    Attributes:
        message (:class:`Message`): The new message.
    """

    ID: str = Field("updateNewMessage", alias="@type")
    message: Message

    @staticmethod
    def read(q: dict) -> UpdateNewMessage:
        parsed = UpdateNewMessage.construct(**q)
        return parsed
class UpdateNewPreCheckoutQuery(Update):
    """A new incoming pre-checkout query; for bots only.

    Contains full information about a checkout.

    Attributes:
        id (:class:`int`): Unique query identifier.
        sender_user_id (:class:`int`): Identifier of the user who sent the query.
        currency (:class:`str`): Currency for the product price.
        total_amount (:class:`int`): Total price for the product, in the
            smallest units of the currency.
        invoice_payload (:class:`str`): Invoice payload.
        shipping_option_id (:class:`str`): Identifier of a shipping option
            chosen by the user; may be empty if not applicable.
        order_info (:class:`OrderInfo`, optional): Information about the
            order; may be null. Defaults to None.
    """

    ID: str = Field("updateNewPreCheckoutQuery", alias="@type")
    id: int
    sender_user_id: int
    currency: str
    total_amount: int
    invoice_payload: str
    shipping_option_id: str
    order_info: typing.Optional[OrderInfo] = None

    @staticmethod
    def read(q: dict) -> UpdateNewPreCheckoutQuery:
        parsed = UpdateNewPreCheckoutQuery.construct(**q)
        return parsed
class UpdateNewShippingQuery(Update):
    """A new incoming shipping query; for bots only.

    Only for invoices with flexible price.

    Attributes:
        id (:class:`int`): Unique query identifier.
        sender_user_id (:class:`int`): Identifier of the user who sent the query.
        invoice_payload (:class:`str`): Invoice payload.
        shipping_address (:class:`Address`): User shipping address.
    """

    ID: str = Field("updateNewShippingQuery", alias="@type")
    id: int
    sender_user_id: int
    invoice_payload: str
    shipping_address: Address

    @staticmethod
    def read(q: dict) -> UpdateNewShippingQuery:
        parsed = UpdateNewShippingQuery.construct(**q)
        return parsed
class UpdateNotification(Update):
    """A notification was changed.

    Attributes:
        notification_group_id (:class:`int`): Unique notification group identifier.
        notification (:class:`Notification`): Changed notification.
    """

    ID: str = Field("updateNotification", alias="@type")
    notification_group_id: int
    notification: Notification

    @staticmethod
    def read(q: dict) -> UpdateNotification:
        parsed = UpdateNotification.construct(**q)
        return parsed
class UpdateNotificationGroup(Update):
    """A list of active notifications in a notification group has changed.

    Attributes:
        notification_group_id (:class:`int`): Unique notification group identifier.
        type_ (:class:`NotificationGroupType`): New type of the notification
            group (serialized as ``type``).
        chat_id (:class:`int`): Identifier of a chat to which all
            notifications in the group belong.
        notification_settings_chat_id (:class:`int`): Chat identifier, which
            notification settings must be applied to the added notifications.
        is_silent (:class:`bool`): True, if the notifications must be shown
            without sound.
        total_count (:class:`int`): Total number of unread notifications in
            the group, can be bigger than number of active notifications.
        added_notifications (:class:`list[Notification]`): List of added group
            notifications, sorted by notification ID.
        removed_notification_ids (:class:`list[int]`): Identifiers of removed
            group notifications, sorted by notification ID.
    """

    ID: str = Field("updateNotificationGroup", alias="@type")
    notification_group_id: int
    # "type" is a Python builtin, so the field is named type_ and aliased.
    # Double quotes used for consistency with every other alias in this file.
    type_: NotificationGroupType = Field(..., alias="type")
    chat_id: int
    notification_settings_chat_id: int
    is_silent: bool
    total_count: int
    added_notifications: list[Notification]
    removed_notification_ids: list[int]

    @staticmethod
    def read(q: dict) -> UpdateNotificationGroup:
        return UpdateNotificationGroup.construct(**q)
class UpdateOption(Update):
    """An option changed its value.

    Attributes:
        name (:class:`str`): The option name.
        value (:class:`OptionValue`): The new option value.
    """

    ID: str = Field("updateOption", alias="@type")
    name: str
    value: OptionValue

    @staticmethod
    def read(q: dict) -> UpdateOption:
        parsed = UpdateOption.construct(**q)
        return parsed
class UpdatePoll(Update):
    """A poll was updated; for bots only.

    Attributes:
        poll (:class:`Poll`): New data about the poll.
    """

    ID: str = Field("updatePoll", alias="@type")
    poll: Poll

    @staticmethod
    def read(q: dict) -> UpdatePoll:
        parsed = UpdatePoll.construct(**q)
        return parsed
class UpdatePollAnswer(Update):
    """A user changed the answer to a poll; for bots only.

    Attributes:
        poll_id (:class:`int`): Unique poll identifier.
        user_id (:class:`int`): The user, who changed the answer to the poll.
        option_ids (:class:`list[int]`): 0-based identifiers of answer
            options, chosen by the user.
    """

    ID: str = Field("updatePollAnswer", alias="@type")
    poll_id: int
    user_id: int
    option_ids: list[int]

    @staticmethod
    def read(q: dict) -> UpdatePollAnswer:
        parsed = UpdatePollAnswer.construct(**q)
        return parsed
class UpdateRecentStickers(Update):
    """The list of recently used stickers was updated.

    Attributes:
        is_attached (:class:`bool`): True, if the list of stickers attached
            to photo or video files was updated, otherwise the list of sent
            stickers is updated.
        sticker_ids (:class:`list[int]`): The new list of file identifiers of
            recently used stickers.
    """

    ID: str = Field("updateRecentStickers", alias="@type")
    is_attached: bool
    sticker_ids: list[int]

    @staticmethod
    def read(q: dict) -> UpdateRecentStickers:
        parsed = UpdateRecentStickers.construct(**q)
        return parsed
class UpdateSavedAnimations(Update):
    """The list of saved animations was updated.

    Attributes:
        animation_ids (:class:`list[int]`): The new list of file identifiers
            of saved animations.
    """

    ID: str = Field("updateSavedAnimations", alias="@type")
    animation_ids: list[int]

    @staticmethod
    def read(q: dict) -> UpdateSavedAnimations:
        parsed = UpdateSavedAnimations.construct(**q)
        return parsed
class UpdateScopeNotificationSettings(Update):
    """Notification settings for some type of chats were updated.

    Attributes:
        scope (:class:`NotificationSettingsScope`): Types of chats for which
            notification settings were updated.
        notification_settings (:class:`ScopeNotificationSettings`): The new
            notification settings.
    """

    ID: str = Field("updateScopeNotificationSettings", alias="@type")
    scope: NotificationSettingsScope
    notification_settings: ScopeNotificationSettings

    @staticmethod
    def read(q: dict) -> UpdateScopeNotificationSettings:
        parsed = UpdateScopeNotificationSettings.construct(**q)
        return parsed
class UpdateSecretChat(Update):
    """Some data of a secret chat has changed.

    Guaranteed to come before the secret chat identifier is returned to the
    application.

    Attributes:
        secret_chat (:class:`SecretChat`): New data about the secret chat.
    """

    ID: str = Field("updateSecretChat", alias="@type")
    secret_chat: SecretChat

    @staticmethod
    def read(q: dict) -> UpdateSecretChat:
        parsed = UpdateSecretChat.construct(**q)
        return parsed
class UpdateSelectedBackground(Update):
    """The selected background has changed.

    Attributes:
        for_dark_theme (:class:`bool`): True, if background for dark theme
            has changed.
        background (:class:`Background`, optional): The new selected
            background; may be null. Defaults to None.
    """

    ID: str = Field("updateSelectedBackground", alias="@type")
    for_dark_theme: bool
    background: typing.Optional[Background] = None

    @staticmethod
    def read(q: dict) -> UpdateSelectedBackground:
        parsed = UpdateSelectedBackground.construct(**q)
        return parsed
class UpdateServiceNotification(Update):
    """A service notification from the server was received.

    Upon receiving this the application must show a popup with the content
    of the notification.

    Attributes:
        type_ (:class:`str`): Notification type (serialized as ``type``). If
            type begins with "AUTH_KEY_DROP_", then two buttons "Cancel" and
            "Log out" must be shown under notification; if user presses the
            second, all local data must be destroyed using Destroy method.
        content (:class:`MessageContent`): Notification content.
    """

    ID: str = Field("updateServiceNotification", alias="@type")
    # "type" is a Python builtin, so the field is named type_ and aliased.
    # Double quotes used for consistency with every other alias in this file.
    type_: str = Field(..., alias="type")
    content: MessageContent

    @staticmethod
    def read(q: dict) -> UpdateServiceNotification:
        return UpdateServiceNotification.construct(**q)
class UpdateStickerSet(Update):
    """A sticker set has changed.

    Attributes:
        sticker_set (:class:`StickerSet`): The sticker set.
    """

    ID: str = Field("updateStickerSet", alias="@type")
    sticker_set: StickerSet

    @staticmethod
    def read(q: dict) -> UpdateStickerSet:
        parsed = UpdateStickerSet.construct(**q)
        return parsed
class UpdateSuggestedActions(Update):
    """The list of suggested to the user actions has changed.

    Attributes:
        added_actions (:class:`list[SuggestedAction]`): Added suggested actions.
        removed_actions (:class:`list[SuggestedAction]`): Removed suggested actions.
    """

    ID: str = Field("updateSuggestedActions", alias="@type")
    added_actions: list[SuggestedAction]
    removed_actions: list[SuggestedAction]

    @staticmethod
    def read(q: dict) -> UpdateSuggestedActions:
        parsed = UpdateSuggestedActions.construct(**q)
        return parsed
class UpdateSupergroup(Update):
    """Some data of a supergroup or a channel has changed.

    Guaranteed to come before the supergroup identifier is returned to the
    application.

    Attributes:
        supergroup (:class:`Supergroup`): New data about the supergroup.
    """

    ID: str = Field("updateSupergroup", alias="@type")
    supergroup: Supergroup

    @staticmethod
    def read(q: dict) -> UpdateSupergroup:
        parsed = UpdateSupergroup.construct(**q)
        return parsed
class UpdateSupergroupFullInfo(Update):
    """Some data in supergroupFullInfo has been changed.

    Attributes:
        supergroup_id (:class:`int`): Identifier of the supergroup or channel.
        supergroup_full_info (:class:`SupergroupFullInfo`): New full
            information about the supergroup.
    """

    ID: str = Field("updateSupergroupFullInfo", alias="@type")
    supergroup_id: int
    supergroup_full_info: SupergroupFullInfo

    @staticmethod
    def read(q: dict) -> UpdateSupergroupFullInfo:
        parsed = UpdateSupergroupFullInfo.construct(**q)
        return parsed
class UpdateTermsOfService(Update):
    """New terms of service must be accepted by the user.

    If the terms of service are declined, then the deleteAccount method must
    be called with the reason "Decline ToS update".

    Attributes:
        terms_of_service_id (:class:`str`): Identifier of the terms of service.
        terms_of_service (:class:`TermsOfService`): The new terms of service.
    """

    ID: str = Field("updateTermsOfService", alias="@type")
    terms_of_service_id: str
    terms_of_service: TermsOfService

    @staticmethod
    def read(q: dict) -> UpdateTermsOfService:
        parsed = UpdateTermsOfService.construct(**q)
        return parsed
class UpdateTrendingStickerSets(Update):
    """The list of trending sticker sets was updated or some of them were viewed.

    Attributes:
        sticker_sets (:class:`StickerSets`): The prefix of the list of
            trending sticker sets with the newest trending sticker sets.
    """

    ID: str = Field("updateTrendingStickerSets", alias="@type")
    sticker_sets: StickerSets

    @staticmethod
    def read(q: dict) -> UpdateTrendingStickerSets:
        parsed = UpdateTrendingStickerSets.construct(**q)
        return parsed
class UpdateUnreadChatCount(Update):
    """Number of unread chats has changed.

    Counts chats with unread messages or marked as unread. Sent only if the
    message database is used.

    Attributes:
        chat_list (:class:`ChatList`): The chat list with changed number of
            unread messages.
        total_count (:class:`int`): Approximate total number of chats in the
            chat list.
        unread_count (:class:`int`): Total number of unread chats.
        unread_unmuted_count (:class:`int`): Total number of unread unmuted chats.
        marked_as_unread_count (:class:`int`): Total number of chats marked
            as unread.
        marked_as_unread_unmuted_count (:class:`int`): Total number of
            unmuted chats marked as unread.
    """

    ID: str = Field("updateUnreadChatCount", alias="@type")
    chat_list: ChatList
    total_count: int
    unread_count: int
    unread_unmuted_count: int
    marked_as_unread_count: int
    marked_as_unread_unmuted_count: int

    @staticmethod
    def read(q: dict) -> UpdateUnreadChatCount:
        parsed = UpdateUnreadChatCount.construct(**q)
        return parsed
class UpdateUnreadMessageCount(Update):
    """Number of unread messages in a chat list has changed.

    Sent only if the message database is used.

    Attributes:
        chat_list (:class:`ChatList`): The chat list with changed number of
            unread messages.
        unread_count (:class:`int`): Total number of unread messages.
        unread_unmuted_count (:class:`int`): Total number of unread messages
            in unmuted chats.
    """

    ID: str = Field("updateUnreadMessageCount", alias="@type")
    chat_list: ChatList
    unread_count: int
    unread_unmuted_count: int

    @staticmethod
    def read(q: dict) -> UpdateUnreadMessageCount:
        parsed = UpdateUnreadMessageCount.construct(**q)
        return parsed
class UpdateUser(Update):
    """Some data of a user has changed.

    Guaranteed to come before the user identifier is returned to the
    application.

    Attributes:
        user (:class:`User`): New data about the user.
    """

    ID: str = Field("updateUser", alias="@type")
    user: User

    @staticmethod
    def read(q: dict) -> UpdateUser:
        parsed = UpdateUser.construct(**q)
        return parsed
class UpdateUserFullInfo(Update):
    """Some data in userFullInfo has been changed.

    Attributes:
        user_id (:class:`int`): User identifier.
        user_full_info (:class:`UserFullInfo`): New full information about
            the user.
    """

    ID: str = Field("updateUserFullInfo", alias="@type")
    user_id: int
    user_full_info: UserFullInfo

    @staticmethod
    def read(q: dict) -> UpdateUserFullInfo:
        parsed = UpdateUserFullInfo.construct(**q)
        return parsed
class UpdateUserPrivacySettingRules(Update):
    """Some privacy setting rules have been changed.

    Attributes:
        setting (:class:`UserPrivacySetting`): The privacy setting.
        rules (:class:`UserPrivacySettingRules`): New privacy rules.
    """

    ID: str = Field("updateUserPrivacySettingRules", alias="@type")
    setting: UserPrivacySetting
    rules: UserPrivacySettingRules

    @staticmethod
    def read(q: dict) -> UpdateUserPrivacySettingRules:
        parsed = UpdateUserPrivacySettingRules.construct(**q)
        return parsed
class UpdateUserStatus(Update):
    """The user went online or offline.

    Attributes:
        user_id (:class:`int`): User identifier.
        status (:class:`UserStatus`): New status of the user.
    """

    ID: str = Field("updateUserStatus", alias="@type")
    user_id: int
    status: UserStatus

    @staticmethod
    def read(q: dict) -> UpdateUserStatus:
        parsed = UpdateUserStatus.construct(**q)
        return parsed
class UpdateUsersNearby(Update):
    """The list of users nearby has changed.

    The update is guaranteed to be sent only 60 seconds after a successful
    searchChatsNearby request.

    Attributes:
        users_nearby (:class:`list[ChatNearby]`): The new list of users nearby.
    """

    ID: str = Field("updateUsersNearby", alias="@type")
    users_nearby: list[ChatNearby]

    @staticmethod
    def read(q: dict) -> UpdateUsersNearby:
        parsed = UpdateUsersNearby.construct(**q)
        return parsed
| [
"pydantic.Field"
] | [((3037, 3067), 'pydantic.Field', 'Field', (['"""update"""'], {'alias': '"""@type"""'}), "('update', alias='@type')\n", (3042, 3067), False, 'from pydantic import Field\n'), ((3491, 3540), 'pydantic.Field', 'Field', (['"""updateActiveNotifications"""'], {'alias': '"""@type"""'}), "('updateActiveNotifications', alias='@type')\n", (3496, 3540), False, 'from pydantic import Field\n'), ((4268, 4325), 'pydantic.Field', 'Field', (['"""updateAnimatedEmojiMessageClicked"""'], {'alias': '"""@type"""'}), "('updateAnimatedEmojiMessageClicked', alias='@type')\n", (4273, 4325), False, 'from pydantic import Field\n'), ((4923, 4978), 'pydantic.Field', 'Field', (['"""updateAnimationSearchParameters"""'], {'alias': '"""@type"""'}), "('updateAnimationSearchParameters', alias='@type')\n", (4928, 4978), False, 'from pydantic import Field\n'), ((5401, 5449), 'pydantic.Field', 'Field', (['"""updateAuthorizationState"""'], {'alias': '"""@type"""'}), "('updateAuthorizationState', alias='@type')\n", (5406, 5449), False, 'from pydantic import Field\n'), ((5929, 5969), 'pydantic.Field', 'Field', (['"""updateBasicGroup"""'], {'alias': '"""@type"""'}), "('updateBasicGroup', alias='@type')\n", (5934, 5969), False, 'from pydantic import Field\n'), ((6474, 6522), 'pydantic.Field', 'Field', (['"""updateBasicGroupFullInfo"""'], {'alias': '"""@type"""'}), "('updateBasicGroupFullInfo', alias='@type')\n", (6479, 6522), False, 'from pydantic import Field\n'), ((6921, 6955), 'pydantic.Field', 'Field', (['"""updateCall"""'], {'alias': '"""@type"""'}), "('updateCall', alias='@type')\n", (6926, 6955), False, 'from pydantic import Field\n'), ((7611, 7651), 'pydantic.Field', 'Field', (['"""updateChatAction"""'], {'alias': '"""@type"""'}), "('updateChatAction', alias='@type')\n", (7616, 7651), False, 'from pydantic import Field\n'), ((8185, 8228), 'pydantic.Field', 'Field', (['"""updateChatActionBar"""'], {'alias': '"""@type"""'}), "('updateChatActionBar', alias='@type')\n", (8190, 8228), False, 'from 
pydantic import Field\n'), ((8835, 8895), 'pydantic.Field', 'Field', (['"""updateChatDefaultDisableNotification"""'], {'alias': '"""@type"""'}), "('updateChatDefaultDisableNotification', alias='@type')\n", (8840, 8895), False, 'from pydantic import Field\n'), ((9717, 9763), 'pydantic.Field', 'Field', (['"""updateChatDraftMessage"""'], {'alias': '"""@type"""'}), "('updateChatDraftMessage', alias='@type')\n", (9722, 9763), False, 'from pydantic import Field\n'), ((10234, 10275), 'pydantic.Field', 'Field', (['"""updateChatFilters"""'], {'alias': '"""@type"""'}), "('updateChatFilters', alias='@type')\n", (10239, 10275), False, 'from pydantic import Field\n'), ((10759, 10812), 'pydantic.Field', 'Field', (['"""updateChatHasProtectedContent"""'], {'alias': '"""@type"""'}), "('updateChatHasProtectedContent', alias='@type')\n", (10764, 10812), False, 'from pydantic import Field\n'), ((11332, 11386), 'pydantic.Field', 'Field', (['"""updateChatHasScheduledMessages"""'], {'alias': '"""@type"""'}), "('updateChatHasScheduledMessages', alias='@type')\n", (11337, 11386), False, 'from pydantic import Field\n'), ((11844, 11887), 'pydantic.Field', 'Field', (['"""updateChatIsBlocked"""'], {'alias': '"""@type"""'}), "('updateChatIsBlocked', alias='@type')\n", (11849, 11887), False, 'from pydantic import Field\n'), ((12353, 12403), 'pydantic.Field', 'Field', (['"""updateChatIsMarkedAsUnread"""'], {'alias': '"""@type"""'}), "('updateChatIsMarkedAsUnread', alias='@type')\n", (12358, 12403), False, 'from pydantic import Field\n'), ((13175, 13220), 'pydantic.Field', 'Field', (['"""updateChatLastMessage"""'], {'alias': '"""@type"""'}), "('updateChatLastMessage', alias='@type')\n", (13180, 13220), False, 'from pydantic import Field\n'), ((14241, 14281), 'pydantic.Field', 'Field', (['"""updateChatMember"""'], {'alias': '"""@type"""'}), "('updateChatMember', alias='@type')\n", (14246, 14281), False, 'from pydantic import Field\n'), ((14993, 15040), 'pydantic.Field', 'Field', 
(['"""updateChatMessageSender"""'], {'alias': '"""@type"""'}), "('updateChatMessageSender', alias='@type')\n", (14998, 15040), False, 'from pydantic import Field\n'), ((15539, 15583), 'pydantic.Field', 'Field', (['"""updateChatMessageTtl"""'], {'alias': '"""@type"""'}), "('updateChatMessageTtl', alias='@type')\n", (15544, 15583), False, 'from pydantic import Field\n'), ((16082, 16136), 'pydantic.Field', 'Field', (['"""updateChatNotificationSettings"""'], {'alias': '"""@type"""'}), "('updateChatNotificationSettings', alias='@type')\n", (16087, 16136), False, 'from pydantic import Field\n'), ((16844, 16895), 'pydantic.Field', 'Field', (['"""updateChatOnlineMemberCount"""'], {'alias': '"""@type"""'}), "('updateChatOnlineMemberCount', alias='@type')\n", (16849, 16895), False, 'from pydantic import Field\n'), ((17461, 17514), 'pydantic.Field', 'Field', (['"""updateChatPendingJoinRequests"""'], {'alias': '"""@type"""'}), "('updateChatPendingJoinRequests', alias='@type')\n", (17466, 17514), False, 'from pydantic import Field\n'), ((18022, 18067), 'pydantic.Field', 'Field', (['"""updateChatPermissions"""'], {'alias': '"""@type"""'}), "('updateChatPermissions', alias='@type')\n", (18027, 18067), False, 'from pydantic import Field\n'), ((18531, 18570), 'pydantic.Field', 'Field', (['"""updateChatPhoto"""'], {'alias': '"""@type"""'}), "('updateChatPhoto', alias='@type')\n", (18536, 18570), False, 'from pydantic import Field\n'), ((19183, 19225), 'pydantic.Field', 'Field', (['"""updateChatPosition"""'], {'alias': '"""@type"""'}), "('updateChatPosition', alias='@type')\n", (19188, 19225), False, 'from pydantic import Field\n'), ((19865, 19908), 'pydantic.Field', 'Field', (['"""updateChatReadInbox"""'], {'alias': '"""@type"""'}), "('updateChatReadInbox', alias='@type')\n", (19870, 19908), False, 'from pydantic import Field\n'), ((20416, 20460), 'pydantic.Field', 'Field', (['"""updateChatReadOutbox"""'], {'alias': '"""@type"""'}), "('updateChatReadOutbox', alias='@type')\n", 
(20421, 20460), False, 'from pydantic import Field\n'), ((21158, 21203), 'pydantic.Field', 'Field', (['"""updateChatReplyMarkup"""'], {'alias': '"""@type"""'}), "('updateChatReplyMarkup', alias='@type')\n", (21163, 21203), False, 'from pydantic import Field\n'), ((21684, 21723), 'pydantic.Field', 'Field', (['"""updateChatTheme"""'], {'alias': '"""@type"""'}), "('updateChatTheme', alias='@type')\n", (21689, 21723), False, 'from pydantic import Field\n'), ((22093, 22133), 'pydantic.Field', 'Field', (['"""updateChatThemes"""'], {'alias': '"""@type"""'}), "('updateChatThemes', alias='@type')\n", (22098, 22133), False, 'from pydantic import Field\n'), ((22526, 22565), 'pydantic.Field', 'Field', (['"""updateChatTitle"""'], {'alias': '"""@type"""'}), "('updateChatTitle', alias='@type')\n", (22531, 22565), False, 'from pydantic import Field\n'), ((23044, 23096), 'pydantic.Field', 'Field', (['"""updateChatUnreadMentionCount"""'], {'alias': '"""@type"""'}), "('updateChatUnreadMentionCount', alias='@type')\n", (23049, 23096), False, 'from pydantic import Field\n'), ((23556, 23599), 'pydantic.Field', 'Field', (['"""updateChatVideoChat"""'], {'alias': '"""@type"""'}), "('updateChatVideoChat', alias='@type')\n", (23561, 23599), False, 'from pydantic import Field\n'), ((24052, 24097), 'pydantic.Field', 'Field', (['"""updateConnectionState"""'], {'alias': '"""@type"""'}), "('updateConnectionState', alias='@type')\n", (24057, 24097), False, 'from pydantic import Field\n'), ((24864, 24908), 'pydantic.Field', 'Field', (['"""updateDeleteMessages"""'], {'alias': '"""@type"""'}), "('updateDeleteMessages', alias='@type')\n", (24869, 24908), False, 'from pydantic import Field\n'), ((25333, 25373), 'pydantic.Field', 'Field', (['"""updateDiceEmojis"""'], {'alias': '"""@type"""'}), "('updateDiceEmojis', alias='@type')\n", (25338, 25373), False, 'from pydantic import Field\n'), ((25752, 25798), 'pydantic.Field', 'Field', (['"""updateFavoriteStickers"""'], {'alias': '"""@type"""'}), 
"('updateFavoriteStickers', alias='@type')\n", (25757, 25798), False, 'from pydantic import Field\n'), ((26128, 26162), 'pydantic.Field', 'Field', (['"""updateFile"""'], {'alias': '"""@type"""'}), "('updateFile', alias='@type')\n", (26133, 26162), False, 'from pydantic import Field\n'), ((27079, 27128), 'pydantic.Field', 'Field', (['"""updateFileGenerationStart"""'], {'alias': '"""@type"""'}), "('updateFileGenerationStart', alias='@type')\n", (27084, 27128), False, 'from pydantic import Field\n'), ((27580, 27628), 'pydantic.Field', 'Field', (['"""updateFileGenerationStop"""'], {'alias': '"""@type"""'}), "('updateFileGenerationStop', alias='@type')\n", (27585, 27628), False, 'from pydantic import Field\n'), ((27990, 28029), 'pydantic.Field', 'Field', (['"""updateGroupCall"""'], {'alias': '"""@type"""'}), "('updateGroupCall', alias='@type')\n", (27995, 28029), False, 'from pydantic import Field\n'), ((28635, 28685), 'pydantic.Field', 'Field', (['"""updateGroupCallParticipant"""'], {'alias': '"""@type"""'}), "('updateGroupCallParticipant', alias='@type')\n", (28640, 28685), False, 'from pydantic import Field\n'), ((29492, 29546), 'pydantic.Field', 'Field', (['"""updateHavePendingNotifications"""'], {'alias': '"""@type"""'}), "('updateHavePendingNotifications', alias='@type')\n", (29497, 29546), False, 'from pydantic import Field\n'), ((30140, 30190), 'pydantic.Field', 'Field', (['"""updateInstalledStickerSets"""'], {'alias': '"""@type"""'}), "('updateInstalledStickerSets', alias='@type')\n", (30145, 30190), False, 'from pydantic import Field\n'), ((30865, 30914), 'pydantic.Field', 'Field', (['"""updateLanguagePackStrings"""'], {'alias': '"""@type"""'}), "('updateLanguagePackStrings', alias='@type')\n", (30870, 30914), False, 'from pydantic import Field\n'), ((31496, 31540), 'pydantic.Field', 'Field', (['"""updateMessageContent"""'], {'alias': '"""@type"""'}), "('updateMessageContent', alias='@type')\n", (31501, 31540), False, 'from pydantic import Field\n'), ((32128, 
32178), 'pydantic.Field', 'Field', (['"""updateMessageContentOpened"""'], {'alias': '"""@type"""'}), "('updateMessageContentOpened', alias='@type')\n", (32133, 32178), False, 'from pydantic import Field\n'), ((32936, 32979), 'pydantic.Field', 'Field', (['"""updateMessageEdited"""'], {'alias': '"""@type"""'}), "('updateMessageEdited', alias='@type')\n", (32941, 32979), False, 'from pydantic import Field\n'), ((33695, 33747), 'pydantic.Field', 'Field', (['"""updateMessageInteractionInfo"""'], {'alias': '"""@type"""'}), "('updateMessageInteractionInfo', alias='@type')\n", (33700, 33747), False, 'from pydantic import Field\n'), ((34355, 34400), 'pydantic.Field', 'Field', (['"""updateMessageIsPinned"""'], {'alias': '"""@type"""'}), "('updateMessageIsPinned', alias='@type')\n", (34360, 34400), False, 'from pydantic import Field\n'), ((35013, 35068), 'pydantic.Field', 'Field', (['"""updateMessageLiveLocationViewed"""'], {'alias': '"""@type"""'}), "('updateMessageLiveLocationViewed', alias='@type')\n", (35018, 35068), False, 'from pydantic import Field\n'), ((35666, 35714), 'pydantic.Field', 'Field', (['"""updateMessageMentionRead"""'], {'alias': '"""@type"""'}), "('updateMessageMentionRead', alias='@type')\n", (35671, 35714), False, 'from pydantic import Field\n'), ((36500, 36553), 'pydantic.Field', 'Field', (['"""updateMessageSendAcknowledged"""'], {'alias': '"""@type"""'}), "('updateMessageSendAcknowledged', alias='@type')\n", (36505, 36553), False, 'from pydantic import Field\n'), ((37338, 37385), 'pydantic.Field', 'Field', (['"""updateMessageSendFailed"""'], {'alias': '"""@type"""'}), "('updateMessageSendFailed', alias='@type')\n", (37343, 37385), False, 'from pydantic import Field\n'), ((38022, 38072), 'pydantic.Field', 'Field', (['"""updateMessageSendSucceeded"""'], {'alias': '"""@type"""'}), "('updateMessageSendSucceeded', alias='@type')\n", (38027, 38072), False, 'from pydantic import Field\n'), ((38500, 38550), 'pydantic.Field', 'Field', 
(['"""updateNewCallSignalingData"""'], {'alias': '"""@type"""'}), "('updateNewCallSignalingData', alias='@type')\n", (38505, 38550), False, 'from pydantic import Field\n'), ((39483, 39529), 'pydantic.Field', 'Field', (['"""updateNewCallbackQuery"""'], {'alias': '"""@type"""'}), "('updateNewCallbackQuery', alias='@type')\n", (39488, 39529), False, 'from pydantic import Field\n'), ((40107, 40144), 'pydantic.Field', 'Field', (['"""updateNewChat"""'], {'alias': '"""@type"""'}), "('updateNewChat', alias='@type')\n", (40112, 40144), False, 'from pydantic import Field\n'), ((40722, 40770), 'pydantic.Field', 'Field', (['"""updateNewChatJoinRequest"""'], {'alias': '"""@type"""'}), "('updateNewChatJoinRequest', alias='@type')\n", (40727, 40770), False, 'from pydantic import Field\n'), ((41677, 41728), 'pydantic.Field', 'Field', (['"""updateNewChosenInlineResult"""'], {'alias': '"""@type"""'}), "('updateNewChosenInlineResult', alias='@type')\n", (41682, 41728), False, 'from pydantic import Field\n'), ((42188, 42232), 'pydantic.Field', 'Field', (['"""updateNewCustomEvent"""'], {'alias': '"""@type"""'}), "('updateNewCustomEvent', alias='@type')\n", (42193, 42232), False, 'from pydantic import Field\n'), ((42696, 42740), 'pydantic.Field', 'Field', (['"""updateNewCustomQuery"""'], {'alias': '"""@type"""'}), "('updateNewCustomQuery', alias='@type')\n", (42701, 42740), False, 'from pydantic import Field\n'), ((43617, 43669), 'pydantic.Field', 'Field', (['"""updateNewInlineCallbackQuery"""'], {'alias': '"""@type"""'}), "('updateNewInlineCallbackQuery', alias='@type')\n", (43622, 43669), False, 'from pydantic import Field\n'), ((44691, 44735), 'pydantic.Field', 'Field', (['"""updateNewInlineQuery"""'], {'alias': '"""@type"""'}), "('updateNewInlineQuery', alias='@type')\n", (44696, 44735), False, 'from pydantic import Field\n'), ((45231, 45271), 'pydantic.Field', 'Field', (['"""updateNewMessage"""'], {'alias': '"""@type"""'}), "('updateNewMessage', alias='@type')\n", (45236, 45271), 
False, 'from pydantic import Field\n'), ((46375, 46424), 'pydantic.Field', 'Field', (['"""updateNewPreCheckoutQuery"""'], {'alias': '"""@type"""'}), "('updateNewPreCheckoutQuery', alias='@type')\n", (46380, 46424), False, 'from pydantic import Field\n'), ((47269, 47315), 'pydantic.Field', 'Field', (['"""updateNewShippingQuery"""'], {'alias': '"""@type"""'}), "('updateNewShippingQuery', alias='@type')\n", (47274, 47315), False, 'from pydantic import Field\n'), ((47850, 47892), 'pydantic.Field', 'Field', (['"""updateNotification"""'], {'alias': '"""@type"""'}), "('updateNotification', alias='@type')\n", (47855, 47892), False, 'from pydantic import Field\n'), ((49365, 49412), 'pydantic.Field', 'Field', (['"""updateNotificationGroup"""'], {'alias': '"""@type"""'}), "('updateNotificationGroup', alias='@type')\n", (49370, 49412), False, 'from pydantic import Field\n'), ((49479, 49503), 'pydantic.Field', 'Field', (['...'], {'alias': '"""type"""'}), "(..., alias='type')\n", (49484, 49503), False, 'from pydantic import Field\n'), ((50055, 50091), 'pydantic.Field', 'Field', (['"""updateOption"""'], {'alias': '"""@type"""'}), "('updateOption', alias='@type')\n", (50060, 50091), False, 'from pydantic import Field\n'), ((50408, 50442), 'pydantic.Field', 'Field', (['"""updatePoll"""'], {'alias': '"""@type"""'}), "('updatePoll', alias='@type')\n", (50413, 50442), False, 'from pydantic import Field\n'), ((50989, 51029), 'pydantic.Field', 'Field', (['"""updatePollAnswer"""'], {'alias': '"""@type"""'}), "('updatePollAnswer', alias='@type')\n", (50994, 51029), False, 'from pydantic import Field\n'), ((51643, 51687), 'pydantic.Field', 'Field', (['"""updateRecentStickers"""'], {'alias': '"""@type"""'}), "('updateRecentStickers', alias='@type')\n", (51648, 51687), False, 'from pydantic import Field\n'), ((52102, 52147), 'pydantic.Field', 'Field', (['"""updateSavedAnimations"""'], {'alias': '"""@type"""'}), "('updateSavedAnimations', alias='@type')\n", (52107, 52147), False, 'from 
pydantic import Field\n'), ((52715, 52770), 'pydantic.Field', 'Field', (['"""updateScopeNotificationSettings"""'], {'alias': '"""@type"""'}), "('updateScopeNotificationSettings', alias='@type')\n", (52720, 52770), False, 'from pydantic import Field\n'), ((53316, 53356), 'pydantic.Field', 'Field', (['"""updateSecretChat"""'], {'alias': '"""@type"""'}), "('updateSecretChat', alias='@type')\n", (53321, 53356), False, 'from pydantic import Field\n'), ((53870, 53918), 'pydantic.Field', 'Field', (['"""updateSelectedBackground"""'], {'alias': '"""@type"""'}), "('updateSelectedBackground', alias='@type')\n", (53875, 53918), False, 'from pydantic import Field\n'), ((54702, 54751), 'pydantic.Field', 'Field', (['"""updateServiceNotification"""'], {'alias': '"""@type"""'}), "('updateServiceNotification', alias='@type')\n", (54707, 54751), False, 'from pydantic import Field\n'), ((54769, 54793), 'pydantic.Field', 'Field', (['...'], {'alias': '"""type"""'}), "(..., alias='type')\n", (54774, 54793), False, 'from pydantic import Field\n'), ((55137, 55177), 'pydantic.Field', 'Field', (['"""updateStickerSet"""'], {'alias': '"""@type"""'}), "('updateStickerSet', alias='@type')\n", (55142, 55177), False, 'from pydantic import Field\n'), ((55677, 55723), 'pydantic.Field', 'Field', (['"""updateSuggestedActions"""'], {'alias': '"""@type"""'}), "('updateSuggestedActions', alias='@type')\n", (55682, 55723), False, 'from pydantic import Field\n'), ((56253, 56293), 'pydantic.Field', 'Field', (['"""updateSupergroup"""'], {'alias': '"""@type"""'}), "('updateSupergroup', alias='@type')\n", (56258, 56293), False, 'from pydantic import Field\n'), ((56810, 56858), 'pydantic.Field', 'Field', (['"""updateSupergroupFullInfo"""'], {'alias': '"""@type"""'}), "('updateSupergroupFullInfo', alias='@type')\n", (56815, 56858), False, 'from pydantic import Field\n'), ((57528, 57572), 'pydantic.Field', 'Field', (['"""updateTermsOfService"""'], {'alias': '"""@type"""'}), "('updateTermsOfService', 
alias='@type')\n", (57533, 57572), False, 'from pydantic import Field\n'), ((58074, 58123), 'pydantic.Field', 'Field', (['"""updateTrendingStickerSets"""'], {'alias': '"""@type"""'}), "('updateTrendingStickerSets', alias='@type')\n", (58079, 58123), False, 'from pydantic import Field\n'), ((59228, 59273), 'pydantic.Field', 'Field', (['"""updateUnreadChatCount"""'], {'alias': '"""@type"""'}), "('updateUnreadChatCount', alias='@type')\n", (59233, 59273), False, 'from pydantic import Field\n'), ((60105, 60153), 'pydantic.Field', 'Field', (['"""updateUnreadMessageCount"""'], {'alias': '"""@type"""'}), "('updateUnreadMessageCount', alias='@type')\n", (60110, 60153), False, 'from pydantic import Field\n'), ((60624, 60658), 'pydantic.Field', 'Field', (['"""updateUser"""'], {'alias': '"""@type"""'}), "('updateUser', alias='@type')\n", (60629, 60658), False, 'from pydantic import Field\n'), ((61079, 61121), 'pydantic.Field', 'Field', (['"""updateUserFullInfo"""'], {'alias': '"""@type"""'}), "('updateUserFullInfo', alias='@type')\n", (61084, 61121), False, 'from pydantic import Field\n'), ((61600, 61653), 'pydantic.Field', 'Field', (['"""updateUserPrivacySettingRules"""'], {'alias': '"""@type"""'}), "('updateUserPrivacySettingRules', alias='@type')\n", (61605, 61653), False, 'from pydantic import Field\n'), ((62120, 62160), 'pydantic.Field', 'Field', (['"""updateUserStatus"""'], {'alias': '"""@type"""'}), "('updateUserStatus', alias='@type')\n", (62125, 62160), False, 'from pydantic import Field\n'), ((62629, 62670), 'pydantic.Field', 'Field', (['"""updateUsersNearby"""'], {'alias': '"""@type"""'}), "('updateUsersNearby', alias='@type')\n", (62634, 62670), False, 'from pydantic import Field\n')] |
import json
from django.utils.translation import ugettext as _
from django.http import HttpResponse, HttpResponseRedirect
from django.template import RequestContext
from django.utils.encoding import smart_str
from django.shortcuts import render_to_response
from forum.modules import decorate
from forum import views
from lib.akismet import Akismet
from forum.settings import APP_URL, OSQA_VERSION
from settings import WORDPRESS_API_KEY, REP_FOR_NO_SPAM_CHECK
from forum.models.user import User
from forum.forms.general import SimpleCaptchaForm
import settings
def can_bypass_spam_check(user):
    """
    Return True when *user* is trusted enough to skip the Akismet check:
    authenticated AND (superuser, staff, or reputation above the
    REP_FOR_NO_SPAM_CHECK threshold).
    """
    # Bug fix: is_authenticated is a method on this Django version (called
    # as a method elsewhere in this module); referencing the bound method
    # without calling it was always truthy.  Also, cmp(a, b) > 0 is just
    # a > b, and cmp() no longer exists in Python 3.
    return user.is_authenticated() and (
        user.is_superuser or
        user.is_staff or
        int(user.reputation) > REP_FOR_NO_SPAM_CHECK)
def check_spam(param, comment_type):
    """
    Decorator factory: wrap a view so that the POST field named *param*
    is run through the Akismet spam check before the view executes.

    comment_type is a human-readable label ('question', 'answer',
    'comment') used both in the Akismet payload and in the user-facing
    error message.  If Akismet flags the content, AJAX requests get a
    JSON error and regular requests get a captcha challenge page
    (unless a valid captcha was already submitted).
    """
    def wrapper(origin, request, *args, **kwargs):
        # Only check when there is content to check, an API key is
        # configured, and the user is not trusted enough to bypass.
        if request.POST and request.POST.get(param, None) and WORDPRESS_API_KEY and (not can_bypass_spam_check(request.user)):
            comment = smart_str(request.POST[param])
            data = {
                "user_ip":request.META["REMOTE_ADDR"],
                "user_agent":request.environ['HTTP_USER_AGENT'],
                "comment_type": comment_type,
                "comment":comment
            }
            if request.user.is_authenticated():
                # Author details improve Akismet's classification accuracy.
                data.update({
                    "comment_author":smart_str(request.user.username),
                    "comment_author_email":request.user.email,
                    "comment_author_url":request.user.website,
                })
            api = Akismet(settings.WORDPRESS_API_KEY, APP_URL, "OSQA/%s" % OSQA_VERSION)
            if api.comment_check(comment, data):
                post_data = request.POST
                captcha_form = SimpleCaptchaForm(request.POST)
                if request.is_ajax():
                    response = {
                        'success': False,
                        'error_message': _("Sorry, but akismet thinks your %s is spam.") % comment_type
                    }
                    return HttpResponse(json.dumps(response), mimetype="application/json")
                else:
                    # A solved captcha overrides the Akismet verdict.
                    captcha_checked = False
                    try:
                        if captcha_form.is_valid() and 'recaptcha' in captcha_form.fields:
                            captcha_checked = True
                    except Exception:
                        # Captcha validation is best-effort: any failure is
                        # treated as "not solved", never propagated.  A bare
                        # `except:` here would also have swallowed
                        # KeyboardInterrupt/SystemExit.
                        pass
                    if not captcha_checked:
                        return render_to_response('modules/akismet/foundspam.html', {
                            'action_name': comment_type,
                            'post_data' : post_data,
                            'captcha_form' : captcha_form,
                        }, RequestContext(request))
        return origin(request, *args, **kwargs)
    return wrapper
# Wrap the question/answer/comment submission views so the relevant POST
# field is run through the Akismet spam check before the view executes.
decorate(views.writers.ask)(check_spam('text', _('question')))
decorate(views.writers.answer)(check_spam('text', _('answer')))
decorate(views.commands.comment)(check_spam('comment', _('comment')))
| [
"forum.modules.decorate",
"json.dumps",
"django.template.RequestContext",
"lib.akismet.Akismet",
"django.utils.encoding.smart_str",
"django.utils.translation.ugettext",
"forum.forms.general.SimpleCaptchaForm"
] | [((2800, 2827), 'forum.modules.decorate', 'decorate', (['views.writers.ask'], {}), '(views.writers.ask)\n', (2808, 2827), False, 'from forum.modules import decorate\n'), ((2863, 2893), 'forum.modules.decorate', 'decorate', (['views.writers.answer'], {}), '(views.writers.answer)\n', (2871, 2893), False, 'from forum.modules import decorate\n'), ((2927, 2959), 'forum.modules.decorate', 'decorate', (['views.commands.comment'], {}), '(views.commands.comment)\n', (2935, 2959), False, 'from forum.modules import decorate\n'), ((2847, 2860), 'django.utils.translation.ugettext', '_', (['"""question"""'], {}), "('question')\n", (2848, 2860), True, 'from django.utils.translation import ugettext as _\n'), ((2913, 2924), 'django.utils.translation.ugettext', '_', (['"""answer"""'], {}), "('answer')\n", (2914, 2924), True, 'from django.utils.translation import ugettext as _\n'), ((2982, 2994), 'django.utils.translation.ugettext', '_', (['"""comment"""'], {}), "('comment')\n", (2983, 2994), True, 'from django.utils.translation import ugettext as _\n'), ((976, 1006), 'django.utils.encoding.smart_str', 'smart_str', (['request.POST[param]'], {}), '(request.POST[param])\n', (985, 1006), False, 'from django.utils.encoding import smart_str\n'), ((1529, 1599), 'lib.akismet.Akismet', 'Akismet', (['settings.WORDPRESS_API_KEY', 'APP_URL', "('OSQA/%s' % OSQA_VERSION)"], {}), "(settings.WORDPRESS_API_KEY, APP_URL, 'OSQA/%s' % OSQA_VERSION)\n", (1536, 1599), False, 'from lib.akismet import Akismet\n'), ((1721, 1752), 'forum.forms.general.SimpleCaptchaForm', 'SimpleCaptchaForm', (['request.POST'], {}), '(request.POST)\n', (1738, 1752), False, 'from forum.forms.general import SimpleCaptchaForm\n'), ((1339, 1371), 'django.utils.encoding.smart_str', 'smart_str', (['request.user.username'], {}), '(request.user.username)\n', (1348, 1371), False, 'from django.utils.encoding import smart_str\n'), ((2041, 2061), 'json.dumps', 'json.dumps', (['response'], {}), '(response)\n', (2051, 2061), False, 
'import json\n'), ((1916, 1963), 'django.utils.translation.ugettext', '_', (['"""Sorry, but akismet thinks your %s is spam."""'], {}), "('Sorry, but akismet thinks your %s is spam.')\n", (1917, 1963), True, 'from django.utils.translation import ugettext as _\n'), ((2704, 2727), 'django.template.RequestContext', 'RequestContext', (['request'], {}), '(request)\n', (2718, 2727), False, 'from django.template import RequestContext\n')] |
"""
Translate an element, which is described by the YAML method file
and a descriptor file, into a target function.
Procedure:
1. When analyzing a YAML file, parse the call to the method-element, to get:
- list of inputs,
- list of outputs
2. Parse the YAML of that element, to know the name of the inputs and outputs,
create inputs and outputs with such names, value are translated-names (string,
given by the name-allocator before translating methods), they will be accessed
in the descriptor of that element.
3. Process the descriptor:
- If preprocess part is available: execute the preprocess part as Python 3 code.
- Treat the code part as text (a string), parse that text to detect:
anywhere there is the structure <var_name>, then replace it with the value
of that variable currently in Python memory (within scope of processing that
specific descriptor). The new text after processing the code part is named code.
- If postprocess part is available: execute the postprocess part as Python 3
code. By requirement, at the end of postprocess part, there will be a variables
named `code`. Write the value of `code` into the output string.
"""
import re
from . import descriptor_parser
from . import utils
from .shared_parameters import *
# def descriptor_file_parse(descriptor_file, method_file):
# descriptor = descriptor_file_read(descriptor_file)
# yaml_method = yaml_method_file_read(method_file)
# preprocess_parse(descriptor_file)
def yaml_single_method_file_read(yaml_method_file):
    """
    Read a method file which contains only one block.

    NOTE(review): this looks like an unfinished stub -- the parsed
    ``yaml_block`` is never analyzed or returned, so callers always get
    ``None``.  Confirm whether the command analysis is still TODO.
    """
    yaml_block = utils.yaml_file_read(yaml_method_file)
    # Analyze its commands
    return
def translate_command_element(odict_command, element_file, descriptor_file):
    """
    Translate one command into target code.

    odict_command maps a single command name to the list of (already
    translated) input variable names; element_file is the element's YAML
    interface definition; descriptor_file holds its preprocess/code/
    postprocess template.

    Returns the translated code string produced by translate_single_code.
    Raises ValueError when the element name does not start with the
    command name (element names may be overloaded, e.g. add__float).
    """
    descriptor = descriptor_parser.descriptor_file_read(descriptor_file)
    preprocess_string = descriptor['preprocess']
    code_string = descriptor['code']
    postprocess_string = descriptor['postprocess']
    yaml_element = utils.yaml_file_read(element_file)
    # The command odict has exactly one key: the command name.
    list_command_keys = [key for key in odict_command.keys()]
    first_key = list_command_keys[0]
    input_names = odict_command[first_key]
    # Allocate one fresh output name from the shared name bank (stateful).
    # NOTE(review): assumes this returns a sequence -- analyze_outputs
    # indexes output_name[0]; confirm against utils.get_var_name_from_bank.
    output_name = utils.get_var_name_from_bank(1)
    # Likewise the element YAML has one top-level key: the element name.
    list_element_keys = [key for key in yaml_element.keys()]
    element_name = list_element_keys[0]
    element_inputs = yaml_element[element_name]['inputs']
    element_output = yaml_element[element_name]['outputs']
    if not element_name.startswith(first_key): # overloading: add__float for add
        raise ValueError('Element does not match command.')
    else:
        real_inputs = analyze_inputs(input_names, element_inputs)
        real_output = analyze_outputs(output_name, element_output)
        translated_code = translate_single_code(real_inputs, real_output,\
            preprocess_string, code_string, postprocess_string)
    return translated_code
def analyze_inputs(input_names, element_inputs):
    """
    Map template input names to their decoded (translated) names.

    input_names    -- ordered list of decoded variable names.
    element_inputs -- ordered list of template entries (dicts); each has
                      either a 'name' key (scalar input, consumes one
                      decoded name) or an 'array_name' key (array input,
                      absorbs all remaining decoded names -- by
                      convention it must be the last entry).

    Returns a dict {template_name: decoded_name_or_list}.
    """
    real_inputs = {}
    next_input = 0
    for item in element_inputs:
        if 'name' in item:
            # Scalar input: consume exactly one decoded name, in order.
            real_inputs[item['name']] = input_names[next_input]
            next_input += 1
        elif 'array_name' in item:
            # Array input: takes every decoded name not yet consumed.
            real_inputs[item['array_name']] = input_names[next_input:]
    return real_inputs
def analyze_outputs(output_name, element_output):
    """
    Pair the template's output variable with its decoded name.

    element_output is a list whose first entry holds the template
    variable under 'name'; output_name is a sequence whose first item is
    the allocated decoded name.  Returns {template_var: decoded_name}.
    """
    template_var = element_output[0]['name']
    return {template_var: output_name[0]}
def parse_code(code_string):
    """
    Split *code_string* into an ordered list of segments.

    Each segment is a dict with a single key: 'text' (a literal run of
    code) or 'var' (the identifier found between angle brackets, e.g.
    ``<output>``; brackets are stripped).

    Bug fix: the original implementation dropped any literal text that
    followed the last ``<var>`` placeholder (and returned [] for a
    string with no placeholders at all), silently corrupting code when
    it was reassembled from the segments.  Trailing text is now kept.
    """
    # Compile once instead of re-searching the same pattern three times
    # per loop iteration.
    var_pattern = re.compile(r'\<[\w\[\]]+\>')
    segments = []
    rest = code_string
    while True:
        match = var_pattern.search(rest)
        if match is None:
            break
        if match.start() > 0:
            segments.append({'text': rest[:match.start()]})
        segments.append({'var': match.group().strip('<>')})
        rest = rest[match.end():]
    if rest:
        segments.append({'text': rest})
    return segments
def translate_single_code(input_dict, output_dict, preprocess_string,\
    code_string, postprocess_string):
    """
    Render one element's code template by substituting translated names.

    Example shapes:
        input_dict  == {'input_': ['A_1', 'A_2', 'A_3']}
        output_dict == {'output': 'Alpha'}
        parsed code == [{'var': 'output'}, {'text': ' := '}, {'var': 'command_text'}]

    Mechanism: template variables are bound as *local* Python variables
    via exec() of generated assignment statements, then looked up with
    eval() during substitution.  This works in CPython because exec/eval
    inside a function share the same locals() dict, but it is fragile
    (the bindings are invisible to normal name access) and executes
    descriptor-supplied code -- descriptors must be trusted input.

    Returns the final code string, as left in the variable named by
    output_code_descriptor after the postprocess stage.
    """
    _code_series = parse_code(code_string)
    # Debug tracing of the three descriptor sections.
    print('preprocess:')
    print(preprocess_string)
    print('code:')
    print(code_string)
    print('postprocess:')
    print(postprocess_string)
    # Bind each template input name to its decoded name(s) as a local
    # variable, e.g.  input_ = ['A_1','A_2']  or  x = 'A_1'.
    for _key in input_dict:
        if isinstance(input_dict[_key], list):
            # it is an array
            _assign_code = _key + '=' + '['
            for _item in input_dict[_key]:
                _assign_code += '\'' + _item + '\','
            _assign_code = _assign_code[:-1]+']' # remove the last comma
        else:
            _assign_code = _key + '=' + '\'' + input_dict[_key] + '\''
        exec(_assign_code)
    # Same binding for the (single) output variable.
    for _key in output_dict:
        _assign_code = _key + '=' + '\'' + output_dict[_key] + '\''
        exec(_assign_code)
    exec(preprocess_string)
    # 1st round: substitute variable names in code string
    _1st_processed_code = ''
    for _chunk in _code_series:
        if 'text' in _chunk:
            _1st_processed_code += _chunk['text']
        if 'var' in _chunk:
            _1st_processed_code += eval(_chunk['var'])
    #2nd round: replace variable names left, which might come from preprocess,
    # like: input_[0]
    _parsed_2nd_code = parse_code(_1st_processed_code)
    code = ''
    for _chunk in _parsed_2nd_code:
        if 'text' in _chunk:
            code += _chunk['text']
        if 'var' in _chunk:
            code += eval(_chunk['var'])
    # Preset output code, in case postprocess part is empty
    exec(output_code_descriptor + ' = code')
    # BUG: if output_code_descriptor is 'code', there is a Python bug that
    # variable code is not updated after the next exec
    exec(postprocess_string)
    final_processed_code = eval(output_code_descriptor)
    return final_processed_code
| [
"re.search"
] | [((4561, 4597), 're.search', 're.search', (['var_pattern', 'rolling_code'], {}), '(var_pattern, rolling_code)\n', (4570, 4597), False, 'import re\n'), ((4621, 4657), 're.search', 're.search', (['var_pattern', 'rolling_code'], {}), '(var_pattern, rolling_code)\n', (4630, 4657), False, 'import re\n'), ((4686, 4722), 're.search', 're.search', (['var_pattern', 'rolling_code'], {}), '(var_pattern, rolling_code)\n', (4695, 4722), False, 'import re\n')] |
#!/usr/bin/env python3
import csv
import gzip
import json
import networkx as nx
import sys
import time
import utils
from argparse import ArgumentParser
from calculate_activity_network import embedded_extended_tweet_url, root_of_conversation
from collections import defaultdict
from datetime import datetime
from utils import eprint, expanded_urls_from, extract_text, flatten, lowered_hashtags_from, mentioned_ids_from#, timestamp_2_epoch_seconds
# Builds feature vectors for HCC members and their groupings as input to the
# classifiers for validation
#
# This version extracts 32 features
#
# Renamed from extract_feature_vectors_for_hcc_classifier.py
class Options():
    """Command-line option handling for the feature-vector extraction script."""

    def __init__(self):
        self._init_parser()

    def _init_parser(self):
        # 'resolve' lets a later add_argument override an earlier one
        # that uses the same flag, instead of raising.
        self.parser = ArgumentParser(
            usage='extract_feature_vectors_for_hcc_classifier.py -t <tweets.json> -i <ids.csv> -l <label>',
            conflict_handler='resolve'
        )
        # Table-driven registration: (flags, add_argument kwargs).
        arg_specs = [
            (('-t', '--tweets'),
             dict(required=True, dest='tweets_file',
                  help='File containing all the tweets')),
            (('-i', '--ids-file'),
             dict(required=True, dest='ids_file',
                  help='The list of IDs to build feature vectors for.')),
            (('-l', '--label'),
             dict(required=True, dest='label',
                  help='The label to apply to each entry in the data generated (first column).')),
            (('-v', '--verbose'),
             dict(action='store_true', default=False, dest='verbose',
                  help='Turn on verbose logging (default: False)')),
        ]
        for flags, kwargs in arg_specs:
            self.parser.add_argument(*flags, **kwargs)

    def parse(self, args=None):
        """Parse *args* (defaults to sys.argv[1:]) and return the namespace."""
        return self.parser.parse_args(args)
TWITTER_TS_FORMAT = '%a %b %d %H:%M:%S +0000 %Y' # Tue Apr 26 08:57:55 +0000 2011

def parse_ts(ts_str):
    """
    Parse a Twitter 'created_at' timestamp into a naive datetime whose
    fields equal the (UTC) wall-clock values in the string.

    Fix: the original round-tripped through time.mktime /
    datetime.fromtimestamp, which interprets the parsed struct_time in
    the *local* timezone -- equivalent in the common case but ambiguous
    or wrong across DST transitions.  datetime.strptime produces the
    same field values directly and deterministically.
    """
    return datetime.strptime(ts_str, TWITTER_TS_FORMAT)
def count(fltr):
    """Return how many items *fltr* yields (consumes the iterable)."""
    return sum(1 for _ in fltr)
def root_of_conversation(tweet_in_conversation, tweet_map):
    """
    Walk the reply chain upwards from *tweet_in_conversation* and return
    the ID of the conversation's root.

    The walk stops as soon as the current tweet is missing from
    *tweet_map* or is not itself a reply, so the returned root ID may
    refer to a tweet outside the corpus.
    """
    current = tweet_in_conversation
    while True:
        tweet = tweet_map.get(current)
        if tweet is None:
            break
        parent = tweet.get('in_reply_to_status_id_str')
        if not parent:
            break
        current = parent
    return current
def embedded_extended_tweet_url(tweet_id, url):
    """
    True when *url* is the self-referential permalink Twitter embeds in
    extended tweets.  Because an extended tweet's text field cannot hold
    the full content, Twitter appends a URL pointing back to the full
    status; that link is a navigation artefact rather than shared
    content, so callers use this test to strip it out.
    """
    self_link = 'https://twitter.com/i/web/status/%s' % tweet_id
    return url == self_link
# Ordered names of the per-user features emitted by
# build_user_feature_vector (order defines the CSV column order).
USER_FEATURES = [
    'U_tweet_count',
    'U_retweet_count',
    'U_reply_count',
    'U_tweet_rate',
    'U_mentioned_ids', # unique IDs
    'U_mention_count', # every mention
    'U_unique_hts',    # unique hashtags
    'U_ht_count',      # every hashtag
    'U_unique_urls',   # unique URLs
    'U_url_count',     # every URL
    'U_default_img',
    'U_desc_len',
    'U_url'
]
# Twitter's stock avatar URL; used to flag accounts that never set a
# profile image (a weak automation/throwaway signal).
DEFAULT_PROF_IMG_URL = 'http://abs.twimg.com/sticky/default_profile_images/default_profile_normal.png'
def build_user_feature_vector(u_id, activity, collection_period_mins):
    """
    Build the per-user feature map (keys = USER_FEATURES) for one user.

    u_id                   -- the user's ID (kept for interface parity;
                              unused here since activity carries the user).
    activity               -- non-empty list of the user's tweets; the
                              profile is read from the first tweet.
    collection_period_mins -- collection window length, for the rate.
    """
    profile = activity[0]['user']
    # Hoist the flattened entity lists so each is computed once instead
    # of twice (once for the unique count, once for the raw count).
    mentions = list(flatten(map(mentioned_ids_from, activity)))
    hashtags = list(flatten(map(lowered_hashtags_from, activity)))
    urls = list(flatten(map(expanded_urls_from, activity)))
    return {
        'U_tweet_count'   : len(activity),
        'U_retweet_count' : count(filter(lambda t: 'retweeted_status' in t and t['retweeted_status'], activity)),
        'U_reply_count'   : count(filter(lambda t: t['in_reply_to_status_id_str'], activity)),
        'U_tweet_rate'    : len(activity) / collection_period_mins,
        'U_mentioned_ids' : len(set(mentions)),  # unique mentioned IDs
        'U_mention_count' : len(mentions),       # every mention
        'U_unique_hts'    : len(set(hashtags)),  # unique hashtags
        'U_ht_count'      : len(hashtags),       # every hashtag
        'U_unique_urls'   : len(set(urls)),      # unique URLs
        'U_url_count'     : len(urls),           # every URL
        'U_default_img'   : 1 if profile['profile_image_url'] == DEFAULT_PROF_IMG_URL else 0,
        'U_desc_len'      : len(profile['description'] if profile['description'] else ''),
        'U_url'           : len(profile['url'] if profile['url'] else ''),
    }
# Ordered names of the per-community features emitted by
# build_community_feature_vector (order defines the CSV column order).
# 'C_in/ext_*' are the fractions of reposts/mentions/replies directed at
# community members (internal) rather than outsiders.
COMMUNITY_FEATURES = [
    'C_tweet_count',
    'C_node_count',
    'C_edge_count',
    'C_user_count',
    'C_author_count',
    'C_hashtag_count',
    'C_url_count',
    'C_repost_count',
    'C_quote_count',
    'C_mention_count',
    'C_reply_count',
    'C_use_ht_count',
    'C_use_url_count',
    'C_in_conv_count',
    'C_in/ext_repost',
    'C_in/ext_mention',
    'C_in/ext_reply',
]
def build_community_feature_vector(community, g):
    """
    Build the per-community feature map (keys = COMMUNITY_FEATURES) from
    the community's activity graph *g* (a MultiDiGraph built by
    build_activity_graph).

    NOTE(review): the *community* parameter and the local ext_users list
    are currently unused -- confirm whether they are leftovers or
    intended for features not yet implemented.
    """
    # Count nodes/edges matching a predicate over their attribute dicts.
    def count_nodes_if(cond):
        return len([n for n, d in g.nodes(data=True) if cond(n, d)])
    def count_edges_if(cond):
        return len([k for u, v, k, d in g.edges(data=True,keys=True) if cond(u, v, k, d)]) # d['interaction'] == t]
        # return len(['x' for u, v, d in g.edges(data=True) if cond(u, v, d)])
    # Internal users authored posts; external users were only interacted with.
    int_users = [n for n, d in g.nodes(data=True) if d['is_author']]
    ext_users = [n for n, d in g.nodes(data=True) if d['n_type'] == 'USER' and not d['is_author']]
    # Pre-count interaction types reused below for the internal/total ratios.
    repost_count = count_edges_if(lambda u, v, k, d: d['interaction'] == 'REPOST')
    reply_count = count_edges_if(lambda u, v, k, d: d['interaction'] == 'REPLY')
    mention_count = count_edges_if(lambda u, v, k, d: d['interaction'] == 'MENTION')
    return {
        'C_tweet_count'    : g.graph['post_count'],
        'C_node_count'     : len(g),
        'C_edge_count'     : len(g.edges()),
        'C_user_count'     : count_nodes_if(lambda n, d: d['n_type'] == 'USER'),
        'C_author_count'   : count_nodes_if(lambda n, d: d['n_type'] == 'USER' and d['is_author']),
        'C_hashtag_count'  : count_nodes_if(lambda n, d: d['n_type'] == 'HASHTAG'),
        'C_url_count'      : count_nodes_if(lambda n, d: d['n_type'] == 'URL'),
        'C_repost_count'   : repost_count,
        'C_quote_count'    : count_edges_if(lambda u, v, k, d: d['interaction'] == 'QUOTE'),
        'C_mention_count'  : mention_count,
        'C_reply_count'    : reply_count,
        'C_use_ht_count'   : count_edges_if(lambda u, v, k, d: d['interaction'] == 'HASHTAG'),
        'C_use_url_count'  : count_edges_if(lambda u, v, k, d: d['interaction'] == 'URL'),
        'C_in_conv_count'  : count_edges_if(lambda u, v, k, d: d['interaction'] == 'IN_CONVERSATION'),
        # Fraction of each interaction type aimed at community members;
        # 0 when there are no interactions of that type at all.
        'C_in/ext_repost'  : count_edges_if(lambda u, v, k, d: d['interaction'] == 'REPOST' and v in int_users) / repost_count if repost_count else 0,
        'C_in/ext_mention' : count_edges_if(lambda u, v, k, d: d['interaction'] == 'MENTION' and v in int_users) / mention_count if mention_count else 0,
        'C_in/ext_reply'   : count_edges_if(lambda u, v, k, d: d['interaction'] == 'REPLY' and v in int_users) / reply_count if reply_count else 0
    }
def mk_feature_str(keys, feature_map):
    """Render feature_map's values, in *keys* order, as one comma-separated string."""
    parts = (str(feature_map[key]) for key in keys)
    return ','.join(parts)
def build_activity_graph(tweets, t_0): # tweets is a tweet map { tweet_id : tweet }
    """
    Build a MultiDiGraph of the activity in *tweets* (a {tweet_id: tweet}
    map), with edge timestamps expressed as seconds since *t_0*.

    Nodes are users, hashtags, and URLs (n_type attribute); users who
    authored a post in the corpus are flagged is_author.  Edge types
    (interaction attribute): HASHTAG, URL, MENTION, REPOST, QUOTE,
    REPLY, IN_CONVERSATION.
    """
    first_tweet_ts_str = utils.ts_to_str(t_0, fmt=utils.TWITTER_TS_FORMAT) # epoch_seconds_2_timestamp_str(t_0)
    first_tweet_ts = utils.epoch_seconds_2_ts(t_0) #first_tweet_ts_str) # parse_twitter_ts(first_tweet_ts_str)
    g = nx.MultiDiGraph(post_count=len(tweets))
    # Add a node once; upgrade an existing node to is_author if needed.
    def add_node(g, n_id, n_type='USER', is_author=False):
        if n_id not in g:
            g.add_node(n_id, n_type=n_type, label=n_id, is_author=is_author)
        elif is_author:
            # g.nodes[n_id]['n_type'] = n_type
            g.nodes[n_id]['is_author'] = is_author
    # Map an interaction type to the target node's type.
    def node_type_for(interaction):
        if interaction == 'HASHTAG' or interaction == 'URL':
            return interaction
        else:
            return 'USER'
    # Add an edge (creating endpoints as needed) keyed by a unique
    # human-readable string so parallel interactions stay distinct.
    def add_edge(g, from_id, to_id, tweet_id, ts_str, int_type, **kwargs):
        add_node(g, from_id, 'USER', True)
        # g.nodes[from_id]['is_author'] = True
        add_node(g, to_id, n_type=node_type_for(int_type))
        t = utils.extract_ts_s(ts_str) - t_0 # timestamp_2_epoch_seconds(utils.extract_ts_s(ts_str)) - t_0
        attrs = {
            'time_t' : t,
            'tweet_id' : tweet_id,
            'interaction' : int_type
        }
        key = '%s %s %s in %s' % (from_id, int_type, to_id, tweet_id)
        g.add_edge(from_id, to_id, key=key, **{**attrs, **kwargs})
    # Build networks
    # edge types: REPOST, MENTION, REPLY, QUOTE, URL, HASHTAG
    observed_user_ids = set()
    for tweet_id in tweets:
        tweet = tweets[tweet_id]
        hashtags = lowered_hashtags_from(tweet)
        urls = expanded_urls_from(tweet)
        mentions = mentioned_ids_from(tweet)
        tweet_text = extract_text(tweet)
        tweet_ts = tweet['created_at']
        # NOTE: rebinds the loop variable; assumes map keys equal id_str.
        tweet_id = tweet['id_str']
        tweeter_id = tweet['user']['id_str']
        observed_user_ids.add(tweeter_id)
        for ht in hashtags:
            add_edge(g, tweeter_id, ht, tweet_id, tweet_ts, 'HASHTAG')
        for url in urls:
            if not embedded_extended_tweet_url(tweet_id, url): # extended tweets include a URL to their extended form
                add_edge(g, tweeter_id, url, tweet_id, tweet_ts, 'URL')
        for mentioned_id in mentions:
            observed_user_ids.add(mentioned_id)
            add_edge(g, tweeter_id, mentioned_id, tweet_id, tweet_ts, 'MENTION')
        if 'retweeted_status' in tweet:
            retweeter = tweeter_id
            retweetee = tweet['retweeted_status']['user']['id_str']
            observed_user_ids.add(retweetee)
            add_edge(
                g, retweeter, retweetee, tweet_id, tweet_ts, 'REPOST',
                original_tweet_id=tweet['retweeted_status']['id_str'],
                original_tweet_ts=tweet['retweeted_status']['created_at'],
                posting_delay_sec=(
                    utils.extract_ts_s(tweet['retweeted_status']['created_at']) -
                    utils.extract_ts_s(tweet_ts)
                )#.total_seconds()
            )
        elif 'quoted_status' in tweet and 'retweeted_status' not in tweet:
            # Pure quote (retweets of quotes were handled above).
            quoter = tweeter_id
            quotee = tweet['quoted_status']['user']['id_str']
            observed_user_ids.add(quotee)
            add_edge(
                g, quoter, quotee, tweet_id, tweet_ts, 'QUOTE',
                original_tweet_id=tweet['quoted_status']['id_str'],
                original_tweet_ts=tweet['quoted_status']['created_at'],
                posting_delay_sec=(
                    utils.extract_ts_s(tweet['quoted_status']['created_at']) -
                    utils.extract_ts_s(tweet_ts)
                )#.total_seconds()
            )
        elif 'in_reply_to_status_id_str' in tweet and tweet['in_reply_to_status_id_str'] in tweets:
            # only consider replies that appear in the corpus
            # basic reply info
            replier = tweeter_id
            replied_to = tweet['in_reply_to_user_id_str']
            observed_user_ids.add(replied_to)
            replied_to_status = tweets[tweet['in_reply_to_status_id_str']]
            replied_to_status_ts = replied_to_status['created_at']
            posting_delay_sec = (utils.extract_ts_s(replied_to_status_ts) - utils.extract_ts_s(tweet_ts))#.total_seconds()
            add_edge(
                g, replier, replied_to, tweet_id, tweet_ts, 'REPLY',
                original_tweet_id=tweet['in_reply_to_status_id_str'],
                original_tweet_ts=replied_to_status_ts,
                posting_delay_sec=posting_delay_sec
            )
            # in conversation
            if tweet['in_reply_to_status_id_str'] in tweets:
                # follow the reply chain as far as we can
                conversation_root = root_of_conversation(tweet['in_reply_to_status_id_str'], tweets)
                # conversation_root MAY NOT be in the corpus - it's still a link though
                # Fall back to the collection start when the root tweet is
                # outside the corpus; otherwise use its real timestamp.
                conv_root_ts = first_tweet_ts_str
                posting_delay_sec = (utils.ts_2_epoch_seconds(first_tweet_ts) - utils.extract_ts_s(tweet_ts))#.total_seconds()
                if conversation_root in tweets:
                    observed_user_ids.add(tweets[conversation_root]['user']['id_str'])
                    conv_root_ts = tweets[conversation_root]['created_at']
                    posting_delay_sec = (utils.extract_ts_s(conv_root_ts) - utils.extract_ts_s(tweet_ts))#.total_seconds()
                add_edge(
                    g, replier, conversation_root, tweet_id, tweet_ts, 'IN_CONVERSATION',
                    original_tweet_id=conversation_root,
                    original_tweet_ts=conv_root_ts,
                    posting_delay_sec=posting_delay_sec
                )
    return g
# Module-level verbosity flag; rebound from opts.verbose in __main__.
DEBUG=False
def log(msg):
    # Emit *msg* to stderr only when verbose logging is enabled.
    if DEBUG: eprint(msg)
if __name__ == '__main__':
    # Parse command-line options (ids_file, tweets_file, label, verbose, ...).
    options = Options()
    opts = options.parse(sys.argv[1:])
    DEBUG=opts.verbose
    # users: node_id -> CSV row dict; communities: community_id -> [node_id, ...]
    users = {}
    communities = defaultdict(lambda: [], {})
    with open(opts.ids_file, 'r', encoding='utf-8') as f:
        csv_reader = csv.DictReader(f, delimiter=',', quotechar='"')
        for row in csv_reader:
            # Copy each CSV row into a plain dict keyed by column name.
            r = {}
            for key in row: # range(len(row)):
                r[key] = row[key]
            users[r['node_id']] = r
            communities[r['community_id']].append(r['node_id'])
            # users[r[0]] = r
    # tweets: user_id -> list of tweet dicts (only users listed in the ids file).
    tweets = dict([(uid, []) for uid in users.keys()])
    earliest_ts = sys.maxsize
    latest_ts = 0
    # with open(opts.tweets_file, 'r', encoding='utf-8') as f:
    # Transparently read gzip-compressed tweet files (filename ending in z/Z).
    f = gzip.open(opts.tweets_file, 'rt') if opts.tweets_file[-1] in 'zZ' else open(opts.tweets_file, 'r', encoding='utf-8')
    for l in f:
        tweet = json.loads(l.strip())
        tweet['ts'] = utils.extract_ts_s(tweet['created_at']) # timestamp_2_epoch_seconds(parse_ts(tweet['created_at']))
        # Track the overall observation window in epoch seconds.
        if tweet['ts'] < earliest_ts: earliest_ts = tweet['ts']
        if tweet['ts'] > latest_ts: latest_ts = tweet['ts']
        user_id = tweet['user']['id_str']
        if user_id in users.keys():
            # tweet['ts'] = timestamp_2_epoch_seconds(parse_ts(tweet['created_at']))
            tweets[user_id].append(tweet)
    f.close()
    collection_period_mins = (latest_ts - earliest_ts) / 60
    # Per-user feature vectors built from chronologically sorted tweets.
    user_feature_vectors = {}
    for user_id in tweets:
        tweets[user_id].sort(key=lambda t: t['ts'])
        user_feature_vectors[user_id] = build_user_feature_vector(user_id, tweets[user_id], collection_period_mins)
    # Per-community feature vectors from each community's activity graph.
    community_feature_vectors = {}
    for community_id in communities:
        community_tweets = {}
        community = communities[community_id]
        for user_id in community:
            for t in tweets[user_id]:
                # Deduplicate by tweet id across the community's members.
                community_tweets[t['id_str']] = t
            # community_tweets += tweets[user_id]
        # community_tweets.sort(key=lambda t: t['ts'])
        # build activity graph from tweets
        g = build_activity_graph(community_tweets, earliest_ts)
        # build feature vector from activity graph
        community_feature_vectors[community_id] = build_community_feature_vector(community, g)
    # Emit one CSV row per user: label, user features, HCC proportional degree,
    # community id, and that community's feature vector.
    header = ','.join(map(str, ['Label'] + USER_FEATURES + ['U_prop_hcc_degree', 'community_id'] + COMMUNITY_FEATURES))
    print(header)
    for user_id in tweets:
        user_vector = user_feature_vectors[user_id]
        hcc_prop_degree = users[user_id]['proportional_degree']
        community_id = users[user_id]['community_id']
        community_vector = community_feature_vectors[community_id]
        print(','.join([
            opts.label,
            mk_feature_str(USER_FEATURES, user_vector),
            hcc_prop_degree,
            community_id,
            mk_feature_str(COMMUNITY_FEATURES, community_vector)
        ]))
        # print('%s: %s %s' % (user_id, str(user_feature_vectors[user_id]), str()))
| [
"utils.extract_text",
"time.strptime",
"csv.DictReader",
"utils.lowered_hashtags_from",
"argparse.ArgumentParser",
"gzip.open",
"calculate_activity_network.embedded_extended_tweet_url",
"utils.ts_to_str",
"time.mktime",
"utils.expanded_urls_from",
"utils.epoch_seconds_2_ts",
"utils.mentioned_i... | [((1961, 2001), 'time.strptime', 'time.strptime', (['ts_str', 'TWITTER_TS_FORMAT'], {}), '(ts_str, TWITTER_TS_FORMAT)\n', (1974, 2001), False, 'import time\n'), ((7645, 7694), 'utils.ts_to_str', 'utils.ts_to_str', (['t_0'], {'fmt': 'utils.TWITTER_TS_FORMAT'}), '(t_0, fmt=utils.TWITTER_TS_FORMAT)\n', (7660, 7694), False, 'import utils\n'), ((7754, 7783), 'utils.epoch_seconds_2_ts', 'utils.epoch_seconds_2_ts', (['t_0'], {}), '(t_0)\n', (7778, 7783), False, 'import utils\n'), ((13481, 13509), 'collections.defaultdict', 'defaultdict', (['(lambda : [])', '{}'], {}), '(lambda : [], {})\n', (13492, 13509), False, 'from collections import defaultdict\n'), ((884, 939), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'usage': 'usage', 'conflict_handler': '"""resolve"""'}), "(usage=usage, conflict_handler='resolve')\n", (898, 939), False, 'from argparse import ArgumentParser\n'), ((2036, 2060), 'time.mktime', 'time.mktime', (['time_struct'], {}), '(time_struct)\n', (2047, 2060), False, 'import time\n'), ((9142, 9170), 'utils.lowered_hashtags_from', 'lowered_hashtags_from', (['tweet'], {}), '(tweet)\n', (9163, 9170), False, 'from utils import eprint, expanded_urls_from, extract_text, flatten, lowered_hashtags_from, mentioned_ids_from\n'), ((9186, 9211), 'utils.expanded_urls_from', 'expanded_urls_from', (['tweet'], {}), '(tweet)\n', (9204, 9211), False, 'from utils import eprint, expanded_urls_from, extract_text, flatten, lowered_hashtags_from, mentioned_ids_from\n'), ((9231, 9256), 'utils.mentioned_ids_from', 'mentioned_ids_from', (['tweet'], {}), '(tweet)\n', (9249, 9256), False, 'from utils import eprint, expanded_urls_from, extract_text, flatten, lowered_hashtags_from, mentioned_ids_from\n'), ((9278, 9297), 'utils.extract_text', 'extract_text', (['tweet'], {}), '(tweet)\n', (9290, 9297), False, 'from utils import eprint, expanded_urls_from, extract_text, flatten, lowered_hashtags_from, mentioned_ids_from\n'), ((13318, 13329), 'utils.eprint', 
'eprint', (['msg'], {}), '(msg)\n', (13324, 13329), False, 'from utils import eprint, expanded_urls_from, extract_text, flatten, lowered_hashtags_from, mentioned_ids_from\n'), ((13588, 13635), 'csv.DictReader', 'csv.DictReader', (['f'], {'delimiter': '""","""', 'quotechar': '"""\\""""'}), '(f, delimiter=\',\', quotechar=\'"\')\n', (13602, 13635), False, 'import csv\n'), ((14073, 14106), 'gzip.open', 'gzip.open', (['opts.tweets_file', '"""rt"""'], {}), "(opts.tweets_file, 'rt')\n", (14082, 14106), False, 'import gzip\n'), ((14266, 14305), 'utils.extract_ts_s', 'utils.extract_ts_s', (["tweet['created_at']"], {}), "(tweet['created_at'])\n", (14284, 14305), False, 'import utils\n'), ((8585, 8611), 'utils.extract_ts_s', 'utils.extract_ts_s', (['ts_str'], {}), '(ts_str)\n', (8603, 8611), False, 'import utils\n'), ((9603, 9645), 'calculate_activity_network.embedded_extended_tweet_url', 'embedded_extended_tweet_url', (['tweet_id', 'url'], {}), '(tweet_id, url)\n', (9630, 9645), False, 'from calculate_activity_network import embedded_extended_tweet_url, root_of_conversation\n'), ((10426, 10485), 'utils.extract_ts_s', 'utils.extract_ts_s', (["tweet['retweeted_status']['created_at']"], {}), "(tweet['retweeted_status']['created_at'])\n", (10444, 10485), False, 'import utils\n'), ((10508, 10536), 'utils.extract_ts_s', 'utils.extract_ts_s', (['tweet_ts'], {}), '(tweet_ts)\n', (10526, 10536), False, 'import utils\n'), ((11742, 11782), 'utils.extract_ts_s', 'utils.extract_ts_s', (['replied_to_status_ts'], {}), '(replied_to_status_ts)\n', (11760, 11782), False, 'import utils\n'), ((11785, 11813), 'utils.extract_ts_s', 'utils.extract_ts_s', (['tweet_ts'], {}), '(tweet_ts)\n', (11803, 11813), False, 'import utils\n'), ((12301, 12365), 'calculate_activity_network.root_of_conversation', 'root_of_conversation', (["tweet['in_reply_to_status_id_str']", 'tweets'], {}), "(tweet['in_reply_to_status_id_str'], tweets)\n", (12321, 12365), False, 'from calculate_activity_network import 
embedded_extended_tweet_url, root_of_conversation\n'), ((11079, 11135), 'utils.extract_ts_s', 'utils.extract_ts_s', (["tweet['quoted_status']['created_at']"], {}), "(tweet['quoted_status']['created_at'])\n", (11097, 11135), False, 'import utils\n'), ((11158, 11186), 'utils.extract_ts_s', 'utils.extract_ts_s', (['tweet_ts'], {}), '(tweet_ts)\n', (11176, 11186), False, 'import utils\n'), ((12541, 12581), 'utils.ts_2_epoch_seconds', 'utils.ts_2_epoch_seconds', (['first_tweet_ts'], {}), '(first_tweet_ts)\n', (12565, 12581), False, 'import utils\n'), ((12584, 12612), 'utils.extract_ts_s', 'utils.extract_ts_s', (['tweet_ts'], {}), '(tweet_ts)\n', (12602, 12612), False, 'import utils\n'), ((12882, 12914), 'utils.extract_ts_s', 'utils.extract_ts_s', (['conv_root_ts'], {}), '(conv_root_ts)\n', (12900, 12914), False, 'import utils\n'), ((12917, 12945), 'utils.extract_ts_s', 'utils.extract_ts_s', (['tweet_ts'], {}), '(tweet_ts)\n', (12935, 12945), False, 'import utils\n')] |
from django.http import QueryDict
from django.http.response import JsonResponse
from rest_framework import viewsets, status
from rest_framework.views import APIView
from .serializers import *
class UserInfoViewSet(viewsets.ModelViewSet):
    """CRUD API for user info records, optionally narrowed by ``?user_id=``."""
    queryset = UserInfoModel.objects.all()
    serializer_class = UserInfoSerializer

    def get_queryset(self):
        """Return the base queryset, filtered by ``user_id`` when supplied.

        BUG FIX: the previous code called ``queryset.get(user_id=...)`` and
        discarded the result, so the filter had no effect and an unknown id
        raised an unhandled ``DoesNotExist``.  ``filter`` narrows the
        queryset and simply yields an empty result for unknown ids.
        """
        queryset = super().get_queryset()
        user_id = self.request.query_params.get('user_id', '')
        if user_id:
            queryset = queryset.filter(user_id=user_id)
        return queryset
class UserInfoSessionView(APIView):
    """Look up a single user's info by the ``user_id`` GET parameter."""

    # noinspection PyMethodMayBeStatic
    def get(self, request, *args, **kwargs):
        # Fetch the matching record, serialize it and wrap it in the
        # project's standard response envelope.
        record = UserInfoModel.objects.get(user_id=request.GET['user_id'])
        payload = {
            'code': True,
            'status': status.HTTP_200_OK,
            'response': UserInfoSerializer(record).data,
            'message': 'SEARCH_SUCCESS',
        }
        return JsonResponse(payload, status=status.HTTP_200_OK)
# Update the login (device push) token.
class UpdateTokenAction(APIView):
    """Update the stored device token for an existing user via HTTP PUT."""

    # noinspection PyMethodMayBeStatic
    def put(self, request):
        body = QueryDict(request.body)
        target_id = body.get('user_id')
        token = body.get('device_token')
        # Guard clause: unknown user -> failure envelope.
        if not UserInfoModel.objects.filter(user_id=target_id).exists():
            return JsonResponse({
                'code': False,
                'status': status.HTTP_200_OK,
                'message': 'FAIL'}, status=status.HTTP_200_OK)
        record = UserInfoModel.objects.get(user_id=target_id)
        record.device_token = token
        record.save()
        return JsonResponse({
            'code': True,
            'status': status.HTTP_200_OK,
            'message': 'UPDATE_SUCCESS'}, status=status.HTTP_200_OK)
"django.http.QueryDict",
"django.http.response.JsonResponse"
] | [((843, 990), 'django.http.response.JsonResponse', 'JsonResponse', (["{'code': True, 'status': status.HTTP_200_OK, 'response': serializer.data,\n 'message': 'SEARCH_SUCCESS'}"], {'status': 'status.HTTP_200_OK'}), "({'code': True, 'status': status.HTTP_200_OK, 'response':\n serializer.data, 'message': 'SEARCH_SUCCESS'}, status=status.HTTP_200_OK)\n", (855, 990), False, 'from django.http.response import JsonResponse\n'), ((1168, 1191), 'django.http.QueryDict', 'QueryDict', (['request.body'], {}), '(request.body)\n', (1177, 1191), False, 'from django.http import QueryDict\n'), ((1696, 1805), 'django.http.response.JsonResponse', 'JsonResponse', (["{'code': False, 'status': status.HTTP_200_OK, 'message': 'FAIL'}"], {'status': 'status.HTTP_200_OK'}), "({'code': False, 'status': status.HTTP_200_OK, 'message':\n 'FAIL'}, status=status.HTTP_200_OK)\n", (1708, 1805), False, 'from django.http.response import JsonResponse\n'), ((1516, 1634), 'django.http.response.JsonResponse', 'JsonResponse', (["{'code': True, 'status': status.HTTP_200_OK, 'message': 'UPDATE_SUCCESS'}"], {'status': 'status.HTTP_200_OK'}), "({'code': True, 'status': status.HTTP_200_OK, 'message':\n 'UPDATE_SUCCESS'}, status=status.HTTP_200_OK)\n", (1528, 1634), False, 'from django.http.response import JsonResponse\n')] |
import os
import re
import sys
def spec_replacer(match):
    """Escape one matched character for PCSI file-spec syntax.

    A space becomes ``^_``; any other matched character is prefixed
    with a circumflex (``^``).
    """
    ch = match.group(0)
    return '^_' if ch == ' ' else '^' + ch
def create_content(type, major, minor, level, edit):
    """Generate the OpenVMS PCSI kit description files for a Python kit.

    Walks the installed tree under ``/python$root``, escapes each path for
    PCSI file-spec syntax, and writes two files into the current directory:
    ``python.pcsi$desc`` (product description with directory/file lists)
    and ``python.pcsi$text`` (user-visible kit prompts).

    Args:
        type: kit type letter (e.g. 'F' for Full).  NOTE: shadows the
            ``type`` builtin inside this function.
        major, minor, level, edit: version fields; must match the values
            used in pythlib.pcsi$text.
    """
    python_dir = '/python$root'
    python_dir_len = len(python_dir)
    all_dirs = []
    all_files = []
    # Characters that must be escaped in a PCSI file specification.
    spec_pattern = re.compile('([. ^+()])')
    for root, dirs, files in os.walk(python_dir):
        # Convert the POSIX path into a VMS-style [python.a.b] directory spec.
        inner_dirs = list(filter(lambda x: x != '', spec_pattern.sub(spec_replacer, root[python_dir_len:]).split('/')))
        kit_dir = '[' + '.'.join(['python'] + inner_dirs) + ']'
        all_dirs.append('directory "' + kit_dir + '" version limit 1;')
        for file in files:
            file_name, file_ext = os.path.splitext(file)
            if file_ext == '':
                # VMS file names always carry a type delimiter.
                file_ext = '.'
            file_name = spec_pattern.sub(spec_replacer, file_name)
            all_files.append('file "' + \
                kit_dir + file_name + file_ext + \
                '" source "' + \
                kit_dir + file_name + file_ext + \
                '";')
        # try:
        #     dirs.remove('__pycache__')
        # except:
        #     pass
    kit_template = '''--
-- (C) Copyright 2021 VMS Software Inc.
--
product VSI I64VMS PYTHON {type}{major}.{minor}-{level}{edit} FULL ;
--
-- Execute the preconfigure procedure
--
execute preconfigure "@pcsi$source:[python]python$pcsi_preconfigure.com" uses [python]python$pcsi_preconfigure.com ;
--
-- Make sure VMS V8.4 or above is installed
--
if ((not <software VSI I64VMS VMS version minimum V8.4>) and (not <software HP I64VMS VMS version minimum V8.4>)) ;
    error NO_MIN_VMS abort ;
end if ;
--
-- ODS-5 Disk(s) should be available on this system
--
if (<logical name PYTHON$ODS5_AVAIL equals 0 table LNM$JOB>) ;
   error NO_ODS5_DISKS ;
end if ;
--
-- Directories...
--
{dirs}
--
-- Files...
--
{files}
--
-- Start-up and shutdown scripts
--
file "[sys$startup]python$define_root.com" source "[python]python$define_root.com";
file "[sys$startup]python$startup.com" source "[python]python$startup.com";
file "[sys$startup]python$shutdown.com" source "[python]python$shutdown.com";
--
-- Release notes
--
-- (none)
--
-- Do post-install tasks
--
execute postinstall "@pcsi$source:[python]python$define_root.com" interactive uses "[python]python$define_root.com" ;
--
-- Okay, done. Tell the user what to do next.
--
information POST_INSTALL phase after with helptext;
--
-- All done
--
end product;
'''
    # type, major, minor, level, edit must be the same as in pythlib.pcsi$text
    kit_content = kit_template.format(
        type=type,
        major=major,
        minor=minor,
        level=level,
        edit=edit,
        dirs='\n '.join(all_dirs),
        files='\n '.join(all_files))
    with open('python.pcsi$desc', 'w') as file:
        file.write(kit_content)
    text_template = '''=product VSI I64VMS PYTHON {type}{major}.{minor}-{level}{edit} full
1 'PRODUCT
=prompt Python for OpenVMS is based on Python Version 3.8.2
1 'PRODUCER
=prompt VSI Software Inc.
1 'NOTICE
=prompt (C) Copyright 2021 VMS Software Inc.
1 NO_MIN_VMS
=prompt Minimum OpenVMS software version not found on this system, abort instalation
This kit requires a minimum of OpenVMS I64 V8.4.
1 NO_ODS5_DISKS
=prompt ODS-5 disk(s) not found on this system, abort installation
This kit requires an ODS-5 disk to be correctly installed in this system.
1 POST_INSTALL
=prompt Post-installation tasks are required.
To define the Python runtime at system boot time, add the
following lines to SYS$MANAGER:SYSTARTUP_VMS.COM:
    $ file := SYS$STARTUP:PYTHON$STARTUP.COM
    $ if f$search("''file'") .nes. "" then @'file'
To shutdown the Python runtime at system shutdown time, add the
following lines to SYS$MANAGER:SYSHUTDWN.COM:
    $ file := SYS$STARTUP:PYTHON$SHUTDOWN.COM
    $ if f$search("''file'") .nes. "" then @'file'
'''
    # The text template ignores the dirs/files arguments; they are passed for
    # symmetry with the kit template above.
    text_content = text_template.format(
        type=type,
        major=major,
        minor=minor,
        level=level,
        edit=edit,
        dirs='\n '.join(all_dirs),
        files='\n '.join(all_files))
    with open('python.pcsi$text', 'w') as file:
        file.write(text_content)
if __name__ == "__main__":
    import getopt
    import datetime
    # Kit version fields with their defaults; each may be overridden by the
    # matching long option (e.g. --major=3).  edit could carry a date suffix,
    # e.g. 'd' + datetime.date.today().strftime('%Y%m%d').
    values = {'type': 'F', 'major': '3', 'minor': '8', 'level': '2', 'edit': ''}
    opts, args = getopt.getopt(sys.argv[1:], '', ['type=', 'major=', 'minor=', 'level=', 'edit='])
    for opt, optarg in opts:
        key = opt.lstrip('-')
        if key in values:
            values[key] = optarg
        else:
            print('Unknown option %s' % opt)
    create_content(
        values['type'],
        values['major'],
        values['minor'],
        values['level'],
        values['edit'],
    )
| [
"getopt.getopt",
"os.path.splitext",
"os.walk",
"re.compile"
] | [((319, 343), 're.compile', 're.compile', (['"""([. ^+()])"""'], {}), "('([. ^+()])')\n", (329, 343), False, 'import re\n'), ((373, 392), 'os.walk', 'os.walk', (['python_dir'], {}), '(python_dir)\n', (380, 392), False, 'import os\n'), ((4349, 4434), 'getopt.getopt', 'getopt.getopt', (['sys.argv[1:]', '""""""', "['type=', 'major=', 'minor=', 'level=', 'edit=']"], {}), "(sys.argv[1:], '', ['type=', 'major=', 'minor=', 'level=',\n 'edit='])\n", (4362, 4434), False, 'import getopt\n'), ((711, 733), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (727, 733), False, 'import os\n')] |
from flatifylists import flatifyList
# Demo: flatten an arbitrarily nested list of ints into a single flat list.
example = [[[1,2], [3,[4,[5],6],7],8,9]]
print(flatifyList(example))
"flatifylists.flatifyList"
] | [((86, 106), 'flatifylists.flatifyList', 'flatifyList', (['example'], {}), '(example)\n', (97, 106), False, 'from flatifylists import flatifyList\n')] |
import unittest
from sys import argv
import numpy as np
import torch
from objective.ridge import Ridge, Ridge_ClosedForm, Ridge_Gradient
from .utils import Container, assert_all_close, assert_all_close_dict
def _init_ridge(cls):
    """Seed both RNGs and attach a small synthetic ridge-regression problem
    (hyperparameters, weights, design matrix, targets) to the test case.

    The seeding and the order of the ``torch.randn`` calls are fixed so the
    reference values in the tests stay reproducible.
    """
    np.random.seed(1234)
    torch.manual_seed(1234)
    n_features, n_samples, mu = 3, 5, 0.02
    cls.hparams = Container(n_features=n_features,
                           n_samples=n_samples,
                           mu=mu)
    cls.w = torch.randn(n_features, 1, requires_grad=True)
    cls.x = torch.randn(n_samples, n_features)
    cls.y = torch.randn(n_samples)
class TestObj_Ridge_ClosedForm(unittest.TestCase):
    """Regression tests for the closed-form ridge objective on a fixed
    random problem (see ``_init_ridge``)."""

    def setUp(self):
        _init_ridge(self)
        self.obj = Ridge_ClosedForm(self.hparams)

    def test_error(self):
        # Task error on the seeded problem must match the reference scalar.
        computed = self.obj.task_error(self.w, self.x, self.y)
        expected = torch.tensor(1.3251)
        assert_all_close(computed, expected, "task_error returned value")

    def test_oracle(self):
        # The oracle returns the closed-form solution and objective value.
        computed = self.obj.oracle(self.w, self.x, self.y)
        expected = {
            'sol': torch.tensor([[-0.2297], [-0.7944], [-0.5806]]),
            'obj': torch.tensor(1.3370)}
        assert_all_close_dict(expected, computed, "oracle_info")
class TestObj_Ridge_Gradient(unittest.TestCase):
    """Regression tests for the gradient-based ridge objective on a fixed
    random problem (see ``_init_ridge``)."""

    def setUp(self):
        _init_ridge(self)
        self.obj = Ridge_Gradient(self.hparams)

    def test_error(self):
        # Task error on the seeded problem must match the reference scalar.
        computed = self.obj.task_error(self.w, self.x, self.y)
        expected = torch.tensor(1.3251)
        assert_all_close(computed, expected, "task_error returned value")

    def test_oracle(self):
        # The oracle returns the gradient w.r.t. w and the objective value.
        computed = self.obj.oracle(self.w, self.x, self.y)
        expected = {
            'dw': torch.tensor([[0.7323], [1.4816], [-0.3771]]),
            'obj': torch.tensor(1.3370)}
        assert_all_close_dict(expected, computed, "oracle_info")
if __name__ == '__main__':
    # Forward the CLI args so unittest flags (e.g. -v) still work.
    unittest.main(argv=argv)
| [
"torch.manual_seed",
"objective.ridge.Ridge_Gradient",
"torch.tensor",
"numpy.random.seed",
"objective.ridge.Ridge_ClosedForm",
"unittest.main",
"torch.randn"
] | [((237, 257), 'numpy.random.seed', 'np.random.seed', (['(1234)'], {}), '(1234)\n', (251, 257), True, 'import numpy as np\n'), ((262, 285), 'torch.manual_seed', 'torch.manual_seed', (['(1234)'], {}), '(1234)\n', (279, 285), False, 'import torch\n'), ((486, 532), 'torch.randn', 'torch.randn', (['n_features', '(1)'], {'requires_grad': '(True)'}), '(n_features, 1, requires_grad=True)\n', (497, 532), False, 'import torch\n'), ((545, 579), 'torch.randn', 'torch.randn', (['n_samples', 'n_features'], {}), '(n_samples, n_features)\n', (556, 579), False, 'import torch\n'), ((592, 614), 'torch.randn', 'torch.randn', (['n_samples'], {}), '(n_samples)\n', (603, 614), False, 'import torch\n'), ((1985, 2009), 'unittest.main', 'unittest.main', ([], {'argv': 'argv'}), '(argv=argv)\n', (1998, 2009), False, 'import unittest\n'), ((734, 764), 'objective.ridge.Ridge_ClosedForm', 'Ridge_ClosedForm', (['self.hparams'], {}), '(self.hparams)\n', (750, 764), False, 'from objective.ridge import Ridge, Ridge_ClosedForm, Ridge_Gradient\n'), ((877, 897), 'torch.tensor', 'torch.tensor', (['(1.3251)'], {}), '(1.3251)\n', (889, 897), False, 'import torch\n'), ((1404, 1432), 'objective.ridge.Ridge_Gradient', 'Ridge_Gradient', (['self.hparams'], {}), '(self.hparams)\n', (1418, 1432), False, 'from objective.ridge import Ridge, Ridge_ClosedForm, Ridge_Gradient\n'), ((1545, 1565), 'torch.tensor', 'torch.tensor', (['(1.3251)'], {}), '(1.3251)\n', (1557, 1565), False, 'import torch\n'), ((1117, 1164), 'torch.tensor', 'torch.tensor', (['[[-0.2297], [-0.7944], [-0.5806]]'], {}), '([[-0.2297], [-0.7944], [-0.5806]])\n', (1129, 1164), False, 'import torch\n'), ((1185, 1204), 'torch.tensor', 'torch.tensor', (['(1.337)'], {}), '(1.337)\n', (1197, 1204), False, 'import torch\n'), ((1784, 1829), 'torch.tensor', 'torch.tensor', (['[[0.7323], [1.4816], [-0.3771]]'], {}), '([[0.7323], [1.4816], [-0.3771]])\n', (1796, 1829), False, 'import torch\n'), ((1850, 1869), 'torch.tensor', 'torch.tensor', (['(1.337)'], 
{}), '(1.337)\n', (1862, 1869), False, 'import torch\n')] |
from instanotifier.fetcher import tests
def run():
    """Entry point for django-extensions' ``manage.py runscript tests``."""
    # is executed when ran with 'manage.py runscript tests'
    tests.test_rss_fetcher()
| [
"instanotifier.fetcher.tests.test_rss_fetcher"
] | [((117, 141), 'instanotifier.fetcher.tests.test_rss_fetcher', 'tests.test_rss_fetcher', ([], {}), '()\n', (139, 141), False, 'from instanotifier.fetcher import tests\n')] |
import sqlite3
def main():
    """Insert a fixed set of sample rows into the ``alumnos`` table of
    ``alumnos.db`` and commit them.

    Assumes the database file and its 5-column ``alumnos`` table already
    exist.  The connection is always closed, even if an insert fails.
    """
    conn = sqlite3.connect("alumnos.db")
    try:
        # Sample records: (id, first name, last name, grade, age).
        alumnos = (
            (1, "Juan", "Granizado", 8, 25),
            (2, "Esteban", "Quito", 2, 19),
            (3, "Marina", "Cordoba", 10, 25),
        )
        # executemany performs all inserts in one call; the ? placeholders
        # bind parameters safely (no manual quoting).
        conn.executemany("INSERT INTO alumnos VALUES (?, ?, ?, ?, ?)", alumnos)
        # commit makes the inserted rows durable.
        conn.commit()
        print("Datos cargados!")
    finally:
        conn.close()
if __name__ == '__main__':
    main()
| [
"sqlite3.connect"
] | [((92, 121), 'sqlite3.connect', 'sqlite3.connect', (['"""alumnos.db"""'], {}), "('alumnos.db')\n", (107, 121), False, 'import sqlite3\n')] |
import sqlite3
# Open (or create) the local agenda database, make sure the table exists,
# then insert one sample contact.  The try/finally guarantees the
# connection is released even if the table creation or insert fails.
con = sqlite3.connect('agenda.db')
try:
    cursor = con.cursor()
    cursor.execute('''
create table if not exists agenda(
    nome text,
    telefone text)
''')
    # Parameter binding (?) avoids quoting issues and SQL injection.
    cursor.execute('''
insert into agenda(nome, telefone)
values(?, ?)
''', ("Tamara", "51-98175-0510"))
    con.commit()
    cursor.close()
finally:
    con.close()
| [
"sqlite3.connect"
] | [((21, 49), 'sqlite3.connect', 'sqlite3.connect', (['"""agenda.db"""'], {}), "('agenda.db')\n", (36, 49), False, 'import sqlite3\n')] |
import logging
import math
import re
import time
import dask
import numpy as np
import requests
import json
import xml.etree.ElementTree as ET
from falconcv.data.scraper.scraper import ImagesScraper
from falconcv.util import ImageUtil
# Module logger, named after this module for hierarchical configuration.
logger = logging.getLogger(__name__)
# Base URL of the Flickr REST API; every method call targets this endpoint.
FLICKR_ENDPOINT = "https://www.flickr.com/services/rest"

# List of sizes:
# url_o: Original (4520 × 3229)
# url_k: Large 2048 (2048 × 1463)
# url_h: Large 1600 (1600 × 1143)
# url_l=: Large 1024 (1024 × 732)
# url_c: Medium 800 (800 × 572)
# url_z: Medium 640 (640 × 457)
# url_m: Medium 500 (500 × 357)
# url_n: Small 320 (320 × 229)
# url_s: Small 240 (240 × 171)
# url_t: Thumbnail (100 × 71)
# url_q: Square 150 (150 × 150)
# url_sq: Square 75 (75 × 75)
class FlickrScraper(ImagesScraper):
    """Image scraper backed by the Flickr REST API.

    Searches photos by tags, pages through the results and downloads the
    images concurrently via dask delayed tasks.
    """

    def __init__(self, api_key):
        super(FlickrScraper, self).__init__()
        self.api_key = api_key  # Flickr REST API key

    def _authenticate(self):
        # The public search API only needs the api_key; nothing to do here.
        pass

    def _get_total_matches(self, q):
        """Return the number of photos matching any of the tags in ``q``.

        Returns 0 on any request or parse failure (logged, not raised).
        """
        total_matches = 0
        try:
            response = requests.get(url=FLICKR_ENDPOINT, params={
                "api_key": self.api_key,
                "method": "flickr.photos.search",
                "tags": ",".join(q),
                "tag_mode": "any",
                # "privacy_filter": "1"
                "content_type": 1,
                "media": "photos",
                "per_page": 0,  # only the total count is needed, no results
                "format": "json"
            })
            if response.status_code == 200:
                # The JSON API wraps the payload in jsonFlickrApi(...);
                # strip the wrapper before parsing.
                json_text = re.search(r'\((.*?)\)', response.text).group(1)
                json_object = json.loads(json_text)
                if json_object["stat"] == "ok":
                    total_matches = int(json_object["photos"]["total"])
        except Exception as ex:
            logger.error("Error making the request : %s", ex)
        return total_matches

    def _request_photos(self, q, count, page):
        """Fetch one page of photo metadata (XML response) for tags ``q``.

        Args:
            q: list of tags (matched with OR semantics).
            count: photos per page (Flickr caps this at 500).
            page: 1-based page index.

        Returns:
            A list of photo attribute dicts; empty on any failure.
        """
        images = []
        try:
            response = requests.get(url=FLICKR_ENDPOINT, params={
                "api_key": self.api_key,
                "method": "flickr.photos.search",
                "tags": ",".join(q),
                "tag_mode": "any",
                # "privacy_filter": "1"
                "content_type": 1,
                "media": "photos",
                "per_page": count,
                "page": page,
                # Ask for every size-specific URL field we may download.
                "extras": ",".join(["url_o", "url_k", "url_h", "url_l", "url_c", "url_m"])
            })
            if response.status_code == 200:
                try:
                    root: ET.Element = ET.fromstring(response.text)
                    if root.get("stat") == "ok":
                        for photo in root.iterfind("photos/photo"):
                            photo: ET.Element
                            images.append(photo.attrib)
                except Exception as ex:
                    logger.error("error gathering the response: %s", ex)
        except Exception as ex:
            logger.error("Error making the request : %s", ex)
        return images

    @dask.delayed
    def _fetch_image(self, image_info, sz):
        """Download one image as a delayed task.

        Returns None when the size key ``sz`` is absent or the download
        fails.
        """
        try:
            if sz in image_info:
                url = image_info[sz]
                return ImageUtil.url2img(url)
        except Exception as ex:
            # BUG FIX: was `"..." % ex` on a format-less string, which raised
            # TypeError instead of logging the error.
            logger.error("Error fetching the image: %s", ex)
        return None

    def fetch(self, q, batch_size: int = 100, timestamp=1, sz="url_m"):
        """Yield batches of downloaded images (numpy arrays) for tags ``q``.

        Args:
            q: list of tags.
            batch_size: photos per API page (must be <= 500).
            timestamp: seconds to sleep between pages (rate limiting).
            sz: which size URL to download (e.g. "url_m").
        """
        try:
            assert batch_size <= 500, "invalid count parameter"
            total_matches = self._get_total_matches(q)
            logger.debug("%s images found ", total_matches)
            number_of_pages = math.ceil(total_matches / batch_size)
            # BUG FIX: pages are 1-based, so iterate up to and including
            # number_of_pages (range(1, number_of_pages) dropped the last page).
            for page in range(1, number_of_pages + 1):
                photos = self._request_photos(q, batch_size, page)
                delayed_tasks = [self._fetch_image(img, sz) for img in photos]
                compute_result = dask.compute(*delayed_tasks)
                # Drop failed downloads (None) before yielding the batch.
                yield [img for img in compute_result if isinstance(img, np.ndarray)]
                time.sleep(timestamp)
        except Exception as ex:
            logger.error("error fetching the images: %s", ex)
| [
"logging.getLogger",
"json.loads",
"math.ceil",
"dask.compute",
"time.sleep",
"xml.etree.ElementTree.fromstring",
"falconcv.util.ImageUtil.url2img",
"re.search"
] | [((244, 271), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (261, 271), False, 'import logging\n'), ((3765, 3802), 'math.ceil', 'math.ceil', (['(total_matches / batch_size)'], {}), '(total_matches / batch_size)\n', (3774, 3802), False, 'import math\n'), ((1565, 1586), 'json.loads', 'json.loads', (['json_text'], {}), '(json_text)\n', (1575, 1586), False, 'import json\n'), ((3327, 3349), 'falconcv.util.ImageUtil.url2img', 'ImageUtil.url2img', (['url'], {}), '(url)\n', (3344, 3349), False, 'from falconcv.util import ImageUtil\n'), ((4044, 4072), 'dask.compute', 'dask.compute', (['*delayed_tasks'], {}), '(*delayed_tasks)\n', (4056, 4072), False, 'import dask\n'), ((4174, 4195), 'time.sleep', 'time.sleep', (['timestamp'], {}), '(timestamp)\n', (4184, 4195), False, 'import time\n'), ((2635, 2663), 'xml.etree.ElementTree.fromstring', 'ET.fromstring', (['response.text'], {}), '(response.text)\n', (2648, 2663), True, 'import xml.etree.ElementTree as ET\n'), ((1487, 1526), 're.search', 're.search', (['"""\\\\((.*?)\\\\)"""', 'response.text'], {}), "('\\\\((.*?)\\\\)', response.text)\n", (1496, 1526), False, 'import re\n')] |
# pip install -U pywinauto
from pywinauto.application import Application
import subprocess
import time
def _click_or_dismiss(dlg, wait_hidden_first=False):
    """Best-effort interaction with a dialog control.

    Tries to click ``dlg``; GUI automation is inherently racy, so any
    failure (control absent, not ready, ...) falls back to closing the
    dialog so the script can continue.
    """
    try:
        if wait_hidden_first:
            # Give the control up to 2s to leave the 'visible' state first.
            dlg.wait_not('visible', timeout=2)
        dlg.click()
    except Exception:
        dlg.close()


# Remove the scheduled task, then start Chrome with renderer accessibility
# forced on so pywinauto's UIA backend can inspect its UI tree.
subprocess.run('SCHTASKS /DELETE /TN BuildTasks\\Sites /f')
app = Application(backend='uia')
app.start('C:\\Program Files\\Google\\Chrome\\Application\\chrome.exe --force-renderer-accessibility ')
window = app.top_window()
# Allow the registry installed extensions to load...
time.sleep(45)
ch_window = window.child_window(title="Address and search bar", control_type="Edit")
ch_window.type_keys('^a')
ch_window.type_keys('{BACKSPACE}chrome://extensions/{ENTER}')
time.sleep(3)
# Enable Honey (or disable google drive offline)
_click_or_dismiss(window.button6)
# Enable Soccer wallpapers (or Soccer wallpapers)
_click_or_dismiss(window.button9)
# Enable Soccer wallpapers (if it exists)
_click_or_dismiss(window.button12)
time.sleep(5)
ch_window.type_keys('^a')
ch_window.type_keys('{BACKSPACE}https://thepiratebay.org{ENTER}')
time.sleep(10)
# Allow notifications
_click_or_dismiss(window.AllowButton, wait_hidden_first=True)
ch_window.type_keys('^a')
ch_window.type_keys('{BACKSPACE}{BACKSPACE}https://yts.mx{ENTER}')
time.sleep(3)
window.close()
| [
"pywinauto.application.Application",
"subprocess.run",
"time.sleep"
] | [((103, 162), 'subprocess.run', 'subprocess.run', (['"""SCHTASKS /DELETE /TN BuildTasks\\\\Sites /f"""'], {}), "('SCHTASKS /DELETE /TN BuildTasks\\\\Sites /f')\n", (117, 162), False, 'import subprocess\n'), ((169, 195), 'pywinauto.application.Application', 'Application', ([], {'backend': '"""uia"""'}), "(backend='uia')\n", (180, 195), False, 'from pywinauto.application import Application\n'), ((379, 393), 'time.sleep', 'time.sleep', (['(45)'], {}), '(45)\n', (389, 393), False, 'import time\n'), ((567, 580), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (577, 580), False, 'import time\n'), ((945, 958), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (955, 958), False, 'import time\n'), ((1051, 1065), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (1061, 1065), False, 'import time\n'), ((1297, 1310), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1307, 1310), False, 'import time\n')] |
# ----------------------------------------------------------------------
# Migrate SLAProbe to workflow
# ----------------------------------------------------------------------
# Copyright (C) 2007-2021 The NOC Project
# See LICENSE for details
# ----------------------------------------------------------------------
# Third-party modules
from pymongo import UpdateMany
from bson import ObjectId
# NOC modules
from noc.core.migration.base import BaseMigration
class Migration(BaseMigration):
    """Attach the default workflow state to SLA probes and the default
    workflow to SLA profiles."""

    depends_on = [("wf", "0005_slaprobe_default")]

    def migrate(self):
        # Put every SLA probe into the default "Planned" state.
        planned_state = ObjectId("607a7e1d3d18d4fb3c12032a")
        self.mongo_db["noc.sla_probes"].bulk_write(
            [UpdateMany({}, {"$set": {"state": planned_state}})]
        )
        # Point every SLA profile at the default workflow.
        default_workflow = ObjectId("607a7dddff3a857a47600b9b")
        self.mongo_db["noc.sla_profiles"].bulk_write(
            [UpdateMany({}, {"$set": {"workflow": default_workflow}})]
        )
| [
"bson.ObjectId"
] | [((737, 773), 'bson.ObjectId', 'ObjectId', (['"""607a7e1d3d18d4fb3c12032a"""'], {}), "('607a7e1d3d18d4fb3c12032a')\n", (745, 773), False, 'from bson import ObjectId\n'), ((941, 977), 'bson.ObjectId', 'ObjectId', (['"""607a7dddff3a857a47600b9b"""'], {}), "('607a7dddff3a857a47600b9b')\n", (949, 977), False, 'from bson import ObjectId\n')] |
from ast import literal_eval
from collections import Counter
from typing import Dict, Optional
from anndata import AnnData
from spatialtis.config import Config, analysis_list
from ...utils import doc
from ..base import graph_position_interactive, graph_position_static
from .utils import query_df
@doc
def community_map(
    data: AnnData,
    roi: Dict,
    min_cells: int = 10,
    use: str = "static",
    community_key: Optional[str] = None,
    centroid_key: Optional[str] = None,
    neighbors_key: Optional[str] = None,
    **plot_options,
):
    """Visualize cell communities in ROI
    Args:
        data: {adata_plotting}
        roi: {roi}
        min_cells: Show communities contain more than a number of cells
        use: "static" or "interactive" (Default: "static")
        community_key: {community_key}
        centroid_key: {centroid_key}
        neighbors_key: {neighbors_key}
        **plot_options: Pass to :class:`spatialtis._plotting.base.graph_position_static` or
            :class:`spatialtis._plotting.base.graph_position_interactive`
    {pyecharts_tips}
    """
    # Fall back to the keys recorded by Config / the last community analysis run.
    if community_key is None:
        community_key = analysis_list["cell_community"].last_used_key
    if centroid_key is None:
        centroid_key = Config.centroid_key
    if neighbors_key is None:
        neighbors_key = Config.NEIGHBORS_KEY
    # Restrict the obs table to the requested ROI.
    df = query_df(data.obs, roi)
    nodes_types = df[community_key].tolist()
    # Keep only communities with at least `min_cells` members.
    commus = []
    for commu, count in Counter(nodes_types).items():
        if count >= min_cells:
            commus.append(commu)
    df = df.reset_index(drop=True)
    xdf = df[df[community_key].isin(commus)]
    # reset_index() keeps the pre-filter positions in an "index" column,
    # which is used below to resolve neighbor references.
    xdf = xdf.reset_index()
    if len(xdf) == 0:
        raise ValueError("Seems like there is no cells left to be drawn")
    # Stored centroids/neighbors may be stringified lists; literal_eval restores them.
    need_eval_nodes = isinstance(xdf[centroid_key][0], str)
    need_eval_neighs = isinstance(xdf[neighbors_key][0], str)
    if need_eval_nodes:
        nodes = [literal_eval(n) for n in xdf[centroid_key]]
    else:
        nodes = [n for n in xdf[centroid_key]]
    if need_eval_neighs:
        neighs = [literal_eval(n) for n in xdf[neighbors_key]]
    else:
        neighs = [n for n in xdf[neighbors_key]]
    nodes_types = xdf[community_key]
    # Build edges only between neighboring cells of the same community.
    edges = []
    edges_types = []
    for i, n in zip(xdf.index, neighs):
        for x in n:
            # Map the neighbor's original index to its filtered position;
            # neighbors removed by the min_cells filter yield an empty match.
            new_x = xdf[xdf["index"] == x].index
            if len(new_x) == 1:
                new_x = new_x[0]
            if nodes_types[i] == nodes_types[new_x]:
                edges.append((i, new_x))
                edges_types.append(nodes_types[i])
    # Encode the ROI selection into the saved file name.
    plot_options["saved_name"] = "community_map_" + ",".join(
        [f"{k}={v}" for k, v in roi.items()]
    )
    if use == "interactive":
        return graph_position_interactive(
            nodes, edges, edges_types=edges_types, **plot_options
        )
    else:
        return graph_position_static(
            nodes, edges, edges_types=edges_types, **plot_options
        )
| [
"collections.Counter",
"ast.literal_eval"
] | [((1467, 1487), 'collections.Counter', 'Counter', (['nodes_types'], {}), '(nodes_types)\n', (1474, 1487), False, 'from collections import Counter\n'), ((1929, 1944), 'ast.literal_eval', 'literal_eval', (['n'], {}), '(n)\n', (1941, 1944), False, 'from ast import literal_eval\n'), ((2073, 2088), 'ast.literal_eval', 'literal_eval', (['n'], {}), '(n)\n', (2085, 2088), False, 'from ast import literal_eval\n')] |
"""
Train shadow net script
"""
import argparse
import functools
import itertools
import os
import os.path as ops
import sys
import time
import numpy as np
import tensorflow as tf
import pprint
import shadownet
import six
from six.moves import xrange # pylint: disable=redefined-builtin
sys.path.append('/data/')
from crnn_model import crnn_model
from local_utils import data_utils, log_utils, tensorboard_vis_summary
from global_configuration import config
from uaitrain.arch.tensorflow import uflag
from typing import List
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.framework import device as pydev
from tensorflow.python.training import device_setter
# Command-line flags defined through tf.app.flags.
# NOTE(review): the actual entry point below is driven by argparse
# (main(**vars(args))); FLAGS appears unused — confirm before removing.
tf.app.flags.DEFINE_string('dataset_dir','/data/data/tfrecords','data path')
tf.app.flags.DEFINE_string('weights_path',None,'weight path')
FLAGS = tf.app.flags.FLAGS
# Module-level logger configured by the project's logging utilities.
logger = log_utils.init_logger()
def local_device_setter(num_devices=1,
                        ps_device_type='cpu',
                        worker_device='/cpu:0',
                        ps_ops=None,
                        ps_strategy=None):
    """Build a device chooser that pins variable ops to parameter-server devices.

    Args:
        num_devices: number of parameter-server devices to round-robin over.
        ps_device_type: device type hosting the variables ('cpu' or 'gpu').
        worker_device: device used for every non-variable op.
        ps_ops: op type names treated as variables; defaults to the standard
            TF variable op types.
        ps_strategy: callable mapping an op to a ps device index; defaults to
            round-robin over `num_devices`.

    Returns:
        A function usable with `tf.device(...)` that routes variable ops to
        the parameter-server devices and everything else to `worker_device`.

    Raises:
        TypeError: if `ps_strategy` is provided but not callable.
    """
    if ps_ops is None:  # fixed: was `== None`; identity check is the correct idiom
        ps_ops = ['Variable', 'VariableV2', 'VarHandleOp']
    if ps_strategy is None:
        ps_strategy = device_setter._RoundRobinStrategy(num_devices)
    if not six.callable(ps_strategy):
        raise TypeError("ps_strategy must be callable")
    def _local_device_chooser(op):
        # Merge the op's own (possibly partial) device spec with the
        # ps/worker device chosen for it.
        current_device = pydev.DeviceSpec.from_string(op.device or "")
        node_def = op if isinstance(op, node_def_pb2.NodeDef) else op.node_def
        if node_def.op in ps_ops:
            ps_device_spec = pydev.DeviceSpec.from_string(
                '/{}:{}'.format(ps_device_type, ps_strategy(op)))
            ps_device_spec.merge_from(current_device)
            return ps_device_spec.to_string()
        else:
            worker_device_spec = pydev.DeviceSpec.from_string(worker_device or "")
            worker_device_spec.merge_from(current_device)
            return worker_device_spec.to_string()
    return _local_device_chooser
def get_words_from_chars(characters_list: List[str], sequence_lengths: List[int], name='chars_conversion'):
    """Re-assemble per-character tensors into word strings.

    `characters_list` is a flat run of characters; `sequence_lengths` gives
    the length of each word in that run. Returns a string tensor with one
    joined word per sequence.
    """
    with tf.name_scope(name=name):
        def _join_span(span):
            # Concatenate the characters belonging to a single sequence.
            return tf.reduce_join(characters_list[span[0]:span[1]])

        def _many_words():
            # Turn cumulative lengths into (start, end) spans and join
            # each span independently.
            ends = tf.cumsum(sequence_lengths)
            starts = tf.concat([[0], ends[:-1]], axis=0)
            spans = tf.cast(tf.stack([starts, ends], axis=1), dtype=tf.int32)
            return tf.map_fn(_join_span, spans, dtype=tf.string)

        def _single_word():
            # Only one sequence: join the whole character run.
            return tf.reduce_join(characters_list, keep_dims=True)

        has_many = tf.shape(sequence_lengths)[0] > 1
        return tf.cond(has_many, true_fn=_many_words, false_fn=_single_word)
def get_shadownet_fn(num_gpus, variable_strategy, num_workers):
    """Returns a function that will build shadownet model."""
    def _shadownet_fun(features, labels, mode, params):
        """Estimator model_fn building a multi-tower CRNN training graph.

        Args:
            features: list of per-tower image batches (one entry per GPU).
            labels: list of per-tower sparse label tensors.
            mode: a tf.estimator.ModeKeys value.
            params: hyper-parameters (batch_size, l_size, learning_rate,
                decay_steps, decay_rate, sync, is_chief, ...).

        Returns:
            A tf.estimator.EstimatorSpec carrying loss, train_op and hooks.
        """
        is_training = (mode == tf.estimator.ModeKeys.TRAIN)
        tower_features = features
        tower_labels = labels
        tower_losses = []
        tower_gradvars = []
        tower_preds = []
        tower_tensor_dict = []
        tower_seq_len = []
        num_devices = num_gpus
        device_type = 'gpu'
        # The global batch is split evenly across the towers.
        tower_batch_size = int(params.batch_size / num_devices)
        # Build one model replica ("tower") per GPU; weights are shared via
        # the reused 'shadownet' variable scope.
        for i in range(num_devices):
            worker_device = '/{}:{}'.format(device_type, i)
            device_setter = local_device_setter(worker_device=worker_device)
            with tf.variable_scope('shadownet', reuse=bool(i != 0)):
                with tf.name_scope('tower_%d' % i) as name_scope:
                    with tf.device(device_setter):
                        loss, gradvars, preds, tensor_dict, seq_len = _tower_fn(
                            is_training, tower_features[i], tower_labels[i], tower_batch_size, params.l_size)
                        tower_losses.append(loss)
                        tower_gradvars.append(gradvars)
                        tower_preds.append(preds)
                        tower_tensor_dict.append(tensor_dict)
                        tower_seq_len.append(seq_len)
                        if i == 0:
                            # Only trigger batch_norm moving mean and variance update from
                            # the 1st tower. Ideally, we should grab the updates from all
                            # towers but these stats accumulate extremely fast so we can
                            # ignore the other stats from the other towers without
                            # significant detriment.
                            update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS,
                                                           name_scope)
        # Now compute global loss and gradients.
        gradvars = []
        with tf.name_scope('gradient_averaging'):
            all_grads = {}
            for grad, var in itertools.chain(*tower_gradvars):
                if grad is not None:
                    all_grads.setdefault(var, []).append(grad)
            for var, grads in six.iteritems(all_grads):
                # Average gradients on the same device as the variables
                with tf.device(var.device):
                    if len(grads) == 1:
                        avg_grad = grads[0]
                    else:
                        avg_grad = tf.multiply(tf.add_n(grads), 1. / len(grads))
                gradvars.append((avg_grad, var))
        # Device that runs the ops to apply global gradient updates.
        consolidation_device = '/gpu:0' if variable_strategy == 'GPU' else '/cpu:0'
        with tf.device(consolidation_device):
            global_step = tf.train.get_global_step()
            starter_learning_rate = params.learning_rate
            # Staircase exponential decay of the learning rate.
            learning_rate = tf.train.exponential_decay(starter_learning_rate, global_step,
                                                       params.decay_steps, params.decay_rate,
                                                       staircase=True)
            loss = tf.reduce_mean(tower_losses, name='loss')
            # Beam-search decode the first tower's output so training can be
            # monitored via edit distance against the ground-truth labels.
            decoded, log_prob = tf.nn.ctc_beam_search_decoder(tower_preds[0],
                                                          tower_seq_len[0]*np.ones(tower_batch_size),
                                                          merge_repeated=False)
            sequence_dist = tf.reduce_mean(tf.edit_distance(tf.cast(decoded[0], tf.int32), tower_labels[0]))
            # Histograms of predicted vs. ground-truth label lengths.
            sequence_lengths_pred = tf.bincount(tf.cast(decoded[0].indices[:, 0], tf.int32),
                                              minlength=tf.shape(tower_labels[0])[1])
            # NOTE(review): uses `labels[0]` here but `tower_labels[0]` above;
            # both alias the same list, so behaviour is identical.
            label_lengths_pred = tf.bincount(tf.cast(labels[0].indices[:, 0], tf.int32),
                                           minlength=tf.shape(tower_labels[0])[1])
            tensors_to_log = {'global_step': global_step, 'learning_rate': learning_rate, 'loss': loss}
            dist_to_log = {'global_step': global_step,
                           'learning_rate': learning_rate,
                           'loss': loss,
                           'train_seq_dist': sequence_dist,
                           'sequence_lengths_pred': sequence_lengths_pred,
                           'label_lengths_pred': label_lengths_pred}
            # Cheap scalars every 10 steps; the heavier distance/length
            # diagnostics only every 1000 steps.
            logging_hook = tf.train.LoggingTensorHook(
                tensors=tensors_to_log, every_n_iter=10)
            dist_hook = tf.train.LoggingTensorHook(
                tensors=dist_to_log, every_n_iter=1000)
            train_hooks = [logging_hook, dist_hook]
            seq_dist_sum = tf.summary.scalar(name='Seq_Dist', tensor=sequence_dist)
            lr_sum = tf.summary.scalar(name='Learning_rate', tensor=learning_rate)
            summaries = [seq_dist_sum, lr_sum]
            summary_hook = tf.train.SummarySaverHook(
                save_steps=1000,
                output_dir='/data/output/',
                summary_op=summaries)
            optimizer = tf.train.AdadeltaOptimizer(learning_rate=learning_rate)
            if params.sync:
                # Distributed synchronous training: aggregate gradients from
                # all workers before applying a single update.
                optimizer = tf.train.SyncReplicasOptimizer(
                    optimizer, replicas_to_aggregate=num_workers)
                sync_replicas_hook = optimizer.make_session_run_hook(params.is_chief)
                train_hooks.append(sync_replicas_hook)
            # Create single grouped train op
            train_op = [
                optimizer.apply_gradients(
                    gradvars, global_step=tf.train.get_global_step())
            ]
            train_op.extend(update_ops)
            train_op = tf.group(*train_op)
            return tf.estimator.EstimatorSpec(
                mode=mode,
                loss=loss,
                train_op=train_op,
                training_hooks=train_hooks)
    return _shadownet_fun
def _tower_fn(is_training, feature, label, batch_size, l_size):
    """Build the per-tower CRNN graph and return loss, grads and outputs.

    Args:
        is_training: training-mode flag. NOTE(review): currently unused —
            the CRNN is always constructed with phase='Train' here; kept for
            interface compatibility with the caller.
        feature: image batch tensor for this tower.
        label: sparse label tensor for this tower.
        batch_size: per-tower batch size.
        l_size: sequence length the CNN feeds into the LSTM.

    Returns:
        Tuple of (cost, iterable of (grad, var) pairs, raw network output,
        tensor dict, sequence length).
    """
    seq_len = l_size
    # Renamed from `shadownet`: the original local shadowed the
    # module-level `import shadownet`, which is confusing and error-prone.
    crnn_net = crnn_model.ShadowNet(phase='Train', hidden_nums=256, layers_nums=2, seq_length=seq_len,
                                    num_classes=config.cfg.TRAIN.CLASSES_NUMS, rnn_cell_type='lstm')
    # Resize to a fixed height of 32; width scales with the sequence length.
    imgs = tf.image.resize_images(feature, (32, l_size * 4), method=0)
    input_imgs = tf.cast(x=imgs, dtype=tf.float32)
    with tf.variable_scope('shadow', reuse=False):
        net_out, tensor_dict = crnn_net.build_shadownet(inputdata=input_imgs)
    # CTC loss; every sample in the tower batch uses the same sequence length.
    cost = tf.reduce_mean(tf.nn.ctc_loss(labels=label, inputs=net_out,
                       sequence_length=seq_len * np.ones(batch_size)))
    # LSTM L2 regularization loss.
    lstm_tv = tf.trainable_variables(scope='LSTMLayers')
    r_lambda = 0.001
    regularization_cost = r_lambda * tf.reduce_sum([tf.nn.l2_loss(v) for v in lstm_tv])
    cost = cost + regularization_cost
    model_params = tf.trainable_variables()
    tower_grad = tf.gradients(cost, model_params)
    return cost, zip(tower_grad, model_params), net_out, tensor_dict, seq_len
def input_fn(data_dir,
             subset,
             num_shards,
             batch_size,
             use_distortion_for_training=True):
    """Build the input pipeline and shard it across the towers.

    Args:
        data_dir: directory where the dataset's TFRecords are located.
        subset: one of 'train', 'validate' and 'eval'.
        num_shards: number of towers taking part in data-parallel training.
        batch_size: total batch size, divided over the shards.
        use_distortion_for_training: apply image distortions when training.

    Returns:
        A (feature_shards, label_shards) pair with one entry per tower.
    """
    with tf.device('/cpu:0'):
        # Distortions only ever apply to the training subset.
        distort = subset == 'train' and use_distortion_for_training
        images, sparse_labels = shadownet.ShadownetDataSet(
            data_dir, subset, distort).make_batch(batch_size)
        if num_shards <= 1:
            # No GPU available or only 1 GPU.
            num_shards = 1
        return (tf.split(images, num_shards),
                tf.sparse_split(sp_input=sparse_labels, num_split=num_shards, axis=0))
def get_experiment_fn(data_dir,
                      num_gpus,
                      use_distortion_for_training=True):
    """Return an experiment_fn for tf.contrib.learn.learn_runner."""
    def _experiment_fn(run_config, hparams):
        """Assemble the Estimator and wrap it in an Experiment."""
        # Train and eval inputs share the same sharding layout.
        train_input_fn = functools.partial(
            input_fn,
            data_dir,
            subset='train',
            num_shards=num_gpus,
            batch_size=hparams.batch_size,
            use_distortion_for_training=use_distortion_for_training)
        eval_input_fn = functools.partial(
            input_fn,
            data_dir,
            subset='validation',
            batch_size=hparams.batch_size,
            num_shards=num_gpus)
        # Evaluate on a fixed budget of 2048 samples per evaluation.
        eval_steps = 2048 // hparams.batch_size
        # Variables always live on the CPU ('CPU' strategy).
        estimator = tf.estimator.Estimator(
            model_fn=get_shadownet_fn(num_gpus,
                                      'CPU',
                                      run_config.num_worker_replicas or 1),
            config=run_config,
            params=hparams)
        return tf.contrib.learn.Experiment(
            estimator,
            train_input_fn=train_input_fn,
            eval_input_fn=eval_input_fn,
            train_steps=hparams.steps,
            eval_steps=eval_steps,
            min_eval_frequency=100)
    return _experiment_fn
def main(num_gpus, log_device_placement, num_intra_threads, data_dir, output_dir, tfrecord_dir, **hparams):
    """Entry point: configure the TF session and launch the experiment.

    Args:
        num_gpus: number of GPUs (towers) to train on.
        log_device_placement: whether TF logs op/device placement.
        num_intra_threads: intra-op parallelism thread count (0 = auto).
        data_dir: base directory of the input data.
        output_dir: directory for checkpoints and summaries.
        tfrecord_dir: subdirectory of `data_dir` holding the TFRecords.
        **hparams: remaining CLI options forwarded as tf HParams
            (batch_size, steps, learning_rate, decay_*, sync, ...).
    """
    # The env variable is on deprecation path, default is set to off.
    os.environ['TF_SYNC_ON_FINISH'] = '0'
    os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1'
    data_dir = os.path.join(data_dir, tfrecord_dir)
    # Session configuration.
    sess_config = tf.ConfigProto(
        allow_soft_placement=True,
        log_device_placement=log_device_placement,
        intra_op_parallelism_threads=num_intra_threads,
        gpu_options=tf.GPUOptions(force_gpu_compatible=True))
    # Renamed from `config`: the original local shadowed the module-level
    # `from global_configuration import config` import.
    run_config = tf.contrib.learn.RunConfig(session_config=sess_config, model_dir=output_dir)
    tf.contrib.learn.learn_runner.run(
        get_experiment_fn(data_dir, num_gpus),
        run_config=run_config,
        hparams=tf.contrib.training.HParams(
            is_chief=run_config.is_chief,
            **hparams))
if __name__ == '__main__':
    # Historical entry points kept for reference; the script is now driven
    # entirely by argparse + main().
    # init args
    # args = init_args()
    #if not ops.exists(args.dataset_dir):
    #    raise ValueError('{:s} doesn\'t exist'.format(args.dataset_dir))
    #train_shadownet(args.dataset_dir, args.weights_path)
    # if args.weights_path is not None and 'two_stage' in args.weights_path:
    #     train_shadownet(args.dataset_dir, args.weights_path, restore_from_cnn_subnet_work=False)
    # elif args.weights_path is not None and 'cnnsub' in args.weights_path:
    #     train_shadownet(args.dataset_dir, args.weights_path, restore_from_cnn_subnet_work=True)
    # else:
    #     train_shadownet(args.dataset_dir)
    # Command-line interface. Every parsed option is forwarded to main()
    # via **vars(args); options main() does not name explicitly end up in
    # its **hparams (and ultimately in tf.contrib.training.HParams).
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--num_gpus',
        type=int,
        default=1,
        help='UAI-SDK related. The number of gpus used.')
    parser.add_argument(
        '--log-device-placement',
        action='store_true',
        default=False,
        help='Whether to log device placement.')
    parser.add_argument(
        '--num-intra-threads',
        type=int,
        default=0,
        help="""\
        Number of threads to use for intra-op parallelism. When training on CPU
        set to 0 to have the system pick the appropriate number or alternatively
        set it to the number of physical CPU cores.\
        """)
    parser.add_argument(
        '--num-inter-threads',
        type=int,
        default=0,
        help="""\
        Number of threads to use for inter-op parallelism. If set to 0, the
        system will pick an appropriate number.\
        """)
    parser.add_argument(
        '--sync',
        action='store_true',
        default=False,
        help="""\
        If present when running in a distributed environment will run on sync mode.\
        """)
    parser.add_argument(
        '--work_dir',
        type=str,
        default='/data/',
        help='UAI SDK related.')
    parser.add_argument(
        '--data_dir',
        type=str,
        required=True,
        help='UAI-SDK related. The directory where the CIFAR-10 input data is stored.')
    parser.add_argument(
        '--output_dir',
        type=str,
        required=True,
        help='UAI-SDK related. The directory where the model will be stored.')
    parser.add_argument(
        '--log_dir',
        type=str,
        default='/data/data/',
        help='UAI SDK related.')
    parser.add_argument(
        '--l_size',
        type=int,
        default=10,
        help="""l_batch_label, how many labels CNN net work will output into LSTM""")
    # Optimizer / schedule hyper-parameters; forwarded into HParams.
    parser.add_argument(
        '--learning_rate',
        type=float,
        default=0.1)
    parser.add_argument(
        '--decay_rate',
        type=float,
        default=0.1)
    parser.add_argument(
        '--decay_steps',
        type=int,
        default=40000)
    parser.add_argument(
        '--steps',
        type=int,
        default=200000)
    parser.add_argument(
        '--batch_size',
        type=int,
        default=512)
    parser.add_argument(
        '--tfrecord_dir',
        type=str,
        default='tfrecords')
    args = parser.parse_args()
    main(**vars(args))
print('Done') | [
"itertools.chain",
"tensorflow.image.resize_images",
"tensorflow.shape",
"tensorflow.split",
"tensorflow.estimator.EstimatorSpec",
"tensorflow.gradients",
"tensorflow.group",
"local_utils.log_utils.init_logger",
"tensorflow.reduce_mean",
"tensorflow.cast",
"sys.path.append",
"shadownet.build_s... | [((291, 316), 'sys.path.append', 'sys.path.append', (['"""/data/"""'], {}), "('/data/')\n", (306, 316), False, 'import sys\n'), ((692, 770), 'tensorflow.app.flags.DEFINE_string', 'tf.app.flags.DEFINE_string', (['"""dataset_dir"""', '"""/data/data/tfrecords"""', '"""data path"""'], {}), "('dataset_dir', '/data/data/tfrecords', 'data path')\n", (718, 770), True, 'import tensorflow as tf\n'), ((769, 832), 'tensorflow.app.flags.DEFINE_string', 'tf.app.flags.DEFINE_string', (['"""weights_path"""', 'None', '"""weight path"""'], {}), "('weights_path', None, 'weight path')\n", (795, 832), True, 'import tensorflow as tf\n'), ((868, 891), 'local_utils.log_utils.init_logger', 'log_utils.init_logger', ([], {}), '()\n', (889, 891), False, 'from local_utils import data_utils, log_utils, tensorboard_vis_summary\n'), ((9093, 9253), 'crnn_model.crnn_model.ShadowNet', 'crnn_model.ShadowNet', ([], {'phase': '"""Train"""', 'hidden_nums': '(256)', 'layers_nums': '(2)', 'seq_length': 'seq_len', 'num_classes': 'config.cfg.TRAIN.CLASSES_NUMS', 'rnn_cell_type': '"""lstm"""'}), "(phase='Train', hidden_nums=256, layers_nums=2,\n seq_length=seq_len, num_classes=config.cfg.TRAIN.CLASSES_NUMS,\n rnn_cell_type='lstm')\n", (9113, 9253), False, 'from crnn_model import crnn_model\n'), ((9413, 9472), 'tensorflow.image.resize_images', 'tf.image.resize_images', (['feature', '(32, l_size * 4)'], {'method': '(0)'}), '(feature, (32, l_size * 4), method=0)\n', (9435, 9472), True, 'import tensorflow as tf\n'), ((9579, 9612), 'tensorflow.cast', 'tf.cast', ([], {'x': 'imgs', 'dtype': 'tf.float32'}), '(x=imgs, dtype=tf.float32)\n', (9586, 9612), True, 'import tensorflow as tf\n'), ((10225, 10267), 'tensorflow.trainable_variables', 'tf.trainable_variables', ([], {'scope': '"""LSTMLayers"""'}), "(scope='LSTMLayers')\n", (10247, 10267), True, 'import tensorflow as tf\n'), ((10736, 10760), 'tensorflow.trainable_variables', 'tf.trainable_variables', ([], {}), '()\n', (10758, 10760), True, 
'import tensorflow as tf\n'), ((10778, 10810), 'tensorflow.gradients', 'tf.gradients', (['cost', 'model_params'], {}), '(cost, model_params)\n', (10790, 10810), True, 'import tensorflow as tf\n'), ((13845, 13881), 'os.path.join', 'os.path.join', (['data_dir', 'tfrecord_dir'], {}), '(data_dir, tfrecord_dir)\n', (13857, 13881), False, 'import os\n'), ((14163, 14239), 'tensorflow.contrib.learn.RunConfig', 'tf.contrib.learn.RunConfig', ([], {'session_config': 'sess_config', 'model_dir': 'output_dir'}), '(session_config=sess_config, model_dir=output_dir)\n', (14189, 14239), True, 'import tensorflow as tf\n'), ((15126, 15151), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (15149, 15151), False, 'import argparse\n'), ((1239, 1285), 'tensorflow.python.training.device_setter._RoundRobinStrategy', 'device_setter._RoundRobinStrategy', (['num_devices'], {}), '(num_devices)\n', (1272, 1285), False, 'from tensorflow.python.training import device_setter\n'), ((1297, 1322), 'six.callable', 'six.callable', (['ps_strategy'], {}), '(ps_strategy)\n', (1309, 1322), False, 'import six\n'), ((1441, 1486), 'tensorflow.python.framework.device.DeviceSpec.from_string', 'pydev.DeviceSpec.from_string', (["(op.device or '')"], {}), "(op.device or '')\n", (1469, 1486), True, 'from tensorflow.python.framework import device as pydev\n'), ((2185, 2209), 'tensorflow.name_scope', 'tf.name_scope', ([], {'name': 'name'}), '(name=name)\n', (2198, 2209), True, 'import tensorflow as tf\n'), ((8822, 8921), 'tensorflow.estimator.EstimatorSpec', 'tf.estimator.EstimatorSpec', ([], {'mode': 'mode', 'loss': 'loss', 'train_op': 'train_op', 'training_hooks': 'train_hooks'}), '(mode=mode, loss=loss, train_op=train_op,\n training_hooks=train_hooks)\n', (8848, 8921), True, 'import tensorflow as tf\n'), ((9736, 9776), 'tensorflow.variable_scope', 'tf.variable_scope', (['"""shadow"""'], {'reuse': '(False)'}), "('shadow', reuse=False)\n", (9753, 9776), True, 'import tensorflow as tf\n'), 
((9809, 9856), 'shadownet.build_shadownet', 'shadownet.build_shadownet', ([], {'inputdata': 'input_imgs'}), '(inputdata=input_imgs)\n', (9834, 9856), False, 'import shadownet\n'), ((11593, 11612), 'tensorflow.device', 'tf.device', (['"""/cpu:0"""'], {}), "('/cpu:0')\n", (11602, 11612), True, 'import tensorflow as tf\n'), ((11707, 11767), 'shadownet.ShadownetDataSet', 'shadownet.ShadownetDataSet', (['data_dir', 'subset', 'use_distortion'], {}), '(data_dir, subset, use_distortion)\n', (11733, 11767), False, 'import shadownet\n'), ((11961, 11992), 'tensorflow.split', 'tf.split', (['inputdata', 'num_shards'], {}), '(inputdata, num_shards)\n', (11969, 11992), True, 'import tensorflow as tf\n'), ((12016, 12084), 'tensorflow.sparse_split', 'tf.sparse_split', ([], {'sp_input': 'input_labels', 'num_split': 'num_shards', 'axis': '(0)'}), '(sp_input=input_labels, num_split=num_shards, axis=0)\n', (12031, 12084), True, 'import tensorflow as tf\n'), ((12386, 12557), 'functools.partial', 'functools.partial', (['input_fn', 'data_dir'], {'subset': '"""train"""', 'num_shards': 'num_gpus', 'batch_size': 'hparams.batch_size', 'use_distortion_for_training': 'use_distortion_for_training'}), "(input_fn, data_dir, subset='train', num_shards=num_gpus,\n batch_size=hparams.batch_size, use_distortion_for_training=\n use_distortion_for_training)\n", (12403, 12557), False, 'import functools\n'), ((12647, 12762), 'functools.partial', 'functools.partial', (['input_fn', 'data_dir'], {'subset': '"""validation"""', 'batch_size': 'hparams.batch_size', 'num_shards': 'num_gpus'}), "(input_fn, data_dir, subset='validation', batch_size=\n hparams.batch_size, num_shards=num_gpus)\n", (12664, 12762), False, 'import functools\n'), ((13261, 13441), 'tensorflow.contrib.learn.Experiment', 'tf.contrib.learn.Experiment', (['classifier'], {'train_input_fn': 'train_input_fn', 'eval_input_fn': 'eval_input_fn', 'train_steps': 'train_steps', 'eval_steps': 'eval_steps', 'min_eval_frequency': '(100)'}), '(classifier, 
train_input_fn=train_input_fn,\n eval_input_fn=eval_input_fn, train_steps=train_steps, eval_steps=\n eval_steps, min_eval_frequency=100)\n', (13288, 13441), True, 'import tensorflow as tf\n'), ((1876, 1925), 'tensorflow.python.framework.device.DeviceSpec.from_string', 'pydev.DeviceSpec.from_string', (["(worker_device or '')"], {}), "(worker_device or '')\n", (1904, 1925), True, 'from tensorflow.python.framework import device as pydev\n'), ((2270, 2322), 'tensorflow.reduce_join', 'tf.reduce_join', (['characters_list[coords[0]:coords[1]]'], {}), '(characters_list[coords[0]:coords[1]])\n', (2284, 2322), True, 'import tensorflow as tf\n'), ((2389, 2416), 'tensorflow.cumsum', 'tf.cumsum', (['sequence_lengths'], {}), '(sequence_lengths)\n', (2398, 2416), True, 'import tensorflow as tf\n'), ((2444, 2485), 'tensorflow.concat', 'tf.concat', (['[[0], end_coords[:-1]]'], {'axis': '(0)'}), '([[0], end_coords[:-1]], axis=0)\n', (2453, 2485), True, 'import tensorflow as tf\n'), ((2507, 2551), 'tensorflow.stack', 'tf.stack', (['[start_coords, end_coords]'], {'axis': '(1)'}), '([start_coords, end_coords], axis=1)\n', (2515, 2551), True, 'import tensorflow as tf\n'), ((2573, 2604), 'tensorflow.cast', 'tf.cast', (['coords'], {'dtype': 'tf.int32'}), '(coords, dtype=tf.int32)\n', (2580, 2604), True, 'import tensorflow as tf\n'), ((2624, 2678), 'tensorflow.map_fn', 'tf.map_fn', (['join_charcaters_fn', 'coords'], {'dtype': 'tf.string'}), '(join_charcaters_fn, coords, dtype=tf.string)\n', (2633, 2678), True, 'import tensorflow as tf\n'), ((2737, 2784), 'tensorflow.reduce_join', 'tf.reduce_join', (['characters_list'], {'keep_dims': '(True)'}), '(characters_list, keep_dims=True)\n', (2751, 2784), True, 'import tensorflow as tf\n'), ((5078, 5113), 'tensorflow.name_scope', 'tf.name_scope', (['"""gradient_averaging"""'], {}), "('gradient_averaging')\n", (5091, 5113), True, 'import tensorflow as tf\n'), ((5171, 5203), 'itertools.chain', 'itertools.chain', (['*tower_gradvars'], {}), 
'(*tower_gradvars)\n', (5186, 5203), False, 'import itertools\n'), ((5335, 5359), 'six.iteritems', 'six.iteritems', (['all_grads'], {}), '(all_grads)\n', (5348, 5359), False, 'import six\n'), ((5884, 5915), 'tensorflow.device', 'tf.device', (['consolidation_device'], {}), '(consolidation_device)\n', (5893, 5915), True, 'import tensorflow as tf\n'), ((5943, 5969), 'tensorflow.train.get_global_step', 'tf.train.get_global_step', ([], {}), '()\n', (5967, 5969), True, 'import tensorflow as tf\n'), ((6055, 6177), 'tensorflow.train.exponential_decay', 'tf.train.exponential_decay', (['starter_learning_rate', 'global_step', 'params.decay_steps', 'params.decay_rate'], {'staircase': '(True)'}), '(starter_learning_rate, global_step, params.\n decay_steps, params.decay_rate, staircase=True)\n', (6081, 6177), True, 'import tensorflow as tf\n'), ((6302, 6343), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['tower_losses'], {'name': '"""loss"""'}), "(tower_losses, name='loss')\n", (6316, 6343), True, 'import tensorflow as tf\n'), ((7517, 7584), 'tensorflow.train.LoggingTensorHook', 'tf.train.LoggingTensorHook', ([], {'tensors': 'tensors_to_log', 'every_n_iter': '(10)'}), '(tensors=tensors_to_log, every_n_iter=10)\n', (7543, 7584), True, 'import tensorflow as tf\n'), ((7626, 7692), 'tensorflow.train.LoggingTensorHook', 'tf.train.LoggingTensorHook', ([], {'tensors': 'dist_to_log', 'every_n_iter': '(1000)'}), '(tensors=dist_to_log, every_n_iter=1000)\n', (7652, 7692), True, 'import tensorflow as tf\n'), ((7791, 7847), 'tensorflow.summary.scalar', 'tf.summary.scalar', ([], {'name': '"""Seq_Dist"""', 'tensor': 'sequence_dist'}), "(name='Seq_Dist', tensor=sequence_dist)\n", (7808, 7847), True, 'import tensorflow as tf\n'), ((7869, 7930), 'tensorflow.summary.scalar', 'tf.summary.scalar', ([], {'name': '"""Learning_rate"""', 'tensor': 'learning_rate'}), "(name='Learning_rate', tensor=learning_rate)\n", (7886, 7930), True, 'import tensorflow as tf\n'), ((8006, 8102), 
'tensorflow.train.SummarySaverHook', 'tf.train.SummarySaverHook', ([], {'save_steps': '(1000)', 'output_dir': '"""/data/output/"""', 'summary_op': 'summaries'}), "(save_steps=1000, output_dir='/data/output/',\n summary_op=summaries)\n", (8031, 8102), True, 'import tensorflow as tf\n'), ((8173, 8228), 'tensorflow.train.AdadeltaOptimizer', 'tf.train.AdadeltaOptimizer', ([], {'learning_rate': 'learning_rate'}), '(learning_rate=learning_rate)\n', (8199, 8228), True, 'import tensorflow as tf\n'), ((8786, 8805), 'tensorflow.group', 'tf.group', (['*train_op'], {}), '(*train_op)\n', (8794, 8805), True, 'import tensorflow as tf\n'), ((14108, 14148), 'tensorflow.GPUOptions', 'tf.GPUOptions', ([], {'force_gpu_compatible': '(True)'}), '(force_gpu_compatible=True)\n', (14121, 14148), True, 'import tensorflow as tf\n'), ((14370, 14434), 'tensorflow.contrib.training.HParams', 'tf.contrib.training.HParams', ([], {'is_chief': 'config.is_chief'}), '(is_chief=config.is_chief, **hparams)\n', (14397, 14434), True, 'import tensorflow as tf\n'), ((6770, 6813), 'tensorflow.cast', 'tf.cast', (['decoded[0].indices[:, 0]', 'tf.int32'], {}), '(decoded[0].indices[:, 0], tf.int32)\n', (6777, 6813), True, 'import tensorflow as tf\n'), ((6948, 6990), 'tensorflow.cast', 'tf.cast', (['labels[0].indices[:, 0]', 'tf.int32'], {}), '(labels[0].indices[:, 0], tf.int32)\n', (6955, 6990), True, 'import tensorflow as tf\n'), ((8285, 8361), 'tensorflow.train.SyncReplicasOptimizer', 'tf.train.SyncReplicasOptimizer', (['optimizer'], {'replicas_to_aggregate': 'num_workers'}), '(optimizer, replicas_to_aggregate=num_workers)\n', (8315, 8361), True, 'import tensorflow as tf\n'), ((10444, 10460), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['v'], {}), '(v)\n', (10457, 10460), True, 'import tensorflow as tf\n'), ((2810, 2836), 'tensorflow.shape', 'tf.shape', (['sequence_lengths'], {}), '(sequence_lengths)\n', (2818, 2836), True, 'import tensorflow as tf\n'), ((3838, 3867), 'tensorflow.name_scope', 'tf.name_scope', 
(["('tower_%d' % i)"], {}), "('tower_%d' % i)\n", (3851, 3867), True, 'import tensorflow as tf\n'), ((5454, 5475), 'tensorflow.device', 'tf.device', (['var.device'], {}), '(var.device)\n', (5463, 5475), True, 'import tensorflow as tf\n'), ((6501, 6526), 'numpy.ones', 'np.ones', (['tower_batch_size'], {}), '(tower_batch_size)\n', (6508, 6526), True, 'import numpy as np\n'), ((6672, 6701), 'tensorflow.cast', 'tf.cast', (['decoded[0]', 'tf.int32'], {}), '(decoded[0], tf.int32)\n', (6679, 6701), True, 'import tensorflow as tf\n'), ((10079, 10098), 'numpy.ones', 'np.ones', (['batch_size'], {}), '(batch_size)\n', (10086, 10098), True, 'import numpy as np\n'), ((3908, 3932), 'tensorflow.device', 'tf.device', (['device_setter'], {}), '(device_setter)\n', (3917, 3932), True, 'import tensorflow as tf\n'), ((6873, 6898), 'tensorflow.shape', 'tf.shape', (['tower_labels[0]'], {}), '(tower_labels[0])\n', (6881, 6898), True, 'import tensorflow as tf\n'), ((7050, 7075), 'tensorflow.shape', 'tf.shape', (['tower_labels[0]'], {}), '(tower_labels[0])\n', (7058, 7075), True, 'import tensorflow as tf\n'), ((8681, 8707), 'tensorflow.train.get_global_step', 'tf.train.get_global_step', ([], {}), '()\n', (8705, 8707), True, 'import tensorflow as tf\n'), ((4880, 4934), 'tensorflow.get_collection', 'tf.get_collection', (['tf.GraphKeys.UPDATE_OPS', 'name_scope'], {}), '(tf.GraphKeys.UPDATE_OPS, name_scope)\n', (4897, 4934), True, 'import tensorflow as tf\n'), ((5634, 5649), 'tensorflow.add_n', 'tf.add_n', (['grads'], {}), '(grads)\n', (5642, 5649), True, 'import tensorflow as tf\n')] |
# Copyright 2014 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for the VMware hypervisor client
"""
import os
from ovs.extensions.hypervisor.apis.vmware.sdk import Sdk
class VMware(object):
    """
    Hypervisor client implementation for VMware.

    Every operation is delegated to the VMware SDK wrapper; this class mainly
    translates between OVS conventions and the SDK API.
    """

    def __init__(self, ip, username, password):
        """
        Sets up the SDK connection and the power-state translation table
        """
        self.sdk = Sdk(ip, username, password)
        # Maps VMware power states onto the hypervisor-agnostic ones.
        self.state_mapping = {'poweredOn': 'RUNNING',
                              'poweredOff': 'HALTED',
                              'suspended': 'PAUSED'}

    def get_state(self, vmid):
        """
        Get the current power state of a virtual machine
        @param vmid: hypervisor id of the virtual machine
        """
        raw_state = self.sdk.get_power_state(vmid)
        return self.state_mapping[raw_state]

    def _task_result(self, task, wait):
        """
        Returns the result value of a completed, validated task, or None
        """
        if wait is True and self.sdk.validate_result(task):
            task_info = self.sdk.get_task_info(task)
            return task_info.info.result.value
        return None

    def create_vm_from_template(self, name, source_vm, disks, ip, mountpoint, wait=True):
        """
        Create a new vmachine from an existing template
        """
        task = self.sdk.create_vm_from_template(name, source_vm, disks, ip, mountpoint, wait)
        return self._task_result(task, wait)

    def clone_vm(self, vmid, name, disks, wait=False):
        """
        Clone a vmachine
        @param vmid: hypervisor id of the virtual machine
        @param name: name of the virtual machine
        @param disks: list of disk information
        @param wait: wait for action to complete
        """
        task = self.sdk.clone_vm(vmid, name, disks, wait)
        return self._task_result(task, wait)

    def delete_vm(self, vmid, storagedriver_mountpoint, storagedriver_storage_ip, devicename, disks_info=None, wait=False):
        """
        Remove the vmachine from the hypervisor
        @param vmid: hypervisor id of the virtual machine
        @param wait: wait for action to complete
        """
        # disks_info is accepted for interface compatibility but unused.
        _ = disks_info if disks_info is not None else []
        self.sdk.delete_vm(vmid, storagedriver_mountpoint, storagedriver_storage_ip, devicename, wait)

    def get_vm_object(self, vmid):
        """
        Gets the VMware virtual machine object from VMware by its identifier
        """
        return self.sdk.get_vm(vmid)

    def get_vm_agnostic_object(self, vmid):
        """
        Gets the virtual machine by its identifier, as an agnostic config
        """
        vm_object = self.sdk.get_vm(vmid)
        return self.sdk.make_agnostic_config(vm_object)

    def get_vm_object_by_devicename(self, devicename, ip, mountpoint):
        """
        Gets the virtual machine behind a devicename on an NFS datastore,
        as an agnostic config
        """
        matches = self.sdk.get_nfs_datastore_object(ip, mountpoint, devicename)
        return self.sdk.make_agnostic_config(matches[0])

    def get_vms_by_nfs_mountinfo(self, ip, mountpoint):
        """
        Yields agnostic vm objects for a given ip and mountpoint
        """
        for vm_object in self.sdk.get_vms(ip, mountpoint):
            yield self.sdk.make_agnostic_config(vm_object)

    def is_datastore_available(self, ip, mountpoint):
        """
        @param ip : hypervisor ip to query for datastore presence
        @param mountpoint: nfs mountpoint on hypervisor
        @rtype: boolean
        @return: True | False
        """
        return self.sdk.is_datastore_available(ip, mountpoint)

    def set_as_template(self, vmid, disks, wait=False):
        """
        Configure a vm as template: the machine keeps existing on the
        hypervisor, but all disks are set to "Independent Non-persistent"
        @param vmid: hypervisor id of the virtual machine
        """
        return self.sdk.set_disk_mode(vmid, disks, 'independent_nonpersistent', wait)

    def mount_nfs_datastore(self, name, remote_host, remote_path):
        """
        Mounts a given NFS export as a datastore
        """
        return self.sdk.mount_nfs_datastore(name, remote_host, remote_path)

    def test_connection(self):
        """
        Checks whether this node is a vCenter
        """
        return self.sdk.test_connection()

    def clean_backing_disk_filename(self, path):
        """
        Cleans a backing disk filename to the corresponding disk filename
        """
        _ = self
        cleaned = path.replace('-flat.vmdk', '.vmdk')
        return cleaned.strip('/')

    def get_backing_disk_path(self, machinename, devicename):
        """
        Builds the path for the file backing a given device/disk
        """
        _ = self
        safe_name = machinename.replace(' ', '_')
        return '/{}/{}-flat.vmdk'.format(safe_name, devicename)

    def get_disk_path(self, machinename, devicename):
        """
        Builds the path for the descriptor file of a given device/disk
        """
        _ = self
        safe_name = machinename.replace(' ', '_')
        return '/{}/{}.vmdk'.format(safe_name, devicename)

    def clean_vmachine_filename(self, path):
        """
        Cleans a VM filename
        """
        _ = self
        return path.strip('/')

    def get_vmachine_path(self, machinename, storagerouter_machineid):
        """
        Builds the path for the vmx file representing a given vmachine
        """
        _ = self, storagerouter_machineid  # For compatibility purposes only
        safe_name = machinename.replace(' ', '_')
        return '/{}/{}.vmx'.format(safe_name, safe_name)

    def get_rename_scenario(self, old_name, new_name):
        """
        Gets the rename scenario based on the old and new name
        """
        _ = self
        if new_name.endswith('.vmx'):
            if old_name.endswith('.vmx'):
                return 'RENAME'
            if old_name.endswith('.vmx~'):
                return 'UPDATE'
        return 'UNSUPPORTED'

    def should_process(self, devicename, machine_ids=None):
        """
        Checks whether a given device should be processed (always True here)
        """
        _ = self, devicename, machine_ids
        return True

    def file_exists(self, vpool, devicename):
        """
        Check if devicename exists on the given vpool
        """
        _ = self
        target = '/mnt/{0}/{1}'.format(vpool.name, devicename)
        return os.path.exists(target) and os.path.isfile(target)
| [
"os.path.isfile",
"os.path.exists",
"ovs.extensions.hypervisor.apis.vmware.sdk.Sdk"
] | [((953, 980), 'ovs.extensions.hypervisor.apis.vmware.sdk.Sdk', 'Sdk', (['ip', 'username', 'password'], {}), '(ip, username, password)\n', (956, 980), False, 'from ovs.extensions.hypervisor.apis.vmware.sdk import Sdk\n'), ((6953, 6977), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (6967, 6977), False, 'import os\n'), ((6982, 7006), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (6996, 7006), False, 'import os\n')] |
# vim: set encoding=utf-8
import re
from lxml import etree
import logging
from regparser import content
from regparser.tree.depth import heuristics, rules, markers as mtypes
from regparser.tree.depth.derive import derive_depths
from regparser.tree.struct import Node
from regparser.tree.paragraph import p_level_of
from regparser.tree.xml_parser.appendices import build_non_reg_text
from regparser.tree import reg_text
from regparser.tree.xml_parser import tree_utils
from settings import PARAGRAPH_HIERARCHY
def get_reg_part(reg_doc):
"""
Depending on source, the CFR part number exists in different places. Fetch
it, wherever it is.
"""
potential_parts = []
potential_parts.extend(
# FR notice
node.attrib['PART'] for node in reg_doc.xpath('//REGTEXT'))
potential_parts.extend(
# e-CFR XML, under PART/EAR
node.text.replace('Pt.', '').strip()
for node in reg_doc.xpath('//PART/EAR')
if 'Pt.' in node.text)
potential_parts.extend(
# e-CFR XML, under FDSYS/HEADING
node.text.replace('PART', '').strip()
for node in reg_doc.xpath('//FDSYS/HEADING')
if 'PART' in node.text)
potential_parts.extend(
# e-CFR XML, under FDSYS/GRANULENUM
node.text.strip() for node in reg_doc.xpath('//FDSYS/GRANULENUM'))
potential_parts = [p for p in potential_parts if p.strip()]
if potential_parts:
return potential_parts[0]
def get_title(reg_doc):
""" Extract the title of the regulation. """
parent = reg_doc.xpath('//PART/HD')[0]
title = parent.text
return title
def preprocess_xml(xml):
"""This transforms the read XML through macros. Each macro consists of
an xpath and a replacement xml string"""
for path, replacement in content.Macros():
replacement = etree.fromstring('<ROOT>' + replacement + '</ROOT>')
for node in xml.xpath(path):
parent = node.getparent()
idx = parent.index(node)
parent.remove(node)
for repl in replacement:
parent.insert(idx, repl)
idx += 1
def build_tree(reg_xml):
if isinstance(reg_xml, str) or isinstance(reg_xml, unicode):
doc = etree.fromstring(reg_xml)
else:
doc = reg_xml
preprocess_xml(doc)
reg_part = get_reg_part(doc)
title = get_title(doc)
tree = Node("", [], [reg_part], title)
part = doc.xpath('//PART')[0]
subpart_xmls = [c for c in part.getchildren() if c.tag == 'SUBPART']
if len(subpart_xmls) > 0:
subparts = [build_subpart(reg_part, s) for s in subpart_xmls]
tree.children = subparts
else:
section_xmls = [c for c in part.getchildren() if c.tag == 'SECTION']
sections = []
for section_xml in section_xmls:
sections.extend(build_from_section(reg_part, section_xml))
empty_part = reg_text.build_empty_part(reg_part)
empty_part.children = sections
tree.children = [empty_part]
non_reg_sections = build_non_reg_text(doc, reg_part)
tree.children += non_reg_sections
return tree
def get_subpart_title(subpart_xml):
hds = subpart_xml.xpath('./HD|./RESERVED')
return [hd.text for hd in hds][0]
def build_subpart(reg_part, subpart_xml):
subpart_title = get_subpart_title(subpart_xml)
subpart = reg_text.build_subpart(subpart_title, reg_part)
sections = []
for ch in subpart_xml.getchildren():
if ch.tag == 'SECTION':
sections.extend(build_from_section(reg_part, ch))
subpart.children = sections
return subpart
# @profile
def get_markers(text):
""" Extract all the paragraph markers from text. Do some checks on the
collapsed markers."""
markers = tree_utils.get_paragraph_markers(text)
collapsed_markers = tree_utils.get_collapsed_markers(text)
# Check that the collapsed markers make sense (i.e. are at least one
# level below the initial marker)
if markers and collapsed_markers:
initial_marker_levels = p_level_of(markers[-1])
final_collapsed_markers = []
for collapsed_marker in collapsed_markers:
collapsed_marker_levels = p_level_of(collapsed_marker)
if any(c > f for f in initial_marker_levels
for c in collapsed_marker_levels):
final_collapsed_markers.append(collapsed_marker)
collapsed_markers = final_collapsed_markers
markers_list = [m for m in markers] + [m for m in collapsed_markers]
return markers_list
def get_markers_and_text(node, markers_list):
node_text = tree_utils.get_node_text(node, add_spaces=True)
text_with_tags = tree_utils.get_node_text_tags_preserved(node)
if len(markers_list) > 1:
actual_markers = ['(%s)' % m for m in markers_list]
plain_markers = [m.replace('<E T="03">', '').replace('</E>', '')
for m in actual_markers]
node_texts = tree_utils.split_text(node_text, plain_markers)
tagged_texts = tree_utils.split_text(text_with_tags, actual_markers)
node_text_list = zip(node_texts, tagged_texts)
elif markers_list:
node_text_list = [(node_text, text_with_tags)]
else:
node_text_list = [('', '')]
return zip(markers_list, node_text_list)
def next_marker(xml_node, remaining_markers):
"""Try to determine the marker following the current xml_node. Remaining
markers is a list of other marks *within* the xml_node. May return
None"""
# More markers in this xml node
if remaining_markers:
return remaining_markers[0][0]
# Check the next xml node; skip over stars
sib = xml_node.getnext()
while sib is not None and sib.tag in ('STARS', 'PRTPAGE'):
sib = sib.getnext()
if sib is not None:
next_text = tree_utils.get_node_text(sib)
next_markers = get_markers(next_text)
if next_markers:
return next_markers[0]
def build_from_section(reg_part, section_xml):
section_texts = []
nodes = []
section_no = section_xml.xpath('SECTNO')[0].text
section_no_without_marker = re.search('[0-9]+\.[0-9]+',
section_no).group(0)
subject_xml = section_xml.xpath('SUBJECT')
if not subject_xml:
subject_xml = section_xml.xpath('RESERVED')
subject_text = subject_xml[0].text
manual_hierarchy = []
if (reg_part in PARAGRAPH_HIERARCHY
and section_no_without_marker in PARAGRAPH_HIERARCHY[reg_part]):
manual_hierarchy = PARAGRAPH_HIERARCHY[reg_part][
section_no_without_marker]
# Collect paragraph markers and section text (intro text for the
# section)
i = 0
children = [ch for ch in section_xml.getchildren()
if ch.tag in ['P', 'STARS']]
for ch in children:
text = tree_utils.get_node_text(ch, add_spaces=True)
tagged_text = tree_utils.get_node_text_tags_preserved(ch)
markers_list = get_markers(tagged_text.strip())
# If the child has a 'DEPTH' attribute, we're in manual
# hierarchy mode, just constructed from the XML instead of
# specified in configuration.
# This presumes that every child in the section has DEPTH
# specified, if not, things will break in and around
# derive_depths below.
if ch.get("depth") is not None:
manual_hierarchy.append(int(ch.get("depth")))
if ch.tag == 'STARS':
nodes.append(Node(label=[mtypes.STARS_TAG]))
elif not markers_list and manual_hierarchy:
# is this a bunch of definitions that don't have numbers next to
# them?
if len(nodes) > 0:
if (subject_text.find('Definitions.') > -1
or nodes[-1].text.find(
'For the purposes of this section')):
# TODO: create a grammar for definitions
if text.find('means') > -1:
def_marker = text.split('means')[0].strip().split()
def_marker = ''.join([word[0].upper() + word[1:]
for word in def_marker])
elif text.find('shall have the same meaning') > -1:
def_marker = text.split('shall')[0].strip().split()
def_marker = ''.join([word[0].upper() + word[1:]
for word in def_marker])
else:
def_marker = 'def{0}'.format(i)
i += 1
n = Node(text, label=[def_marker], source_xml=ch)
n.tagged_text = tagged_text
nodes.append(n)
else:
section_texts.append((text, tagged_text))
else:
if len(children) > 1:
def_marker = 'def{0}'.format(i)
n = Node(text, [], [def_marker], source_xml=ch)
n.tagged_text = tagged_text
i += 1
nodes.append(n)
else:
# this is the only node around
section_texts.append((text, tagged_text))
elif not markers_list and not manual_hierarchy:
# No manual heirarchy specified, append to the section.
section_texts.append((text, tagged_text))
else:
for m, node_text in get_markers_and_text(ch, markers_list):
n = Node(node_text[0], [], [m], source_xml=ch)
n.tagged_text = unicode(node_text[1])
nodes.append(n)
if node_text[0].endswith('* * *'):
nodes.append(Node(label=[mtypes.INLINE_STARS]))
# Trailing stars don't matter; slightly more efficient to ignore them
while nodes and nodes[-1].label[0] in mtypes.stars:
nodes = nodes[:-1]
m_stack = tree_utils.NodeStack()
# Use constraint programming to figure out possible depth assignments
if not manual_hierarchy:
depths = derive_depths(
[node.label[0] for node in nodes],
[rules.depth_type_order([mtypes.lower, mtypes.ints, mtypes.roman,
mtypes.upper, mtypes.em_ints,
mtypes.em_roman])])
if not manual_hierarchy and depths:
# Find the assignment which violates the least of our heuristics
depths = heuristics.prefer_multiple_children(depths, 0.5)
depths = sorted(depths, key=lambda d: d.weight, reverse=True)
depths = depths[0]
for node, par in zip(nodes, depths):
if par.typ != mtypes.stars:
last = m_stack.peek()
node.label = [l.replace('<E T="03">', '').replace('</E>', '')
for l in node.label]
if len(last) == 0:
m_stack.push_last((1 + par.depth, node))
else:
m_stack.add(1 + par.depth, node)
elif nodes and manual_hierarchy:
logging.warning('Using manual depth hierarchy.')
depths = manual_hierarchy
if len(nodes) == len(depths):
for node, spec in zip(nodes, depths):
if isinstance(spec, int):
depth = spec
elif isinstance(spec, tuple):
depth, marker = spec
node.marker = marker
last = m_stack.peek()
node.label = [l.replace('<E T="03">', '').replace('</E>', '')
for l in node.label]
if len(last) == 0:
m_stack.push_last((1 + depth, node))
else:
m_stack.add(1 + depth, node)
else:
logging.error('Manual hierarchy length does not match node '
'list length! ({0} nodes but {1} provided, '
'{2})'.format(
len(nodes),
len(depths),
[x.label[0] for x in nodes]))
elif nodes and not manual_hierarchy:
logging.warning(
'Could not determine depth when parsing {0}:\n{1}'.format(
section_no_without_marker, [node.label[0] for node in nodes]))
for node in nodes:
last = m_stack.peek()
node.label = [l.replace('<E T="03">', '').replace('</E>', '')
for l in node.label]
if len(last) == 0:
m_stack.push_last((3, node))
else:
m_stack.add(3, node)
nodes = []
section_nums = []
for match in re.finditer(r'%s\.(\d+)' % reg_part, section_no):
section_nums.append(int(match.group(1)))
# Span of section numbers
if u'§§' == section_no[:2] and '-' in section_no:
first, last = section_nums
section_nums = []
for i in range(first, last + 1):
section_nums.append(i)
for section_number in section_nums:
section_number = str(section_number)
plain_sect_texts = [s[0] for s in section_texts]
tagged_sect_texts = [s[1] for s in section_texts]
section_title = u"§ " + reg_part + "." + section_number
if subject_text:
section_title += " " + subject_text
section_text = ' '.join([section_xml.text] + plain_sect_texts)
tagged_section_text = ' '.join([section_xml.text] + tagged_sect_texts)
sect_node = Node(section_text, label=[reg_part, section_number],
title=section_title)
sect_node.tagged_text = tagged_section_text
m_stack.add_to_bottom((1, sect_node))
while m_stack.size() > 1:
m_stack.unwind()
nodes.append(m_stack.pop()[0][1])
return nodes
| [
"regparser.tree.struct.Node",
"regparser.tree.xml_parser.tree_utils.get_paragraph_markers",
"regparser.tree.reg_text.build_subpart",
"regparser.tree.depth.rules.depth_type_order",
"regparser.tree.paragraph.p_level_of",
"regparser.tree.xml_parser.tree_utils.get_collapsed_markers",
"regparser.tree.depth.h... | [((1793, 1809), 'regparser.content.Macros', 'content.Macros', ([], {}), '()\n', (1807, 1809), False, 'from regparser import content\n'), ((2394, 2425), 'regparser.tree.struct.Node', 'Node', (['""""""', '[]', '[reg_part]', 'title'], {}), "('', [], [reg_part], title)\n", (2398, 2425), False, 'from regparser.tree.struct import Node\n'), ((3046, 3079), 'regparser.tree.xml_parser.appendices.build_non_reg_text', 'build_non_reg_text', (['doc', 'reg_part'], {}), '(doc, reg_part)\n', (3064, 3079), False, 'from regparser.tree.xml_parser.appendices import build_non_reg_text\n'), ((3367, 3414), 'regparser.tree.reg_text.build_subpart', 'reg_text.build_subpart', (['subpart_title', 'reg_part'], {}), '(subpart_title, reg_part)\n', (3389, 3414), False, 'from regparser.tree import reg_text\n'), ((3772, 3810), 'regparser.tree.xml_parser.tree_utils.get_paragraph_markers', 'tree_utils.get_paragraph_markers', (['text'], {}), '(text)\n', (3804, 3810), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((3835, 3873), 'regparser.tree.xml_parser.tree_utils.get_collapsed_markers', 'tree_utils.get_collapsed_markers', (['text'], {}), '(text)\n', (3867, 3873), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((4629, 4676), 'regparser.tree.xml_parser.tree_utils.get_node_text', 'tree_utils.get_node_text', (['node'], {'add_spaces': '(True)'}), '(node, add_spaces=True)\n', (4653, 4676), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((4698, 4743), 'regparser.tree.xml_parser.tree_utils.get_node_text_tags_preserved', 'tree_utils.get_node_text_tags_preserved', (['node'], {}), '(node)\n', (4737, 4743), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((10019, 10041), 'regparser.tree.xml_parser.tree_utils.NodeStack', 'tree_utils.NodeStack', ([], {}), '()\n', (10039, 10041), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((12807, 12856), 're.finditer', 're.finditer', (["('%s\\\\.(\\\\d+)' % reg_part)", 
'section_no'], {}), "('%s\\\\.(\\\\d+)' % reg_part, section_no)\n", (12818, 12856), False, 'import re\n'), ((1833, 1885), 'lxml.etree.fromstring', 'etree.fromstring', (["('<ROOT>' + replacement + '</ROOT>')"], {}), "('<ROOT>' + replacement + '</ROOT>')\n", (1849, 1885), False, 'from lxml import etree\n'), ((2239, 2264), 'lxml.etree.fromstring', 'etree.fromstring', (['reg_xml'], {}), '(reg_xml)\n', (2255, 2264), False, 'from lxml import etree\n'), ((2910, 2945), 'regparser.tree.reg_text.build_empty_part', 'reg_text.build_empty_part', (['reg_part'], {}), '(reg_part)\n', (2935, 2945), False, 'from regparser.tree import reg_text\n'), ((4060, 4083), 'regparser.tree.paragraph.p_level_of', 'p_level_of', (['markers[-1]'], {}), '(markers[-1])\n', (4070, 4083), False, 'from regparser.tree.paragraph import p_level_of\n'), ((4979, 5026), 'regparser.tree.xml_parser.tree_utils.split_text', 'tree_utils.split_text', (['node_text', 'plain_markers'], {}), '(node_text, plain_markers)\n', (5000, 5026), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((5050, 5103), 'regparser.tree.xml_parser.tree_utils.split_text', 'tree_utils.split_text', (['text_with_tags', 'actual_markers'], {}), '(text_with_tags, actual_markers)\n', (5071, 5103), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((5854, 5883), 'regparser.tree.xml_parser.tree_utils.get_node_text', 'tree_utils.get_node_text', (['sib'], {}), '(sib)\n', (5878, 5883), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((6891, 6936), 'regparser.tree.xml_parser.tree_utils.get_node_text', 'tree_utils.get_node_text', (['ch'], {'add_spaces': '(True)'}), '(ch, add_spaces=True)\n', (6915, 6936), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((6959, 7002), 'regparser.tree.xml_parser.tree_utils.get_node_text_tags_preserved', 'tree_utils.get_node_text_tags_preserved', (['ch'], {}), '(ch)\n', (6998, 7002), False, 'from regparser.tree.xml_parser import tree_utils\n'), ((10558, 10606), 
'regparser.tree.depth.heuristics.prefer_multiple_children', 'heuristics.prefer_multiple_children', (['depths', '(0.5)'], {}), '(depths, 0.5)\n', (10593, 10606), False, 'from regparser.tree.depth import heuristics, rules, markers as mtypes\n'), ((13640, 13713), 'regparser.tree.struct.Node', 'Node', (['section_text'], {'label': '[reg_part, section_number]', 'title': 'section_title'}), '(section_text, label=[reg_part, section_number], title=section_title)\n', (13644, 13713), False, 'from regparser.tree.struct import Node\n'), ((4210, 4238), 'regparser.tree.paragraph.p_level_of', 'p_level_of', (['collapsed_marker'], {}), '(collapsed_marker)\n', (4220, 4238), False, 'from regparser.tree.paragraph import p_level_of\n'), ((6163, 6203), 're.search', 're.search', (['"""[0-9]+\\\\.[0-9]+"""', 'section_no'], {}), "('[0-9]+\\\\.[0-9]+', section_no)\n", (6172, 6203), False, 'import re\n'), ((11174, 11222), 'logging.warning', 'logging.warning', (['"""Using manual depth hierarchy."""'], {}), "('Using manual depth hierarchy.')\n", (11189, 11222), False, 'import logging\n'), ((7541, 7571), 'regparser.tree.struct.Node', 'Node', ([], {'label': '[mtypes.STARS_TAG]'}), '(label=[mtypes.STARS_TAG])\n', (7545, 7571), False, 'from regparser.tree.struct import Node\n'), ((10238, 10355), 'regparser.tree.depth.rules.depth_type_order', 'rules.depth_type_order', (['[mtypes.lower, mtypes.ints, mtypes.roman, mtypes.upper, mtypes.em_ints,\n mtypes.em_roman]'], {}), '([mtypes.lower, mtypes.ints, mtypes.roman, mtypes.\n upper, mtypes.em_ints, mtypes.em_roman])\n', (10260, 10355), False, 'from regparser.tree.depth import heuristics, rules, markers as mtypes\n'), ((8684, 8729), 'regparser.tree.struct.Node', 'Node', (['text'], {'label': '[def_marker]', 'source_xml': 'ch'}), '(text, label=[def_marker], source_xml=ch)\n', (8688, 8729), False, 'from regparser.tree.struct import Node\n'), ((9030, 9073), 'regparser.tree.struct.Node', 'Node', (['text', '[]', '[def_marker]'], {'source_xml': 'ch'}), '(text, 
[], [def_marker], source_xml=ch)\n', (9034, 9073), False, 'from regparser.tree.struct import Node\n'), ((9605, 9647), 'regparser.tree.struct.Node', 'Node', (['node_text[0]', '[]', '[m]'], {'source_xml': 'ch'}), '(node_text[0], [], [m], source_xml=ch)\n', (9609, 9647), False, 'from regparser.tree.struct import Node\n'), ((9811, 9844), 'regparser.tree.struct.Node', 'Node', ([], {'label': '[mtypes.INLINE_STARS]'}), '(label=[mtypes.INLINE_STARS])\n', (9815, 9844), False, 'from regparser.tree.struct import Node\n')] |
from distutils.core import setup
from setuptools import find_packages
setup(
name="stytra",
version="0.1",
author="<NAME>, <NAME> @portugueslab",
author_email="<EMAIL>",
license="MIT",
packages=find_packages(),
install_requires=[
"pyqtgraph>=0.10.0",
"numpy",
"numba",
"matplotlib",
"pandas",
"qdarkstyle",
"qimage2ndarray",
"deepdish",
"param",
"pims",
"GitPython",
"pymongo",
"colorspacious",
"arrayqueues",
],
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Science/Research",
# Pick your license as you wish (should match "license" above)
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords="tracking processing",
description="A modular package to control stimulation and track behaviour in zebrafish experiments.",
project_urls={
"Source": "https://github.com/portugueslab/stytra",
"Tracker": "https://github.com/portugueslab/stytra/issues",
},
)
| [
"setuptools.find_packages"
] | [((220, 235), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (233, 235), False, 'from setuptools import find_packages\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'MainUi.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setEnabled(True)
MainWindow.resize(1070, 837)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
MainWindow.setSizePolicy(sizePolicy)
MainWindow.setMinimumSize(QtCore.QSize(0, 0))
MainWindow.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout_2.setContentsMargins(5, 10, 5, 5)
self.verticalLayout_2.setSpacing(5)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox.setTitle("")
self.groupBox.setObjectName("groupBox")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.groupBox)
self.verticalLayout_3.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_3.setSpacing(5)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.gridLayout_3 = QtWidgets.QGridLayout()
self.gridLayout_3.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.gridLayout_3.setObjectName("gridLayout_3")
self.label_costume_state_4 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_costume_state_4.sizePolicy().hasHeightForWidth())
self.label_costume_state_4.setSizePolicy(sizePolicy)
self.label_costume_state_4.setMaximumSize(QtCore.QSize(16777215, 28))
self.label_costume_state_4.setObjectName("label_costume_state_4")
self.gridLayout_3.addWidget(self.label_costume_state_4, 4, 4, 1, 1)
self.label_servant_state_2 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_servant_state_2.sizePolicy().hasHeightForWidth())
self.label_servant_state_2.setSizePolicy(sizePolicy)
self.label_servant_state_2.setMinimumSize(QtCore.QSize(110, 65))
self.label_servant_state_2.setMaximumSize(QtCore.QSize(16777215, 65))
self.label_servant_state_2.setObjectName("label_servant_state_2")
self.gridLayout_3.addWidget(self.label_servant_state_2, 2, 1, 1, 1)
self.line_7 = QtWidgets.QFrame(self.groupBox)
self.line_7.setFrameShape(QtWidgets.QFrame.VLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.gridLayout_3.addWidget(self.line_7, 0, 7, 1, 1)
self.label_costume_state_1 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_costume_state_1.sizePolicy().hasHeightForWidth())
self.label_costume_state_1.setSizePolicy(sizePolicy)
self.label_costume_state_1.setMaximumSize(QtCore.QSize(16777212, 28))
self.label_costume_state_1.setObjectName("label_costume_state_1")
self.gridLayout_3.addWidget(self.label_costume_state_1, 4, 0, 1, 1)
self.box_skill_confirm = QtWidgets.QCheckBox(self.groupBox)
self.box_skill_confirm.setObjectName("box_skill_confirm")
self.gridLayout_3.addWidget(self.box_skill_confirm, 4, 8, 1, 1)
self.horizontalLayout_29 = QtWidgets.QHBoxLayout()
self.horizontalLayout_29.setObjectName("horizontalLayout_29")
self.btn_select_servant_5 = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_select_servant_5.sizePolicy().hasHeightForWidth())
self.btn_select_servant_5.setSizePolicy(sizePolicy)
self.btn_select_servant_5.setMinimumSize(QtCore.QSize(92, 100))
self.btn_select_servant_5.setMaximumSize(QtCore.QSize(92, 100))
self.btn_select_servant_5.setText("")
self.btn_select_servant_5.setIconSize(QtCore.QSize(92, 100))
self.btn_select_servant_5.setObjectName("btn_select_servant_5")
self.horizontalLayout_29.addWidget(self.btn_select_servant_5)
self.gridLayout_3.addLayout(self.horizontalLayout_29, 0, 5, 1, 1)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.label = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label.sizePolicy().hasHeightForWidth())
self.label.setSizePolicy(sizePolicy)
self.label.setObjectName("label")
self.horizontalLayout_3.addWidget(self.label)
self.spinbox_required_prob = QtWidgets.QSpinBox(self.groupBox)
self.spinbox_required_prob.setMaximum(100)
self.spinbox_required_prob.setProperty("value", 100)
self.spinbox_required_prob.setObjectName("spinbox_required_prob")
self.horizontalLayout_3.addWidget(self.spinbox_required_prob)
self.gridLayout_3.addLayout(self.horizontalLayout_3, 3, 8, 1, 1)
self.horizontalLayout_24 = QtWidgets.QHBoxLayout()
self.horizontalLayout_24.setObjectName("horizontalLayout_24")
self.btn_select_servant_1 = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_select_servant_1.sizePolicy().hasHeightForWidth())
self.btn_select_servant_1.setSizePolicy(sizePolicy)
self.btn_select_servant_1.setMinimumSize(QtCore.QSize(92, 100))
self.btn_select_servant_1.setMaximumSize(QtCore.QSize(92, 100))
self.btn_select_servant_1.setText("")
self.btn_select_servant_1.setIconSize(QtCore.QSize(92, 100))
self.btn_select_servant_1.setObjectName("btn_select_servant_1")
self.horizontalLayout_24.addWidget(self.btn_select_servant_1)
self.gridLayout_3.addLayout(self.horizontalLayout_24, 0, 0, 1, 1)
self.label_costume_state_5 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_costume_state_5.sizePolicy().hasHeightForWidth())
self.label_costume_state_5.setSizePolicy(sizePolicy)
self.label_costume_state_5.setMaximumSize(QtCore.QSize(16777215, 28))
self.label_costume_state_5.setObjectName("label_costume_state_5")
self.gridLayout_3.addWidget(self.label_costume_state_5, 4, 5, 1, 1)
self.label_costume_state_6 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_costume_state_6.sizePolicy().hasHeightForWidth())
self.label_costume_state_6.setSizePolicy(sizePolicy)
self.label_costume_state_6.setMaximumSize(QtCore.QSize(16777215, 28))
self.label_costume_state_6.setObjectName("label_costume_state_6")
self.gridLayout_3.addWidget(self.label_costume_state_6, 4, 6, 1, 1)
self.horizontalLayout_26 = QtWidgets.QHBoxLayout()
self.horizontalLayout_26.setObjectName("horizontalLayout_26")
self.btn_select_servant_2 = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_select_servant_2.sizePolicy().hasHeightForWidth())
self.btn_select_servant_2.setSizePolicy(sizePolicy)
self.btn_select_servant_2.setMinimumSize(QtCore.QSize(92, 100))
self.btn_select_servant_2.setMaximumSize(QtCore.QSize(92, 100))
self.btn_select_servant_2.setText("")
self.btn_select_servant_2.setIconSize(QtCore.QSize(92, 100))
self.btn_select_servant_2.setObjectName("btn_select_servant_2")
self.horizontalLayout_26.addWidget(self.btn_select_servant_2)
self.gridLayout_3.addLayout(self.horizontalLayout_26, 0, 1, 1, 1)
self.horizontalLayout_23 = QtWidgets.QHBoxLayout()
self.horizontalLayout_23.setObjectName("horizontalLayout_23")
self.btn_select_master = QtWidgets.QPushButton(self.groupBox)
self.btn_select_master.setMinimumSize(QtCore.QSize(92, 100))
self.btn_select_master.setMaximumSize(QtCore.QSize(92, 100))
self.btn_select_master.setText("")
self.btn_select_master.setIconSize(QtCore.QSize(100, 100))
self.btn_select_master.setObjectName("btn_select_master")
self.horizontalLayout_23.addWidget(self.btn_select_master)
self.gridLayout_3.addLayout(self.horizontalLayout_23, 0, 8, 1, 1)
self.label_servant_state_1 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_servant_state_1.sizePolicy().hasHeightForWidth())
self.label_servant_state_1.setSizePolicy(sizePolicy)
self.label_servant_state_1.setMinimumSize(QtCore.QSize(110, 65))
self.label_servant_state_1.setMaximumSize(QtCore.QSize(16777215, 65))
self.label_servant_state_1.setObjectName("label_servant_state_1")
self.gridLayout_3.addWidget(self.label_servant_state_1, 2, 0, 1, 1)
self.label_master_state = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_master_state.sizePolicy().hasHeightForWidth())
self.label_master_state.setSizePolicy(sizePolicy)
self.label_master_state.setMinimumSize(QtCore.QSize(0, 0))
self.label_master_state.setMaximumSize(QtCore.QSize(16777215, 28))
self.label_master_state.setObjectName("label_master_state")
self.gridLayout_3.addWidget(self.label_master_state, 2, 8, 1, 1)
self.horizontalLayout_38 = QtWidgets.QHBoxLayout()
self.horizontalLayout_38.setObjectName("horizontalLayout_38")
self.btn_select_costume_3 = QtWidgets.QPushButton(self.groupBox)
self.btn_select_costume_3.setMinimumSize(QtCore.QSize(100, 45))
self.btn_select_costume_3.setMaximumSize(QtCore.QSize(100, 45))
self.btn_select_costume_3.setText("")
self.btn_select_costume_3.setIconSize(QtCore.QSize(100, 150))
self.btn_select_costume_3.setObjectName("btn_select_costume_3")
self.horizontalLayout_38.addWidget(self.btn_select_costume_3)
self.gridLayout_3.addLayout(self.horizontalLayout_38, 3, 2, 1, 1)
self.label_costume_state_2 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_costume_state_2.sizePolicy().hasHeightForWidth())
self.label_costume_state_2.setSizePolicy(sizePolicy)
self.label_costume_state_2.setMaximumSize(QtCore.QSize(16777215, 28))
self.label_costume_state_2.setObjectName("label_costume_state_2")
self.gridLayout_3.addWidget(self.label_costume_state_2, 4, 1, 1, 1)
self.horizontalLayout_28 = QtWidgets.QHBoxLayout()
self.horizontalLayout_28.setObjectName("horizontalLayout_28")
self.btn_select_servant_4 = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_select_servant_4.sizePolicy().hasHeightForWidth())
self.btn_select_servant_4.setSizePolicy(sizePolicy)
self.btn_select_servant_4.setMinimumSize(QtCore.QSize(92, 100))
self.btn_select_servant_4.setMaximumSize(QtCore.QSize(92, 100))
self.btn_select_servant_4.setText("")
self.btn_select_servant_4.setIconSize(QtCore.QSize(92, 100))
self.btn_select_servant_4.setObjectName("btn_select_servant_4")
self.horizontalLayout_28.addWidget(self.btn_select_servant_4)
self.gridLayout_3.addLayout(self.horizontalLayout_28, 0, 4, 1, 1)
self.horizontalLayout_36 = QtWidgets.QHBoxLayout()
self.horizontalLayout_36.setObjectName("horizontalLayout_36")
self.btn_select_costume_2 = QtWidgets.QPushButton(self.groupBox)
self.btn_select_costume_2.setMinimumSize(QtCore.QSize(100, 45))
self.btn_select_costume_2.setMaximumSize(QtCore.QSize(100, 45))
self.btn_select_costume_2.setText("")
self.btn_select_costume_2.setIconSize(QtCore.QSize(100, 150))
self.btn_select_costume_2.setObjectName("btn_select_costume_2")
self.horizontalLayout_36.addWidget(self.btn_select_costume_2)
self.gridLayout_3.addLayout(self.horizontalLayout_36, 3, 1, 1, 1)
self.horizontalLayout_46 = QtWidgets.QHBoxLayout()
self.horizontalLayout_46.setObjectName("horizontalLayout_46")
self.btn_select_costume_1 = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_select_costume_1.sizePolicy().hasHeightForWidth())
self.btn_select_costume_1.setSizePolicy(sizePolicy)
self.btn_select_costume_1.setMinimumSize(QtCore.QSize(100, 45))
self.btn_select_costume_1.setMaximumSize(QtCore.QSize(100, 45))
self.btn_select_costume_1.setText("")
self.btn_select_costume_1.setIconSize(QtCore.QSize(100, 150))
self.btn_select_costume_1.setObjectName("btn_select_costume_1")
self.horizontalLayout_46.addWidget(self.btn_select_costume_1)
self.gridLayout_3.addLayout(self.horizontalLayout_46, 3, 0, 1, 1)
self.label_servant_state_3 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_servant_state_3.sizePolicy().hasHeightForWidth())
self.label_servant_state_3.setSizePolicy(sizePolicy)
self.label_servant_state_3.setMinimumSize(QtCore.QSize(110, 65))
self.label_servant_state_3.setMaximumSize(QtCore.QSize(16777215, 65))
self.label_servant_state_3.setObjectName("label_servant_state_3")
self.gridLayout_3.addWidget(self.label_servant_state_3, 2, 2, 1, 1)
self.label_servant_state_5 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_servant_state_5.sizePolicy().hasHeightForWidth())
self.label_servant_state_5.setSizePolicy(sizePolicy)
self.label_servant_state_5.setMinimumSize(QtCore.QSize(110, 65))
self.label_servant_state_5.setMaximumSize(QtCore.QSize(16777215, 65))
self.label_servant_state_5.setObjectName("label_servant_state_5")
self.gridLayout_3.addWidget(self.label_servant_state_5, 2, 5, 1, 1)
self.horizontalLayout_44 = QtWidgets.QHBoxLayout()
self.horizontalLayout_44.setObjectName("horizontalLayout_44")
self.btn_select_costume_6 = QtWidgets.QPushButton(self.groupBox)
self.btn_select_costume_6.setMinimumSize(QtCore.QSize(100, 45))
self.btn_select_costume_6.setMaximumSize(QtCore.QSize(100, 45))
self.btn_select_costume_6.setText("")
self.btn_select_costume_6.setIconSize(QtCore.QSize(100, 150))
self.btn_select_costume_6.setObjectName("btn_select_costume_6")
self.horizontalLayout_44.addWidget(self.btn_select_costume_6)
self.gridLayout_3.addLayout(self.horizontalLayout_44, 3, 6, 1, 1)
self.horizontalLayout_27 = QtWidgets.QHBoxLayout()
self.horizontalLayout_27.setObjectName("horizontalLayout_27")
self.btn_select_servant_3 = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_select_servant_3.sizePolicy().hasHeightForWidth())
self.btn_select_servant_3.setSizePolicy(sizePolicy)
self.btn_select_servant_3.setMinimumSize(QtCore.QSize(92, 100))
self.btn_select_servant_3.setMaximumSize(QtCore.QSize(92, 100))
self.btn_select_servant_3.setText("")
self.btn_select_servant_3.setIconSize(QtCore.QSize(92, 100))
self.btn_select_servant_3.setObjectName("btn_select_servant_3")
self.horizontalLayout_27.addWidget(self.btn_select_servant_3)
self.gridLayout_3.addLayout(self.horizontalLayout_27, 0, 2, 1, 1)
self.label_costume_state_3 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_costume_state_3.sizePolicy().hasHeightForWidth())
self.label_costume_state_3.setSizePolicy(sizePolicy)
self.label_costume_state_3.setMaximumSize(QtCore.QSize(16777215, 28))
self.label_costume_state_3.setObjectName("label_costume_state_3")
self.gridLayout_3.addWidget(self.label_costume_state_3, 4, 2, 1, 1)
self.label_servant_state_4 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_servant_state_4.sizePolicy().hasHeightForWidth())
self.label_servant_state_4.setSizePolicy(sizePolicy)
self.label_servant_state_4.setMinimumSize(QtCore.QSize(110, 65))
self.label_servant_state_4.setMaximumSize(QtCore.QSize(16777215, 65))
self.label_servant_state_4.setObjectName("label_servant_state_4")
self.gridLayout_3.addWidget(self.label_servant_state_4, 2, 4, 1, 1)
self.line_8 = QtWidgets.QFrame(self.groupBox)
self.line_8.setFrameShape(QtWidgets.QFrame.VLine)
self.line_8.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_8.setObjectName("line_8")
self.gridLayout_3.addWidget(self.line_8, 3, 7, 1, 1)
self.horizontalLayout_40 = QtWidgets.QHBoxLayout()
self.horizontalLayout_40.setObjectName("horizontalLayout_40")
self.btn_select_costume_4 = QtWidgets.QPushButton(self.groupBox)
self.btn_select_costume_4.setMinimumSize(QtCore.QSize(100, 45))
self.btn_select_costume_4.setMaximumSize(QtCore.QSize(100, 45))
self.btn_select_costume_4.setText("")
self.btn_select_costume_4.setIconSize(QtCore.QSize(100, 150))
self.btn_select_costume_4.setObjectName("btn_select_costume_4")
self.horizontalLayout_40.addWidget(self.btn_select_costume_4)
self.gridLayout_3.addLayout(self.horizontalLayout_40, 3, 4, 1, 1)
self.label_servant_state_6 = QtWidgets.QLabel(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_servant_state_6.sizePolicy().hasHeightForWidth())
self.label_servant_state_6.setSizePolicy(sizePolicy)
self.label_servant_state_6.setMinimumSize(QtCore.QSize(110, 65))
self.label_servant_state_6.setMaximumSize(QtCore.QSize(16777215, 65))
self.label_servant_state_6.setObjectName("label_servant_state_6")
self.gridLayout_3.addWidget(self.label_servant_state_6, 2, 6, 1, 1)
self.line_3 = QtWidgets.QFrame(self.groupBox)
self.line_3.setFrameShape(QtWidgets.QFrame.VLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.gridLayout_3.addWidget(self.line_3, 0, 3, 1, 1)
self.horizontalLayout_42 = QtWidgets.QHBoxLayout()
self.horizontalLayout_42.setObjectName("horizontalLayout_42")
self.btn_select_costume_5 = QtWidgets.QPushButton(self.groupBox)
self.btn_select_costume_5.setMinimumSize(QtCore.QSize(100, 45))
self.btn_select_costume_5.setMaximumSize(QtCore.QSize(100, 45))
self.btn_select_costume_5.setText("")
self.btn_select_costume_5.setIconSize(QtCore.QSize(100, 150))
self.btn_select_costume_5.setObjectName("btn_select_costume_5")
self.horizontalLayout_42.addWidget(self.btn_select_costume_5)
self.gridLayout_3.addLayout(self.horizontalLayout_42, 3, 5, 1, 1)
self.horizontalLayout_30 = QtWidgets.QHBoxLayout()
self.horizontalLayout_30.setObjectName("horizontalLayout_30")
self.btn_select_servant_6 = QtWidgets.QPushButton(self.groupBox)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btn_select_servant_6.sizePolicy().hasHeightForWidth())
self.btn_select_servant_6.setSizePolicy(sizePolicy)
self.btn_select_servant_6.setMinimumSize(QtCore.QSize(92, 100))
self.btn_select_servant_6.setMaximumSize(QtCore.QSize(92, 100))
self.btn_select_servant_6.setText("")
self.btn_select_servant_6.setIconSize(QtCore.QSize(92, 100))
self.btn_select_servant_6.setObjectName("btn_select_servant_6")
self.horizontalLayout_30.addWidget(self.btn_select_servant_6)
self.gridLayout_3.addLayout(self.horizontalLayout_30, 0, 6, 1, 1)
self.line_4 = QtWidgets.QFrame(self.groupBox)
self.line_4.setFrameShape(QtWidgets.QFrame.VLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.gridLayout_3.addWidget(self.line_4, 3, 3, 1, 1)
self.verticalLayout_3.addLayout(self.gridLayout_3)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.btn_set_progress = QtWidgets.QPushButton(self.groupBox)
self.btn_set_progress.setObjectName("btn_set_progress")
self.horizontalLayout_4.addWidget(self.btn_set_progress)
self.btn_choose_level = QtWidgets.QPushButton(self.groupBox)
self.btn_choose_level.setObjectName("btn_choose_level")
self.horizontalLayout_4.addWidget(self.btn_choose_level)
self.btn_confirm_team = QtWidgets.QPushButton(self.groupBox)
self.btn_confirm_team.setObjectName("btn_confirm_team")
self.horizontalLayout_4.addWidget(self.btn_confirm_team)
self.btn_change_team = QtWidgets.QPushButton(self.groupBox)
self.btn_change_team.setEnabled(False)
self.btn_change_team.setObjectName("btn_change_team")
self.horizontalLayout_4.addWidget(self.btn_change_team)
self.btn_round_reset = QtWidgets.QPushButton(self.groupBox)
self.btn_round_reset.setEnabled(False)
self.btn_round_reset.setObjectName("btn_round_reset")
self.horizontalLayout_4.addWidget(self.btn_round_reset)
self.verticalLayout_3.addLayout(self.horizontalLayout_4)
self.verticalLayout_2.addWidget(self.groupBox)
self.horizontalLayout_15 = QtWidgets.QHBoxLayout()
self.horizontalLayout_15.setObjectName("horizontalLayout_15")
self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_2.setTitle("")
self.groupBox_2.setObjectName("groupBox_2")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.groupBox_2)
self.verticalLayout_6.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_6.setSpacing(5)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.gridLayout_7 = QtWidgets.QGridLayout()
self.gridLayout_7.setObjectName("gridLayout_7")
self.round1_enemy3_class = QtWidgets.QLabel(self.groupBox_2)
self.round1_enemy3_class.setMinimumSize(QtCore.QSize(150, 0))
self.round1_enemy3_class.setMaximumSize(QtCore.QSize(150, 16777215))
self.round1_enemy3_class.setText("")
self.round1_enemy3_class.setObjectName("round1_enemy3_class")
self.gridLayout_7.addWidget(self.round1_enemy3_class, 1, 0, 1, 1)
self.round3_enemy1_class = QtWidgets.QLabel(self.groupBox_2)
self.round3_enemy1_class.setMinimumSize(QtCore.QSize(150, 0))
self.round3_enemy1_class.setMaximumSize(QtCore.QSize(150, 16777215))
self.round3_enemy1_class.setText("")
self.round3_enemy1_class.setObjectName("round3_enemy1_class")
self.gridLayout_7.addWidget(self.round3_enemy1_class, 7, 2, 1, 1)
self.horizontalLayout_10 = QtWidgets.QHBoxLayout()
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
self.round2_enemy1_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round2_enemy1_pic.setEnabled(False)
self.round2_enemy1_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round2_enemy1_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round2_enemy1_pic.setText("")
self.round2_enemy1_pic.setIconSize(QtCore.QSize(64, 64))
self.round2_enemy1_pic.setObjectName("round2_enemy1_pic")
self.horizontalLayout_10.addWidget(self.round2_enemy1_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_10, 3, 2, 1, 1)
self.round3_enemy1_health = QtWidgets.QLabel(self.groupBox_2)
self.round3_enemy1_health.setMinimumSize(QtCore.QSize(150, 0))
self.round3_enemy1_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round3_enemy1_health.setText("")
self.round3_enemy1_health.setObjectName("round3_enemy1_health")
self.gridLayout_7.addWidget(self.round3_enemy1_health, 8, 2, 1, 1)
self.horizontalLayout_20 = QtWidgets.QHBoxLayout()
self.horizontalLayout_20.setObjectName("horizontalLayout_20")
self.round3_enemy2_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round3_enemy2_pic.setEnabled(False)
self.round3_enemy2_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round3_enemy2_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round3_enemy2_pic.setText("")
self.round3_enemy2_pic.setIconSize(QtCore.QSize(64, 64))
self.round3_enemy2_pic.setObjectName("round3_enemy2_pic")
self.horizontalLayout_20.addWidget(self.round3_enemy2_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_20, 6, 1, 1, 1)
self.round2_enemy3_class = QtWidgets.QLabel(self.groupBox_2)
self.round2_enemy3_class.setMinimumSize(QtCore.QSize(150, 0))
self.round2_enemy3_class.setMaximumSize(QtCore.QSize(150, 16777215))
self.round2_enemy3_class.setText("")
self.round2_enemy3_class.setObjectName("round2_enemy3_class")
self.gridLayout_7.addWidget(self.round2_enemy3_class, 4, 0, 1, 1)
self.round2_enemy2_class = QtWidgets.QLabel(self.groupBox_2)
self.round2_enemy2_class.setMinimumSize(QtCore.QSize(150, 0))
self.round2_enemy2_class.setMaximumSize(QtCore.QSize(150, 16777215))
self.round2_enemy2_class.setText("")
self.round2_enemy2_class.setObjectName("round2_enemy2_class")
self.gridLayout_7.addWidget(self.round2_enemy2_class, 4, 1, 1, 1)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.round2_enemy3_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round2_enemy3_pic.setEnabled(False)
self.round2_enemy3_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round2_enemy3_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round2_enemy3_pic.setText("")
self.round2_enemy3_pic.setIconSize(QtCore.QSize(64, 64))
self.round2_enemy3_pic.setObjectName("round2_enemy3_pic")
self.horizontalLayout_6.addWidget(self.round2_enemy3_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_6, 3, 0, 1, 1)
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.round2_enemy2_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round2_enemy2_pic.setEnabled(False)
self.round2_enemy2_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round2_enemy2_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round2_enemy2_pic.setText("")
self.round2_enemy2_pic.setIconSize(QtCore.QSize(64, 64))
self.round2_enemy2_pic.setObjectName("round2_enemy2_pic")
self.horizontalLayout_7.addWidget(self.round2_enemy2_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_7, 3, 1, 1, 1)
self.horizontalLayout_21 = QtWidgets.QHBoxLayout()
self.horizontalLayout_21.setObjectName("horizontalLayout_21")
self.round3_enemy3_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round3_enemy3_pic.setEnabled(False)
self.round3_enemy3_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round3_enemy3_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round3_enemy3_pic.setText("")
self.round3_enemy3_pic.setIconSize(QtCore.QSize(64, 64))
self.round3_enemy3_pic.setObjectName("round3_enemy3_pic")
self.horizontalLayout_21.addWidget(self.round3_enemy3_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_21, 6, 0, 1, 1)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.round1_enemy2_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round1_enemy2_pic.setEnabled(False)
self.round1_enemy2_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round1_enemy2_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round1_enemy2_pic.setText("")
self.round1_enemy2_pic.setIconSize(QtCore.QSize(64, 64))
self.round1_enemy2_pic.setObjectName("round1_enemy2_pic")
self.horizontalLayout_2.addWidget(self.round1_enemy2_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_2, 0, 1, 1, 1)
self.round3_enemy3_class = QtWidgets.QLabel(self.groupBox_2)
self.round3_enemy3_class.setMinimumSize(QtCore.QSize(150, 0))
self.round3_enemy3_class.setMaximumSize(QtCore.QSize(150, 28))
self.round3_enemy3_class.setText("")
self.round3_enemy3_class.setObjectName("round3_enemy3_class")
self.gridLayout_7.addWidget(self.round3_enemy3_class, 7, 0, 1, 1)
self.round1_enemy2_class = QtWidgets.QLabel(self.groupBox_2)
self.round1_enemy2_class.setMinimumSize(QtCore.QSize(150, 0))
self.round1_enemy2_class.setMaximumSize(QtCore.QSize(150, 28))
self.round1_enemy2_class.setText("")
self.round1_enemy2_class.setObjectName("round1_enemy2_class")
self.gridLayout_7.addWidget(self.round1_enemy2_class, 1, 1, 1, 1)
self.round3_enemy3_health = QtWidgets.QLabel(self.groupBox_2)
self.round3_enemy3_health.setMinimumSize(QtCore.QSize(150, 0))
self.round3_enemy3_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round3_enemy3_health.setText("")
self.round3_enemy3_health.setObjectName("round3_enemy3_health")
self.gridLayout_7.addWidget(self.round3_enemy3_health, 8, 0, 1, 1)
self.round1_enemy3_health = QtWidgets.QLabel(self.groupBox_2)
self.round1_enemy3_health.setMinimumSize(QtCore.QSize(150, 0))
self.round1_enemy3_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round1_enemy3_health.setText("")
self.round1_enemy3_health.setObjectName("round1_enemy3_health")
self.gridLayout_7.addWidget(self.round1_enemy3_health, 2, 0, 1, 1)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.round1_enemy1_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round1_enemy1_pic.setEnabled(False)
self.round1_enemy1_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round1_enemy1_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round1_enemy1_pic.setText("")
self.round1_enemy1_pic.setIconSize(QtCore.QSize(64, 64))
self.round1_enemy1_pic.setObjectName("round1_enemy1_pic")
self.horizontalLayout.addWidget(self.round1_enemy1_pic)
self.gridLayout_7.addLayout(self.horizontalLayout, 0, 2, 1, 1)
self.round2_enemy3_health = QtWidgets.QLabel(self.groupBox_2)
self.round2_enemy3_health.setMinimumSize(QtCore.QSize(150, 0))
self.round2_enemy3_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round2_enemy3_health.setText("")
self.round2_enemy3_health.setObjectName("round2_enemy3_health")
self.gridLayout_7.addWidget(self.round2_enemy3_health, 5, 0, 1, 1)
self.round2_enemy2_health = QtWidgets.QLabel(self.groupBox_2)
self.round2_enemy2_health.setMinimumSize(QtCore.QSize(150, 0))
self.round2_enemy2_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round2_enemy2_health.setText("")
self.round2_enemy2_health.setObjectName("round2_enemy2_health")
self.gridLayout_7.addWidget(self.round2_enemy2_health, 5, 1, 1, 1)
self.round3_enemy2_health = QtWidgets.QLabel(self.groupBox_2)
self.round3_enemy2_health.setMinimumSize(QtCore.QSize(150, 0))
self.round3_enemy2_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round3_enemy2_health.setText("")
self.round3_enemy2_health.setObjectName("round3_enemy2_health")
self.gridLayout_7.addWidget(self.round3_enemy2_health, 8, 1, 1, 1)
self.round1_enemy2_health = QtWidgets.QLabel(self.groupBox_2)
self.round1_enemy2_health.setMinimumSize(QtCore.QSize(150, 0))
self.round1_enemy2_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round1_enemy2_health.setText("")
self.round1_enemy2_health.setObjectName("round1_enemy2_health")
self.gridLayout_7.addWidget(self.round1_enemy2_health, 2, 1, 1, 1)
self.round2_enemy1_class = QtWidgets.QLabel(self.groupBox_2)
self.round2_enemy1_class.setMinimumSize(QtCore.QSize(150, 0))
self.round2_enemy1_class.setMaximumSize(QtCore.QSize(150, 16777215))
self.round2_enemy1_class.setText("")
self.round2_enemy1_class.setObjectName("round2_enemy1_class")
self.gridLayout_7.addWidget(self.round2_enemy1_class, 4, 2, 1, 1)
self.round1_enemy1_class = QtWidgets.QLabel(self.groupBox_2)
self.round1_enemy1_class.setMinimumSize(QtCore.QSize(150, 0))
self.round1_enemy1_class.setMaximumSize(QtCore.QSize(150, 28))
self.round1_enemy1_class.setText("")
self.round1_enemy1_class.setObjectName("round1_enemy1_class")
self.gridLayout_7.addWidget(self.round1_enemy1_class, 1, 2, 1, 1)
self.round1_enemy1_health = QtWidgets.QLabel(self.groupBox_2)
self.round1_enemy1_health.setMinimumSize(QtCore.QSize(150, 0))
self.round1_enemy1_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round1_enemy1_health.setText("")
self.round1_enemy1_health.setObjectName("round1_enemy1_health")
self.gridLayout_7.addWidget(self.round1_enemy1_health, 2, 2, 1, 1)
self.round2_enemy1_health = QtWidgets.QLabel(self.groupBox_2)
self.round2_enemy1_health.setMinimumSize(QtCore.QSize(150, 0))
self.round2_enemy1_health.setMaximumSize(QtCore.QSize(150, 16777215))
self.round2_enemy1_health.setText("")
self.round2_enemy1_health.setObjectName("round2_enemy1_health")
self.gridLayout_7.addWidget(self.round2_enemy1_health, 5, 2, 1, 1)
self.horizontalLayout_12 = QtWidgets.QHBoxLayout()
self.horizontalLayout_12.setObjectName("horizontalLayout_12")
self.round3_enemy1_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round3_enemy1_pic.setEnabled(False)
self.round3_enemy1_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round3_enemy1_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round3_enemy1_pic.setText("")
self.round3_enemy1_pic.setIconSize(QtCore.QSize(64, 64))
self.round3_enemy1_pic.setObjectName("round3_enemy1_pic")
self.horizontalLayout_12.addWidget(self.round3_enemy1_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_12, 6, 2, 1, 1)
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.round1_enemy3_pic = QtWidgets.QPushButton(self.groupBox_2)
self.round1_enemy3_pic.setEnabled(False)
self.round1_enemy3_pic.setMinimumSize(QtCore.QSize(64, 64))
self.round1_enemy3_pic.setMaximumSize(QtCore.QSize(64, 64))
self.round1_enemy3_pic.setText("")
self.round1_enemy3_pic.setIconSize(QtCore.QSize(64, 64))
self.round1_enemy3_pic.setObjectName("round1_enemy3_pic")
self.horizontalLayout_5.addWidget(self.round1_enemy3_pic)
self.gridLayout_7.addLayout(self.horizontalLayout_5, 0, 0, 1, 1)
self.round3_enemy2_class = QtWidgets.QLabel(self.groupBox_2)
self.round3_enemy2_class.setMinimumSize(QtCore.QSize(150, 0))
self.round3_enemy2_class.setMaximumSize(QtCore.QSize(150, 28))
self.round3_enemy2_class.setText("")
self.round3_enemy2_class.setObjectName("round3_enemy2_class")
self.gridLayout_7.addWidget(self.round3_enemy2_class, 7, 1, 1, 1)
self.verticalLayout_4.addLayout(self.gridLayout_7)
self.verticalLayout_6.addLayout(self.verticalLayout_4)
self.horizontalLayout_15.addWidget(self.groupBox_2)
self.groupBox_3 = QtWidgets.QGroupBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_3.sizePolicy().hasHeightForWidth())
self.groupBox_3.setSizePolicy(sizePolicy)
self.groupBox_3.setTitle("")
self.groupBox_3.setObjectName("groupBox_3")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.groupBox_3)
self.verticalLayout_7.setContentsMargins(5, 5, 5, 5)
self.verticalLayout_7.setSpacing(5)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.gridLayout_2 = QtWidgets.QGridLayout()
self.gridLayout_2.setObjectName("gridLayout_2")
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.round1_label_random = QtWidgets.QLabel(self.groupBox_3)
self.round1_label_random.setEnabled(False)
self.round1_label_random.setMaximumSize(QtCore.QSize(100, 16777215))
self.round1_label_random.setObjectName("round1_label_random")
self.verticalLayout_5.addWidget(self.round1_label_random)
self.round1_bar_random = QtWidgets.QSlider(self.groupBox_3)
self.round1_bar_random.setEnabled(False)
self.round1_bar_random.setMaximumSize(QtCore.QSize(100, 16777215))
self.round1_bar_random.setMinimum(90)
self.round1_bar_random.setMaximum(110)
self.round1_bar_random.setProperty("value", 90)
self.round1_bar_random.setOrientation(QtCore.Qt.Horizontal)
self.round1_bar_random.setObjectName("round1_bar_random")
self.verticalLayout_5.addWidget(self.round1_bar_random)
self.gridLayout_2.addLayout(self.verticalLayout_5, 1, 8, 1, 1)
self.round1_servant2_np = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant2_np.setEnabled(False)
self.round1_servant2_np.setObjectName("round1_servant2_np")
self.gridLayout_2.addWidget(self.round1_servant2_np, 4, 5, 1, 1)
self.round1_servant3_np = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant3_np.setEnabled(False)
self.round1_servant3_np.setObjectName("round1_servant3_np")
self.gridLayout_2.addWidget(self.round1_servant3_np, 4, 6, 1, 1)
self.horizontalLayout_16 = QtWidgets.QHBoxLayout()
self.horizontalLayout_16.setObjectName("horizontalLayout_16")
self.round1_servant2_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant2_pic.setEnabled(False)
self.round1_servant2_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round1_servant2_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round1_servant2_pic.setText("")
self.round1_servant2_pic.setIconSize(QtCore.QSize(64, 70))
self.round1_servant2_pic.setObjectName("round1_servant2_pic")
self.horizontalLayout_16.addWidget(self.round1_servant2_pic)
self.gridLayout_2.addLayout(self.horizontalLayout_16, 1, 5, 1, 1)
self.horizontalLayout_9 = QtWidgets.QHBoxLayout()
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.round1_servant1_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant1_pic.setEnabled(False)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.round1_servant1_pic.sizePolicy().hasHeightForWidth())
self.round1_servant1_pic.setSizePolicy(sizePolicy)
self.round1_servant1_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round1_servant1_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round1_servant1_pic.setText("")
self.round1_servant1_pic.setIconSize(QtCore.QSize(64, 70))
self.round1_servant1_pic.setObjectName("round1_servant1_pic")
self.horizontalLayout_9.addWidget(self.round1_servant1_pic)
self.gridLayout_2.addLayout(self.horizontalLayout_9, 1, 4, 1, 1)
self.horizontalLayout_19 = QtWidgets.QHBoxLayout()
self.horizontalLayout_19.setObjectName("horizontalLayout_19")
self.round1_master_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round1_master_pic.setEnabled(False)
self.round1_master_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round1_master_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round1_master_pic.setText("")
self.round1_master_pic.setIconSize(QtCore.QSize(64, 64))
self.round1_master_pic.setObjectName("round1_master_pic")
self.horizontalLayout_19.addWidget(self.round1_master_pic)
self.gridLayout_2.addLayout(self.horizontalLayout_19, 1, 7, 1, 1)
self.horizontalLayout_11 = QtWidgets.QHBoxLayout()
self.horizontalLayout_11.setObjectName("horizontalLayout_11")
self.round1_servant1_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant1_skill1.setEnabled(False)
self.round1_servant1_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant1_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant1_skill1.setText("")
self.round1_servant1_skill1.setIconSize(QtCore.QSize(30, 30))
self.round1_servant1_skill1.setObjectName("round1_servant1_skill1")
self.horizontalLayout_11.addWidget(self.round1_servant1_skill1)
self.round1_servant1_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant1_skill2.setEnabled(False)
self.round1_servant1_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant1_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant1_skill2.setText("")
self.round1_servant1_skill2.setIconSize(QtCore.QSize(30, 30))
self.round1_servant1_skill2.setObjectName("round1_servant1_skill2")
self.horizontalLayout_11.addWidget(self.round1_servant1_skill2)
self.round1_servant1_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant1_skill3.setEnabled(False)
self.round1_servant1_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant1_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant1_skill3.setText("")
self.round1_servant1_skill3.setIconSize(QtCore.QSize(30, 30))
self.round1_servant1_skill3.setObjectName("round1_servant1_skill3")
self.horizontalLayout_11.addWidget(self.round1_servant1_skill3)
self.gridLayout_2.addLayout(self.horizontalLayout_11, 3, 4, 1, 1)
self.horizontalLayout_17 = QtWidgets.QHBoxLayout()
self.horizontalLayout_17.setObjectName("horizontalLayout_17")
self.round1_servant3_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant3_pic.setEnabled(False)
self.round1_servant3_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round1_servant3_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round1_servant3_pic.setText("")
self.round1_servant3_pic.setIconSize(QtCore.QSize(64, 70))
self.round1_servant3_pic.setObjectName("round1_servant3_pic")
self.horizontalLayout_17.addWidget(self.round1_servant3_pic)
self.gridLayout_2.addLayout(self.horizontalLayout_17, 1, 6, 1, 1)
self.btn_round1_next = QtWidgets.QPushButton(self.groupBox_3)
self.btn_round1_next.setEnabled(False)
self.btn_round1_next.setMinimumSize(QtCore.QSize(0, 30))
self.btn_round1_next.setMaximumSize(QtCore.QSize(16777215, 30))
self.btn_round1_next.setObjectName("btn_round1_next")
self.gridLayout_2.addWidget(self.btn_round1_next, 3, 8, 1, 1)
self.round1_servant1_np = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant1_np.setEnabled(False)
self.round1_servant1_np.setObjectName("round1_servant1_np")
self.gridLayout_2.addWidget(self.round1_servant1_np, 4, 4, 1, 1)
self.horizontalLayout_14 = QtWidgets.QHBoxLayout()
self.horizontalLayout_14.setObjectName("horizontalLayout_14")
self.round1_servant3_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant3_skill1.setEnabled(False)
self.round1_servant3_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant3_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant3_skill1.setText("")
self.round1_servant3_skill1.setIconSize(QtCore.QSize(30, 30))
self.round1_servant3_skill1.setObjectName("round1_servant3_skill1")
self.horizontalLayout_14.addWidget(self.round1_servant3_skill1)
self.round1_servant3_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant3_skill2.setEnabled(False)
self.round1_servant3_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant3_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant3_skill2.setText("")
self.round1_servant3_skill2.setIconSize(QtCore.QSize(30, 30))
self.round1_servant3_skill2.setObjectName("round1_servant3_skill2")
self.horizontalLayout_14.addWidget(self.round1_servant3_skill2)
self.round1_servant3_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant3_skill3.setEnabled(False)
self.round1_servant3_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant3_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant3_skill3.setText("")
self.round1_servant3_skill3.setIconSize(QtCore.QSize(30, 30))
self.round1_servant3_skill3.setObjectName("round1_servant3_skill3")
self.horizontalLayout_14.addWidget(self.round1_servant3_skill3)
self.gridLayout_2.addLayout(self.horizontalLayout_14, 3, 6, 1, 1)
self.horizontalLayout_18 = QtWidgets.QHBoxLayout()
self.horizontalLayout_18.setObjectName("horizontalLayout_18")
self.round1_master_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_master_skill1.setEnabled(False)
self.round1_master_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round1_master_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round1_master_skill1.setText("")
self.round1_master_skill1.setIconSize(QtCore.QSize(30, 30))
self.round1_master_skill1.setObjectName("round1_master_skill1")
self.horizontalLayout_18.addWidget(self.round1_master_skill1)
self.round1_master_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_master_skill2.setEnabled(False)
self.round1_master_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round1_master_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round1_master_skill2.setText("")
self.round1_master_skill2.setIconSize(QtCore.QSize(30, 30))
self.round1_master_skill2.setObjectName("round1_master_skill2")
self.horizontalLayout_18.addWidget(self.round1_master_skill2)
self.round1_master_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_master_skill3.setEnabled(False)
self.round1_master_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round1_master_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round1_master_skill3.setText("")
self.round1_master_skill3.setIconSize(QtCore.QSize(30, 30))
self.round1_master_skill3.setObjectName("round1_master_skill3")
self.horizontalLayout_18.addWidget(self.round1_master_skill3)
self.gridLayout_2.addLayout(self.horizontalLayout_18, 3, 7, 1, 1)
self.horizontalLayout_13 = QtWidgets.QHBoxLayout()
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.round1_servant2_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant2_skill1.setEnabled(False)
self.round1_servant2_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant2_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant2_skill1.setText("")
self.round1_servant2_skill1.setIconSize(QtCore.QSize(30, 30))
self.round1_servant2_skill1.setObjectName("round1_servant2_skill1")
self.horizontalLayout_13.addWidget(self.round1_servant2_skill1)
self.round1_servant2_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant2_skill2.setEnabled(False)
self.round1_servant2_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant2_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant2_skill2.setText("")
self.round1_servant2_skill2.setIconSize(QtCore.QSize(30, 30))
self.round1_servant2_skill2.setObjectName("round1_servant2_skill2")
self.horizontalLayout_13.addWidget(self.round1_servant2_skill2)
self.round1_servant2_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round1_servant2_skill3.setEnabled(False)
self.round1_servant2_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round1_servant2_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round1_servant2_skill3.setText("")
self.round1_servant2_skill3.setIconSize(QtCore.QSize(30, 30))
self.round1_servant2_skill3.setObjectName("round1_servant2_skill3")
self.horizontalLayout_13.addWidget(self.round1_servant2_skill3)
self.gridLayout_2.addLayout(self.horizontalLayout_13, 3, 5, 1, 1)
self.verticalLayout_7.addLayout(self.gridLayout_2)
self.gridLayout_4 = QtWidgets.QGridLayout()
self.gridLayout_4.setObjectName("gridLayout_4")
self.round2_servant3_np = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant3_np.setEnabled(False)
self.round2_servant3_np.setObjectName("round2_servant3_np")
self.gridLayout_4.addWidget(self.round2_servant3_np, 3, 5, 1, 1)
self.horizontalLayout_181 = QtWidgets.QHBoxLayout()
self.horizontalLayout_181.setObjectName("horizontalLayout_181")
self.round2_master_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_master_skill1.setEnabled(False)
self.round2_master_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round2_master_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round2_master_skill1.setText("")
self.round2_master_skill1.setIconSize(QtCore.QSize(30, 30))
self.round2_master_skill1.setObjectName("round2_master_skill1")
self.horizontalLayout_181.addWidget(self.round2_master_skill1)
self.round2_master_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_master_skill2.setEnabled(False)
self.round2_master_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round2_master_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round2_master_skill2.setText("")
self.round2_master_skill2.setIconSize(QtCore.QSize(30, 30))
self.round2_master_skill2.setObjectName("round2_master_skill2")
self.horizontalLayout_181.addWidget(self.round2_master_skill2)
self.round2_master_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_master_skill3.setEnabled(False)
self.round2_master_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round2_master_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round2_master_skill3.setText("")
self.round2_master_skill3.setIconSize(QtCore.QSize(30, 30))
self.round2_master_skill3.setObjectName("round2_master_skill3")
self.horizontalLayout_181.addWidget(self.round2_master_skill3)
self.gridLayout_4.addLayout(self.horizontalLayout_181, 1, 6, 1, 1)
self.round2_servant1_np = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant1_np.setEnabled(False)
self.round2_servant1_np.setObjectName("round2_servant1_np")
self.gridLayout_4.addWidget(self.round2_servant1_np, 3, 3, 1, 1)
self.horizontalLayout_171 = QtWidgets.QHBoxLayout()
self.horizontalLayout_171.setObjectName("horizontalLayout_171")
self.round2_servant3_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant3_pic.setEnabled(False)
self.round2_servant3_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round2_servant3_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round2_servant3_pic.setText("")
self.round2_servant3_pic.setIconSize(QtCore.QSize(64, 70))
self.round2_servant3_pic.setObjectName("round2_servant3_pic")
self.horizontalLayout_171.addWidget(self.round2_servant3_pic)
self.gridLayout_4.addLayout(self.horizontalLayout_171, 0, 5, 1, 1)
self.horizontalLayout_161 = QtWidgets.QHBoxLayout()
self.horizontalLayout_161.setObjectName("horizontalLayout_161")
self.round2_servant2_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant2_pic.setEnabled(False)
self.round2_servant2_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round2_servant2_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round2_servant2_pic.setText("")
self.round2_servant2_pic.setIconSize(QtCore.QSize(64, 70))
self.round2_servant2_pic.setObjectName("round2_servant2_pic")
self.horizontalLayout_161.addWidget(self.round2_servant2_pic)
self.gridLayout_4.addLayout(self.horizontalLayout_161, 0, 4, 1, 1)
self.horizontalLayout_131 = QtWidgets.QHBoxLayout()
self.horizontalLayout_131.setObjectName("horizontalLayout_131")
self.round2_servant2_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant2_skill1.setEnabled(False)
self.round2_servant2_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant2_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant2_skill1.setText("")
self.round2_servant2_skill1.setIconSize(QtCore.QSize(30, 30))
self.round2_servant2_skill1.setObjectName("round2_servant2_skill1")
self.horizontalLayout_131.addWidget(self.round2_servant2_skill1)
self.round2_servant2_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant2_skill2.setEnabled(False)
self.round2_servant2_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant2_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant2_skill2.setText("")
self.round2_servant2_skill2.setIconSize(QtCore.QSize(30, 30))
self.round2_servant2_skill2.setObjectName("round2_servant2_skill2")
self.horizontalLayout_131.addWidget(self.round2_servant2_skill2)
self.round2_servant2_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant2_skill3.setEnabled(False)
self.round2_servant2_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant2_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant2_skill3.setText("")
self.round2_servant2_skill3.setIconSize(QtCore.QSize(30, 30))
self.round2_servant2_skill3.setObjectName("round2_servant2_skill3")
self.horizontalLayout_131.addWidget(self.round2_servant2_skill3)
self.gridLayout_4.addLayout(self.horizontalLayout_131, 1, 4, 1, 1)
self.horizontalLayout_141 = QtWidgets.QHBoxLayout()
self.horizontalLayout_141.setObjectName("horizontalLayout_141")
self.round2_servant3_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant3_skill1.setEnabled(False)
self.round2_servant3_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant3_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant3_skill1.setText("")
self.round2_servant3_skill1.setIconSize(QtCore.QSize(30, 30))
self.round2_servant3_skill1.setObjectName("round2_servant3_skill1")
self.horizontalLayout_141.addWidget(self.round2_servant3_skill1)
self.round2_servant3_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant3_skill2.setEnabled(False)
self.round2_servant3_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant3_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant3_skill2.setText("")
self.round2_servant3_skill2.setIconSize(QtCore.QSize(30, 30))
self.round2_servant3_skill2.setObjectName("round2_servant3_skill2")
self.horizontalLayout_141.addWidget(self.round2_servant3_skill2)
self.round2_servant3_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant3_skill3.setEnabled(False)
self.round2_servant3_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant3_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant3_skill3.setText("")
self.round2_servant3_skill3.setIconSize(QtCore.QSize(30, 30))
self.round2_servant3_skill3.setObjectName("round2_servant3_skill3")
self.horizontalLayout_141.addWidget(self.round2_servant3_skill3)
self.gridLayout_4.addLayout(self.horizontalLayout_141, 1, 5, 1, 1)
self.btn_round2_next = QtWidgets.QPushButton(self.groupBox_3)
self.btn_round2_next.setEnabled(False)
self.btn_round2_next.setMinimumSize(QtCore.QSize(0, 30))
self.btn_round2_next.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.btn_round2_next.setObjectName("btn_round2_next")
self.gridLayout_4.addWidget(self.btn_round2_next, 1, 7, 1, 1)
self.horizontalLayout_191 = QtWidgets.QHBoxLayout()
self.horizontalLayout_191.setObjectName("horizontalLayout_191")
self.round2_master_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round2_master_pic.setEnabled(False)
self.round2_master_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round2_master_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round2_master_pic.setText("")
self.round2_master_pic.setIconSize(QtCore.QSize(64, 64))
self.round2_master_pic.setObjectName("round2_master_pic")
self.horizontalLayout_191.addWidget(self.round2_master_pic)
self.gridLayout_4.addLayout(self.horizontalLayout_191, 0, 6, 1, 1)
self.round2_servant2_np = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant2_np.setEnabled(False)
self.round2_servant2_np.setObjectName("round2_servant2_np")
self.gridLayout_4.addWidget(self.round2_servant2_np, 3, 4, 1, 1)
self.horizontalLayout_91 = QtWidgets.QHBoxLayout()
self.horizontalLayout_91.setObjectName("horizontalLayout_91")
self.round2_servant1_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant1_pic.setEnabled(False)
self.round2_servant1_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round2_servant1_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round2_servant1_pic.setText("")
self.round2_servant1_pic.setIconSize(QtCore.QSize(64, 70))
self.round2_servant1_pic.setObjectName("round2_servant1_pic")
self.horizontalLayout_91.addWidget(self.round2_servant1_pic)
self.gridLayout_4.addLayout(self.horizontalLayout_91, 0, 3, 1, 1)
self.verticalLayout_12 = QtWidgets.QVBoxLayout()
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.round2_label_random = QtWidgets.QLabel(self.groupBox_3)
self.round2_label_random.setEnabled(False)
self.round2_label_random.setObjectName("round2_label_random")
self.verticalLayout_12.addWidget(self.round2_label_random)
self.round2_bar_random = QtWidgets.QSlider(self.groupBox_3)
self.round2_bar_random.setEnabled(False)
self.round2_bar_random.setMaximumSize(QtCore.QSize(100, 16777215))
self.round2_bar_random.setMinimum(90)
self.round2_bar_random.setMaximum(110)
self.round2_bar_random.setProperty("value", 90)
self.round2_bar_random.setOrientation(QtCore.Qt.Horizontal)
self.round2_bar_random.setObjectName("round2_bar_random")
self.verticalLayout_12.addWidget(self.round2_bar_random)
self.gridLayout_4.addLayout(self.verticalLayout_12, 0, 7, 1, 1)
self.horizontalLayout_111 = QtWidgets.QHBoxLayout()
self.horizontalLayout_111.setObjectName("horizontalLayout_111")
self.round2_servant1_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant1_skill1.setEnabled(False)
self.round2_servant1_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant1_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant1_skill1.setText("")
self.round2_servant1_skill1.setIconSize(QtCore.QSize(30, 30))
self.round2_servant1_skill1.setObjectName("round2_servant1_skill1")
self.horizontalLayout_111.addWidget(self.round2_servant1_skill1)
self.round2_servant1_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant1_skill2.setEnabled(False)
self.round2_servant1_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant1_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant1_skill2.setText("")
self.round2_servant1_skill2.setIconSize(QtCore.QSize(30, 30))
self.round2_servant1_skill2.setObjectName("round2_servant1_skill2")
self.horizontalLayout_111.addWidget(self.round2_servant1_skill2)
self.round2_servant1_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round2_servant1_skill3.setEnabled(False)
self.round2_servant1_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round2_servant1_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round2_servant1_skill3.setText("")
self.round2_servant1_skill3.setIconSize(QtCore.QSize(30, 30))
self.round2_servant1_skill3.setObjectName("round2_servant1_skill3")
self.horizontalLayout_111.addWidget(self.round2_servant1_skill3)
self.gridLayout_4.addLayout(self.horizontalLayout_111, 1, 3, 1, 1)
self.verticalLayout_7.addLayout(self.gridLayout_4)
self.gridLayout_5 = QtWidgets.QGridLayout()
self.gridLayout_5.setObjectName("gridLayout_5")
self.round3_servant3_np = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant3_np.setEnabled(False)
self.round3_servant3_np.setObjectName("round3_servant3_np")
self.gridLayout_5.addWidget(self.round3_servant3_np, 3, 6, 1, 1)
self.horizontalLayout_192 = QtWidgets.QHBoxLayout()
self.horizontalLayout_192.setObjectName("horizontalLayout_192")
self.round3_master_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round3_master_pic.setEnabled(False)
self.round3_master_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round3_master_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round3_master_pic.setText("")
self.round3_master_pic.setIconSize(QtCore.QSize(64, 64))
self.round3_master_pic.setObjectName("round3_master_pic")
self.horizontalLayout_192.addWidget(self.round3_master_pic)
self.gridLayout_5.addLayout(self.horizontalLayout_192, 0, 7, 1, 1)
self.horizontalLayout_92 = QtWidgets.QHBoxLayout()
self.horizontalLayout_92.setObjectName("horizontalLayout_92")
self.round3_servant1_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant1_pic.setEnabled(False)
self.round3_servant1_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round3_servant1_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round3_servant1_pic.setText("")
self.round3_servant1_pic.setIconSize(QtCore.QSize(64, 70))
self.round3_servant1_pic.setObjectName("round3_servant1_pic")
self.horizontalLayout_92.addWidget(self.round3_servant1_pic)
self.gridLayout_5.addLayout(self.horizontalLayout_92, 0, 3, 1, 1)
self.round3_servant1_np = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant1_np.setEnabled(False)
self.round3_servant1_np.setObjectName("round3_servant1_np")
self.gridLayout_5.addWidget(self.round3_servant1_np, 3, 3, 1, 1)
self.round3_servant2_np = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant2_np.setEnabled(False)
self.round3_servant2_np.setObjectName("round3_servant2_np")
self.gridLayout_5.addWidget(self.round3_servant2_np, 3, 5, 1, 1)
self.horizontalLayout_172 = QtWidgets.QHBoxLayout()
self.horizontalLayout_172.setObjectName("horizontalLayout_172")
self.round3_servant3_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant3_pic.setEnabled(False)
self.round3_servant3_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round3_servant3_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round3_servant3_pic.setText("")
self.round3_servant3_pic.setIconSize(QtCore.QSize(64, 70))
self.round3_servant3_pic.setObjectName("round3_servant3_pic")
self.horizontalLayout_172.addWidget(self.round3_servant3_pic)
self.gridLayout_5.addLayout(self.horizontalLayout_172, 0, 6, 1, 1)
self.horizontalLayout_162 = QtWidgets.QHBoxLayout()
self.horizontalLayout_162.setObjectName("horizontalLayout_162")
self.round3_servant2_pic = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant2_pic.setEnabled(False)
self.round3_servant2_pic.setMinimumSize(QtCore.QSize(64, 70))
self.round3_servant2_pic.setMaximumSize(QtCore.QSize(64, 70))
self.round3_servant2_pic.setText("")
self.round3_servant2_pic.setIconSize(QtCore.QSize(64, 70))
self.round3_servant2_pic.setObjectName("round3_servant2_pic")
self.horizontalLayout_162.addWidget(self.round3_servant2_pic)
self.gridLayout_5.addLayout(self.horizontalLayout_162, 0, 5, 1, 1)
self.btn_output_strategy = QtWidgets.QPushButton(self.groupBox_3)
self.btn_output_strategy.setEnabled(False)
self.btn_output_strategy.setObjectName("btn_output_strategy")
self.gridLayout_5.addWidget(self.btn_output_strategy, 2, 9, 1, 1)
self.horizontalLayout_132 = QtWidgets.QHBoxLayout()
self.horizontalLayout_132.setObjectName("horizontalLayout_132")
self.round3_servant2_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant2_skill1.setEnabled(False)
self.round3_servant2_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant2_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant2_skill1.setText("")
self.round3_servant2_skill1.setIconSize(QtCore.QSize(30, 30))
self.round3_servant2_skill1.setObjectName("round3_servant2_skill1")
self.horizontalLayout_132.addWidget(self.round3_servant2_skill1)
self.round3_servant2_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant2_skill2.setEnabled(False)
self.round3_servant2_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant2_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant2_skill2.setText("")
self.round3_servant2_skill2.setIconSize(QtCore.QSize(30, 30))
self.round3_servant2_skill2.setObjectName("round3_servant2_skill2")
self.horizontalLayout_132.addWidget(self.round3_servant2_skill2)
self.round3_servant2_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant2_skill3.setEnabled(False)
self.round3_servant2_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant2_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant2_skill3.setText("")
self.round3_servant2_skill3.setIconSize(QtCore.QSize(30, 30))
self.round3_servant2_skill3.setObjectName("round3_servant2_skill3")
self.horizontalLayout_132.addWidget(self.round3_servant2_skill3)
self.gridLayout_5.addLayout(self.horizontalLayout_132, 2, 5, 1, 1)
self.verticalLayout_13 = QtWidgets.QVBoxLayout()
self.verticalLayout_13.setObjectName("verticalLayout_13")
self.round3_label_random = QtWidgets.QLabel(self.groupBox_3)
self.round3_label_random.setEnabled(False)
self.round3_label_random.setObjectName("round3_label_random")
self.verticalLayout_13.addWidget(self.round3_label_random)
self.round3_bar_random = QtWidgets.QSlider(self.groupBox_3)
self.round3_bar_random.setEnabled(False)
self.round3_bar_random.setMaximumSize(QtCore.QSize(100, 16777215))
self.round3_bar_random.setMinimum(90)
self.round3_bar_random.setMaximum(110)
self.round3_bar_random.setProperty("value", 90)
self.round3_bar_random.setOrientation(QtCore.Qt.Horizontal)
self.round3_bar_random.setObjectName("round3_bar_random")
self.verticalLayout_13.addWidget(self.round3_bar_random)
self.gridLayout_5.addLayout(self.verticalLayout_13, 0, 9, 1, 1)
self.horizontalLayout_182 = QtWidgets.QHBoxLayout()
self.horizontalLayout_182.setObjectName("horizontalLayout_182")
self.round3_master_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_master_skill1.setEnabled(False)
self.round3_master_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round3_master_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round3_master_skill1.setText("")
self.round3_master_skill1.setIconSize(QtCore.QSize(30, 30))
self.round3_master_skill1.setObjectName("round3_master_skill1")
self.horizontalLayout_182.addWidget(self.round3_master_skill1)
self.round3_master_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_master_skill2.setEnabled(False)
self.round3_master_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round3_master_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round3_master_skill2.setText("")
self.round3_master_skill2.setIconSize(QtCore.QSize(30, 30))
self.round3_master_skill2.setObjectName("round3_master_skill2")
self.horizontalLayout_182.addWidget(self.round3_master_skill2)
self.round3_master_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_master_skill3.setEnabled(False)
self.round3_master_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round3_master_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round3_master_skill3.setText("")
self.round3_master_skill3.setIconSize(QtCore.QSize(30, 30))
self.round3_master_skill3.setObjectName("round3_master_skill3")
self.horizontalLayout_182.addWidget(self.round3_master_skill3)
self.gridLayout_5.addLayout(self.horizontalLayout_182, 2, 7, 1, 1)
self.horizontalLayout_142 = QtWidgets.QHBoxLayout()
self.horizontalLayout_142.setObjectName("horizontalLayout_142")
self.round3_servant3_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant3_skill1.setEnabled(False)
self.round3_servant3_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant3_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant3_skill1.setText("")
self.round3_servant3_skill1.setIconSize(QtCore.QSize(30, 30))
self.round3_servant3_skill1.setObjectName("round3_servant3_skill1")
self.horizontalLayout_142.addWidget(self.round3_servant3_skill1)
self.round3_servant3_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant3_skill2.setEnabled(False)
self.round3_servant3_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant3_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant3_skill2.setText("")
self.round3_servant3_skill2.setIconSize(QtCore.QSize(30, 30))
self.round3_servant3_skill2.setObjectName("round3_servant3_skill2")
self.horizontalLayout_142.addWidget(self.round3_servant3_skill2)
self.round3_servant3_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant3_skill3.setEnabled(False)
self.round3_servant3_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant3_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant3_skill3.setText("")
self.round3_servant3_skill3.setIconSize(QtCore.QSize(30, 30))
self.round3_servant3_skill3.setObjectName("round3_servant3_skill3")
self.horizontalLayout_142.addWidget(self.round3_servant3_skill3)
self.gridLayout_5.addLayout(self.horizontalLayout_142, 2, 6, 1, 1)
self.horizontalLayout_112 = QtWidgets.QHBoxLayout()
self.horizontalLayout_112.setObjectName("horizontalLayout_112")
self.round3_servant1_skill1 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant1_skill1.setEnabled(False)
self.round3_servant1_skill1.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant1_skill1.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant1_skill1.setText("")
self.round3_servant1_skill1.setIconSize(QtCore.QSize(30, 30))
self.round3_servant1_skill1.setObjectName("round3_servant1_skill1")
self.horizontalLayout_112.addWidget(self.round3_servant1_skill1)
self.round3_servant1_skill2 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant1_skill2.setEnabled(False)
self.round3_servant1_skill2.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant1_skill2.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant1_skill2.setText("")
self.round3_servant1_skill2.setIconSize(QtCore.QSize(30, 30))
self.round3_servant1_skill2.setObjectName("round3_servant1_skill2")
self.horizontalLayout_112.addWidget(self.round3_servant1_skill2)
self.round3_servant1_skill3 = QtWidgets.QPushButton(self.groupBox_3)
self.round3_servant1_skill3.setEnabled(False)
self.round3_servant1_skill3.setMinimumSize(QtCore.QSize(30, 30))
self.round3_servant1_skill3.setMaximumSize(QtCore.QSize(30, 30))
self.round3_servant1_skill3.setText("")
self.round3_servant1_skill3.setIconSize(QtCore.QSize(30, 30))
self.round3_servant1_skill3.setObjectName("round3_servant1_skill3")
self.horizontalLayout_112.addWidget(self.round3_servant1_skill3)
self.gridLayout_5.addLayout(self.horizontalLayout_112, 2, 3, 1, 1)
self.verticalLayout_7.addLayout(self.gridLayout_5)
self.horizontalLayout_15.addWidget(self.groupBox_3)
self.verticalLayout_2.addLayout(self.horizontalLayout_15)
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1070, 26))
self.menubar.setObjectName("menubar")
self.menu = QtWidgets.QMenu(self.menubar)
self.menu.setObjectName("menu")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.action_update = QtWidgets.QAction(MainWindow)
self.action_update.setObjectName("action_update")
self.action_mooncell = QtWidgets.QAction(MainWindow)
self.action_mooncell.setObjectName("action_mooncell")
self.action_support = QtWidgets.QAction(MainWindow)
self.action_support.setObjectName("action_support")
self.action_kazemai = QtWidgets.QAction(MainWindow)
self.action_kazemai.setObjectName("action_kazemai")
self.action_about = QtWidgets.QAction(MainWindow)
self.action_about.setObjectName("action_about")
self.menu.addAction(self.action_update)
self.menu.addAction(self.action_support)
self.menu.addAction(self.action_about)
self.menu.addSeparator()
self.menu.addAction(self.action_mooncell)
self.menu.addAction(self.action_kazemai)
self.menubar.addAction(self.menu.menuAction())
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "FGO周回组队器"))
self.label_costume_state_4.setText(_translate("MainWindow", "等级: "))
self.label_servant_state_2.setText(_translate("MainWindow", "技能: \n"
"宝具: \n"
"等级: \n"
"芙芙:"))
self.label_costume_state_1.setText(_translate("MainWindow", "等级: "))
self.box_skill_confirm.setText(_translate("MainWindow", "技能提示"))
self.label.setText(_translate("MainWindow", "概率阈值:"))
self.label_costume_state_5.setText(_translate("MainWindow", "等级: "))
self.label_costume_state_6.setText(_translate("MainWindow", "等级: "))
self.label_servant_state_1.setText(_translate("MainWindow", "技能: \n"
"宝具: \n"
"等级: \n"
"芙芙:"))
self.label_master_state.setText(_translate("MainWindow", "等级:"))
self.label_costume_state_2.setText(_translate("MainWindow", "等级: "))
self.label_servant_state_3.setText(_translate("MainWindow", "技能: \n"
"宝具: \n"
"等级: \n"
"芙芙:"))
self.label_servant_state_5.setText(_translate("MainWindow", "技能: \n"
"宝具: \n"
"等级: \n"
"芙芙:"))
self.label_costume_state_3.setText(_translate("MainWindow", "等级: "))
self.label_servant_state_4.setText(_translate("MainWindow", "技能: \n"
"宝具: \n"
"等级: \n"
"芙芙:"))
self.label_servant_state_6.setText(_translate("MainWindow", "技能: \n"
"宝具: \n"
"等级: \n"
"芙芙:"))
self.btn_set_progress.setText(_translate("MainWindow", "选择进度"))
self.btn_choose_level.setText(_translate("MainWindow", "设置副本"))
self.btn_confirm_team.setText(_translate("MainWindow", "确 认"))
self.btn_change_team.setText(_translate("MainWindow", "修 改"))
self.btn_round_reset.setText(_translate("MainWindow", "撤 销"))
self.round1_label_random.setText(_translate("MainWindow", "随机数: 0.9"))
self.round1_servant2_np.setText(_translate("MainWindow", "宝具: 0%"))
self.round1_servant3_np.setText(_translate("MainWindow", "宝具: 0%"))
self.btn_round1_next.setText(_translate("MainWindow", "下一回合"))
self.round1_servant1_np.setText(_translate("MainWindow", "宝具: 0%"))
self.round2_servant3_np.setText(_translate("MainWindow", "宝具: 0%"))
self.round2_servant1_np.setText(_translate("MainWindow", "宝具: 0%"))
self.btn_round2_next.setText(_translate("MainWindow", "下一回合"))
self.round2_servant2_np.setText(_translate("MainWindow", "宝具: 0%"))
self.round2_label_random.setText(_translate("MainWindow", "随机数: 0.9"))
self.round3_servant3_np.setText(_translate("MainWindow", "宝具: 0%"))
self.round3_servant1_np.setText(_translate("MainWindow", "宝具: 0%"))
self.round3_servant2_np.setText(_translate("MainWindow", "宝具: 0%"))
self.btn_output_strategy.setText(_translate("MainWindow", "输出操作"))
self.round3_label_random.setText(_translate("MainWindow", "随机数: 0.9"))
self.menu.setTitle(_translate("MainWindow", "选 项"))
self.action_update.setText(_translate("MainWindow", "数据库更新"))
self.action_mooncell.setText(_translate("MainWindow", "Mooncell"))
self.action_support.setText(_translate("MainWindow", "软件更新"))
self.action_kazemai.setText(_translate("MainWindow", "茹西教王的理想乡"))
self.action_about.setText(_translate("MainWindow", "关于软件"))
| [
"PyQt5.QtWidgets.QSpinBox",
"PyQt5.QtWidgets.QVBoxLayout",
"PyQt5.QtWidgets.QSizePolicy",
"PyQt5.QtWidgets.QStatusBar",
"PyQt5.QtWidgets.QGroupBox",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QMenu",
"PyQt5.QtCore.QMetaObject.connectSlotsBy... | [((446, 534), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Minimum'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Minimum)\n', (467, 534), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((892, 921), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (909, 921), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1012, 1053), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1033, 1053), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1248, 1287), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1267, 1287), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1403, 1439), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.groupBox'], {}), '(self.groupBox)\n', (1424, 1439), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1637, 1660), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (1658, 1660), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1838, 1869), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (1854, 1869), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1891, 1981), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (1912, 1981), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2485, 2516), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (2501, 2516), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2538, 2628), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 
'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (2559, 2628), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3190, 3221), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.groupBox'], {}), '(self.groupBox)\n', (3206, 3221), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3482, 3513), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (3498, 3513), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3535, 3625), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (3556, 3625), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4125, 4159), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.groupBox'], {}), '(self.groupBox)\n', (4144, 4159), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4333, 4356), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (4354, 4356), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4463, 4499), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (4484, 4499), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4521, 4607), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (4542, 4607), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5353, 5376), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (5374, 5376), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5466, 5497), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (5482, 5497), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5519, 5609), 
'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (5540, 5609), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5949, 5982), 'PyQt5.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.groupBox'], {}), '(self.groupBox)\n', (5967, 5982), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6347, 6370), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (6368, 6370), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6477, 6513), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (6498, 6513), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6535, 6621), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (6556, 6621), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7370, 7401), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (7386, 7401), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7423, 7513), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (7444, 7513), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8017, 8048), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (8033, 8048), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8070, 8160), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (8091, 8160), False, 'from PyQt5 import QtCore, 
QtGui, QtWidgets\n'), ((8662, 8685), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (8683, 8685), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8792, 8828), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (8813, 8828), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8850, 8936), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (8871, 8936), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9683, 9706), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (9704, 9706), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9810, 9846), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (9831, 9846), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10339, 10370), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (10355, 10370), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10392, 10482), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (10413, 10482), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11056, 11087), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (11072, 11087), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11109, 11199), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (11130, 11199), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11750, 11773), 'PyQt5.QtWidgets.QHBoxLayout', 
'QtWidgets.QHBoxLayout', ([], {}), '()\n', (11771, 11773), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11880, 11916), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (11901, 11916), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12430, 12461), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (12446, 12461), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12483, 12573), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (12504, 12573), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13075, 13098), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (13096, 13098), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13205, 13241), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (13226, 13241), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13263, 13349), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (13284, 13349), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14096, 14119), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (14117, 14119), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14226, 14262), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (14247, 14262), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14774, 14797), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (14795, 14797), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14904, 14940), 'PyQt5.QtWidgets.QPushButton', 
'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (14925, 14940), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14962, 15048), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (14983, 15048), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15798, 15829), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (15814, 15829), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15851, 15941), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (15872, 15941), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16518, 16549), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (16534, 16549), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16571, 16661), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (16592, 16661), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17236, 17259), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (17257, 17259), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17366, 17402), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (17387, 17402), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17914, 17937), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (17935, 17937), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18044, 18080), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), 
'(self.groupBox)\n', (18065, 18080), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18102, 18188), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (18123, 18188), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18937, 18968), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (18953, 18968), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18990, 19080), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (19011, 19080), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19584, 19615), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (19600, 19615), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19637, 19727), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (19658, 19727), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20289, 20320), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.groupBox'], {}), '(self.groupBox)\n', (20305, 20320), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20579, 20602), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (20600, 20602), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20709, 20745), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (20730, 20745), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21259, 21290), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox'], {}), '(self.groupBox)\n', (21275, 21290), False, 
'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21312, 21402), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Preferred'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Preferred)\n', (21333, 21402), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21964, 21995), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.groupBox'], {}), '(self.groupBox)\n', (21980, 21995), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22254, 22277), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (22275, 22277), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22384, 22420), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (22405, 22420), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22932, 22955), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (22953, 22955), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23062, 23098), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (23083, 23098), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23120, 23206), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (23141, 23206), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23940, 23971), 'PyQt5.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.groupBox'], {}), '(self.groupBox)\n', (23956, 23971), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24288, 24311), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (24309, 24311), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24412, 24448), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (24433, 24448), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24610, 24646), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (24631, 24646), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((24808, 24844), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (24829, 24844), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25005, 25041), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (25026, 25041), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25246, 25282), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox'], {}), '(self.groupBox)\n', (25267, 25282), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25611, 25634), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (25632, 25634), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25731, 25770), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (25750, 25770), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((25892, 25930), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (25913, 25930), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26132, 26155), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (26153, 26155), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26248, 26271), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (26269, 26271), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26363, 26396), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (26379, 26396), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26768, 26801), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', 
(26784, 26801), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27173, 27196), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (27194, 27196), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27300, 27338), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (27321, 27338), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27875, 27908), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (27891, 27908), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28286, 28309), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (28307, 28309), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28413, 28451), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (28434, 28451), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28987, 29020), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (29003, 29020), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29392, 29425), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (29408, 29425), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29796, 29819), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (29817, 29819), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29921, 29959), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (29942, 29959), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30492, 30515), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (30513, 30515), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30617, 30655), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (30638, 
30655), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31189, 31212), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (31210, 31212), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31316, 31354), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (31337, 31354), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31889, 31912), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (31910, 31912), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32014, 32052), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (32035, 32052), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32586, 32619), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (32602, 32619), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32985, 33018), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (33001, 33018), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33385, 33418), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (33401, 33418), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33797, 33830), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (33813, 33830), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34205, 34228), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (34226, 34228), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34326, 34364), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (34347, 34364), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34895, 34928), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (34911, 
34928), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35307, 35340), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (35323, 35340), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35719, 35752), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (35735, 35752), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36131, 36164), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (36147, 36164), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36542, 36575), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (36558, 36575), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36947, 36980), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (36963, 36980), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37347, 37380), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (37363, 37380), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37759, 37792), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (37775, 37792), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38170, 38193), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (38191, 38193), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38297, 38335), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (38318, 38335), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38870, 38893), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (38891, 38893), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38995, 39033), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_2'], {}), 
'(self.groupBox_2)\n', (39016, 39033), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39567, 39600), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_2'], {}), '(self.groupBox_2)\n', (39583, 39600), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40139, 40178), 'PyQt5.QtWidgets.QGroupBox', 'QtWidgets.QGroupBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (40158, 40178), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40200, 40288), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Minimum'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Minimum)\n', (40221, 40288), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40626, 40664), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (40647, 40664), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40862, 40885), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (40883, 40885), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((40974, 40997), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (40995, 40997), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41097, 41130), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (41113, 41130), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41428, 41462), 'PyQt5.QtWidgets.QSlider', 'QtWidgets.QSlider', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (41445, 41462), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42039, 42077), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (42060, 42077), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42303, 42341), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (42324, 42341), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42568, 42591), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (42589, 42591), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42697, 42735), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (42718, 42735), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((43286, 43309), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (43307, 43309), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((43413, 43451), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (43434, 43451), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((43524, 43610), 'PyQt5.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Minimum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.\n Fixed)\n', (43545, 43610), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((44343, 44366), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (44364, 44366), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((44470, 44508), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (44491, 44508), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45044, 45067), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (45065, 45067), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45176, 45214), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (45197, 45214), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45719, 45757), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (45740, 45757), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((46262, 46300), 
'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (46283, 46300), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((46876, 46899), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (46897, 46899), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47005, 47043), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (47026, 47043), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47591, 47629), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (47612, 47629), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47980, 48018), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (48001, 48018), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((48245, 48268), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (48266, 48268), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((48377, 48415), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (48398, 48415), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((48920, 48958), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (48941, 48958), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49463, 49501), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (49484, 49501), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((50077, 50100), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (50098, 50100), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((50207, 50245), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (50228, 50245), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((50732, 50770), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (50753, 50770), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((51257, 51295), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (51278, 51295), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((51855, 51878), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (51876, 51878), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((51987, 52025), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (52008, 52025), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52530, 52568), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (52551, 52568), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((53073, 53111), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (53094, 53111), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((53739, 53762), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (53760, 53762), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((53853, 53891), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (53874, 53891), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54119, 54142), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (54140, 54142), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54251, 54289), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (54272, 54289), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54777, 54815), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), 
'(self.groupBox_3)\n', (54798, 54815), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((55303, 55341), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (55324, 55341), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((55902, 55940), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (55923, 55940), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((56168, 56191), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (56189, 56191), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((56299, 56337), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (56320, 56337), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((56892, 56915), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (56913, 56915), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57023, 57061), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (57044, 57061), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57616, 57639), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (57637, 57639), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57750, 57788), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (57771, 57788), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58294, 58332), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (58315, 58332), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58838, 58876), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (58859, 58876), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59455, 59478), 'PyQt5.QtWidgets.QHBoxLayout', 
'QtWidgets.QHBoxLayout', ([], {}), '()\n', (59476, 59478), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59589, 59627), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (59610, 59627), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60133, 60171), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (60154, 60171), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60677, 60715), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (60698, 60715), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61289, 61327), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (61310, 61327), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61686, 61709), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (61707, 61709), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61815, 61853), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (61836, 61853), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62390, 62428), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (62411, 62428), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62655, 62678), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (62676, 62678), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62784, 62822), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (62805, 62822), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((63372, 63395), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (63393, 63395), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((63497, 63530), 
'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (63513, 63530), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((63752, 63786), 'PyQt5.QtWidgets.QSlider', 'QtWidgets.QSlider', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (63769, 63786), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((64367, 64390), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (64388, 64390), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((64501, 64539), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (64522, 64539), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65045, 65083), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (65066, 65083), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65589, 65627), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (65610, 65627), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((66257, 66280), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', ([], {}), '()\n', (66278, 66280), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((66371, 66409), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (66392, 66409), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((66637, 66660), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (66658, 66660), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((66766, 66804), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (66787, 66804), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((67342, 67365), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (67363, 67365), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((67471, 67509), 
'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (67492, 67509), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68060, 68098), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (68081, 68098), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68324, 68362), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (68345, 68362), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68590, 68613), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (68611, 68613), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68721, 68759), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (68742, 68759), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((69314, 69337), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (69335, 69337), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((69445, 69483), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (69466, 69483), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70037, 70075), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (70058, 70075), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70307, 70330), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (70328, 70330), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70441, 70479), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (70462, 70479), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70985, 71023), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (71006, 71023), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((71529, 71567), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (71550, 71567), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((72143, 72166), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (72164, 72166), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((72268, 72301), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (72284, 72301), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((72523, 72557), 'PyQt5.QtWidgets.QSlider', 'QtWidgets.QSlider', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (72540, 72557), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73138, 73161), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (73159, 73161), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73270, 73308), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (73291, 73308), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73796, 73834), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (73817, 73834), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((74322, 74360), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (74343, 74360), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((74923, 74946), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (74944, 74946), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((75057, 75095), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (75078, 75095), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((75601, 75639), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', 
(75622, 75639), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((76145, 76183), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (76166, 76183), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((76762, 76785), 'PyQt5.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', ([], {}), '()\n', (76783, 76785), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((76896, 76934), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (76917, 76934), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77440, 77478), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (77461, 77478), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77984, 78022), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.groupBox_3'], {}), '(self.groupBox_3)\n', (78005, 78022), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((78829, 78859), 'PyQt5.QtWidgets.QMenuBar', 'QtWidgets.QMenuBar', (['MainWindow'], {}), '(MainWindow)\n', (78847, 78859), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((78989, 79018), 'PyQt5.QtWidgets.QMenu', 'QtWidgets.QMenu', (['self.menubar'], {}), '(self.menubar)\n', (79004, 79018), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((79128, 79160), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['MainWindow'], {}), '(MainWindow)\n', (79148, 79160), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((79288, 79317), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (79305, 79317), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((79407, 79436), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (79424, 79436), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((79529, 79558), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), 
'(MainWindow)\n', (79546, 79558), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((79649, 79678), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (79666, 79678), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((79767, 79796), 'PyQt5.QtWidgets.QAction', 'QtWidgets.QAction', (['MainWindow'], {}), '(MainWindow)\n', (79784, 79796), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((80232, 80281), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (80269, 80281), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((775, 793), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (787, 793), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((829, 861), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(16777215)'], {}), '(16777215, 16777215)\n', (841, 861), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2270, 2296), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(28)'], {}), '(16777215, 28)\n', (2282, 2296), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2917, 2938), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(110)', '(65)'], {}), '(110, 65)\n', (2929, 2938), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2990, 3016), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(65)'], {}), '(16777215, 65)\n', (3002, 3016), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3914, 3940), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777212)', '(28)'], {}), '(16777212, 28)\n', (3926, 3940), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4893, 4914), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (4905, 4914), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4965, 4986), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (4977, 4986), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((5080, 5101), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (5092, 5101), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6907, 6928), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (6919, 6928), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((6979, 7000), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (6991, 7000), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7094, 7115), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (7106, 7115), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((7802, 7828), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(28)'], {}), '(16777215, 28)\n', (7814, 7828), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((8449, 8475), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(28)'], {}), '(16777215, 28)\n', (8461, 8475), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9222, 9243), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (9234, 9243), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9294, 9315), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (9306, 9315), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9409, 9430), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (9421, 9430), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9893, 9914), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (9905, 9914), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((9962, 9983), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (9974, 9983), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10071, 10093), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(100)'], {}), '(100, 100)\n', (10083, 10093), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), 
((10771, 10792), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(110)', '(65)'], {}), '(110, 65)\n', (10783, 10792), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((10844, 10870), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(65)'], {}), '(16777215, 65)\n', (10856, 10870), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11479, 11497), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (11491, 11497), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11546, 11572), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(28)'], {}), '(16777215, 28)\n', (11558, 11572), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((11966, 11987), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (11978, 11987), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12038, 12059), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (12050, 12059), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12153, 12175), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(150)'], {}), '(100, 150)\n', (12165, 12175), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((12862, 12888), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(28)'], {}), '(16777215, 28)\n', (12874, 12888), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13635, 13656), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (13647, 13656), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13707, 13728), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (13719, 13728), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((13822, 13843), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (13834, 13843), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14312, 14333), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (14324, 14333), False, 'from PyQt5 import 
QtCore, QtGui, QtWidgets\n'), ((14384, 14405), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (14396, 14405), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((14499, 14521), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(150)'], {}), '(100, 150)\n', (14511, 14521), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15334, 15355), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (15346, 15355), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15406, 15427), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (15418, 15427), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((15521, 15543), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(150)'], {}), '(100, 150)\n', (15533, 15543), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16230, 16251), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(110)', '(65)'], {}), '(110, 65)\n', (16242, 16251), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16303, 16329), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(65)'], {}), '(16777215, 65)\n', (16315, 16329), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((16950, 16971), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(110)', '(65)'], {}), '(110, 65)\n', (16962, 16971), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17023, 17049), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(65)'], {}), '(16777215, 65)\n', (17035, 17049), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17452, 17473), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (17464, 17473), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17524, 17545), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (17536, 17545), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((17639, 17661), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(150)'], {}), '(100, 150)\n', (17651, 
17661), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18474, 18495), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (18486, 18495), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18546, 18567), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (18558, 18567), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((18661, 18682), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (18673, 18682), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((19369, 19395), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(28)'], {}), '(16777215, 28)\n', (19381, 19395), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20016, 20037), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(110)', '(65)'], {}), '(110, 65)\n', (20028, 20037), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20089, 20115), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(65)'], {}), '(16777215, 65)\n', (20101, 20115), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20795, 20816), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (20807, 20816), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20867, 20888), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (20879, 20888), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((20982, 21004), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(150)'], {}), '(100, 150)\n', (20994, 21004), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21691, 21712), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(110)', '(65)'], {}), '(110, 65)\n', (21703, 21712), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((21764, 21790), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(65)'], {}), '(16777215, 65)\n', (21776, 21790), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22470, 22491), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', 
'(45)'], {}), '(100, 45)\n', (22482, 22491), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22542, 22563), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(45)'], {}), '(100, 45)\n', (22554, 22563), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((22657, 22679), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(150)'], {}), '(100, 150)\n', (22669, 22679), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23492, 23513), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (23504, 23513), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23564, 23585), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (23576, 23585), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((23679, 23700), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(92)', '(100)'], {}), '(92, 100)\n', (23691, 23700), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26445, 26465), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (26457, 26465), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26515, 26542), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (26527, 26542), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26850, 26870), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (26862, 26870), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((26920, 26947), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (26932, 26947), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27434, 27454), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (27446, 27454), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27502, 27522), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (27514, 27522), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27610, 27630), 'PyQt5.QtCore.QSize', 
'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (27622, 27630), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((27958, 27978), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (27970, 27978), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28029, 28056), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (28041, 28056), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28547, 28567), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (28559, 28567), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28615, 28635), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (28627, 28635), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((28723, 28743), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (28735, 28743), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29069, 29089), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (29081, 29089), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29139, 29166), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (29151, 29166), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29474, 29494), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (29486, 29494), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((29544, 29571), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (29556, 29571), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30055, 30075), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (30067, 30075), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30123, 30143), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (30135, 30143), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30231, 30251), 
'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (30243, 30251), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30751, 30771), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (30763, 30771), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30819, 30839), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (30831, 30839), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((30927, 30947), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (30939, 30947), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31450, 31470), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (31462, 31470), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31518, 31538), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (31530, 31538), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((31626, 31646), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (31638, 31646), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32148, 32168), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (32160, 32168), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32216, 32236), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (32228, 32236), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32324, 32344), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (32336, 32344), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32668, 32688), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (32680, 32688), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((32738, 32759), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(28)'], {}), '(150, 28)\n', (32750, 32759), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33067, 33087), 
'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (33079, 33087), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33137, 33158), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(28)'], {}), '(150, 28)\n', (33149, 33158), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33468, 33488), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (33480, 33488), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33539, 33566), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (33551, 33566), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33880, 33900), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (33892, 33900), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((33951, 33978), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (33963, 33978), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34460, 34480), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (34472, 34480), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34528, 34548), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (34540, 34548), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34636, 34656), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (34648, 34656), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((34978, 34998), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (34990, 34998), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35049, 35076), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (35061, 35076), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35390, 35410), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (35402, 35410), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((35461, 35488), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (35473, 35488), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35802, 35822), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (35814, 35822), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((35873, 35900), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (35885, 35900), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36214, 36234), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (36226, 36234), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36285, 36312), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (36297, 36312), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36624, 36644), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (36636, 36644), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((36694, 36721), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (36706, 36721), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37029, 37049), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (37041, 37049), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37099, 37120), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(28)'], {}), '(150, 28)\n', (37111, 37120), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37430, 37450), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (37442, 37450), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37501, 37528), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (37513, 37528), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37842, 37862), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', 
(37854, 37862), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((37913, 37940), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(16777215)'], {}), '(150, 16777215)\n', (37925, 37940), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38431, 38451), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (38443, 38451), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38499, 38519), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (38511, 38519), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((38607, 38627), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (38619, 38627), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39129, 39149), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (39141, 39149), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39197, 39217), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (39209, 39217), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39305, 39325), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (39317, 39325), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39649, 39669), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(0)'], {}), '(150, 0)\n', (39661, 39669), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((39719, 39740), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(150)', '(28)'], {}), '(150, 28)\n', (39731, 39740), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41230, 41257), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(16777215)'], {}), '(100, 16777215)\n', (41242, 41257), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((41558, 41585), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(16777215)'], {}), '(100, 16777215)\n', (41570, 41585), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42835, 42855), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', 
'(70)'], {}), '(64, 70)\n', (42847, 42855), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((42905, 42925), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (42917, 42925), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((43017, 43037), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (43029, 43037), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((43893, 43913), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (43905, 43913), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((43963, 43983), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (43975, 43983), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((44075, 44095), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (44087, 44095), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((44604, 44624), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (44616, 44624), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((44672, 44692), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (44684, 44692), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((44780, 44800), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (44792, 44800), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45320, 45340), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (45332, 45340), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45393, 45413), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (45405, 45413), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45511, 45531), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (45523, 45531), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45863, 45883), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 
30)\n', (45875, 45883), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((45936, 45956), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (45948, 45956), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((46054, 46074), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (46066, 46074), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((46406, 46426), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (46418, 46426), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((46479, 46499), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (46491, 46499), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((46597, 46617), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (46609, 46617), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47143, 47163), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (47155, 47163), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47213, 47233), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (47225, 47233), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47325, 47345), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (47337, 47345), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47721, 47740), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(30)'], {}), '(0, 30)\n', (47733, 47740), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((47786, 47812), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(30)'], {}), '(16777215, 30)\n', (47798, 47812), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((48521, 48541), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (48533, 48541), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((48594, 48614), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', 
(48606, 48614), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((48712, 48732), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (48724, 48732), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49064, 49084), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (49076, 49084), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49137, 49157), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (49149, 49157), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49255, 49275), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (49267, 49275), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49607, 49627), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (49619, 49627), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49680, 49700), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (49692, 49700), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((49798, 49818), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (49810, 49818), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((50347, 50367), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (50359, 50367), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((50418, 50438), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (50430, 50438), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((50532, 50552), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (50544, 50552), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((50872, 50892), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (50884, 50892), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((50943, 50963), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (50955, 50963), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((51057, 51077), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (51069, 51077), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((51397, 51417), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (51409, 51417), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((51468, 51488), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (51480, 51488), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((51582, 51602), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (51594, 51602), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52131, 52151), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (52143, 52151), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52204, 52224), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (52216, 52224), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52322, 52342), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (52334, 52342), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52674, 52694), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (52686, 52694), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52747, 52767), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (52759, 52767), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((52865, 52885), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (52877, 52885), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((53217, 53237), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (53229, 53237), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((53290, 53310), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (53302, 53310), False, 'from PyQt5 
import QtCore, QtGui, QtWidgets\n'), ((53408, 53428), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (53420, 53428), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54391, 54411), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (54403, 54411), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54462, 54482), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (54474, 54482), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54576, 54596), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (54588, 54596), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54917, 54937), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (54929, 54937), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((54988, 55008), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (55000, 55008), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((55102, 55122), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (55114, 55122), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((55443, 55463), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (55455, 55463), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((55514, 55534), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (55526, 55534), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((55628, 55648), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (55640, 55648), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((56437, 56457), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (56449, 56457), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((56507, 56527), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (56519, 56527), False, 'from PyQt5 import QtCore, 
QtGui, QtWidgets\n'), ((56619, 56639), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (56631, 56639), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57161, 57181), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (57173, 57181), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57231, 57251), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (57243, 57251), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57343, 57363), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (57355, 57363), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57894, 57914), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (57906, 57914), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((57967, 57987), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (57979, 57987), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58085, 58105), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (58097, 58105), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58438, 58458), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (58450, 58458), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58511, 58531), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (58523, 58531), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58629, 58649), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (58641, 58649), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((58982, 59002), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (58994, 59002), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59055, 59075), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (59067, 59075), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((59173, 59193), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (59185, 59193), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59733, 59753), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (59745, 59753), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59806, 59826), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (59818, 59826), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((59924, 59944), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (59936, 59944), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60277, 60297), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (60289, 60297), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60350, 60370), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (60362, 60370), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60468, 60488), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (60480, 60488), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60821, 60841), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (60833, 60841), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((60894, 60914), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (60906, 60914), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61012, 61032), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (61024, 61032), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61419, 61438), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(0)', '(30)'], {}), '(0, 30)\n', (61431, 61438), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((61484, 61516), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(16777215)'], {}), '(16777215, 16777215)\n', (61496, 61516), False, 'from PyQt5 import QtCore, 
QtGui, QtWidgets\n'), ((61949, 61969), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (61961, 61969), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62017, 62037), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (62029, 62037), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62125, 62145), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (62137, 62145), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62922, 62942), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (62934, 62942), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((62992, 63012), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (63004, 63012), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((63104, 63124), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (63116, 63124), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((63882, 63909), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(16777215)'], {}), '(100, 16777215)\n', (63894, 63909), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((64645, 64665), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (64657, 64665), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((64718, 64738), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (64730, 64738), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((64836, 64856), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (64848, 64856), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65189, 65209), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (65201, 65209), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65262, 65282), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (65274, 65282), False, 'from PyQt5 import QtCore, 
QtGui, QtWidgets\n'), ((65380, 65400), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (65392, 65400), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65733, 65753), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (65745, 65753), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65806, 65826), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (65818, 65826), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((65924, 65944), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (65936, 65944), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((66900, 66920), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (66912, 66920), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((66968, 66988), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (66980, 66988), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((67076, 67096), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(64)'], {}), '(64, 64)\n', (67088, 67096), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((67609, 67629), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (67621, 67629), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((67679, 67699), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (67691, 67699), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((67791, 67811), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (67803, 67811), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68859, 68879), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (68871, 68879), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((68929, 68949), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (68941, 68949), False, 'from PyQt5 import QtCore, QtGui, 
QtWidgets\n'), ((69041, 69061), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (69053, 69061), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((69583, 69603), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (69595, 69603), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((69653, 69673), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (69665, 69673), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((69765, 69785), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(64)', '(70)'], {}), '(64, 70)\n', (69777, 69785), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70585, 70605), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (70597, 70605), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70658, 70678), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (70670, 70678), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((70776, 70796), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (70788, 70796), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((71129, 71149), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (71141, 71149), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((71202, 71222), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (71214, 71222), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((71320, 71340), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (71332, 71340), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((71673, 71693), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (71685, 71693), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((71746, 71766), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (71758, 71766), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), 
((71864, 71884), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (71876, 71884), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((72653, 72680), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(100)', '(16777215)'], {}), '(100, 16777215)\n', (72665, 72680), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73410, 73430), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (73422, 73430), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73481, 73501), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (73493, 73501), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73595, 73615), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (73607, 73615), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((73936, 73956), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (73948, 73956), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((74007, 74027), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (74019, 74027), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((74121, 74141), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (74133, 74141), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((74462, 74482), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (74474, 74482), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((74533, 74553), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (74545, 74553), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((74647, 74667), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (74659, 74667), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((75201, 75221), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (75213, 75221), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), 
((75274, 75294), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (75286, 75294), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((75392, 75412), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (75404, 75412), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((75745, 75765), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (75757, 75765), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((75818, 75838), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (75830, 75838), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((75936, 75956), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (75948, 75956), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((76289, 76309), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (76301, 76309), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((76362, 76382), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (76374, 76382), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((76480, 76500), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (76492, 76500), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77040, 77060), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (77052, 77060), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77113, 77133), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (77125, 77133), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77231, 77251), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (77243, 77251), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77584, 77604), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (77596, 77604), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77657, 77677), 
'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (77669, 77677), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((77775, 77795), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (77787, 77795), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((78128, 78148), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (78140, 78148), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((78201, 78221), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (78213, 78221), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((78319, 78339), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(30)', '(30)'], {}), '(30, 30)\n', (78331, 78339), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((78893, 78921), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(0)', '(0)', '(1070)', '(26)'], {}), '(0, 0, 1070, 26)\n', (78905, 78921), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
from common import small_buffer
import pytest
import numpy as np
import pyarrow as pa
import vaex
def test_unique_arrow(df_factory):
    """unique() on an arrow string column finds both distinct values even with a tiny buffer."""
    letters = ['a', 'b', 'a', 'a', 'a', 'b', 'b', 'b', 'b', 'a']
    ds = df_factory(x=vaex.string_column(letters))
    with small_buffer(ds, 2):
        assert set(ds.unique(ds.x)) == {'a', 'b'}
        # the inverse index must reconstruct the original column
        uniques, inverse = ds.unique(ds.x, return_inverse=True)
        reconstructed = np.array(uniques)[inverse]
        assert reconstructed.tolist() == ds.x.tolist()
def test_unique(df_factory):
    """unique() and its inverse index round-trip both a regular and a string column."""
    ds = df_factory(colors=['red', 'green', 'blue', 'green'])
    with small_buffer(ds, 2):
        assert set(ds.unique(ds.colors)) == {'red', 'green', 'blue'}
        uniques, inverse = ds.unique(ds.colors, return_inverse=True)
        assert np.array(uniques)[inverse].tolist() == ds.colors.tolist()

    ds = df_factory(x=['a', 'b', 'a', 'a', 'a', 'b', 'b', 'b', 'b', 'a'])
    with small_buffer(ds, 2):
        assert set(ds.unique(ds.x)) == {'a', 'b'}
        uniques, inverse = ds.unique(ds.x, return_inverse=True)
        assert np.array(uniques)[inverse].tolist() == ds.x.tolist()
def test_unique_f4(df_factory):
    """unique() on float32 data: finite values are 0, 1, 2 (nan is skipped, nan != nan)."""
    values = np.array([np.nan, 0, 1, np.nan, 2, np.nan], dtype='f4')
    df = df_factory(x=values)
    # drop the first (nan) entry of the sorted uniques; it cannot be compared with ==
    assert sorted(df.x.unique())[1:] == [0, 1, 2]
def test_unique_nan(df_factory):
    """unique() keeps nan; the inverse index must round-trip all non-nan entries."""
    x = [np.nan, 0, 1, np.nan, 2, np.nan]
    df = df_factory(x=x)
    # skip the first sorted element — the test assumes nan sorts first, and nan != nan
    assert list(sorted(df.x.unique()))[1:] == [np.nan, 0, 1, 2][1:]
    with small_buffer(df, 2):
        values, indices = df.unique(df.x, return_inverse=True)
        values = np.array(values)
        values = values[indices]  # reconstruct the column from uniques + inverse index
        mask = np.isnan(values)
        # compare only non-nan positions, again because nan != nan
        assert values[~mask].tolist() == df.x.to_numpy()[~mask].tolist()
        # assert indices.tolist() == [0, 1, 2, 0, 3, 0]
def test_unique_missing(df_factory):
    """With dropnan=True, nan disappears but None (missing) and '' survive as distinct values."""
    data = np.array([None, 'A', 'B', -1, 0, 2, '', '', None, None, None, np.nan, np.nan, np.nan, np.nan])
    df = df_factory(x=data)
    assert set(df.x.unique(dropnan=True)) == {'', 'A', 'B', -1, 0, 2, None}
def test_unique_missing_numeric(array_factory):
    """A numeric column with a missing value yields both the number and None."""
    column = array_factory([1, None])
    df = vaex.from_arrays(x=column)
    assert set(df.x.unique()) == {1, None}
def test_unique_string_missing(df_factory):
    """unique() on strings keeps missing values (None) and collapses duplicates."""
    x = ['John', None, 'Sally', None, '0.0']
    df = df_factory(x=x)
    result = df.x.unique()
    assert len(result) == 4
    assert 'John' in result
    assert None in result
    # BUG FIX: the original line was `assert 'Sally'`, a bare non-empty string
    # literal that is always truthy and therefore could never fail.
    assert 'Sally' in result
def test_unique_list(df_types):
    """unique() over list columns yields the distinct scalar elements plus None."""
    expected_strings = {'aap', 'noot', 'mies', None}
    expected_ints = {1, 2, 3, 4, 5, None}
    assert set(df_types.string_list.unique()) == expected_strings
    assert set(df_types.int_list.unique()) == expected_ints
@pytest.mark.parametrize("future", [False, True])
def test_unique_categorical(df_factory, future):
    """After ordinal_encode, unique() returns integer codes (legacy) or string labels (future API)."""
    df = df_factory(x=vaex.string_column(['a', 'c', 'b', 'a', 'a']))
    df = df.ordinal_encode('x')
    df = df._future() if future else df
    if future:
        # future API exposes the categorical's original string labels
        assert df.x.dtype == str
        assert set(df.x.unique()) == {'a', 'b', 'c'}
        assert df.x.nunique() == 3
    else:
        # legacy API exposes the integer codes produced by the encoding
        assert df.x.dtype == int
        assert set(df.x.unique()) == {0, 1, 2}
        assert df.x.nunique() == 3
| [
"common.small_buffer",
"pytest.mark.parametrize",
"numpy.array",
"vaex.string_column",
"numpy.isnan"
] | [((2694, 2742), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""future"""', '[False, True]'], {}), "('future', [False, True])\n", (2717, 2742), False, 'import pytest\n'), ((1088, 1143), 'numpy.array', 'np.array', (['[np.nan, 0, 1, np.nan, 2, np.nan]'], {'dtype': '"""f4"""'}), "([np.nan, 0, 1, np.nan, 2, np.nan], dtype='f4')\n", (1096, 1143), True, 'import numpy as np\n'), ((1800, 1899), 'numpy.array', 'np.array', (["[None, 'A', 'B', -1, 0, 2, '', '', None, None, None, np.nan, np.nan, np.nan,\n np.nan]"], {}), "([None, 'A', 'B', -1, 0, 2, '', '', None, None, None, np.nan, np.\n nan, np.nan, np.nan])\n", (1808, 1899), True, 'import numpy as np\n'), ((240, 259), 'common.small_buffer', 'small_buffer', (['ds', '(2)'], {}), '(ds, 2)\n', (252, 259), False, 'from common import small_buffer\n'), ((539, 558), 'common.small_buffer', 'small_buffer', (['ds', '(2)'], {}), '(ds, 2)\n', (551, 558), False, 'from common import small_buffer\n'), ((849, 868), 'common.small_buffer', 'small_buffer', (['ds', '(2)'], {}), '(ds, 2)\n', (861, 868), False, 'from common import small_buffer\n'), ((1416, 1435), 'common.small_buffer', 'small_buffer', (['df', '(2)'], {}), '(df, 2)\n', (1428, 1435), False, 'from common import small_buffer\n'), ((1517, 1533), 'numpy.array', 'np.array', (['values'], {}), '(values)\n', (1525, 1533), True, 'import numpy as np\n'), ((1582, 1598), 'numpy.isnan', 'np.isnan', (['values'], {}), '(values)\n', (1590, 1598), True, 'import numpy as np\n'), ((159, 229), 'vaex.string_column', 'vaex.string_column', (["['a', 'b', 'a', 'a', 'a', 'b', 'b', 'b', 'b', 'a']"], {}), "(['a', 'b', 'a', 'a', 'a', 'b', 'b', 'b', 'b', 'a'])\n", (177, 229), False, 'import vaex\n'), ((2814, 2859), 'vaex.string_column', 'vaex.string_column', (["['a', 'c', 'b', 'a', 'a']"], {}), "(['a', 'c', 'b', 'a', 'a'])\n", (2832, 2859), False, 'import vaex\n'), ((387, 403), 'numpy.array', 'np.array', (['values'], {}), '(values)\n', (395, 403), True, 'import numpy as np\n'), ((710, 726), 
'numpy.array', 'np.array', (['values'], {}), '(values)\n', (718, 726), True, 'import numpy as np\n'), ((996, 1012), 'numpy.array', 'np.array', (['values'], {}), '(values)\n', (1004, 1012), True, 'import numpy as np\n')] |
# utility functions for frequency related stuff
import numpy as np
import numpy.fft as fft
import math
def getFrequencyArray(fs, samples):
    """Return `samples` evenly spaced frequencies from 0 Hz up to the Nyquist frequency (fs / 2)."""
    return np.linspace(0.0, fs / 2.0, samples)
# Use these wrappers for all FFT calculations: if the FFT backend changes
# later (e.g. to FFTW), only these functions need to be replaced.
def forwardFFT(data, **kwargs):
    """Real-input forward FFT along axis 0.

    Orthonormal ('ortho') scaling is applied unless the caller passes a falsy
    norm keyword (norm=False/None), in which case numpy's default unscaled
    forward transform is used.
    """
    norm = 'ortho' if kwargs.get("norm", True) else None
    return fft.rfft(data, norm=norm, axis=0)
def inverseFFT(data, length, **kwargs):
    """Inverse real FFT producing `length` output samples.

    Matches forwardFFT: orthonormal scaling by default, numpy's default
    (1/n) scaling when a falsy norm keyword is passed.
    """
    norm = 'ortho' if kwargs.get("norm", True) else None
    return fft.irfft(data, n=length, norm=norm)
def padNextPower2(size):
    """Return the number of samples needed to pad `size` up to the next power of two.

    Sizes that are already a power of two need no padding, so 0 is returned.

    The previous implementation used math.ceil(math.log(size, 2)); because
    math.log(x, 2) is computed as log(x)/log(2), it can round a value such as
    log2(2**29) up to 29.000000000000004, making exact powers of two get
    padded to twice their size. Integer bit arithmetic is exact.

    Args:
        size: positive sample count (a float is rounded up to the next
            integer, matching the old ceil-based behaviour).

    Returns:
        next_power_of_two(size) - size.

    Raises:
        ValueError: if size is not positive (the old code raised the same
            from math.log's domain check).
    """
    n = int(math.ceil(size))
    if n <= 0:
        raise ValueError("size must be positive")
    next2Size = 1 if n == 1 else 1 << (n - 1).bit_length()
    return next2Size - size
| [
"numpy.fft.rfft",
"numpy.linspace",
"numpy.fft.irfft",
"math.log"
] | [((200, 232), 'numpy.linspace', 'np.linspace', (['(0)', 'nyquist', 'samples'], {}), '(0, nyquist, samples)\n', (211, 232), True, 'import numpy as np\n'), ((462, 498), 'numpy.fft.rfft', 'fft.rfft', (['data'], {'norm': '"""ortho"""', 'axis': '(0)'}), "(data, norm='ortho', axis=0)\n", (470, 498), True, 'import numpy.fft as fft\n'), ((628, 667), 'numpy.fft.irfft', 'fft.irfft', (['data'], {'n': 'length', 'norm': '"""ortho"""'}), "(data, n=length, norm='ortho')\n", (637, 667), True, 'import numpy.fft as fft\n'), ((431, 453), 'numpy.fft.rfft', 'fft.rfft', (['data'], {'axis': '(0)'}), '(data, axis=0)\n', (439, 453), True, 'import numpy.fft as fft\n'), ((594, 619), 'numpy.fft.irfft', 'fft.irfft', (['data'], {'n': 'length'}), '(data, n=length)\n', (603, 619), True, 'import numpy.fft as fft\n'), ((718, 735), 'math.log', 'math.log', (['size', '(2)'], {}), '(size, 2)\n', (726, 735), False, 'import math\n')] |
# -*- coding: utf-8 -*-
# :Project: pglast -- Extract keywords from PostgreSQL header
# :Created: dom 06 ago 2017 23:34:53 CEST
# :Author: <NAME> <<EMAIL>>
# :License: GNU General Public License version 3 or later
# :Copyright: © 2017, 2018 Lele Gaifax
#
from collections import defaultdict
from os.path import basename
from pprint import pformat
from re import match
import subprocess
# Template for the header of the generated Python module; filled via %-formatting
# with (basename of the source header, libpg_query git version) in workhorse().
HEADER = """\
# -*- coding: utf-8 -*-
# :Project: pglast -- DO NOT EDIT: automatically extracted from %s @ %s
# :Author: <NAME> <<EMAIL>>
# :License: GNU General Public License version 3 or later
# :Copyright: © 2017 <NAME>
#
"""
def get_libpg_query_version():
    """Ask git for the current describe string of the libpg_query checkout.

    Returns the last '/'-separated component of `git describe --all --long`.
    """
    raw = subprocess.check_output(['git', 'describe', '--all', '--long'], cwd='libpg_query')
    described = raw.decode('utf-8').strip()
    return described.rsplit('/', 1)[-1]
def extract_keywords(source):
    """Yield (keyword, category) pairs from PG_KEYWORD(...) macro lines in `source`."""
    pattern = r'PG_KEYWORD\("([^"]+)",[^,]+,\s*([\w_]+)\)'
    for line in source.splitlines():
        if not line.startswith('PG_KEYWORD'):
            continue
        m = match(pattern, line.strip())
        if m is not None:
            yield m.group(1), m.group(2)
def workhorse(args):
    """Read PG_KEYWORD macros from args.header and write them to args.output as Python sets.

    One ``<TYPE>S = {...}`` set is emitted per keyword category, categories
    in sorted order.
    """
    with open(args.header, encoding='utf-8') as f:
        source = f.read()
    # group keyword names by their category (e.g. UNRESERVED_KEYWORD)
    bytype = defaultdict(set)
    for keyword, type in extract_keywords(source):
        bytype[type].add(keyword)
    with open(args.output, 'w', encoding='utf-8') as output:
        output.write(HEADER % (basename(args.header), get_libpg_query_version()))
        for type in sorted(bytype):
            output.write('\n')
            output.write(type + 'S')
            output.write(' = {')
            # indent continuation lines so they line up under "<TYPE>S = {"
            keywords = pformat(bytype[type], compact=True, indent=len(type)+5, width=95)
            # pformat renders "{...}"; drop its opening brace since one was already written
            output.write(keywords[1:].lstrip())
            output.write('\n')
def main():
    """Command-line entry point: parse the two positional paths and run the extraction."""
    from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
    cli = ArgumentParser(description="PG keyword extractor",
                         formatter_class=ArgumentDefaultsHelpFormatter)
    cli.add_argument('header', help="source header to be processed")
    cli.add_argument('output', help="Python source to be created")
    workhorse(cli.parse_args())
# Run the extractor only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| [
"subprocess.check_output",
"collections.defaultdict",
"os.path.basename",
"argparse.ArgumentParser"
] | [((681, 768), 'subprocess.check_output', 'subprocess.check_output', (["['git', 'describe', '--all', '--long']"], {'cwd': '"""libpg_query"""'}), "(['git', 'describe', '--all', '--long'], cwd=\n 'libpg_query')\n", (704, 768), False, 'import subprocess\n'), ((1228, 1244), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1239, 1244), False, 'from collections import defaultdict\n'), ((1878, 1980), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""PG keyword extractor"""', 'formatter_class': 'ArgumentDefaultsHelpFormatter'}), "(description='PG keyword extractor', formatter_class=\n ArgumentDefaultsHelpFormatter)\n", (1892, 1980), False, 'from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter\n'), ((1423, 1444), 'os.path.basename', 'basename', (['args.header'], {}), '(args.header)\n', (1431, 1444), False, 'from os.path import basename\n')] |
import cv2
from imutils.paths import list_images
import imutils
import re
import datetime
from datasets.hdf5datasetwriter import HDF5DatasetWriter
import progressbar
def get_frame_number(impath):
    """Extract the integer frame number from an image path containing 'image data <N>'.

    Args:
        impath: image path/name, e.g. ``.../image data 42 2019-Jan-02 ...jpg``.

    Returns:
        int: the frame number N.

    Raises:
        ValueError: if the path contains no 'image data <N>' token (the
            original code crashed with an opaque AttributeError on
            ``None.group`` instead).
    """
    m = re.search(r"image data (\d+)", impath)
    if m is None:
        raise ValueError("no frame number found in %r" % impath)
    return int(m.group(1))
def get_timestamp(impath):
    """Parse the capture timestamp embedded in the image name.

    The name is assumed to look like ``image data <N> <Y-b-d H M S f>.<ext>``;
    everything after the 'image data <N> ' prefix (extension stripped) is the
    timestamp.
    """
    stem = impath.split(".")[0]
    stamp = re.split(r"image data \d+ ", stem)[1]
    return datetime.datetime.strptime(stamp, '%Y-%b-%d %H %M %S %f')
# Load the data, sort by frame number
basePath = "D:/create lidar trafik data/newer data/ImageData/"
impaths = list(list_images(basePath))
impaths = sorted(impaths, key=get_frame_number)
print("[INFO] building HDF5 dataset...")
outputPath = basePath + "frames.hdf5"
# dataset of shape (N, 360, 640, 3); height 360 assumed for the resized frames — TODO confirm
writer = HDF5DatasetWriter((len(impaths), 360, 640, 3), outputPath)
# initialize the progress bar
widgets = ["Building Dataset: ", progressbar.Percentage(), " ",
           progressbar.Bar(), " ", progressbar.ETA()]
pbar = progressbar.ProgressBar(maxval=len(impaths),
                                widgets=widgets).start()
for i, impath in enumerate(impaths):
    date = get_timestamp(impath)
    # seconds since the Unix epoch (naive datetimes; presumably local clock — TODO confirm tz)
    ts = (date - datetime.datetime(1970, 1, 1)) / datetime.timedelta(seconds=1)
    image = cv2.imread(impath)
    image = imutils.resize(image, width=640)
    writer.add([image], [ts])
    pbar.update(i)
# close the HDF5 writer
pbar.finish()
writer.close()
"progressbar.Bar",
"re.split",
"datetime.datetime",
"datetime.datetime.strptime",
"imutils.resize",
"progressbar.Percentage",
"imutils.paths.list_images",
"progressbar.ETA",
"datetime.timedelta",
"cv2.imread",
"re.search"
] | [((455, 515), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date_str', '"""%Y-%b-%d %H %M %S %f"""'], {}), "(date_str, '%Y-%b-%d %H %M %S %f')\n", (481, 515), False, 'import datetime\n'), ((649, 670), 'imutils.paths.list_images', 'list_images', (['basePath'], {}), '(basePath)\n', (660, 670), False, 'from imutils.paths import list_images\n'), ((932, 956), 'progressbar.Percentage', 'progressbar.Percentage', ([], {}), '()\n', (954, 956), False, 'import progressbar\n'), ((963, 980), 'progressbar.Bar', 'progressbar.Bar', ([], {}), '()\n', (978, 980), False, 'import progressbar\n'), ((987, 1004), 'progressbar.ETA', 'progressbar.ETA', ([], {}), '()\n', (1002, 1004), False, 'import progressbar\n'), ((1251, 1269), 'cv2.imread', 'cv2.imread', (['impath'], {}), '(impath)\n', (1261, 1269), False, 'import cv2\n'), ((1282, 1314), 'imutils.resize', 'imutils.resize', (['image'], {'width': '(640)'}), '(image, width=640)\n', (1296, 1314), False, 'import imutils\n'), ((402, 440), 're.split', 're.split', (['"""image data \\\\d+ """', 'date_str'], {}), "('image data \\\\d+ ', date_str)\n", (410, 440), False, 'import re\n'), ((1204, 1233), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(1)'}), '(seconds=1)\n', (1222, 1233), False, 'import datetime\n'), ((1171, 1200), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (1188, 1200), False, 'import datetime\n'), ((212, 250), 're.search', 're.search', (['"""image data (\\\\d+)"""', 'impath'], {}), "('image data (\\\\d+)', impath)\n", (221, 250), False, 'import re\n')] |
#!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
import time
import cv2
from real.camera import Camera
from robot import Robot
from subprocess import Popen, PIPE
def get_camera_to_robot_transformation(camera):
    """Compute the perspective transform from camera pixel coords to robot-base XY.

    Captures one frame, runs the external ``./real/detect-from-file`` tag
    detector on it, and matches the four detected tag positions against their
    known robot coordinates via cv2.getPerspectiveTransform.
    """
    color_img, depth_img = camera.get_data()
    cv2.imwrite("real/temp.jpg", color_img)
    # external detector; presumably prints one "id x y" line per fiducial tag — TODO confirm
    p = Popen(['./real/detect-from-file', "real/temp.jpg"], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    output, err = p.communicate()
    tag_info = output.decode("utf-8")
    tag_info = tag_info.split("\n")[:4]  # exactly four tags are expected
    for i, info in enumerate(tag_info):
        tag_info[i] = info.split(" ")
    print(tag_info)
    tag_info = np.array(tag_info, dtype=np.float32)
    assert(tag_info.shape == (4, 3))  # four rows of (tag_id, x, y)
    tag_loc_camera = tag_info
    # measured robot-base coordinates of each tag id (raw values / 1000 — presumably mm → m)
    tag_loc_robot = {
        22: (270.15 / 1000, -637.0 / 1000),
        7: (255.35 / 1000, -247.6 / 1000),
        4: (-272.7 / 1000, -660.9 / 1000),
        2: (-289.8 / 1000, -274.2 / 1000)
    }
    # src points: (x, y) per tag from the detector; dst points: matching robot XY
    camera_to_robot = cv2.getPerspectiveTransform(
        np.float32([tag[1:] for tag in tag_loc_camera]),
        np.float32([tag_loc_robot[tag[0]] for tag in tag_loc_camera]))
    return camera_to_robot
# User options (change me)
# --------------- Setup options ---------------
# NOTE(review): several settings below are assigned twice; only the second value is live.
tcp_host_ip = '172.16.31.10' # IP and port to robot arm as TCP client (UR5)
tcp_host_ip = "172.19.97.157"
tcp_port = 30002
rtc_host_ip = '172.16.31.10' # IP and port to robot arm as real-time client (UR5)
rtc_host_ip = "172.19.97.157"
rtc_port = 30003
# Cols: min max, Rows: x y z (define workspace limits in robot coordinates)
workspace_limits = np.asarray([[0.3, 0.748], [-0.224, 0.224], [-0.255, -0.1]])
workspace_limits = np.asarray([[-0.237, 0.211], [-0.683, -0.235], [0.18, 0.4]])
# workspace_limits = np.asarray([[-0.224, 0.224], [-0.674, -0.226], [0.18, 0.4]])
# Cols: min max, Rows: x y z (define workspace limits in robot coordinates)
# presumably an axis-angle rotation vector for the tool pose (UR convention) — TODO confirm
tool_orientation = [2.22, -2.22, 0]
tool_orientation = [0, -3.14, 0]
# ---------------------------------------------
# Move robot to home pose
robot = Robot(False, None, None, workspace_limits,
              tcp_host_ip, tcp_port, rtc_host_ip, rtc_port,
              False, None, None)
robot.open_gripper()
# perspective transform mapping camera pixels to robot XY (see get_camera_to_robot_transformation)
transformation_matrix = get_camera_to_robot_transformation(robot.camera)
# Slow down robot
robot.joint_acc = 1.4
robot.joint_vel = 1.05
# Callback function for clicking on OpenCV window
click_point_pix = ()
camera_color_img, camera_depth_img = robot.get_camera_data()
def mouseclick_callback(event, x, y, flags, param):
if event == cv2.EVENT_LBUTTONDOWN:
global camera, robot, click_point_pix
click_point_pix = (x, y)
# Get click point in camera coordinates
# click_z = camera_depth_img[y][x] * robot.cam_depth_scale
# click_x = np.multiply(x-robot.cam_intrinsics[0][2],click_z/robot.cam_intrinsics[0][0])
# click_y = np.multiply(y-robot.cam_intrinsics[1][2],click_z/robot.cam_intrinsics[1][1])
# if click_z == 0:
# return
# click_point = np.asarray([click_x,click_y,click_z])
# click_point.shape = (3,1)
# # Convert camera to robot coordinates
# # camera2robot = np.linalg.inv(robot.cam_pose)
# camera2robot = robot.cam_pose
# target_position = np.dot(camera2robot[0:3,0:3],click_point) + camera2robot[0:3,3:]
# target_position = target_position[0:3,0]
# print(target_position)
camera_pt = np.array([x, y, 1])
robot_pt = np.dot(transformation_matrix, camera_pt)
robot_pt = np.array([robot_pt[0], robot_pt[1]]) / robot_pt[2]
print([robot_pt[0], robot_pt[1], -0.1])
print(robot.parse_tcp_state_data(robot.get_state(), "cartesian_info"))
robot.move_to([robot_pt[0], robot_pt[1], 0.3], tool_orientation)
# Show color and depth frames
cv2.namedWindow('color')
cv2.setMouseCallback('color', mouseclick_callback)
cv2.namedWindow('depth')
while True:
camera_color_img, camera_depth_img = robot.get_camera_data()
bgr_data = cv2.cvtColor(camera_color_img, cv2.COLOR_RGB2BGR)
if len(click_point_pix) != 0:
bgr_data = cv2.circle(bgr_data, click_point_pix, 7, (0, 0, 255), 2)
cv2.imshow('color', bgr_data)
camera_depth_img[camera_depth_img < 0.19] = 0
cv2.imshow('depth', camera_depth_img)
if cv2.waitKey(1) == ord('c'):
break
cv2.destroyAllWindows()
| [
"cv2.setMouseCallback",
"cv2.imwrite",
"numpy.float32",
"robot.Robot",
"subprocess.Popen",
"numpy.asarray",
"cv2.imshow",
"numpy.array",
"numpy.dot",
"cv2.circle",
"cv2.destroyAllWindows",
"cv2.cvtColor",
"cv2.waitKey",
"cv2.namedWindow"
] | [((1585, 1644), 'numpy.asarray', 'np.asarray', (['[[0.3, 0.748], [-0.224, 0.224], [-0.255, -0.1]]'], {}), '([[0.3, 0.748], [-0.224, 0.224], [-0.255, -0.1]])\n', (1595, 1644), True, 'import numpy as np\n'), ((1664, 1724), 'numpy.asarray', 'np.asarray', (['[[-0.237, 0.211], [-0.683, -0.235], [0.18, 0.4]]'], {}), '([[-0.237, 0.211], [-0.683, -0.235], [0.18, 0.4]])\n', (1674, 1724), True, 'import numpy as np\n'), ((2036, 2147), 'robot.Robot', 'Robot', (['(False)', 'None', 'None', 'workspace_limits', 'tcp_host_ip', 'tcp_port', 'rtc_host_ip', 'rtc_port', '(False)', 'None', 'None'], {}), '(False, None, None, workspace_limits, tcp_host_ip, tcp_port,\n rtc_host_ip, rtc_port, False, None, None)\n', (2041, 2147), False, 'from robot import Robot\n'), ((3821, 3845), 'cv2.namedWindow', 'cv2.namedWindow', (['"""color"""'], {}), "('color')\n", (3836, 3845), False, 'import cv2\n'), ((3846, 3896), 'cv2.setMouseCallback', 'cv2.setMouseCallback', (['"""color"""', 'mouseclick_callback'], {}), "('color', mouseclick_callback)\n", (3866, 3896), False, 'import cv2\n'), ((3897, 3921), 'cv2.namedWindow', 'cv2.namedWindow', (['"""depth"""'], {}), "('depth')\n", (3912, 3921), False, 'import cv2\n'), ((4352, 4375), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (4373, 4375), False, 'import cv2\n'), ((286, 325), 'cv2.imwrite', 'cv2.imwrite', (['"""real/temp.jpg"""', 'color_img'], {}), "('real/temp.jpg', color_img)\n", (297, 325), False, 'import cv2\n'), ((334, 427), 'subprocess.Popen', 'Popen', (["['./real/detect-from-file', 'real/temp.jpg']"], {'stdin': 'PIPE', 'stdout': 'PIPE', 'stderr': 'PIPE'}), "(['./real/detect-from-file', 'real/temp.jpg'], stdin=PIPE, stdout=PIPE,\n stderr=PIPE)\n", (339, 427), False, 'from subprocess import Popen, PIPE\n'), ((649, 685), 'numpy.array', 'np.array', (['tag_info'], {'dtype': 'np.float32'}), '(tag_info, dtype=np.float32)\n', (657, 685), True, 'import numpy as np\n'), ((4015, 4064), 'cv2.cvtColor', 'cv2.cvtColor', (['camera_color_img', 
'cv2.COLOR_RGB2BGR'], {}), '(camera_color_img, cv2.COLOR_RGB2BGR)\n', (4027, 4064), False, 'import cv2\n'), ((4179, 4208), 'cv2.imshow', 'cv2.imshow', (['"""color"""', 'bgr_data'], {}), "('color', bgr_data)\n", (4189, 4208), False, 'import cv2\n'), ((4263, 4300), 'cv2.imshow', 'cv2.imshow', (['"""depth"""', 'camera_depth_img'], {}), "('depth', camera_depth_img)\n", (4273, 4300), False, 'import cv2\n'), ((1012, 1059), 'numpy.float32', 'np.float32', (['[tag[1:] for tag in tag_loc_camera]'], {}), '([tag[1:] for tag in tag_loc_camera])\n', (1022, 1059), True, 'import numpy as np\n'), ((1069, 1130), 'numpy.float32', 'np.float32', (['[tag_loc_robot[tag[0]] for tag in tag_loc_camera]'], {}), '([tag_loc_robot[tag[0]] for tag in tag_loc_camera])\n', (1079, 1130), True, 'import numpy as np\n'), ((3437, 3456), 'numpy.array', 'np.array', (['[x, y, 1]'], {}), '([x, y, 1])\n', (3445, 3456), True, 'import numpy as np\n'), ((3476, 3516), 'numpy.dot', 'np.dot', (['transformation_matrix', 'camera_pt'], {}), '(transformation_matrix, camera_pt)\n', (3482, 3516), True, 'import numpy as np\n'), ((4118, 4174), 'cv2.circle', 'cv2.circle', (['bgr_data', 'click_point_pix', '(7)', '(0, 0, 255)', '(2)'], {}), '(bgr_data, click_point_pix, 7, (0, 0, 255), 2)\n', (4128, 4174), False, 'import cv2\n'), ((4309, 4323), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (4320, 4323), False, 'import cv2\n'), ((3536, 3572), 'numpy.array', 'np.array', (['[robot_pt[0], robot_pt[1]]'], {}), '([robot_pt[0], robot_pt[1]])\n', (3544, 3572), True, 'import numpy as np\n')] |
# Header starts here.
from sympy.physics.units import *
from sympy import *
# Rounding:
import decimal
from decimal import Decimal as DX
from copy import deepcopy
def iso_round(obj, pv, rounding=decimal.ROUND_HALF_EVEN):
import sympy
"""
Rounding acc. to DIN EN ISO 80000-1:2013-08
place value = Rundestellenwert
"""
assert pv in set([
# place value # round to:
1, # 1
0.1, # 1st digit after decimal
0.01, # 2nd
0.001, # 3rd
0.0001, # 4th
0.00001, # 5th
0.000001, # 6th
0.0000001, # 7th
0.00000001, # 8th
0.000000001, # 9th
0.0000000001, # 10th
])
objc = deepcopy(obj)
try:
tmp = DX(str(float(objc)))
objc = tmp.quantize(DX(str(pv)), rounding=rounding)
except:
for i in range(len(objc)):
tmp = DX(str(float(objc[i])))
objc[i] = tmp.quantize(DX(str(pv)), rounding=rounding)
return objc
# LateX:
kwargs = {}
kwargs["mat_str"] = "bmatrix"
kwargs["mat_delim"] = ""
# kwargs["symbol_names"] = {FB: "F^{\mathsf B}", }
# Units:
(k, M, G ) = ( 10**3, 10**6, 10**9 )
(mm, cm) = ( m/1000, m/100 )
Newton = kg*m/s**2
Pa = Newton/m**2
MPa = M*Pa
GPa = G*Pa
kN = k*Newton
deg = pi/180
half = S(1)/2
# Header ends here.
#
EA, l, F1, F2 = var("EA, l, F1, F2")
sub_list = [
( EA, 2 *Pa*m**2 ),
( l, 1 *m ),
( F1, 1 *Newton /2 ), # due to symmetry
( F2, 2 *Newton /2 ), # due to symmetry
]
def k(phi):
""" element stiffness matrix """
# phi is angle between:
# 1. vector along global x axis
# 2. vector along 1-2-axis of truss
# phi is counted positively about z.
# pprint("phi / deg:")
# pprint(N(deg(phi),3))
(c, s) = ( cos(phi), sin(phi) )
(cc, ss, sc) = ( c*c, s*s, s*c)
return Matrix(
[
[ cc, sc, -cc, -sc],
[ sc, ss, -sc, -ss],
[-cc, -sc, cc, sc],
[-sc, -ss, sc, ss],
])
(p1, p2, p3) = (315*pi/180, 0 *pi/180, 45 *pi/180)
# k2 uses only 1/2 A due to symmetry:
(k1, k2, k3) = (EA/l*k(p1), EA/2/l*k(p2), EA/l*k(p3))
pprint("\nk1 / (EA / l): ")
pprint(k1 / (EA/l) )
pprint("\nk2 / (EA / l): ")
pprint(k2 / (EA/l) )
pprint("\nk3 / (EA / l): ")
pprint(k3 / (EA/l) )
K = EA/l*Matrix([
[ 1 , -S(1)/2 ],
[ -S(1)/2, 1 ]
])
u2x, u3x = var("u2x, u3x")
u = Matrix([u2x , u3x ])
f = Matrix([F1 , F2 ])
u2x, u3x = var("u2x, u3x")
eq = Eq(K*u , f)
sol = solve(eq, [u2x, u3x])
pprint("\nSolution:")
pprint(sol)
u2x, u3x = sol[u2x], sol[u3x]
pprint("\nu2x / m:")
tmp = u2x.subs(sub_list)
tmp /= m
pprint(tmp)
pprint("\nu3x / m:")
tmp = u3x.subs(sub_list)
tmp /= m
pprint(tmp)
pprint("\nF1x / N:")
tmp = - EA/l * u2x/2
tmp = tmp.subs(sub_list)
tmp /= Newton
pprint(tmp)
# k1 / (EA / l):
# ⎡1/2 -1/2 -1/2 1/2 ⎤
# ⎢ ⎥
# ⎢-1/2 1/2 1/2 -1/2⎥
# ⎢ ⎥
# ⎢-1/2 1/2 1/2 -1/2⎥
# ⎢ ⎥
# ⎣1/2 -1/2 -1/2 1/2 ⎦
#
# k2 / (EA / l):
# ⎡1/2 0 -1/2 0⎤
# ⎢ ⎥
# ⎢ 0 0 0 0⎥
# ⎢ ⎥
# ⎢-1/2 0 1/2 0⎥
# ⎢ ⎥
# ⎣ 0 0 0 0⎦
#
# k3 / (EA / l):
# ⎡1/2 1/2 -1/2 -1/2⎤
# ⎢ ⎥
# ⎢1/2 1/2 -1/2 -1/2⎥
# ⎢ ⎥
# ⎢-1/2 -1/2 1/2 1/2 ⎥
# ⎢ ⎥
# ⎣-1/2 -1/2 1/2 1/2 ⎦
#
# Solution:
# ⎧ 2⋅l⋅(2⋅F₁ + F₂) 2⋅l⋅(F₁ + 2⋅F₂)⎫
# ⎨u2x: ───────────────, u3x: ───────────────⎬
# ⎩ 3⋅EA 3⋅EA ⎭
#
# u2x / m:
# 2/3
#
# u3x / m:
# 5/6
#
# F1x / N:
# -2/3
| [
"copy.deepcopy"
] | [((779, 792), 'copy.deepcopy', 'deepcopy', (['obj'], {}), '(obj)\n', (787, 792), False, 'from copy import deepcopy\n')] |
#!/usr/bin/python3
import csv
ucc_dictionary_file_list = [
'./downloads/diary08/diary08/uccd08.txt',
'./downloads/diary09/diary09/uccd09.txt',
'./downloads/diary11/diary11/uccd11.txt',
'./downloads/diary10/diary10/uccd10.txt',
]
cleaned_ucc_dictionary = dict()
for dictionary in ucc_dictionary_file_list:
with open(dictionary) as file:
line_list = file.read().splitlines()
for line in line_list:
ucc_tuple = tuple(line.split(" ", 1))
cleaned_ucc_dictionary[int(ucc_tuple[0])] = ucc_tuple[1]
with open('cleaned_ucc_dictionary.csv', 'w', newline='') as csvfile:
ucc_writer = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
for key, value in cleaned_ucc_dictionary.items():
ucc_writer.writerow([key, value])
# print(len(cleaned_ucc_dictionary.keys()))
# print(line_list) | [
"csv.writer"
] | [((642, 703), 'csv.writer', 'csv.writer', (['csvfile'], {'delimiter': '""","""', 'quoting': 'csv.QUOTE_MINIMAL'}), "(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)\n", (652, 703), False, 'import csv\n')] |
from django.test import TestCase
from django.contrib.auth.models import User
from article.models import Article, Category
class ArticleModelTestCase(TestCase):
def setUp(self):
self.category = Category.objects.create(name=u'Sports')
self.user = User.objects.create(username=u'test', password=u'<PASSWORD>')
def test_save(self):
new_article = Article.objects.create(
title=u'test',
content=u'test',
author=self.user,
category=self.category
)
self.assertEqual(new_article.title, u'test')
self.assertEqual(new_article.content, u'test')
self.assertEqual(new_article.author, self.user)
self.assertEqual(new_article.category, self.category)
def test_unique_slug(self):
new_article1 = Article.objects.create(
title=u'test',
content=u'test',
author=self.user,
category=self.category
)
new_article2 = Article.objects.create(
title=u'test',
content=u'test',
author=self.user,
category=self.category
)
self.assertTrue(new_article1.slug != new_article2.slug)
| [
"django.contrib.auth.models.User.objects.create",
"article.models.Category.objects.create",
"article.models.Article.objects.create"
] | [((208, 247), 'article.models.Category.objects.create', 'Category.objects.create', ([], {'name': 'u"""Sports"""'}), "(name=u'Sports')\n", (231, 247), False, 'from article.models import Article, Category\n'), ((268, 329), 'django.contrib.auth.models.User.objects.create', 'User.objects.create', ([], {'username': 'u"""test"""', 'password': 'u"""<PASSWORD>"""'}), "(username=u'test', password=u'<PASSWORD>')\n", (287, 329), False, 'from django.contrib.auth.models import User\n'), ((378, 478), 'article.models.Article.objects.create', 'Article.objects.create', ([], {'title': 'u"""test"""', 'content': 'u"""test"""', 'author': 'self.user', 'category': 'self.category'}), "(title=u'test', content=u'test', author=self.user,\n category=self.category)\n", (400, 478), False, 'from article.models import Article, Category\n'), ((816, 916), 'article.models.Article.objects.create', 'Article.objects.create', ([], {'title': 'u"""test"""', 'content': 'u"""test"""', 'author': 'self.user', 'category': 'self.category'}), "(title=u'test', content=u'test', author=self.user,\n category=self.category)\n", (838, 916), False, 'from article.models import Article, Category\n'), ((995, 1095), 'article.models.Article.objects.create', 'Article.objects.create', ([], {'title': 'u"""test"""', 'content': 'u"""test"""', 'author': 'self.user', 'category': 'self.category'}), "(title=u'test', content=u'test', author=self.user,\n category=self.category)\n", (1017, 1095), False, 'from article.models import Article, Category\n')] |
import os , sys
sys.path.append(os.getcwd())
import pytest
from typehints_checker import *
@pytest.mark.asyncio
async def test_import():
... | [
"os.getcwd"
] | [((32, 43), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (41, 43), False, 'import os, sys\n')] |
import glib
import dbus
from dbus.mainloop.glib import DBusGMainLoop
from pyee import EventEmitter
import logbook
logger = logbook.Logger('connman-dispatcher')
__all__ = ['detector']
def property_changed(_, message):
if message.get_member() == "PropertyChanged":
_, state = message.get_args_list()
if state == 'online' and detector.state == 'offline':
logger.info('network state change: online' )
detector.emit('up')
detector.state = 'online'
elif state == 'idle':
logger.info('network state change: offline' )
detector.emit('down')
detector.state = 'offline'
detector = EventEmitter()
DBusGMainLoop(set_as_default=True)
bus = dbus.SystemBus()
bus.add_match_string_non_blocking("interface='net.connman.Manager'")
bus.add_message_filter(property_changed)
manager = dbus.Interface(bus.get_object('net.connman', "/"), 'net.connman.Manager')
def is_online():
properties = manager.GetProperties()
if properties['State'] == 'online':
return True
return False
def run():
detector.state = 'offline'
if is_online:
detector.emit('up')
detector.state = 'online'
mainloop = glib.MainLoop()
mainloop.run()
detector.run = run
| [
"pyee.EventEmitter",
"logbook.Logger",
"dbus.mainloop.glib.DBusGMainLoop",
"glib.MainLoop",
"dbus.SystemBus"
] | [((123, 159), 'logbook.Logger', 'logbook.Logger', (['"""connman-dispatcher"""'], {}), "('connman-dispatcher')\n", (137, 159), False, 'import logbook\n'), ((677, 691), 'pyee.EventEmitter', 'EventEmitter', ([], {}), '()\n', (689, 691), False, 'from pyee import EventEmitter\n'), ((692, 726), 'dbus.mainloop.glib.DBusGMainLoop', 'DBusGMainLoop', ([], {'set_as_default': '(True)'}), '(set_as_default=True)\n', (705, 726), False, 'from dbus.mainloop.glib import DBusGMainLoop\n'), ((734, 750), 'dbus.SystemBus', 'dbus.SystemBus', ([], {}), '()\n', (748, 750), False, 'import dbus\n'), ((1222, 1237), 'glib.MainLoop', 'glib.MainLoop', ([], {}), '()\n', (1235, 1237), False, 'import glib\n')] |
#!/usr/bin/env python3
#
# Copyright (c) 2015 - 2022, Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause
#
"""
Runs an application with a large number of short regions and checks
that the controller successfully runs.
"""
import sys
import unittest
import os
import subprocess
import glob
import geopmpy.io
import geopmpy.agent
import geopmdpy.error
import geopmdpy.topo
from integration.test import geopm_test_launcher
from integration.test import check_trace
class AppConf(object):
"""Class that is used by the test launcher in place of a
geopmpy.io.BenchConf when running the profile_overflow benchmark.
"""
def write(self):
"""Called by the test launcher prior to executing the test application
to write any files required by the application.
"""
pass
def get_exec_path(self):
"""Path to benchmark filled in by template automatically.
"""
script_dir = os.path.dirname(os.path.realpath(__file__))
return os.path.join(script_dir, '.libs', 'test_profile_overflow')
def get_exec_args(self):
"""Returns a list of strings representing the command line arguments
to pass to the test-application for the next run. This is
especially useful for tests that execute the test-application
multiple times.
"""
return []
class TestIntegration_profile_overflow(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Create launcher, execute benchmark and set up class variables.
"""
sys.stdout.write('(' + os.path.basename(__file__).split('.')[0] +
'.' + cls.__name__ + ') ...')
test_name = 'test_profile_overflow'
cls._report_path = '{}.report'.format(test_name)
cls._trace_path = '{}.trace'.format(test_name)
cls._log_path = '{}.log'.format(test_name)
cls._agent_conf_path = test_name + '-agent-config.json'
# Set the job size parameters such that we have a 3 level tree
os.environ["GEOPM_MAX_FAN_OUT"] = "2"
num_node = 4
num_rank = geopmdpy.topo.num_domain(geopmdpy.topo.DOMAIN_CORE) - 2
time_limit = 600
# Configure the test application
app_conf = AppConf()
# Configure the agent
agent_conf = geopmpy.agent.AgentConf(cls._agent_conf_path)
# Create the test launcher with the above configuration
launcher = geopm_test_launcher.TestLauncher(app_conf,
agent_conf,
cls._report_path,
cls._trace_path,
time_limit=time_limit)
launcher.set_num_node(num_node)
launcher.set_num_rank(num_rank)
# Run the test application
try:
launcher.run(test_name)
except subprocess.CalledProcessError:
sys.stderr.write('{} failed; check log for details.\n'.format(test_name))
raise
@classmethod
def tearDownClass(cls):
os.environ.pop("GEOPM_MAX_FAN_OUT")
def test_load_report(self):
'''
Test that the report can be loaded.
'''
report = geopmpy.io.RawReport(self._report_path)
hosts = report.host_names()
for hh in hosts:
runtime = report.raw_totals(hh)['runtime (s)']
self.assertNotEqual(0, runtime)
def test_short_region_count(self):
'''
Test that the count for MPI_Barrier is as expected.
'''
report = geopmpy.io.RawReport(self._report_path)
hosts = report.host_names()
for hh in hosts:
region_data = report.raw_region(hh, 'MPI_Barrier')
count = region_data['count']
self.assertEqual(count, 10000000)
def test_sample_rate(self):
'''
Test that the sample rate is regular.
'''
traces = glob.glob(self._trace_path + "*")
if len(traces) == 0:
raise RuntimeError("No traces found with prefix: {}".format(self._trace_path_prefix))
for tt in traces:
check_trace.check_sample_rate(tt, 0.005)
if __name__ == '__main__':
unittest.main()
| [
"integration.test.check_trace.check_sample_rate",
"os.path.join",
"os.environ.pop",
"os.path.realpath",
"integration.test.geopm_test_launcher.TestLauncher",
"os.path.basename",
"unittest.main",
"glob.glob"
] | [((4278, 4293), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4291, 4293), False, 'import unittest\n'), ((1009, 1067), 'os.path.join', 'os.path.join', (['script_dir', '""".libs"""', '"""test_profile_overflow"""'], {}), "(script_dir, '.libs', 'test_profile_overflow')\n", (1021, 1067), False, 'import os\n'), ((2446, 2562), 'integration.test.geopm_test_launcher.TestLauncher', 'geopm_test_launcher.TestLauncher', (['app_conf', 'agent_conf', 'cls._report_path', 'cls._trace_path'], {'time_limit': 'time_limit'}), '(app_conf, agent_conf, cls._report_path,\n cls._trace_path, time_limit=time_limit)\n', (2478, 2562), False, 'from integration.test import geopm_test_launcher\n'), ((3135, 3170), 'os.environ.pop', 'os.environ.pop', (['"""GEOPM_MAX_FAN_OUT"""'], {}), "('GEOPM_MAX_FAN_OUT')\n", (3149, 3170), False, 'import os\n'), ((4005, 4038), 'glob.glob', 'glob.glob', (["(self._trace_path + '*')"], {}), "(self._trace_path + '*')\n", (4014, 4038), False, 'import glob\n'), ((966, 992), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (982, 992), False, 'import os\n'), ((4204, 4244), 'integration.test.check_trace.check_sample_rate', 'check_trace.check_sample_rate', (['tt', '(0.005)'], {}), '(tt, 0.005)\n', (4233, 4244), False, 'from integration.test import check_trace\n'), ((1588, 1614), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1604, 1614), False, 'import os\n')] |
from django import template
register = template.Library()
RACK_SIZE_PX = 20
MARGIN_HEIGHT = 2
def _rack_unit_to_height(units):
# for every unit over 1 add a 2 px margin
margin = (units - 1) * MARGIN_HEIGHT
return units * RACK_SIZE_PX + margin
def _equipment_spacer(units):
return {
'units': units,
'spacer': True,
'height': "{}px".format(_rack_unit_to_height(units)),
}
def _rack_sort(item):
# Sort by rack position, sencoded by unit size
pos = int(item.get('node').data.get('rack_position', -1))
size = int(item.get('node').data.get('rack_units', 0)) * -1
return (pos, size)
def _equipment(item):
data = item.get('node').data
units = int(data.get('rack_units', 1))
return {
'units': units,
'position': int(data.get('rack_position', 0) or 0),
'position_end': units + int(data.get('rack_position', 1)) - 1,
'height': "{}px".format(_rack_unit_to_height(units)),
'sub_equipment': [],
'is_back': data.get('rack_back'),
'data': data,
}
def place_equipment(view_data, current_idx, last_eq, result):
spacing = view_data['position'] - current_idx
if spacing < 0:
# Equipment overlaps with previous
last_eq['sub_equipment'].append(view_data)
else:
if spacing > 0:
result.append(_equipment_spacer(spacing))
result.append(view_data)
new_idx = view_data['position'] + view_data['units']
return new_idx, view_data
return current_idx, last_eq
@register.inclusion_tag('noclook/tags/rack.html')
def noclook_rack(rack, equipment):
if equipment:
equipment.sort(key=_rack_sort)
racked_equipment = []
racked_equipment_back = []
unracked_equipment = []
# mem
front_idx = 1
front_last_eq = None
back_idx = 1
back_last_eq = None
for item in equipment:
view_data = _equipment(item)
is_rack_front = not view_data.get('is_back')
if view_data['position'] > 0:
if is_rack_front:
front_idx, front_last_eq = place_equipment(view_data, front_idx, front_last_eq, racked_equipment)
else:
back_idx, back_last_eq = place_equipment(view_data, back_idx, back_last_eq, racked_equipment_back)
else:
unracked_equipment.append(item)
return {
'rack_size': _rack_unit_to_height(rack.data.get('rack_units', 42)),
'racked_equipment': racked_equipment,
'racked_equipment_back': racked_equipment_back,
'unracked_equipment': unracked_equipment,
}
@register.filter
def rack_sort(equipment):
if equipment:
equipment.sort(key=_rack_sort, reverse=True)
return equipment
| [
"django.template.Library"
] | [((40, 58), 'django.template.Library', 'template.Library', ([], {}), '()\n', (56, 58), False, 'from django import template\n')] |
import numpy as np
import matplotlib.pyplot as plt
from shamir import *
from binascii import hexlify
# img = plt.imread('cat.png')
# plt.imshow(img)
# plt.show()
s = 'TEST_STRING'.encode()
print("Original secret:", hexlify(s))
l = Shamir.split(3, 5, '12345'.encode())
for idx, item in l:
print("Share {}: {}".format(str(idx), hexlify(item)))
shares = l[1:4]
secret = Shamir.combine(shares)
print(f'Secret is : {secret.decode()}') | [
"binascii.hexlify"
] | [((218, 228), 'binascii.hexlify', 'hexlify', (['s'], {}), '(s)\n', (225, 228), False, 'from binascii import hexlify\n'), ((335, 348), 'binascii.hexlify', 'hexlify', (['item'], {}), '(item)\n', (342, 348), False, 'from binascii import hexlify\n')] |
from gerapy.server.manage import manage
import sys
def server():
# Call django cmd
manage()
| [
"gerapy.server.manage.manage"
] | [((92, 100), 'gerapy.server.manage.manage', 'manage', ([], {}), '()\n', (98, 100), False, 'from gerapy.server.manage import manage\n')] |
#!/usr/bin/env python
import sys, tty, termios, array, fcntl, curses
class TTYSettings(object):
def __init__(self):
self.tty_fd = sys.stdout.fileno()
# save
self.saved = termios.tcgetattr(self.tty_fd)
self.win_size = self.get_win_size()
self.rows, self.cols = self.win_size[0], self.win_size[1]
curses.setupterm()
self.num_colors = curses.tigetnum("colors")
def set_raw_no_echo(self):
# set raw mode
tty.setraw(self.tty_fd, termios.TCSADRAIN)
# disable echo
new = termios.tcgetattr(self.tty_fd)
new[3] &= ~termios.ECHO
termios.tcsetattr(self.tty_fd, termios.TCSADRAIN, new)
def restore(self):
# return saved conf
termios.tcsetattr(self.tty_fd, termios.TCSADRAIN, self.saved)
def get_win_size(self):
buf = array.array('h', [0, 0, 0, 0])
fcntl.ioctl(self.tty_fd, termios.TIOCGWINSZ, buf, True)
return buf
| [
"fcntl.ioctl",
"array.array",
"curses.tigetnum",
"termios.tcsetattr",
"curses.setupterm",
"termios.tcgetattr",
"sys.stdout.fileno",
"tty.setraw"
] | [((143, 162), 'sys.stdout.fileno', 'sys.stdout.fileno', ([], {}), '()\n', (160, 162), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((199, 229), 'termios.tcgetattr', 'termios.tcgetattr', (['self.tty_fd'], {}), '(self.tty_fd)\n', (216, 229), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((348, 366), 'curses.setupterm', 'curses.setupterm', ([], {}), '()\n', (364, 366), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((393, 418), 'curses.tigetnum', 'curses.tigetnum', (['"""colors"""'], {}), "('colors')\n", (408, 418), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((481, 523), 'tty.setraw', 'tty.setraw', (['self.tty_fd', 'termios.TCSADRAIN'], {}), '(self.tty_fd, termios.TCSADRAIN)\n', (491, 523), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((561, 591), 'termios.tcgetattr', 'termios.tcgetattr', (['self.tty_fd'], {}), '(self.tty_fd)\n', (578, 591), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((632, 686), 'termios.tcsetattr', 'termios.tcsetattr', (['self.tty_fd', 'termios.TCSADRAIN', 'new'], {}), '(self.tty_fd, termios.TCSADRAIN, new)\n', (649, 686), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((746, 807), 'termios.tcsetattr', 'termios.tcsetattr', (['self.tty_fd', 'termios.TCSADRAIN', 'self.saved'], {}), '(self.tty_fd, termios.TCSADRAIN, self.saved)\n', (763, 807), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((850, 880), 'array.array', 'array.array', (['"""h"""', '[0, 0, 0, 0]'], {}), "('h', [0, 0, 0, 0])\n", (861, 880), False, 'import sys, tty, termios, array, fcntl, curses\n'), ((889, 944), 'fcntl.ioctl', 'fcntl.ioctl', (['self.tty_fd', 'termios.TIOCGWINSZ', 'buf', '(True)'], {}), '(self.tty_fd, termios.TIOCGWINSZ, buf, True)\n', (900, 944), False, 'import sys, tty, termios, array, fcntl, curses\n')] |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# (C) British Crown Copyright 2017-2021 Met Office.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Unit tests for the
`ensemble_calibration.CalibratedForecastDistributionParameters`
class.
"""
import unittest
import numpy as np
from iris.cube import CubeList
from iris.tests import IrisTest
from numpy.testing import assert_array_almost_equal
from improver.calibration.ensemble_calibration import (
CalibratedForecastDistributionParameters as Plugin,
)
from improver.calibration.ensemble_calibration import (
EstimateCoefficientsForEnsembleCalibration,
)
from improver.metadata.constants.attributes import MANDATORY_ATTRIBUTE_DEFAULTS
from improver.synthetic_data.set_up_test_cubes import set_up_variable_cube
from improver.utilities.warnings_handler import ManageWarnings
from .helper_functions import EnsembleCalibrationAssertions, SetupCubes
from .test_EstimateCoefficientsForEnsembleCalibration import SetupExpectedCoefficients
class SetupCoefficientsCubes(SetupCubes, SetupExpectedCoefficients):
"""Set up coefficients cubes for testing."""
@ManageWarnings(
ignored_messages=[
"Collapsing a non-contiguous coordinate.",
"invalid escape sequence",
],
warning_types=[UserWarning, DeprecationWarning],
)
def setUp(self):
"""Set up coefficients cubes for when either the ensemble mean or the
ensemble realizations have been used as the predictor. The coefficients
have been constructed from the same underlying set of ensemble
realizations, so application of these coefficients would be expected
to give similar results. The values for the coefficients used to
construct the coefficients cubes are taken from the
SetupExpectedCoefficients class. These coefficients are the
expected outputs from the tests to estimate the coefficients."""
super().setUp()
# Set up a coefficients cube when using the ensemble mean as the
# predictor.
estimator = EstimateCoefficientsForEnsembleCalibration(
"norm", desired_units="Celsius"
)
self.coeffs_from_mean = estimator.create_coefficients_cubelist(
self.expected_mean_pred_norm,
self.historic_temperature_forecast_cube,
CubeList([self.historic_temperature_forecast_cube]),
)
# Set up a timeshifted coefficients cube using the ensemble mean as a
# predictor.
forecast_timeshift_cube = self.historic_temperature_forecast_cube.copy()
for coord_name in ["time", "forecast_period"]:
forecast_timeshift_cube.coord(coord_name).points = [
_ + 3600 for _ in forecast_timeshift_cube.coord(coord_name).points
]
self.coeffs_from_mean_timeshift = estimator.create_coefficients_cubelist(
self.expected_mean_pred_norm,
forecast_timeshift_cube,
CubeList([forecast_timeshift_cube]),
)
# Set up a coefficients cube when using the ensemble mean as the
# predictor and separate coefficients at each point.
estimator = EstimateCoefficientsForEnsembleCalibration(
"norm", point_by_point=True, desired_units="Celsius"
)
point_by_point_predictor = np.stack(
[self.expected_mean_pred_norm] * 9
).T.reshape(4, 3, 3)
self.coeffs_from_mean_point_by_point = estimator.create_coefficients_cubelist(
point_by_point_predictor,
self.historic_temperature_forecast_cube,
CubeList([self.historic_temperature_forecast_cube]),
)
# Set up a coefficients cube when using the ensemble realization as the
# predictor.
estimator = EstimateCoefficientsForEnsembleCalibration(
"norm", desired_units="Celsius", predictor="realizations"
)
self.coeffs_from_realizations = estimator.create_coefficients_cubelist(
self.expected_realizations_norm,
self.historic_temperature_forecast_cube,
CubeList([self.historic_temperature_forecast_cube]),
)
# Set up a coefficients cube when using the ensemble realization as the
# predictor and separate coefficients at each point.
expected_realizations_each_site = [
array if array.ndim == 1 else np.squeeze(array)
for array in list(self.expected_realizations_each_site.values())
]
estimator = EstimateCoefficientsForEnsembleCalibration(
"norm", predictor="realizations", point_by_point=True
)
self.coeffs_from_realizations_sites = estimator.create_coefficients_cubelist(
expected_realizations_each_site,
self.historic_forecast_spot_cube,
CubeList([self.historic_temperature_forecast_cube]),
)
# # Set up a coefficients cube when using an additional predictor.
self.altitude = set_up_variable_cube(
np.ones((3, 3), dtype=np.float32), name="surface_altitude", units="m"
)
for coord in ["time", "forecast_reference_time", "forecast_period"]:
self.altitude.remove_coord(coord)
estimator = EstimateCoefficientsForEnsembleCalibration(
"norm", desired_units="Celsius"
)
self.coeffs_from_mean_alt = estimator.create_coefficients_cubelist(
self.expected_mean_pred_norm_alt,
self.historic_temperature_forecast_cube,
CubeList([self.historic_temperature_forecast_cube, self.altitude]),
)
# Some expected data that are used in various tests.
self.expected_loc_param_mean = np.array(
[
[273.7014, 274.6534, 275.4469],
[276.9385, 277.7636, 278.5570],
[279.6996, 280.1122, 281.2547],
],
dtype=np.float32,
)
self.expected_scale_param_mean = np.array(
[
[0.2316, 0.2342, 0.0168],
[0.0271, 0.0237, 0.0168],
[0.0634, 0.1151, 0.0116],
],
dtype=np.float32,
)
self.expected_loc_param_realizations = np.array(
[
[274.388, 275.3053, 275.4492],
[277.1295, 277.3866, 278.4672],
[280.2007, 280.3929, 281.2602],
],
dtype=np.float32,
)
self.expected_loc_param_realizations_sites = np.array(
[277.7531, 277.4529, 277.553, 277.2528], dtype=np.float32,
)
self.expected_scale_param_realizations_sites = np.array(
[0, 0, 0, 0], dtype=np.float32
)
self.expected_loc_param_mean_alt = np.array(
[
[275.18134, 276.18134, 277.01465],
[278.58133, 279.44797, 280.2813],
[281.48132, 281.91464, 283.11465],
],
dtype=np.float32,
)
self.expected_scale_param_mean_alt = np.array(
[
[0.4347, 0.4396, 0.0308],
[0.0503, 0.0438, 0.0308],
[0.1184, 0.2157, 0.0211],
],
dtype=np.float32,
)
# Create output cubes with the expected data.
self.expected_loc_param_mean_cube = set_up_variable_cube(
self.expected_loc_param_mean,
name="location_parameter",
units="K",
attributes=MANDATORY_ATTRIBUTE_DEFAULTS,
)
self.expected_scale_param_mean_cube = set_up_variable_cube(
self.expected_scale_param_mean,
name="scale_parameter",
units="Kelvin^2",
attributes=MANDATORY_ATTRIBUTE_DEFAULTS,
)
class Test__init__(IrisTest):
    """Test the __init__ method."""

    def test_basic(self):
        """Test without specifying a predictor: defaults to the mean."""
        self.assertEqual(Plugin().predictor, "mean")

    def test_with_predictor(self):
        """Test specifying the predictor explicitly."""
        self.assertEqual(Plugin(predictor="realizations").predictor, "realizations")
class Test__repr__(IrisTest):
    """Test the __repr__ method."""

    def test_basic(self):
        """Test without the predictor."""
        expected = "<CalibratedForecastDistributionParameters: predictor: mean>"
        self.assertEqual(str(Plugin()), expected)

    def test_with_predictor(self):
        """Test specifying the predictor."""
        expected = (
            "<CalibratedForecastDistributionParameters: predictor: realizations>"
        )
        self.assertEqual(str(Plugin(predictor="realizations")), expected)
class Test__spatial_domain_match(SetupCoefficientsCubes):
    """Test the _spatial_domain_match method."""

    def setUp(self):
        """Create the plugin that each test configures."""
        super().setUp()
        self.plugin = Plugin()

    def test_matching(self):
        """Test case in which spatial domains match: no exception raised."""
        self.plugin.current_forecast = self.current_temperature_forecast_cube
        self.plugin.coefficients_cubelist = self.coeffs_from_mean
        self.plugin._spatial_domain_match()

    def test_unmatching_x_axis_points(self):
        """Test when the points of the x dimension do not match."""
        # Offsetting the bounds makes the forecast domain disagree with the
        # domain recorded on the coefficients.
        x_coord = self.current_temperature_forecast_cube.coord(axis="x")
        x_coord.bounds = x_coord.bounds + 2.0
        self.plugin.current_forecast = self.current_temperature_forecast_cube
        self.plugin.coefficients_cubelist = self.coeffs_from_mean
        msg = "The points or bounds of the x axis given by the current forecast"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin._spatial_domain_match()

    def test_unmatching_x_axis_bounds(self):
        """Test when the bounds of the x dimension do not match."""
        new_bounds = [[-35, -5], [-5, 5], [5, 35]]
        self.current_temperature_forecast_cube.coord(axis="x").bounds = new_bounds
        self.plugin.current_forecast = self.current_temperature_forecast_cube
        self.plugin.coefficients_cubelist = self.coeffs_from_mean
        msg = "The points or bounds of the x axis given by the current forecast"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin._spatial_domain_match()

    def test_unmatching_y_axis(self):
        """Test case in which the y-dimensions of the domains do not match."""
        y_coord = self.current_temperature_forecast_cube.coord(axis="y")
        y_coord.bounds = y_coord.bounds + 2.0
        self.plugin.current_forecast = self.current_temperature_forecast_cube
        self.plugin.coefficients_cubelist = self.coeffs_from_mean
        msg = "The points or bounds of the y axis given by the current forecast"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin._spatial_domain_match()

    def test_skipping_spot_forecast(self):
        """Test passing a spot forecast. In this case, the spatial domain
        is not checked."""
        self.plugin.current_forecast = self.current_forecast_spot_cube
        self.plugin._spatial_domain_match()
class Test__calculate_location_parameter_from_mean(
    SetupCoefficientsCubes, EnsembleCalibrationAssertions
):
    """Test the __calculate_location_parameter_from_mean method."""

    def setUp(self):
        """Set-up coefficients and plugin for testing."""
        super().setUp()
        self.plugin = Plugin()
        self.plugin.current_forecast = self.current_temperature_forecast_cube
        self.plugin.coefficients_cubelist = self.coeffs_from_mean

    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_basic(self):
        """Check the location parameter computed from the ensemble mean
        matches the expected values, and is close to the values obtained
        when the ensemble realizations are used as the predictor."""
        result = self.plugin._calculate_location_parameter_from_mean()
        self.assertCalibratedVariablesAlmostEqual(result, self.expected_loc_param_mean)
        # Mean- and realization-based estimates should broadly agree.
        assert_array_almost_equal(
            result, self.expected_loc_param_realizations, decimal=0,
        )

    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_missing_additional_predictor(self):
        """An error is raised if the coefficients imply an additional
        predictor that has not been supplied."""
        self.plugin.coefficients_cubelist = self.coeffs_from_mean_alt
        msg = "The number of forecast predictors must equal the number"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin._calculate_location_parameter_from_mean()
class Test__calculate_location_parameter_from_realizations(
    SetupCoefficientsCubes, EnsembleCalibrationAssertions
):
    """Test the _calculate_location_parameter_from_realizations method."""

    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def setUp(self):
        """Set-up coefficients and plugin for testing."""
        super().setUp()
        self.plugin = Plugin()
        self.plugin.current_forecast = self.current_temperature_forecast_cube

    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_basic(self):
        """Check the location parameter derived from the ensemble
        realizations matches the expected values, and is close to the
        values obtained when the ensemble mean is the predictor."""
        self.plugin.coefficients_cubelist = self.coeffs_from_realizations
        result = self.plugin._calculate_location_parameter_from_realizations()
        self.assertCalibratedVariablesAlmostEqual(
            result, self.expected_loc_param_realizations
        )
        # Realization- and mean-based estimates should broadly agree.
        assert_array_almost_equal(result, self.expected_loc_param_mean, decimal=0)
class Test__calculate_scale_parameter(
    SetupCoefficientsCubes, EnsembleCalibrationAssertions
):
    """Test the _calculate_scale_parameter method."""

    def setUp(self):
        """Set-up the plugin for testing."""
        super().setUp()
        self.plugin = Plugin()
        self.plugin.current_forecast = self.current_temperature_forecast_cube

    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_basic(self):
        """Test the scale parameter is calculated correctly."""
        self.plugin.coefficients_cubelist = self.coeffs_from_mean
        result = self.plugin._calculate_scale_parameter()
        self.assertCalibratedVariablesAlmostEqual(
            result, self.expected_scale_param_mean
        )
class Test__create_output_cubes(SetupCoefficientsCubes, EnsembleCalibrationAssertions):
    """Test the _create_output_cubes method."""

    def setUp(self):
        """Set-up the plugin for testing."""
        super().setUp()
        self.plugin = Plugin()
        self.plugin.current_forecast = self.current_temperature_forecast_cube

    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_basic(self):
        """The cubes wrapping the location and scale parameters are
        formatted as expected."""
        loc_cube, scale_cube = self.plugin._create_output_cubes(
            self.expected_loc_param_mean, self.expected_scale_param_mean
        )
        self.assertEqual(loc_cube, self.expected_loc_param_mean_cube)
        self.assertEqual(scale_cube, self.expected_scale_param_mean_cube)
class Test_process(SetupCoefficientsCubes, EnsembleCalibrationAssertions):
    """Test the process plugin."""
    def setUp(self):
        """Set-up the plugin for testing."""
        super().setUp()
        self.plugin = Plugin()
    # Validation failures: mismatched diagnostic or time metadata between the
    # forecast and the coefficients should raise ValueError.
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_diagnostic_match(self):
        """Test that an error is raised if the diagnostic_standard_name does
        not match when comparing a forecast cube and coefficients cubelist."""
        msg = "The forecast diagnostic"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin.process(
                self.current_wind_speed_forecast_cube, self.coeffs_from_mean
            )
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_time_match(self):
        """Test that an error is raised if the time coordinates do
        not match when comparing a forecast cube and coefficients cubelist."""
        msg = "rounded forecast_period hours"
        with self.assertRaisesRegex(ValueError, msg):
            self.plugin.process(
                self.current_temperature_forecast_cube, self.coeffs_from_mean_timeshift
            )
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_time_match_tolerate(self):
        """Test that no error is raised when using a coefficients file with
        a mismatching forecast_period coordinate, if the
        tolerate_time_mismatch option is enabled."""
        calibrated_forecast_predictor, calibrated_forecast_var = self.plugin.process(
            self.current_temperature_forecast_cube,
            self.coeffs_from_mean_timeshift,
            tolerate_time_mismatch=True,
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_predictor.data, self.expected_loc_param_mean
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_var.data, self.expected_scale_param_mean
        )
        self.assertEqual(calibrated_forecast_predictor.dtype, np.float32)
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_variable_setting(self):
        """Test that the cubes passed into the plugin are allocated to
        plugin variables appropriately."""
        # process() stores its inputs on the plugin instance; check both.
        _, _ = self.plugin.process(
            self.current_temperature_forecast_cube, self.coeffs_from_mean
        )
        self.assertEqual(
            self.current_temperature_forecast_cube, self.plugin.current_forecast
        )
        self.assertEqual(self.coeffs_from_mean, self.plugin.coefficients_cubelist)
    # End-to-end checks: process() returns float32 location and scale
    # parameters matching the expected values for each coefficient source.
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_end_to_end(self):
        """An example end-to-end calculation. This repeats the test elements
        above but all grouped together."""
        calibrated_forecast_predictor, calibrated_forecast_var = self.plugin.process(
            self.current_temperature_forecast_cube, self.coeffs_from_mean
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_predictor.data, self.expected_loc_param_mean
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_var.data, self.expected_scale_param_mean
        )
        self.assertEqual(calibrated_forecast_predictor.dtype, np.float32)
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_end_to_end_point_by_point(self):
        """An example end-to-end calculation when a separate set of
        coefficients are computed for each grid point. This repeats the test
        elements above but all grouped together."""
        calibrated_forecast_predictor, calibrated_forecast_var = self.plugin.process(
            self.current_temperature_forecast_cube, self.coeffs_from_mean_point_by_point
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_predictor.data, self.expected_loc_param_mean
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_var.data, self.expected_scale_param_mean
        )
        self.assertEqual(calibrated_forecast_predictor.dtype, np.float32)
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_end_to_end_point_by_point_sites_realizations(self):
        """An example end-to-end calculation when a separate set of
        coefficients are computed for each site using the realizations as the
        predictor. This repeats the test elements above but all grouped together."""
        plugin = Plugin(predictor="realizations")
        calibrated_forecast_predictor, calibrated_forecast_var = plugin.process(
            self.current_forecast_spot_cube, self.coeffs_from_realizations_sites
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_predictor.data,
            self.expected_loc_param_realizations_sites,
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_var.data, self.expected_scale_param_realizations_sites
        )
        self.assertEqual(calibrated_forecast_predictor.dtype, np.float32)
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_end_to_end_with_additional_predictor(self):
        """Test that the expected calibrated forecast is generated, if an
        additional predictor is provided."""
        calibrated_forecast_predictor, calibrated_forecast_var = self.plugin.process(
            self.current_temperature_forecast_cube,
            self.coeffs_from_mean_alt,
            additional_fields=CubeList([self.altitude]),
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_predictor.data, self.expected_loc_param_mean_alt
        )
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_var.data, self.expected_scale_param_mean_alt
        )
        self.assertEqual(calibrated_forecast_predictor.dtype, np.float32)
    @ManageWarnings(ignored_messages=["Collapsing a non-contiguous coordinate."])
    def test_end_to_end_with_mask(self):
        """An example end-to-end calculation, but making sure that the
        areas that are masked within the landsea mask, are masked at the
        end."""
        # Construct a mask and encapsulate as a cube.
        mask = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
        mask_cube = self.current_temperature_forecast_cube[0].copy(data=mask)
        # Convention for IMPROVER is that land points are ones and sea points
        # are zeros in land-sea masks. In this case we want to mask sea points.
        expected_mask = np.array(
            [[False, True, True], [True, False, True], [True, True, False]]
        )
        calibrated_forecast_predictor, calibrated_forecast_var = self.plugin.process(
            self.current_temperature_forecast_cube,
            self.coeffs_from_mean,
            landsea_mask=mask_cube,
        )
        # The underlying values are unchanged; only the mask is applied.
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_predictor.data.data, self.expected_loc_param_mean
        )
        self.assertArrayEqual(calibrated_forecast_predictor.data.mask, expected_mask)
        self.assertCalibratedVariablesAlmostEqual(
            calibrated_forecast_var.data.data, self.expected_scale_param_mean
        )
        self.assertArrayEqual(calibrated_forecast_var.data.mask, expected_mask)
# Allow the test module to be run directly as a script.
if __name__ == "__main__":
    unittest.main()
| [
"improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters",
"numpy.testing.assert_array_almost_equal",
"iris.cube.CubeList",
"numpy.ones",
"improver.synthetic_data.set_up_test_cubes.set_up_variable_cube",
"improver.calibration.ensemble_calibration.EstimateCoefficientsForEnsembleC... | [((2638, 2799), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.', 'invalid escape sequence']", 'warning_types': '[UserWarning, DeprecationWarning]'}), "(ignored_messages=['Collapsing a non-contiguous coordinate.',\n 'invalid escape sequence'], warning_types=[UserWarning, DeprecationWarning]\n )\n", (2652, 2799), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((13255, 13331), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (13269, 13331), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((13962, 14038), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (13976, 14038), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((14693, 14769), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (14707, 14769), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((14989, 15065), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (15003, 15065), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((16158, 16234), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a 
non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (16172, 16234), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((16924, 17000), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (16938, 17000), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((17768, 17844), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (17782, 17844), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((18262, 18338), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (18276, 18338), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((18757, 18833), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (18771, 18833), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((19646, 19722), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (19660, 19722), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((20201, 20277), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), 
"(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (20215, 20277), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((20952, 21028), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (20966, 21028), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((21810, 21886), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (21824, 21886), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((22799, 22875), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (22813, 22875), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((23657, 23733), 'improver.utilities.warnings_handler.ManageWarnings', 'ManageWarnings', ([], {'ignored_messages': "['Collapsing a non-contiguous coordinate.']"}), "(ignored_messages=['Collapsing a non-contiguous coordinate.'])\n", (23671, 23733), False, 'from improver.utilities.warnings_handler import ManageWarnings\n'), ((25107, 25122), 'unittest.main', 'unittest.main', ([], {}), '()\n', (25120, 25122), False, 'import unittest\n'), ((3588, 3663), 'improver.calibration.ensemble_calibration.EstimateCoefficientsForEnsembleCalibration', 'EstimateCoefficientsForEnsembleCalibration', (['"""norm"""'], {'desired_units': '"""Celsius"""'}), "('norm', desired_units='Celsius')\n", (3630, 3663), False, 'from improver.calibration.ensemble_calibration import EstimateCoefficientsForEnsembleCalibration\n'), ((4702, 4802), 
'improver.calibration.ensemble_calibration.EstimateCoefficientsForEnsembleCalibration', 'EstimateCoefficientsForEnsembleCalibration', (['"""norm"""'], {'point_by_point': '(True)', 'desired_units': '"""Celsius"""'}), "('norm', point_by_point=True,\n desired_units='Celsius')\n", (4744, 4802), False, 'from improver.calibration.ensemble_calibration import EstimateCoefficientsForEnsembleCalibration\n'), ((5317, 5422), 'improver.calibration.ensemble_calibration.EstimateCoefficientsForEnsembleCalibration', 'EstimateCoefficientsForEnsembleCalibration', (['"""norm"""'], {'desired_units': '"""Celsius"""', 'predictor': '"""realizations"""'}), "('norm', desired_units='Celsius',\n predictor='realizations')\n", (5359, 5422), False, 'from improver.calibration.ensemble_calibration import EstimateCoefficientsForEnsembleCalibration\n'), ((6048, 6149), 'improver.calibration.ensemble_calibration.EstimateCoefficientsForEnsembleCalibration', 'EstimateCoefficientsForEnsembleCalibration', (['"""norm"""'], {'predictor': '"""realizations"""', 'point_by_point': '(True)'}), "('norm', predictor='realizations',\n point_by_point=True)\n", (6090, 6149), False, 'from improver.calibration.ensemble_calibration import EstimateCoefficientsForEnsembleCalibration\n'), ((6778, 6853), 'improver.calibration.ensemble_calibration.EstimateCoefficientsForEnsembleCalibration', 'EstimateCoefficientsForEnsembleCalibration', (['"""norm"""'], {'desired_units': '"""Celsius"""'}), "('norm', desired_units='Celsius')\n", (6820, 6853), False, 'from improver.calibration.ensemble_calibration import EstimateCoefficientsForEnsembleCalibration\n'), ((7242, 7370), 'numpy.array', 'np.array', (['[[273.7014, 274.6534, 275.4469], [276.9385, 277.7636, 278.557], [279.6996, \n 280.1122, 281.2547]]'], {'dtype': 'np.float32'}), '([[273.7014, 274.6534, 275.4469], [276.9385, 277.7636, 278.557], [\n 279.6996, 280.1122, 281.2547]], dtype=np.float32)\n', (7250, 7370), True, 'import numpy as np\n'), ((7506, 7617), 'numpy.array', 'np.array', 
(['[[0.2316, 0.2342, 0.0168], [0.0271, 0.0237, 0.0168], [0.0634, 0.1151, 0.0116]]'], {'dtype': 'np.float32'}), '([[0.2316, 0.2342, 0.0168], [0.0271, 0.0237, 0.0168], [0.0634, \n 0.1151, 0.0116]], dtype=np.float32)\n', (7514, 7617), True, 'import numpy as np\n'), ((7758, 7886), 'numpy.array', 'np.array', (['[[274.388, 275.3053, 275.4492], [277.1295, 277.3866, 278.4672], [280.2007, \n 280.3929, 281.2602]]'], {'dtype': 'np.float32'}), '([[274.388, 275.3053, 275.4492], [277.1295, 277.3866, 278.4672], [\n 280.2007, 280.3929, 281.2602]], dtype=np.float32)\n', (7766, 7886), True, 'import numpy as np\n'), ((8033, 8100), 'numpy.array', 'np.array', (['[277.7531, 277.4529, 277.553, 277.2528]'], {'dtype': 'np.float32'}), '([277.7531, 277.4529, 277.553, 277.2528], dtype=np.float32)\n', (8041, 8100), True, 'import numpy as np\n'), ((8180, 8220), 'numpy.array', 'np.array', (['[0, 0, 0, 0]'], {'dtype': 'np.float32'}), '([0, 0, 0, 0], dtype=np.float32)\n', (8188, 8220), True, 'import numpy as np\n'), ((8287, 8424), 'numpy.array', 'np.array', (['[[275.18134, 276.18134, 277.01465], [278.58133, 279.44797, 280.2813], [\n 281.48132, 281.91464, 283.11465]]'], {'dtype': 'np.float32'}), '([[275.18134, 276.18134, 277.01465], [278.58133, 279.44797, \n 280.2813], [281.48132, 281.91464, 283.11465]], dtype=np.float32)\n', (8295, 8424), True, 'import numpy as np\n'), ((8564, 8675), 'numpy.array', 'np.array', (['[[0.4347, 0.4396, 0.0308], [0.0503, 0.0438, 0.0308], [0.1184, 0.2157, 0.0211]]'], {'dtype': 'np.float32'}), '([[0.4347, 0.4396, 0.0308], [0.0503, 0.0438, 0.0308], [0.1184, \n 0.2157, 0.0211]], dtype=np.float32)\n', (8572, 8675), True, 'import numpy as np\n'), ((8868, 9002), 'improver.synthetic_data.set_up_test_cubes.set_up_variable_cube', 'set_up_variable_cube', (['self.expected_loc_param_mean'], {'name': '"""location_parameter"""', 'units': '"""K"""', 'attributes': 'MANDATORY_ATTRIBUTE_DEFAULTS'}), "(self.expected_loc_param_mean, name=\n 'location_parameter', units='K', 
attributes=MANDATORY_ATTRIBUTE_DEFAULTS)\n", (8888, 9002), False, 'from improver.synthetic_data.set_up_test_cubes import set_up_variable_cube\n'), ((9103, 9242), 'improver.synthetic_data.set_up_test_cubes.set_up_variable_cube', 'set_up_variable_cube', (['self.expected_scale_param_mean'], {'name': '"""scale_parameter"""', 'units': '"""Kelvin^2"""', 'attributes': 'MANDATORY_ATTRIBUTE_DEFAULTS'}), "(self.expected_scale_param_mean, name='scale_parameter',\n units='Kelvin^2', attributes=MANDATORY_ATTRIBUTE_DEFAULTS)\n", (9123, 9242), False, 'from improver.synthetic_data.set_up_test_cubes import set_up_variable_cube\n'), ((9462, 9470), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (9468, 9470), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((9620, 9652), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {'predictor': '"""realizations"""'}), "(predictor='realizations')\n", (9626, 9652), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((10438, 10446), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (10444, 10446), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((13096, 13104), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (13102, 13104), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((13838, 13937), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['location_parameter', 'self.expected_loc_param_realizations'], {'decimal': '(0)'}), '(location_parameter, self.\n expected_loc_param_realizations, decimal=0)\n', (13863, 13937), 
False, 'from numpy.testing import assert_array_almost_equal\n'), ((14896, 14904), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (14902, 14904), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((15686, 15776), 'numpy.testing.assert_array_almost_equal', 'assert_array_almost_equal', (['location_parameter', 'self.expected_loc_param_mean'], {'decimal': '(0)'}), '(location_parameter, self.expected_loc_param_mean,\n decimal=0)\n', (15711, 15776), False, 'from numpy.testing import assert_array_almost_equal\n'), ((16065, 16073), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (16071, 16073), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((16831, 16839), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (16837, 16839), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((17753, 17761), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (17759, 17761), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((22200, 22232), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {'predictor': '"""realizations"""'}), "(predictor='realizations')\n", (22206, 22232), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((24005, 24048), 'numpy.array', 'np.array', (['[[1, 0, 0], [0, 1, 0], [0, 0, 1]]'], {}), '([[1, 0, 0], [0, 1, 0], [0, 0, 1]])\n', (24013, 24048), True, 'import numpy as np\n'), ((24309, 24382), 'numpy.array', 'np.array', (['[[False, True, 
True], [True, False, True], [True, True, False]]'], {}), '([[False, True, True], [True, False, True], [True, True, False]])\n', (24317, 24382), True, 'import numpy as np\n'), ((3865, 3916), 'iris.cube.CubeList', 'CubeList', (['[self.historic_temperature_forecast_cube]'], {}), '([self.historic_temperature_forecast_cube])\n', (3873, 3916), False, 'from iris.cube import CubeList\n'), ((4500, 4535), 'iris.cube.CubeList', 'CubeList', (['[forecast_timeshift_cube]'], {}), '([forecast_timeshift_cube])\n', (4508, 4535), False, 'from iris.cube import CubeList\n'), ((5132, 5183), 'iris.cube.CubeList', 'CubeList', (['[self.historic_temperature_forecast_cube]'], {}), '([self.historic_temperature_forecast_cube])\n', (5140, 5183), False, 'from iris.cube import CubeList\n'), ((5631, 5682), 'iris.cube.CubeList', 'CubeList', (['[self.historic_temperature_forecast_cube]'], {}), '([self.historic_temperature_forecast_cube])\n', (5639, 5682), False, 'from iris.cube import CubeList\n'), ((6357, 6408), 'iris.cube.CubeList', 'CubeList', (['[self.historic_temperature_forecast_cube]'], {}), '([self.historic_temperature_forecast_cube])\n', (6365, 6408), False, 'from iris.cube import CubeList\n'), ((6554, 6587), 'numpy.ones', 'np.ones', (['(3, 3)'], {'dtype': 'np.float32'}), '((3, 3), dtype=np.float32)\n', (6561, 6587), True, 'import numpy as np\n'), ((7063, 7129), 'iris.cube.CubeList', 'CubeList', (['[self.historic_temperature_forecast_cube, self.altitude]'], {}), '([self.historic_temperature_forecast_cube, self.altitude])\n', (7071, 7129), False, 'from iris.cube import CubeList\n'), ((9871, 9879), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {}), '()\n', (9877, 9879), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((10100, 10132), 'improver.calibration.ensemble_calibration.CalibratedForecastDistributionParameters', 'Plugin', ([], {'predictor': '"""realizations"""'}), 
"(predictor='realizations')\n", (10106, 10132), True, 'from improver.calibration.ensemble_calibration import CalibratedForecastDistributionParameters as Plugin\n'), ((5922, 5939), 'numpy.squeeze', 'np.squeeze', (['array'], {}), '(array)\n', (5932, 5939), True, 'import numpy as np\n'), ((23259, 23284), 'iris.cube.CubeList', 'CubeList', (['[self.altitude]'], {}), '([self.altitude])\n', (23267, 23284), False, 'from iris.cube import CubeList\n'), ((4856, 4900), 'numpy.stack', 'np.stack', (['([self.expected_mean_pred_norm] * 9)'], {}), '([self.expected_mean_pred_norm] * 9)\n', (4864, 4900), True, 'import numpy as np\n')] |
"Test handling/parsing of various DAZ Studio files"
from pathlib import Path
from tempfile import NamedTemporaryFile
from django.apps import apps
from rendaz.daztools import (
DSONFile,
ProductMeta,
manifest_files,
supplement_product_name,
)
TEST_DIR = Path(__file__).parent
def test_read_dson_compressed():
    """Test reading compressed DSON files."""
    source = TEST_DIR / "Sphere-compressed.duf"
    dson_file = DSONFile(path=str(source))
    assert dson_file.path.name == "Sphere-compressed.duf"
    assert dson_file.is_compressed
    assert "asset_info" in dson_file.dson
def test_read_dson_uncompressed():
    """Test reading uncompressed DSON files."""
    source = TEST_DIR / "Sphere-uncompressed.duf"
    dson_file = DSONFile(path=str(source))
    assert dson_file.path.name == "Sphere-uncompressed.duf"
    assert dson_file.is_compressed is False
    assert "asset_info" in dson_file.dson
def test_save_dson_compressed():
    """Test write round trip, read uncompressed, write compressed, read back."""
    fname = TEST_DIR / "Sphere-uncompressed.duf"
    duf = DSONFile(path=str(fname))
    # Use the context manager so the handle is closed even if creation
    # fails partway; the file must be closed before DSONFile.save()
    # rewrites it (needed for portability, e.g. on Windows).
    with NamedTemporaryFile(mode="wt", delete=False) as out:
        tmpname = out.name
    try:
        duf.save(tmpname, compress=True)
        new = DSONFile(tmpname)
        assert new.is_compressed
        assert "asset_info" in new.dson
    finally:
        # Always remove the temporary file, even when an assertion fails.
        Path(tmpname).unlink()
def test_save_dson_uncompressed():
    """Test write round trip, read compressed, write uncompressed, read back."""
    fname = TEST_DIR / "Sphere-compressed.duf"
    duf = DSONFile(path=str(fname))
    # Use the context manager so the handle is closed even if creation
    # fails partway; the file must be closed before DSONFile.save()
    # rewrites it (needed for portability, e.g. on Windows).
    with NamedTemporaryFile(mode="wt", delete=False) as out:
        tmpname = out.name
    try:
        duf.save(tmpname, compress=False)
        new = DSONFile(tmpname)
        assert new.is_compressed is False
        assert "asset_info" in new.dson
    finally:
        # Always remove the temporary file, even when an assertion fails.
        Path(tmpname).unlink()
def test_productmetafile_defaults():
    """ProductMeta starts with empty set-valued file attributes."""
    # The return value was never used; the call is kept for its side effect
    # (presumably ensuring the "production" app is registered -- it raises
    # if the Django app registry is not ready; TODO confirm).
    apps.get_app_config("production")
    it = ProductMeta(product_id="THETHING", stem_product_name="THETHING")
    assert it.product_id == "THETHING"
    assert isinstance(it.cms_files, set)
    assert isinstance(it.dim_manifest_files, set)
    assert isinstance(it.included_files, set)
def test_manifest_files():
expected = [
"Content/People/Genesis 8 Female/Characters/Aakash.duf",
"Content/People/Genesis 8 Female/Characters/Aakash.duf.png",
"Content/Runtime/Support/DAZ_3D_60599_Aakash_HD_for_Kala_8.dsa",
"Content/Runtime/Support/DAZ_3D_60599_Aakash_HD_for_Kala_8.dsx",
"Content/Runtime/Support/DAZ_3D_60599_Aakash_HD_for_Kala_8.jpg",
]
fname = TEST_DIR / "Manifest.dsx"
actual = list(manifest_files(fname))
assert actual == expected
def test_supplement_product_name():
expected = "Aakash HD for Kala 8"
fname = TEST_DIR / "Supplement.dsx"
actual = supplement_product_name(fname)
assert actual == expected
| [
"rendaz.daztools.DSONFile",
"pathlib.Path",
"rendaz.daztools.manifest_files",
"django.apps.apps.get_app_config",
"rendaz.daztools.ProductMeta",
"tempfile.NamedTemporaryFile",
"rendaz.daztools.supplement_product_name"
] | [((274, 288), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (278, 288), False, 'from pathlib import Path\n'), ((1071, 1114), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""wt"""', 'delete': '(False)'}), "(mode='wt', delete=False)\n", (1089, 1114), False, 'from tempfile import NamedTemporaryFile\n'), ((1560, 1603), 'tempfile.NamedTemporaryFile', 'NamedTemporaryFile', ([], {'mode': '"""wt"""', 'delete': '(False)'}), "(mode='wt', delete=False)\n", (1578, 1603), False, 'from tempfile import NamedTemporaryFile\n'), ((1909, 1942), 'django.apps.apps.get_app_config', 'apps.get_app_config', (['"""production"""'], {}), "('production')\n", (1928, 1942), False, 'from django.apps import apps\n'), ((1952, 2016), 'rendaz.daztools.ProductMeta', 'ProductMeta', ([], {'product_id': '"""THETHING"""', 'stem_product_name': '"""THETHING"""'}), "(product_id='THETHING', stem_product_name='THETHING')\n", (1963, 2016), False, 'from rendaz.daztools import DSONFile, ProductMeta, manifest_files, supplement_product_name\n'), ((2836, 2866), 'rendaz.daztools.supplement_product_name', 'supplement_product_name', (['fname'], {}), '(fname)\n', (2859, 2866), False, 'from rendaz.daztools import DSONFile, ProductMeta, manifest_files, supplement_product_name\n'), ((1219, 1236), 'rendaz.daztools.DSONFile', 'DSONFile', (['tmpname'], {}), '(tmpname)\n', (1227, 1236), False, 'from rendaz.daztools import DSONFile, ProductMeta, manifest_files, supplement_product_name\n'), ((1709, 1726), 'rendaz.daztools.DSONFile', 'DSONFile', (['tmpname'], {}), '(tmpname)\n', (1717, 1726), False, 'from rendaz.daztools import DSONFile, ProductMeta, manifest_files, supplement_product_name\n'), ((2654, 2675), 'rendaz.daztools.manifest_files', 'manifest_files', (['fname'], {}), '(fname)\n', (2668, 2675), False, 'from rendaz.daztools import DSONFile, ProductMeta, manifest_files, supplement_product_name\n'), ((1331, 1344), 'pathlib.Path', 'Path', (['tmpname'], {}), '(tmpname)\n', (1335, 
1344), False, 'from pathlib import Path\n'), ((1830, 1843), 'pathlib.Path', 'Path', (['tmpname'], {}), '(tmpname)\n', (1834, 1843), False, 'from pathlib import Path\n')] |
from django.db import models
# Create your models here.
class Project(models.Model):
title = models.CharField(max_length = 200, verbose_name = "Titulo")
description = models.TextField(verbose_name="Descripcion")
image = models.ImageField(verbose_name="Imagen", upload_to = "projects")
link = models.URLField(null=True, blank=True, verbose_name="Direecion web")
created = models.DateTimeField(auto_now_add=True, verbose_name="Fecha de creacion")
updated = models.DateTimeField(auto_now=True, verbose_name="Fecha de actualizacion")
class Meta:
verbose_name = 'proyecto'
verbose_name_plural = 'proyectos'
ordering = ['-created']
def __str__(self):
return self.title | [
"django.db.models.TextField",
"django.db.models.DateTimeField",
"django.db.models.ImageField",
"django.db.models.URLField",
"django.db.models.CharField"
] | [((102, 157), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'verbose_name': '"""Titulo"""'}), "(max_length=200, verbose_name='Titulo')\n", (118, 157), False, 'from django.db import models\n'), ((181, 225), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""Descripcion"""'}), "(verbose_name='Descripcion')\n", (197, 225), False, 'from django.db import models\n'), ((239, 301), 'django.db.models.ImageField', 'models.ImageField', ([], {'verbose_name': '"""Imagen"""', 'upload_to': '"""projects"""'}), "(verbose_name='Imagen', upload_to='projects')\n", (256, 301), False, 'from django.db import models\n'), ((316, 384), 'django.db.models.URLField', 'models.URLField', ([], {'null': '(True)', 'blank': '(True)', 'verbose_name': '"""Direecion web"""'}), "(null=True, blank=True, verbose_name='Direecion web')\n", (331, 384), False, 'from django.db import models\n'), ((400, 473), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Fecha de creacion"""'}), "(auto_now_add=True, verbose_name='Fecha de creacion')\n", (420, 473), False, 'from django.db import models\n'), ((489, 563), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Fecha de actualizacion"""'}), "(auto_now=True, verbose_name='Fecha de actualizacion')\n", (509, 563), False, 'from django.db import models\n')] |
import datetime
from .monzobalance import MonzoBalance
from .monzopagination import MonzoPaging
from .monzotransaction import MonzoTransaction
class MonzoAccount(object):
def __init__(self,api,json_dict=None):
self.api = api
self.account_id = None
self.created = None
self.description = None
self.account_type = None
self.balance = None
if json_dict:
self.account_id = json_dict.get("id",None)
self.description = json_dict.get("description",None)
self.account_type = json_dict.get("type",None)
self.created = json_dict.get("created",None)
if self.created:
self.created = datetime.datetime.strptime(self.created,"%Y-%m-%dT%H:%M:%S.%fZ")
@classmethod
def listAccounts(cls,api):
accounts = []
accounts_json = api.listAccounts()
for account_json in accounts_json["accounts"]:
account = cls(api,account_json)
accounts.append(account)
return accounts
@classmethod
def getAccount(cls,api,account_id):
accounts_json = api.listAccounts()
for account_json in accounts_json["accounts"]:
account = cls(api,account_json)
if account.account_id == account_id:
account.readBalance()
return account
return None
def readBalance(self):
balance_json = self.api.readBalance(self.account_id)
self.balance = MonzoBalance(self.api,balance_json)
return self.balance
def listTransactionsThisMonth(self,expand=None):
now = datetime.datetime.now()
this_month = now.replace(day=1,hour=0,minute=0,second=0,microsecond=0)
page = MonzoPaging()
page.set_since_date(this_month)
return self.listTransactions(page,expand)
def listTransactionsToday(self,expland=None):
now = datetime.datetime.now()
today = now.replace(hour=0,minute=0,second=0,microsecond=0)
page = MonzoPaging()
page.set_since_date(today)
return self.listTransactions(page,expand)
def listTransactionSinceDate(self,from_dt,expand=None):
page = MonzoPaging()
page.set_since_date(from_dt)
return self.listTransactions(page,expand)
def listTransactionsSinceTransaction(self,trans_id,expand=None):
page = MonzoPaging()
page.set_since_trans(trans_id)
return self.listTransactions(page,expand)
def listTransactionsBetween(self,since_dt,to_dt,expand=None):
page = MonzoPaging()
page.set_since_date(since_dt)
page.set_before(to_dt)
return self.listTransactions(page,expand)
def listTransactions(self,pagination=None,expand=None):
transactions = []
transactions_json = self.api.listTransactions(self.account_id,pagination,expand)
for transaction_json in transactions_json["transactions"]:
transaction = MonzoTransaction(self.api,transaction_json)
transactions.append(transaction)
return transactions
| [
"datetime.datetime.strptime",
"datetime.datetime.now"
] | [((1656, 1679), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1677, 1679), False, 'import datetime\n'), ((1943, 1966), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1964, 1966), False, 'import datetime\n'), ((718, 783), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['self.created', '"""%Y-%m-%dT%H:%M:%S.%fZ"""'], {}), "(self.created, '%Y-%m-%dT%H:%M:%S.%fZ')\n", (744, 783), False, 'import datetime\n')] |
from typing import Dict, Optional
from fastapi import status, Request
from fastapi.responses import JSONResponse
from fastapi.exceptions import RequestValidationError
from learninghouse.models import LearningHouseErrorMessage
MIMETYPE_JSON = 'application/json'
class LearningHouseException(Exception):
STATUS_CODE = status.HTTP_500_INTERNAL_SERVER_ERROR
UNKNOWN = 'UNKNOWN'
DESCRIPTION = 'An unknown exception occurred ' +\
'while handling your request.'
def __init__(self,
status_code: Optional[int] = None,
key: Optional[str] = None,
description: Optional[str] = None):
super().__init__()
self.http_status_code: int = status_code or self.STATUS_CODE
self.error: LearningHouseErrorMessage = LearningHouseErrorMessage(
error=key or self.UNKNOWN,
description=description or self.DESCRIPTION
)
def response(self) -> JSONResponse:
return JSONResponse(content=self.error.dict(), status_code=self.http_status_code)
@classmethod
def api_description(cls) -> Dict:
return {
'model': LearningHouseErrorMessage,
'description': 'An exception occured which is not handled by the service now. ' +
'Please write an issue on GitHub.',
'content': {
MIMETYPE_JSON: {
'example': {
'error': cls.UNKNOWN,
'description': cls.DESCRIPTION
}
}
}
}
class LearningHouseSecurityException(LearningHouseException):
STATUS_CODE = status.HTTP_403_FORBIDDEN
SECURITY_EXCEPTION = 'SECURITY_EXCEPTION'
DESCRIPTION = 'A security violation occured while handling your request.'
def __init__(self, description: str):
super().__init__(self.STATUS_CODE,
self.SECURITY_EXCEPTION,
description or self.DESCRIPTION)
@classmethod
def api_description(cls) -> Dict:
return {
'model': LearningHouseErrorMessage,
'description': 'The request didn\'t pass security checks.',
'content': {
MIMETYPE_JSON: {
'example': {
'error': cls.SECURITY_EXCEPTION,
'description': cls.DESCRIPTION
}
}
}
}
class LearningHouseValidationError(LearningHouseException):
STATUS_CODE = status.HTTP_422_UNPROCESSABLE_ENTITY
VALIDATION_ERROR = 'VALIDATION_ERROR'
DESCRIPTION = 'A validation error occurred while handling your request.'
def __init__(self, description: Optional[str] = None):
super().__init__(self.STATUS_CODE,
self.VALIDATION_ERROR,
description or self.DESCRIPTION)
@classmethod
def api_description(cls) -> Dict:
return {
'model': LearningHouseErrorMessage,
'description': 'The request didn\'t pass input validation',
'content': {
MIMETYPE_JSON: {
'example': {
'error': cls.VALIDATION_ERROR,
'description': cls.DESCRIPTION
}
}
}
}
async def validation_error_handler(request: Request, exc: RequestValidationError) -> JSONResponse: # pylint: disable=unused-argument
return LearningHouseValidationError(str(exc)).response()
async def learninghouse_exception_handler(request: Request, exc: LearningHouseException): # pylint: disable=unused-argument
return exc.response()
| [
"learninghouse.models.LearningHouseErrorMessage"
] | [((797, 899), 'learninghouse.models.LearningHouseErrorMessage', 'LearningHouseErrorMessage', ([], {'error': '(key or self.UNKNOWN)', 'description': '(description or self.DESCRIPTION)'}), '(error=key or self.UNKNOWN, description=\n description or self.DESCRIPTION)\n', (822, 899), False, 'from learninghouse.models import LearningHouseErrorMessage\n')] |
from pathlib import Path
from toolz import itertoolz, curried
import vaex
transform_path_to_posix = lambda path: path.as_posix()
def path_to_posix():
return curried.valmap(transform_path_to_posix)
transform_xlsx_to_vaex = lambda path: vaex.from_ascii(path, seperator="\t")
def xlsx_to_vaex():
return curried.valmap(transform_ascii_to_vaex)
transform_ascii_to_vaex = lambda path: vaex.from_ascii(path, seperator="\t")
def ascii_to_vaex():
return curried.valmap(transform_ascii_to_vaex)
transform_ascii_to_vaex2 = lambda path: vaex.from_ascii(path)
def ascii_to_vaex2():
return curried.valmap(transform_ascii_to_vaex2)
transform_vaex_to_list = lambda df: [itertoolz.second(x) for x in df.iterrows()]
def vaex_rows_to_list():
return curried.valmap(transform_vaex_to_list)
transform_vaex_to_dict = lambda df: df.to_dict()
def vaex_to_dict():
return curried.valmap(transform_vaex_to_dict)
| [
"toolz.curried.valmap",
"toolz.itertoolz.second",
"vaex.from_ascii"
] | [((165, 204), 'toolz.curried.valmap', 'curried.valmap', (['transform_path_to_posix'], {}), '(transform_path_to_posix)\n', (179, 204), False, 'from toolz import itertoolz, curried\n'), ((245, 282), 'vaex.from_ascii', 'vaex.from_ascii', (['path'], {'seperator': '"""\t"""'}), "(path, seperator='\\t')\n", (260, 282), False, 'import vaex\n'), ((316, 355), 'toolz.curried.valmap', 'curried.valmap', (['transform_ascii_to_vaex'], {}), '(transform_ascii_to_vaex)\n', (330, 355), False, 'from toolz import itertoolz, curried\n'), ((397, 434), 'vaex.from_ascii', 'vaex.from_ascii', (['path'], {'seperator': '"""\t"""'}), "(path, seperator='\\t')\n", (412, 434), False, 'import vaex\n'), ((469, 508), 'toolz.curried.valmap', 'curried.valmap', (['transform_ascii_to_vaex'], {}), '(transform_ascii_to_vaex)\n', (483, 508), False, 'from toolz import itertoolz, curried\n'), ((551, 572), 'vaex.from_ascii', 'vaex.from_ascii', (['path'], {}), '(path)\n', (566, 572), False, 'import vaex\n'), ((608, 648), 'toolz.curried.valmap', 'curried.valmap', (['transform_ascii_to_vaex2'], {}), '(transform_ascii_to_vaex2)\n', (622, 648), False, 'from toolz import itertoolz, curried\n'), ((770, 808), 'toolz.curried.valmap', 'curried.valmap', (['transform_vaex_to_list'], {}), '(transform_vaex_to_list)\n', (784, 808), False, 'from toolz import itertoolz, curried\n'), ((893, 931), 'toolz.curried.valmap', 'curried.valmap', (['transform_vaex_to_dict'], {}), '(transform_vaex_to_dict)\n', (907, 931), False, 'from toolz import itertoolz, curried\n'), ((688, 707), 'toolz.itertoolz.second', 'itertoolz.second', (['x'], {}), '(x)\n', (704, 707), False, 'from toolz import itertoolz, curried\n')] |
# Copyright 2018-2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import TYPE_CHECKING, List, Optional, Tuple
from synapse.api.errors import SynapseError
from synapse.handlers.room_member import RoomMemberHandler
from synapse.replication.http.membership import (
ReplicationRemoteJoinRestServlet as ReplRemoteJoin,
ReplicationRemoteKnockRestServlet as ReplRemoteKnock,
ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite,
ReplicationRemoteRescindKnockRestServlet as ReplRescindKnock,
ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft,
)
from synapse.types import JsonDict, Requester, UserID
if TYPE_CHECKING:
from synapse.server import HomeServer
logger = logging.getLogger(__name__)
class RoomMemberWorkerHandler(RoomMemberHandler):
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self._remote_join_client = ReplRemoteJoin.make_client(hs)
self._remote_knock_client = ReplRemoteKnock.make_client(hs)
self._remote_reject_client = ReplRejectInvite.make_client(hs)
self._remote_rescind_client = ReplRescindKnock.make_client(hs)
self._notify_change_client = ReplJoinedLeft.make_client(hs)
async def _remote_join(
self,
requester: Requester,
remote_room_hosts: List[str],
room_id: str,
user: UserID,
content: dict,
) -> Tuple[str, int]:
"""Implements RoomMemberHandler._remote_join"""
if len(remote_room_hosts) == 0:
raise SynapseError(404, "No known servers")
ret = await self._remote_join_client(
requester=requester,
remote_room_hosts=remote_room_hosts,
room_id=room_id,
user_id=user.to_string(),
content=content,
)
return ret["event_id"], ret["stream_id"]
async def remote_reject_invite(
self,
invite_event_id: str,
txn_id: Optional[str],
requester: Requester,
content: dict,
) -> Tuple[str, int]:
"""
Rejects an out-of-band invite received from a remote user
Implements RoomMemberHandler.remote_reject_invite
"""
ret = await self._remote_reject_client(
invite_event_id=invite_event_id,
txn_id=txn_id,
requester=requester,
content=content,
)
return ret["event_id"], ret["stream_id"]
async def remote_rescind_knock(
self,
knock_event_id: str,
txn_id: Optional[str],
requester: Requester,
content: JsonDict,
) -> Tuple[str, int]:
"""
Rescinds a local knock made on a remote room
Args:
knock_event_id: the knock event
txn_id: optional transaction ID supplied by the client
requester: user making the request, according to the access token
content: additional content to include in the leave event.
Normally an empty dict.
Returns:
A tuple containing (event_id, stream_id of the leave event)
"""
ret = await self._remote_rescind_client(
knock_event_id=knock_event_id,
txn_id=txn_id,
requester=requester,
content=content,
)
return ret["event_id"], ret["stream_id"]
async def remote_knock(
self,
remote_room_hosts: List[str],
room_id: str,
user: UserID,
content: dict,
) -> Tuple[str, int]:
"""Sends a knock to a room.
Implements RoomMemberHandler.remote_knock
"""
ret = await self._remote_knock_client(
remote_room_hosts=remote_room_hosts,
room_id=room_id,
user=user,
content=content,
)
return ret["event_id"], ret["stream_id"]
async def _user_left_room(self, target: UserID, room_id: str) -> None:
"""Implements RoomMemberHandler._user_left_room"""
await self._notify_change_client(
user_id=target.to_string(), room_id=room_id, change="left"
)
async def forget(self, target: UserID, room_id: str) -> None:
raise RuntimeError("Cannot forget rooms on workers.")
| [
"logging.getLogger",
"synapse.replication.http.membership.ReplicationRemoteRejectInviteRestServlet.make_client",
"synapse.replication.http.membership.ReplicationUserJoinedLeftRoomRestServlet.make_client",
"synapse.replication.http.membership.ReplicationRemoteRescindKnockRestServlet.make_client",
"synapse.re... | [((1264, 1291), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1281, 1291), False, 'import logging\n'), ((1451, 1481), 'synapse.replication.http.membership.ReplicationRemoteJoinRestServlet.make_client', 'ReplRemoteJoin.make_client', (['hs'], {}), '(hs)\n', (1477, 1481), True, 'from synapse.replication.http.membership import ReplicationRemoteJoinRestServlet as ReplRemoteJoin, ReplicationRemoteKnockRestServlet as ReplRemoteKnock, ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, ReplicationRemoteRescindKnockRestServlet as ReplRescindKnock, ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft\n'), ((1518, 1549), 'synapse.replication.http.membership.ReplicationRemoteKnockRestServlet.make_client', 'ReplRemoteKnock.make_client', (['hs'], {}), '(hs)\n', (1545, 1549), True, 'from synapse.replication.http.membership import ReplicationRemoteJoinRestServlet as ReplRemoteJoin, ReplicationRemoteKnockRestServlet as ReplRemoteKnock, ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, ReplicationRemoteRescindKnockRestServlet as ReplRescindKnock, ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft\n'), ((1587, 1619), 'synapse.replication.http.membership.ReplicationRemoteRejectInviteRestServlet.make_client', 'ReplRejectInvite.make_client', (['hs'], {}), '(hs)\n', (1615, 1619), True, 'from synapse.replication.http.membership import ReplicationRemoteJoinRestServlet as ReplRemoteJoin, ReplicationRemoteKnockRestServlet as ReplRemoteKnock, ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, ReplicationRemoteRescindKnockRestServlet as ReplRescindKnock, ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft\n'), ((1658, 1690), 'synapse.replication.http.membership.ReplicationRemoteRescindKnockRestServlet.make_client', 'ReplRescindKnock.make_client', (['hs'], {}), '(hs)\n', (1686, 1690), True, 'from synapse.replication.http.membership import ReplicationRemoteJoinRestServlet as ReplRemoteJoin, 
ReplicationRemoteKnockRestServlet as ReplRemoteKnock, ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, ReplicationRemoteRescindKnockRestServlet as ReplRescindKnock, ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft\n'), ((1728, 1758), 'synapse.replication.http.membership.ReplicationUserJoinedLeftRoomRestServlet.make_client', 'ReplJoinedLeft.make_client', (['hs'], {}), '(hs)\n', (1754, 1758), True, 'from synapse.replication.http.membership import ReplicationRemoteJoinRestServlet as ReplRemoteJoin, ReplicationRemoteKnockRestServlet as ReplRemoteKnock, ReplicationRemoteRejectInviteRestServlet as ReplRejectInvite, ReplicationRemoteRescindKnockRestServlet as ReplRescindKnock, ReplicationUserJoinedLeftRoomRestServlet as ReplJoinedLeft\n'), ((2077, 2114), 'synapse.api.errors.SynapseError', 'SynapseError', (['(404)', '"""No known servers"""'], {}), "(404, 'No known servers')\n", (2089, 2114), False, 'from synapse.api.errors import SynapseError\n')] |
from datetime import datetime
from pathlib import Path
import pytz
import kobuddy
def get_test_db():
# db = Path(__file__).absolute().parent.parent / 'KoboShelfes' / 'KoboReader.sqlite.0'
db = Path(__file__).absolute().parent / 'data' / 'kobo_notes' / 'input' / 'KoboReader.sqlite'
return db
# a bit meh, but ok for now
kobuddy.set_databases(get_test_db())
from kobuddy import _iter_events_aux, get_events, get_books_with_highlights, _iter_highlights
def test_events():
for e in _iter_events_aux():
print(e)
def test_hls():
for h in _iter_highlights():
print(h)
def test_get_all():
events = get_events()
assert len(events) > 50
for d in events:
print(d)
def test_books_with_highlights():
pages = get_books_with_highlights()
g = pages[0]
assert 'Essentialism' in g.book
hls = g.highlights
assert len(hls) == 273
[b] = [h for h in hls if h.eid == '520b7b13-dbef-4402-9a81-0f4e0c4978de']
# TODO wonder if there might be any useful info? StartContainerPath, EndContainerPath
assert b.kind == 'bookmark'
# TODO move to a more specific test?
# TODO assert sorted by date or smth?
assert hls[0].kind == 'highlight'
# TODO assert highlights got no annotation? not sure if it's even necessary to distinguish..
[ann] = [h for h in hls if h.annotation is not None and len(h.annotation) > 0]
assert ann.eid == 'eb264817-9a06-42fd-92ff-7bd38cd9ca79'
assert ann.kind == 'annotation'
assert ann.text == 'He does this by finding which machine has the biggest queue of materials waiting behind it and finds a way to increase its efficiency.'
assert ann.annotation == 'Bottleneck'
assert ann.dt == datetime(year=2017, month=8, day=12, hour=3, minute=49, second=13, microsecond=0, tzinfo=pytz.utc)
assert ann.book.author == '<NAME>'
assert len(pages) == 7
def test_history():
kobuddy.print_progress()
def test_annotations():
kobuddy.print_annotations()
def test_books():
kobuddy.print_books()
| [
"datetime.datetime",
"kobuddy._iter_events_aux",
"kobuddy.print_books",
"pathlib.Path",
"kobuddy._iter_highlights",
"kobuddy.print_annotations",
"kobuddy.get_books_with_highlights",
"kobuddy.get_events",
"kobuddy.print_progress"
] | [((501, 519), 'kobuddy._iter_events_aux', '_iter_events_aux', ([], {}), '()\n', (517, 519), False, 'from kobuddy import _iter_events_aux, get_events, get_books_with_highlights, _iter_highlights\n'), ((569, 587), 'kobuddy._iter_highlights', '_iter_highlights', ([], {}), '()\n', (585, 587), False, 'from kobuddy import _iter_events_aux, get_events, get_books_with_highlights, _iter_highlights\n'), ((641, 653), 'kobuddy.get_events', 'get_events', ([], {}), '()\n', (651, 653), False, 'from kobuddy import _iter_events_aux, get_events, get_books_with_highlights, _iter_highlights\n'), ((768, 795), 'kobuddy.get_books_with_highlights', 'get_books_with_highlights', ([], {}), '()\n', (793, 795), False, 'from kobuddy import _iter_events_aux, get_events, get_books_with_highlights, _iter_highlights\n'), ((1917, 1941), 'kobuddy.print_progress', 'kobuddy.print_progress', ([], {}), '()\n', (1939, 1941), False, 'import kobuddy\n'), ((1971, 1998), 'kobuddy.print_annotations', 'kobuddy.print_annotations', ([], {}), '()\n', (1996, 1998), False, 'import kobuddy\n'), ((2022, 2043), 'kobuddy.print_books', 'kobuddy.print_books', ([], {}), '()\n', (2041, 2043), False, 'import kobuddy\n'), ((1725, 1827), 'datetime.datetime', 'datetime', ([], {'year': '(2017)', 'month': '(8)', 'day': '(12)', 'hour': '(3)', 'minute': '(49)', 'second': '(13)', 'microsecond': '(0)', 'tzinfo': 'pytz.utc'}), '(year=2017, month=8, day=12, hour=3, minute=49, second=13,\n microsecond=0, tzinfo=pytz.utc)\n', (1733, 1827), False, 'from datetime import datetime\n'), ((203, 217), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (207, 217), False, 'from pathlib import Path\n')] |
from django.shortcuts import render
from .models import Tank
from django.db import models
from django.http import HttpResponse
from django.views import View
# Create your views here.
# The view for the created model Tank
def tank_view(request):
queryset = Tank.objects.all()
context = {
'object': queryset
}
return render(request, "tankbattle.html", context)
def tank_1(request, pk):
queryset = Tank.objects.get(pk=1)
context = {
'object': queryset
}
return render(request, 'tankbattle.html', context)
def tank_2(request, pk):
queryset = Tank.objects.get(pk=2)
context = {
'object': queryset
}
return render(request, 'tankbattle.html', context)
def tank_3(request, pk):
queryset = Tank.objects.get(pk=3)
context = {
'object': queryset
}
return render(request, 'tankbattle.html', context)
def tank_4(request, pk):
queryset = Tank.objects.get(pk=4)
context = {
'object': queryset
}
return render(request, 'tankbattle.html', context)
| [
"django.shortcuts.render"
] | [((342, 385), 'django.shortcuts.render', 'render', (['request', '"""tankbattle.html"""', 'context'], {}), "(request, 'tankbattle.html', context)\n", (348, 385), False, 'from django.shortcuts import render\n'), ((511, 554), 'django.shortcuts.render', 'render', (['request', '"""tankbattle.html"""', 'context'], {}), "(request, 'tankbattle.html', context)\n", (517, 554), False, 'from django.shortcuts import render\n'), ((680, 723), 'django.shortcuts.render', 'render', (['request', '"""tankbattle.html"""', 'context'], {}), "(request, 'tankbattle.html', context)\n", (686, 723), False, 'from django.shortcuts import render\n'), ((849, 892), 'django.shortcuts.render', 'render', (['request', '"""tankbattle.html"""', 'context'], {}), "(request, 'tankbattle.html', context)\n", (855, 892), False, 'from django.shortcuts import render\n'), ((1018, 1061), 'django.shortcuts.render', 'render', (['request', '"""tankbattle.html"""', 'context'], {}), "(request, 'tankbattle.html', context)\n", (1024, 1061), False, 'from django.shortcuts import render\n')] |
import os
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import argparse
from tqdm import tqdm
import sys
import distributed as dist
import utils
from models.vqvae import VQVAE, VQVAE_Blob2Full
from models.discriminator import discriminator
# Directory where reconstruction samples are written during training/eval.
visual_folder = '/home2/bipasha31/python_scripts/CurrentWork/samples/VQVAE'
os.makedirs(visual_folder, exist_ok=True)

verbose = False  # when True, the model emits extra per-forward diagnostics
# Monotonically increasing counter used to tag saved sample images.
save_idx_global = 0
save_at = 100  # visualize/save samples every `save_at` training updates
did = 0  # NOTE(review): appears unused in this chunk -- confirm before removing

# Selects which loop `main` runs: plain VQ-VAE ('vae') or VQ-VAE + GAN ('gan').
models = {
    'gan': 0,
    'vae': 1
}
model_to_train = models['vae']

# Rolling training metrics; mutated in place by the train loops and
# periodically persisted via utils.save_model_and_results.
results = {
    'n_updates': 0,
    'recon_errors': [],
    'loss_vals': [],
    'perplexities': [],
    'd_loss': []
}

device = 'cuda:0'
def main(args):
    """Build the VQ-VAE, optionally restore a checkpoint, then test or train.

    Components are defined in the ./models/ folder. Behaviour is driven by
    `args`: `--test` runs evaluation only; otherwise the loop selected by the
    module-level `model_to_train` flag is executed.
    """
    net = VQVAE(
        args.n_hiddens,
        args.n_residual_hiddens,
        args.n_residual_layers,
        args.n_embeddings,
        args.embedding_dim,
        args.beta,
        device,
    )

    if args.ckpt:
        checkpoint = torch.load(args.ckpt)
        net.load_state_dict(checkpoint['model'])

    net = net.to(device)

    # Evaluation-only path: dump reconstructions for the test split and exit.
    if args.test:
        test_loader = utils.load_data_and_data_loaders(args.dataset, args.batch_size, test=True)
        test(test_loader, net)
        return

    # Training path: unpack the loaders and the training-set variance used to
    # normalise the reconstruction loss.
    data = utils.load_data_and_data_loaders(args.dataset, args.batch_size)
    training_loader = data[2]
    validation_loader = data[3]
    x_train_var = data[4]

    opt = optim.Adam(net.parameters(), lr=args.learning_rate, amsgrad=True)
    net.train()

    # Dispatch to the configured training loop.
    trainer = train_vqgan if model_to_train == models['gan'] else train
    trainer(args, training_loader, validation_loader, x_train_var, net, opt)
def test(loader, model):
    """Run `model` over every batch in `loader`, saving reconstructions.

    Each batch gets its loop index as the save tag; outputs land in the
    module-level `visual_folder`.
    """
    for batch_idx, (x, _) in enumerate(tqdm(loader)):
        x = x.to(device)
        with torch.no_grad():
            model(x, save_idx=f'{batch_idx}', visual_folder=visual_folder)
def train(args, training_loader, validation_loader, x_train_var, model, optimizer):
    """Plain VQ-VAE training loop (no adversarial term).

    Runs `args.n_updates` optimisation steps, appending scalar metrics to
    the module-level `results` dict. Every `save_at` steps it renders 10
    validation batches of reconstructions; every `args.log_interval` steps
    (on the primary process) it saves the model/results and prints metrics.
    """
    global save_idx_global

    for i in range(args.n_updates):
        # NOTE(review): iter() is rebuilt every step, so this takes the first
        # batch of a fresh iterator each time -- it only samples different
        # data if the loader shuffles. Confirm this pattern is intended.
        (x, _) = next(iter(training_loader))
        x = x.to(device)
        optimizer.zero_grad()

        save_idx = None

        # Forward pass: codebook (embedding) loss, reconstruction, perplexity.
        embedding_loss, x_hat, perplexity = model(x)
        # MSE reconstruction loss, normalised by the training-set variance.
        recon_loss = torch.mean((x_hat - x)**2) / x_train_var
        loss = recon_loss + embedding_loss

        loss.backward()
        optimizer.step()

        # Accumulate scalar metrics for the logging/saving blocks below.
        results["recon_errors"].append(recon_loss.cpu().detach().numpy())
        results["perplexities"].append(perplexity.cpu().detach().numpy())
        results["loss_vals"].append(loss.cpu().detach().numpy())
        results["n_updates"] = i

        if i % save_at == 0:
            # Render validation reconstructions, tagged with a globally
            # increasing sample index so runs don't overwrite each other.
            save_idx = save_idx_global
            save_idx_global += 1

            model.eval()

            with torch.no_grad():
                for vi in tqdm(range(10)):
                    (x, _) = next(iter(validation_loader))
                    x = x.to(device)

                    _, _, _ = model(x, verbose=verbose, save_idx=f'{save_idx}_{vi}', visual_folder=visual_folder)

            model.train()

        # Only the primary process (distributed training) saves and prints.
        if i % args.log_interval == 0 and dist.is_primary():
            """
            save model and print values
            """
            if args.save:
                hyperparameters = args.__dict__
                utils.save_model_and_results(
                    model, optimizer, results, hyperparameters, args.filename)

            print('Update #', i, 'Recon Error:',
                  np.mean(results["recon_errors"][-args.log_interval:]),
                  'Loss', np.mean(results["loss_vals"][-args.log_interval:]),
                  'Perplexity:', np.mean(results["perplexities"][-args.log_interval:]))
def train_vqgan(args, training_loader, validation_loader, x_train_var, model, optimizer):
    """VQ-VAE training loop with an adversarial (patch discriminator) term.

    Alternates per iteration: even steps update the generator with
    reconstruction + codebook + adversarial loss; odd steps update the
    discriminator with a real-vs-fake MSE loss.  Metrics accumulate in the
    module-level ``results`` dict; every ``log_interval`` steps the model
    is optionally checkpointed and running means are printed.
    """
    global save_idx_global
    c_mse = nn.MSELoss()
    disc = discriminator().to(device)
    optim_D = optim.Adam(disc.parameters(), lr=args.learning_rate, amsgrad=True)
    for i in range(args.n_updates):
        (x, _) = next(iter(training_loader))
        x = x.to(device)
        optimizer.zero_grad()
        optim_D.zero_grad()
        save_idx = None
        # Unlike train(), no visualisation is saved on the very first step.
        if i % save_at == 0 and i > 0:
            save_idx = save_idx_global
            save_idx_global += 1
        embedding_loss, x_hat, perplexity = \
            model(x, verbose=verbose, save_idx=save_idx, visual_folder=visual_folder)
        # Reconstruction error normalised by the training-set variance.
        recon_loss = torch.mean((x_hat - x)**2) / x_train_var
        loss = recon_loss + embedding_loss
        '''
        adding the perceptual loss here - patch loss of real and fake
        '''
        B = args.batch_size
        # Assumes the discriminator emits a 16x16 patch map per sample -- TODO confirm.
        D = 16 * 16
        ones = torch.ones((B, D), dtype=torch.float32, device=device)
        zeros = torch.zeros((B, D), dtype=torch.float32, device=device)
        if i % 2 == 0:
            # Generator step: push the discriminator towards "real" on fakes.
            fake = disc(x_hat).view(B, D)
            loss += c_mse(fake, ones)
        else:
            # Discriminator step: detach x_hat so its gradients stay out of
            # the generator's graph.
            fake = disc(x_hat.clone().detach()).view(B, D)
            real = disc(x).view(B, D)
            d_loss = c_mse(real, ones) + c_mse(fake, zeros)
            results["d_loss"].append(d_loss.cpu().detach().numpy())
            d_loss.backward()
            optim_D.step()
        loss.backward()
        optimizer.step()
        results["recon_errors"].append(recon_loss.cpu().detach().numpy())
        results["perplexities"].append(perplexity.cpu().detach().numpy())
        results["loss_vals"].append(loss.cpu().detach().numpy())
        results["n_updates"] = i
        if i % args.log_interval == 0:
            """
            save model and print values
            """
            if args.save:
                hyperparameters = args.__dict__
                utils.save_model_and_results(
                    model, optimizer, results, hyperparameters, args.filename)
            print('Update #', i, 'Recon Error:',
                  np.mean(results["recon_errors"][-args.log_interval:]),
                  'Loss', np.mean(results["loss_vals"][-args.log_interval:]),
                  'Discriminator Loss', np.mean(results['d_loss'][-args.log_interval:]),
                  'Perplexity:', np.mean(results["perplexities"][-args.log_interval:]), flush=True)
if __name__ == "__main__":
    # train_vqgan()
    # train_blob2full()
    parser = argparse.ArgumentParser()
    """
    Hyperparameters
    """
    timestamp = utils.readable_timestamp()
    parser.add_argument("--batch_size", type=int, default=64)
    parser.add_argument("--n_updates", type=int, default=50000)
    parser.add_argument("--n_hiddens", type=int, default=128)
    parser.add_argument("--n_residual_hiddens", type=int, default=32)
    parser.add_argument("--n_residual_layers", type=int, default=2)
    parser.add_argument("--embedding_dim", type=int, default=64)
    parser.add_argument("--n_embeddings", type=int, default=512)
    parser.add_argument("--beta", type=float, default=.25)
    parser.add_argument("--learning_rate", type=float, default=3e-4)
    parser.add_argument("--ckpt", type=str)
    parser.add_argument("--log_interval", type=int, default=3)
    parser.add_argument("--save_at", type=int, default=100)
    parser.add_argument("--device_id", type=int, default=0)
    parser.add_argument("--dataset", type=str, default='HandGestures')
    parser.add_argument("--test", action='store_true')
    # whether or not to save model
    parser.add_argument("-save", action="store_true")
    parser.add_argument("--filename", type=str, default=timestamp)
    args = parser.parse_args()
    # Saving is forced on here, overriding the '-save' CLI flag above.
    args.save = True
    if args.save and dist.is_primary():
        print('Results will be saved in ./results/vqvae_' + args.filename + '.pth')
    args.n_gpu = torch.cuda.device_count()
    # Derive a per-user TCP rendezvous port (offset into [2**15 + 2**14, +2**14))
    # from the uid hash; used as the dist.launch address below.
    port = (
        2 ** 15
        + 2 ** 14
        + hash(os.getuid() if sys.platform != "win32" else 1) % 2 ** 14
    )+1
    print(f'port: {port}')
    print(args)
    dist.launch(main, args.n_gpu, 1, 0, f"tcp://127.0.0.1:{port}", args=(args,))
| [
"os.getuid",
"torch.cuda.device_count",
"torch.nn.MSELoss",
"models.vqvae.VQVAE",
"numpy.mean",
"models.discriminator.discriminator",
"argparse.ArgumentParser",
"torch.mean",
"utils.load_data_and_data_loaders",
"utils.readable_timestamp",
"utils.save_model_and_results",
"os.makedirs",
"distr... | [((356, 397), 'os.makedirs', 'os.makedirs', (['visual_folder'], {'exist_ok': '(True)'}), '(visual_folder, exist_ok=True)\n', (367, 397), False, 'import os\n'), ((783, 916), 'models.vqvae.VQVAE', 'VQVAE', (['args.n_hiddens', 'args.n_residual_hiddens', 'args.n_residual_layers', 'args.n_embeddings', 'args.embedding_dim', 'args.beta', 'device'], {}), '(args.n_hiddens, args.n_residual_hiddens, args.n_residual_layers, args\n .n_embeddings, args.embedding_dim, args.beta, device)\n', (788, 916), False, 'from models.vqvae import VQVAE, VQVAE_Blob2Full\n'), ((1289, 1352), 'utils.load_data_and_data_loaders', 'utils.load_data_and_data_loaders', (['args.dataset', 'args.batch_size'], {}), '(args.dataset, args.batch_size)\n', (1321, 1352), False, 'import utils\n'), ((3980, 3992), 'torch.nn.MSELoss', 'nn.MSELoss', ([], {}), '()\n', (3990, 3992), True, 'import torch.nn as nn\n'), ((6442, 6467), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6465, 6467), False, 'import argparse\n'), ((6521, 6547), 'utils.readable_timestamp', 'utils.readable_timestamp', ([], {}), '()\n', (6545, 6547), False, 'import utils\n'), ((7844, 7869), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (7867, 7869), False, 'import torch\n'), ((8048, 8124), 'distributed.launch', 'dist.launch', (['main', 'args.n_gpu', '(1)', '(0)', 'f"""tcp://127.0.0.1:{port}"""'], {'args': '(args,)'}), "(main, args.n_gpu, 1, 0, f'tcp://127.0.0.1:{port}', args=(args,))\n", (8059, 8124), True, 'import distributed as dist\n'), ((1096, 1170), 'utils.load_data_and_data_loaders', 'utils.load_data_and_data_loaders', (['args.dataset', 'args.batch_size'], {'test': '(True)'}), '(args.dataset, args.batch_size, test=True)\n', (1128, 1170), False, 'import utils\n'), ((1891, 1903), 'tqdm.tqdm', 'tqdm', (['loader'], {}), '(loader)\n', (1895, 1903), False, 'from tqdm import tqdm\n'), ((4826, 4880), 'torch.ones', 'torch.ones', (['(B, D)'], {'dtype': 'torch.float32', 'device': 'device'}), 
'((B, D), dtype=torch.float32, device=device)\n', (4836, 4880), False, 'import torch\n'), ((4897, 4952), 'torch.zeros', 'torch.zeros', (['(B, D)'], {'dtype': 'torch.float32', 'device': 'device'}), '((B, D), dtype=torch.float32, device=device)\n', (4908, 4952), False, 'import torch\n'), ((7723, 7740), 'distributed.is_primary', 'dist.is_primary', ([], {}), '()\n', (7738, 7740), True, 'import distributed as dist\n'), ((1966, 1981), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1979, 1981), False, 'import torch\n'), ((2403, 2431), 'torch.mean', 'torch.mean', (['((x_hat - x) ** 2)'], {}), '((x_hat - x) ** 2)\n', (2413, 2431), False, 'import torch\n'), ((3270, 3287), 'distributed.is_primary', 'dist.is_primary', ([], {}), '()\n', (3285, 3287), True, 'import distributed as dist\n'), ((4005, 4020), 'models.discriminator.discriminator', 'discriminator', ([], {}), '()\n', (4018, 4020), False, 'from models.discriminator import discriminator\n'), ((4583, 4611), 'torch.mean', 'torch.mean', (['((x_hat - x) ** 2)'], {}), '((x_hat - x) ** 2)\n', (4593, 4611), False, 'import torch\n'), ((998, 1019), 'torch.load', 'torch.load', (['args.ckpt'], {}), '(args.ckpt)\n', (1008, 1019), False, 'import torch\n'), ((2930, 2945), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2943, 2945), False, 'import torch\n'), ((3451, 3542), 'utils.save_model_and_results', 'utils.save_model_and_results', (['model', 'optimizer', 'results', 'hyperparameters', 'args.filename'], {}), '(model, optimizer, results, hyperparameters,\n args.filename)\n', (3479, 3542), False, 'import utils\n'), ((3628, 3681), 'numpy.mean', 'np.mean', (["results['recon_errors'][-args.log_interval:]"], {}), "(results['recon_errors'][-args.log_interval:])\n", (3635, 3681), True, 'import numpy as np\n'), ((3709, 3759), 'numpy.mean', 'np.mean', (["results['loss_vals'][-args.log_interval:]"], {}), "(results['loss_vals'][-args.log_interval:])\n", (3716, 3759), True, 'import numpy as np\n'), ((3794, 3847), 'numpy.mean', 
'np.mean', (["results['perplexities'][-args.log_interval:]"], {}), "(results['perplexities'][-args.log_interval:])\n", (3801, 3847), True, 'import numpy as np\n'), ((5857, 5948), 'utils.save_model_and_results', 'utils.save_model_and_results', (['model', 'optimizer', 'results', 'hyperparameters', 'args.filename'], {}), '(model, optimizer, results, hyperparameters,\n args.filename)\n', (5885, 5948), False, 'import utils\n'), ((6034, 6087), 'numpy.mean', 'np.mean', (["results['recon_errors'][-args.log_interval:]"], {}), "(results['recon_errors'][-args.log_interval:])\n", (6041, 6087), True, 'import numpy as np\n'), ((6115, 6165), 'numpy.mean', 'np.mean', (["results['loss_vals'][-args.log_interval:]"], {}), "(results['loss_vals'][-args.log_interval:])\n", (6122, 6165), True, 'import numpy as np\n'), ((6207, 6254), 'numpy.mean', 'np.mean', (["results['d_loss'][-args.log_interval:]"], {}), "(results['d_loss'][-args.log_interval:])\n", (6214, 6254), True, 'import numpy as np\n'), ((6289, 6342), 'numpy.mean', 'np.mean', (["results['perplexities'][-args.log_interval:]"], {}), "(results['perplexities'][-args.log_interval:])\n", (6296, 6342), True, 'import numpy as np\n'), ((7933, 7944), 'os.getuid', 'os.getuid', ([], {}), '()\n', (7942, 7944), False, 'import os\n')] |
# pylint: disable=unused-variable,unused-argument,expression-not-assigned
from django.forms.models import model_to_dict
import arrow
import pytest
from expecter import expect
from api.elections.models import Election
from .. import models
@pytest.fixture
def info():
    """Identity record for a sample voter born 1985-06-19."""
    birth_date = arrow.get("1985-06-19")
    return models.Identity(first_name="John", last_name="Doe", birth_date=birth_date)
@pytest.fixture
def voter(info):
    """Voter built from the ``info`` identity plus an email address."""
    identity_fields = model_to_dict(info)
    return models.Voter(email="<EMAIL>", **identity_fields)
@pytest.fixture
def status(voter):
    """Registration status linking ``voter`` to a sample election."""
    election = Election(name="Sample Election")
    return models.Status(voter=voter, election=election)
def describe_registration_info():
    # pytest-describe suite: nested ``describe_*`` groups and their inner
    # functions are collected as tests automatically.
    def describe_birth_month():
        def is_parsed_from_date(info):
            # ``expect(x) == y`` asserts by raising on mismatch (expecter);
            # the bare expression is deliberate (see the pylint disable above).
            expect(info.birth_month) == "June"
    def describe_birth_year():
        def is_parsed_from_date(info):
            expect(info.birth_year) == 1985
def describe_voter():
    def describe_str():
        def is_based_on_name(voter):
            # str(Voter) should render the voter's full name.
            expect(str(voter)) == "<NAME>"
def describe_status():
    def describe_str():
        def is_based_on_voter_and_election(status):
            # str(Status) should combine the election name and the voter name.
            expect(str(status)) == "Sample Election: <NAME>"
| [
"api.elections.models.Election",
"expecter.expect",
"arrow.get",
"django.forms.models.model_to_dict"
] | [((372, 395), 'arrow.get', 'arrow.get', (['"""1985-06-19"""'], {}), "('1985-06-19')\n", (381, 395), False, 'import arrow\n'), ((498, 517), 'django.forms.models.model_to_dict', 'model_to_dict', (['info'], {}), '(info)\n', (511, 517), False, 'from django.forms.models import model_to_dict\n'), ((626, 658), 'api.elections.models.Election', 'Election', ([], {'name': '"""Sample Election"""'}), "(name='Sample Election')\n", (634, 658), False, 'from api.elections.models import Election\n'), ((787, 811), 'expecter.expect', 'expect', (['info.birth_month'], {}), '(info.birth_month)\n', (793, 811), False, 'from expecter import expect\n'), ((906, 929), 'expecter.expect', 'expect', (['info.birth_year'], {}), '(info.birth_year)\n', (912, 929), False, 'from expecter import expect\n')] |
import yaml
class Config:
    """YAML-backed application configuration.

    The file at ``path`` is parsed once at construction time; settings are
    exposed as read-only properties with sensible defaults.
    """

    def __init__(self, path: str):
        self.path = path
        self.cfg = {}
        self.parse()

    def parse(self):
        """(Re)load ``self.cfg`` from the YAML file at ``self.path``."""
        with open(self.path, 'r') as f:
            # safe_load: avoids arbitrary Python object construction from
            # untrusted YAML and the warning raised by a Loader-less load().
            self.cfg = yaml.safe_load(f)

    @property
    def secret(self) -> str:
        return self.cfg.get('secret')

    @property
    def db_connection_string(self) -> str:
        return self.cfg.get('db_connection_string')

    @property
    def db_host(self) -> str:
        return self.cfg.get('db_host', '127.0.0.1')

    @property
    def db_port(self) -> int:
        return self.cfg.get('db_port', 27017)

    @property
    def db_name(self) -> str:
        # Was annotated ``-> int``; the value is a database *name* (string).
        return self.cfg.get('db_name', 'runehistory')
| [
"yaml.load"
] | [((216, 228), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (225, 228), False, 'import yaml\n')] |
#!/usr/bin/env python3
import argparse
import csv
import logging
import os
import re
import sys
DELIMITER = ','


class CsvFilter:
    """Filter and/or deduplicate the rows of a CSV file.

    ``apply()`` reads ``<cwd>/<file>``, keeps the rows that pass every
    enabled criterion, and writes the result next to the source as
    ``*.filtered.csv``.  The header row is always kept.
    """

    def __init__(
        self,
        file=None,
        deduplicate=False,
        filter_query=None,
        filter_inverse=False,
        ignore_case=False,
        verbose=False,
        delimiter=DELIMITER
    ):
        # file: CSV path relative to the current working directory.
        # deduplicate: column name whose first occurrence is kept, or False.
        # filter_query: '<column>=<regex>' that rows must match to be kept.
        # filter_inverse: keep the rows that do NOT match ``filter_query``.
        # ignore_case: case-insensitive matching and deduplication.
        self.file = file
        self.deduplicate = deduplicate
        self.filter = filter_query
        self.filter_inverse = filter_inverse
        self.ignore_case = ignore_case
        self.verbose = verbose
        self.delimiter = delimiter
        self.logger = logging.getLogger('deduplicate')
        if self.verbose:
            self.logger.setLevel(logging.DEBUG)

    def apply(self):
        """Read the source file, filter it and store the filtered rows."""
        source_path = os.path.join(os.getcwd(), self.file)
        destination_path = source_path.replace('.csv', '.filtered.csv')
        filtered_items = self.filter_items(source_path)
        self.store_items(destination_path, filtered_items)

    def filter_items(self, file_path):
        """Return the header plus every row that passes all enabled criteria.

        Bug fix: rows are now *kept by default* and only dropped when they
        are duplicates or fail the filter.  Previously a row was dropped
        unless a filter matched, so deduplication alone (or no criteria at
        all) returned only the header, and a duplicate row that matched the
        filter was wrongly re-included.
        """
        result = []
        dedup_index = None
        seen_values = set()
        filter_column = None
        filter_index = None
        filter_pattern = None
        counter = 0
        re_flags = re.IGNORECASE if self.ignore_case else 0
        if self.verbose:
            print('* Filtering file', file_path)
        if self.filter:
            filter_match = re.match('^(.+)=(.+)$', self.filter)
            if filter_match:
                filter_column = filter_match.group(1)
                filter_pattern = filter_match.group(2)
        with open(file_path, 'rt') as csv_file:
            for row in csv.reader(csv_file, delimiter=self.delimiter):
                if counter == 0:
                    # Header row: resolve column names to indices; always keep it.
                    if self.deduplicate:
                        dedup_index = row.index(self.deduplicate)
                    if filter_column is not None:
                        filter_index = row.index(filter_column)
                    counter += 1
                    result.append(row)
                    continue
                valid = True
                if dedup_index is not None:
                    value = row[dedup_index]
                    if self.ignore_case:
                        value = value.lower()
                    if value in seen_values:
                        valid = False
                    else:
                        seen_values.add(value)
                if valid and filter_index is not None:
                    value = row[filter_index]
                    matched = bool(re.match(filter_pattern, value, re_flags))
                    # With filter_inverse, matching rows are the ones dropped.
                    if matched is self.filter_inverse:
                        valid = False
                if valid:
                    result.append(row)
                counter += 1
        if self.verbose:
            print('* Filtered', counter, 'items to', len(result))
        return result

    def store_items(self, file_path, items):
        """Write ``items`` (a list of CSV rows) to ``file_path``."""
        if self.verbose:
            print('* Storing items to', file_path)
        with open(file_path, 'wt') as csv_file:
            writer = csv.writer(csv_file, delimiter=self.delimiter)
            for row in items:
                writer.writerow(row)
def parse_arguments():
    """Build and parse the command-line interface for the CSV filter.

    Returns the parsed ``argparse.Namespace``; option names map 1:1 onto
    the ``CsvFilter`` constructor parameters (``--filter`` -> filter_query).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--file', '-f', help='File to filter')
    parser.add_argument('--deduplicate', help='Deduplication column to be applied', default=False)
    parser.add_argument('--filter', help='Filter to be applied', default=False)
    parser.add_argument('--filter_inverse', action='store_true', help='Inverse filter matches', default=False)
    parser.add_argument('--ignore_case', '-i', action='store_true', help='Match values case insensitive', default=False)
    parser.add_argument('--verbose', '-v', action='store_true', help='Enable verbose')
    return parser.parse_args()
def main():
    """Entry point: parse CLI options, run the filter, return exit code 0."""
    options = parse_arguments()
    csv_filter = CsvFilter(
        file=options.file,
        deduplicate=options.deduplicate,
        filter_query=options.filter,
        filter_inverse=options.filter_inverse,
        ignore_case=options.ignore_case,
        verbose=options.verbose,
    )
    csv_filter.apply()
    return 0
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
| [
"logging.getLogger",
"argparse.ArgumentParser",
"csv.writer",
"re.match",
"os.getcwd",
"csv.reader"
] | [((3419, 3444), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3442, 3444), False, 'import argparse\n'), ((658, 690), 'logging.getLogger', 'logging.getLogger', (['"""deduplicate"""'], {}), "('deduplicate')\n", (675, 690), False, 'import logging\n'), ((806, 817), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (815, 817), False, 'import os\n'), ((1524, 1560), 're.match', 're.match', (['"""^(.+)=(.+)$"""', 'self.filter'], {}), "('^(.+)=(.+)$', self.filter)\n", (1532, 1560), False, 'import re\n'), ((1771, 1817), 'csv.reader', 'csv.reader', (['csv_file'], {'delimiter': 'self.delimiter'}), '(csv_file, delimiter=self.delimiter)\n', (1781, 1817), False, 'import csv\n'), ((3267, 3313), 'csv.writer', 'csv.writer', (['csv_file'], {'delimiter': 'self.delimiter'}), '(csv_file, delimiter=self.delimiter)\n', (3277, 3313), False, 'import csv\n'), ((2756, 2797), 're.match', 're.match', (['filter_pattern', 'value', 're_flags'], {}), '(filter_pattern, value, re_flags)\n', (2764, 2797), False, 'import re\n')] |
# coding=utf-8
from builtins import str
import json
from django.contrib.auth.models import Group, Permission
from django.urls import reverse
from rest_framework import status
from bluebottle.impact.models import ImpactGoal
from bluebottle.impact.tests.factories import (
ImpactTypeFactory, ImpactGoalFactory
)
from bluebottle.time_based.tests.factories import DateActivityFactory
from bluebottle.members.models import MemberPlatformSettings
from bluebottle.test.factory_models.accounts import BlueBottleUserFactory
from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient
class ImpactTypeListAPITestCase(BluebottleTestCase):
    """Read-only JSON-API tests for the impact-type list endpoint."""
    def setUp(self):
        super(ImpactTypeListAPITestCase, self).setUp()
        self.client = JSONAPITestClient()
        self.types = ImpactTypeFactory.create_batch(10)
        self.url = reverse('impact-type-list')
        self.user = BlueBottleUserFactory()
    def test_get(self):
        # Authenticated list: every type is returned with the documented
        # attributes and the JSON-API resource type.
        response = self.client.get(self.url, user=self.user)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.json()['data']), len(self.types))
        resource = response.json()['data'][0]['attributes']
        self.assertTrue('slug' in resource)
        self.assertTrue('name' in resource)
        self.assertTrue('unit' in resource)
        self.assertTrue('text' in resource)
        self.assertTrue('text-with-target' in resource)
        self.assertTrue('text-passed' in resource)
        resource_type = response.json()['data'][0]['type']
        self.assertEqual(resource_type, 'activities/impact-types')
    def test_get_anonymous(self):
        # Anonymous users can read the list on an open platform.
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.json()['data']), len(self.types))
    def test_get_only_active(self):
        # Deactivated types are excluded from the list.
        self.types[0].active = False
        self.types[0].save()
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(len(response.json()['data']), len(self.types) - 1)
    def test_get_closed(self):
        # On a closed platform, authenticated users can still read.
        MemberPlatformSettings.objects.update(closed=True)
        response = self.client.get(self.url, user=self.user)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_get_closed_anonymous(self):
        # ...but anonymous requests are rejected with 401.
        MemberPlatformSettings.objects.update(closed=True)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_post(self):
        # The endpoint is read-only; POST is not allowed.
        response = self.client.post(self.url, user=self.user)
        self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
class ImpactGoalListAPITestCase(BluebottleTestCase):
    """JSON-API tests for creating impact goals on an activity."""
    def setUp(self):
        super(ImpactGoalListAPITestCase, self).setUp()
        self.client = JSONAPITestClient()
        self.activity = DateActivityFactory.create()
        self.type = ImpactTypeFactory.create()
        self.url = reverse('impact-goal-list')
        # Valid JSON-API payload linking a new goal to the activity and type.
        self.data = {
            'data': {
                'type': 'activities/impact-goals',
                'attributes': {
                    'target': 1.5
                },
                'relationships': {
                    'activity': {
                        'data': {
                            'type': 'activities/time-based/dates',
                            'id': self.activity.pk
                        },
                    },
                    'type': {
                        'data': {
                            'type': 'activities/impact-types',
                            'id': self.type.pk
                        },
                    }
                }
            }
        }
    def test_create(self):
        # The activity owner may create a goal; it is persisted with the
        # submitted target, type and activity.
        response = self.client.post(
            self.url,
            json.dumps(self.data),
            user=self.activity.owner
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        resource_type = response.json()['data']['type']
        self.assertEqual(resource_type, 'activities/impact-goals')
        goal = ImpactGoal.objects.get(pk=response.json()['data']['id'])
        self.assertEqual(
            goal.target, self.data['data']['attributes']['target']
        )
        self.assertEqual(goal.type, self.type)
        self.assertEqual(goal.activity, self.activity)
    def test_create_no_target(self):
        # The target attribute is optional; omitting it stores None.
        del self.data['data']['attributes']['target']
        response = self.client.post(
            self.url,
            json.dumps(self.data),
            user=self.activity.owner
        )
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        resource_type = response.json()['data']['type']
        self.assertEqual(resource_type, 'activities/impact-goals')
        goal = ImpactGoal.objects.get(pk=response.json()['data']['id'])
        self.assertEqual(
            goal.target, None
        )
        self.assertEqual(goal.type, self.type)
        self.assertEqual(goal.activity, self.activity)
    def test_create_non_owner(self):
        # Users other than the activity owner may not create goals.
        response = self.client.post(
            self.url,
            json.dumps(self.data),
            user=BlueBottleUserFactory.create()
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_create_anonymous(self):
        # Unauthenticated requests are rejected with 401.
        response = self.client.post(
            self.url,
            json.dumps(self.data),
        )
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ImpactGoalDetailsAPITestCase(BluebottleTestCase):
    """JSON-API tests for retrieving, updating and deleting a single goal."""
    def setUp(self):
        super(ImpactGoalDetailsAPITestCase, self).setUp()
        self.client = JSONAPITestClient()
        self.activity = DateActivityFactory.create()
        self.type = ImpactTypeFactory.create()
        self.goal = ImpactGoalFactory(type=self.type, activity=self.activity)
        self.url = reverse('impact-goal-details', args=(self.goal.pk, ))
        # Valid JSON-API patch payload updating the goal's target.
        self.data = {
            'data': {
                'type': 'activities/impact-goals',
                'id': self.goal.pk,
                'attributes': {
                    'target': 1.5
                },
            }
        }
    def test_get(self):
        # Owner GET returns the goal's target and its type/activity links.
        response = self.client.get(
            self.url,
            user=self.activity.owner
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()['data']
        self.assertEqual(data['type'], 'activities/impact-goals')
        self.assertEqual(
            data['attributes']['target'], self.goal.target
        )
        self.assertEqual(
            data['relationships']['type']['data']['id'],
            str(self.goal.type.pk)
        )
        self.assertEqual(
            data['relationships']['activity']['data']['id'],
            str(self.goal.activity.pk)
        )
    def test_get_incomplete(self):
        # A goal without a target reports no missing required fields.
        self.goal.target = None
        self.goal.save()
        response = self.client.get(
            self.url,
            user=self.activity.owner
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()['data']
        self.assertEqual(data['meta']['required'], [])
    def test_get_non_owner(self):
        # Reads are allowed for other authenticated users.
        response = self.client.get(
            self.url,
            user=BlueBottleUserFactory.create()
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_get_anonymous(self):
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
    def test_get_closed_anonymous(self):
        # With the anonymous read permission removed and the platform closed,
        # anonymous access is rejected.
        anonymous = Group.objects.get(name='Anonymous')
        anonymous.permissions.remove(
            Permission.objects.get(codename='api_read_dateactivity')
        )
        MemberPlatformSettings.objects.update(closed=True)
        response = self.client.get(self.url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_update(self):
        # The owner can PATCH the target; the change is persisted.
        response = self.client.patch(
            self.url,
            data=json.dumps(self.data),
            user=self.activity.owner
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        data = response.json()['data']
        self.assertEqual(data['type'], 'activities/impact-goals')
        self.assertEqual(
            data['attributes']['target'],
            self.data['data']['attributes']['target']
        )
        self.goal.refresh_from_db()
        self.assertEqual(
            self.goal.target,
            self.data['data']['attributes']['target']
        )
    def test_update_other_user(self):
        response = self.client.patch(
            self.url,
            data=json.dumps(self.data),
            user=BlueBottleUserFactory.create()
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_update_anonymous(self):
        response = self.client.patch(
            self.url,
            data=json.dumps(self.data)
        )
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_delete(self):
        # The owner can delete the goal; it is removed from the database.
        response = self.client.delete(
            self.url,
            user=self.activity.owner
        )
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        with self.assertRaises(ImpactGoal.DoesNotExist):
            ImpactGoal.objects.get(pk=self.goal.pk)
    def test_delete_other_user(self):
        response = self.client.delete(
            self.url,
            user=BlueBottleUserFactory.create()
        )
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_delete_anonymous(self):
        response = self.client.delete(self.url)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
| [
"bluebottle.time_based.tests.factories.DateActivityFactory.create",
"django.contrib.auth.models.Permission.objects.get",
"django.contrib.auth.models.Group.objects.get",
"bluebottle.test.utils.JSONAPITestClient",
"bluebottle.impact.tests.factories.ImpactTypeFactory.create",
"bluebottle.impact.tests.factori... | [((747, 766), 'bluebottle.test.utils.JSONAPITestClient', 'JSONAPITestClient', ([], {}), '()\n', (764, 766), False, 'from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient\n'), ((788, 822), 'bluebottle.impact.tests.factories.ImpactTypeFactory.create_batch', 'ImpactTypeFactory.create_batch', (['(10)'], {}), '(10)\n', (818, 822), False, 'from bluebottle.impact.tests.factories import ImpactTypeFactory, ImpactGoalFactory\n'), ((842, 869), 'django.urls.reverse', 'reverse', (['"""impact-type-list"""'], {}), "('impact-type-list')\n", (849, 869), False, 'from django.urls import reverse\n'), ((890, 913), 'bluebottle.test.factory_models.accounts.BlueBottleUserFactory', 'BlueBottleUserFactory', ([], {}), '()\n', (911, 913), False, 'from bluebottle.test.factory_models.accounts import BlueBottleUserFactory\n'), ((2165, 2215), 'bluebottle.members.models.MemberPlatformSettings.objects.update', 'MemberPlatformSettings.objects.update', ([], {'closed': '(True)'}), '(closed=True)\n', (2202, 2215), False, 'from bluebottle.members.models import MemberPlatformSettings\n'), ((2395, 2445), 'bluebottle.members.models.MemberPlatformSettings.objects.update', 'MemberPlatformSettings.objects.update', ([], {'closed': '(True)'}), '(closed=True)\n', (2432, 2445), False, 'from bluebottle.members.models import MemberPlatformSettings\n'), ((2893, 2912), 'bluebottle.test.utils.JSONAPITestClient', 'JSONAPITestClient', ([], {}), '()\n', (2910, 2912), False, 'from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient\n'), ((2937, 2965), 'bluebottle.time_based.tests.factories.DateActivityFactory.create', 'DateActivityFactory.create', ([], {}), '()\n', (2963, 2965), False, 'from bluebottle.time_based.tests.factories import DateActivityFactory\n'), ((2986, 3012), 'bluebottle.impact.tests.factories.ImpactTypeFactory.create', 'ImpactTypeFactory.create', ([], {}), '()\n', (3010, 3012), False, 'from bluebottle.impact.tests.factories import 
ImpactTypeFactory, ImpactGoalFactory\n'), ((3032, 3059), 'django.urls.reverse', 'reverse', (['"""impact-goal-list"""'], {}), "('impact-goal-list')\n", (3039, 3059), False, 'from django.urls import reverse\n'), ((5734, 5753), 'bluebottle.test.utils.JSONAPITestClient', 'JSONAPITestClient', ([], {}), '()\n', (5751, 5753), False, 'from bluebottle.test.utils import BluebottleTestCase, JSONAPITestClient\n'), ((5778, 5806), 'bluebottle.time_based.tests.factories.DateActivityFactory.create', 'DateActivityFactory.create', ([], {}), '()\n', (5804, 5806), False, 'from bluebottle.time_based.tests.factories import DateActivityFactory\n'), ((5827, 5853), 'bluebottle.impact.tests.factories.ImpactTypeFactory.create', 'ImpactTypeFactory.create', ([], {}), '()\n', (5851, 5853), False, 'from bluebottle.impact.tests.factories import ImpactTypeFactory, ImpactGoalFactory\n'), ((5874, 5931), 'bluebottle.impact.tests.factories.ImpactGoalFactory', 'ImpactGoalFactory', ([], {'type': 'self.type', 'activity': 'self.activity'}), '(type=self.type, activity=self.activity)\n', (5891, 5931), False, 'from bluebottle.impact.tests.factories import ImpactTypeFactory, ImpactGoalFactory\n'), ((5951, 6003), 'django.urls.reverse', 'reverse', (['"""impact-goal-details"""'], {'args': '(self.goal.pk,)'}), "('impact-goal-details', args=(self.goal.pk,))\n", (5958, 6003), False, 'from django.urls import reverse\n'), ((7698, 7733), 'django.contrib.auth.models.Group.objects.get', 'Group.objects.get', ([], {'name': '"""Anonymous"""'}), "(name='Anonymous')\n", (7715, 7733), False, 'from django.contrib.auth.models import Group, Permission\n'), ((7860, 7910), 'bluebottle.members.models.MemberPlatformSettings.objects.update', 'MemberPlatformSettings.objects.update', ([], {'closed': '(True)'}), '(closed=True)\n', (7897, 7910), False, 'from bluebottle.members.models import MemberPlatformSettings\n'), ((3877, 3898), 'json.dumps', 'json.dumps', (['self.data'], {}), '(self.data)\n', (3887, 3898), False, 'import json\n'), 
((4585, 4606), 'json.dumps', 'json.dumps', (['self.data'], {}), '(self.data)\n', (4595, 4606), False, 'import json\n'), ((5201, 5222), 'json.dumps', 'json.dumps', (['self.data'], {}), '(self.data)\n', (5211, 5222), False, 'import json\n'), ((5465, 5486), 'json.dumps', 'json.dumps', (['self.data'], {}), '(self.data)\n', (5475, 5486), False, 'import json\n'), ((6741, 6763), 'builtins.str', 'str', (['self.goal.type.pk'], {}), '(self.goal.type.pk)\n', (6744, 6763), False, 'from builtins import str\n'), ((6873, 6899), 'builtins.str', 'str', (['self.goal.activity.pk'], {}), '(self.goal.activity.pk)\n', (6876, 6899), False, 'from builtins import str\n'), ((7784, 7840), 'django.contrib.auth.models.Permission.objects.get', 'Permission.objects.get', ([], {'codename': '"""api_read_dateactivity"""'}), "(codename='api_read_dateactivity')\n", (7806, 7840), False, 'from django.contrib.auth.models import Group, Permission\n'), ((9448, 9487), 'bluebottle.impact.models.ImpactGoal.objects.get', 'ImpactGoal.objects.get', ([], {'pk': 'self.goal.pk'}), '(pk=self.goal.pk)\n', (9470, 9487), False, 'from bluebottle.impact.models import ImpactGoal\n'), ((5241, 5271), 'bluebottle.test.factory_models.accounts.BlueBottleUserFactory.create', 'BlueBottleUserFactory.create', ([], {}), '()\n', (5269, 5271), False, 'from bluebottle.test.factory_models.accounts import BlueBottleUserFactory\n'), ((7381, 7411), 'bluebottle.test.factory_models.accounts.BlueBottleUserFactory.create', 'BlueBottleUserFactory.create', ([], {}), '()\n', (7409, 7411), False, 'from bluebottle.test.factory_models.accounts import BlueBottleUserFactory\n'), ((8138, 8159), 'json.dumps', 'json.dumps', (['self.data'], {}), '(self.data)\n', (8148, 8159), False, 'import json\n'), ((8788, 8809), 'json.dumps', 'json.dumps', (['self.data'], {}), '(self.data)\n', (8798, 8809), False, 'import json\n'), ((8828, 8858), 'bluebottle.test.factory_models.accounts.BlueBottleUserFactory.create', 'BlueBottleUserFactory.create', ([], {}), '()\n', 
(8856, 8858), False, 'from bluebottle.test.factory_models.accounts import BlueBottleUserFactory\n'), ((9058, 9079), 'json.dumps', 'json.dumps', (['self.data'], {}), '(self.data)\n', (9068, 9079), False, 'import json\n'), ((9605, 9635), 'bluebottle.test.factory_models.accounts.BlueBottleUserFactory.create', 'BlueBottleUserFactory.create', ([], {}), '()\n', (9633, 9635), False, 'from bluebottle.test.factory_models.accounts import BlueBottleUserFactory\n')] |
from django.db import models
from categorias.models import Categoria
from django.contrib.auth.models import User
from django.utils import timezone
# Create your models here.
class Post(models.Model):
    """Blog post with author, category, excerpt, image and publication flag.

    Field names and verbose names are in Portuguese.
    """
    titulo_post = models.CharField(max_length=50, verbose_name='Titulo')  # title
    autor_post = models.ForeignKey(User, on_delete=models.DO_NOTHING, verbose_name='Autor')  # author
    data_post = models.DateTimeField(default=timezone.now, verbose_name='Data')  # defaults to creation time
    conteudo_post = models.TextField(verbose_name='Conteudo')  # body
    exerto_post = models.TextField(verbose_name='Exerto')  # excerpt (NOTE: likely misspelling of "excerto"; renaming requires a migration)
    categoria_post = models.ForeignKey(
        Categoria, on_delete=models.DO_NOTHING, blank=True, null=True, verbose_name='Categoria')  # optional category
    imagem_post = models.ImageField(upload_to='post_img', blank=True, null=True, verbose_name='Imagem')  # optional image
    publicacao_post = models.BooleanField(default=False, verbose_name='Publicado')  # published flag
    def __str__(self):
        # Posts are displayed by their title in the admin and listings.
        return self.titulo_post
| [
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((221, 275), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)', 'verbose_name': '"""Titulo"""'}), "(max_length=50, verbose_name='Titulo')\n", (237, 275), False, 'from django.db import models\n'), ((293, 367), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.DO_NOTHING', 'verbose_name': '"""Autor"""'}), "(User, on_delete=models.DO_NOTHING, verbose_name='Autor')\n", (310, 367), False, 'from django.db import models\n'), ((384, 447), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': 'timezone.now', 'verbose_name': '"""Data"""'}), "(default=timezone.now, verbose_name='Data')\n", (404, 447), False, 'from django.db import models\n'), ((468, 509), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""Conteudo"""'}), "(verbose_name='Conteudo')\n", (484, 509), False, 'from django.db import models\n'), ((528, 567), 'django.db.models.TextField', 'models.TextField', ([], {'verbose_name': '"""Exerto"""'}), "(verbose_name='Exerto')\n", (544, 567), False, 'from django.db import models\n'), ((589, 700), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Categoria'], {'on_delete': 'models.DO_NOTHING', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Categoria"""'}), "(Categoria, on_delete=models.DO_NOTHING, blank=True, null=\n True, verbose_name='Categoria')\n", (606, 700), False, 'from django.db import models\n'), ((723, 813), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""post_img"""', 'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Imagem"""'}), "(upload_to='post_img', blank=True, null=True, verbose_name\n ='Imagem')\n", (740, 813), False, 'from django.db import models\n'), ((831, 891), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Publicado"""'}), "(default=False, verbose_name='Publicado')\n", (850, 891), False, 'from django.db import models\n')] |
#!/usr/bin/env python
import datetime
import logging
import os
import random
import rospy
import schedule
from interaction_engine.cordial_interface import CordialInterface
from interaction_engine.database import Database
from interaction_engine.int_engine import InteractionEngine
from interaction_engine.message import Message
from interaction_engine.state import State
from interaction_engine.state_collection import StateCollection
from cordial_msgs.msg import AskOnGuiAction, AskOnGuiGoal, MouseEvent
from std_msgs.msg import Bool
logging.basicConfig(level=logging.INFO)
class Keys:
GREETING = "greeting"
HOW_ARE_YOU = "how are you"
TAKE_CARE = "take care"
WHEN_TO_TALK = "when to talk"
greeting = State(
name=Keys.GREETING,
message_type=Message.Type.MULTIPLE_CHOICE_ONE_COLUMN,
content="Hello!",
next_states=[Keys.HOW_ARE_YOU],
transitions={"Hello!": Keys.HOW_ARE_YOU, "Hi!": Keys.HOW_ARE_YOU}
)
how_are_you = State(
name=Keys.HOW_ARE_YOU,
message_type=Message.Type.MULTIPLE_CHOICE_ONE_COLUMN,
content="How are you doing today?",
next_states=[Keys.TAKE_CARE],
transitions={
"Pretty good.": Keys.TAKE_CARE,
"Great!": Keys.TAKE_CARE,
"Not too good.": Keys.TAKE_CARE
}
)
take_care = State(
name=Keys.TAKE_CARE,
message_type=Message.Type.MULTIPLE_CHOICE_ONE_COLUMN,
content="Don't forget to drink enough water and get enough sleep!",
next_states=[Keys.WHEN_TO_TALK],
transitions={"Next": Keys.WHEN_TO_TALK}
)
when_to_talk = State(
name=Keys.WHEN_TO_TALK,
message_type=Message.Type.TIME_ENTRY,
content="When would you like to talk tomorrow?",
next_states=["exit"],
args=["15", "15:15"]
)
state_collection = StateCollection(
name="example interaction",
init_state_name=Keys.WHEN_TO_TALK,
states=[
greeting,
how_are_you,
take_care,
when_to_talk
]
)
cwd = os.getcwd()
database_file = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"example_interaction_database.json"
)
default_database_keys = [
Keys.GREETING,
Keys.HOW_ARE_YOU,
Keys.TAKE_CARE,
Keys.WHEN_TO_TALK
]
database_manager = Database(
database_file_name=database_file,
default_database_keys=default_database_keys
)
interface = CordialInterface(
action_name="cordial/say_and_ask_on_gui",
seconds_until_timeout=None
)
interaction_engine = InteractionEngine(
state_collection=state_collection,
database_manager=database_manager,
interface=interface
)
if __name__ == "__main__":
while not rospy.is_shutdown():
rospy.logdebug("Scheduled interaction running")
interaction_engine.run()
rospy.sleep(5)
| [
"logging.basicConfig",
"interaction_engine.database.Database",
"rospy.is_shutdown",
"interaction_engine.state_collection.StateCollection",
"os.getcwd",
"os.path.realpath",
"interaction_engine.state.State",
"interaction_engine.cordial_interface.CordialInterface",
"rospy.sleep",
"rospy.logdebug",
... | [((540, 579), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (559, 579), False, 'import logging\n'), ((727, 938), 'interaction_engine.state.State', 'State', ([], {'name': 'Keys.GREETING', 'message_type': 'Message.Type.MULTIPLE_CHOICE_ONE_COLUMN', 'content': '"""Hello!"""', 'next_states': '[Keys.HOW_ARE_YOU]', 'transitions': "{'Hello!': Keys.HOW_ARE_YOU, 'Hi!': Keys.HOW_ARE_YOU}"}), "(name=Keys.GREETING, message_type=Message.Type.\n MULTIPLE_CHOICE_ONE_COLUMN, content='Hello!', next_states=[Keys.\n HOW_ARE_YOU], transitions={'Hello!': Keys.HOW_ARE_YOU, 'Hi!': Keys.\n HOW_ARE_YOU})\n", (732, 938), False, 'from interaction_engine.state import State\n'), ((961, 1228), 'interaction_engine.state.State', 'State', ([], {'name': 'Keys.HOW_ARE_YOU', 'message_type': 'Message.Type.MULTIPLE_CHOICE_ONE_COLUMN', 'content': '"""How are you doing today?"""', 'next_states': '[Keys.TAKE_CARE]', 'transitions': "{'Pretty good.': Keys.TAKE_CARE, 'Great!': Keys.TAKE_CARE, 'Not too good.':\n Keys.TAKE_CARE}"}), "(name=Keys.HOW_ARE_YOU, message_type=Message.Type.\n MULTIPLE_CHOICE_ONE_COLUMN, content='How are you doing today?',\n next_states=[Keys.TAKE_CARE], transitions={'Pretty good.': Keys.\n TAKE_CARE, 'Great!': Keys.TAKE_CARE, 'Not too good.': Keys.TAKE_CARE})\n", (966, 1228), False, 'from interaction_engine.state import State\n'), ((1280, 1517), 'interaction_engine.state.State', 'State', ([], {'name': 'Keys.TAKE_CARE', 'message_type': 'Message.Type.MULTIPLE_CHOICE_ONE_COLUMN', 'content': '"""Don\'t forget to drink enough water and get enough sleep!"""', 'next_states': '[Keys.WHEN_TO_TALK]', 'transitions': "{'Next': Keys.WHEN_TO_TALK}"}), '(name=Keys.TAKE_CARE, message_type=Message.Type.\n MULTIPLE_CHOICE_ONE_COLUMN, content=\n "Don\'t forget to drink enough water and get enough sleep!", next_states\n =[Keys.WHEN_TO_TALK], transitions={\'Next\': Keys.WHEN_TO_TALK})\n', (1285, 1517), False, 'from interaction_engine.state 
import State\n'), ((1541, 1711), 'interaction_engine.state.State', 'State', ([], {'name': 'Keys.WHEN_TO_TALK', 'message_type': 'Message.Type.TIME_ENTRY', 'content': '"""When would you like to talk tomorrow?"""', 'next_states': "['exit']", 'args': "['15', '15:15']"}), "(name=Keys.WHEN_TO_TALK, message_type=Message.Type.TIME_ENTRY, content\n ='When would you like to talk tomorrow?', next_states=['exit'], args=[\n '15', '15:15'])\n", (1546, 1711), False, 'from interaction_engine.state import State\n'), ((1744, 1884), 'interaction_engine.state_collection.StateCollection', 'StateCollection', ([], {'name': '"""example interaction"""', 'init_state_name': 'Keys.WHEN_TO_TALK', 'states': '[greeting, how_are_you, take_care, when_to_talk]'}), "(name='example interaction', init_state_name=Keys.\n WHEN_TO_TALK, states=[greeting, how_are_you, take_care, when_to_talk])\n", (1759, 1884), False, 'from interaction_engine.state_collection import StateCollection\n'), ((1939, 1950), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1948, 1950), False, 'import os\n'), ((2204, 2296), 'interaction_engine.database.Database', 'Database', ([], {'database_file_name': 'database_file', 'default_database_keys': 'default_database_keys'}), '(database_file_name=database_file, default_database_keys=\n default_database_keys)\n', (2212, 2296), False, 'from interaction_engine.database import Database\n'), ((2315, 2405), 'interaction_engine.cordial_interface.CordialInterface', 'CordialInterface', ([], {'action_name': '"""cordial/say_and_ask_on_gui"""', 'seconds_until_timeout': 'None'}), "(action_name='cordial/say_and_ask_on_gui',\n seconds_until_timeout=None)\n", (2331, 2405), False, 'from interaction_engine.cordial_interface import CordialInterface\n'), ((2434, 2547), 'interaction_engine.int_engine.InteractionEngine', 'InteractionEngine', ([], {'state_collection': 'state_collection', 'database_manager': 'database_manager', 'interface': 'interface'}), '(state_collection=state_collection, database_manager=\n 
database_manager, interface=interface)\n', (2451, 2547), False, 'from interaction_engine.int_engine import InteractionEngine\n'), ((2001, 2027), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2017, 2027), False, 'import os\n'), ((2632, 2651), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (2649, 2651), False, 'import rospy\n'), ((2661, 2708), 'rospy.logdebug', 'rospy.logdebug', (['"""Scheduled interaction running"""'], {}), "('Scheduled interaction running')\n", (2675, 2708), False, 'import rospy\n'), ((2750, 2764), 'rospy.sleep', 'rospy.sleep', (['(5)'], {}), '(5)\n', (2761, 2764), False, 'import rospy\n')] |
#!/usr/bin/env python
"""Script used to test the network with batfish"""
from pybatfish.client.commands import *
from pybatfish.question import load_questions
from pybatfish.client.asserts import (
assert_no_duplicate_router_ids,
assert_no_incompatible_bgp_sessions,
assert_no_incompatible_ospf_sessions,
assert_no_unestablished_bgp_sessions,
assert_no_undefined_references,
)
from rich.console import Console
console = Console(color_system="truecolor")
def test_duplicate_rtr_ids(snap):
"""Testing for duplicate router IDs"""
console.print(
":white_exclamation_mark: [bold yellow]Testing for duplicate router IDs[/bold yellow] :white_exclamation_mark:"
)
assert_no_duplicate_router_ids(
snapshot=snap,
protocols={"ospf", "bgp"},
)
console.print(
":green_heart: [bold green]No duplicate router IDs found[/bold green] :green_heart:"
)
def test_bgp_compatibility(snap):
"""Testing for incompatible BGP sessions"""
console.print(
":white_exclamation_mark: [bold yellow]Testing for incompatible BGP sessions[/bold yellow] :white_exclamation_mark:"
)
assert_no_incompatible_bgp_sessions(
snapshot=snap,
)
console.print(
":green_heart: [bold green]All BGP sessions compatible![/bold green] :green_heart:"
)
def test_ospf_compatibility(snap):
"""Testing for incompatible OSPF sessions"""
console.print(
":white_exclamation_mark: [bold yellow]Testing for incompatible OSPF sessions[/bold yellow] :white_exclamation_mark:"
)
assert_no_incompatible_ospf_sessions(
snapshot=snap,
)
console.print(
":green_heart: [bold green]All OSPF sessions compatible![/bold green] :green_heart:"
)
def test_bgp_unestablished(snap):
"""Testing for BGP sessions that are not established"""
console.print(
":white_exclamation_mark: [bold yellow]Testing for unestablished BGP sessions[/bold yellow] :white_exclamation_mark:"
)
assert_no_unestablished_bgp_sessions(
snapshot=snap,
)
console.print(
":green_heart: [bold green]All BGP sessions are established![/bold green] :green_heart:"
)
def test_undefined_references(snap):
"""Testing for any undefined references"""
console.print(
":white_exclamation_mark: [bold yellow]Testing for undefined references[/bold yellow] :white_exclamation_mark:"
)
assert_no_undefined_references(
snapshot=snap,
)
console.print(
":green_heart: [bold green]No undefined refences found![/bold green] :green_heart:"
)
def main():
"""init all the things"""
NETWORK_NAME = "PDX_NET"
SNAPSHOT_NAME = "snapshot00"
SNAPSHOT_DIR = "./snapshots"
bf_session.host = "192.168.10.193"
bf_set_network(NETWORK_NAME)
init_snap = bf_init_snapshot(SNAPSHOT_DIR, name=SNAPSHOT_NAME, overwrite=True)
load_questions()
test_duplicate_rtr_ids(init_snap)
test_bgp_compatibility(init_snap)
test_ospf_compatibility(init_snap)
test_bgp_unestablished(init_snap)
test_undefined_references(init_snap)
if __name__ == "__main__":
main()
| [
"pybatfish.client.asserts.assert_no_duplicate_router_ids",
"pybatfish.client.asserts.assert_no_unestablished_bgp_sessions",
"pybatfish.client.asserts.assert_no_undefined_references",
"rich.console.Console",
"pybatfish.client.asserts.assert_no_incompatible_bgp_sessions",
"pybatfish.question.load_questions"... | [((444, 477), 'rich.console.Console', 'Console', ([], {'color_system': '"""truecolor"""'}), "(color_system='truecolor')\n", (451, 477), False, 'from rich.console import Console\n'), ((706, 778), 'pybatfish.client.asserts.assert_no_duplicate_router_ids', 'assert_no_duplicate_router_ids', ([], {'snapshot': 'snap', 'protocols': "{'ospf', 'bgp'}"}), "(snapshot=snap, protocols={'ospf', 'bgp'})\n", (736, 778), False, 'from pybatfish.client.asserts import assert_no_duplicate_router_ids, assert_no_incompatible_bgp_sessions, assert_no_incompatible_ospf_sessions, assert_no_unestablished_bgp_sessions, assert_no_undefined_references\n'), ((1158, 1208), 'pybatfish.client.asserts.assert_no_incompatible_bgp_sessions', 'assert_no_incompatible_bgp_sessions', ([], {'snapshot': 'snap'}), '(snapshot=snap)\n', (1193, 1208), False, 'from pybatfish.client.asserts import assert_no_duplicate_router_ids, assert_no_incompatible_bgp_sessions, assert_no_incompatible_ospf_sessions, assert_no_unestablished_bgp_sessions, assert_no_undefined_references\n'), ((1582, 1633), 'pybatfish.client.asserts.assert_no_incompatible_ospf_sessions', 'assert_no_incompatible_ospf_sessions', ([], {'snapshot': 'snap'}), '(snapshot=snap)\n', (1618, 1633), False, 'from pybatfish.client.asserts import assert_no_duplicate_router_ids, assert_no_incompatible_bgp_sessions, assert_no_incompatible_ospf_sessions, assert_no_unestablished_bgp_sessions, assert_no_undefined_references\n'), ((2018, 2069), 'pybatfish.client.asserts.assert_no_unestablished_bgp_sessions', 'assert_no_unestablished_bgp_sessions', ([], {'snapshot': 'snap'}), '(snapshot=snap)\n', (2054, 2069), False, 'from pybatfish.client.asserts import assert_no_duplicate_router_ids, assert_no_incompatible_bgp_sessions, assert_no_incompatible_ospf_sessions, assert_no_unestablished_bgp_sessions, assert_no_undefined_references\n'), ((2442, 2487), 'pybatfish.client.asserts.assert_no_undefined_references', 
'assert_no_undefined_references', ([], {'snapshot': 'snap'}), '(snapshot=snap)\n', (2472, 2487), False, 'from pybatfish.client.asserts import assert_no_duplicate_router_ids, assert_no_incompatible_bgp_sessions, assert_no_incompatible_ospf_sessions, assert_no_unestablished_bgp_sessions, assert_no_undefined_references\n'), ((2918, 2934), 'pybatfish.question.load_questions', 'load_questions', ([], {}), '()\n', (2932, 2934), False, 'from pybatfish.question import load_questions\n')] |
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import pytest
import stat
from spack.hooks.permissions_setters import (
chmod_real_entries, InvalidPermissionsError
)
import llnl.util.filesystem as fs
def test_chmod_real_entries_ignores_suid_sgid(tmpdir):
path = str(tmpdir.join('file').ensure())
mode = stat.S_ISUID | stat.S_ISGID | stat.S_ISVTX
os.chmod(path, mode)
mode = os.stat(path).st_mode # adds a high bit we aren't concerned with
perms = stat.S_IRWXU
chmod_real_entries(path, perms)
assert os.stat(path).st_mode == mode | perms & ~stat.S_IXUSR
def test_chmod_rejects_group_writable_suid(tmpdir):
path = str(tmpdir.join('file').ensure())
mode = stat.S_ISUID | stat.S_ISGID | stat.S_ISVTX
fs.chmod_x(path, mode)
perms = stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO
with pytest.raises(InvalidPermissionsError):
chmod_real_entries(path, perms)
| [
"llnl.util.filesystem.chmod_x",
"spack.hooks.permissions_setters.chmod_real_entries",
"os.chmod",
"pytest.raises",
"os.stat"
] | [((525, 545), 'os.chmod', 'os.chmod', (['path', 'mode'], {}), '(path, mode)\n', (533, 545), False, 'import os\n'), ((653, 684), 'spack.hooks.permissions_setters.chmod_real_entries', 'chmod_real_entries', (['path', 'perms'], {}), '(path, perms)\n', (671, 684), False, 'from spack.hooks.permissions_setters import chmod_real_entries, InvalidPermissionsError\n'), ((908, 930), 'llnl.util.filesystem.chmod_x', 'fs.chmod_x', (['path', 'mode'], {}), '(path, mode)\n', (918, 930), True, 'import llnl.util.filesystem as fs\n'), ((557, 570), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (564, 570), False, 'import os\n'), ((996, 1034), 'pytest.raises', 'pytest.raises', (['InvalidPermissionsError'], {}), '(InvalidPermissionsError)\n', (1009, 1034), False, 'import pytest\n'), ((1044, 1075), 'spack.hooks.permissions_setters.chmod_real_entries', 'chmod_real_entries', (['path', 'perms'], {}), '(path, perms)\n', (1062, 1075), False, 'from spack.hooks.permissions_setters import chmod_real_entries, InvalidPermissionsError\n'), ((697, 710), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (704, 710), False, 'import os\n')] |
# returns number of unique records for icews with different filtering:
# -by rounded lat/lon (100,000)
# -by country, district, province, city (100,000)
# -by lat/lon, filtered by 2 or more matches (70,000)
from pymongo import MongoClient
import os
mongo_client = MongoClient(host='localhost', port=27017) # Default port
db = mongo_client.event_data
def icews_coordinates_rounded():
print(list(db.icews.aggregate([
{
"$project": {
"_id": 0,
"lat_rounded": {
"$divide": [
{
"$subtract": [
{
"$multiply": [
"$Latitude",
100
]
},
{
"$mod": [
{
"$multiply": [
"$Latitude",
100
]
},
1
]
}
]
},
100
]
},
"lon_rounded": {
"$divide": [
{
"$subtract": [
{
"$multiply": [
"$Longitude",
100
]
},
{
"$mod": [
{
"$multiply": [
"$Longitude",
100
]
},
1
]
}
]
},
100
]
}
}
},
{
"$group": {
"_id": {
"latitude": "$lat_rounded",
"longitude": "$lon_rounded"
}
}
},
{
"$count": "uniques"
}
])))
def icews_coordinates():
print(list(db.icews.aggregate([
{
"$group": {
"_id": {
"lat": "$Latitude",
"lon": "$Longitude"
}
}
},
{
"$count": "uniques"
}
])))
def icews_names():
print(list(db.icews.aggregate([
{
"$project": {
"country": {"$toLower": "$Country"},
"district": {"$toLower": "$District"},
"province": {"$toLower": "$Province"},
"city": {"$toLower": "$City"}
}
},
{
"$group": {
"_id": {
"Country": "$country",
"District": "$district",
"Province": "$province",
"City": "$city"
},
"total": {"$sum": 1}
}
},
{
"$match": {"total": {"$gt": 1}}
},
{
"$count": "uniques"
}
])))
# icews_coordinates_rounded()
icews_coordinates()
# icews_names() | [
"pymongo.MongoClient"
] | [((266, 307), 'pymongo.MongoClient', 'MongoClient', ([], {'host': '"""localhost"""', 'port': '(27017)'}), "(host='localhost', port=27017)\n", (277, 307), False, 'from pymongo import MongoClient\n')] |
import MapReduce
import sys
"""
SQL style Joins in MapReduce
"""
mr = MapReduce.MapReduce()
# =============================
# Do not modify above this line
def mapper(record):
# key: document identifier
# value: document contents
value = str(record[0])
num = str(record[1])
mr.emit_intermediate(num, (value, record))
def reducer(key, list_of_values):
# key: word
# value: list of occurrence counts
x,y = list_of_values[0]
for val in range(len(list_of_values)):
f = []
a,b = list_of_values[val]
if a == 'line_item':
f += y
f += b
mr.emit(f)
# Do not modify below this line
# =============================
if __name__ == '__main__':
inputdata = open(sys.argv[1])
mr.execute(inputdata, mapper, reducer)
| [
"MapReduce.MapReduce"
] | [((72, 93), 'MapReduce.MapReduce', 'MapReduce.MapReduce', ([], {}), '()\n', (91, 93), False, 'import MapReduce\n')] |
from anonapi.testresources import (
MockAnonClientTool,
JobInfoFactory,
RemoteAnonServerFactory,
JobStatus,
)
def test_mock_anon_client_tool():
some_responses = [
JobInfoFactory(status=JobStatus.DONE),
JobInfoFactory(status=JobStatus.ERROR),
JobInfoFactory(status=JobStatus.INACTIVE),
]
tool = MockAnonClientTool(responses=some_responses)
server = RemoteAnonServerFactory()
assert tool.get_job_info(server=server, job_id=1).status == JobStatus.DONE
assert [x.status for x in tool.get_job_info_list(server, [1, 2, 3])] == [
JobStatus.ERROR,
JobStatus.INACTIVE,
JobStatus.DONE,
]
# job id returned will always match whatever was requested
assert [x.job_id for x in tool.get_job_info_list(server, [1, 4, 6])] == [1, 4, 6]
| [
"anonapi.testresources.MockAnonClientTool",
"anonapi.testresources.JobInfoFactory",
"anonapi.testresources.RemoteAnonServerFactory"
] | [((348, 392), 'anonapi.testresources.MockAnonClientTool', 'MockAnonClientTool', ([], {'responses': 'some_responses'}), '(responses=some_responses)\n', (366, 392), False, 'from anonapi.testresources import MockAnonClientTool, JobInfoFactory, RemoteAnonServerFactory, JobStatus\n'), ((406, 431), 'anonapi.testresources.RemoteAnonServerFactory', 'RemoteAnonServerFactory', ([], {}), '()\n', (429, 431), False, 'from anonapi.testresources import MockAnonClientTool, JobInfoFactory, RemoteAnonServerFactory, JobStatus\n'), ((193, 230), 'anonapi.testresources.JobInfoFactory', 'JobInfoFactory', ([], {'status': 'JobStatus.DONE'}), '(status=JobStatus.DONE)\n', (207, 230), False, 'from anonapi.testresources import MockAnonClientTool, JobInfoFactory, RemoteAnonServerFactory, JobStatus\n'), ((240, 278), 'anonapi.testresources.JobInfoFactory', 'JobInfoFactory', ([], {'status': 'JobStatus.ERROR'}), '(status=JobStatus.ERROR)\n', (254, 278), False, 'from anonapi.testresources import MockAnonClientTool, JobInfoFactory, RemoteAnonServerFactory, JobStatus\n'), ((288, 329), 'anonapi.testresources.JobInfoFactory', 'JobInfoFactory', ([], {'status': 'JobStatus.INACTIVE'}), '(status=JobStatus.INACTIVE)\n', (302, 329), False, 'from anonapi.testresources import MockAnonClientTool, JobInfoFactory, RemoteAnonServerFactory, JobStatus\n')] |
#!/usr/bin/env python
import rospy
import rosbag
import os
import sys
import textwrap
import yaml
lidarmsg=None
################# read the lidar msg from yaml file and return ##############
def readlidardummy():
global lidarmsg
if lidarmsg==None:
lidarmsg= doreadlidar()
return lidarmsg
def doreadlidar():
global lidarmsg
print('lidardummy do read')
with open(r'/media/student/data5/AirSim/ros/src/airsim_ros_pkgs/scripts/lidar_dummy.txt') as file:
# The FullLoader parameter handles the conversion from YAML
# scalar values to Python the dictionary format
lidarmsg = yaml.load(file)
#print(fruits_list)
#print(lidarmsg['range_max']+20)
#print(lidarmsg['header']['stamp']['secs'])
ranges=lidarmsg['ranges']
#print(len(ranges), ranges)
return lidarmsg
if __name__ == '__main__':
readlidardummy()
| [
"yaml.load"
] | [((601, 616), 'yaml.load', 'yaml.load', (['file'], {}), '(file)\n', (610, 616), False, 'import yaml\n')] |
# Generated by Django 2.0.9 on 2018-12-12 08:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cycleshare', '0012_cycle_toprofile'),
]
operations = [
migrations.RemoveField(
model_name='cycle',
name='toprofile',
),
]
| [
"django.db.migrations.RemoveField"
] | [((227, 287), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""cycle"""', 'name': '"""toprofile"""'}), "(model_name='cycle', name='toprofile')\n", (249, 287), False, 'from django.db import migrations\n')] |
import discord
import asyncio
import re
import logging
from data.groups_name import free_random_name
logging.basicConfig(level=logging.INFO)
client = discord.Client()
class ClientEvents(discord.Client):
'''
Classe initialization
'''
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# create the background task and run it in the background
self.bg_task = self.loop.create_task(self.my_background_task())
async def my_background_task(self):
await self.wait_until_ready()
counter = 0
channel = self.get_channel(1234567) # channel ID goes here
while not self.is_closed():
counter += 1
await channel.send(counter)
await asyncio.sleep(60) # task runs every 60 seconds
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
''' ############### EVENTS ABOUT CHANNELS AND SERVERS MANAGEMENT ###############'''
async def on_member_join(self, member):
guild = member.guild
if guild.system_channel is not None:
to_send = 'Welcome {0.mention} to {1.name}!'.format(member, guild)
await guild.system_channel.send(to_send)
'''
Permet la création et la suppression automatique de channels
'''
async def on_voice_state_update(self, member: discord.Member, before: discord.VoiceState,
after: discord.VoiceState):
await self.wait_until_ready()
after_channel: discord.VoiceChannel = after.channel
before_channel: discord.VoiceChannel = before.channel
# We enter in a channel
if type(after_channel) is discord.VoiceChannel:
category: discord.CategoryChannel = after_channel.category
guild: discord.guild = member.guild
if "Escouade".lower() in str(category.name).lower() and (
"Créer channel").lower() == after_channel.name.lower():
team_size = re.findall(r'\d+', category.name)
if len(team_size) == 0:
return
else:
team_size = int(re.findall(r'\d+', category.name)[0])
print("Création nouveau Channel")
new_name = free_random_name(team_size, guild)
new_channel: discord.VoiceChannel = await guild.create_voice_channel(
new_name,
category=category,
user_limit=int(team_size))
await member.move_to(new_channel)
# If we quit a channel and no one else is in, deletion of the channel
if type(before_channel) is discord.VoiceChannel \
and ("Créer channel").lower() != before_channel.name.lower():
if len(before_channel.members) == 0:
await before_channel.delete(reason="Channel empty")
''' ############### EVENTS ABOUT REPLIES ON MESSAGE ###############'''
@client.event
async def on_message(self, message):
# we do not want the bot to reply to itself
if message.author.id == self.user.id:
return
if message.content.startswith('!hello'):
await message.channel.send('Hello {0.author.mention} sur le serveur {1.guild}'.format(message, message))
class CommandsClient(discord.Client):
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
async def on_member_join(self, member):
guild = member.guild
if guild.system_channel is not None:
to_send = 'Welcome {0.mention} to {1.name}!'.format(member, guild)
await guild.system_channel.send(to_send)
async def on_message(message):
if message.content.startswith('$greet'):
channel = message.channel
await channel.send('Say hello!')
def check(m):
return m.content == 'hello' and m.channel == channel
msg = await client.wait_for('message', check=check)
await channel.send('Hello {.author}!'.format(msg))
| [
"logging.basicConfig",
"data.groups_name.free_random_name",
"asyncio.sleep",
"discord.Client",
"re.findall"
] | [((102, 141), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (121, 141), False, 'import logging\n'), ((151, 167), 'discord.Client', 'discord.Client', ([], {}), '()\n', (165, 167), False, 'import discord\n'), ((757, 774), 'asyncio.sleep', 'asyncio.sleep', (['(60)'], {}), '(60)\n', (770, 774), False, 'import asyncio\n'), ((2078, 2111), 're.findall', 're.findall', (['"""\\\\d+"""', 'category.name'], {}), "('\\\\d+', category.name)\n", (2088, 2111), False, 'import re\n'), ((2353, 2387), 'data.groups_name.free_random_name', 'free_random_name', (['team_size', 'guild'], {}), '(team_size, guild)\n', (2369, 2387), False, 'from data.groups_name import free_random_name\n'), ((2237, 2270), 're.findall', 're.findall', (['"""\\\\d+"""', 'category.name'], {}), "('\\\\d+', category.name)\n", (2247, 2270), False, 'import re\n')] |
"""
Utilities
---------
The utilities module.
"""
from collections.abc import Mapping, Sequence
from functools import wraps
import types
class FrozenDict(Mapping):
"""A frozen dictionary implementation that prevents the object from being
mutated. This is primarily used when defining a dict-like object as a class
attribute that shouldn't be mutated by subclasses.
"""
def __init__(self, *args, **kwargs):
self._dict = dict(*args, **kwargs)
def copy(self):
return self._dict.copy()
def __getitem__(self, key):
return self._dict.__getitem__(key)
def __contains__(self, item):
return self._dict.__contains__(item)
def __iter__(self):
return self._dict.__iter__()
def __len__(self):
return self._dict.__len__()
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, self._dict)
def classonce(meth):
"""Decorator that executes a class method once, stores the results at the
class level, and subsequently returns those results for every future method
call.
"""
@wraps(meth)
def decorated(cls, *args, **kargs):
cached_attr = '__{0}'.format(meth.__name__)
if not hasattr(cls, cached_attr):
result = meth(cls, *args, **kargs)
setattr(cls, cached_attr, result)
return getattr(cls, cached_attr)
return decorated
def flatten(seq):
"""Flatten `seq` a single level deep."""
for item in seq:
if is_sequence(item):
for itm in item:
yield itm
else:
yield item
def is_sequence(value):
"""Test if `value` is a sequence but ``str``. This function is mainly used
to determine if `value` can be treated like a ``list`` for iteration
purposes.
"""
return (is_generator(value) or
(isinstance(value, Sequence) and not isinstance(value, str)))
def is_generator(value):
"""Return whether `value` is a generator or generator-like."""
return (isinstance(value, types.GeneratorType) or
(hasattr(value, '__iter__') and hasattr(value, '__next__') and
not hasattr(value, '__getitem__')))
| [
"functools.wraps"
] | [((1099, 1110), 'functools.wraps', 'wraps', (['meth'], {}), '(meth)\n', (1104, 1110), False, 'from functools import wraps\n')] |
try:
from . import generic as g
except BaseException:
import generic as g
class NormalsTest(g.unittest.TestCase):
def test_vertex_normal(self):
mesh = g.trimesh.creation.icosahedron()
# the icosahedron is centered at zero, so the true vertex
# normal is just a unit vector of the vertex position
truth = g.trimesh.util.unitize(mesh.vertices)
# force fallback to loop normal summing by passing None as the sparse
# matrix
normals = g.trimesh.geometry.mean_vertex_normals(len(mesh.vertices),
mesh.faces,
mesh.face_normals,
sparse=None)
self.assertTrue(g.np.allclose(normals - truth, 0.0))
# make sure the automatic sparse matrix generation works as well
normals = g.trimesh.geometry.mean_vertex_normals(len(mesh.vertices),
mesh.faces,
mesh.face_normals)
self.assertTrue(g.np.allclose(normals - truth, 0.0))
# make sure the Trimesh normals- related attributes are wired correctly
self.assertTrue(mesh.faces_sparse is not None)
self.assertTrue(mesh.vertex_normals.shape == mesh.vertices.shape)
self.assertTrue(g.np.allclose(mesh.vertex_normals - truth, 0.0))
def test_face_normals(self):
mesh = g.trimesh.creation.icosahedron()
self.assertTrue(mesh.face_normals.shape == mesh.faces.shape)
mesh.face_normals = None
self.assertTrue(mesh.face_normals.shape == mesh.faces.shape)
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
| [
"generic.trimesh.creation.icosahedron",
"generic.trimesh.util.attach_to_log",
"generic.np.allclose",
"generic.unittest.main",
"generic.trimesh.util.unitize"
] | [((1764, 1794), 'generic.trimesh.util.attach_to_log', 'g.trimesh.util.attach_to_log', ([], {}), '()\n', (1792, 1794), True, 'import generic as g\n'), ((1799, 1816), 'generic.unittest.main', 'g.unittest.main', ([], {}), '()\n', (1814, 1816), True, 'import generic as g\n'), ((174, 206), 'generic.trimesh.creation.icosahedron', 'g.trimesh.creation.icosahedron', ([], {}), '()\n', (204, 206), True, 'import generic as g\n'), ((351, 388), 'generic.trimesh.util.unitize', 'g.trimesh.util.unitize', (['mesh.vertices'], {}), '(mesh.vertices)\n', (373, 388), True, 'import generic as g\n'), ((1527, 1559), 'generic.trimesh.creation.icosahedron', 'g.trimesh.creation.icosahedron', ([], {}), '()\n', (1557, 1559), True, 'import generic as g\n'), ((801, 836), 'generic.np.allclose', 'g.np.allclose', (['(normals - truth)', '(0.0)'], {}), '(normals - truth, 0.0)\n', (814, 836), True, 'import generic as g\n'), ((1158, 1193), 'generic.np.allclose', 'g.np.allclose', (['(normals - truth)', '(0.0)'], {}), '(normals - truth, 0.0)\n', (1171, 1193), True, 'import generic as g\n'), ((1429, 1476), 'generic.np.allclose', 'g.np.allclose', (['(mesh.vertex_normals - truth)', '(0.0)'], {}), '(mesh.vertex_normals - truth, 0.0)\n', (1442, 1476), True, 'import generic as g\n')] |
# /***********************************************************************
# Copyright 2019 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Note that these code samples being shared are not official Google
# products and are not formally supported.
# ************************************************************************/
import os
from typing import Union
import yaml
from yaml.parser import ParserError
from collections.abc import Iterable
from enum import EnumMeta
from typing import ClassVar
from typing import Dict
from typing import Generic
from typing import List
from typing import TypeVar
from absl import flags
from prompt_toolkit import ANSI
from prompt_toolkit import prompt
from prompt_toolkit.shortcuts import CompleteStyle
from termcolor import colored
from termcolor import cprint
from flagmaker.building_blocks import list_to_string_list
from flagmaker.exceptions import FlagMakerPromptInterruption
from flagmaker.validators import ChoiceValidator
from .building_blocks import SettingOptionInterface
from .building_blocks import SettingsInterface
from .building_blocks import Value
from .exceptions import FlagMakerConfigurationError
from .exceptions import FlagMakerInputError
from .hints import StringKeyDict
from .sanity import Validator
# Module-wide absl flag registry; every SettingOption registers into it and
# AbstractSettings reads parsed values back out of it.
FLAGS = flags.FLAGS
# Type variable for the concrete SettingsInterface subclass being configured;
# parameterizes SettingOption[T] and Config[T].
T = TypeVar('T', bound=SettingsInterface)
class SettingConfig(object):
    """Process-wide cache of accepted setting values.

    ``cache_dict`` is intentionally a shared class attribute: every
    SettingOption records its accepted answer here, and AbstractSettings
    flushes the mapping to ``cache_file`` as YAML so later runs can offer
    previous answers as defaults.
    """
    # BUGFIX: os.environ['HOME'] raised KeyError on platforms that do not
    # set HOME (e.g. Windows); os.path.expanduser('~') resolves the home
    # directory portably and yields the same path on Unix.
    cache_file: str = '{}/.sa360bq'.format(os.path.expanduser('~'))
    cache_dict: dict = {}
class SettingOption(SettingOptionInterface, Generic[T]):
    """A single configurable setting backed by an absl flag.

    A SettingOption knows how to define its flag (``method``), how to prompt
    the user interactively (with default/cached hints and enum choices), how
    to convert and validate input, and which callback to run after a value is
    accepted.  Instances are built via :meth:`create` and grouped inside
    :class:`SettingBlock` objects.

    Two prompt-rendering fixes versus the previous revision:

    * ``get_prompt`` used ``self.prompt is str`` — an identity test against
      the *type* object — so plain-string prompts were silently dropped.
    * ``get_option_prompt`` received ``prompt_val`` but never interpolated it
      (``str.format`` ignores extra positional arguments), so custom prompts
      were lost for enum settings.
    """
    settings: T = None                  # owning settings object
    default = None                      # static fallback value
    cache = None                        # answer recovered from the YAML cache
    help = None                         # toolbar help text for the prompt
    method: callable = None             # absl definer, e.g. flags.DEFINE_string
    _value: Value = None                # holder for the current value
    required: bool = False              # keep prompting until a value is given
    validation: callable = None         # extra validation hook (see Validator)
    conditional: callable = None        # gate: only prompt when truthy
    after: callable = None              # callback fired once a value is set
    prompt: Union[callable, str]        # extra prompt text, or a factory for it
    custom_data: StringKeyDict          # free-form per-setting payload
    include_in_interactive: bool = True
    called: dict                        # guards `after` from repeat invocation
    _options: EnumMeta = None           # Enum listing the allowed choices
    _error: bool = False                # set by hooks to force a re-prompt
    attrs: dict                         # arbitrary extra attributes

    def __init__(self):
        self._value = Value()
        self.called = {}

    @classmethod
    def create(cls, settings: T, helptext=None, default=None,
               method=flags.DEFINE_string, required=True, validation=None,
               conditional=None, after=None, prompt=None,
               include_in_interactive=True, options=None, attrs=None):
        """Factory: build a fully populated SettingOption."""
        if options is None:
            options = []
        fl = cls()
        fl.settings = settings
        fl.default = default
        fl.help = helptext
        fl.method = method
        fl.required = required
        fl.validation = validation
        fl.conditional = conditional
        fl.after = after
        fl.prompt = prompt
        fl.include_in_interactive = include_in_interactive
        fl._options = options
        fl.attrs = attrs or {}
        return fl

    @property
    def options(self):
        """Allowed choice values (from the Enum), or None when unrestricted."""
        return (list(map(lambda x: x.value, self._options))
                if self._options is not None else None)

    def get_prompt(self, k):
        """Render the interactive prompt string for flag *k*."""
        d = self.get_default_or_cache()
        # Booleans render as 0/1 so the hint matches what the user may type.
        default = ' [default={0}]'.format(
            d if not isinstance(d, bool) else int(d)
        ) if d is not None else ''
        prompt_val = ''
        if self.prompt is not None:
            prompt_val += '\n'
            # BUGFIX: was `self.prompt is str`, which compared against the
            # builtin type object and never matched an actual string.
            if isinstance(self.prompt, str):
                prompt_val += self.prompt
            if callable(self.prompt):
                prompt_val += self.prompt(self)
            prompt_val += '\nInput'
        if self.method != flags.DEFINE_enum:
            method = self.get_basic_prompt
        else:
            method = self.get_option_prompt
        return method(k, default, prompt_val)

    def get_option_prompt(self, k, default, prompt_val):
        """Prompt text for enum settings: name, choice list and default hint."""
        if not isinstance(self._options, EnumMeta):
            raise FlagMakerConfigurationError('Need to add options for ' + k)
        options = list_to_string_list(self.options)
        # BUGFIX: prompt_val was passed but the format string had no {4},
        # so str.format silently discarded it for enum settings.
        return (
            '{0}\n'
            '{1}\n'
            '{2}\n'
            'Choices{3}{4}: '
        ).format(
            k,
            colored('Options:', attrs=['underline']),
            options,
            default, prompt_val
        )

    def get_basic_prompt(self, k, default, prompt_val):
        """Prompt text for free-form (non-enum) settings."""
        return '{}{}{}'.format(k, default, prompt_val)

    @property
    def value(self):
        return self._value.get_val()

    @value.setter
    def value(self, value):
        # Retry until the value converts/validates; configuration errors are
        # reported in red and the assignment is attempted again.
        while True:
            try:
                self._set_value(value)
                break
            except FlagMakerConfigurationError as err:
                cprint(str(err), 'red')

    def _set_value(self, value):
        """Convert *value* per the flag type, store it, and fire `after` once."""
        if value is None:
            self._value.set_val(None)
            return
        if self.method == flags.DEFINE_boolean:
            if value in ['1', 'true', 'True', True]:
                value = True
            elif value in ['0', 'false', 'False', False]:
                value = False
        elif self.method == flags.DEFINE_integer:
            value = int(value)
        elif self.method == flags.DEFINE_enum:
            options = self.options
            is_iterable = isinstance(options, Iterable)
            if not (is_iterable and value in options):
                raise FlagMakerInputError(
                    'Need to choose one of [{}]'.format(', '.join(options))
                )
        self._value.set_val(value)

        # perform actions: run the `after` hook at most once per
        # (setting, hook) pair.
        if self.after is None:
            self._error = False
            return
        in_called = (self, self.after) not in self.called
        if in_called:
            self.called[(self, self.after)] = True
            self.after(self)

    def get_default_or_cache(self) -> str:
        """Cached answer when available, otherwise the static default."""
        if self.cache is not None:
            default_or_cache = self.cache
        else:
            default_or_cache = self.default
        return default_or_cache

    def set_value(self, key: str = '', value: str = '',
                  ask: str = '', init: str = ''):
        """Assign a value through exactly one channel.

        ``init``: programmatic/flag value (None means "leave unset");
        ``value``: direct assignment; ``ask``: interactive prompt text.
        Loops until validation passes and, for required settings, a value
        was actually supplied.  Interactive answers are written into
        ``SettingConfig.cache_dict`` under *key*.
        """
        while True:
            num_opts = int(value != '') + int(ask != '') + int(init != '')
            if num_opts != 1:
                raise FlagMakerInputError('Need to choose either '
                                          'init, value or ask')
            default_or_cache = self.get_default_or_cache()
            if init is None:
                return
            elif init != '':
                self.value = init
                return
            if ask != '':
                if ask is None:
                    # we intentionally set ask to None. A conditional prompt
                    # doesn't want this to continue
                    return
                kwargs = {
                    'bottom_toolbar': ANSI(self.help)
                }
                if self.method == flags.DEFINE_enum:
                    # Choices are entered as 1-based indexes; empty input
                    # falls back to the default/cached answer.
                    choices = [str(i[0])
                               for i in
                               enumerate(self.options, start=1)]
                    kwargs['validator'] = ChoiceValidator(choices)
                    kwargs['complete_style'] = CompleteStyle.READLINE_LIKE
                    selection = prompt(ANSI(ask), **kwargs)
                    if selection == '':
                        val = default_or_cache
                    else:
                        val = self.options[int(selection)-1]
                elif self.method == flags.DEFINE_multi_string:
                    # Collect values until an empty line terminates the list.
                    val = []
                    i = 0
                    while True:
                        i += 1
                        res = prompt(ANSI(
                            "{} #{} (Empty Value to finish): ".format(ask, i)
                        ), **kwargs)
                        if res == '':
                            break
                        val.append(res)
                else:
                    val = prompt(ANSI(ask + ": "), **kwargs)
                if val == '' and default_or_cache is not None:
                    self.value = default_or_cache
                else:
                    self.value = val
                if self.value is not None:
                    SettingConfig.cache_dict[key] = self.value
            else:
                self.value = value
            if not Validator.validate(self) or self._error:
                continue
            if self.value_explicitly_set() or not self.required:
                return
            else:
                cprint('Required Field', 'red')

    def value_explicitly_set(self) -> bool:
        return bool(self._value)

    def maybe_needs_input(self):
        """True when unset and the conditional (if any) allows prompting."""
        return not self.value_explicitly_set() and (
            self.conditional is None or self.conditional(self.settings))

    def get_method(self):
        return self.method

    def __str__(self):
        return self.value or ''

    def __repr__(self):
        return '[{0}: {1}]'.format(
            self.default,
            str(self.value) if self.value else '',
        )

    def __bool__(self):
        return bool(self.value)

    def __index__(self):
        return self.value

    def __getitem__(self, item) -> SettingOptionInterface:
        return self.value.__getitem__(item)
class SettingBlock:
    """A titled group of related settings, shown together during setup."""

    def __init__(self, block: str,
                 settings: Dict[str, SettingOption],
                 conditional: callable = None):
        """Record the block title, its option mapping and an optional gate."""
        self.conditional = conditional
        self.settings = settings
        self.name = block

    def get(self):
        """Print the block title followed by a bold separator rule."""
        cprint(f'{self.name}', None, attrs=['bold'])
        cprint('==========================', attrs=['bold'])
class AbstractSettings(SettingsInterface):
    """Settings Base Class

    Loaded from the Config class. Used to generate flags for an app.

    Subclasses implement ``settings()`` returning SettingBlock objects;
    ``install()`` registers the matching absl flags, and ``load_settings()``
    resolves each value from (in order) parsed flags, the on-disk YAML
    cache, non-interactive defaults, and finally interactive prompts.
    """
    # NOTE(review): class-level attributes — ``flattened_args`` is a shared
    # mutable dict, so every instance sees the same mapping.  Fine for the
    # intended single-settings-object usage; surprising with two instances.
    args: List[SettingBlock] = None
    flattened_args: dict = {}

    def start(self):
        """Bootstraps the settings loading process

        Called from load_settings. Should not be called directly.
        Seeds each setting from its parsed command-line flag (or None) and
        builds the flat name -> SettingOption index.
        """
        for block in self.args:
            for k, s in block.settings.items():
                s.set_value(init=FLAGS.get_flag_value(k, None))
                self.flattened_args[k] = s

    def load_settings(self):
        """Resolve every setting's value and persist answers to the cache.

        Returns self so the call can be chained.
        """
        self.start()
        first = True
        # 'interactive' is the system flag added by get_settings(); pop it
        # so it is never prompted for alongside user settings.
        interactive_mode = self.args[0].settings.pop('interactive')
        cache: dict = {}
        if os.path.exists(SettingConfig.cache_file):
            try:
                with open(SettingConfig.cache_file, 'r') as fh:
                    cache = yaml.load(
                        fh.read(), Loader=yaml.Loader
                    ) or {}
            except ParserError:
                # Corrupt cache file: start fresh and delete it.
                cache = {}
                os.remove(SettingConfig.cache_file)
        for block in self.args:
            header_shown = False
            # A block-level conditional can suppress the whole block.
            if block.conditional is not None and not block.conditional(self):
                continue
            for k, setting in block.settings.items():
                setting.cache = cache[k] if k in cache else None
                if setting.maybe_needs_input():
                    # Outside interactive mode, silently accept defaults.
                    if not interactive_mode and setting.default:
                        setting.set_value(k, init=setting.default)
                        continue
                    if first:
                        cprint('Interactive Setup', attrs=['bold', 'underline'])
                        cprint(
                            '===============================',
                            attrs=['bold'],
                        )
                        first = False
                    if not header_shown:
                        cprint(block.name, attrs=['underline'])
                        header_shown = True
                    if setting.include_in_interactive:
                        try:
                            setting.set_value(k, ask=setting.get_prompt(k))
                        except FlagMakerPromptInterruption as err:
                            # A prompt hook may short-circuit with a value.
                            setting.set_value(k, value=err.value)
        # Persist every accepted answer for the next run.
        with open(SettingConfig.cache_file, 'w+') as fh:
            fh.write(yaml.dump(
                SettingConfig.cache_dict, Dumper=yaml.Dumper
            ))
        return self

    def assign_flags(self) -> flags:
        """Register one absl flag per setting; returns the FLAGS registry."""
        for block in self.args:
            for k, setting in block.settings.items():
                kwargs = {
                    'default': None,
                }
                if not setting.include_in_interactive:
                    kwargs['default'] = setting.default
                if setting.method == flags.DEFINE_enum:
                    kwargs['enum_values'] = setting.options
                setting.method(k, help=setting.help, **kwargs)
                self.flattened_args[k] = setting
        return FLAGS

    def __getitem__(self, item):
        # Dict-style access to a SettingOption by flag name.
        return self.flattened_args[item]

    def get_settings(self):
        """Prepend the system settings block to the subclass's blocks."""
        # system settings
        settings = [SettingBlock('System Settings', {
            'interactive': SettingOption.create(
                self,
                'Enter Interactive Mode even to verify default values',
                default=False,
                include_in_interactive=False,
                method=flags.DEFINE_bool,
            ),
        })]
        settings += self.settings()
        return settings

    def install(self):
        """Build the block list and register all flags (call before app.run)."""
        self.args = self.get_settings()
        self.assign_flags()

    def __repr__(self):
        return str(self.args)

    def __enter__(self):
        # Context manager: __exit__ flushes the answer cache to disk.
        return self

    def __exit__(self, err, value, traceback):
        with open(SettingConfig.cache_file, 'a+') as fh:
            fh.write(yaml.dump(
                SettingConfig.cache_dict, Dumper=yaml.Dumper
            ))
# Annotation alias for variables that hold a settings *class* (not instance).
AbstractSettingsClass = ClassVar[T]
class Config(Generic[T]):
    """Bootstraps a SettingsInterface subclass into the absl flag system.

    Example: ``Config(MySettingsClass)`` — constructing the Config registers
    every flag the settings class declares; call :meth:`get` after
    ``app.run()`` has parsed the command line to obtain the loaded instance.
    """
    def __init__(self, s: ClassVar[T]):
        self.s: ClassVar[T] = s
        instance = s()
        instance.install()
        self.instance = instance

    def get(self) -> T:
        """Load and return the settings instance (flags must be parsed)."""
        if FLAGS.is_parsed():
            with self.instance as live_settings:
                live_settings.load_settings()
            return self.instance
        raise FlagMakerConfigurationError(
            'Do not call this method until after app.run()'
        )
| [
"os.path.exists",
"termcolor.colored",
"yaml.dump",
"flagmaker.building_blocks.list_to_string_list",
"os.remove",
"flagmaker.validators.ChoiceValidator",
"prompt_toolkit.ANSI",
"termcolor.cprint",
"typing.TypeVar"
] | [((1802, 1839), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': 'SettingsInterface'}), "('T', bound=SettingsInterface)\n", (1809, 1839), False, 'from typing import TypeVar\n'), ((4345, 4378), 'flagmaker.building_blocks.list_to_string_list', 'list_to_string_list', (['self.options'], {}), '(self.options)\n', (4364, 4378), False, 'from flagmaker.building_blocks import list_to_string_list\n'), ((9928, 9980), 'termcolor.cprint', 'cprint', (['"""=========================="""'], {'attrs': "['bold']"}), "('==========================', attrs=['bold'])\n", (9934, 9980), False, 'from termcolor import cprint\n'), ((10712, 10752), 'os.path.exists', 'os.path.exists', (['SettingConfig.cache_file'], {}), '(SettingConfig.cache_file)\n', (10726, 10752), False, 'import os\n'), ((4528, 4568), 'termcolor.colored', 'colored', (['"""Options:"""'], {'attrs': "['underline']"}), "('Options:', attrs=['underline'])\n", (4535, 4568), False, 'from termcolor import colored\n'), ((8842, 8873), 'termcolor.cprint', 'cprint', (['"""Required Field"""', '"""red"""'], {}), "('Required Field', 'red')\n", (8848, 8873), False, 'from termcolor import cprint\n'), ((12403, 12458), 'yaml.dump', 'yaml.dump', (['SettingConfig.cache_dict'], {'Dumper': 'yaml.Dumper'}), '(SettingConfig.cache_dict, Dumper=yaml.Dumper)\n', (12412, 12458), False, 'import yaml\n'), ((13928, 13983), 'yaml.dump', 'yaml.dump', (['SettingConfig.cache_dict'], {'Dumper': 'yaml.Dumper'}), '(SettingConfig.cache_dict, Dumper=yaml.Dumper)\n', (13937, 13983), False, 'import yaml\n'), ((7161, 7176), 'prompt_toolkit.ANSI', 'ANSI', (['self.help'], {}), '(self.help)\n', (7165, 7176), False, 'from prompt_toolkit import ANSI\n'), ((7436, 7460), 'flagmaker.validators.ChoiceValidator', 'ChoiceValidator', (['choices'], {}), '(choices)\n', (7451, 7460), False, 'from flagmaker.validators import ChoiceValidator\n'), ((11031, 11066), 'os.remove', 'os.remove', (['SettingConfig.cache_file'], {}), '(SettingConfig.cache_file)\n', (11040, 11066), False, 
'import os\n'), ((7575, 7584), 'prompt_toolkit.ANSI', 'ANSI', (['ask'], {}), '(ask)\n', (7579, 7584), False, 'from prompt_toolkit import ANSI\n'), ((11623, 11679), 'termcolor.cprint', 'cprint', (['"""Interactive Setup"""'], {'attrs': "['bold', 'underline']"}), "('Interactive Setup', attrs=['bold', 'underline'])\n", (11629, 11679), False, 'from termcolor import cprint\n'), ((11704, 11761), 'termcolor.cprint', 'cprint', (['"""==============================="""'], {'attrs': "['bold']"}), "('===============================', attrs=['bold'])\n", (11710, 11761), False, 'from termcolor import cprint\n'), ((11948, 11987), 'termcolor.cprint', 'cprint', (['block.name'], {'attrs': "['underline']"}), "(block.name, attrs=['underline'])\n", (11954, 11987), False, 'from termcolor import cprint\n'), ((8276, 8292), 'prompt_toolkit.ANSI', 'ANSI', (["(ask + ': ')"], {}), "(ask + ': ')\n", (8280, 8292), False, 'from prompt_toolkit import ANSI\n')] |
# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2018-2020 azai/Rgveda/GolemQuant base on QUANTAXIS/yutiansut
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import json
import sys
import websocket
from datetime import datetime as dt, timezone, timedelta, date
import datetime
import time as timer
import numba as nb
import traceback
try:
import easyquotation
easyquotation_not_install = False
except:
easyquotation_not_install = True
try:
import QUANTAXIS as QA
from QUANTAXIS.QAUtil.QAParameter import ORDER_DIRECTION
from QUANTAXIS.QAUtil.QASql import QA_util_sql_mongo_sort_ASCENDING
from QUANTAXIS.QAUtil.QADate_trade import (
QA_util_if_tradetime,
QA_util_get_pre_trade_date,
QA_util_get_real_date,
trade_date_sse
)
from QUANTAXIS.QAData.QADataStruct import (
QA_DataStruct_Index_min,
QA_DataStruct_Index_day,
QA_DataStruct_Stock_day,
QA_DataStruct_Stock_min
)
from QUANTAXIS.QAIndicator.talib_numpy import *
from QUANTAXIS.QAUtil.QADate_Adv import (
QA_util_timestamp_to_str,
QA_util_datetime_to_Unix_timestamp,
QA_util_print_timestamp
)
from QUANTAXIS.QAUtil import (
DATABASE,
QASETTING,
QA_util_log_info,
QA_util_log_debug,
QA_util_log_expection,
QA_util_to_json_from_pandas
)
except:
print('PLEASE run "pip install QUANTAXIS" before call GolemQ.cli.sub modules')
pass
try:
from GolemQ.utils.parameter import (
AKA,
INDICATOR_FIELD as FLD,
TREND_STATUS as ST,
)
except:
    class AKA():
        """Fallback constants for trend-status / indicator column names.

        Used only when GolemQ.utils.parameter cannot be imported.  Defining
        the well-known column names as constants avoids the spelling
        mistakes that raw string literals would silently allow.
        """
        # candlestick (OHLCV) column names
        CODE = 'code'
        NAME = 'name'
        OPEN = 'open'
        HIGH = 'high'
        LOW = 'low'
        CLOSE = 'close'
        VOLUME = 'volume'
        VOL = 'vol'
        DATETIME = 'datetime'
        LAST_CLOSE = 'last_close'
        PRICE = 'price'
        SYSTEM_NAME = 'GolemQuant'

        def __setattr__(self, name, value):
            # Instances are read-only: any attribute assignment raises.
            raise Exception(u'Const Class can\'t allow to change property\' value.')
            # NOTE(review): unreachable — the raise above always fires first.
            return super().__setattr__(name, value)
from GolemQ.utils.symbol import (
normalize_code
)
def formater_l1_tick(code: str, l1_tick: dict) -> dict:
    """Normalize one raw L1 quote dict (sina/tdx field names) in place.

    Adds ``code``/``servertime``/``datetime``/``price``/``vol`` keys, drops
    the raw ``date``/``time``/``now``/``name``/``volume`` keys, and returns
    the same (mutated) dict.
    """
    if len(code) == 6 and code.startswith('00'):
        # presumably the latest price lets normalize_code disambiguate
        # '00'-prefixed six-digit symbols — TODO confirm against the helper.
        l1_tick['code'] = normalize_code(code, l1_tick['now'])
    else:
        l1_tick['code'] = normalize_code(code)
    l1_tick['servertime'] = l1_tick['time']
    l1_tick['datetime'] = f"{l1_tick['date']} {l1_tick['time']}"
    l1_tick['price'] = l1_tick['now']
    l1_tick['vol'] = l1_tick['volume']
    for stale_key in ('date', 'time', 'now', 'name', 'volume'):
        del l1_tick[stale_key]
    return l1_tick
def formater_l1_ticks(l1_ticks: dict, codelist: list = None, stacks=None, symbol_list=None) -> tuple:
    """Normalize a snapshot of raw L1 quotes into a list of tick dicts.

    Args:
        l1_ticks: mapping of raw symbol -> raw quote dict.
        codelist: optional whitelist of raw symbols to keep.
        stacks: optional existing result list to append to (for merging a
            second snapshot pass into the first).
        symbol_list: normalized codes already collected, used to deduplicate.

    Returns:
        ``(l1_ticks_data, symbol_list)`` — the (possibly extended) list of
        normalized tick dicts and the parallel list of seen codes.
        (BUGFIX: the annotation previously claimed ``dict``.)
    """
    if stacks is None:
        l1_ticks_data = []
        symbol_list = []
    else:
        l1_ticks_data = stacks
        if symbol_list is None:
            # BUGFIX: passing stacks without symbol_list used to crash with
            # "TypeError: argument of type 'NoneType' is not iterable" below.
            symbol_list = []
    # Hoist the whitelist into a set for O(1) membership tests.
    allowed = None if codelist is None else set(codelist)
    for code, l1_tick_values in l1_ticks.items():
        if allowed is None or code in allowed:
            l1_tick = formater_l1_tick(code, l1_tick_values)
            if l1_tick['code'] not in symbol_list:
                l1_ticks_data.append(l1_tick)
                symbol_list.append(l1_tick['code'])
    return l1_ticks_data, symbol_list
@nb.jit(nopython=True)
def formater_l1_ticks_jit(l1_ticks: dict) -> dict:
    """Experimental jitted variant of the L1 tick normalizer — DO NOT USE.

    Kept verbatim: the original author explicitly marked this as an
    unfinished experiment ("I dig pits on purpose; this function is not
    fully debugged").  The goal was to normalize sina/tdx L1 tick fields
    fast enough (each snapshot must be processed within ~2 seconds), with
    Cython or numba JIT as candidate optimizations.

    NOTE(review): known-broken as written — ``namedtuple`` instances do not
    support item assignment, so every ``l1_tick[...] = ...`` line would
    raise, and numba's nopython mode cannot compile this dict/namedtuple
    code in the first place.
    """
    l1_ticks_data = []
    for code in l1_ticks:
        l1_tick = namedtuple('l1_tick', l1_ticks[code])
        # formater_l1_tick_jit(code, l1_tick)
        # l1_tick = formater_l1_tick(code, l1_ticks[code])
        # l1_data = pd.DataFrame(l1_tick, index=['datetime'])
        # l1_data['code'] = code
        # l1_data = l1_data.rename({'time':'servertime', 'now':'price'})
        # l1_tick = namedtuple('l1_tick', l1_tick._fields+('code',))
        l1_tick['code'] = code
        l1_tick['servertime'] = l1_tick['time']
        l1_tick['datetime'] = '{} {}'.format(l1_tick['date'], l1_tick['time'])
        l1_tick['price'] = l1_tick['now']
        l1_tick['vol'] = l1_tick['volume']
        del l1_tick['date']
        del l1_tick['time']
        del l1_tick['now']
        del l1_tick['name']
        del l1_tick['volume']
        # del l1_tick['name']
        # print(l1_tick)
        # return l1_tick
        l1_ticks_data.append(l1_tick)
    return l1_ticks_data
def sub_l1_from_sina():
    """Poll sina L1 snapshot quotes (~every 2s) and persist them to MongoDB.

    Writes each new tick batch into a per-day collection
    ``realtime_<date>`` of the ``QAREALTIME`` database, deduplicating on the
    batch's newest (code, datetime) pair.  Loops until the 16:30 session
    cutoff, then parks ~11 hours so an external restart wrapper can relaunch
    it the next morning.

    NOTE(review): the original author recommends hosting MongoDB on a fast
    SSD; one batch was measured to persist in < 0.6s on their hardware.
    """
    client = QASETTING.client['QAREALTIME']
    if easyquotation_not_install:
        print(u'PLEASE run "pip install easyquotation" before call GolemQ.cli.sub modules')
        return

    def collections_of_today():
        # One collection per calendar day, indexed for (code, datetime).
        database = client.get_collection('realtime_{}'.format(datetime.date.today()))
        database.create_index([('code', QA_util_sql_mongo_sort_ASCENDING)])
        database.create_index([('datetime', QA_util_sql_mongo_sort_ASCENDING)])
        database.create_index([("code",
                                QA_util_sql_mongo_sort_ASCENDING),
                               ("datetime",
                                QA_util_sql_mongo_sort_ASCENDING)],
                              # unique=True,
                              )
        return database

    def session_marks():
        # (close, open, date-rollover) timestamps for the current day;
        # recomputed each loop so a midnight rollover is noticed.
        today = str(dt.now().date())
        return (dt.strptime(today + ' 16:30', '%Y-%m-%d %H:%M'),
                dt.strptime(today + ' 09:15', '%Y-%m-%d %H:%M'),
                dt.strptime(today + ' 01:00', '%Y-%m-%d %H:%M'))

    quotation = easyquotation.use('sina')  # sina ['sina'] / tencent ['tencent', 'qq']
    sleep_time = 2.0
    sleep = int(sleep_time)
    _time1 = dt.now()
    database = collections_of_today()
    get_once = True

    end_time, start_time, day_changed_time = session_marks()
    while dt.now() < end_time:
        end_time, start_time, day_changed_time = session_marks()
        _time = dt.now()
        if QA_util_if_tradetime(_time) and (dt.now() < day_changed_time):
            # Date changed overnight: switch to the new day's collection.
            print(u'当前日期更新~! {} '.format(datetime.date.today()))
            database = collections_of_today()
            print(u'Not Trading time 现在是中国A股收盘时间 {}'.format(_time))
            timer.sleep(sleep)
            continue
        symbol_list = []
        l1_ticks_data = []
        if QA_util_if_tradetime(_time) or get_once:
            l1_ticks = quotation.market_snapshot(prefix=True)
            l1_ticks_data, symbol_list = formater_l1_ticks(l1_ticks)
            if (dt.now() < start_time) or \
                ((len(l1_ticks_data) > 0) and
                 (dt.strptime(l1_ticks_data[-1]['datetime'],
                              '%Y-%m-%d %H:%M:%S') <
                  dt.strptime(str(dt.now().date()) + ' 00:00',
                              '%Y-%m-%d %H:%M'))):
                # Pre-open, or the feed still serves yesterday's data.
                print(u'Not Trading time 现在是中国A股收盘时间 {}'.format(_time))
                timer.sleep(sleep)
                continue
            # Second pass without exchange prefixes adds index quotes.
            l1_ticks = quotation.market_snapshot(prefix=False)
            l1_ticks_data, symbol_list = formater_l1_ticks(l1_ticks,
                                                           stacks=l1_ticks_data,
                                                           symbol_list=symbol_list)
            # Is the batch's newest timestamp already stored for these codes?
            query_id = {
                "code": {
                    '$in': list(set([l1_tick['code'] for l1_tick in l1_ticks_data]))
                },
                "datetime": sorted(list(set([l1_tick['datetime'] for l1_tick in l1_ticks_data])))[-1]
            }
            refcount = database.count_documents(query_id)
            if refcount > 0:
                if len(l1_ticks_data) > 1:
                    # Replace the duplicated batch wholesale.
                    database.delete_many(query_id)
                    database.insert_many(l1_ticks_data)
                else:
                    # Single record: update it in place.
                    database.replace_one(query_id, l1_ticks_data[0])
            else:
                # Fresh timestamp: plain bulk insert.
                database.insert_many(l1_ticks_data)
            if not get_once:
                print(u'Trading time now 现在是中国A股交易时间 {}\nProcessing ticks data cost:{:.3f}s'.format(
                    dt.now(), (dt.now() - _time).total_seconds()))
                if (dt.now() - _time).total_seconds() < sleep:
                    # Pace the loop to roughly one poll per `sleep` seconds.
                    timer.sleep(sleep - (dt.now() - _time).total_seconds())
                print('Program Last Time {:.3f}s'.format((dt.now() - _time1).total_seconds()))
            get_once = False
        else:
            print(u'Not Trading time 现在是中国A股收盘时间 {}'.format(_time))
            timer.sleep(sleep)

    # 16:30-17:00 window: hook point for an end-of-day download (QUANTAXIS
    # `save X`).  Deliberately left empty — a blocked save here would stall
    # the unattended collector and break the next day's session.
    save_time = dt.strptime(str(dt.now().date()) + ' 17:00', '%Y-%m-%d %H:%M')
    if (dt.now() > end_time) and \
        (dt.now() < save_time):
        pass

    # Park ~11 hours; the outer supervisor script restarts us each morning.
    print(u'While循环每天下午5点自动结束,在此等待13小时,大概早上六点结束程序自动重启,这样只要窗口不关,永远每天自动收取 tick')
    timer.sleep(40000)
def sub_codelist_l1_from_sina(codelist: list = None):
    """Poll sina L1 quotes restricted to *codelist* and persist to MongoDB.

    Same polling loop as :func:`sub_l1_from_sina`, but only symbols in
    *codelist* are kept (None keeps everything).  Writes into the default
    QUANTAXIS ``DATABASE``'s per-day ``realtime_<date>`` collection, loops
    until the 16:30 cutoff, then parks ~11 hours for an external restarter.
    """
    if easyquotation_not_install:
        # Consistency fix: sub_l1_from_sina has this guard; without it the
        # easyquotation.use() call below raised NameError when the package
        # was missing.
        print(u'PLEASE run "pip install easyquotation" before call GolemQ.cli.sub modules')
        return

    def collections_of_today():
        # One collection per calendar day, indexed for (code, datetime).
        database = DATABASE.get_collection('realtime_{}'.format(datetime.date.today()))
        database.create_index([('code', QA_util_sql_mongo_sort_ASCENDING)])
        database.create_index([('datetime', QA_util_sql_mongo_sort_ASCENDING)])
        database.create_index([("code",
                                QA_util_sql_mongo_sort_ASCENDING),
                               ("datetime",
                                QA_util_sql_mongo_sort_ASCENDING)],
                              # unique=True,
                              )
        return database

    def session_marks():
        # (close, open, date-rollover) timestamps for the current day.
        today = str(dt.now().date())
        return (dt.strptime(today + ' 16:30', '%Y-%m-%d %H:%M'),
                dt.strptime(today + ' 09:15', '%Y-%m-%d %H:%M'),
                dt.strptime(today + ' 01:00', '%Y-%m-%d %H:%M'))

    quotation = easyquotation.use('sina')  # sina ['sina'] / tencent ['tencent', 'qq']
    sleep_time = 2.0
    sleep = int(sleep_time)
    _time1 = dt.now()
    database = collections_of_today()
    get_once = True

    end_time, start_time, day_changed_time = session_marks()
    while dt.now() < end_time:
        end_time, start_time, day_changed_time = session_marks()
        _time = dt.now()
        if QA_util_if_tradetime(_time) and (dt.now() < day_changed_time):
            # Date changed overnight: switch to the new day's collection.
            print(u'当前日期更新~! {} '.format(datetime.date.today()))
            database = collections_of_today()
            print(u'Not Trading time 现在是中国A股收盘时间 {}'.format(_time))
            timer.sleep(sleep)
            continue
        if QA_util_if_tradetime(_time) or get_once:
            l1_ticks = quotation.market_snapshot(prefix=True)
            l1_ticks_data, symbol_list = formater_l1_ticks(l1_ticks, codelist=codelist)
            if (dt.now() < start_time) or \
                ((len(l1_ticks_data) > 0) and
                 (dt.strptime(l1_ticks_data[-1]['datetime'],
                              '%Y-%m-%d %H:%M:%S') <
                  dt.strptime(str(dt.now().date()) + ' 00:00',
                              '%Y-%m-%d %H:%M'))):
                # Pre-open, or the feed still serves yesterday's data.
                print(u'Not Trading time 现在是中国A股收盘时间 {}'.format(_time))
                timer.sleep(sleep)
                continue
            # Second pass without exchange prefixes adds index quotes.
            l1_ticks = quotation.market_snapshot(prefix=False)
            l1_ticks_data, symbol_list = formater_l1_ticks(l1_ticks,
                                                           codelist=codelist,
                                                           stacks=l1_ticks_data,
                                                           symbol_list=symbol_list)
            # Is the batch's newest timestamp already stored for these codes?
            query_id = {
                "code": {
                    '$in': list(set([l1_tick['code'] for l1_tick in l1_ticks_data]))
                },
                "datetime": sorted(list(set([l1_tick['datetime'] for l1_tick in l1_ticks_data])))[-1]
            }
            refcount = database.count_documents(query_id)
            if refcount > 0:
                if len(l1_ticks_data) > 1:
                    # Replace the duplicated batch wholesale.
                    database.delete_many(query_id)
                    database.insert_many(l1_ticks_data)
                else:
                    # Single record: update it in place.
                    database.replace_one(query_id, l1_ticks_data[0])
            else:
                # Fresh timestamp: plain bulk insert.
                database.insert_many(l1_ticks_data)
            if not get_once:
                print(u'Trading time now 现在是中国A股交易时间 {}\nProcessing ticks data cost:{:.3f}s'.format(
                    dt.now(), (dt.now() - _time).total_seconds()))
                if (dt.now() - _time).total_seconds() < sleep:
                    # Pace the loop to roughly one poll per `sleep` seconds.
                    timer.sleep(sleep - (dt.now() - _time).total_seconds())
                print('Program Last Time {:.3f}s'.format((dt.now() - _time1).total_seconds()))
            get_once = False
        else:
            print(u'Not Trading time 现在是中国A股收盘时间 {}'.format(_time))
            timer.sleep(sleep)

    # 16:30-17:00 window: hook point for an end-of-day download (QUANTAXIS
    # `save X`).  Deliberately left empty — a blocked save here would stall
    # the unattended collector and break the next day's session.
    save_time = dt.strptime(str(dt.now().date()) + ' 17:00', '%Y-%m-%d %H:%M')
    if (dt.now() > end_time) and \
        (dt.now() < save_time):
        # save_X_func()
        pass

    # Park ~11 hours; the outer supervisor script restarts us each morning.
    print(u'While循环每天下午5点自动结束,在此等待13小时,大概早上六点结束程序自动重启,这样只要窗口不关,永远每天自动收取 tick')
    timer.sleep(40000)
def sub_1min_from_tencent_lru():
    """Fetch intraday 1-minute K-line data from tencent for QA stock blocks.

    NOTE(review): the original author explicitly marked this routine as
    unfinished/undebugged; it iterated the undefined name ``blocks`` (the
    list was misnamed ``blockname``) and passed the undefined ``codelist``
    to the quote client, so it could never run.  The definite NameErrors
    are fixed below; the subscription loop itself still needs review
    before production use.
    """
    blocknames = ['MSCI中国', 'MSCI成份', 'MSCI概念', '三网融合',
                  '上证180', '上证380', '沪深300', '上证380',
                  '深证300', '上证50', '上证电信', '电信等权',
                  '上证100', '上证150', '沪深300', '中证100',
                  '中证500', '全指消费', '中小板指', '创业板指',
                  '综企指数', '1000可选', '国证食品', '深证可选',
                  '深证消费', '深成消费', '中证酒', '中证白酒',
                  '行业龙头', '白酒', '证券', '消费100',
                  '消费电子', '消费金融', '富时A50', '银行',
                  '中小银行', '证券', '军工', '白酒', '啤酒',
                  '医疗器械', '医疗器械服务', '医疗改革', '医药商业',
                  '医药电商', '中药', '消费100', '消费电子',
                  '消费金融', '黄金', '黄金概念', '4G5G',
                  '5G概念', '生态农业', '生物医药', '生物疫苗',
                  '机场航运', '数字货币', '文化传媒']
    all_stock_blocks = QA.QA_fetch_stock_block_adv()
    # TODO(review): presumably the member codes of the matched blocks are
    # what should be subscribed to — confirm intent with the author.
    codelist = []
    for blockname in blocknames:
        if blockname in all_stock_blocks.block_name:
            codelist_300 = all_stock_blocks.get_block(blockname).code
            print(u'QA预定义板块“{}”成分数据'.format(blockname))
            print(codelist_300)
            codelist.extend(codelist_300)
        else:
            print(u'QA默认无板块“{}”成分数据'.format(blockname))

    quotation = easyquotation.use("timekline")
    data = quotation.real(codelist, prefix=False)
    while True:
        l1_tick = quotation.market_snapshot(prefix=False)
        print(l1_tick)

    # Unreachable, as in the original: the loop above never breaks.
    return True
if __name__ == '__main__':
    # Pull tick data from sina finance with automatic start/stop.
    # Any endlessly-polling program can lose its connection; exceptions are
    # deliberately NOT handled inside the loop, so a disconnect raises and
    # exits the process.  That releases accidentally held half-open TCP/IP
    # connections and keeps an unattended server from exhausting its ports.
    # Wrap this script in a simple infinite-retry loop (restart after ~3s):
    """
    举个例子,例如这个sub.py脚本保存在 D:\代码\QUANTAXIS\QUANTAXIS\cli
    目录下面,并且创建一个空的 __init__.py,对同级2个文件__init__.py,
    还有本 sub.py,没有你就新建一个。
    创建一个PowerShell:sub_l1.ps1
    D:
    CD D:\代码\QUANTAXIS\
    $n = 1
    while($n -lt 6)
    {
        python -m QUANTAXIS.cli.sub
        Start-Sleep -Seconds 3
    }
    创建一个Cmd/Batch:sub_l1.cmd
    D:
    CD D:\代码\QUANTAXIS\
    :start
    python -m QUANTAXIS.cli.sub
    @ping 127.0.0.1 -n 3 >nul
    goto start
    pause
    Linux Bash脚本我不会,你们能用linux肯定会自己编写。
    """
    import sys
    sys.path.append('/root/ipython/')
    import CommonUtils as cu
    try:
        # Notify via DingTalk that the collector started, then block in the
        # polling loop until the session ends or the connection drops.
        cu.sendDingMsg("Start realtime sub from sina_l1 progress start now.")
        sub_l1_from_sina()
    except:
        # NOTE(review): bare except is intentional per the comment above —
        # report the failure to DingTalk and exit so the outer wrapper
        # script can restart the process.
        traceback.print_exc()
        cu.sendDingMsg("Realtime sub from sina_l1 progress has stopped. please check it soon.")

    # sub_l1_from_sina()
    # sub_1min_from_tencent_lru()
    pass
| [
"datetime.datetime.strptime",
"CommonUtils.sendDingMsg",
"easyquotation.use",
"QUANTAXIS.QAUtil.QADate_trade.QA_util_if_tradetime",
"time.sleep",
"GolemQ.utils.symbol.normalize_code",
"datetime.datetime.now",
"traceback.print_exc",
"numba.jit",
"datetime.date.today",
"QUANTAXIS.QA_fetch_stock_bl... | [((4670, 4691), 'numba.jit', 'nb.jit', ([], {'nopython': '(True)'}), '(nopython=True)\n', (4676, 4691), True, 'import numba as nb\n'), ((6803, 6828), 'easyquotation.use', 'easyquotation.use', (['"""sina"""'], {}), "('sina')\n", (6820, 6828), False, 'import easyquotation\n'), ((6928, 6936), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (6934, 6936), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((11536, 11554), 'time.sleep', 'timer.sleep', (['(40000)'], {}), '(40000)\n', (11547, 11554), True, 'import time as timer\n'), ((12346, 12371), 'easyquotation.use', 'easyquotation.use', (['"""sina"""'], {}), "('sina')\n", (12363, 12371), False, 'import easyquotation\n'), ((12471, 12479), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (12477, 12479), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((16789, 16807), 'time.sleep', 'timer.sleep', (['(40000)'], {}), '(40000)\n', (16800, 16807), True, 'import time as timer\n'), ((17670, 17699), 'QUANTAXIS.QA_fetch_stock_block_adv', 'QA.QA_fetch_stock_block_adv', ([], {}), '()\n', (17697, 17699), True, 'import QUANTAXIS as QA\n'), ((18029, 18059), 'easyquotation.use', 'easyquotation.use', (['"""timekline"""'], {}), "('timekline')\n", (18046, 18059), False, 'import easyquotation\n'), ((19005, 19038), 'sys.path.append', 'sys.path.append', (['"""/root/ipython/"""'], {}), "('/root/ipython/')\n", (19020, 19038), False, 'import sys\n'), ((3483, 3519), 'GolemQ.utils.symbol.normalize_code', 'normalize_code', (['code', "l1_tick['now']"], {}), "(code, l1_tick['now'])\n", (3497, 3519), False, 'from GolemQ.utils.symbol import normalize_code\n'), ((3556, 3576), 'GolemQ.utils.symbol.normalize_code', 'normalize_code', (['code'], {}), '(code)\n', (3570, 3576), False, 'from GolemQ.utils.symbol import normalize_code\n'), ((7355, 7363), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7361, 7363), True, 'from datetime import datetime as dt, 
timezone, timedelta, date\n'), ((7770, 7778), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7776, 7778), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((12842, 12850), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (12848, 12850), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((13193, 13201), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (13199, 13201), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((19086, 19155), 'CommonUtils.sendDingMsg', 'cu.sendDingMsg', (['"""Start realtime sub from sina_l1 progress start now."""'], {}), "('Start realtime sub from sina_l1 progress start now.')\n", (19100, 19155), True, 'import CommonUtils as cu\n'), ((7791, 7818), 'QUANTAXIS.QAUtil.QADate_trade.QA_util_if_tradetime', 'QA_util_if_tradetime', (['_time'], {}), '(_time)\n', (7811, 7818), False, 'from QUANTAXIS.QAUtil.QADate_trade import QA_util_if_tradetime, QA_util_get_pre_trade_date, QA_util_get_real_date, trade_date_sse\n'), ((8109, 8127), 'time.sleep', 'timer.sleep', (['sleep'], {}), '(sleep)\n', (8120, 8127), True, 'import time as timer\n'), ((8213, 8240), 'QUANTAXIS.QAUtil.QADate_trade.QA_util_if_tradetime', 'QA_util_if_tradetime', (['_time'], {}), '(_time)\n', (8233, 8240), False, 'from QUANTAXIS.QAUtil.QADate_trade import QA_util_if_tradetime, QA_util_get_pre_trade_date, QA_util_get_real_date, trade_date_sse\n'), ((11050, 11068), 'time.sleep', 'timer.sleep', (['sleep'], {}), '(sleep)\n', (11061, 11068), True, 'import time as timer\n'), ((11217, 11225), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (11223, 11225), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((11257, 11265), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (11263, 11265), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((13214, 13241), 'QUANTAXIS.QAUtil.QADate_trade.QA_util_if_tradetime', 'QA_util_if_tradetime', (['_time'], {}), 
'(_time)\n', (13234, 13241), False, 'from QUANTAXIS.QAUtil.QADate_trade import QA_util_if_tradetime, QA_util_get_pre_trade_date, QA_util_get_real_date, trade_date_sse\n'), ((13532, 13550), 'time.sleep', 'timer.sleep', (['sleep'], {}), '(sleep)\n', (13543, 13550), True, 'import time as timer\n'), ((13584, 13611), 'QUANTAXIS.QAUtil.QADate_trade.QA_util_if_tradetime', 'QA_util_if_tradetime', (['_time'], {}), '(_time)\n', (13604, 13611), False, 'from QUANTAXIS.QAUtil.QADate_trade import QA_util_if_tradetime, QA_util_get_pre_trade_date, QA_util_get_real_date, trade_date_sse\n'), ((16279, 16297), 'time.sleep', 'timer.sleep', (['sleep'], {}), '(sleep)\n', (16290, 16297), True, 'import time as timer\n'), ((16446, 16454), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (16452, 16454), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((16486, 16494), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (16492, 16494), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((19203, 19224), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (19222, 19224), False, 'import traceback\n'), ((19233, 19325), 'CommonUtils.sendDingMsg', 'cu.sendDingMsg', (['"""Realtime sub from sina_l1 progress has stopped. please check it soon."""'], {}), "(\n 'Realtime sub from sina_l1 progress has stopped. 
please check it soon.')\n", (19247, 19325), True, 'import CommonUtils as cu\n'), ((6286, 6307), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (6305, 6307), False, 'import datetime\n'), ((7842, 7850), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7848, 7850), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((8858, 8876), 'time.sleep', 'timer.sleep', (['sleep'], {}), '(sleep)\n', (8869, 8876), True, 'import time as timer\n'), ((11829, 11850), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (11848, 11850), False, 'import datetime\n'), ((13265, 13273), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (13271, 13273), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((14248, 14266), 'time.sleep', 'timer.sleep', (['sleep'], {}), '(sleep)\n', (14259, 14266), True, 'import time as timer\n'), ((7973, 7994), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (7992, 7994), False, 'import datetime\n'), ((8433, 8441), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (8439, 8441), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((13396, 13417), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (13415, 13417), False, 'import datetime\n'), ((13823, 13831), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (13829, 13831), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((7040, 7048), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7046, 7048), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((7147, 7155), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7153, 7155), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((7262, 7270), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7268, 7270), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((8535, 8598), 'datetime.datetime.strptime', 'dt.strptime', 
(["l1_ticks_data[-1]['datetime']", '"""%Y-%m-%d %H:%M:%S"""'], {}), "(l1_ticks_data[-1]['datetime'], '%Y-%m-%d %H:%M:%S')\n", (8546, 8598), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((10456, 10464), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (10462, 10464), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((11162, 11170), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (11168, 11170), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((12583, 12591), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (12589, 12591), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((12663, 12671), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (12669, 12671), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((12749, 12757), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (12755, 12757), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((13925, 13988), 'datetime.datetime.strptime', 'dt.strptime', (["l1_ticks_data[-1]['datetime']", '"""%Y-%m-%d %H:%M:%S"""'], {}), "(l1_ticks_data[-1]['datetime'], '%Y-%m-%d %H:%M:%S')\n", (13936, 13988), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((15685, 15693), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (15691, 15693), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((16391, 16399), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (16397, 16399), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((7430, 7438), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7436, 7438), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((7545, 7553), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (7551, 7553), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((7668, 7676), 'datetime.datetime.now', 'dt.now', ([], {}), 
'()\n', (7674, 7676), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((10720, 10728), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (10726, 10728), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((12917, 12925), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (12923, 12925), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((13001, 13009), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (13007, 13009), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((13091, 13099), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (13097, 13099), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((15949, 15957), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (15955, 15957), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((10890, 10898), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (10896, 10898), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((16119, 16127), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (16125, 16127), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((10667, 10675), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (10673, 10675), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((10801, 10809), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (10807, 10809), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((15896, 15904), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (15902, 15904), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((16030, 16038), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (16036, 16038), True, 'from datetime import datetime as dt, timezone, timedelta, date\n'), ((8651, 8659), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (8657, 8659), True, 'from datetime import datetime as dt, 
timezone, timedelta, date\n'), ((14041, 14049), 'datetime.datetime.now', 'dt.now', ([], {}), '()\n', (14047, 14049), True, 'from datetime import datetime as dt, timezone, timedelta, date\n')] |
#!/usr/bin/env python
"""
Installs ETA.
Copyright 2017-2021, Voxel51, Inc.
voxel51.com
"""
import io
import os

from setuptools import setup, find_packages
from wheel.bdist_wheel import bdist_wheel
VERSION = "0.6.1"
class BdistWheelCustom(bdist_wheel):
    """Custom ``bdist_wheel`` command that always builds a universal wheel."""

    def finalize_options(self):
        # Let the stock command resolve its options first, then override.
        bdist_wheel.finalize_options(self)
        # Pure Python, so build a wheel for any Python version
        self.universal = True
# Read the long description for PyPI from the README. Use io.open() with an
# explicit UTF-8 encoding (available on both Python 2.7 and 3.x, matching
# python_requires below) so the build does not depend on the platform's
# default encoding -- e.g. cp1252 on Windows would fail on any non-ASCII
# character in the README.
with io.open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
def get_version():
    """Return the package version.

    Honors a ``RELEASE_VERSION`` environment-variable override (e.g. for
    release-candidate builds), but only if it extends the hard-coded
    ``VERSION``; otherwise a ``ValueError`` is raised.
    """
    release_version = os.environ.get("RELEASE_VERSION")
    if release_version is None:
        return VERSION

    if not release_version.startswith(VERSION):
        raise ValueError(
            "Release version does not match version: %s and %s"
            % (release_version, VERSION)
        )

    return release_version
# Package metadata and build configuration for the voxel51-eta distribution.
setup(
    name="voxel51-eta",
    version=get_version(),
    description="Extensible Toolkit for Analytics",
    author="Voxel51, Inc.",
    author_email="<EMAIL>",
    url="https://github.com/voxel51/eta",
    license="Apache",
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=find_packages(),
    include_package_data=True,
    # Core runtime dependencies
    install_requires=[
        "argcomplete",
        "dill",
        "future",
        "glob2",
        "importlib-metadata; python_version<'3.8'",
        "ndjson",
        "numpy",
        "opencv-python-headless<5,>=4.1",
        "packaging",
        "patool",
        "Pillow>=6.2",
        "python-dateutil",
        "pytz",
        "requests",
        "retrying",
        "six",
        "scikit-image",
        "sortedcontainers",
        "tabulate",
        "tzlocal",
    ],
    # Optional feature groups: `pip install voxel51-eta[pipeline]` etc.
    extras_require={
        "pipeline": ["blockdiag", "Sphinx", "sphinxcontrib-napoleon"],
        "storage": [
            "boto3>=1.15",
            "google-api-python-client",
            "google-cloud-storage>=1.36",
            "httplib2<=0.15",
            "pysftp",
        ],
    },
    # PyPI trove classifiers
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: Apache Software License",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Topic :: Scientific/Engineering :: Image Processing",
        "Topic :: Scientific/Engineering :: Image Recognition",
        "Topic :: Scientific/Engineering :: Information Analysis",
        "Topic :: Scientific/Engineering :: Visualization",
        "Operating System :: MacOS :: MacOS X",
        "Operating System :: POSIX :: Linux",
        "Operating System :: Microsoft :: Windows",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
    ],
    # `eta` command-line tool
    entry_points={"console_scripts": ["eta=eta.core.cli:main"]},
    python_requires=">=2.7",
    cmdclass={"bdist_wheel": BdistWheelCustom},
)
| [
"wheel.bdist_wheel.bdist_wheel.finalize_options",
"setuptools.find_packages"
] | [((287, 321), 'wheel.bdist_wheel.bdist_wheel.finalize_options', 'bdist_wheel.finalize_options', (['self'], {}), '(self)\n', (315, 321), False, 'from wheel.bdist_wheel import bdist_wheel\n'), ((1165, 1180), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1178, 1180), False, 'from setuptools import setup, find_packages\n')] |
import wx
import cv2
#----------------------------------------------------------------------
# Panel to display image from camera
#----------------------------------------------------------------------
class WebcamPanel(wx.Window): # wx.Panel, wx.Control
    """wx window that continuously displays frames read from an OpenCV camera.

    A wx.Timer triggers `NextFrame` at roughly `fps` frames per second; each
    frame is converted BGR->RGB, optionally mirrored, copied into a bitmap,
    and painted in `OnPaint`.
    """

    def __init__(self, parent, camera, fps=15, flip=False):
        """
        Args:
            parent: parent wx window (here: the panel inside MainWindow).
            camera: an opened cv2.VideoCapture-like object with a read() method.
            fps: target refresh rate for the timer.
            flip: if True, mirror every frame horizontally.
        """
        wx.Window.__init__(self, parent)
        # remember arguments
        self.camera = camera
        self.fps = fps
        self.flip = flip
        # get frame size
        ret_value, frame = self.camera.read()
        height, width = frame.shape[:2]
        # resize panel with camera image
        self.SetSize( (width, height) )
        #self.SetMinSize( (width, height) )
        # resize main window
        self.GetParent().GetParent().SetSize( (width, height+37) ) # needs a fix so the hard-coded +37 is not required
        #self.GetGrandParent().SetSize( (width, height+25) )
        #self.GetTopLevelParent().SetSize( (width, height+25) ) # wrong parent
        # OpenCV delivers BGR; wx bitmaps expect RGB
        frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        if self.flip:
            frame = cv2.flip(frame, 1)
        # create bitmap with frame
        self.bmp = wx.BitmapFromBuffer(width, height, frame)
        # timer to refresh frames
        self.timer = wx.Timer(self)
        self.timer.Start(1000./fps)
        # add functions to events
        self.Bind(wx.EVT_PAINT, self.OnPaint) # run when it is needed
        self.Bind(wx.EVT_TIMER, self.NextFrame) # run by timer

    def OnPaint(self, event):
        # Paint the most recently captured bitmap; BufferedPaintDC avoids flicker.
        dc = wx.BufferedPaintDC(self)
        dc.DrawBitmap(self.bmp, 0, 0)

    def NextFrame(self, event):
        # Timer callback: grab the next frame and schedule a repaint.
        ret_value, frame = self.camera.read()
        if ret_value:
            frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            if self.flip:
                frame = cv2.flip(frame, 1)
            self.bmp.CopyFromBuffer(frame)
            self.Refresh()
#----------------------------------------------------------------------
# Main Window
#----------------------------------------------------------------------
class MainWindow(wx.Frame):
    """Top-level frame holding a CAPTURE button above the live webcam panel."""

    def __init__(self, camera, fps=10):
        """
        Args:
            camera: an opened cv2.VideoCapture-like object, passed through to
                the WebcamPanel.
            fps: unused here; WebcamPanel uses its own default refresh rate.
        """
        wx.Frame.__init__(self, None)
        self.panel = wx.Panel(self, -1)
        # add sizer
        self.sizer = wx.BoxSizer(wx.VERTICAL)
        self.panel.SetSizer(self.sizer)
        # add button
        self.button = wx.Button(self.panel, label="CAPTURE")
        self.button.Bind(wx.EVT_BUTTON, self.OnButton)
        self.sizer.Add(self.button, 0, wx.EXPAND)
        # add panel with webcam image
        self.webcampanel = WebcamPanel(self.panel, camera)
        self.sizer.Add(self.webcampanel, 1, wx.EXPAND)
        #self.sizer.Layout()
        #self.webcampanel.Layout()
        #self.Fit()
        self.Show()

    def OnButton(self, event):
        # Placeholder handler for the CAPTURE button; snapshot saving is
        # not implemented yet.
        print("TODO: save image in file")
#----------------------------------------------------------------------
# Script entry point: open the default camera (device 0), build the GUI,
# and enter the wx event loop.
camera = cv2.VideoCapture(0)
app = wx.App()
MainWindow(camera)
app.MainLoop()
| [
"wx.Button",
"wx.BufferedPaintDC",
"cv2.flip",
"wx.BoxSizer",
"wx.Timer",
"wx.Panel",
"cv2.VideoCapture",
"cv2.cvtColor",
"wx.Frame.__init__",
"wx.App",
"wx.BitmapFromBuffer",
"wx.Window.__init__"
] | [((2976, 2995), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (2992, 2995), False, 'import cv2\n'), ((3003, 3011), 'wx.App', 'wx.App', ([], {}), '()\n', (3009, 3011), False, 'import wx\n'), ((332, 364), 'wx.Window.__init__', 'wx.Window.__init__', (['self', 'parent'], {}), '(self, parent)\n', (350, 364), False, 'import wx\n'), ((1028, 1066), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (1040, 1066), False, 'import cv2\n'), ((1192, 1233), 'wx.BitmapFromBuffer', 'wx.BitmapFromBuffer', (['width', 'height', 'frame'], {}), '(width, height, frame)\n', (1211, 1233), False, 'import wx\n'), ((1290, 1304), 'wx.Timer', 'wx.Timer', (['self'], {}), '(self)\n', (1298, 1304), False, 'import wx\n'), ((1557, 1581), 'wx.BufferedPaintDC', 'wx.BufferedPaintDC', (['self'], {}), '(self)\n', (1575, 1581), False, 'import wx\n'), ((2170, 2199), 'wx.Frame.__init__', 'wx.Frame.__init__', (['self', 'None'], {}), '(self, None)\n', (2187, 2199), False, 'import wx\n'), ((2222, 2240), 'wx.Panel', 'wx.Panel', (['self', '(-1)'], {}), '(self, -1)\n', (2230, 2240), False, 'import wx\n'), ((2284, 2308), 'wx.BoxSizer', 'wx.BoxSizer', (['wx.VERTICAL'], {}), '(wx.VERTICAL)\n', (2295, 2308), False, 'import wx\n'), ((2401, 2439), 'wx.Button', 'wx.Button', (['self.panel'], {'label': '"""CAPTURE"""'}), "(self.panel, label='CAPTURE')\n", (2410, 2439), False, 'import wx\n'), ((1118, 1136), 'cv2.flip', 'cv2.flip', (['frame', '(1)'], {}), '(frame, 1)\n', (1126, 1136), False, 'import cv2\n'), ((1750, 1788), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (1762, 1788), False, 'import cv2\n'), ((1839, 1857), 'cv2.flip', 'cv2.flip', (['frame', '(1)'], {}), '(frame, 1)\n', (1847, 1857), False, 'import cv2\n')] |
import os
import shutil
import tempfile
from taskmage2.utils import filesystem, functional
from taskmage2.asttree import asttree, renderers
from taskmage2.parser import iostream, parsers
from taskmage2.project import taskfiles
class Project(object):
    """ Represents a taskmage project on disk: a root directory that contains
    a ``.taskmage`` subdirectory, under which archived ``*.mtask`` files
    mirror the layout of their active counterparts.
    """
    def __init__(self, root='.'):
        """ Constructor.
        Args:
            root (str, optional): ``(ex: None, '/src/project/subdir/file.mtask', '/src/project', '/src/project/.taskmage' )``
                Path to your projectroot, or a file/directory within
                your taskmage project root. If falsy, no project is loaded.
        """
        self._root = None
        if root:
            self.load(root)
    def __repr__(self):
        """
        Returns:
            str: ``<Project(path/to/project) at 0x7ff6b33106a0>``
        """
        if self.root:
            relpath = os.path.relpath(self.root)
        else:
            relpath = 'None'
        repr_ = '<Project({}) at {}>'.format(relpath, hex(id(self)))
        return repr_
    def __hash__(self):
        # Hash derives from the project root only, so two Project instances
        # pointing at the same root hash equally.
        hashstr = '<taskmage2.project.projects.Project({})>'.format(str(self.root))
        return hash(hashstr)
    @classmethod
    def from_path(cls, filepath):
        """ Instantiates a new Project, loaded using `filepath`.
        Args:
            filepath (str): ``(ex: '/src/project/subdir/file.mtask', '/src/project', '/src/project/.taskmage' )``
                Path to your projectroot, or a file/directory within
                your taskmage project root.
        """
        project = Project(root=None)
        project.load(filepath)
        return project
    @property
    def root(self):
        """ The root directory of a project. Contains ``.taskmage`` directory.
        Returns:
            .. code-block:: python
                '/src/project'
        """
        return self._root
    @classmethod
    def create(cls, root):
        """ Create a new taskmage project in directory `root` .
        Args:
            root (str):
                Path to the root of your taskmage project.
        Returns:
            str: project root directory
        Raises:
            OSError: if `root` exists and is not a directory.
        """
        root = format_rootpath(root)
        if os.path.exists(root):
            if not os.path.isdir(root):
                raise OSError(
                    'unable to create taskmage project, provided '
                    'path exists and is not a directory. "{}"'.format(root)
                )
        # the marker directory that identifies a taskmage project root
        taskmage_dir = '{}/.taskmage'.format(root)
        filesystem.make_directories(taskmage_dir)
        return root
    @staticmethod
    def find(path):
        """ Walks up from `path` looking for a directory containing ``.taskmage`` .
        Returns:
            str: absolute path to taskmage project root
        Raises:
            RuntimeError: if no directory on the way up contains ``.taskmage`` .
        """
        path = filesystem.format_path(os.path.abspath(path))
        # is path root
        if os.path.isdir('{}/.taskmage'.format(path)):
            return path
        # /src/project/.taskmage
        if os.path.basename(path) == '.taskmage':
            return os.path.dirname(path)
        # /src/project
        # /src/project/sub-path
        for parent_dir in filesystem.walk_parents(path):
            if os.path.isdir('{}/.taskmage'.format(parent_dir)):
                return parent_dir
        raise RuntimeError('unable to find taskmage project from path: {}'.format(path))
    def load(self, path):
        """ Loads a taskmage project from a path.
        Args:
            path (str): ``(ex: '/src/project/subdir/file.mtask', '/src/project', '/src/project/.taskmage' )``
                Path to your projectroot, or a file/directory within
                your taskmage project root.
        """
        path = os.path.abspath(path)
        projectroot = self.find(path)
        self._root = projectroot
    def archive_completed(self, filepath=None):
        """ Archives all completed task-branches.
        Example:
            .. code-block:: ReStructuredText
                ## a,b, and c will be archived
                ## (entire task-branch completed)
                x a
                    x b
                    x c
                ## nothing will be archived
                ## (task-branch is not entirely completed)
                x a
                    x b
                    * c
        Args:
            filepath (str, optional): ``(ex: '/src/project/file.mtask' )``
                Optionally, archive completed tasks in a single target file.
        """
        if filepath is not None:
            self._archive_completed(filepath)
        else:
            # for every mtask file in the entire project...
            raise NotImplementedError('todo - archive completed tasks from all mtask files')
    def is_project_path(self, filepath):
        """ Test if a file is within this project.
        """
        # NOTE(review): plain string-prefix comparison -- assumes `filepath`
        # uses the same separators/normalization as `self.root`; confirm
        # callers normalize paths first.
        if filepath.startswith('{}/'.format(self.root)):
            return True
        return False
    def is_archived_path(self, filepath):
        """ Test if file is an archived mtask file.
        """
        if filepath.startswith('{}/.taskmage/'.format(self.root)):
            return True
        return False
    def is_active_path(self, filepath):
        """ Test if file is an active (non-archived) mtask file.
        """
        if self.is_project_path(filepath) and not self.is_archived_path(filepath):
            return True
        return False
    def get_archived_path(self, filepath):
        """ Returns filepath to corresponding archived mtask file's (from un-archived mtask file).
        """
        if not self.is_project_path(filepath):
            msg = ('filepath not within current taskmage project. \n'
                   'project "{}"\n'
                   'filepath "{}\n').format(self.root, filepath)
            raise RuntimeError(msg)
        if self.is_archived_path(filepath):
            return filepath
        filepath = filesystem.format_path(filepath)
        # path relative to the project root, mirrored under .taskmage/
        relpath = filepath[len(self.root) + 1:]
        archived_path = '{}/.taskmage/{}'.format(self.root, relpath)
        return archived_path
    def get_active_path(self, filepath):
        """ Returns filepath to corresponding un-archived mtask file (from archived mtask file).
        """
        if not self.is_project_path(filepath):
            raise RuntimeError(
                ('filepath not within current taskmage project. \n'
                 'project "{}"\n'
                 'filepath "{}\n').format(self.root, filepath)
            )
        if not self.is_archived_path(filepath):
            return filepath
        filepath = filesystem.format_path(filepath)
        taskdir = '{}/.taskmage'.format(self.root)
        # strip the '<root>/.taskmage/' prefix to recover the active location
        relpath = filepath[len(taskdir) + 1:]
        active_path = '{}/{}'.format(self.root, relpath)
        return active_path
    def get_counterpart(self, filepath):
        """ Returns active-path if archived-path, or inverse.
        """
        if not self.is_project_path(filepath):
            raise RuntimeError(
                ('filepath not within current taskmage project. \n'
                 'project "{}"\n'
                 'filepath "{}\n').format(self.root, filepath)
            )
        if self.is_archived_path(filepath):
            return self.get_active_path(filepath)
        else:
            return self.get_archived_path(filepath)
    def filter_taskfiles(self, filters):
        """ Returns a list of all taskfiles in project, filtered by provided `filters` .
        Args:
            filters (list):
                List of functions that accepts a :py:obj:`taskmage2.project.taskfiles.TaskFile`
                as an argument, and returns True (keep) or False (remove)
        Returns:
            Iterable:
                iterable of project taskfiles (after all filters applied to them).
                .. code-block:: python
                    [
                        TaskFile('/path/to/todos/file1.mtask'),
                        TaskFile('/path/to/todos/file2.mtask'),
                        TaskFile('/path/to/todos/file3.mtask'),
                        ...
                    ]
        """
        return functional.multifilter(filters, self.iter_taskfiles())
    def iter_taskfiles(self):
        """ Iterates over all `*.mtask` files in project (both completed and uncompleted).
        Returns:
            Iterable:
                iterable of all project taskfiles
                .. code-block:: python
                    [
                        TaskFile('/path/to/todos/file1.mtask'),
                        TaskFile('/path/to/todos/file2.mtask'),
                        TaskFile('/path/to/todos/file3.mtask'),
                        ...
                    ]
        """
        for (root, dirnames, filenames) in os.walk(self.root):
            for filename in filenames:
                if not filename.endswith('.mtask'):
                    continue
                filepath = '{}/{}'.format(root, filename)
                yield taskfiles.TaskFile(filepath)
    def _archive_completed(self, filepath):
        """ Archives completed task-branches of a single mtask file.
        Args:
            filepath (str):
                absolute path to a .mtask file.
        """
        (active_ast, archive_ast) = self._archive_completed_as_ast(filepath)
        archive_path = self.get_archived_path(filepath)
        # write both results into a tempdir first, so the real files are only
        # overwritten after both writes succeeded
        tempdir = tempfile.mkdtemp()
        try:
            # create tempfile objects
            active_taskfile = taskfiles.TaskFile('{}/active.mtask'.format(tempdir))
            archive_taskfile = taskfiles.TaskFile('{}/archive.mtask'.format(tempdir))
            # write tempfiles
            active_taskfile.write(active_ast)
            archive_taskfile.write(archive_ast)
            # (if successful) overwrite real files
            active_taskfile.copyfile(filepath)
            archive_taskfile.copyfile(archive_path)
        finally:
            # delete tempdir
            if os.path.isdir(tempdir):
                shutil.rmtree(tempdir)
    def _archive_completed_as_ast(self, filepath):
        """ Builds the post-archive ASTs for a single mtask file.
        Returns:
            .. code-block:: python
                (
                    asttree.AbstractSyntaxTree(),  # new active AST
                    asttree.AbstractSyntaxTree(),  # new archive AST
                )
        """
        # get active AST
        active_ast = self._get_mtaskfile_ast(filepath)
        # get archive AST
        archive_path = self.get_archived_path(filepath)
        archive_ast = self._get_mtaskfile_ast(archive_path)
        # perform archive
        archive_ast = active_ast.archive_completed(archive_ast)
        return (active_ast, archive_ast)
    def _get_mtaskfile_ast(self, filepath):
        # Missing files parse as an empty tree, so an archive target does not
        # need to exist yet.
        if not os.path.isfile(filepath):
            return asttree.AbstractSyntaxTree()
        with open(filepath, 'r') as fd_src:
            fd = iostream.FileDescriptor(fd_src)
            AST = parsers.parse(fd, 'mtask')
            return AST
def format_rootpath(path):
    """ Formats a project-directory path.
    Normalizes the path to forward slashes; a path whose final component is
    empty (i.e. it ends with a separator) additionally gets ``/.taskmage``
    appended.
    Returns:
        str:
            a new formatted path
    """
    # NOTE(review): despite its name, _ensure_path_ends_with_dot_taskmage
    # returns most paths unchanged (it only appends when the final path
    # component is empty) -- confirm which behavior is intended.
    return functional.pipeline(
        path,
        [
            _ensure_path_ends_with_dot_taskmage,
            filesystem.format_path,
        ]
    )
def _ensure_path_ends_with_dot_taskmage(path):
if os.path.basename(path):
return path
return '{}/.taskmage'.format(path)
| [
"taskmage2.utils.filesystem.walk_parents",
"os.path.exists",
"taskmage2.utils.filesystem.format_path",
"taskmage2.parser.iostream.FileDescriptor",
"taskmage2.parser.parsers.parse",
"os.path.relpath",
"os.path.dirname",
"os.path.isfile",
"os.path.isdir",
"tempfile.mkdtemp",
"os.path.basename",
... | [((11557, 11650), 'taskmage2.utils.functional.pipeline', 'functional.pipeline', (['path', '[_ensure_path_ends_with_dot_taskmage, filesystem.format_path]'], {}), '(path, [_ensure_path_ends_with_dot_taskmage, filesystem.\n format_path])\n', (11576, 11650), False, 'from taskmage2.utils import filesystem, functional\n'), ((11759, 11781), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (11775, 11781), False, 'import os\n'), ((2504, 2524), 'os.path.exists', 'os.path.exists', (['root'], {}), '(root)\n', (2518, 2524), False, 'import os\n'), ((2818, 2859), 'taskmage2.utils.filesystem.make_directories', 'filesystem.make_directories', (['taskmage_dir'], {}), '(taskmage_dir)\n', (2845, 2859), False, 'from taskmage2.utils import filesystem, functional\n'), ((3387, 3416), 'taskmage2.utils.filesystem.walk_parents', 'filesystem.walk_parents', (['path'], {}), '(path)\n', (3410, 3416), False, 'from taskmage2.utils import filesystem, functional\n'), ((4124, 4145), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (4139, 4145), False, 'import os\n'), ((6313, 6345), 'taskmage2.utils.filesystem.format_path', 'filesystem.format_path', (['filepath'], {}), '(filepath)\n', (6335, 6345), False, 'from taskmage2.utils import filesystem, functional\n'), ((6997, 7029), 'taskmage2.utils.filesystem.format_path', 'filesystem.format_path', (['filepath'], {}), '(filepath)\n', (7019, 7029), False, 'from taskmage2.utils import filesystem, functional\n'), ((9174, 9192), 'os.walk', 'os.walk', (['self.root'], {}), '(self.root)\n', (9181, 9192), False, 'import os\n'), ((9735, 9753), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (9751, 9753), False, 'import tempfile\n'), ((1001, 1027), 'os.path.relpath', 'os.path.relpath', (['self.root'], {}), '(self.root)\n', (1016, 1027), False, 'import os\n'), ((3054, 3075), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (3069, 3075), False, 'import os\n'), ((3225, 3247), 'os.path.basename', 
'os.path.basename', (['path'], {}), '(path)\n', (3241, 3247), False, 'import os\n'), ((3283, 3304), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (3298, 3304), False, 'import os\n'), ((10312, 10334), 'os.path.isdir', 'os.path.isdir', (['tempdir'], {}), '(tempdir)\n', (10325, 10334), False, 'import os\n'), ((11093, 11117), 'os.path.isfile', 'os.path.isfile', (['filepath'], {}), '(filepath)\n', (11107, 11117), False, 'import os\n'), ((11138, 11166), 'taskmage2.asttree.asttree.AbstractSyntaxTree', 'asttree.AbstractSyntaxTree', ([], {}), '()\n', (11164, 11166), False, 'from taskmage2.asttree import asttree, renderers\n'), ((11229, 11260), 'taskmage2.parser.iostream.FileDescriptor', 'iostream.FileDescriptor', (['fd_src'], {}), '(fd_src)\n', (11252, 11260), False, 'from taskmage2.parser import iostream, parsers\n'), ((11279, 11305), 'taskmage2.parser.parsers.parse', 'parsers.parse', (['fd', '"""mtask"""'], {}), "(fd, 'mtask')\n", (11292, 11305), False, 'from taskmage2.parser import iostream, parsers\n'), ((2545, 2564), 'os.path.isdir', 'os.path.isdir', (['root'], {}), '(root)\n', (2558, 2564), False, 'import os\n'), ((10352, 10374), 'shutil.rmtree', 'shutil.rmtree', (['tempdir'], {}), '(tempdir)\n', (10365, 10374), False, 'import shutil\n'), ((9394, 9422), 'taskmage2.project.taskfiles.TaskFile', 'taskfiles.TaskFile', (['filepath'], {}), '(filepath)\n', (9412, 9422), False, 'from taskmage2.project import taskfiles\n')] |
from discord.ext import commands
class FilteredUser(commands.UserConverter):
    """
    A :class:`discord.ext.commands.UserConverter` variant that refuses to
    resolve bot accounts and/or the invoking author, depending on how it
    was configured.
    """

    def __init__(self, *, allow_author: bool = False, allow_bots: bool = False):
        super().__init__()
        self.allow_bots = allow_bots
        self.allow_author = allow_author

    async def convert(self, ctx: commands.Context, argument: str):
        user = await super().convert(ctx, argument)
        author_is_target = ctx.author.id == user.id
        if author_is_target and self.allow_author is False:
            raise commands.BadArgument("You can't run this command on yourself.")
        if user.bot and self.allow_bots is False:
            raise commands.BadArgument("You can't run this command on bots.")
        return user
class FilteredMember(commands.MemberConverter):
    """
    A :class:`discord.ext.commands.MemberConverter` variant that refuses to
    resolve bot accounts and/or the invoking author, depending on how it
    was configured.
    """

    def __init__(self, *, allow_author: bool = False, allow_bots: bool = False):
        super().__init__()
        self.allow_bots = allow_bots
        self.allow_author = allow_author

    async def convert(self, ctx: commands.Context, argument: str):
        member = await super().convert(ctx, argument)
        if self.allow_author is False and member.id == ctx.author.id:
            raise commands.BadArgument("You can't run this command on yourself.")
        if self.allow_bots is False and member.bot:
            raise commands.BadArgument("You can't run this command on bots.")
        return member
| [
"discord.ext.commands.BadArgument"
] | [((613, 676), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""You can\'t run this command on yourself."""'], {}), '("You can\'t run this command on yourself.")\n', (633, 676), False, 'from discord.ext import commands\n'), ((742, 801), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""You can\'t run this command on bots."""'], {}), '("You can\'t run this command on bots.")\n', (762, 801), False, 'from discord.ext import commands\n'), ((1405, 1468), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""You can\'t run this command on yourself."""'], {}), '("You can\'t run this command on yourself.")\n', (1425, 1468), False, 'from discord.ext import commands\n'), ((1534, 1593), 'discord.ext.commands.BadArgument', 'commands.BadArgument', (['"""You can\'t run this command on bots."""'], {}), '("You can\'t run this command on bots.")\n', (1554, 1593), False, 'from discord.ext import commands\n')] |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests image classification accuracy with ImageNet validation data set.
Please download the validation image data from to edgetpu/test_data/imagenet/
"""
import unittest
from edgetpu.classification.engine import ClassificationEngine
from PIL import Image
from . import test_utils
class TestImagenetClassification(unittest.TestCase):
  """Measures top-1 / top-5 accuracy of classification models on ImageNet."""

  def _crop_image(self, image, cropping_fraction=0.75):
    """Center-crops an image, keeping `cropping_fraction` of each axis.

    Args:
      image: PIL image object.
      cropping_fraction: Fraction of the image to keep (per axis).

    Returns:
      Cropped image as PIL image object.
    """
    full_w, full_h = image.size
    crop_w = round(full_w * cropping_fraction)
    crop_h = round(full_h * cropping_fraction)
    left = round((full_w - full_w * cropping_fraction) / 2.0)
    top = round((full_h - full_h * cropping_fraction) / 2.0)
    return image.crop((left, top, left + crop_w, top + crop_h))

  def _test_model(self, model_name, expected_top_1=None, expected_top_5=None):
    """Runs the model over the 50k validation images and checks accuracy."""
    engine = ClassificationEngine(test_utils.test_data_path(model_name))
    with open(test_utils.test_data_path('imagenet/val.txt'), 'r') as gt_file:
      ground_truth = [entry.strip().split(' ') for entry in gt_file.readlines()]
    top_1_count = 0
    top_5_count = 0
    print('Running inference for model %s...' % model_name)
    for index in range(50000):
      label = int(ground_truth[index][1]) + 1
      image_name = 'imagenet/ILSVRC2012_val_%s.JPEG' % str(index + 1).zfill(8)
      with test_utils.test_image(image_name) as image:
        cropped = self._crop_image(image.convert('RGB'))
        prediction = engine.classify_with_image(cropped, threshold=0.0, top_k=5)
        if prediction[0][0] == label:
          top_1_count += 1
          top_5_count += 1
        else:
          # The label may still appear among the remaining top-5 candidates.
          top_5_count += sum(
              1 for candidate in prediction[1:] if candidate[0] == label)
    top_1_accuracy = top_1_count / 50000.0
    top_5_accuracy = top_5_count / 50000.0
    print('Top 1 accuracy: %.2f%%' % (top_1_accuracy * 100))
    print('Top 5 accuracy: %.2f%%' % (top_5_accuracy * 100))
    if expected_top_1 is not None:
      self.assertAlmostEqual(top_1_accuracy, expected_top_1, places=4)
    if expected_top_5 is not None:
      self.assertAlmostEqual(top_5_accuracy, expected_top_5, places=4)

  def test_mobilenet_v1(self):
    self._test_model('mobilenet_v1_1.0_224_quant_edgetpu.tflite',
                     expected_top_1=0.6854, expected_top_5=0.8772)

  def test_mobilenet_v2(self):
    self._test_model('mobilenet_v2_1.0_224_quant_edgetpu.tflite',
                     expected_top_1=0.6912, expected_top_5=0.8829)
# Allow running this accuracy benchmark directly as a script.
if __name__ == '__main__':
  unittest.main()
| [
"unittest.main"
] | [((3339, 3354), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3352, 3354), False, 'import unittest\n')] |
# Improved version of the code from chapter 03
# created in chapter 11 to accelerate execution
import random
XMAX, YMAX = 19, 16
def create_grid_string(dots, xsize, ysize):
    """Render an xsize-by-ysize grid as a multi-line string.

    Cells contained in *dots* appear as '.', every other cell as '#';
    each row is terminated by a newline.
    """
    rows = []
    for y in range(ysize):
        row = "".join("." if (x, y) in dots else "#" for x in range(xsize))
        rows.append(row + "\n")
    return "".join(rows)
def get_all_dot_positions(xsize, ysize):
    """Return (x, y) tuples for every interior cell of an xsize-by-ysize grid.

    The outermost rows and columns (the border) are excluded.
    """
    positions = []
    for x in range(1, xsize - 1):
        for y in range(1, ysize - 1):
            positions.append((x, y))
    return positions
def get_neighbors(x, y):
"""Returns a list with the 8 neighbor positions of (x, y)"""
return [
(x, y-1), (x, y+1), (x-1, y), (x+1, y),
(x-1, y-1), (x+1, y-1), (x-1, y+1), (x+1, y+1)
]
def generate_dot_positions(xsize, ysize):
"""Creates positions of dots for a random maze"""
positions = get_all_dot_positions(xsize, ysize)
random.shuffle(positions)
dots = set()
for x, y in positions:
neighbors = get_neighbors(x, y)
free = [nb in dots for nb in neighbors]
if free.count(True) < 5:
dots.add((x, y))
return dots
def create_maze(xsize, ysize):
"""Returns a xsize*ysize maze as a string"""
dots = generate_dot_positions(xsize, ysize)
maze = create_grid_string(dots, xsize, ysize)
return maze
if __name__ == '__main__':
dots = set(((1,1), (1,2), (1,3), (2,2), (3,1), (3,2), (3,3)))
print(create_grid_string(dots, 5, 5))
positions = get_all_dot_positions(5, 5)
print(create_grid_string(positions, 5, 5))
neighbors = get_neighbors(3, 2)
print(create_grid_string(neighbors, 5, 5))
maze = create_maze(12, 7)
print(maze)
| [
"random.shuffle"
] | [((989, 1014), 'random.shuffle', 'random.shuffle', (['positions'], {}), '(positions)\n', (1003, 1014), False, 'import random\n')] |
from api.models.models import User
from api import db, create_app
db.create_all(app=create_app()) | [
"api.create_app"
] | [((85, 97), 'api.create_app', 'create_app', ([], {}), '()\n', (95, 97), False, 'from api import db, create_app\n')] |
import numpy as np
import scipy.stats as stats
from UQpy.Distributions.baseclass.Distribution import Distribution
class DistributionContinuous1D(Distribution):
"""
Parent class for univariate continuous probability distributions.
"""
def __init__(self, **kwargs):
super().__init__(**kwargs)
@staticmethod
def _check_x_dimension(x):
"""
Check the dimension of input x - must be an ndarray of shape (npoints,) or (npoints, 1)
"""
x = np.atleast_1d(x)
if len(x.shape) > 2 or (len(x.shape) == 2 and x.shape[1] != 1):
raise ValueError('Wrong dimension in x.')
return x.reshape((-1,))
def _construct_from_scipy(self, scipy_name=stats.rv_continuous):
self.cdf = lambda x: scipy_name.cdf(x=self._check_x_dimension(x), **self.params)
self.pdf = lambda x: scipy_name.pdf(x=self._check_x_dimension(x), **self.params)
self.log_pdf = lambda x: scipy_name.logpdf(x=self._check_x_dimension(x), **self.params)
self.icdf = lambda x: scipy_name.ppf(q=self._check_x_dimension(x), **self.params)
self.moments = lambda moments2return='mvsk': scipy_name.stats(moments=moments2return, **self.params)
self.rvs = lambda nsamples=1, random_state=None: scipy_name.rvs(
size=nsamples, random_state=random_state, **self.params).reshape((nsamples, 1))
def tmp_fit(dist, data):
data = self._check_x_dimension(data)
fixed_params = {}
for key, value in dist.params.items():
if value is not None:
fixed_params['f' + key] = value
params_fitted = scipy_name.fit(data=data, **fixed_params)
return dict(zip(dist.order_params, params_fitted))
self.fit = lambda data: tmp_fit(self, data) | [
"numpy.atleast_1d"
] | [((499, 515), 'numpy.atleast_1d', 'np.atleast_1d', (['x'], {}), '(x)\n', (512, 515), True, 'import numpy as np\n')] |
import requests
import re
import time
import random
import pprint
import os
headers = {"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3858.0 Safari/537.36"}
def youdict(threadName, q):
res = []
index = 0
url = q.get(timeout = 2)
index += 1
r = requests.get(url, headers = headers, timeout = 5)
html = str(r.content, encoding="utf-8").replace("\n", "").replace(" ", "").replace('<span class="yd-kw-suffix">[英语单词大全]</span>', "")
words = re.findall('<div class="caption"><h3 style="margin-top: 10px;"><a style="color:#333;" target="_blank" href="/w/.*?">(.*?)</a>[ ]?</h3><p>(.*?)</p></div>', html)
for word in words:
res.append(word)
if index%5 == 0:
time.sleep(3 + random.random())
else:
time.sleep(1 + random.random())
return res
def hujiang(threadName, q):
res = []
index = 0
url = q.get(timeout = 2)
index += 1
r = requests.get(url, headers=headers, timeout=5)
html = str(r.content, encoding="utf-8").replace("\n", "").replace(" ", "").replace('<span class="yd-kw-suffix">[英语单词大全]</span>', "")
words = re.findall('<li class="clearfix"><a href="/ciku/(.*?)/" target="_blank">.*?</a><span>(.*?)</span></li>', html)
for word in words:
res.append(word)
if index%5 == 0:
time.sleep(3 + random.random())
else:
time.sleep(1 + random.random())
return res
if __name__ == "__main__":
main()
| [
"random.random",
"re.findall",
"requests.get"
] | [((343, 388), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'timeout': '(5)'}), '(url, headers=headers, timeout=5)\n', (355, 388), False, 'import requests\n'), ((547, 717), 're.findall', 're.findall', (['"""<div class="caption"><h3 style="margin-top: 10px;"><a style="color:#333;" target="_blank" href="/w/.*?">(.*?)</a>[ ]?</h3><p>(.*?)</p></div>"""', 'html'], {}), '(\n \'<div class="caption"><h3 style="margin-top: 10px;"><a style="color:#333;" target="_blank" href="/w/.*?">(.*?)</a>[ ]?</h3><p>(.*?)</p></div>\'\n , html)\n', (557, 717), False, 'import re\n'), ((1012, 1057), 'requests.get', 'requests.get', (['url'], {'headers': 'headers', 'timeout': '(5)'}), '(url, headers=headers, timeout=5)\n', (1024, 1057), False, 'import requests\n'), ((1212, 1332), 're.findall', 're.findall', (['"""<li class="clearfix"><a href="/ciku/(.*?)/" target="_blank">.*?</a><span>(.*?)</span></li>"""', 'html'], {}), '(\n \'<li class="clearfix"><a href="/ciku/(.*?)/" target="_blank">.*?</a><span>(.*?)</span></li>\'\n , html)\n', (1222, 1332), False, 'import re\n'), ((806, 821), 'random.random', 'random.random', ([], {}), '()\n', (819, 821), False, 'import random\n'), ((858, 873), 'random.random', 'random.random', ([], {}), '()\n', (871, 873), False, 'import random\n'), ((1429, 1444), 'random.random', 'random.random', ([], {}), '()\n', (1442, 1444), False, 'import random\n'), ((1481, 1496), 'random.random', 'random.random', ([], {}), '()\n', (1494, 1496), False, 'import random\n')] |
#!/usr/bin/env python3
"""
Created by sarathkaul on 14/11/19
Basic authentication using an API password is deprecated and will soon no longer work.
Visit https://developer.github.com/changes/2020-02-14-deprecating-password-auth
for more information around suggested workarounds and removal dates.
"""
import requests
_GITHUB_API = "https://api.github.com/user"
def fetch_github_info(auth_user: str, auth_pass: str) -> dict:
"""
Fetch GitHub info of a user using the requests module
"""
return requests.get(_GITHUB_API, auth=(auth_user, auth_pass)).json()
if __name__ == "__main__":
for key, value in fetch_github_info("<USER NAME>", "<PASSWORD>").items():
print(f"{key}: {value}")
| [
"requests.get"
] | [((516, 570), 'requests.get', 'requests.get', (['_GITHUB_API'], {'auth': '(auth_user, auth_pass)'}), '(_GITHUB_API, auth=(auth_user, auth_pass))\n', (528, 570), False, 'import requests\n')] |
#!/usr/bin/env python3
# vim: set ai et ts=4 sw=4:
import os
import subprocess
import argparse
import time
import calendar
import re
def run(cmd):
code = subprocess.call(['/bin/bash', '-o', 'pipefail', '-c', cmd])
if code != 0:
raise RuntimeError("Command `%s` returned non-zero status: %d" %
(cmd, code))
def get_output(cmd):
data = subprocess.check_output(cmd, shell=True)
data = data.decode('utf-8').strip()
return data
parser = argparse.ArgumentParser(
description='Run nightly Insolar Jepsen-like tests')
parser.add_argument(
'-b', '--branch', metavar='B', type=str, default='master',
help='git branch name (default: master)')
parser.add_argument(
'-r', '--repeat', metavar='N', type=int, default=100,
help='number of times to repeat tests (default: 100)')
parser.add_argument(
'-c', '--channel', metavar='C', type=str, default='#dev-backend',
help='slack channel (default: #dev-backend)')
parser.add_argument(
'-e', '--emoji', metavar='E', type=str, default='aphyr',
help='message emoji (default: aphyr)')
parser.add_argument(
'-s', '--slack', metavar='H', type=str, required=True,
help='slack hook string (it looks like base64 string)')
parser.add_argument(
'-l', '--logdir', metavar='DIR', type=str, required=True,
help='path to the directory where logfiles will be saved')
parser.add_argument(
'-u', '--url', metavar='URL', type=str, required=True,
help='URL where saved logfiles will be accessible')
args = parser.parse_args()
tests_passed = False
date = "FAILED_TO_GET_DATE"
try:
date = get_output('date +%Y%m%d%H%M00')
except Exception as e:
print("ERROR:")
print(str(e))
logfile_name = 'jepsen-' + date + '.txt'
logfile_fullname = args.logdir + '/' + logfile_name
try:
run('echo "=== BUILDING BRANCH '+args.branch +
' ===" | tee -a '+logfile_fullname)
run('./build-docker.py '+args.branch+' 2>&1 | tee -a '+logfile_fullname)
run('echo "==== RUNNING TESTS '+str(args.repeat) +
' TIMES ===" | tee -a '+logfile_fullname)
run('./run-test.py -i insolar-jepsen:latest -r ' +
str(args.repeat)+' 2>&1 | tee -a '+logfile_fullname)
tests_passed = True
except Exception as e:
print("ERROR:")
print(str(e))
podlogs_name = 'jepsen-' + date + '.tgz'
podlogs_fullname = args.logdir + '/' + podlogs_name
try:
run('echo "=== AGGREGATING LOGS TO ' +
podlogs_fullname+' ===" | tee -a '+logfile_fullname)
run('./aggregate-logs.py /tmp/jepsen-'+date)
run('gunzip /tmp/jepsen-'+date+'/*/*.log.gz || true')
run('tar -cvzf '+podlogs_fullname+' /tmp/jepsen-'+date)
run('rm -r /tmp/jepsen-'+date)
run('echo "=== CLEANING UP '+args.logdir+' ===" | tee -a '+logfile_fullname)
now = int(time.time())
os.chdir(args.logdir)
for fname in os.listdir("."):
m = re.search("jepsen-(\d{4}\d{2}\d{2})", fname)
if m is None:
run(' echo "File: ' + fname + ' - skipped" | tee -a '+logfile_fullname)
continue
ftime = calendar.timegm(time.strptime(m.group(1), "%Y%m%d"))
ndays = int((now - ftime) / (60 * 60 * 24))
delete = ndays > 15
run(' echo "File: ' + fname + ', ndays: ' + str(ndays) +
', delete: ' + str(delete) + '" | tee -a '+logfile_fullname)
if delete:
os.unlink(fname)
except Exception as e:
print("ERROR:")
print(str(e))
print("Test passed: "+str(tests_passed))
message = 'PASSED' if tests_passed else 'FAILED'
message = 'Nightly Jepsen-like tests '+message +\
'. Log: '+args.url+'/'+logfile_name +\
' Pod logs: '+args.url+'/'+podlogs_name
cmd = 'curl -X POST --data-urlencode \'payload={"channel": "'+args.channel +\
'", "username": "aphyr", "text": "'+message +\
'", "icon_emoji": ":'+args.emoji +\
':"}\' https://hooks.slack.com/services/'+args.slack
print("EXECUTING: "+cmd)
run(cmd)
| [
"subprocess.check_output",
"os.listdir",
"argparse.ArgumentParser",
"os.chdir",
"subprocess.call",
"os.unlink",
"time.time",
"re.search"
] | [((494, 570), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Run nightly Insolar Jepsen-like tests"""'}), "(description='Run nightly Insolar Jepsen-like tests')\n", (517, 570), False, 'import argparse\n'), ((161, 220), 'subprocess.call', 'subprocess.call', (["['/bin/bash', '-o', 'pipefail', '-c', cmd]"], {}), "(['/bin/bash', '-o', 'pipefail', '-c', cmd])\n", (176, 220), False, 'import subprocess\n'), ((386, 426), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (409, 426), False, 'import subprocess\n'), ((2816, 2837), 'os.chdir', 'os.chdir', (['args.logdir'], {}), '(args.logdir)\n', (2824, 2837), False, 'import os\n'), ((2855, 2870), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (2865, 2870), False, 'import os\n'), ((2799, 2810), 'time.time', 'time.time', ([], {}), '()\n', (2808, 2810), False, 'import time\n'), ((2884, 2931), 're.search', 're.search', (['"""jepsen-(\\\\d{4}\\\\d{2}\\\\d{2})"""', 'fname'], {}), "('jepsen-(\\\\d{4}\\\\d{2}\\\\d{2})', fname)\n", (2893, 2931), False, 'import re\n'), ((3374, 3390), 'os.unlink', 'os.unlink', (['fname'], {}), '(fname)\n', (3383, 3390), False, 'import os\n')] |
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
import warnings
import eventlet
import fixtures
from oslo_config import cfg
from six.moves import mock
import testscenarios
import oslo_messaging
from oslo_messaging import rpc
from oslo_messaging.rpc import dispatcher
from oslo_messaging.rpc import server as rpc_server_module
from oslo_messaging import server as server_module
from oslo_messaging.tests import utils as test_utils
load_tests = testscenarios.load_tests_apply_scenarios
class ServerSetupMixin(object):
class Server(object):
def __init__(self, transport, topic, server, endpoint, serializer):
self.controller = ServerSetupMixin.ServerController()
target = oslo_messaging.Target(topic=topic, server=server)
self.server = oslo_messaging.get_rpc_server(transport,
target,
[endpoint,
self.controller],
serializer=serializer)
def wait(self):
# Wait for the executor to process the stop message, indicating all
# test messages have been processed
self.controller.stopped.wait()
# Check start() does nothing with a running server
self.server.start()
self.server.stop()
self.server.wait()
def start(self):
self.server.start()
class ServerController(object):
def __init__(self):
self.stopped = threading.Event()
def stop(self, ctxt):
self.stopped.set()
class TestSerializer(object):
def serialize_entity(self, ctxt, entity):
return ('s' + entity) if entity else entity
def deserialize_entity(self, ctxt, entity):
return ('d' + entity) if entity else entity
def serialize_context(self, ctxt):
return dict([(k, 's' + v) for k, v in ctxt.items()])
def deserialize_context(self, ctxt):
return dict([(k, 'd' + v) for k, v in ctxt.items()])
def __init__(self):
self.serializer = self.TestSerializer()
def _setup_server(self, transport, endpoint, topic=None, server=None):
server = self.Server(transport,
topic=topic or 'testtopic',
server=server or 'testserver',
endpoint=endpoint,
serializer=self.serializer)
server.start()
return server
def _stop_server(self, client, server, topic=None):
if topic is not None:
client = client.prepare(topic=topic)
client.cast({}, 'stop')
server.wait()
def _setup_client(self, transport, topic='testtopic'):
return oslo_messaging.RPCClient(transport,
oslo_messaging.Target(topic=topic),
serializer=self.serializer)
class TestRPCServer(test_utils.BaseTestCase, ServerSetupMixin):
def __init__(self, *args):
super(TestRPCServer, self).__init__(*args)
ServerSetupMixin.__init__(self)
def setUp(self):
super(TestRPCServer, self).setUp(conf=cfg.ConfigOpts())
@mock.patch('warnings.warn')
def test_constructor(self, warn):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
target = oslo_messaging.Target(topic='foo', server='bar')
endpoints = [object()]
serializer = object()
access_policy = dispatcher.DefaultRPCAccessPolicy
warnings.simplefilter("always", FutureWarning)
server = oslo_messaging.get_rpc_server(transport,
target,
endpoints,
serializer=serializer,
access_policy=access_policy)
self.assertIs(server.conf, self.conf)
self.assertIs(server.transport, transport)
self.assertIsInstance(server.dispatcher, oslo_messaging.RPCDispatcher)
self.assertIs(server.dispatcher.endpoints, endpoints)
self.assertIs(server.dispatcher.serializer, serializer)
self.assertEqual('blocking', server.executor_type)
self.assertEqual([
mock.call("blocking executor is deprecated. Executor default will "
"be removed. Use explicitly threading or eventlet "
"instead in version 'pike' and will be removed in "
"version 'rocky'",
category=FutureWarning, stacklevel=3)
], warn.mock_calls)
@mock.patch('warnings.warn')
def test_constructor_without_explicit_RPCAccessPolicy(self, warn):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
target = oslo_messaging.Target(topic='foo', server='bar')
endpoints = [object()]
serializer = object()
warnings.simplefilter("always", FutureWarning)
oslo_messaging.get_rpc_server(transport, target,
endpoints, serializer=serializer)
self.assertEqual([
mock.call(mock.ANY, category=FutureWarning, stacklevel=3),
mock.call("blocking executor is deprecated. Executor default will "
"be removed. Use explicitly threading or eventlet "
"instead in version 'pike' and will be removed in "
"version 'rocky'",
category=FutureWarning, stacklevel=3)
], warn.mock_calls)
def test_server_wait_method(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
target = oslo_messaging.Target(topic='foo', server='bar')
endpoints = [object()]
serializer = object()
class MagicMockIgnoreArgs(mock.MagicMock):
"""MagicMock ignores arguments.
A MagicMock which can never misinterpret the arguments passed to
it during construction.
"""
def __init__(self, *args, **kwargs):
super(MagicMockIgnoreArgs, self).__init__()
server = oslo_messaging.get_rpc_server(transport, target, endpoints,
serializer=serializer)
# Mocking executor
server._executor_cls = MagicMockIgnoreArgs
server._create_listener = MagicMockIgnoreArgs()
server.dispatcher = MagicMockIgnoreArgs()
# Here assigning executor's listener object to listener variable
# before calling wait method, because in wait method we are
# setting executor to None.
server.start()
listener = server.listener
server.stop()
# call server wait method
server.wait()
self.assertEqual(1, listener.cleanup.call_count)
def test_no_target_server(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
server = oslo_messaging.get_rpc_server(
transport,
oslo_messaging.Target(topic='testtopic'),
[])
try:
server.start()
except Exception as ex:
self.assertIsInstance(ex, oslo_messaging.InvalidTarget, ex)
self.assertEqual('testtopic', ex.target.topic)
else:
self.assertTrue(False)
def test_no_server_topic(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
target = oslo_messaging.Target(server='testserver')
server = oslo_messaging.get_rpc_server(transport, target, [])
try:
server.start()
except Exception as ex:
self.assertIsInstance(ex, oslo_messaging.InvalidTarget, ex)
self.assertEqual('testserver', ex.target.server)
else:
self.assertTrue(False)
def _test_no_client_topic(self, call=True):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
client = self._setup_client(transport, topic=None)
method = client.call if call else client.cast
try:
method({}, 'ping', arg='foo')
except Exception as ex:
self.assertIsInstance(ex, oslo_messaging.InvalidTarget, ex)
self.assertIsNotNone(ex.target)
else:
self.assertTrue(False)
def test_no_client_topic_call(self):
self._test_no_client_topic(call=True)
def test_no_client_topic_cast(self):
self._test_no_client_topic(call=False)
def test_client_call_timeout(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
finished = False
wait = threading.Condition()
class TestEndpoint(object):
def ping(self, ctxt, arg):
with wait:
if not finished:
wait.wait()
server_thread = self._setup_server(transport, TestEndpoint())
client = self._setup_client(transport)
try:
client.prepare(timeout=0).call({}, 'ping', arg='foo')
except Exception as ex:
self.assertIsInstance(ex, oslo_messaging.MessagingTimeout, ex)
else:
self.assertTrue(False)
with wait:
finished = True
wait.notify()
self._stop_server(client, server_thread)
def test_unknown_executor(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
try:
oslo_messaging.get_rpc_server(transport, None, [], executor='foo')
except Exception as ex:
self.assertIsInstance(ex, oslo_messaging.ExecutorLoadFailure)
self.assertEqual('foo', ex.executor)
else:
self.assertTrue(False)
def test_cast(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
class TestEndpoint(object):
def __init__(self):
self.pings = []
def ping(self, ctxt, arg):
self.pings.append(arg)
endpoint = TestEndpoint()
server_thread = self._setup_server(transport, endpoint)
client = self._setup_client(transport)
client.cast({}, 'ping', arg='foo')
client.cast({}, 'ping', arg='bar')
self._stop_server(client, server_thread)
self.assertEqual(['dsfoo', 'dsbar'], endpoint.pings)
def test_call(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
class TestEndpoint(object):
def ping(self, ctxt, arg):
return arg
server_thread = self._setup_server(transport, TestEndpoint())
client = self._setup_client(transport)
self.assertIsNone(client.call({}, 'ping', arg=None))
self.assertEqual(0, client.call({}, 'ping', arg=0))
self.assertFalse(client.call({}, 'ping', arg=False))
self.assertEqual([], client.call({}, 'ping', arg=[]))
self.assertEqual({}, client.call({}, 'ping', arg={}))
self.assertEqual('dsdsfoo', client.call({}, 'ping', arg='foo'))
self._stop_server(client, server_thread)
def test_direct_call(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
class TestEndpoint(object):
def ping(self, ctxt, arg):
return arg
server_thread = self._setup_server(transport, TestEndpoint())
client = self._setup_client(transport)
direct = client.prepare(server='testserver')
self.assertIsNone(direct.call({}, 'ping', arg=None))
self.assertEqual(0, client.call({}, 'ping', arg=0))
self.assertFalse(client.call({}, 'ping', arg=False))
self.assertEqual([], client.call({}, 'ping', arg=[]))
self.assertEqual({}, client.call({}, 'ping', arg={}))
self.assertEqual('dsdsfoo', direct.call({}, 'ping', arg='foo'))
self._stop_server(client, server_thread)
def test_context(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
class TestEndpoint(object):
def ctxt_check(self, ctxt, key):
return ctxt[key]
server_thread = self._setup_server(transport, TestEndpoint())
client = self._setup_client(transport)
self.assertEqual('dsdsb',
client.call({'dsa': 'b'},
'ctxt_check',
key='a'))
self._stop_server(client, server_thread)
def test_failure(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
class TestEndpoint(object):
def ping(self, ctxt, arg):
raise ValueError(arg)
debugs = []
errors = []
def stub_debug(msg, *a, **kw):
if (a and len(a) == 1 and isinstance(a[0], dict) and a[0]):
a = a[0]
debugs.append(str(msg) % a)
def stub_error(msg, *a, **kw):
if (a and len(a) == 1 and isinstance(a[0], dict) and a[0]):
a = a[0]
errors.append(str(msg) % a)
self.useFixture(fixtures.MockPatchObject(
rpc_server_module.LOG, 'debug', stub_debug))
self.useFixture(fixtures.MockPatchObject(
rpc_server_module.LOG, 'error', stub_error))
server_thread = self._setup_server(transport, TestEndpoint())
client = self._setup_client(transport)
try:
client.call({}, 'ping', arg='foo')
except Exception as ex:
self.assertIsInstance(ex, ValueError)
self.assertEqual('dsfoo', str(ex))
self.assertTrue(len(debugs) == 0)
self.assertGreater(len(errors), 0)
else:
self.assertTrue(False)
self._stop_server(client, server_thread)
def test_expected_failure(self):
transport = oslo_messaging.get_rpc_transport(self.conf, url='fake:')
debugs = []
errors = []
def stub_debug(msg, *a, **kw):
if (a and len(a) == 1 and isinstance(a[0], dict) and a[0]):
a = a[0]
debugs.append(str(msg) % a)
def stub_error(msg, *a, **kw):
if (a and len(a) == 1 and isinstance(a[0], dict) and a[0]):
a = a[0]
errors.append(str(msg) % a)
self.useFixture(fixtures.MockPatchObject(
rpc_server_module.LOG, 'debug', stub_debug))
self.useFixture(fixtures.MockPatchObject(
rpc_server_module.LOG, 'error', stub_error))
class TestEndpoint(object):
@oslo_messaging.expected_exceptions(ValueError)
def ping(self, ctxt, arg):
raise ValueError(arg)
server_thread = self._setup_server(transport, TestEndpoint())
client = self._setup_client(transport)
try:
client.call({}, 'ping', arg='foo')
except Exception as ex:
self.assertIsInstance(ex, ValueError)
self.assertEqual('dsfoo', str(ex))
self.assertGreater(len(debugs), 0)
self.assertTrue(len(errors) == 0)
else:
self.assertTrue(False)
self._stop_server(client, server_thread)
class TestMultipleServers(test_utils.BaseTestCase, ServerSetupMixin):
_exchanges = [
('same_exchange', dict(exchange1=None, exchange2=None)),
('diff_exchange', dict(exchange1='x1', exchange2='x2')),
]
_topics = [
('same_topic', dict(topic1='t', topic2='t')),
('diff_topic', dict(topic1='t1', topic2='t2')),
]
_server = [
('same_server', dict(server1=None, server2=None)),
('diff_server', dict(server1='s1', server2='s2')),
]
_fanout = [
('not_fanout', dict(fanout1=None, fanout2=None)),
('fanout', dict(fanout1=True, fanout2=True)),
]
_method = [
('call', dict(call1=True, call2=True)),
('cast', dict(call1=False, call2=False)),
]
_endpoints = [
('one_endpoint',
dict(multi_endpoints=False,
expect1=['ds1', 'ds2'],
expect2=['ds1', 'ds2'])),
('two_endpoints',
dict(multi_endpoints=True,
expect1=['ds1'],
expect2=['ds2'])),
]
@classmethod
def generate_scenarios(cls):
cls.scenarios = testscenarios.multiply_scenarios(cls._exchanges,
cls._topics,
cls._server,
cls._fanout,
cls._method,
cls._endpoints)
# fanout call not supported
def filter_fanout_call(scenario):
params = scenario[1]
fanout = params['fanout1'] or params['fanout2']
call = params['call1'] or params['call2']
return not (call and fanout)
# listening multiple times on same topic/server pair not supported
def filter_same_topic_and_server(scenario):
params = scenario[1]
single_topic = params['topic1'] == params['topic2']
single_server = params['server1'] == params['server2']
return not (single_topic and single_server)
# fanout to multiple servers on same topic and exchange
# each endpoint will receive both messages
def fanout_to_servers(scenario):
params = scenario[1]
fanout = params['fanout1'] or params['fanout2']
single_exchange = params['exchange1'] == params['exchange2']
single_topic = params['topic1'] == params['topic2']
multi_servers = params['server1'] != params['server2']
if fanout and single_exchange and single_topic and multi_servers:
params['expect1'] = params['expect1'][:] + params['expect1']
params['expect2'] = params['expect2'][:] + params['expect2']
return scenario
# multiple endpoints on same topic and exchange
# either endpoint can get either message
def single_topic_multi_endpoints(scenario):
params = scenario[1]
single_exchange = params['exchange1'] == params['exchange2']
single_topic = params['topic1'] == params['topic2']
if single_topic and single_exchange and params['multi_endpoints']:
params['expect_either'] = (params['expect1'] +
params['expect2'])
params['expect1'] = params['expect2'] = []
else:
params['expect_either'] = []
return scenario
for f in [filter_fanout_call, filter_same_topic_and_server]:
cls.scenarios = [i for i in cls.scenarios if f(i)]
for m in [fanout_to_servers, single_topic_multi_endpoints]:
cls.scenarios = [m(i) for i in cls.scenarios]
def __init__(self, *args):
super(TestMultipleServers, self).__init__(*args)
ServerSetupMixin.__init__(self)
def setUp(self):
super(TestMultipleServers, self).setUp(conf=cfg.ConfigOpts())
def test_multiple_servers(self):
url1 = 'fake:///' + (self.exchange1 or '')
url2 = 'fake:///' + (self.exchange2 or '')
transport1 = oslo_messaging.get_rpc_transport(self.conf, url=url1)
if url1 != url2:
transport2 = oslo_messaging.get_rpc_transport(self.conf, url=url1)
else:
transport2 = transport1
class TestEndpoint(object):
def __init__(self):
self.pings = []
def ping(self, ctxt, arg):
self.pings.append(arg)
def alive(self, ctxt):
return 'alive'
if self.multi_endpoints:
endpoint1, endpoint2 = TestEndpoint(), TestEndpoint()
else:
endpoint1 = endpoint2 = TestEndpoint()
server1 = self._setup_server(transport1, endpoint1,
topic=self.topic1, server=self.server1)
server2 = self._setup_server(transport2, endpoint2,
topic=self.topic2, server=self.server2)
client1 = self._setup_client(transport1, topic=self.topic1)
client2 = self._setup_client(transport2, topic=self.topic2)
client1 = client1.prepare(server=self.server1)
client2 = client2.prepare(server=self.server2)
if self.fanout1:
client1.call({}, 'alive')
client1 = client1.prepare(fanout=True)
if self.fanout2:
client2.call({}, 'alive')
client2 = client2.prepare(fanout=True)
(client1.call if self.call1 else client1.cast)({}, 'ping', arg='1')
(client2.call if self.call2 else client2.cast)({}, 'ping', arg='2')
self._stop_server(client1.prepare(fanout=None),
server1, topic=self.topic1)
self._stop_server(client2.prepare(fanout=None),
server2, topic=self.topic2)
def check(pings, expect):
self.assertEqual(len(expect), len(pings))
for a in expect:
self.assertIn(a, pings)
if self.expect_either:
check(endpoint1.pings + endpoint2.pings, self.expect_either)
else:
check(endpoint1.pings, self.expect1)
check(endpoint2.pings, self.expect2)
TestMultipleServers.generate_scenarios()
class TestServerLocking(test_utils.BaseTestCase):
def setUp(self):
super(TestServerLocking, self).setUp(conf=cfg.ConfigOpts())
def _logmethod(name):
def method(self, *args, **kwargs):
with self._lock:
self._calls.append(name)
return method
executors = []
class FakeExecutor(object):
def __init__(self, *args, **kwargs):
self._lock = threading.Lock()
self._calls = []
executors.append(self)
submit = _logmethod('submit')
shutdown = _logmethod('shutdown')
self.executors = executors
class MessageHandlingServerImpl(oslo_messaging.MessageHandlingServer):
def _create_listener(self):
return mock.Mock()
def _process_incoming(self, incoming):
pass
self.server = MessageHandlingServerImpl(mock.Mock(), mock.Mock())
self.server._executor_cls = FakeExecutor
def test_start_stop_wait(self):
# Test a simple execution of start, stop, wait in order
eventlet.spawn(self.server.start)
self.server.stop()
self.server.wait()
self.assertEqual(1, len(self.executors))
self.assertEqual(['shutdown'], self.executors[0]._calls)
self.assertTrue(self.server.listener.cleanup.called)
def test_reversed_order(self):
# Test that if we call wait, stop, start, these will be correctly
# reordered
eventlet.spawn(self.server.wait)
# This is non-deterministic, but there's not a great deal we can do
# about that
eventlet.sleep(0)
eventlet.spawn(self.server.stop)
eventlet.sleep(0)
eventlet.spawn(self.server.start)
self.server.wait()
self.assertEqual(1, len(self.executors))
self.assertEqual(['shutdown'], self.executors[0]._calls)
def test_wait_for_running_task(self):
# Test that if 2 threads call a method simultaneously, both will wait,
# but only 1 will call the underlying executor method.
start_event = threading.Event()
finish_event = threading.Event()
running_event = threading.Event()
done_event = threading.Event()
_runner = [None]
class SteppingFakeExecutor(self.server._executor_cls):
def __init__(self, *args, **kwargs):
# Tell the test which thread won the race
_runner[0] = eventlet.getcurrent()
running_event.set()
start_event.wait()
super(SteppingFakeExecutor, self).__init__(*args, **kwargs)
done_event.set()
finish_event.wait()
self.server._executor_cls = SteppingFakeExecutor
start1 = eventlet.spawn(self.server.start)
start2 = eventlet.spawn(self.server.start)
# Wait until one of the threads starts running
running_event.wait()
runner = _runner[0]
waiter = start2 if runner == start1 else start2
waiter_finished = threading.Event()
waiter.link(lambda _: waiter_finished.set())
# At this point, runner is running start(), and waiter() is waiting for
# it to complete. runner has not yet logged anything.
self.assertEqual(0, len(self.executors))
self.assertFalse(waiter_finished.is_set())
# Let the runner log the call
start_event.set()
done_event.wait()
# We haven't signalled completion yet, so submit shouldn't have run
self.assertEqual(1, len(self.executors))
self.assertEqual([], self.executors[0]._calls)
self.assertFalse(waiter_finished.is_set())
# Let the runner complete
finish_event.set()
waiter.wait()
runner.wait()
# Check that both threads have finished, start was only called once,
# and execute ran
self.assertTrue(waiter_finished.is_set())
self.assertEqual(1, len(self.executors))
self.assertEqual([], self.executors[0]._calls)
def test_start_stop_wait_stop_wait(self):
# Test that we behave correctly when calling stop/wait more than once.
# Subsequent calls should be noops.
self.server.start()
self.server.stop()
self.server.wait()
self.server.stop()
self.server.wait()
self.assertEqual(len(self.executors), 1)
self.assertEqual(['shutdown'], self.executors[0]._calls)
self.assertTrue(self.server.listener.cleanup.called)
def test_state_wrapping(self):
# Test that we behave correctly if a thread waits, and the server state
# has wrapped when it it next scheduled
# Ensure that if 2 threads wait for the completion of 'start', the
# first will wait until complete_event is signalled, but the second
# will continue
complete_event = threading.Event()
complete_waiting_callback = threading.Event()
start_state = self.server._states['start']
old_wait_for_completion = start_state.wait_for_completion
waited = [False]
def new_wait_for_completion(*args, **kwargs):
if not waited[0]:
waited[0] = True
complete_waiting_callback.set()
complete_event.wait()
old_wait_for_completion(*args, **kwargs)
start_state.wait_for_completion = new_wait_for_completion
# thread1 will wait for start to complete until we signal it
thread1 = eventlet.spawn(self.server.stop)
thread1_finished = threading.Event()
thread1.link(lambda _: thread1_finished.set())
self.server.start()
complete_waiting_callback.wait()
# The server should have started, but stop should not have been called
self.assertEqual(1, len(self.executors))
self.assertEqual([], self.executors[0]._calls)
self.assertFalse(thread1_finished.is_set())
self.server.stop()
self.server.wait()
# We should have gone through all the states, and thread1 should still
# be waiting
self.assertEqual(1, len(self.executors))
self.assertEqual(['shutdown'], self.executors[0]._calls)
self.assertFalse(thread1_finished.is_set())
# Start again
self.server.start()
# We should now record 4 executors (2 for each server)
self.assertEqual(2, len(self.executors))
self.assertEqual(['shutdown'], self.executors[0]._calls)
self.assertEqual([], self.executors[1]._calls)
self.assertFalse(thread1_finished.is_set())
# Allow thread1 to complete
complete_event.set()
thread1_finished.wait()
# thread1 should now have finished, and stop should not have been
# called again on either the first or second executor
self.assertEqual(2, len(self.executors))
self.assertEqual(['shutdown'], self.executors[0]._calls)
self.assertEqual([], self.executors[1]._calls)
self.assertTrue(thread1_finished.is_set())
@mock.patch.object(server_module, 'DEFAULT_LOG_AFTER', 1)
@mock.patch.object(server_module, 'LOG')
def test_logging(self, mock_log):
# Test that we generate a log message if we wait longer than
# DEFAULT_LOG_AFTER
log_event = threading.Event()
mock_log.warning.side_effect = lambda _, __: log_event.set()
# Call stop without calling start. We should log a wait after 1 second
thread = eventlet.spawn(self.server.stop)
log_event.wait()
# Redundant given that we already waited, but it's nice to assert
self.assertTrue(mock_log.warning.called)
thread.kill()
@mock.patch.object(server_module, 'LOG')
def test_logging_explicit_wait(self, mock_log):
# Test that we generate a log message if we wait longer than
# the number of seconds passed to log_after
log_event = threading.Event()
mock_log.warning.side_effect = lambda _, __: log_event.set()
# Call stop without calling start. We should log a wait after 1 second
thread = eventlet.spawn(self.server.stop, log_after=1)
log_event.wait()
# Redundant given that we already waited, but it's nice to assert
self.assertTrue(mock_log.warning.called)
thread.kill()
@mock.patch.object(server_module, 'LOG')
def test_logging_with_timeout(self, mock_log):
# Test that we log a message after log_after seconds if we've also
# specified an absolute timeout
log_event = threading.Event()
mock_log.warning.side_effect = lambda _, __: log_event.set()
# Call stop without calling start. We should log a wait after 1 second
thread = eventlet.spawn(self.server.stop, log_after=1, timeout=2)
log_event.wait()
# Redundant given that we already waited, but it's nice to assert
self.assertTrue(mock_log.warning.called)
thread.kill()
def test_timeout_wait(self):
# Test that we will eventually timeout when passing the timeout option
# if a preceding condition is not satisfied.
self.assertRaises(server_module.TaskTimeout,
self.server.stop, timeout=1)
def test_timeout_running(self):
# Test that we will eventually timeout if we're waiting for another
# thread to complete this task
# Start the server, which will also instantiate an executor
self.server.start()
self.server.stop()
shutdown_called = threading.Event()
# Patch the executor's stop method to be very slow
def slow_shutdown(wait):
shutdown_called.set()
eventlet.sleep(10)
self.executors[0].shutdown = slow_shutdown
# Call wait in a new thread
thread = eventlet.spawn(self.server.wait)
# Wait until the thread is in the slow stop method
shutdown_called.wait()
# Call wait again in the main thread with a timeout
self.assertRaises(server_module.TaskTimeout,
self.server.wait, timeout=1)
thread.kill()
@mock.patch.object(server_module, 'LOG')
def test_log_after_zero(self, mock_log):
# Test that we do not log a message after DEFAULT_LOG_AFTER if the
# caller gave log_after=1
# Call stop without calling start.
self.assertRaises(server_module.TaskTimeout,
self.server.stop, log_after=0, timeout=2)
# We timed out. Ensure we didn't log anything.
self.assertFalse(mock_log.warning.called)
class TestRPCExposeDecorator(test_utils.BaseTestCase):
def foo(self):
pass
@rpc.expose
def bar(self):
"""bar docstring"""
pass
def test_undecorated(self):
self.assertRaises(AttributeError, lambda: self.foo.exposed)
def test_decorated(self):
self.assertEqual(True, self.bar.exposed)
self.assertEqual("""bar docstring""", self.bar.__doc__)
self.assertEqual('bar', self.bar.__name__)
| [
"six.moves.mock.Mock",
"eventlet.getcurrent",
"oslo_config.cfg.ConfigOpts",
"eventlet.sleep",
"oslo_messaging.get_rpc_transport",
"eventlet.spawn",
"fixtures.MockPatchObject",
"threading.Lock",
"oslo_messaging.get_rpc_server",
"testscenarios.multiply_scenarios",
"threading.Event",
"six.moves.m... | [((3927, 3954), 'six.moves.mock.patch', 'mock.patch', (['"""warnings.warn"""'], {}), "('warnings.warn')\n", (3937, 3954), False, 'from six.moves import mock\n'), ((5379, 5406), 'six.moves.mock.patch', 'mock.patch', (['"""warnings.warn"""'], {}), "('warnings.warn')\n", (5389, 5406), False, 'from six.moves import mock\n'), ((29526, 29582), 'six.moves.mock.patch.object', 'mock.patch.object', (['server_module', '"""DEFAULT_LOG_AFTER"""', '(1)'], {}), "(server_module, 'DEFAULT_LOG_AFTER', 1)\n", (29543, 29582), False, 'from six.moves import mock\n'), ((29588, 29627), 'six.moves.mock.patch.object', 'mock.patch.object', (['server_module', '"""LOG"""'], {}), "(server_module, 'LOG')\n", (29605, 29627), False, 'from six.moves import mock\n'), ((30178, 30217), 'six.moves.mock.patch.object', 'mock.patch.object', (['server_module', '"""LOG"""'], {}), "(server_module, 'LOG')\n", (30195, 30217), False, 'from six.moves import mock\n'), ((30819, 30858), 'six.moves.mock.patch.object', 'mock.patch.object', (['server_module', '"""LOG"""'], {}), "(server_module, 'LOG')\n", (30836, 30858), False, 'from six.moves import mock\n'), ((32637, 32676), 'six.moves.mock.patch.object', 'mock.patch.object', (['server_module', '"""LOG"""'], {}), "(server_module, 'LOG')\n", (32654, 32676), False, 'from six.moves import mock\n'), ((4013, 4069), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (4045, 4069), False, 'import oslo_messaging\n'), ((4087, 4135), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': '"""foo"""', 'server': '"""bar"""'}), "(topic='foo', server='bar')\n", (4108, 4135), False, 'import oslo_messaging\n'), ((4264, 4310), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""', 'FutureWarning'], {}), "('always', FutureWarning)\n", (4285, 4310), False, 'import warnings\n'), ((4328, 4444), 'oslo_messaging.get_rpc_server', 
'oslo_messaging.get_rpc_server', (['transport', 'target', 'endpoints'], {'serializer': 'serializer', 'access_policy': 'access_policy'}), '(transport, target, endpoints, serializer=\n serializer, access_policy=access_policy)\n', (4357, 4444), False, 'import oslo_messaging\n'), ((5498, 5554), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (5530, 5554), False, 'import oslo_messaging\n'), ((5572, 5620), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': '"""foo"""', 'server': '"""bar"""'}), "(topic='foo', server='bar')\n", (5593, 5620), False, 'import oslo_messaging\n'), ((5691, 5737), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""', 'FutureWarning'], {}), "('always', FutureWarning)\n", (5712, 5737), False, 'import warnings\n'), ((5746, 5833), 'oslo_messaging.get_rpc_server', 'oslo_messaging.get_rpc_server', (['transport', 'target', 'endpoints'], {'serializer': 'serializer'}), '(transport, target, endpoints, serializer=\n serializer)\n', (5775, 5833), False, 'import oslo_messaging\n'), ((6382, 6438), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (6414, 6438), False, 'import oslo_messaging\n'), ((6456, 6504), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': '"""foo"""', 'server': '"""bar"""'}), "(topic='foo', server='bar')\n", (6477, 6504), False, 'import oslo_messaging\n'), ((6920, 7007), 'oslo_messaging.get_rpc_server', 'oslo_messaging.get_rpc_server', (['transport', 'target', 'endpoints'], {'serializer': 'serializer'}), '(transport, target, endpoints, serializer=\n serializer)\n', (6949, 7007), False, 'import oslo_messaging\n'), ((7662, 7718), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (7694, 7718), False, 'import 
oslo_messaging\n'), ((8170, 8226), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (8202, 8226), False, 'import oslo_messaging\n'), ((8244, 8286), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'server': '"""testserver"""'}), "(server='testserver')\n", (8265, 8286), False, 'import oslo_messaging\n'), ((8304, 8356), 'oslo_messaging.get_rpc_server', 'oslo_messaging.get_rpc_server', (['transport', 'target', '[]'], {}), '(transport, target, [])\n', (8333, 8356), False, 'import oslo_messaging\n'), ((8680, 8736), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (8712, 8736), False, 'import oslo_messaging\n'), ((9343, 9399), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (9375, 9399), False, 'import oslo_messaging\n'), ((9441, 9462), 'threading.Condition', 'threading.Condition', ([], {}), '()\n', (9460, 9462), False, 'import threading\n'), ((10175, 10231), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (10207, 10231), False, 'import oslo_messaging\n'), ((10575, 10631), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (10607, 10631), False, 'import oslo_messaging\n'), ((11203, 11259), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (11235, 11259), False, 'import oslo_messaging\n'), ((11963, 12019), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (11995, 12019), False, 'import oslo_messaging\n'), ((12772, 12828), 
'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (12804, 12828), False, 'import oslo_messaging\n'), ((13345, 13401), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (13377, 13401), False, 'import oslo_messaging\n'), ((14684, 14740), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': '"""fake:"""'}), "(self.conf, url='fake:')\n", (14716, 14740), False, 'import oslo_messaging\n'), ((17150, 17270), 'testscenarios.multiply_scenarios', 'testscenarios.multiply_scenarios', (['cls._exchanges', 'cls._topics', 'cls._server', 'cls._fanout', 'cls._method', 'cls._endpoints'], {}), '(cls._exchanges, cls._topics, cls._server,\n cls._fanout, cls._method, cls._endpoints)\n', (17182, 17270), False, 'import testscenarios\n'), ((20205, 20258), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': 'url1'}), '(self.conf, url=url1)\n', (20237, 20258), False, 'import oslo_messaging\n'), ((23507, 23540), 'eventlet.spawn', 'eventlet.spawn', (['self.server.start'], {}), '(self.server.start)\n', (23521, 23540), False, 'import eventlet\n'), ((23910, 23942), 'eventlet.spawn', 'eventlet.spawn', (['self.server.wait'], {}), '(self.server.wait)\n', (23924, 23942), False, 'import eventlet\n'), ((24048, 24065), 'eventlet.sleep', 'eventlet.sleep', (['(0)'], {}), '(0)\n', (24062, 24065), False, 'import eventlet\n'), ((24075, 24107), 'eventlet.spawn', 'eventlet.spawn', (['self.server.stop'], {}), '(self.server.stop)\n', (24089, 24107), False, 'import eventlet\n'), ((24116, 24133), 'eventlet.sleep', 'eventlet.sleep', (['(0)'], {}), '(0)\n', (24130, 24133), False, 'import eventlet\n'), ((24143, 24176), 'eventlet.spawn', 'eventlet.spawn', (['self.server.start'], {}), '(self.server.start)\n', (24157, 24176), False, 'import eventlet\n'), 
((24528, 24545), 'threading.Event', 'threading.Event', ([], {}), '()\n', (24543, 24545), False, 'import threading\n'), ((24569, 24586), 'threading.Event', 'threading.Event', ([], {}), '()\n', (24584, 24586), False, 'import threading\n'), ((24612, 24629), 'threading.Event', 'threading.Event', ([], {}), '()\n', (24627, 24629), False, 'import threading\n'), ((24651, 24668), 'threading.Event', 'threading.Event', ([], {}), '()\n', (24666, 24668), False, 'import threading\n'), ((25211, 25244), 'eventlet.spawn', 'eventlet.spawn', (['self.server.start'], {}), '(self.server.start)\n', (25225, 25244), False, 'import eventlet\n'), ((25262, 25295), 'eventlet.spawn', 'eventlet.spawn', (['self.server.start'], {}), '(self.server.start)\n', (25276, 25295), False, 'import eventlet\n'), ((25492, 25509), 'threading.Event', 'threading.Event', ([], {}), '()\n', (25507, 25509), False, 'import threading\n'), ((27341, 27358), 'threading.Event', 'threading.Event', ([], {}), '()\n', (27356, 27358), False, 'import threading\n'), ((27395, 27412), 'threading.Event', 'threading.Event', ([], {}), '()\n', (27410, 27412), False, 'import threading\n'), ((27968, 28000), 'eventlet.spawn', 'eventlet.spawn', (['self.server.stop'], {}), '(self.server.stop)\n', (27982, 28000), False, 'import eventlet\n'), ((28028, 28045), 'threading.Event', 'threading.Event', ([], {}), '()\n', (28043, 28045), False, 'import threading\n'), ((29784, 29801), 'threading.Event', 'threading.Event', ([], {}), '()\n', (29799, 29801), False, 'import threading\n'), ((29968, 30000), 'eventlet.spawn', 'eventlet.spawn', (['self.server.stop'], {}), '(self.server.stop)\n', (29982, 30000), False, 'import eventlet\n'), ((30412, 30429), 'threading.Event', 'threading.Event', ([], {}), '()\n', (30427, 30429), False, 'import threading\n'), ((30596, 30641), 'eventlet.spawn', 'eventlet.spawn', (['self.server.stop'], {'log_after': '(1)'}), '(self.server.stop, log_after=1)\n', (30610, 30641), False, 'import eventlet\n'), ((31046, 31063), 
'threading.Event', 'threading.Event', ([], {}), '()\n', (31061, 31063), False, 'import threading\n'), ((31230, 31286), 'eventlet.spawn', 'eventlet.spawn', (['self.server.stop'], {'log_after': '(1)', 'timeout': '(2)'}), '(self.server.stop, log_after=1, timeout=2)\n', (31244, 31286), False, 'import eventlet\n'), ((32035, 32052), 'threading.Event', 'threading.Event', ([], {}), '()\n', (32050, 32052), False, 'import threading\n'), ((32316, 32348), 'eventlet.spawn', 'eventlet.spawn', (['self.server.wait'], {}), '(self.server.wait)\n', (32330, 32348), False, 'import eventlet\n'), ((1287, 1336), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': 'topic', 'server': 'server'}), '(topic=topic, server=server)\n', (1308, 1336), False, 'import oslo_messaging\n'), ((1363, 1468), 'oslo_messaging.get_rpc_server', 'oslo_messaging.get_rpc_server', (['transport', 'target', '[endpoint, self.controller]'], {'serializer': 'serializer'}), '(transport, target, [endpoint, self.controller\n ], serializer=serializer)\n', (1392, 1468), False, 'import oslo_messaging\n'), ((2193, 2210), 'threading.Event', 'threading.Event', ([], {}), '()\n', (2208, 2210), False, 'import threading\n'), ((3542, 3576), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': 'topic'}), '(topic=topic)\n', (3563, 3576), False, 'import oslo_messaging\n'), ((7803, 7843), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': '"""testtopic"""'}), "(topic='testtopic')\n", (7824, 7843), False, 'import oslo_messaging\n'), ((10258, 10324), 'oslo_messaging.get_rpc_server', 'oslo_messaging.get_rpc_server', (['transport', 'None', '[]'], {'executor': '"""foo"""'}), "(transport, None, [], executor='foo')\n", (10287, 10324), False, 'import oslo_messaging\n'), ((13936, 14004), 'fixtures.MockPatchObject', 'fixtures.MockPatchObject', (['rpc_server_module.LOG', '"""debug"""', 'stub_debug'], {}), "(rpc_server_module.LOG, 'debug', stub_debug)\n", (13960, 14004), False, 'import fixtures\n'), ((14043, 
14111), 'fixtures.MockPatchObject', 'fixtures.MockPatchObject', (['rpc_server_module.LOG', '"""error"""', 'stub_error'], {}), "(rpc_server_module.LOG, 'error', stub_error)\n", (14067, 14111), False, 'import fixtures\n'), ((15161, 15229), 'fixtures.MockPatchObject', 'fixtures.MockPatchObject', (['rpc_server_module.LOG', '"""debug"""', 'stub_debug'], {}), "(rpc_server_module.LOG, 'debug', stub_debug)\n", (15185, 15229), False, 'import fixtures\n'), ((15268, 15336), 'fixtures.MockPatchObject', 'fixtures.MockPatchObject', (['rpc_server_module.LOG', '"""error"""', 'stub_error'], {}), "(rpc_server_module.LOG, 'error', stub_error)\n", (15292, 15336), False, 'import fixtures\n'), ((15401, 15447), 'oslo_messaging.expected_exceptions', 'oslo_messaging.expected_exceptions', (['ValueError'], {}), '(ValueError)\n', (15435, 15447), False, 'import oslo_messaging\n'), ((20309, 20362), 'oslo_messaging.get_rpc_transport', 'oslo_messaging.get_rpc_transport', (['self.conf'], {'url': 'url1'}), '(self.conf, url=url1)\n', (20341, 20362), False, 'import oslo_messaging\n'), ((23322, 23333), 'six.moves.mock.Mock', 'mock.Mock', ([], {}), '()\n', (23331, 23333), False, 'from six.moves import mock\n'), ((23335, 23346), 'six.moves.mock.Mock', 'mock.Mock', ([], {}), '()\n', (23344, 23346), False, 'from six.moves import mock\n'), ((32192, 32210), 'eventlet.sleep', 'eventlet.sleep', (['(10)'], {}), '(10)\n', (32206, 32210), False, 'import eventlet\n'), ((3903, 3919), 'oslo_config.cfg.ConfigOpts', 'cfg.ConfigOpts', ([], {}), '()\n', (3917, 3919), False, 'from oslo_config import cfg\n'), ((5028, 5257), 'six.moves.mock.call', 'mock.call', (['"""blocking executor is deprecated. Executor default will be removed. Use explicitly threading or eventlet instead in version \'pike\' and will be removed in version \'rocky\'"""'], {'category': 'FutureWarning', 'stacklevel': '(3)'}), '(\n "blocking executor is deprecated. Executor default will be removed. 
Use explicitly threading or eventlet instead in version \'pike\' and will be removed in version \'rocky\'"\n , category=FutureWarning, stacklevel=3)\n', (5037, 5257), False, 'from six.moves import mock\n'), ((5906, 5963), 'six.moves.mock.call', 'mock.call', (['mock.ANY'], {'category': 'FutureWarning', 'stacklevel': '(3)'}), '(mock.ANY, category=FutureWarning, stacklevel=3)\n', (5915, 5963), False, 'from six.moves import mock\n'), ((5977, 6206), 'six.moves.mock.call', 'mock.call', (['"""blocking executor is deprecated. Executor default will be removed. Use explicitly threading or eventlet instead in version \'pike\' and will be removed in version \'rocky\'"""'], {'category': 'FutureWarning', 'stacklevel': '(3)'}), '(\n "blocking executor is deprecated. Executor default will be removed. Use explicitly threading or eventlet instead in version \'pike\' and will be removed in version \'rocky\'"\n , category=FutureWarning, stacklevel=3)\n', (5986, 6206), False, 'from six.moves import mock\n'), ((20025, 20041), 'oslo_config.cfg.ConfigOpts', 'cfg.ConfigOpts', ([], {}), '()\n', (20039, 20041), False, 'from oslo_config import cfg\n'), ((22492, 22508), 'oslo_config.cfg.ConfigOpts', 'cfg.ConfigOpts', ([], {}), '()\n', (22506, 22508), False, 'from oslo_config import cfg\n'), ((22831, 22847), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (22845, 22847), False, 'import threading\n'), ((23188, 23199), 'six.moves.mock.Mock', 'mock.Mock', ([], {}), '()\n', (23197, 23199), False, 'from six.moves import mock\n'), ((24895, 24916), 'eventlet.getcurrent', 'eventlet.getcurrent', ([], {}), '()\n', (24914, 24916), False, 'import eventlet\n')] |
from itertools import chain
from components.config import getConfig
from components.convert import fetchUser, pretRes
import discord
from discord import channel
from discord.ext import commands
class Feelings(commands.Cog):
def __init__(self, client) -> None:
self.client = client
self.config = getConfig()
@commands.command()
async def love(self, msg: commands.Context, user: discord.User = None):
user = await fetchUser(self.client, user)
await msg.channel.send(f"> {self.client.user.mention} hat {user.mention} ganz dolle lieb ❤️");
@commands.command()
async def arsch(self, msg: commands.Context, user: discord.User = None):
user = await fetchUser(self.client, user)
await msg.channel.send(f"> {user.display_name} ist ein Arsch! <:nani:663857832256471084>");
@commands.command()
async def unimpressed(self, msg: commands.Context, user: discord.User = None):
user = await fetchUser(self.client, user)
userPropertie = ""
if(user != None):
userPropertie = f"von {user.mention} "
await msg.channel.send(f"> {self.client.user.mention} ist {userPropertie}nicht beeindruckt... ");
def setup(client: commands.Bot) -> None:
client.add_cog(Feelings(client))
| [
"components.config.getConfig",
"discord.ext.commands.command",
"components.convert.fetchUser"
] | [((336, 354), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (352, 354), False, 'from discord.ext import commands\n'), ((591, 609), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (607, 609), False, 'from discord.ext import commands\n'), ((844, 862), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (860, 862), False, 'from discord.ext import commands\n'), ((317, 328), 'components.config.getConfig', 'getConfig', ([], {}), '()\n', (326, 328), False, 'from components.config import getConfig\n'), ((452, 480), 'components.convert.fetchUser', 'fetchUser', (['self.client', 'user'], {}), '(self.client, user)\n', (461, 480), False, 'from components.convert import fetchUser, pretRes\n'), ((708, 736), 'components.convert.fetchUser', 'fetchUser', (['self.client', 'user'], {}), '(self.client, user)\n', (717, 736), False, 'from components.convert import fetchUser, pretRes\n'), ((967, 995), 'components.convert.fetchUser', 'fetchUser', (['self.client', 'user'], {}), '(self.client, user)\n', (976, 995), False, 'from components.convert import fetchUser, pretRes\n')] |
"""
The tool to check the availability or syntax of domain, IP or URL.
::
██████╗ ██╗ ██╗███████╗██╗ ██╗███╗ ██╗ ██████╗███████╗██████╗ ██╗ ███████╗
██╔══██╗╚██╗ ██╔╝██╔════╝██║ ██║████╗ ██║██╔════╝██╔════╝██╔══██╗██║ ██╔════╝
██████╔╝ ╚████╔╝ █████╗ ██║ ██║██╔██╗ ██║██║ █████╗ ██████╔╝██║ █████╗
██╔═══╝ ╚██╔╝ ██╔══╝ ██║ ██║██║╚██╗██║██║ ██╔══╝ ██╔══██╗██║ ██╔══╝
██║ ██║ ██║ ╚██████╔╝██║ ╚████║╚██████╗███████╗██████╔╝███████╗███████╗
╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═╝ ╚═══╝ ╚═════╝╚══════╝╚═════╝ ╚══════╝╚══════╝
Tests of the file helper.
Author:
<NAME>, @funilrys, contactTATAfunilrysTODTODcom
Special thanks:
https://pyfunceble.github.io/special-thanks.html
Contributors:
https://pyfunceble.github.io/contributors.html
Project link:
https://github.com/funilrys/PyFunceble
Project documentation:
https://pyfunceble.readthedocs.io/en/dev/
Project homepage:
https://pyfunceble.github.io/
License:
::
Copyright 2017, 2018, 2019, 2020, 2021, 2021 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import secrets
import tempfile
import unittest
from PyFunceble.helpers.file import FileHelper
from PyFunceble.utils.platform import PlatformUtility
class TestFileHelper(unittest.TestCase):
"""
Tests of the file helpers.
"""
def test_set_path_return(self) -> None:
"""
Tests the response from the method which let us set the path to work with.
"""
given = tempfile.NamedTemporaryFile()
file_helper = FileHelper()
actual = file_helper.set_path(given.name)
self.assertIsInstance(actual, FileHelper)
def test_set_path(self) -> None:
"""
Tests the method which let us set the path to work with.
"""
given = tempfile.NamedTemporaryFile()
expected = given.name
file_helper = FileHelper()
file_helper.set_path(given.name)
actual = file_helper.path
self.assertEqual(expected, actual)
file_helper = FileHelper(given.name)
actual = file_helper.path
self.assertEqual(expected, actual)
def test_set_path_not_str(self) -> None:
"""
Tests the method which let us set the path to work with for the case
that it's not a string.
"""
given = ["Hello", "World"]
file_helper = FileHelper()
self.assertRaises(TypeError, lambda: file_helper.set_path(given))
def test_join_path(self) -> None:
"""
Tests the method which let us join paths.
"""
given = "/hello/world"
if PlatformUtility.is_windows():
expected = "/hello/world\\hello\\world"
else:
expected = "/hello/world/hello/world"
actual = FileHelper(given).join_path("hello", "world")
self.assertEqual(expected, actual)
def test_exists(self) -> None:
"""
Tests the method which let us check if the given file exists.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
with open(file_helper.path, "w") as file_stream:
file_stream.write("Hello, World!")
expected = True
actual = file_helper.exists()
self.assertEqual(expected, actual)
os.remove(file_helper.path)
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
def test_get_size(self) -> None:
"""
Tests the method which let us get the size of a file.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
with open(file_helper.path, "w") as file_stream:
file_stream.write("Hello, World!")
expected = True
actual = file_helper.exists()
self.assertEqual(expected, actual)
expected = 13
actual = file_helper.get_size()
self.assertEqual(expected, actual)
os.remove(file_helper.path)
def test_is_empty(self) -> None:
"""
Tests the method which let us check if a file is empty.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
with open(file_helper.path, "w") as file_stream:
file_stream.write("")
expected = True
actual = file_helper.is_empty()
self.assertEqual(expected, actual)
with open(file_helper.path, "w") as file_stream:
file_stream.write("Hello, World!")
expected = False
actual = file_helper.is_empty()
self.assertEqual(expected, actual)
os.remove(file_helper.path)
def test_delete(self) -> None:
"""
Tests the method which let us delete a file.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
with open(file_helper.path, "w") as file_stream:
file_stream.write("")
expected = True
actual = file_helper.exists()
self.assertEqual(expected, actual)
file_helper.delete()
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
def test_write(self) -> None:
"""
Tests the method which let us write a file.
"""
given = tempfile.NamedTemporaryFile(delete=False)
file_helper = FileHelper(given.name)
file_helper.write("Hello, World!")
given.seek(0)
expected = b"Hello, World!"
actual = given.read()
self.assertEqual(expected, actual)
file_helper.write("Hello, this is Funilrys!")
given.seek(0)
expected = b"Hello, World!Hello, this is Funilrys!"
actual = given.read()
self.assertEqual(expected, actual)
file_helper.write("Hello, World!", overwrite=True)
given.seek(0)
expected = b"Hello, World!"
actual = given.read()
self.assertEqual(expected, actual)
def test_read(self) -> None:
"""
Tests the method which let us read a file.
"""
given = tempfile.NamedTemporaryFile(delete=False)
file_helper = FileHelper(given.name)
file_helper.write("Hello, World!")
given.seek(0)
expected = "Hello, World!"
actual = file_helper.read()
self.assertEqual(expected, actual)
def test_read_file_does_not_exists(self) -> None:
"""
Tests the method which let us read a file for the case that the given
file does not exists.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
expected = None
actual = file_helper.read()
self.assertEqual(expected, actual)
def test_read_bytes(self) -> None:
"""
Tests the method which let us read (bytes) a file.
"""
given = tempfile.NamedTemporaryFile(delete=False)
file_helper = FileHelper(given.name)
file_helper.write("Hello, World!")
given.seek(0)
expected = b"Hello, World!"
actual = file_helper.read_bytes()
self.assertEqual(expected, actual)
def test_read_bytes_file_does_not_exists(self) -> None:
"""
Tests the method which let us read (bytes) a file for the case that
the given file does not exists.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
expected = None
actual = file_helper.read_bytes()
self.assertEqual(expected, actual)
def test_open(self) -> None:
"""
Tests the method which let us open the given file as we want.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
with file_helper.open("w") as file_stream:
file_stream.write("Hello, World!")
expected = True
actual = file_helper.exists()
self.assertEqual(expected, actual)
expected = "Hello, World!"
actual = file_helper.read()
self.assertEqual(expected, actual)
def test_copy(self) -> None:
"""
Tests the method which let us copy a file to another place.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
copy_file_helper = FileHelper(tempfile.gettempdir())
copy_file_helper.set_path(copy_file_helper.join_path(secrets.token_hex(8)))
expected = False
actual = file_helper.exists()
actual_copy = copy_file_helper.exists()
self.assertEqual(expected, actual)
self.assertEqual(expected, actual_copy)
file_helper.write("Hello, World!")
expected = True
actual = file_helper.exists()
self.assertEqual(expected, actual)
expected = False
actual_copy = copy_file_helper.exists()
self.assertEqual(expected, actual_copy)
file_helper.copy(copy_file_helper.path)
expected = True
actual_copy = copy_file_helper.exists()
self.assertEqual(expected, actual_copy)
expected = "Hello, World!"
actual = copy_file_helper.read()
self.assertEqual(expected, actual)
expected = True
actual = file_helper.exists()
actual_copy = copy_file_helper.exists()
self.assertEqual(expected, actual)
self.assertEqual(expected, actual_copy)
def test_move(self) -> None:
"""
Tests of the method which let us move a file to another location.
"""
file_helper = FileHelper(tempfile.gettempdir())
file_helper.set_path(file_helper.join_path(secrets.token_hex(8)))
destination_file_helper = FileHelper(tempfile.gettempdir())
destination_file_helper.set_path(
destination_file_helper.join_path(secrets.token_hex(8))
)
expected = False
actual = file_helper.exists()
actual_destination = destination_file_helper.exists()
self.assertEqual(expected, actual)
self.assertEqual(expected, actual_destination)
file_helper.write("Hello, World!")
expected = True
actual = file_helper.exists()
self.assertEqual(expected, actual)
expected = False
actual_destination = destination_file_helper.exists()
self.assertEqual(expected, actual_destination)
file_helper.move(destination_file_helper.path)
expected = True
actual_destination = destination_file_helper.exists()
self.assertEqual(expected, actual_destination)
expected = "Hello, World!"
actual = destination_file_helper.read()
self.assertEqual(expected, actual)
expected = False
actual = file_helper.exists()
self.assertEqual(expected, actual)
expected = True
actual_destination = destination_file_helper.exists()
self.assertEqual(expected, actual_destination)
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    unittest.main()
| [
"secrets.token_hex",
"PyFunceble.utils.platform.PlatformUtility.is_windows",
"PyFunceble.helpers.file.FileHelper",
"tempfile.gettempdir",
"tempfile.NamedTemporaryFile",
"unittest.main",
"os.remove"
] | [((12749, 12764), 'unittest.main', 'unittest.main', ([], {}), '()\n', (12762, 12764), False, 'import unittest\n'), ((2067, 2096), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (2094, 2096), False, 'import tempfile\n'), ((2120, 2132), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', ([], {}), '()\n', (2130, 2132), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((2378, 2407), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (2405, 2407), False, 'import tempfile\n'), ((2461, 2473), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', ([], {}), '()\n', (2471, 2473), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((2617, 2639), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', (['given.name'], {}), '(given.name)\n', (2627, 2639), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((2957, 2969), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', ([], {}), '()\n', (2967, 2969), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((3202, 3230), 'PyFunceble.utils.platform.PlatformUtility.is_windows', 'PlatformUtility.is_windows', ([], {}), '()\n', (3228, 3230), False, 'from PyFunceble.utils.platform import PlatformUtility\n'), ((4046, 4073), 'os.remove', 'os.remove', (['file_helper.path'], {}), '(file_helper.path)\n', (4055, 4073), False, 'import os\n'), ((4873, 4900), 'os.remove', 'os.remove', (['file_helper.path'], {}), '(file_helper.path)\n', (4882, 4900), False, 'import os\n'), ((5691, 5718), 'os.remove', 'os.remove', (['file_helper.path'], {}), '(file_helper.path)\n', (5700, 5718), False, 'import os\n'), ((6536, 6577), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (6563, 6577), False, 'import tempfile\n'), ((6601, 6623), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', (['given.name'], {}), '(given.name)\n', (6611, 6623), False, 'from PyFunceble.helpers.file import FileHelper\n'), 
((7332, 7373), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (7359, 7373), False, 'import tempfile\n'), ((7397, 7419), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', (['given.name'], {}), '(given.name)\n', (7407, 7419), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((8273, 8314), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'delete': '(False)'}), '(delete=False)\n', (8300, 8314), False, 'import tempfile\n'), ((8338, 8360), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', (['given.name'], {}), '(given.name)\n', (8348, 8360), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((3620, 3641), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (3639, 3641), False, 'import tempfile\n'), ((4340, 4361), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (4359, 4361), False, 'import tempfile\n'), ((5061, 5082), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (5080, 5082), False, 'import tempfile\n'), ((5866, 5887), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (5885, 5887), False, 'import tempfile\n'), ((7823, 7844), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (7842, 7844), False, 'import tempfile\n'), ((8785, 8806), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (8804, 8806), False, 'import tempfile\n'), ((9263, 9284), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (9282, 9284), False, 'import tempfile\n'), ((9950, 9971), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (9969, 9971), False, 'import tempfile\n'), ((10086, 10107), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (10105, 10107), False, 'import tempfile\n'), ((11332, 11353), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (11351, 11353), False, 'import tempfile\n'), ((11475, 11496), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], 
{}), '()\n', (11494, 11496), False, 'import tempfile\n'), ((3366, 3383), 'PyFunceble.helpers.file.FileHelper', 'FileHelper', (['given'], {}), '(given)\n', (3376, 3383), False, 'from PyFunceble.helpers.file import FileHelper\n'), ((3694, 3714), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (3711, 3714), False, 'import secrets\n'), ((4414, 4434), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (4431, 4434), False, 'import secrets\n'), ((5135, 5155), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (5152, 5155), False, 'import secrets\n'), ((5940, 5960), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (5957, 5960), False, 'import secrets\n'), ((7897, 7917), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (7914, 7917), False, 'import secrets\n'), ((8859, 8879), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (8876, 8879), False, 'import secrets\n'), ((9337, 9357), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (9354, 9357), False, 'import secrets\n'), ((10024, 10044), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (10041, 10044), False, 'import secrets\n'), ((10170, 10190), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (10187, 10190), False, 'import secrets\n'), ((11406, 11426), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (11423, 11426), False, 'import secrets\n'), ((11586, 11606), 'secrets.token_hex', 'secrets.token_hex', (['(8)'], {}), '(8)\n', (11603, 11606), False, 'import secrets\n')] |
from malaya_speech.utils import (
check_file,
load_graph,
generate_session,
nodes_session,
)
from malaya_speech.model.tf import UNET, UNETSTFT, UNET1D
def load(model, module, quantized=False, **kwargs):
    """
    Load a single-output UNET model from its released frozen graph.

    Parameters
    ----------
    model : str
        Identifier used to locate the released model files.
    module : str
        Module the model belongs to; also used as the returned model's name.
    quantized : bool, optional
        Whether to load the quantized release of the model.

    Returns
    -------
    malaya_speech.model.tf.UNET
    """
    downloaded = check_file(
        file=model,
        module=module,
        keys={'model': 'model.pb'},
        quantized=quantized,
        **kwargs,
    )
    graph = load_graph(downloaded['model'], **kwargs)
    # The frozen graph exposes a single placeholder and a single logits tensor.
    input_nodes, output_nodes = nodes_session(graph, ['Placeholder'], ['logits'])
    return UNET(
        input_nodes=input_nodes,
        output_nodes=output_nodes,
        sess=generate_session(graph=graph, **kwargs),
        model=model,
        name=module,
    )
def load_stft(model, module, instruments, quantized=False, **kwargs):
    """
    Load a multi-output UNETSTFT model with one logits tensor per instrument.

    Parameters
    ----------
    model : str
        Identifier used to locate the released model files.
    module : str
        Module the model belongs to; also used as the returned model's name.
    instruments : list
        Instruments the model separates; determines the number of outputs.
    quantized : bool, optional
        Whether to load the quantized release of the model.

    Returns
    -------
    malaya_speech.model.tf.UNETSTFT
    """
    downloaded = check_file(
        file=model,
        module=module,
        keys={'model': 'model.pb'},
        quantized=quantized,
        **kwargs,
    )
    graph = load_graph(downloaded['model'], **kwargs)
    # One output tensor per instrument: logits_0, logits_1, ...
    output_names = [f'logits_{no}' for no in range(len(instruments))]
    input_nodes, output_nodes = nodes_session(graph, ['Placeholder'], output_names)
    return UNETSTFT(
        input_nodes=input_nodes,
        output_nodes=output_nodes,
        instruments=instruments,
        sess=generate_session(graph=graph, **kwargs),
        model=model,
        name=module,
    )
def load_1d(model, module, quantized=False, **kwargs):
    """
    Load a single-output UNET1D model from its released frozen graph.

    Parameters
    ----------
    model : str
        Identifier used to locate the released model files.
    module : str
        Module the model belongs to; also used as the returned model's name.
    quantized : bool, optional
        Whether to load the quantized release of the model.

    Returns
    -------
    malaya_speech.model.tf.UNET1D
    """
    downloaded = check_file(
        file=model,
        module=module,
        keys={'model': 'model.pb'},
        quantized=quantized,
        **kwargs,
    )
    graph = load_graph(downloaded['model'], **kwargs)
    # The frozen graph exposes a single placeholder and a single logits tensor.
    input_nodes, output_nodes = nodes_session(graph, ['Placeholder'], ['logits'])
    return UNET1D(
        input_nodes=input_nodes,
        output_nodes=output_nodes,
        sess=generate_session(graph=graph, **kwargs),
        model=model,
        name=module,
    )
| [
"malaya_speech.utils.nodes_session",
"malaya_speech.utils.check_file",
"malaya_speech.utils.load_graph",
"malaya_speech.utils.generate_session"
] | [((233, 334), 'malaya_speech.utils.check_file', 'check_file', ([], {'file': 'model', 'module': 'module', 'keys': "{'model': 'model.pb'}", 'quantized': 'quantized'}), "(file=model, module=module, keys={'model': 'model.pb'}, quantized\n =quantized, **kwargs)\n", (243, 334), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((385, 420), 'malaya_speech.utils.load_graph', 'load_graph', (["path['model']"], {}), "(path['model'], **kwargs)\n", (395, 420), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((508, 541), 'malaya_speech.utils.nodes_session', 'nodes_session', (['g', 'inputs', 'outputs'], {}), '(g, inputs, outputs)\n', (521, 541), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((809, 910), 'malaya_speech.utils.check_file', 'check_file', ([], {'file': 'model', 'module': 'module', 'keys': "{'model': 'model.pb'}", 'quantized': 'quantized'}), "(file=model, module=module, keys={'model': 'model.pb'}, quantized\n =quantized, **kwargs)\n", (819, 910), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((961, 996), 'malaya_speech.utils.load_graph', 'load_graph', (["path['model']"], {}), "(path['model'], **kwargs)\n", (971, 996), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((1121, 1154), 'malaya_speech.utils.nodes_session', 'nodes_session', (['g', 'inputs', 'outputs'], {}), '(g, inputs, outputs)\n', (1134, 1154), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((1444, 1545), 'malaya_speech.utils.check_file', 'check_file', ([], {'file': 'model', 'module': 'module', 'keys': "{'model': 'model.pb'}", 'quantized': 'quantized'}), "(file=model, module=module, keys={'model': 'model.pb'}, quantized\n =quantized, **kwargs)\n", (1454, 1545), False, 'from 
malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((1596, 1631), 'malaya_speech.utils.load_graph', 'load_graph', (["path['model']"], {}), "(path['model'], **kwargs)\n", (1606, 1631), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((1718, 1751), 'malaya_speech.utils.nodes_session', 'nodes_session', (['g', 'inputs', 'outputs'], {}), '(g, inputs, outputs)\n', (1731, 1751), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((641, 676), 'malaya_speech.utils.generate_session', 'generate_session', ([], {'graph': 'g'}), '(graph=g, **kwargs)\n', (657, 676), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((1291, 1326), 'malaya_speech.utils.generate_session', 'generate_session', ([], {'graph': 'g'}), '(graph=g, **kwargs)\n', (1307, 1326), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n'), ((1853, 1888), 'malaya_speech.utils.generate_session', 'generate_session', ([], {'graph': 'g'}), '(graph=g, **kwargs)\n', (1869, 1888), False, 'from malaya_speech.utils import check_file, load_graph, generate_session, nodes_session\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
test_util_matrix
@author: jdiedrichsen
"""
import unittest
import pyrsa.util as rsu
import numpy as np
class TestIndicator(unittest.TestCase):
    """Unit tests for the helpers in ``pyrsa.util.matrix``."""

    def test_indicator(self):
        # Ten observations: five conditions, each occurring twice.
        conditions = np.concatenate((np.arange(5), np.arange(5)))
        design = rsu.matrix.indicator(conditions)
        self.assertEqual(design.shape[0], 10)
        self.assertEqual(design.shape[1], 5)
        self.assertEqual(design[0, 0], 1.0)

    def test_indicator_pos(self):
        conditions = np.concatenate((np.arange(5), np.arange(5)))
        design = rsu.matrix.indicator(conditions, positive=True)
        # With positive=True one fewer column is produced (4 instead of 5).
        self.assertEqual(design.shape[0], 10)
        self.assertEqual(design.shape[1], 4)
        self.assertEqual(design[0, 0], 0.0)

    def test_pairwise(self):
        conditions = np.arange(5)
        contrasts = rsu.matrix.pairwise_contrast(conditions)
        # Five conditions yield C(5, 2) = 10 pairwise contrast rows.
        self.assertEqual(contrasts.shape[0], 10)
        self.assertEqual(contrasts.shape[1], 5)
        self.assertEqual(contrasts[0, 0], 1.0)

    def test_centering(self):
        centering = rsu.matrix.centering(10)
        # The centering matrix for 10 observations is 10 x 10.
        self.assertEqual(centering.shape[0], 10)
        self.assertEqual(centering.shape[1], 10)
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
| [
"pyrsa.util.matrix.indicator",
"pyrsa.util.matrix.pairwise_contrast",
"numpy.concatenate",
"unittest.main",
"pyrsa.util.matrix.centering"
] | [((1224, 1239), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1237, 1239), False, 'import unittest\n'), ((275, 297), 'numpy.concatenate', 'np.concatenate', (['(a, a)'], {}), '((a, a))\n', (289, 297), True, 'import numpy as np\n'), ((310, 333), 'pyrsa.util.matrix.indicator', 'rsu.matrix.indicator', (['a'], {}), '(a)\n', (330, 333), True, 'import pyrsa.util as rsu\n'), ((556, 578), 'numpy.concatenate', 'np.concatenate', (['(a, a)'], {}), '((a, a))\n', (570, 578), True, 'import numpy as np\n'), ((591, 629), 'pyrsa.util.matrix.indicator', 'rsu.matrix.indicator', (['a'], {'positive': '(True)'}), '(a, positive=True)\n', (611, 629), True, 'import pyrsa.util as rsu\n'), ((847, 878), 'pyrsa.util.matrix.pairwise_contrast', 'rsu.matrix.pairwise_contrast', (['a'], {}), '(a)\n', (875, 878), True, 'import pyrsa.util as rsu\n'), ((1063, 1087), 'pyrsa.util.matrix.centering', 'rsu.matrix.centering', (['(10)'], {}), '(10)\n', (1083, 1087), True, 'import pyrsa.util as rsu\n')] |