Python source files extracted from a code dataset; each file below is preceded
by a one-line header giving its repository path, repository name, and license.

src/quom/tokenizer/token.py | Viatorus/Quom | MIT

from .iterator import Span, RawIterator
class Token:
def __init__(self, start, end):
self.start = start.copy()
self.end = end.copy()
@property
def raw(self):
return str(Span(RawIterator(self.start), RawIterator(self.end)))
def __str__(self):
return str(Span(self.start, self.end))
class EmptyToken(Token):
def __init__(self):
super().__init__(RawIterator(''), RawIterator(''))
class StartToken(Token):
pass
class EndToken(Token):
pass
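
# Usage sketch (illustrative; not part of the original file). RawIterator is
# assumed to accept a source string, as EmptyToken's constructor shows; Token
# records copies of a start and an end iterator, and `raw` re-reads that span
# without any tokenizer post-processing.
#
#   start = RawIterator(source_text)   # iterator at the lexeme start
#   end = ...                          # an iterator advanced to the lexeme end
#   tok = Token(start, end)
#   tok.raw   # the exact source slice between the two positions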

src/transpiler/cppy/CodeGeneration.py | ArmindoFlores/cppy | MIT

from . import PythonExpressions
class CodeBlock:
def get_code(self, scope):
return NotImplemented
class CBAssign(CodeBlock):
def __init__(self, var, value):
self._var = var
self._value = value
def get_code(self, scope):
return f"SCOPE.set_var(\"{self._var.get_members()[0]}\", \"{scope.get_scope_path()}\", {self._value.get_code(scope)});"
class CBName(CodeBlock):
def __init__(self, var):
self._var = var
def get_code(self, scope):
return self._var.get_code(scope) + ";"
class Scope(CodeBlock):
def __init__(self, name, parent_ctx=None):
self.name = name
self._parent_ctx = parent_ctx
self._variables = {}
self._code_blocks = []
def add_cb(self, cb):
self._code_blocks.append(cb)
def get_scope_path(self):
if self._parent_ctx is None:
return self.name
return self._parent_ctx.get_scope_path() + "." + self.name
def get_code(self, scope):
# var_decl_code = "\n".join(
# (f"\tPyObject *{var};" for var in self._variables)
# )
var_decl_code = ""
total_code = var_decl_code + (("\n" + self._parent_ctx.get_code()) if self._parent_ctx is not None else "")
total_code += "\n".join("\n".join(("\t" + line for line in cb.get_code(self).splitlines())) for cb in self._code_blocks)
total_code = f"\ncppy::PyObjectPtr {self.name}()\n\x7b\n{total_code}\n\treturn cppy::helpers::new_none();\n\x7d"
return total_code
def has_var(self, name):
if name in self._variables:
return True
if self._parent_ctx is not None:
            return self._parent_ctx.has_var(name)
return False
def has_local_var(self, name):
return name in self._variables
def get_var(self, name):
if name in self._variables:
return self._variables[name]
if self._parent_ctx is not None:
            return self._parent_ctx.get_var(name)
return None
def add_var(self, name, var):
if self.has_local_var(name):
return False
self._variables[name] = var
return True
class CBIf(CodeBlock):
def __init__(self, if_condition, if_body, elifs_conditions, elifs_bodies, else_body):
self._if_cond = if_condition
self._if_body = if_body
self._elifs_conds = elifs_conditions
self._elifs_bodies = elifs_bodies
self._else_body = else_body
def get_code(self, scope):
if_text = f"if (cppy::helpers::cbool({self._if_cond.get_code(scope)})) " + "{\n"
if_text += "\n".join("\n".join(("\t" + line for line in cb.get_code(scope).splitlines())) for cb in self._if_body)
if_text += "\n}\n"
for i in range(len(self._elifs_conds)):
if_text += f"else if (cppy::helpers::cbool({self._elifs_conds[i].get_code(scope)})) " + "{\n"
if_text += "\n".join("\n".join(("\t" + line for line in cb.get_code(scope).splitlines())) for cb in self._elifs_bodies[i])
if_text += "\n}\n"
if self._else_body is not None:
if_text += "else {\n"
if_text += "\n".join("\n".join(("\t" + line for line in cb.get_code(scope).splitlines())) for cb in self._else_body)
if_text += "\n}\n"
        return if_text
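
# Usage sketch (illustrative; not part of the original module). `_Expr` is a
# stand-in for the expression classes in PythonExpressions, which are assumed
# to expose get_code(scope); the real classes may differ.
if __name__ == "__main__":
    class _Expr:
        def __init__(self, code):
            self._code = code

        def get_code(self, scope):
            return self._code

    main = Scope("py_main")
    main.add_cb(CBName(_Expr("some_call()")))
    # Emits roughly: cppy::PyObjectPtr py_main() { some_call(); return ...new_none(); }
    print(main.get_code(None))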

yt/frontends/athena/io.py | danielgrassinger/yt_new_frontend | BSD-3-Clause-Clear

"""
The data-file handling functions
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from yt.utilities.io_handler import \
BaseIOHandler
import numpy as np
from yt.funcs import mylog, defaultdict
from .data_structures import chk23
float_size = {"float":np.dtype(">f4").itemsize,
"double":np.dtype(">f8").itemsize}
axis_list = ["_x","_y","_z"]
class IOHandlerAthena(BaseIOHandler):
_dataset_type = "athena"
_offset_string = 'data:offsets=0'
_data_string = 'data:datatype=0'
_read_table_offset = None
def _field_dict(self,fhandle):
keys = fhandle['field_types'].keys()
val = fhandle['field_types'].keys()
return dict(zip(keys,val))
def _read_field_names(self,grid):
pass
def _read_chunk_data(self,chunk,fields):
data = {}
if len(chunk.objs) == 0: return data
for grid in chunk.objs:
if grid.filename is None:
continue
f = open(grid.filename, "rb")
data[grid.id] = {}
grid_dims = grid.ActiveDimensions
read_dims = grid.read_dims.astype("int64")
grid_ncells = np.prod(read_dims)
grid0_ncells = np.prod(grid.index.grids[0].read_dims)
read_table_offset = get_read_table_offset(f)
for field in fields:
ftype, offsetr, dtype = grid.index._field_map[field]
if grid_ncells != grid0_ncells:
offset = offsetr + ((grid_ncells-grid0_ncells) * (offsetr//grid0_ncells))
if grid_ncells == grid0_ncells:
offset = offsetr
offset = int(offset) # Casting to be certain.
file_offset = grid.file_offset[2]*read_dims[0]*read_dims[1]*float_size[dtype]
xread = slice(grid.file_offset[0],grid.file_offset[0]+grid_dims[0])
yread = slice(grid.file_offset[1],grid.file_offset[1]+grid_dims[1])
f.seek(read_table_offset+offset+file_offset)
if dtype == 'float':
dt = '>f4'
elif dtype == 'double':
dt = '>f8'
if ftype == 'scalar':
f.seek(read_table_offset+offset+file_offset)
v = np.fromfile(f, dtype=dt,
count=grid_ncells).reshape(read_dims,order='F')
if ftype == 'vector':
vec_offset = axis_list.index(field[-1][-2:])
f.seek(read_table_offset+offset+3*file_offset)
v = np.fromfile(f, dtype=dt, count=3*grid_ncells)
v = v[vec_offset::3].reshape(read_dims,order='F')
if grid.ds.field_ordering == 1:
data[grid.id][field] = v[xread,yread,:].T.astype("float64")
else:
data[grid.id][field] = v[xread,yread,:].astype("float64")
f.close()
return data
def _read_data_slice(self, grid, field, axis, coord):
sl = [slice(None), slice(None), slice(None)]
sl[axis] = slice(coord, coord + 1)
if grid.ds.field_ordering == 1:
sl.reverse()
return self._read_data_set(grid, field)[sl]
def _read_fluid_selection(self, chunks, selector, fields, size):
chunks = list(chunks)
if any((ftype != "athena" for ftype, fname in fields)):
raise NotImplementedError
rv = {}
for field in fields:
rv[field] = np.empty(size, dtype="float64")
ng = sum(len(c.objs) for c in chunks)
mylog.debug("Reading %s cells of %s fields in %s grids",
size, [f2 for f1, f2 in fields], ng)
ind = 0
for chunk in chunks:
data = self._read_chunk_data(chunk, fields)
for g in chunk.objs:
for field in fields:
ftype, fname = field
ds = data[g.id].pop(field)
nd = g.select(selector, ds, rv[field], ind) # caches
ind += nd
data.pop(g.id)
return rv
def get_read_table_offset(f):
line = f.readline()
while True:
splitup = line.strip().split()
chkc = chk23('CELL_DATA')
chkp = chk23('POINT_DATA')
if chkc in splitup or chkp in splitup:
f.readline()
read_table_offset = f.tell()
break
line = f.readline()
return read_table_offset
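
# Offline sketch (illustrative; not part of yt): get_read_table_offset scans a
# VTK-style header for a CELL_DATA/POINT_DATA line and returns the byte offset
# just past the line that follows it. chk23 is assumed to return the matching
# bytes literal (e.g. b'CELL_DATA') on Python 3.
#
#   import io
#   f = io.BytesIO(b"# vtk header\nCELL_DATA 8\nSCALARS d float\n<binary>")
#   get_read_table_offset(f)   # offset of the first byte after the SCALARS line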

src/configs/adult/adult_mlp_weighted.py | nbingo/sMOOth | MIT

"""
An example config file, adapted from detectron2's torchvision ImageNet example,
to train an MLP income classifier on the Adult dataset.
This shows how to use detectron2 as a general engine for new models and tasks.
To run, pass this file to a detectron2 lazy-config training script, e.g.:
python tools/lazyconfig_train_net.py --config-file src/configs/adult/adult_mlp_weighted.py
"""
import yaml
import torch
from omegaconf import OmegaConf
from fvcore.common.param_scheduler import CosineParamScheduler
from detectron2.solver import WarmupParamScheduler
from detectron2.solver.build import get_default_optimizer_params
from detectron2.config import LazyConfig, LazyCall as L
from detectron2.evaluation import DatasetEvaluators
from src.configs.common.utils import build_data_loader
from src.models.adult_mlp import IncomeClassifier
from src.loaders.adult_loader import FeatDataset
from src.metrics.evaluators import ClassificationAcc, BinaryEqualizedOddsViolation
from src.metrics.losses import cross_entropy_loss, equalized_odds_violation, MultiObjectiveLoss
from src.harnesses.harnesses import MultiProcessHarness, SimpleHarness
dataloader = OmegaConf.create()
dataloader.train = L(build_data_loader)(
dataset=L(FeatDataset)(
subset='train',
income_const=yaml.load(open('/lfs/local/0/nomir/sMOOth/data/Adult/income.yml'), Loader=yaml.FullLoader)
),
batch_size=256,
num_workers=4,
training=True,
)
dataloader.test = L(build_data_loader)(
dataset=L(FeatDataset)(
subset='val',
income_const=yaml.load(open('/lfs/local/0/nomir/sMOOth/data/Adult/income.yml'), Loader=yaml.FullLoader)
),
batch_size=256,
num_workers=4,
training=False,
)
# Can also be list of DatasetEvaluators
dataloader.evaluator = L(DatasetEvaluators)(evaluators=(ClassificationAcc(), BinaryEqualizedOddsViolation()))
train = LazyConfig.load("/lfs/local/0/nomir/sMOOth/src/configs/common/train.py").train
train.init_checkpoint = None
# max_iter = number epochs * (train dataset size / batch size)
train.max_iter = 50 * 30162 // 256
train.eval_period = 30162 // 256
train.loss_fn = L(MultiObjectiveLoss)(losses=[cross_entropy_loss, equalized_odds_violation])
train.loss_tradeoff = torch.Tensor([0.5, 0.5])
# Arguments for multiprocess training
train.harness = SimpleHarness
train.num_workers = 1
train.gpus = [0] # TODO: Eventually want this to be a commandline arg
train.process_over_key = 'model.loss_fn'
train.process_over_vals = [cross_entropy_loss]
model = L(IncomeClassifier)(
in_dim=105,
hidden_dim=105,
num_hidden_blocks=2,
drop_prob=0.2,
out_dim=2,
loss_fn=train.loss_fn,
device=train.device,
)
optimizer = L(torch.optim.Adam)(
params=L(get_default_optimizer_params)(),
lr=1e-3,
weight_decay=1e-4,
)
lr_multiplier = L(WarmupParamScheduler)(
scheduler=L(CosineParamScheduler)(
start_value=0.1,
end_value=1e-4,
),
warmup_length=1 / 100,
warmup_factor=0.1,
)
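
# Materialization sketch (illustrative; not part of this config). detectron2's
# lazy-config trainer instantiates these nodes at run time, roughly:
#
#   from detectron2.config import instantiate
#   model_obj = instantiate(model)
#   optimizer.params.model = model_obj  # get_default_optimizer_params needs the model
#   optim = instantiate(optimizer)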

python/message_queues/pika_route.py | edgells/dev_coms | MIT

import random
import threading
import pika
"""
总结:
"""
def send():
rb_conn = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.101.129',
port=5672,
virtual_host='/',
credentials=pika.PlainCredentials(username='admin',
password='admin')),
)
ch = rb_conn.channel()
ch.exchange_declare(exchange='direct_logs', exchange_type='direct') # create direct exchange
# bind queue
msg = b"hello world"
for n in range(100):
for tag in ['info', 'error', 'warn']:
ch.basic_publish(exchange="direct_logs",
routing_key=tag,
body=msg) # to exchange send message
ch.close()
print('send over')
def recv():
rb_conn = pika.BlockingConnection(pika.ConnectionParameters(host='192.168.101.129',
port=5672,
virtual_host='/',
credentials=pika.PlainCredentials(username='admin',
password='admin')),
)
ch = rb_conn.channel()
ch.exchange_declare('direct_logs', exchange_type='direct')
def callback(ch, method, p, msg):
print(threading.get_ident(), '---', method.routing_key, '---', msg)
queue = ch.queue_declare(queue='', exclusive=True)
queue_name = queue.method.queue
for tag in ['info', 'error', 'warn']:
ch.queue_bind(exchange='direct_logs', queue=queue_name, routing_key=tag)
ch.basic_consume(
queue=queue_name,
on_message_callback=callback,
auto_ack=True
)
ch.start_consuming()
if __name__ == '__main__':
rv = threading.Thread(target=recv)
rv.start()
send()
rv.join()

hippybot/plugins/plusplusbot.py | 1stvamp/hippybot | BSD-2-Clause-FreeBSD

import os
import os.path
import re
import sqlite3dbm
from threading import RLock
from hippybot.hipchat import HipChatApi
from hippybot.decorators import botcmd, contentcmd
CONFIG_DIR = os.path.expanduser("~/.techbot")
DB = os.path.expanduser("~/.techbot/score.db")
class Plugin(object):
"""Plugin to handle knewton replacement of ++ bot in partychatapp
"""
global_commands = ['scores']
    def __init__(self, config=None):
        # config is accepted for hippybot's plugin interface but unused here
        self.rlock = RLock()
        self.db = self.get_db()
def get_db(self):
self.create_dir()
db = sqlite3dbm.sshelve.open(DB)
return db
def create_dir(self):
if not os.path.exists(CONFIG_DIR):
os.mkdir(CONFIG_DIR)
@contentcmd
def change_score(self, mess, **kwargs):
message = mess.getBody()
if message:
room = unicode(mess.getFrom()).split("/")[0]
user = unicode(mess.getFrom()).split("/")[1]
results = []
if message.find('++') > -1 or message.find('--') > -1:
self.bot.log.info("plusplusbot: %s" % mess)
if message.endswith("++") or message.endswith("--"):
results.extend(self.process_message(message, room, user))
for m in re.findall("\((.*?)\)", message):
if m.endswith("++") or m.endswith("--"):
results.extend(self.process_message(m, room, user))
if len(results) > 0:
return "\n".join(results)
def process_message(self, message, room, user):
results = []
victim = message[:-2]
excl = "woot!"
plus = 1
if message.endswith('--'):
excl = "ouch!"
plus = -1
with self.rlock:
scores = self.db.get(room, {})
score = scores.setdefault(victim, 0)
score += plus
scores[victim] = score
self.db[room] = scores
return ["[%s] %s [%s now at %s]" % (user, victim, excl, score)]
@botcmd
def scores(self, mess, args, **kwargs):
"""
Prints all scores from this room
Format: @NickName scores
"""
self.bot.log.info("score: %s" % mess)
room = unicode(mess.getFrom()).split("/")[0]
ret = []
with self.rlock:
scores = self.db.get(room, {})
for key in scores:
ret.append("%s: %s" %(key, scores[key]))
return '\n'.join(ret)

logchecker/__init__.py | Lifars/log-checker | MIT

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Logchecker tool for scanning log files against YETI Threat Intelligence Repository.
By LIFARS
This code is licensed under MIT license (see LICENSE for details)
"""
__version__ = "0.8"
__author__ = "LIFARS LLC"
__copyright__ = "Copyright (c) 2020,2021 LIFARS LLC"
__credits__ = ["LIFARS LLC"]
__license__ = "MIT"
__maintainer__ = "LIFARS LLC"
__status__ = "Production"
import argparse
import collections
import configparser
import csv
import json
import os
import re
import sys
import Evtx.Evtx as evtx
import pyeti
Config = collections.namedtuple("Config", ["url", "key", "output"])
def is_valid_file(parser, arg):
if not os.path.exists(arg):
parser.error("The file %s does not exist!" % arg)
else:
return arg
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-c",
"--config",
help="Config file path. Config file should contain url of YETI database,"
" authorization key and output format. If it is present, it overrides"
" --url, --key and --csv/--json options.",
type=argparse.FileType("r"),
)
parser.add_argument(
"-f",
"--file",
help="[REQUIRED] Log file path.",
type=lambda x: is_valid_file(parser, x),
required=True,
)
parser.add_argument(
"-o",
"--output",
help="Output file path. If file does not exist, creates new file."
"If not specified, output is printed to STDOUT.",
type=argparse.FileType("w+"),
)
parser.add_argument(
"-a",
"--address",
default=False,
action="store_true",
help="Search only for ip addresses. If none of the address, "
"domain or hash flag is specified, it search for all mentioned.",
)
parser.add_argument(
"-d",
"--domain",
default=False,
action="store_true",
help="Search only for domains. If none of the address, "
"domain or hash flag is specified, it search for all mentioned.",
)
parser.add_argument(
"-H",
"--hash",
default=False,
action="store_true",
help="Search only for hashes. If none of the address, "
"domain or hash flag is specified, it search for all mentioned.",
)
parser.add_argument(
"-A",
"--all",
default=False,
action="store_true",
help="Show all values in logs. By default it shows only values "
"which have record in database.",
)
group = parser.add_mutually_exclusive_group()
group.add_argument(
"-C",
"--csv",
default=False,
action="store_true",
help="Output in CSV format. This is default option.",
)
group.add_argument(
"-j",
"--json",
default=False,
action="store_true",
help="Output in JSON format. By default output is in CSV format.",
)
parser.add_argument("-u", "--url", help="URL of YETI instance.", type=str)
parser.add_argument("-k", "--key", help="API key for YETI.", type=str)
args = parser.parse_args()
if not (args.config or args.url):
parser.error(
"Missing URL of YETI. Use --url URL or add config file using --config CONFIG"
)
url = args.url
key = args.key
csv = args.csv
json = args.json
if args.config:
url, key, outf = parse_config_file(args.config)
if outf.lower() == "json":
json = True
csv = False
elif outf.lower() == "csv":
json = False
csv = True
else:
print("Unsupported output format. Using default", file=sys.stderr)
json = False
csv = True
check_log_file(
args.file,
url,
key,
output=args.output,
address=args.address,
domain=args.domain,
hash=args.hash,
all=args.all,
csv=csv,
json=json,
)
def parse_config_file(file):
config = configparser.ConfigParser()
config.read_file(file)
url = config.get("DEFAULT", "url")
key = config.get("DEFAULT", "api_key")
output = config.get("DEFAULT", "output_format")
return Config(url, key, output)
def check_log_file(file, url, key, **kwargs):
_, file_extension = os.path.splitext(file)
print("reading file", file=sys.stderr)
if file_extension == ".evtx":
log = __read_evtx_file(file)
else:
log = __read_text_file(file)
print("parsing file", file=sys.stderr)
    values = parse_log_file(log, **kwargs)  # forward the address/domain/hash filters
print("looking in database", file=sys.stderr)
results = []
a = kwargs.get("all", False)
api = pyeti.YetiApi(url, api_key=key)
for val, logs in values.items():
result = {"value": val}
yeti = api.observable_search(value=val)
if yeti:
result["tags"] = yeti[0].get("tags", [])
result["created"] = yeti[0].get("created", "")
result["sources"] = yeti[0].get("sources", [])
else:
result["tags"] = []
result["created"] = ""
result["sources"] = []
result["original_log"] = logs
if yeti or a:
results.append(result)
print("writing results", file=sys.stderr)
ret = kwargs.get("ret", False)
if ret:
return results
output = kwargs.get("output", None)
if not output:
output = sys.stdout
j = kwargs.get("json", False)
if j:
json.dump(results, output, indent=4, sort_keys=True)
else:
fields = ["value", "tags", "created", "sources", "original_log"]
results = __flatten(map(__unpack_logs, map(__csv_row, results)))
writer = csv.DictWriter(output, fieldnames=fields, quoting=csv.QUOTE_ALL)
writer.writeheader()
writer.writerows(results)
outfh = kwargs.get("output", None)
if outfh:
outfh.close()
print("finished", file=sys.stderr)
def parse_log_file(log, **kwargs):
addr_pattern = re.compile("(?:[0-9]{1,3}\.){3}[0-9]{1,3}")
ipv6_pattern = re.compile(
"(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}|"
"fe80:(?::[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]+|"
"::(?:ffff(?::0{1,4})?:)?"
"(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}"
"(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|"
"(?:[0-9a-fA-F]{1,4}:){1,4}:"
"(?:(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])\.){3}"
"(?:25[0-5]|(?:2[0-4]|1?[0-9])?[0-9])|"
":(?:(?::[0-9a-fA-F]{1,4}){1,7}|:)|"
"[0-9a-fA-F]{1,4}:(?:(?::[0-9a-fA-F]{1,4}){1,6})|"
"(?:[0-9a-fA-F]{1,4}:){1,2}(?::[0-9a-fA-F]{1,4}){1,5}|"
"(?:[0-9a-fA-F]{1,4}:){1,3}(?::[0-9a-fA-F]{1,4}){1,4}|"
"(?:[0-9a-fA-F]{1,4}:){1,4}(?::[0-9a-fA-F]{1,4}){1,3}|"
"(?:[0-9a-fA-F]{1,4}:){1,5}(?::[0-9a-fA-F]{1,4}){1,2}|"
"(?:[0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|"
"(?:[0-9a-fA-F]{1,4}:){1,7}:"
)
domain_pattern = re.compile("(?:[a-z0-9](?:[a-z0-9-]{0,61}[a-z0-9])?\.)+[a-z]{2,6}")
hash_pattern = re.compile("[0-9a-fA-F]{64}|[0-9a-fA-F]{40}|[0-9a-fA-F]{32}")
a = kwargs.get("address", False)
d = kwargs.get("domain", False)
h = kwargs.get("hash", False)
flags = a or d or h
values = {}
for line in log:
if (not flags) or a:
addr = addr_pattern.findall(line)
for match in addr:
values.setdefault(match, []).append(line)
addr = ipv6_pattern.findall(line)
for match in addr:
values.setdefault(match.lower(), []).append(line)
if (not flags) or d:
dom = domain_pattern.findall(line)
for match in dom:
values.setdefault(match.lower(), []).append(line)
if (not flags) or h:
ha = hash_pattern.findall(line)
for match in ha:
values.setdefault(match.lower(), []).append(line)
values.pop("schemas.microsoft.com", None)
return values
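
# Offline sanity check (illustrative; not part of the original module):
# parse_log_file needs no YETI connection, so IOC extraction can be tried alone.
#
#   >>> parse_log_file(["GET http://evil.example.com/ from 10.0.0.5"])
#   {'10.0.0.5': ['GET ...'], 'evil.example.com': ['GET ...']}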
def __read_evtx_file(file):
with evtx.Evtx(file) as f:
log = list(map(evtx.Record.xml, f.records()))
return log
def __read_text_file(file):
with open(file) as f:
log = f.read().splitlines()
return log
def __dict_to_string(d):
return " ".join(["{}:{}".format(key, val) for key, val in d.items()])
def __list_to_string(li):
return " ".join(li)
def __csv_row(d):
d["tags"] = __list_to_string([__dict_to_string(tag) for tag in d["tags"]])
d["sources"] = __list_to_string(d["sources"])
return d
def __unpack_logs(d):
result = []
for log in d["original_log"]:
new = d.copy()
new["original_log"] = log
result.append(new)
return result
def __flatten(li):
return [item for sublist in li for item in sublist]
if __name__ == "__main__":
main()

python/perspective/perspective/core/plugin.py | JKGu/perspective | Apache-2.0

################################################################################
#
# Copyright (c) 2019, the Perspective Authors.
#
# This file is part of the Perspective library, distributed under the terms of
# the Apache License 2.0. The full license can be found in the LICENSE file.
#
from enum import Enum
class Plugin(Enum):
'''The plugins (grids/charts) available in Perspective. Pass these into
the `plugin` arg in `PerspectiveWidget` or `PerspectiveViewer`.
Examples:
>>> widget = PerspectiveWidget(data, plugin=Plugin.TREEMAP)
'''
HYPERGRID = 'hypergrid' # hypergrid
GRID = 'hypergrid' # hypergrid
YBAR = 'y_bar' # highcharts
XBAR = 'x_bar' # highcharts
YLINE = 'y_line' # highcharts
YAREA = 'y_area' # highcharts
YSCATTER = 'y_scatter' # highcharts
XYLINE = 'xy_line' # highcharts
XYSCATTER = 'xy_scatter' # highcharts
TREEMAP = 'treemap' # highcharts
SUNBURST = 'sunburst' # highcharts
HEATMAP = 'heatmap' # highcharts
YBAR_D3 = 'd3_y_bar' # d3fc
XBAR_D3 = 'd3_x_bar' # d3fc
YLINE_D3 = 'd3_y_line' # d3fc
YAREA_D3 = 'd3_y_area' # d3fc
YSCATTER_D3 = 'd3_y_scatter' # d3fc
XYSCATTER_D3 = 'd3_xy_scatter' # d3fc
TREEMAP_D3 = 'd3_treemap' # d3fc
SUNBURST_D3 = 'd3_sunburst' # d3fc
HEATMAP_D3 = 'd3_heatmap' # d3fc
CANDLESTICK = 'd3_candlestick' # d3fc
CANDLESTICK_D3 = 'd3_candlestick' # d3fc
OHLC = 'd3_ohlc' # d3fc
OHLC_D3 = 'd3_ohlc' # d3fc
@staticmethod
def options():
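        # Enum treats members with equal values (e.g. GRID/HYPERGRID,
        # CANDLESTICK/CANDLESTICK_D3) as aliases, so iteration yields each
        # plugin value only once.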
return list(c.value for c in Plugin)

BasicPythonPrograms/pythonExe16.py | Pushkar745/PythonProgramming | Apache-2.0

# Explicit function: returns the digit sum of an integer
def digitSum(n):
dsum=0
for ele in str(n):
        dsum += int(ele)
return dsum
# Initializing the list
List=[367,111,562,945,6726,873]
# Use the function on the odd elements of the list (i & 1 is true for odd numbers)
newList=[digitSum(i) for i in List if i & 1]
print(newList)
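# expected output: [16, 3, 18, 18]  (digit sums of the odd entries 367, 111, 945, 873)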

examples/resnet34_imagenet/resnet34.py | FujitsuResearch/automatic_pruning | BSD-3-Clause-Clear

# resnet34.py COPYRIGHT Fujitsu Limited 2022
import torch.nn as nn
import torch.nn.functional as F
def zero_padding(x1, x2):
num_ch1 = x1.size()[1]
num_ch2 = x2.size()[1]
ch_diff = num_ch1 - num_ch2
# path1 < path2 : zero padding to path1 tensor
if num_ch1 < num_ch2:
ch_diff = -1 * ch_diff
if ch_diff%2 ==0:
x1 = F.pad(x1[:, :, :, :], (0, 0, 0, 0, ch_diff//2, ch_diff//2), "constant", 0)
else:
x1 = F.pad(x1[:, :, :, :], (0, 0, 0, 0, ch_diff//2, (ch_diff//2)+1), "constant", 0)
# path1 > path2 : zero padding to path2 tensor
elif num_ch1 > num_ch2:
if ch_diff%2 ==0:
x2 = F.pad(x2[:, :, :, :], (0, 0, 0, 0, ch_diff//2, ch_diff//2), "constant", 0)
else:
x2 = F.pad(x2[:, :, :, :], (0, 0, 0, 0, ch_diff//2, (ch_diff//2)+1), "constant", 0)
return x1, x2
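
# Shape sketch (illustrative): zero_padding equalizes channel counts so the
# residual add works between branches pruned to different widths, e.g.
#
#   x1 = torch.zeros(1, 60, 8, 8); x2 = torch.zeros(1, 64, 8, 8)
#   a, b = zero_padding(x1, x2)   # both now have 64 channels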
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
"""3x3 convolution with padding"""
return nn.Conv2d(
in_planes,
out_planes,
kernel_size=3,
stride=stride,
padding=dilation,
groups=groups,
bias=False,
dilation=dilation,
)
def conv1x1(in_planes, out_planes, stride=1):
"""1x1 convolution"""
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(
self,
inplanes,
planes,
stride=1,
downsample=None,
groups=1,
base_width=64,
dilation=1,
norm_layer=None,
n_in_channels=None,
n_channels1=None,
n_channels2=None,
):
super(BasicBlock, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
if groups != 1 or base_width != 64:
raise ValueError("BasicBlock only supports groups=1 and base_width=64")
if dilation > 1:
raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
# Both self.conv1 and self.downsample layers downsample the input when stride != 1
self.conv1 = conv3x3(n_in_channels, n_channels1, stride)
self.bn1 = norm_layer(n_channels1)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(n_channels1, n_channels2)
self.bn2 = norm_layer(n_channels2)
        self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
identity = self.downsample(x)
out, identity = zero_padding(out, identity) # zero padding
out += identity
out = self.relu(out)
return out
class ResNet34(nn.Module):
def __init__(
self,
block=BasicBlock,
layers=[3, 4, 6, 3],
num_classes=1000,
zero_init_residual=False,
groups=1,
width_per_group=64,
replace_stride_with_dilation=None,
norm_layer=None,
ch_conv1=64,
ch_l10_1=64,
ch_l10_2=64,
ch_l11_1=64,
ch_l11_2=64,
ch_l12_1=64,
ch_l12_2=64,
ch_l20_1=128,
ch_l20_2=128,
ch_l20_ds=128,
ch_l21_1=128,
ch_l21_2=128,
ch_l22_1=128,
ch_l22_2=128,
ch_l23_1=128,
ch_l23_2=128,
ch_l30_1=256,
ch_l30_2=256,
ch_l30_ds=256,
ch_l31_1=256,
ch_l31_2=256,
ch_l32_1=256,
ch_l32_2=256,
ch_l33_1=256,
ch_l33_2=256,
ch_l34_1=256,
ch_l34_2=256,
ch_l35_1=256,
ch_l35_2=256,
ch_l40_1=512,
ch_l40_2=512,
ch_l40_ds=512,
ch_l41_1=512,
ch_l41_2=512,
ch_l42_1=512,
ch_l42_2=512,
):
super(ResNet34, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self._norm_layer = norm_layer
self.inplanes = 64
self.dilation = 1
if replace_stride_with_dilation is None:
# each element in the tuple indicates if we should replace
# the 2x2 stride with a dilated convolution instead
replace_stride_with_dilation = [False, False, False]
if len(replace_stride_with_dilation) != 3:
raise ValueError(
"replace_stride_with_dilation should be None "
"or a 3-element tuple, got {}".format(replace_stride_with_dilation)
)
self.groups = groups
self.base_width = width_per_group
self.conv1 = nn.Conv2d(3, ch_conv1, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = norm_layer(ch_conv1)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
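        # Channel bookkeeping for pruned models: after zero_padding, each
        # block's input width is the larger of the identity path and the
        # residual path, hence the max(...) of the two candidate widths below.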
in_ch_l11 = max(ch_conv1, ch_l10_2)
in_ch_l12 = max(in_ch_l11, ch_l11_2)
self.layer1 = self._make_layer_3(block=block, planes=64, blocks=layers[0],
n_in_channels0=ch_conv1,
n_channels00=ch_l10_1,
n_channels01=ch_l10_2,
n_channels_ds=None,
n_in_channels1=in_ch_l11,
n_channels10=ch_l11_1,
n_channels11=ch_l11_2,
n_in_channels2=in_ch_l12,
n_channels20=ch_l12_1,
n_channels21=ch_l12_2,
)
in_ch_l20 = max(in_ch_l12, ch_l12_2)
in_ch_l21 = max(ch_l20_ds, ch_l20_2)
in_ch_l22 = max(in_ch_l21, ch_l21_2)
in_ch_l23 = max(in_ch_l22, ch_l22_2)
self.layer2 = self._make_layer_4(block, 128, layers[1], stride=2,
dilate=replace_stride_with_dilation[0],
n_in_channels0=in_ch_l20,
n_channels00=ch_l20_1,
n_channels01=ch_l20_2,
n_channels_ds=ch_l20_ds,
n_in_channels1=in_ch_l21,
n_channels10=ch_l21_1,
n_channels11=ch_l21_2,
n_in_channels2=in_ch_l22,
n_channels20=ch_l22_1,
n_channels21=ch_l22_2,
n_in_channels3=in_ch_l23,
n_channels30=ch_l23_1,
n_channels31=ch_l23_2,
)
in_ch_l30 = max(in_ch_l23, ch_l23_2)
in_ch_l31 = max(ch_l30_ds, ch_l30_2)
in_ch_l32 = max(in_ch_l31, ch_l31_2)
in_ch_l33 = max(in_ch_l32, ch_l32_2)
in_ch_l34 = max(in_ch_l33, ch_l33_2)
in_ch_l35 = max(in_ch_l34, ch_l34_2)
self.layer3 = self._make_layer_6(block, 256, layers[2], stride=2,
dilate=replace_stride_with_dilation[1],
n_in_channels0=in_ch_l30,
n_channels00=ch_l30_1,
n_channels01=ch_l30_2,
n_channels_ds=ch_l30_ds,
n_in_channels1=in_ch_l31,
n_channels10=ch_l31_1,
n_channels11=ch_l31_2,
n_in_channels2=in_ch_l32,
n_channels20=ch_l32_1,
n_channels21=ch_l32_2,
n_in_channels3=in_ch_l33,
n_channels30=ch_l33_1,
n_channels31=ch_l33_2,
n_in_channels4=in_ch_l34,
n_channels40=ch_l34_1,
n_channels41=ch_l34_2,
n_in_channels5=in_ch_l35,
n_channels50=ch_l35_1,
n_channels51=ch_l35_2,
)
in_ch_l40 = max(in_ch_l35, ch_l35_2)
in_ch_l41 = max(ch_l40_ds, ch_l40_2)
in_ch_l42 = max(in_ch_l41, ch_l41_2)
self.layer4 = self._make_layer_3(block, 512, layers[3], stride=2,
dilate=replace_stride_with_dilation[2],
n_in_channels0=in_ch_l40,
n_channels00=ch_l40_1,
n_channels01=ch_l40_2,
n_channels_ds=ch_l40_ds,
n_in_channels1=in_ch_l41,
n_channels10=ch_l41_1,
n_channels11=ch_l41_2,
n_in_channels2=in_ch_l42,
n_channels20=ch_l42_1,
n_channels21=ch_l42_2,
)
in_ch_fc = max(in_ch_l42, ch_l42_2)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(in_ch_fc, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# Zero-initialize the last BN in each residual branch,
# so that the residual branch starts with zeros, and each residual block behaves like an identity.
# This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
        if zero_init_residual:
            for m in self.modules():
                # ResNet-34 uses only BasicBlock (no Bottleneck class is defined here)
                if isinstance(m, BasicBlock):
                    nn.init.constant_(m.bn2.weight, 0)
def _make_layer_3(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
# layer_0
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
# layer_1
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
# layer_2
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
return nn.Sequential(*layers)
def _make_layer_4(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
# layer_0
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
# layer_1
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
# layer_2
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
# layer_3
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
return nn.Sequential(*layers)
def _make_layer_6(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
n_in_channels4=None,
n_channels40=None, n_channels41=None,
n_in_channels5=None,
n_channels50=None, n_channels51=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
# layer_0
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
# layer_1
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
# layer_2
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
# layer_3
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
# layer_4
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels4,
n_channels1=n_channels40,
n_channels2=n_channels41,
)
)
# layer_5
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels5,
n_channels1=n_channels50,
n_channels2=n_channels51,
)
)
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.reshape(x.size(0), -1)
x = self.fc(x)
return x
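# A minimal usage sketch (assumes torch is installed and the default channel
# widths; input shape is illustrative):
#   import torch
#   model = ResNet34(num_classes=10)
#   logits = model(torch.randn(2, 3, 224, 224))  # -> shape (2, 10)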
| 35.230496 | 115 | 0.48767 |
import torch.nn as nn
import torch.nn.functional as F
def zero_padding(x1, x2):
num_ch1 = x1.size()[1]
num_ch2 = x2.size()[1]
ch_diff = num_ch1 - num_ch2
if num_ch1 < num_ch2:
ch_diff = -1 * ch_diff
if ch_diff%2 ==0:
x1 = F.pad(x1[:, :, :, :], (0, 0, 0, 0, ch_diff//2, ch_diff//2), "constant", 0)
else:
x1 = F.pad(x1[:, :, :, :], (0, 0, 0, 0, ch_diff//2, (ch_diff//2)+1), "constant", 0)
elif num_ch1 > num_ch2:
if ch_diff%2 ==0:
x2 = F.pad(x2[:, :, :, :], (0, 0, 0, 0, ch_diff//2, ch_diff//2), "constant", 0)
else:
x2 = F.pad(x2[:, :, :, :], (0, 0, 0, 0, ch_diff//2, (ch_diff//2)+1), "constant", 0)
return x1, x2
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
return nn.Conv2d(
in_planes,
out_planes,
kernel_size=3,
stride=stride,
padding=dilation,
groups=groups,
bias=False,
dilation=dilation,
)
def conv1x1(in_planes, out_planes, stride=1):
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(
self,
inplanes,
planes,
stride=1,
downsample=None,
groups=1,
base_width=64,
dilation=1,
norm_layer=None,
n_in_channels=None,
n_channels1=None,
n_channels2=None,
):
super(BasicBlock, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
if groups != 1 or base_width != 64:
raise ValueError("BasicBlock only supports groups=1 and base_width=64")
if dilation > 1:
raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
self.conv1 = conv3x3(n_in_channels, n_channels1, stride)
self.bn1 = norm_layer(n_channels1)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(n_channels1, n_channels2)
self.bn2 = norm_layer(n_channels2)
self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
identity = self.downsample(x)
out, identity = zero_padding(out, identity)
out += identity
out = self.relu(out)
return out
class ResNet34(nn.Module):
def __init__(
self,
block=BasicBlock,
layers=[3, 4, 6, 3],
num_classes=1000,
zero_init_residual=False,
groups=1,
width_per_group=64,
replace_stride_with_dilation=None,
norm_layer=None,
ch_conv1=64,
ch_l10_1=64,
ch_l10_2=64,
ch_l11_1=64,
ch_l11_2=64,
ch_l12_1=64,
ch_l12_2=64,
ch_l20_1=128,
ch_l20_2=128,
ch_l20_ds=128,
ch_l21_1=128,
ch_l21_2=128,
ch_l22_1=128,
ch_l22_2=128,
ch_l23_1=128,
ch_l23_2=128,
ch_l30_1=256,
ch_l30_2=256,
ch_l30_ds=256,
ch_l31_1=256,
ch_l31_2=256,
ch_l32_1=256,
ch_l32_2=256,
ch_l33_1=256,
ch_l33_2=256,
ch_l34_1=256,
ch_l34_2=256,
ch_l35_1=256,
ch_l35_2=256,
ch_l40_1=512,
ch_l40_2=512,
ch_l40_ds=512,
ch_l41_1=512,
ch_l41_2=512,
ch_l42_1=512,
ch_l42_2=512,
):
super(ResNet34, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self._norm_layer = norm_layer
self.inplanes = 64
self.dilation = 1
if replace_stride_with_dilation is None:
replace_stride_with_dilation = [False, False, False]
if len(replace_stride_with_dilation) != 3:
raise ValueError(
"replace_stride_with_dilation should be None "
"or a 3-element tuple, got {}".format(replace_stride_with_dilation)
)
self.groups = groups
self.base_width = width_per_group
self.conv1 = nn.Conv2d(3, ch_conv1, kernel_size=7, stride=2, padding=3, bias=False)
self.bn1 = norm_layer(ch_conv1)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
in_ch_l11 = max(ch_conv1, ch_l10_2)
in_ch_l12 = max(in_ch_l11, ch_l11_2)
self.layer1 = self._make_layer_3(block=block, planes=64, blocks=layers[0],
n_in_channels0=ch_conv1,
n_channels00=ch_l10_1,
n_channels01=ch_l10_2,
n_channels_ds=None,
n_in_channels1=in_ch_l11,
n_channels10=ch_l11_1,
n_channels11=ch_l11_2,
n_in_channels2=in_ch_l12,
n_channels20=ch_l12_1,
n_channels21=ch_l12_2,
)
in_ch_l20 = max(in_ch_l12, ch_l12_2)
in_ch_l21 = max(ch_l20_ds, ch_l20_2)
in_ch_l22 = max(in_ch_l21, ch_l21_2)
in_ch_l23 = max(in_ch_l22, ch_l22_2)
self.layer2 = self._make_layer_4(block, 128, layers[1], stride=2,
dilate=replace_stride_with_dilation[0],
n_in_channels0=in_ch_l20,
n_channels00=ch_l20_1,
n_channels01=ch_l20_2,
n_channels_ds=ch_l20_ds,
n_in_channels1=in_ch_l21,
n_channels10=ch_l21_1,
n_channels11=ch_l21_2,
n_in_channels2=in_ch_l22,
n_channels20=ch_l22_1,
n_channels21=ch_l22_2,
n_in_channels3=in_ch_l23,
n_channels30=ch_l23_1,
n_channels31=ch_l23_2,
)
in_ch_l30 = max(in_ch_l23, ch_l23_2)
in_ch_l31 = max(ch_l30_ds, ch_l30_2)
in_ch_l32 = max(in_ch_l31, ch_l31_2)
in_ch_l33 = max(in_ch_l32, ch_l32_2)
in_ch_l34 = max(in_ch_l33, ch_l33_2)
in_ch_l35 = max(in_ch_l34, ch_l34_2)
self.layer3 = self._make_layer_6(block, 256, layers[2], stride=2,
dilate=replace_stride_with_dilation[1],
n_in_channels0=in_ch_l30,
n_channels00=ch_l30_1,
n_channels01=ch_l30_2,
n_channels_ds=ch_l30_ds,
n_in_channels1=in_ch_l31,
n_channels10=ch_l31_1,
n_channels11=ch_l31_2,
n_in_channels2=in_ch_l32,
n_channels20=ch_l32_1,
n_channels21=ch_l32_2,
n_in_channels3=in_ch_l33,
n_channels30=ch_l33_1,
n_channels31=ch_l33_2,
n_in_channels4=in_ch_l34,
n_channels40=ch_l34_1,
n_channels41=ch_l34_2,
n_in_channels5=in_ch_l35,
n_channels50=ch_l35_1,
n_channels51=ch_l35_2,
)
in_ch_l40 = max(in_ch_l35, ch_l35_2)
in_ch_l41 = max(ch_l40_ds, ch_l40_2)
in_ch_l42 = max(in_ch_l41, ch_l41_2)
self.layer4 = self._make_layer_3(block, 512, layers[3], stride=2,
dilate=replace_stride_with_dilation[2],
n_in_channels0=in_ch_l40,
n_channels00=ch_l40_1,
n_channels01=ch_l40_2,
n_channels_ds=ch_l40_ds,
n_in_channels1=in_ch_l41,
n_channels10=ch_l41_1,
n_channels11=ch_l41_2,
n_in_channels2=in_ch_l42,
n_channels20=ch_l42_1,
n_channels21=ch_l42_2,
)
in_ch_fc = max(in_ch_l42, ch_l42_2)
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(in_ch_fc, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode="fan_out", nonlinearity="relu")
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
if zero_init_residual:
for m in self.modules():
if isinstance(m, Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
def _make_layer_3(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
return nn.Sequential(*layers)
def _make_layer_4(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
return nn.Sequential(*layers)
def _make_layer_6(self, block, planes, blocks, stride=1, dilate=False,
n_in_channels0=None,
n_channels00=None, n_channels01=None,
n_channels_ds=None,
n_in_channels1=None,
n_channels10=None, n_channels11=None,
n_in_channels2=None,
n_channels20=None, n_channels21=None,
n_in_channels3=None,
n_channels30=None, n_channels31=None,
n_in_channels4=None,
n_channels40=None, n_channels41=None,
n_in_channels5=None,
n_channels50=None, n_channels51=None,
):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential( conv1x1(n_in_channels0, n_channels_ds, stride), norm_layer(n_channels_ds) )
self.inplanes = planes * block.expansion
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
n_in_channels=n_in_channels0,
n_channels1=n_channels00,
n_channels2=n_channels01,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels1,
n_channels1=n_channels10,
n_channels2=n_channels11,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels2,
n_channels1=n_channels20,
n_channels2=n_channels21,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels3,
n_channels1=n_channels30,
n_channels2=n_channels31,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels4,
n_channels1=n_channels40,
n_channels2=n_channels41,
)
)
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer,
n_in_channels=n_in_channels5,
n_channels1=n_channels50,
n_channels2=n_channels51,
)
)
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
x = self.layer1(x)
x = self.layer2(x)
x = self.layer3(x)
x = self.layer4(x)
x = self.avgpool(x)
x = x.reshape(x.size(0), -1)
x = self.fc(x)
return x
| true | true |
f71a921e657b6f695c22749f6d1c6b756adc0c9a | 5,837 | py | Python | allegation/tests/services/test_download_allegations.py | invinst/CPDB | c2d8ae8888b13d956cc1068742f18d45736d4121 | [
"Apache-2.0"
] | 16 | 2016-05-20T09:03:32.000Z | 2020-09-13T14:23:06.000Z | allegation/tests/services/test_download_allegations.py | invinst/CPDB | c2d8ae8888b13d956cc1068742f18d45736d4121 | [
"Apache-2.0"
] | 2 | 2016-05-24T01:44:14.000Z | 2016-06-17T22:19:45.000Z | allegation/tests/services/test_download_allegations.py | invinst/CPDB | c2d8ae8888b13d956cc1068742f18d45736d4121 | [
"Apache-2.0"
] | 2 | 2016-10-10T16:14:19.000Z | 2020-10-26T00:17:02.000Z | from mock import patch, MagicMock, call
from allegation.factories import (
DownloadFactory, OfficerAllegationFactory, AllegationFactory, ComplainingWitnessFactory, OfficerFactory)
from allegation.services.download_allegations import AllegationsDownload
from api.models import Setting
from common.tests.core import SimpleTestCase
from share.factories import SettingFactory
class AllegationsDownloadTestCase(SimpleTestCase):
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_write_disclaimer(self, mock_workbook):
setting = Setting.objects.first() or SettingFactory()
download = DownloadFactory()
line_1 = 'line_1'
line_2 = 'line_2'
setting.export_excel_disclaimer = '{line_1}\n{line_2}'.format(line_1=line_1, line_2=line_2)
setting.save()
mock_worksheet = MagicMock()
mock_workbook().add_worksheet.return_value = mock_worksheet
with patch('allegation.services.download_allegations.os'):
allegation_download = AllegationsDownload(download.id)
allegation_download.init_workbook()
allegation_download.write_disclaimer()
expected_calls = [
call.write('A1', line_1),
call.write('A2', line_2)
]
mock_worksheet.assert_has_calls(expected_calls)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_investigator_name_rank_in_allegation_sheet(self, mock_workbook):
officer_allegation_1 = OfficerAllegationFactory()
investigator = officer_allegation_1.allegation.investigator
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation_1]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
mock_worksheet = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
allegation_download.write_allegations_columns(mock_worksheet)
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
(set(columns) > set(['InvestigatorName', 'InvestigatorRank'])).should.be.true
allegation_download.write_allegations_data(mock_worksheet)
mock_worksheet.write.assert_any_call(1, 21, officer_allegation_1.allegation.investigator.name)
mock_worksheet.write.assert_any_call(1, 22, investigator.current_rank)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_complaining_witness_sheet(self, mock_workbook):
allegation = AllegationFactory()
witness = ComplainingWitnessFactory(allegation=allegation, crid=allegation.crid)
officer_allegation = OfficerAllegationFactory(allegation=allegation)
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
mock_worksheet = MagicMock()
allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
allegation_download.write_complaint_witnesses()
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
columns.should.equal(['CRID', 'Gender', 'Race', 'Age'])
mock_worksheet.write.assert_any_call(1, 0, str(allegation.crid))
mock_worksheet.write.assert_any_call(1, 1, witness.gender)
mock_worksheet.write.assert_any_call(1, 2, witness.race)
mock_worksheet.write.assert_any_call(1, 3, witness.age)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_officer_sheet(self, mock_workbook):
allegation = AllegationFactory()
officer = OfficerFactory()
officer_allegation = OfficerAllegationFactory(allegation=allegation, officer=officer)
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
mock_worksheet = MagicMock()
allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
allegation_download.write_officer_profile()
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
columns.should.equal([
'OfficerID', 'OfficerFirst', 'OfficerLast', 'Gender', 'Race',
'ApptDate', 'Unit', 'Rank', 'Star', 'Age'])
mock_worksheet.write.assert_any_call(1, 0, officer.id)
mock_worksheet.write.assert_any_call(1, 1, officer.officer_first)
mock_worksheet.write.assert_any_call(1, 2, officer.officer_last)
mock_worksheet.write.assert_any_call(1, 3, officer.gender)
mock_worksheet.write.assert_any_call(1, 4, officer.race)
mock_worksheet.write.assert_any_call(1, 5, officer.appt_date)
mock_worksheet.write.assert_any_call(1, 6, officer.unit.unit_name)
mock_worksheet.write.assert_any_call(1, 7, officer.rank)
mock_worksheet.write.assert_any_call(1, 8, officer.star)
mock_worksheet.write.assert_any_call(1, 9, officer.age)
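# Hypothetical invocation, assuming the project's standard Django test runner:
#   python manage.py test allegation.tests.services.test_download_allegations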
| 50.318966 | 108 | 0.714922 | from mock import patch, MagicMock, call
from allegation.factories import (
DownloadFactory, OfficerAllegationFactory, AllegationFactory, ComplainingWitnessFactory, OfficerFactory)
from allegation.services.download_allegations import AllegationsDownload
from api.models import Setting
from common.tests.core import SimpleTestCase
from share.factories import SettingFactory
class AllegationsDownloadTestCase(SimpleTestCase):
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_write_disclaimer(self, mock_workbook):
setting = Setting.objects.first() or SettingFactory()
download = DownloadFactory()
line_1 = 'line_1'
line_2 = 'line_2'
setting.export_excel_disclaimer = '{line_1}\n{line_2}'.format(line_1=line_1, line_2=line_2)
setting.save()
mock_worksheet = MagicMock()
mock_workbook().add_worksheet.return_value = mock_worksheet
with patch('allegation.services.download_allegations.os'):
allegation_download = AllegationsDownload(download.id)
allegation_download.init_workbook()
allegation_download.write_disclaimer()
expected_calls = [
call.write('A1', line_1),
call.write('A2', line_2)
]
mock_worksheet.assert_has_calls(expected_calls)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_investigator_name_rank_in_allegation_sheet(self, mock_workbook):
officer_allegation_1 = OfficerAllegationFactory()
investigator = officer_allegation_1.allegation.investigator
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation_1]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
mock_worksheet = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
allegation_download.write_allegations_columns(mock_worksheet)
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
(set(columns) > set(['InvestigatorName', 'InvestigatorRank'])).should.be.true
allegation_download.write_allegations_data(mock_worksheet)
mock_worksheet.write.assert_any_call(1, 21, officer_allegation_1.allegation.investigator.name)
mock_worksheet.write.assert_any_call(1, 22, investigator.current_rank)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_complaining_witness_sheet(self, mock_workbook):
allegation = AllegationFactory()
witness = ComplainingWitnessFactory(allegation=allegation, crid=allegation.crid)
officer_allegation = OfficerAllegationFactory(allegation=allegation)
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
mock_worksheet = MagicMock()
allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
allegation_download.write_complaint_witnesses()
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
columns.should.equal(['CRID', 'Gender', 'Race', 'Age'])
mock_worksheet.write.assert_any_call(1, 0, str(allegation.crid))
mock_worksheet.write.assert_any_call(1, 1, witness.gender)
mock_worksheet.write.assert_any_call(1, 2, witness.race)
mock_worksheet.write.assert_any_call(1, 3, witness.age)
@patch('allegation.services.download_allegations.xlsxwriter.Workbook')
def test_officer_sheet(self, mock_workbook):
allegation = AllegationFactory()
officer = OfficerFactory()
officer_allegation = OfficerAllegationFactory(allegation=allegation, officer=officer)
allegation_download = AllegationsDownload(DownloadFactory().id)
allegation_download.officer_allegations = [officer_allegation]
allegation_download.update_crids()
allegation_download.write_headers = MagicMock()
with patch('allegation.services.download_allegations.os'):
allegation_download.init_workbook()
mock_worksheet = MagicMock()
allegation_download.workbook.add_worksheet = MagicMock(return_value=mock_worksheet)
allegation_download.write_officer_profile()
(sheet, columns), _ = allegation_download.write_headers.call_args
sheet.should.equal(mock_worksheet)
columns.should.equal([
'OfficerID', 'OfficerFirst', 'OfficerLast', 'Gender', 'Race',
'ApptDate', 'Unit', 'Rank', 'Star', 'Age'])
mock_worksheet.write.assert_any_call(1, 0, officer.id)
mock_worksheet.write.assert_any_call(1, 1, officer.officer_first)
mock_worksheet.write.assert_any_call(1, 2, officer.officer_last)
mock_worksheet.write.assert_any_call(1, 3, officer.gender)
mock_worksheet.write.assert_any_call(1, 4, officer.race)
mock_worksheet.write.assert_any_call(1, 5, officer.appt_date)
mock_worksheet.write.assert_any_call(1, 6, officer.unit.unit_name)
mock_worksheet.write.assert_any_call(1, 7, officer.rank)
mock_worksheet.write.assert_any_call(1, 8, officer.star)
mock_worksheet.write.assert_any_call(1, 9, officer.age)
| true | true |
f71a9246d59e712669453737c400d746d8277d54 | 1,433 | py | Python | stream_alert/rule_processor/main.py | ashmere/streamalert | 5a03d3d272a8e4e4b1ee71567fad1d7e185bb903 | [
"Apache-2.0"
] | 1 | 2018-11-18T12:13:44.000Z | 2018-11-18T12:13:44.000Z | stream_alert/rule_processor/main.py | GSA/streamalert | 57d78157c76c19b9a0fe5bd6deae541cda928914 | [
"Apache-2.0"
] | 110 | 2019-02-13T05:32:07.000Z | 2021-07-29T05:42:01.000Z | stream_alert/rule_processor/main.py | ashmere/streamalert | 5a03d3d272a8e4e4b1ee71567fad1d7e185bb903 | [
"Apache-2.0"
] | 1 | 2019-11-01T01:03:47.000Z | 2019-11-01T01:03:47.000Z | """
Copyright 2017-present, Airbnb Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import importlib
import os
from stream_alert.rule_processor.handler import StreamAlert
modules_to_import = set()
# walk the matchers and rules directories to dynamically import modules
for folder in ('matchers', 'rules'):
for root, dirs, files in os.walk(folder):
filtered_files = [rule_file for rule_file in files if not (rule_file.startswith((
'.', '__init__')) or rule_file.endswith('.pyc'))]
package_path = root.replace('/', '.')
for import_file in filtered_files:
import_module = os.path.splitext(import_file)[0]
if package_path and import_module:
modules_to_import.add('{}.{}'.format(package_path, import_module))
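# Worked example of the mapping above: a file at 'rules/community/foo.py'
# yields package_path 'rules.community' and import_module 'foo', so
# 'rules.community.foo' is imported by the loop below.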
for module_name in modules_to_import:
importlib.import_module(module_name)
def handler(event, context):
"""Main Lambda handler function"""
StreamAlert(context).run(event)
| 35.825 | 89 | 0.728542 | import importlib
import os
from stream_alert.rule_processor.handler import StreamAlert
modules_to_import = set()
for folder in ('matchers', 'rules'):
for root, dirs, files in os.walk(folder):
filtered_files = [rule_file for rule_file in files if not (rule_file.startswith((
'.', '__init__')) or rule_file.endswith('.pyc'))]
package_path = root.replace('/', '.')
for import_file in filtered_files:
import_module = os.path.splitext(import_file)[0]
if package_path and import_module:
modules_to_import.add('{}.{}'.format(package_path, import_module))
for module_name in modules_to_import:
importlib.import_module(module_name)
def handler(event, context):
StreamAlert(context).run(event)
| true | true |
f71a9251405f51578902104c3076923ed80a68f2 | 666 | py | Python | eth/chains/mainnet/constants.py | shreyasnbhat/py-evm | cd31d83185e102a7cb2f11e2f67923b069ee9cef | [
"MIT"
] | 1 | 2018-12-09T11:56:53.000Z | 2018-12-09T11:56:53.000Z | eth/chains/mainnet/constants.py | shreyasnbhat/py-evm | cd31d83185e102a7cb2f11e2f67923b069ee9cef | [
"MIT"
] | null | null | null | eth/chains/mainnet/constants.py | shreyasnbhat/py-evm | cd31d83185e102a7cb2f11e2f67923b069ee9cef | [
"MIT"
] | 2 | 2019-09-05T01:31:56.000Z | 2019-09-17T09:09:16.000Z | from eth_typing import BlockNumber
# https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md
MAINNET_CHAIN_ID = 1
# Fork Blocks listed in ascending order
#
# Homestead Block
#
HOMESTEAD_MAINNET_BLOCK = BlockNumber(1150000)
#
# DAO Block
#
DAO_FORK_MAINNET_BLOCK = BlockNumber(1920000)
DAO_FORK_MAINNET_EXTRA_DATA = b'dao-hard-fork'
#
# Tangerine Whistle Block
#
TANGERINE_WHISTLE_MAINNET_BLOCK = BlockNumber(2463000)
#
# Spurious Dragon Block
#
SPURIOUS_DRAGON_MAINNET_BLOCK = BlockNumber(2675000)
#
# Byzantium Block
#
BYZANTIUM_MAINNET_BLOCK = BlockNumber(4370000)
#
# Constantinople Block
#
CONSTANTINOPLE_MAINNET_BLOCK = BlockNumber(7080000)
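# A minimal usage sketch (hypothetical fork check against these constants):
#   if block_number >= BYZANTIUM_MAINNET_BLOCK:
#       ...  # apply Byzantium-era rules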
| 14.8 | 62 | 0.78979 | from eth_typing import BlockNumber
MAINNET_CHAIN_ID = 1
HOMESTEAD_MAINNET_BLOCK = BlockNumber(1150000)
DAO_FORK_MAINNET_BLOCK = BlockNumber(1920000)
DAO_FORK_MAINNET_EXTRA_DATA = b'dao-hard-fork'
TANGERINE_WHISTLE_MAINNET_BLOCK = BlockNumber(2463000)
SPURIOUS_DRAGON_MAINNET_BLOCK = BlockNumber(2675000)
BYZANTIUM_MAINNET_BLOCK = BlockNumber(4370000)
CONSTANTINOPLE_MAINNET_BLOCK = BlockNumber(7080000)
| true | true |
f71a929b94aaa07c53b09d5b18de47578263ba83 | 6,430 | py | Python | conf.py | isabella232/grr-doc | 2b0e28dc8d456dd0301aa14d45bf53d36de02781 | [
"Apache-2.0"
] | null | null | null | conf.py | isabella232/grr-doc | 2b0e28dc8d456dd0301aa14d45bf53d36de02781 | [
"Apache-2.0"
] | 1 | 2021-06-27T17:20:11.000Z | 2021-06-27T17:20:11.000Z | conf.py | isabella232/grr-doc | 2b0e28dc8d456dd0301aa14d45bf53d36de02781 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
#
# GRR documentation build configuration file, created by
# sphinx-quickstart on Wed Nov 22 17:54:03 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'recommonmark',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'GRR'
copyright = u'2021, GRR team'
author = u'GRR team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u''
# The full version, including alpha/beta/rc tags.
release = u''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# html_sidebars = {
# '**': [
# 'relations.html', # needs 'show_related': True theme option to display
# 'searchbox.html',
# ]
# }
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GRRdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'GRR.tex', u'GRR Documentation',
u'GRR team', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grr', u'GRR Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GRR', u'GRR Documentation',
author, 'GRR', 'One line description of project.',
'Miscellaneous'),
]
# Configure sphinx to convert markdown links (recommonmark is broken at the
# moment).
from docutils import nodes, transforms
class ProcessLink(transforms.Transform):
default_priority = 1000
text_replacements = {
"__GRR_VERSION__": "3.4.3.1",
"__GRR_DEB_VERSION__": "3.4.3-1"
}
def find_replace(self, node):
if isinstance(node, nodes.reference) and "refuri" in node:
r = node["refuri"]
if r.endswith(".md"):
r = r[:-3] + ".html"
node["refuri"] = r
if isinstance(node, nodes.Text):
for k, v in self.text_replacements.items():
if k in node.astext():
repl = nodes.Text(node.replace(k, v))
node.parent.replace(node, repl)
return node
def traverse(self, node):
"""Traverse the document tree rooted at node.
node : docutil node
current root node to traverse
"""
self.find_replace(node)
for c in node.children:
self.traverse(c)
def apply(self):
self.current_level = 0
self.traverse(self.document)
from recommonmark.transform import AutoStructify
def setup(app):
app.add_config_value('recommonmark_config', {
'enable_auto_toc_tree': True,
'auto_toc_tree_section': 'Table of contents',
}, True)
app.add_transform(AutoStructify)
app.add_transform(ProcessLink)
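# Illustrative effect of the transforms registered above: AutoStructify turns a
# "Table of contents" section into a toctree, while ProcessLink rewrites
# markdown cross-links (e.g. a refuri ending in "quickstart.md" becomes
# "quickstart.html") and substitutes "__GRR_VERSION__" -> "3.4.3.1" in text.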
| 29.768519 | 81 | 0.657387 |
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.mathjax',
'recommonmark',
]
templates_path = ['_templates']
master_doc = 'index'
project = u'GRR'
copyright = u'2021, GRR team'
author = u'GRR team'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u''
# The full version, including alpha/beta/rc tags.
release = u''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
# html_sidebars = {
# '**': [
# 'relations.html', # needs 'show_related': True theme option to display
# 'searchbox.html',
# ]
# }
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GRRdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htmaster_dobp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'GRR.tex', u'GRR Documentation',
u'GRR team', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grr', u'GRR Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GRR', u'GRR Documentation',
author, 'GRR', 'One line description of project.',
'Miscellaneous'),
]
# Configure sphinx to convert markdown links (recommonmark is broken at the
# moment).
from docutils import nodes, transforms
class ProcessLink(transforms.Transform):
default_priority = 1000
text_replacements = {
"__GRR_VERSION__": "3.4.3.1",
"__GRR_DEB_VERSION__": "3.4.3-1"
}
def find_replace(self, node):
if isinstance(node, nodes.reference) and "refuri" in node:
r = node["refuri"]
if r.endswith(".md"):
r = r[:-3] + ".html"
node["refuri"] = r
if isinstance(node, nodes.Text):
for k, v in self.text_replacements.items():
if k in node.astext():
repl = nodes.Text(node.replace(k, v))
node.parent.replace(node, repl)
return node
def traverse(self, node):
self.find_replace(node)
for c in node.children:
self.traverse(c)
def apply(self):
self.current_level = 0
self.traverse(self.document)
from recommonmark.transform import AutoStructify
def setup(app):
app.add_config_value('recommonmark_config', {
'enable_auto_toc_tree': True,
'auto_toc_tree_section': 'Table of contents',
}, True)
app.add_transform(AutoStructify)
app.add_transform(ProcessLink)
| true | true |
f71a931bbfeddaef6760880c9e0d84b9e3ce6a96 | 3,111 | py | Python | resend_kafka_message/logic/client/kafka_client.py | Tungnt24/reprocess-kafka-message | 50a6495675630866b0a800a2b2857754f9cdfb02 | [
"MIT"
] | null | null | null | resend_kafka_message/logic/client/kafka_client.py | Tungnt24/reprocess-kafka-message | 50a6495675630866b0a800a2b2857754f9cdfb02 | [
"MIT"
] | null | null | null | resend_kafka_message/logic/client/kafka_client.py | Tungnt24/reprocess-kafka-message | 50a6495675630866b0a800a2b2857754f9cdfb02 | [
"MIT"
] | null | null | null | from kafka import KafkaProducer, KafkaConsumer
from resend_kafka_message.setting import (
KafkaProducerConfig,
KafkaConsumerConfig,
)
import json
from kafka.structs import TopicPartition
from resend_kafka_message.utils.logger import logger
class KafkaBackupProducer:
def __init__(self) -> None:
self.producer = KafkaProducer(
bootstrap_servers=KafkaProducerConfig.KAFKA_BROKER,
value_serializer=lambda x: json.dumps(x).encode("utf-8"),
)
self.topic = KafkaProducerConfig.KAFKA_TOPIC
def send_message(self, user, event, partition):
self.producer.send(
topic=self.topic,
key=bytes(user, "utf-8"),
value=event,
partition=partition,
)
self.producer.flush()
class KafkaBackupConsumer:
def __init__(self) -> None:
self.consumer = KafkaConsumer(
bootstrap_servers=KafkaConsumerConfig.KAFKA_BROKER,
auto_offset_reset=KafkaConsumerConfig.KAFKA_AUTO_OFFSET_RESET,
value_deserializer=lambda x: json.loads(x.decode("utf-8")),
enable_auto_commit=KafkaConsumerConfig.KAFKA_ENABLE_AUTO_COMMIT,
max_poll_records=KafkaConsumerConfig.KAFKA_MAX_POLL_RECORDS,
)
self.topic = KafkaConsumerConfig.KAFKA_TOPIC
def kafka_close(self):
self.consumer.close(autocommit=False)
    def current_position(self, partition):
tp = TopicPartition(self.topic, partition)
return self.consumer.position(tp)
def assign_partition(self, partition):
tp = TopicPartition(self.topic, partition)
self.consumer.assign([tp])
def seek_message(self, partition, offset_start):
tp = TopicPartition(self.topic, partition)
self.consumer.seek(tp, offset_start)
return self.consumer
def get_offset_and_timestamp(self, tp, timestamp_start, timestamp_end):
offset_and_timestamp_start = self.consumer.offsets_for_times(
{tp: int(timestamp_start)}
)
offset_and_timestamp_end = self.consumer.offsets_for_times(
{tp: int(timestamp_end)}
)
offset_and_timestamp_start = list(offset_and_timestamp_start.values())[
0
]
offset_and_timestamp_end = list(offset_and_timestamp_end.values())[0]
if (
offset_and_timestamp_start is None
or offset_and_timestamp_end is None
):
return None, None
return offset_and_timestamp_start, offset_and_timestamp_end
def get_offset(self, partition, timestamp_start, timestamp_end):
tp = TopicPartition(self.topic, partition)
(
offset_timestamp_start,
offset_timestamp_end,
) = self.get_offset_and_timestamp(tp, timestamp_start, timestamp_end)
        if offset_timestamp_start is None or offset_timestamp_end is None:
            raise Exception("could not find offset and timestamp")
offset_start = offset_timestamp_start.offset
offset_end = offset_timestamp_end.offset
return offset_start, offset_end
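# A minimal usage sketch (hypothetical partition/timestamps in milliseconds;
# assumes the configured broker is reachable):
#   consumer = KafkaBackupConsumer()
#   start, end = consumer.get_offset(0, 1609459200000, 1612137600000)
#   consumer.assign_partition(0)
#   for record in consumer.seek_message(0, start):
#       ...  # stop consuming once record.offset >= end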
| 36.6 | 79 | 0.68306 | from kafka import KafkaProducer, KafkaConsumer
from resend_kafka_message.setting import (
KafkaProducerConfig,
KafkaConsumerConfig,
)
import json
from kafka.structs import TopicPartition
from resend_kafka_message.utils.logger import logger
class KafkaBackupProducer:
def __init__(self) -> None:
self.producer = KafkaProducer(
bootstrap_servers=KafkaProducerConfig.KAFKA_BROKER,
value_serializer=lambda x: json.dumps(x).encode("utf-8"),
)
self.topic = KafkaProducerConfig.KAFKA_TOPIC
def send_message(self, user, event, partition):
self.producer.send(
topic=self.topic,
key=bytes(user, "utf-8"),
value=event,
partition=partition,
)
self.producer.flush()
class KafkaBackupConsumer:
def __init__(self) -> None:
self.consumer = KafkaConsumer(
bootstrap_servers=KafkaConsumerConfig.KAFKA_BROKER,
auto_offset_reset=KafkaConsumerConfig.KAFKA_AUTO_OFFSET_RESET,
value_deserializer=lambda x: json.loads(x.decode("utf-8")),
enable_auto_commit=KafkaConsumerConfig.KAFKA_ENABLE_AUTO_COMMIT,
max_poll_records=KafkaConsumerConfig.KAFKA_MAX_POLL_RECORDS,
)
self.topic = KafkaConsumerConfig.KAFKA_TOPIC
def kafka_close(self):
self.consumer.close(autocommit=False)
    def current_position(self, partition):
tp = TopicPartition(self.topic, partition)
return self.consumer.position(tp)
def assign_partition(self, partition):
tp = TopicPartition(self.topic, partition)
self.consumer.assign([tp])
def seek_message(self, partition, offset_start):
tp = TopicPartition(self.topic, partition)
self.consumer.seek(tp, offset_start)
return self.consumer
def get_offset_and_timestamp(self, tp, timestamp_start, timestamp_end):
offset_and_timestamp_start = self.consumer.offsets_for_times(
{tp: int(timestamp_start)}
)
offset_and_timestamp_end = self.consumer.offsets_for_times(
{tp: int(timestamp_end)}
)
offset_and_timestamp_start = list(offset_and_timestamp_start.values())[
0
]
offset_and_timestamp_end = list(offset_and_timestamp_end.values())[0]
if (
offset_and_timestamp_start is None
or offset_and_timestamp_end is None
):
return None, None
return offset_and_timestamp_start, offset_and_timestamp_end
def get_offset(self, partition, timestamp_start, timestamp_end):
tp = TopicPartition(self.topic, partition)
(
offset_timestamp_start,
offset_timestamp_end,
) = self.get_offset_and_timestamp(tp, timestamp_start, timestamp_end)
        if offset_timestamp_start is None or offset_timestamp_end is None:
            raise Exception("could not find offset and timestamp")
offset_start = offset_timestamp_start.offset
offset_end = offset_timestamp_end.offset
return offset_start, offset_end
| true | true |
f71a939f803f8836cd5408d397bbd195ac54e34a | 394 | py | Python | Applications/powershell/6.0.2/package.py | cashmerepipeline/CashmereRez | 13a73931d715ffac27c337abcd6df97b5c47534b | [
"MIT"
] | null | null | null | Applications/powershell/6.0.2/package.py | cashmerepipeline/CashmereRez | 13a73931d715ffac27c337abcd6df97b5c47534b | [
"MIT"
] | null | null | null | Applications/powershell/6.0.2/package.py | cashmerepipeline/CashmereRez | 13a73931d715ffac27c337abcd6df97b5c47534b | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
name = 'powershell'
version = '6.0.2'
author = ['microsoft']
tools = ["pwsh"]
requires = []
variants = [
['platform-windows'],
]
def commands():
import os
applications_path = os.environ["APPLICATIONS_PATH"]
env.PATH.append(os.path.join(applications_path, "powershell", "%s"%version).replace('/', os.sep))
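# At resolve time this appends <APPLICATIONS_PATH>/powershell/6.0.2 (with
# OS-native separators) to PATH so the 'pwsh' tool declared above resolves.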
| 13.586207 | 101 | 0.560914 |
name = 'powershell'
version = '6.0.2'
author = ['microsoft']
tools = ["pwsh"]
requires = []
variants = [
['platform-windows'],
]
def commands():
import os
applications_path = os.environ["APPLICATIONS_PATH"]
env.PATH.append(os.path.join(applications_path, "powershell", "%s"%version).replace('/', os.sep))
| true | true |
f71a96389c5ecde338aa29ef1117227f29df61b8 | 1,098 | py | Python | files/sun/practice/binarytree.py | 1ta/study_python | 7623ed019397225f63093c5aaccb155bdf289805 | [
"MIT"
] | null | null | null | files/sun/practice/binarytree.py | 1ta/study_python | 7623ed019397225f63093c5aaccb155bdf289805 | [
"MIT"
] | null | null | null | files/sun/practice/binarytree.py | 1ta/study_python | 7623ed019397225f63093c5aaccb155bdf289805 | [
"MIT"
] | null | null | null | """
Definition of TreeNode:
class TreeNode:
def __init__(self, val):
self.val = val
self.left, self.right = None, None
"""
class Solution:
"""
@param inorder : A list of integers that inorder traversal of a tree
@param postorder : A list of integers that postorder traversal of a tree
@return : Root of a tree
"""
def buildTree(self, inorder, postorder):
def genTree(inorder,postorder):
if len(inorder)==0:
return None
root_val = postorder[-1]
root = TreeNode(root_val)
n = inorder.index(root_val)
left_inorder = inorder[:n]
left_postorder = postorder[:n]
right_inorder = inorder[n+1:]
right_postorder= postorder[n:len(postorder)-1]
if len(left_inorder) > 0:
root.left = genTree(left_inorder, left_postorder)
if len(right_inorder) > 0:
root.right = genTree(right_inorder, right_postorder)
return root
root = genTree(inorder, postorder)
return root
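# Worked example (assumes the TreeNode class from the docstring above):
#   inorder = [1, 2, 3]; postorder = [1, 3, 2]
#   root = Solution().buildTree(inorder, postorder)
#   # -> root.val == 2, root.left.val == 1, root.right.val == 3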
| 32.294118 | 76 | 0.583789 |
class Solution:
def buildTree(self, inorder, postorder):
def genTree(inorder,postorder):
if len(inorder)==0:
return None
root_val = postorder[-1]
root = TreeNode(root_val)
n = inorder.index(root_val)
left_inorder = inorder[:n]
left_postorder = postorder[:n]
right_inorder = inorder[n+1:]
right_postorder= postorder[n:len(postorder)-1]
if len(left_inorder) > 0:
root.left = genTree(left_inorder, left_postorder)
if len(right_inorder) > 0:
root.right = genTree(right_inorder, right_postorder)
return root
root = genTree(inorder, postorder)
return root
| true | true |
f71a974a3093e8096614977acf39bdfa59c13911 | 6,492 | py | Python | utils/dataload.py | hobinkwak/Stock-Movements-Classification | dac2e90d9ef2294f5c4dc8f6605b9051c71b3f45 | [
"MIT"
] | null | null | null | utils/dataload.py | hobinkwak/Stock-Movements-Classification | dac2e90d9ef2294f5c4dc8f6605b9051c71b3f45 | [
"MIT"
] | null | null | null | utils/dataload.py | hobinkwak/Stock-Movements-Classification | dac2e90d9ef2294f5c4dc8f6605b9051c71b3f45 | [
"MIT"
] | null | null | null | from itertools import combinations
import pandas as pd
from utils.utils import *
def load_etf():
etf_data = pd.read_csv(
"data/etf_data.csv", encoding="euc_kr", parse_dates=["tdate"]
)
etf_ohlcv = etf_data.set_index(["tdate", "etf_code", "data_name"])[
"value"
].unstack()
etf_close = etf_ohlcv["종가"].unstack()
return etf_close
def load_macro_data():
macro_data = pd.read_csv('외부데이터/macro_final.csv', index_col='Item Name').iloc[1:, :]
macro_data.index = pd.to_datetime(macro_data.index)
macro_data = macro_data.fillna(method='ffill')
macro_data = (macro_data.resample('m').last() / macro_data.resample('m').first())
macro_data.columns = ['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y',
'미국국채_10y', '리보_달러_1m', '리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w',
'DDR4 16G (2G*8) 2666 MHZ', 'NAND 16Gb 2Gx8 SLC', 'DDR4 16G (2G*8) eTT MHZ',
'DDR3 4Gb 512Mx8 1600/1866Mbps', 'DDR3 4Gb 512Mx8 eTT',
'NAND 8Gb 1Gx8 SLC', 'NAND 64Gb 8Gx8 MLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M',
'난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE', '천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물',
'전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y', 'DFF',
'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']
macro_data = macro_data[
['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y', '미국국채_10y', '리보_달러_1m',
'리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w', 'DDR3 4Gb 512Mx8 eTT',
'NAND 8Gb 1Gx8 SLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M', '난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE',
'천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물', '전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y',
'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']]
return macro_data
def load_wics_data():
WICS대_exposure = process_wics_data("./외부데이터/ETF별 업종 exposure.csv")
WICS업종 = process_wics_data("./외부데이터/WICS 업종별 투자정보 데이터.csv")
WICS대 = WICS업종[
[
"에너지",
"소재",
"산업재",
"경기관련소비재",
"필수소비재",
"건강관리",
"금융",
"IT",
"커뮤니케이션서비스",
"유틸리티",
]
]
WICS대 = WICS대.T.drop_duplicates().T
return WICS대, WICS대_exposure
def features_from_wics(wics):
"""
wics : WICS대 (from load_wics_data())
"""
wics_price = wics.xs("종가지수", level=1, axis=1)
momentums = get_moving_features(wics_price, type='price')
wics_trd_volume = wics.xs("거래대금", level=1, axis=1)
trd_volumes = get_moving_features(wics_trd_volume, type='volume')
wics_retail_volume = wics.xs("개인 순매수대금(일간)", level=1, axis=1).fillna(0)
retail_volumes = get_moving_features(wics_retail_volume, type='volume')
wics_for_volume = wics.xs("외국인총합계순매수대금(일간)", level=1, axis=1).fillna(0)
for_volumes = get_moving_features(wics_for_volume, type='volume')
wics_inst_volume = wics.xs("기관 순매수대금(일간)", level=1,axis=1).fillna(0)
inst_volumes = get_moving_features(wics_inst_volume, type='volume')
wics_pe = wics.xs("P/E(FY0)", level=1,axis=1)
pe_scale = wics_pe.resample('M').last().apply(lambda X: minmaxscale(X), axis=1)
wics_fwd_pe = wics.xs("P/E(Fwd.12M)", level=1,axis=1)
fwd_pe_changes = get_moving_features(wics_fwd_pe, type='fwd')
wics_fwd_eps = wics.xs("EPS(Fwd.12M, 지배)", level=1,axis=1)
fwd_eps_changes =get_moving_features(wics_fwd_eps, type='fwd')
size_ = wics.xs("시가총액", level=1,axis=1).resample('M').last()
features = {
"macro": load_macro_data(),
"size": size_,
"mom_1m": momentums[0],
"mom_3m": momentums[1],
"mom_6m": momentums[2],
"mom_1y": momentums[3],
"trd_1m": trd_volumes[0],
"trd_3m": trd_volumes[1],
"trd_6m": trd_volumes[2],
"trd_1y": trd_volumes[3],
"retail_trd_1m": retail_volumes[0],
"retail_trd_3m": retail_volumes[1],
"retail_trd_6m": retail_volumes[2],
"retail_trd_1y": retail_volumes[3],
"for_trd_1m": for_volumes[0],
"for_trd_3m": for_volumes[1],
"for_trd_6m": for_volumes[2],
"for_trd_1y": for_volumes[3],
"inst_trd_1m": inst_volumes[0],
"inst_trd_3m": inst_volumes[1],
"inst_trd_6m": inst_volumes[2],
"inst_trd_1y": inst_volumes[3],
"fwd_pe_1m": fwd_pe_changes[0],
"fwd_pe_3m": fwd_pe_changes[1],
"fwd_eps_1m": fwd_eps_changes[0],
"fwd_eps_3m": fwd_eps_changes[1],
"pe": pe_scale,
}
return wics_price, features
def combination_set(pair, start, end, price, features):
"""
    :param pair: pair of WICS sector (WICS대분류) names to compare
    :param start: start of the evaluation period
    :param end: end of the evaluation period
    :param price: wics_price (from features_from_wics())
    :param features: features (from features_from_wics())
"""
comb_price = price[list(pair)]
comb_ret = (comb_price.resample('m').last() / comb_price.resample('m').first()).loc[start:end]
feature_table = features['macro'].loc[start:end]
for key in list(features.keys())[1:6]:
feature_table[key] = features[key].apply(lambda x: (x[pair[0]] / x[pair[1]]), axis=1).loc[start:end]
for key in list(features.keys())[6:]:
feature_table[key] = features[key].apply(lambda x: (x[pair[0]] - x[pair[1]]), axis=1).loc[start:end]
comb_ret['winner'] = comb_ret.apply(
lambda x: comb_ret.columns[0] if (x[comb_ret.columns[0]] > x[comb_ret.columns[1]]) else comb_ret.columns[1],
axis=1)
feature_table = feature_table.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
comb_ret = comb_ret.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
feature_table = feature_table.shift(1).iloc[1:]
comb_ret = comb_ret.iloc[1:]
X_data = feature_table
y_data = comb_ret[['winner']].astype('category')
return X_data, y_data
def load_dataset():
WICS대,_ = load_wics_data()
price, features = features_from_wics(WICS대)
columns = ['에너지', '소재', '산업재', '경기관련소비재', '필수소비재', '건강관리', '금융', 'IT', '커뮤니케이션서비스', '유틸리티']
pairs = list(combinations(columns, 2))
total_dataset = {pair : combination_set(pair,'2011-12','2021-05', price, features) for pair in pairs}
return total_dataset
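# A minimal usage sketch (assumes the CSV inputs under 외부데이터/ exist):
#   dataset = load_dataset()
#   X, y = dataset[('에너지', '소재')]  # lagged monthly features, winner labels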
| 40.074074 | 119 | 0.611214 | from itertools import combinations
import pandas as pd
from utils.utils import *
def load_etf():
etf_data = pd.read_csv(
"data/etf_data.csv", encoding="euc_kr", parse_dates=["tdate"]
)
etf_ohlcv = etf_data.set_index(["tdate", "etf_code", "data_name"])[
"value"
].unstack()
etf_close = etf_ohlcv["종가"].unstack()
return etf_close
def load_macro_data():
macro_data = pd.read_csv('외부데이터/macro_final.csv', index_col='Item Name').iloc[1:, :]
macro_data.index = pd.to_datetime(macro_data.index)
macro_data = macro_data.fillna(method='ffill')
macro_data = (macro_data.resample('m').last() / macro_data.resample('m').first())
macro_data.columns = ['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y',
'미국국채_10y', '리보_달러_1m', '리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w',
'DDR4 16G (2G*8) 2666 MHZ', 'NAND 16Gb 2Gx8 SLC', 'DDR4 16G (2G*8) eTT MHZ',
'DDR3 4Gb 512Mx8 1600/1866Mbps', 'DDR3 4Gb 512Mx8 eTT',
'NAND 8Gb 1Gx8 SLC', 'NAND 64Gb 8Gx8 MLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M',
'난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE', '천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물',
'전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y', 'DFF',
'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']
macro_data = macro_data[
['FOMC정책금리', '한국정책금리', '중국정책금리', '미국국채_1m', '미국국채_3m', '미국국채_6m', '미국국채_1y', '미국국채_5y', '미국국채_10y', '리보_달러_1m',
'리보_달러_1y', '리보_달러_3m', '리보_달러_6m', '리보_달러_1w', 'DDR3 4Gb 512Mx8 eTT',
'NAND 8Gb 1Gx8 SLC', 'WTI_1M', 'BRENT_1M', 'DUBAI_ASIA1M', '난방유_선물_NYMEX', '천연가스_선물_NYMEX', '가스오일_선물_IPE',
'천연가스_선물_IPE', '금_선물', '은_선물', '알루미늄_선물', '전기동_선물', '납_선물', '니켈_선물', '주석_선물', '아연_선물', '10YR BEI', 'T10Y2Y',
'HY Ef Yield', 'Trade DI', 'VIX', 'USDKRW', 'Eco Policy Uncertainty']]
return macro_data
def load_wics_data():
WICS대_exposure = process_wics_data("./외부데이터/ETF별 업종 exposure.csv")
WICS업종 = process_wics_data("./외부데이터/WICS 업종별 투자정보 데이터.csv")
WICS대 = WICS업종[
[
"에너지",
"소재",
"산업재",
"경기관련소비재",
"필수소비재",
"건강관리",
"금융",
"IT",
"커뮤니케이션서비스",
"유틸리티",
]
]
WICS대 = WICS대.T.drop_duplicates().T
return WICS대, WICS대_exposure
def features_from_wics(wics):
wics_price = wics.xs("종가지수", level=1, axis=1)
momentums = get_moving_features(wics_price, type='price')
wics_trd_volume = wics.xs("거래대금", level=1, axis=1)
trd_volumes = get_moving_features(wics_trd_volume, type='volume')
wics_retail_volume = wics.xs("개인 순매수대금(일간)", level=1, axis=1).fillna(0)
retail_volumes = get_moving_features(wics_retail_volume, type='volume')
wics_for_volume = wics.xs("외국인총합계순매수대금(일간)", level=1, axis=1).fillna(0)
for_volumes = get_moving_features(wics_for_volume, type='volume')
wics_inst_volume = wics.xs("기관 순매수대금(일간)", level=1,axis=1).fillna(0)
inst_volumes = get_moving_features(wics_inst_volume, type='volume')
wics_pe = wics.xs("P/E(FY0)", level=1,axis=1)
pe_scale = wics_pe.resample('M').last().apply(lambda X: minmaxscale(X), axis=1)
wics_fwd_pe = wics.xs("P/E(Fwd.12M)", level=1,axis=1)
fwd_pe_changes = get_moving_features(wics_fwd_pe, type='fwd')
wics_fwd_eps = wics.xs("EPS(Fwd.12M, 지배)", level=1,axis=1)
fwd_eps_changes =get_moving_features(wics_fwd_eps, type='fwd')
size_ = wics.xs("시가총액", level=1,axis=1).resample('M').last()
features = {
"macro": load_macro_data(),
"size": size_,
"mom_1m": momentums[0],
"mom_3m": momentums[1],
"mom_6m": momentums[2],
"mom_1y": momentums[3],
"trd_1m": trd_volumes[0],
"trd_3m": trd_volumes[1],
"trd_6m": trd_volumes[2],
"trd_1y": trd_volumes[3],
"retail_trd_1m": retail_volumes[0],
"retail_trd_3m": retail_volumes[1],
"retail_trd_6m": retail_volumes[2],
"retail_trd_1y": retail_volumes[3],
"for_trd_1m": for_volumes[0],
"for_trd_3m": for_volumes[1],
"for_trd_6m": for_volumes[2],
"for_trd_1y": for_volumes[3],
"inst_trd_1m": inst_volumes[0],
"inst_trd_3m": inst_volumes[1],
"inst_trd_6m": inst_volumes[2],
"inst_trd_1y": inst_volumes[3],
"fwd_pe_1m": fwd_pe_changes[0],
"fwd_pe_3m": fwd_pe_changes[1],
"fwd_eps_1m": fwd_eps_changes[0],
"fwd_eps_3m": fwd_eps_changes[1],
"pe": pe_scale,
}
return wics_price, features
def combination_set(pair, start, end, price, features):
comb_price = price[list(pair)]
comb_ret = (comb_price.resample('m').last() / comb_price.resample('m').first()).loc[start:end]
feature_table = features['macro'].loc[start:end]
for key in list(features.keys())[1:6]:
feature_table[key] = features[key].apply(lambda x: (x[pair[0]] / x[pair[1]]), axis=1).loc[start:end]
for key in list(features.keys())[6:]:
feature_table[key] = features[key].apply(lambda x: (x[pair[0]] - x[pair[1]]), axis=1).loc[start:end]
comb_ret['winner'] = comb_ret.apply(
lambda x: comb_ret.columns[0] if (x[comb_ret.columns[0]] > x[comb_ret.columns[1]]) else comb_ret.columns[1],
axis=1)
feature_table = feature_table.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
comb_ret = comb_ret.replace([-np.inf, np.inf], np.nan).fillna(method='ffill')
feature_table = feature_table.shift(1).iloc[1:]
comb_ret = comb_ret.iloc[1:]
X_data = feature_table
y_data = comb_ret[['winner']].astype('category')
return X_data, y_data
def load_dataset():
WICS대,_ = load_wics_data()
price, features = features_from_wics(WICS대)
columns = ['에너지', '소재', '산업재', '경기관련소비재', '필수소비재', '건강관리', '금융', 'IT', '커뮤니케이션서비스', '유틸리티']
pairs = list(combinations(columns, 2))
total_dataset = {pair : combination_set(pair,'2011-12','2021-05', price, features) for pair in pairs}
return total_dataset
| true | true |
f71a97da98a14131d787d14e3647b6eaf3f98b88 | 8,968 | py | Python | neko/Scanners/CFBFScanner/CFBFScanner.py | mebuis/neko | c76eacb60c3a3f6adfb6a7a6fd7f61640be2c00d | [
"Apache-2.0"
] | 1 | 2018-12-07T02:05:16.000Z | 2018-12-07T02:05:16.000Z | neko/Scanners/CFBFScanner/CFBFScanner.py | mebuis/neko | c76eacb60c3a3f6adfb6a7a6fd7f61640be2c00d | [
"Apache-2.0"
] | null | null | null | neko/Scanners/CFBFScanner/CFBFScanner.py | mebuis/neko | c76eacb60c3a3f6adfb6a7a6fd7f61640be2c00d | [
"Apache-2.0"
] | null | null | null | # -*- encoding: UTF-8 -*-
import string
from neko.Common import Threat
from neko.Common.CLSID import CLSID_NULL, LOW_RISK_LEVEL_OBJECTS, HIGH_RISK_LEVEL_OBJECTS
from neko.Common.DataStructures.OLE1 import LengthPrefixedByteArray
from neko.Common.DataStructures.OLE2 import OLEStream, SOAPMoniker, CompositeMoniker, FileMoniker, UrlMoniker
from neko.Parsers.CFBFParser import CFBFParser
from neko.Parsers.CFBFParser.DataStructures import DirectorySectorEntry
class CFBFScanner:
def __init__(self):
from neko import Dispatcher
self.Dispatcher: Dispatcher = None
self.Parser: CFBFParser = None
self.Flags = set()
def Scan(self, **kwargs):
self.Dispatcher = kwargs["dispatcher"]
self.Parser = kwargs["parser"]
self.CheckDirectoryEntries()
self.CheckOLEStreams()
self.CheckStreamData()
return self
def CheckDirectoryEntryNames(self):
for entry in self.Parser.DirectoryEntries.values():
entry_name = entry.ObjectName.lower() # stream names are case-insensitive
if ("MACRO" not in self.Flags) and (entry_name in frozenset(["_vba_project", "dir", "_srp_0", "projectlk", "projectwm", "project"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_MACRO",
information = {}
)
)
self.Flags.add("MACRO")
if ("OCX" not in self.Flags) and (entry_name in frozenset(["\\x03ocxname"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_OLE_CONTROL_EXTENSION",
information = {}
)
)
self.Flags.add("OCX")
if ("ENCRYPTED_PACKAGE" not in self.Flags) and (entry_name in frozenset(["encryptedpackage"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_ENCRYPTED_PACKAGE",
information = {}
)
)
self.Flags.add("ENCRYPTED_PACKAGE")
def CheckDirectoryEntryCLSIDs(self):
for entry in self.Parser.DirectoryEntries.values():
if entry.ObjectType.Value == DirectorySectorEntry.STREAM_OBJECT:
continue
clsid = str(entry.CLSID)
if clsid == CLSID_NULL:
continue # unknown handler
elif clsid in LOW_RISK_LEVEL_OBJECTS:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_LOW_RISK_LEVEL_OBJECT",
information = {
"type": LOW_RISK_LEVEL_OBJECTS[clsid],
"clsid": clsid
}
)
)
self.Flags.add(clsid)
elif clsid in HIGH_RISK_LEVEL_OBJECTS:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_HIGH_RISK_LEVEL_OBJECT",
information = {
"type": HIGH_RISK_LEVEL_OBJECTS[clsid],
"clsid": clsid
}
)
)
self.Flags.add(clsid)
else:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_UNKNOWN_OBJECT",
information = {
"clsid": clsid
}
)
)
self.Flags.add(clsid)
def CheckDirectoryEntries(self):
self.CheckDirectoryEntryNames()
self.CheckDirectoryEntryCLSIDs()
def CheckOLEStreams(self):
for entry in self.Parser.DirectoryEntries.values():
entry_name = entry.ObjectName.lower()
if entry_name != "\\x01ole":
continue
olestream = OLEStream().Parse(entry.StreamData)
relative_moniker_stream = olestream.RelativeMonikerStream
absolute_moniker_stream = olestream.AbsoluteMonikerStream
if str(relative_moniker_stream.CLSID) != CLSID_NULL:
outer_moniker_stream = relative_moniker_stream
elif str(absolute_moniker_stream.CLSID) != CLSID_NULL:
outer_moniker_stream = absolute_moniker_stream
else:
continue
outer_moniker = outer_moniker_stream.Moniker
if isinstance(outer_moniker, SOAPMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_SOAP_MONIKER",
information = {
"url": str(outer_moniker.Url).strip(string.whitespace + "\x00")[5:] # wsdl=
}
)
)
elif isinstance(outer_moniker, CompositeMoniker):
for inner_moniker_stream in outer_moniker.MonikerArray:
inner_moniker = inner_moniker_stream.Moniker
if isinstance(inner_moniker, FileMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_COMPOSITED_FILE_MONIKER",
information = {
"ansi_path": str(inner_moniker.AnsiPath).strip(string.whitespace + "\x00"),
"unicode_path": str(inner_moniker.UnicodePath).strip(string.whitespace + "\x00")
}
)
)
elif isinstance(inner_moniker, UrlMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_COMPOSITED_URL_MONIKER",
information = {
"url": str(inner_moniker.Url).strip(string.whitespace + "\x00")
}
)
)
elif isinstance(outer_moniker, FileMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_FILE_MONIKER",
information = {
"ansi_path": str(outer_moniker.AnsiPath).strip(string.whitespace + "\x00"),
"unicode_path": str(outer_moniker.UnicodePath).strip(string.whitespace + "\x00")
}
)
)
elif isinstance(outer_moniker, UrlMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_URL_MONIKER",
information = {
"url": str(outer_moniker.Url).strip(string.whitespace + "\x00")
}
)
)
def CheckStreamData(self):
for entry in self.Parser.DirectoryEntries.values():
if entry.ObjectType.Value != DirectorySectorEntry.STREAM_OBJECT:
continue
entry_name = entry.ObjectName.lower()
if entry_name.startswith(("\\x01", "\\x03", "\\x05")) and (entry_name != "\\x01ole10native"):
continue
stream_data = entry.StreamData
if entry_name == "\\x01ole10native":
stream_data = LengthPrefixedByteArray().Parse(stream_data).Data
if stream_data:
from neko import Dispatcher
dispatcher = Dispatcher(label = f"{self.Dispatcher.Label} -> Stream \"{entry.ObjectName}\"")
dispatcher.Dispatch(stream_data)
self.Dispatcher.ChildDispatchers.append(dispatcher)
| 41.327189 | 145 | 0.492975 |
import string
from neko.Common import Threat
from neko.Common.CLSID import CLSID_NULL, LOW_RISK_LEVEL_OBJECTS, HIGH_RISK_LEVEL_OBJECTS
from neko.Common.DataStructures.OLE1 import LengthPrefixedByteArray
from neko.Common.DataStructures.OLE2 import OLEStream, SOAPMoniker, CompositeMoniker, FileMoniker, UrlMoniker
from neko.Parsers.CFBFParser import CFBFParser
from neko.Parsers.CFBFParser.DataStructures import DirectorySectorEntry
class CFBFScanner:
def __init__(self):
from neko import Dispatcher
self.Dispatcher: Dispatcher = None
self.Parser: CFBFParser = None
self.Flags = set()
def Scan(self, **kwargs):
self.Dispatcher = kwargs["dispatcher"]
self.Parser = kwargs["parser"]
self.CheckDirectoryEntries()
self.CheckOLEStreams()
self.CheckStreamData()
return self
def CheckDirectoryEntryNames(self):
for entry in self.Parser.DirectoryEntries.values():
entry_name = entry.ObjectName.lower()
if ("MACRO" not in self.Flags) and (entry_name in frozenset(["_vba_project", "dir", "_srp_0", "projectlk", "projectwm", "project"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_MACRO",
information = {}
)
)
self.Flags.add("MACRO")
if ("OCX" not in self.Flags) and (entry_name in frozenset(["\\x03ocxname"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_OLE_CONTROL_EXTENSION",
information = {}
)
)
self.Flags.add("OCX")
if ("ENCRYPTED_PACKAGE" not in self.Flags) and (entry_name in frozenset(["encryptedpackage"])):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_ENCRYPTED_PACKAGE",
information = {}
)
)
self.Flags.add("ENCRYPTED_PACKAGE")
def CheckDirectoryEntryCLSIDs(self):
for entry in self.Parser.DirectoryEntries.values():
if entry.ObjectType.Value == DirectorySectorEntry.STREAM_OBJECT:
continue
clsid = str(entry.CLSID)
if clsid == CLSID_NULL:
continue
elif clsid in LOW_RISK_LEVEL_OBJECTS:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_LOW_RISK_LEVEL_OBJECT",
information = {
"type": LOW_RISK_LEVEL_OBJECTS[clsid],
"clsid": clsid
}
)
)
self.Flags.add(clsid)
elif clsid in HIGH_RISK_LEVEL_OBJECTS:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_HIGH_RISK_LEVEL_OBJECT",
information = {
"type": HIGH_RISK_LEVEL_OBJECTS[clsid],
"clsid": clsid
}
)
)
self.Flags.add(clsid)
else:
if clsid not in self.Flags:
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_UNKNOWN_OBJECT",
information = {
"clsid": clsid
}
)
)
self.Flags.add(clsid)
def CheckDirectoryEntries(self):
self.CheckDirectoryEntryNames()
self.CheckDirectoryEntryCLSIDs()
def CheckOLEStreams(self):
for entry in self.Parser.DirectoryEntries.values():
entry_name = entry.ObjectName.lower()
if entry_name != "\\x01ole":
continue
olestream = OLEStream().Parse(entry.StreamData)
relative_moniker_stream = olestream.RelativeMonikerStream
absolute_moniker_stream = olestream.AbsoluteMonikerStream
if str(relative_moniker_stream.CLSID) != CLSID_NULL:
outer_moniker_stream = relative_moniker_stream
elif str(absolute_moniker_stream.CLSID) != CLSID_NULL:
outer_moniker_stream = absolute_moniker_stream
else:
continue
outer_moniker = outer_moniker_stream.Moniker
if isinstance(outer_moniker, SOAPMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_SOAP_MONIKER",
information = {
"url": str(outer_moniker.Url).strip(string.whitespace + "\x00")[5:]
}
)
)
elif isinstance(outer_moniker, CompositeMoniker):
for inner_moniker_stream in outer_moniker.MonikerArray:
inner_moniker = inner_moniker_stream.Moniker
if isinstance(inner_moniker, FileMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_COMPOSITED_FILE_MONIKER",
information = {
"ansi_path": str(inner_moniker.AnsiPath).strip(string.whitespace + "\x00"),
"unicode_path": str(inner_moniker.UnicodePath).strip(string.whitespace + "\x00")
}
)
)
elif isinstance(inner_moniker, UrlMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_COMPOSITED_URL_MONIKER",
information = {
"url": str(inner_moniker.Url).strip(string.whitespace + "\x00")
}
)
)
elif isinstance(outer_moniker, FileMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_FILE_MONIKER",
information = {
"ansi_path": str(outer_moniker.AnsiPath).strip(string.whitespace + "\x00"),
"unicode_path": str(outer_moniker.UnicodePath).strip(string.whitespace + "\x00")
}
)
)
elif isinstance(outer_moniker, UrlMoniker):
self.Dispatcher.ThreatList.append(
Threat(
location = self.Dispatcher.Label,
type = "FOUND_URL_MONIKER",
information = {
"url": str(outer_moniker.Url).strip(string.whitespace + "\x00")
}
)
)
def CheckStreamData(self):
for entry in self.Parser.DirectoryEntries.values():
if entry.ObjectType.Value != DirectorySectorEntry.STREAM_OBJECT:
continue
entry_name = entry.ObjectName.lower()
if entry_name.startswith(("\\x01", "\\x03", "\\x05")) and (entry_name != "\\x01ole10native"):
continue
stream_data = entry.StreamData
if entry_name == "\\x01ole10native":
stream_data = LengthPrefixedByteArray().Parse(stream_data).Data
if stream_data:
from neko import Dispatcher
dispatcher = Dispatcher(label = f"{self.Dispatcher.Label} -> Stream \"{entry.ObjectName}\"")
dispatcher.Dispatch(stream_data)
self.Dispatcher.ChildDispatchers.append(dispatcher)
| true | true |
f71a97f2cf8061f969605f468dcddb25a7b8ae82 | 1,596 | py | Python | progress_bar.py | qcrit/LaTeCH-CLfL-2019-GreekClassification | 0984f88c455d314afd6395be927bcf1383378860 | [
"MIT"
] | 1 | 2019-11-03T21:10:01.000Z | 2019-11-03T21:10:01.000Z | progress_bar.py | qcrit/LaTeCH-CLfL-2019-GreekClassification | 0984f88c455d314afd6395be927bcf1383378860 | [
"MIT"
] | null | null | null | progress_bar.py | qcrit/LaTeCH-CLfL-2019-GreekClassification | 0984f88c455d314afd6395be927bcf1383378860 | [
"MIT"
] | 2 | 2019-12-23T20:05:32.000Z | 2019-12-23T20:10:27.000Z | # From https://stackoverflow.com/a/34325723
_prev_str_length = None
# Print iterations progress
def print_progress_bar(iteration, total, prefix='', suffix='', decimals=1, length=18, fill='█'):
"""
Call in a loop to create terminal progress bar
@params:
iteration - Required : current iteration (Int)
total - Required : total iterations (Int)
prefix - Optional : prefix string (Str)
suffix - Optional : suffix string (Str)
decimals - Optional : positive number of decimals in percent complete (Int)
length - Optional : character length of bar (Int)
fill - Optional : bar fill character (Str)
"""
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
s = '%s |%s| %s%% %s' % (prefix, bar, percent, suffix)
global _prev_str_length
if _prev_str_length:
print(' ' * _prev_str_length, end='\r') #Clear out previous bar to prevent lingering characters if current bar is shorter
print(s, end='\r')
_prev_str_length = len(s)
# Print New Line on Complete
if iteration == total:
_prev_str_length = None
print()
if __name__ == '__main__':
#
# Sample Usage
#
from time import sleep
# A List of Items
items = list(range(0, 57))
l = len(items)
for i in range(l + 1):
# Do stuff...
sleep(0.1)
# Update Progress Bar
print_progress_bar(i, l, prefix='Progress:', suffix='Complete')
# Sample Output
# Progress: |█████████████████████████████████████████████-----| 90.0% Complete
| 31.294118 | 123 | 0.641604 |
_prev_str_length = None
def print_progress_bar(iteration, total, prefix='', suffix='', decimals=1, length=18, fill='█'):
percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total)))
filledLength = int(length * iteration // total)
bar = fill * filledLength + '-' * (length - filledLength)
s = '%s |%s| %s%% %s' % (prefix, bar, percent, suffix)
global _prev_str_length
if _prev_str_length:
print(' ' * _prev_str_length, end='\r')
print(s, end='\r')
_prev_str_length = len(s)
if iteration == total:
_prev_str_length = None
print()
if __name__ == '__main__':
from time import sleep
items = list(range(0, 57))
l = len(items)
for i in range(l + 1):
sleep(0.1)
print_progress_bar(i, l, prefix='Progress:', suffix='Complete')
| true | true |
f71a984be3f40ce7973e0b35ea72325af786a392 | 3,517 | py | Python | app/graph/Node.py | OuissalTAIM/jenkins | 7ea5bcdeb6c0bb3cc14c2826a68e4f521de163c1 | [
"BSD-1-Clause"
] | null | null | null | app/graph/Node.py | OuissalTAIM/jenkins | 7ea5bcdeb6c0bb3cc14c2826a68e4f521de163c1 | [
"BSD-1-Clause"
] | 6 | 2021-02-02T22:52:41.000Z | 2022-03-12T00:37:30.000Z | app/graph/Node.py | OuissalTAIM/jenkins | 7ea5bcdeb6c0bb3cc14c2826a68e4f521de163c1 | [
"BSD-1-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from app.entity.MineBeneficiation import *
import json
import pandas as pd
from app.graph.Graph import Edge
class NodeJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Node):
return o.moniker()
if isinstance(o, pd.core.series.Series):
return o.to_dict()
return json.JSONEncoder.default(self, o)
class Node:
"""
A node is an entity and its upstreams/downstreams
"""
def __init__(self, entity):
"""
ctor
:param entity: Entity
"""
self.entity = entity
self.upstream = {}
self.downstream = {}
def __repr__(self):
"""
Node representation
:return: string
"""
return self.moniker()
def __str__(self):
"""
Stringify
:return: dict
"""
return self.moniker()
def name(self):
"""
Primary entity name
:return: string
"""
return self.entity.name
def location(self):
return self.entity.location
def nominal_capacity(self):
return self.entity.nominal_capacity
def moniker(self):
"""
Primary moniker
:return: string
"""
return self.entity.moniker
def layer(self):
"""
Layer enumeration
:return: Enum
"""
return self.entity.layer
def add_downstream(self, transport, entity_id):
"""
Connect to downstream
:param transport: mean of transport
:param entity_id: identifier of entity
:return: None
"""
if entity_id not in Entity.ENTITIES:
raise Exception("Downstream entity {0} does not exist".format(entity_id))
ds_entity = Entity.ENTITIES[entity_id]
if entity_id in self.downstream and self.downstream[entity_id].transport == transport:
raise Exception("Downstream entity {0} via {1} already exists with node {2}".format(entity_id, transport, self.name()))
self.downstream[entity_id] = Edge(transport, self.entity, ds_entity)
def cost_pv(self, downstream_node=None):
"""
Cost PV including transport
:param downstream_node: destination node
:return: double
"""
if downstream_node is None:
return self.entity.cost_pv()
edge = self.downstream[downstream_node.moniker()]
#TODO: make sure that edge.cost() is in same unit as volume,
# rework this code
transport_cost = edge.cost() * self.entity.volume()
cost = self.entity.cost_pv()
cost["transport"] = (transport_cost.unit, transport_cost.value)
return cost
class ComboNode(Node):
"""
Node combining 2 nodes
"""
def __init__(self, layer, up_node, down_node):
"""
ctor
:param layer: PipelineLayer
:param up_node: Node
:param down_node: Node
"""
self.layer = layer
self.up_node = up_node
self.down_node = down_node
if layer == env.PipelineLayer.MINE_BENEFICIATION:
self.entity = MineBeneficiationEntity(self.up_node.entity, self.down_node.entity)
else:
name = "%s%s%s" % (up_node.name(), env.COMBO_NODES_SEPARATION, down_node.name())
moniker = "%s%s%s" % (up_node.moniker(), env.COMBO_NODES_SEPARATION, down_node.moniker())
self.entity = Entity(name=name, layer=layer, id=moniker)
| 27.476563 | 131 | 0.59056 |
from app.entity.MineBeneficiation import *
import json
import pandas as pd
from app.graph.Graph import Edge
class NodeJSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, Node):
return o.moniker()
if isinstance(o, pd.core.series.Series):
return o.to_dict()
return json.JSONEncoder.default(self, o)
class Node:
def __init__(self, entity):
self.entity = entity
self.upstream = {}
self.downstream = {}
def __repr__(self):
return self.moniker()
def __str__(self):
return self.moniker()
def name(self):
return self.entity.name
def location(self):
return self.entity.location
def nominal_capacity(self):
return self.entity.nominal_capacity
def moniker(self):
return self.entity.moniker
def layer(self):
return self.entity.layer
def add_downstream(self, transport, entity_id):
if entity_id not in Entity.ENTITIES:
raise Exception("Downstream entity {0} does not exist".format(entity_id))
ds_entity = Entity.ENTITIES[entity_id]
if entity_id in self.downstream and self.downstream[entity_id].transport == transport:
raise Exception("Downstream entity {0} via {1} already exists with node {2}".format(entity_id, transport, self.name()))
self.downstream[entity_id] = Edge(transport, self.entity, ds_entity)
def cost_pv(self, downstream_node=None):
if downstream_node is None:
return self.entity.cost_pv()
edge = self.downstream[downstream_node.moniker()]
transport_cost = edge.cost() * self.entity.volume()
cost = self.entity.cost_pv()
cost["transport"] = (transport_cost.unit, transport_cost.value)
return cost
class ComboNode(Node):
def __init__(self, layer, up_node, down_node):
self.layer = layer
self.up_node = up_node
self.down_node = down_node
if layer == env.PipelineLayer.MINE_BENEFICIATION:
self.entity = MineBeneficiationEntity(self.up_node.entity, self.down_node.entity)
else:
name = "%s%s%s" % (up_node.name(), env.COMBO_NODES_SEPARATION, down_node.name())
moniker = "%s%s%s" % (up_node.moniker(), env.COMBO_NODES_SEPARATION, down_node.moniker())
self.entity = Entity(name=name, layer=layer, id=moniker)
| true | true |
f71a9945ebfc1939e5f3b7f7596845dbf01070cf | 2,311 | py | Python | Semester 4/Open Source Technology/exp1.py | atharva8300/Engineering-Practical-Experiments | 3f7fe4abbbe69a3bbb8aa19892dd7209e70c69ac | [
"Unlicense"
] | 7 | 2020-04-20T19:32:23.000Z | 2021-08-03T16:50:15.000Z | Semester 4/Open Source Technology/exp1.py | atharva8300/Engineering-Practical-Experiments | 3f7fe4abbbe69a3bbb8aa19892dd7209e70c69ac | [
"Unlicense"
] | null | null | null | Semester 4/Open Source Technology/exp1.py | atharva8300/Engineering-Practical-Experiments | 3f7fe4abbbe69a3bbb8aa19892dd7209e70c69ac | [
"Unlicense"
] | 5 | 2019-04-20T06:35:25.000Z | 2021-12-12T12:25:08.000Z | print("String example")
s = "this is a test String"
print(f"String: {s}")
print(f"String Capitalized: {s.capitalize()}")
print(f"String Finding index: {s.find('e')}")
print(f"String Lowercase: {s.lower()}")
print(f"String Uppercase: {s.upper()}")
print(f"String Length: {len(s)}")
print(f"String Replace: {s.replace('this', 'THIS')}")
print(f"String Swapcase: {s.swapcase()}")
print(f"String Title: {s.title()}")
print()
print("List examples")
L = ['C++', 'Java', 'Python']
print(f"List: {L}")
print(f"List slicing: {L[1:]}")
print(f"List slicing: {L[::-1]}")
print(f"List slicing: {L[0:2]}")
L = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print(f"List: {L}")
L.append(10)
print(f"List Appending:{L}")
print(f"List Popping:{L.pop()}")
L.insert(4, 20)
print(f"List Inserting : {L}") # position, value
L.reverse()
print(f"List Reversed: {L}")
L.sort()
reversed_list = reversed(L)
print("Reversed list: {}".format(reversed_list))
for i in reversed_list:
print(i)
print(f"List Sorted: {L}")
print("\nTuple example")
tup1 = ('physics', 'chemistry', 1997, 2000)
tup2 = (1, 2, 3, 4, 5, 6, 7)
print(f"tup1[0]: {tup1[0]}")
print(f"tup2[1:5]: {tup2[1:5]}")
tup3 = tup1 + tup2
print(f"Creating new from existing: tup3: {tup3}")
print("\nDictionary examples")
d = {'Name': 'Test', 'Age': 99, 'Class': 'failed'}
print(f"Dicstionary d: {d}")
d['Age'] = 0 # update existing entry
d['School'] = "Under a tree" # Add new entry
print(f"Updating d['Age']: {d['Age']}")
print(f"Updating d['School']: {d['School']}")
print(f"Dictionary d: {d}")
print(f"Get Qualification : {d.get('Qualification', 'NA')}")
print(f"Dictionary items: {d.items()}")
print(f"Dictionary keys: {d.keys()}")
print(f"Dictionary values: {d.values()}")
print("\nSets example")
my_set = {1, 3}
print(my_set)
my_set.add(2) # add an element
print(my_set)
my_set.update([2, 3, 4]) # add multiple elements
print(my_set)
my_set.update([4, 5], {1, 6, 8}) # add list and set
print(my_set)
my_set.remove(6)
print(my_set)
my_set.pop() # pop another random element
print(my_set)
A = {1, 2, 3, 4, 5}
B = {4, 5, 6, 7, 8}
print(A | B) # Union or A.union(B)
print(A & B) # Intersection or A.intersection(B)
print(A - B) # Difference or A.difference(B)
A = frozenset([1, 2, 3, 4])
B = frozenset([3, 4, 5, 6])
print(A.difference(B))
print(A | B)
print(A.add(3)) # Error
| 28.182927 | 60 | 0.633059 | print("String example")
s = "this is a test String"
print(f"String: {s}")
print(f"String Capitalized: {s.capitalize()}")
print(f"String Finding index: {s.find('e')}")
print(f"String Lowercase: {s.lower()}")
print(f"String Uppercase: {s.upper()}")
print(f"String Length: {len(s)}")
print(f"String Replace: {s.replace('this', 'THIS')}")
print(f"String Swapcase: {s.swapcase()}")
print(f"String Title: {s.title()}")
print()
print("List examples")
L = ['C++', 'Java', 'Python']
print(f"List: {L}")
print(f"List slicing: {L[1:]}")
print(f"List slicing: {L[::-1]}")
print(f"List slicing: {L[0:2]}")
L = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print(f"List: {L}")
L.append(10)
print(f"List Appending:{L}")
print(f"List Popping:{L.pop()}")
L.insert(4, 20)
print(f"List Inserting : {L}")
L.reverse()
print(f"List Reversed: {L}")
L.sort()
reversed_list = reversed(L)
print("Reversed list: {}".format(reversed_list))
for i in reversed_list:
print(i)
print(f"List Sorted: {L}")
print("\nTuple example")
tup1 = ('physics', 'chemistry', 1997, 2000)
tup2 = (1, 2, 3, 4, 5, 6, 7)
print(f"tup1[0]: {tup1[0]}")
print(f"tup2[1:5]: {tup2[1:5]}")
tup3 = tup1 + tup2
print(f"Creating new from existing: tup3: {tup3}")
print("\nDictionary examples")
d = {'Name': 'Test', 'Age': 99, 'Class': 'failed'}
print(f"Dicstionary d: {d}")
d['Age'] = 0
d['School'] = "Under a tree"
print(f"Updating d['Age']: {d['Age']}")
print(f"Updating d['School']: {d['School']}")
print(f"Dictionary d: {d}")
print(f"Get Qualification : {d.get('Qualification', 'NA')}")
print(f"Dictionary items: {d.items()}")
print(f"Dictionary keys: {d.keys()}")
print(f"Dictionary values: {d.values()}")
print("\nSets example")
my_set = {1, 3}
print(my_set)
my_set.add(2)
print(my_set)
my_set.update([2, 3, 4])
print(my_set)
my_set.update([4, 5], {1, 6, 8})
print(my_set)
my_set.remove(6)
print(my_set)
my_set.pop()
print(my_set)
A = {1, 2, 3, 4, 5}
B = {4, 5, 6, 7, 8}
print(A | B)
print(A & B)
print(A - B)
A = frozenset([1, 2, 3, 4])
B = frozenset([3, 4, 5, 6])
print(A.difference(B))
print(A | B)
print(A.add(3))
| true | true |
f71a9a29fc9f435c927a8cf78515482f4439afa0 | 105 | py | Python | checks/root_path.py | Amourspirit/python-ooouno-ex | 523dd9b89a74aaf887edbcfe1dda316a04c7125b | [
"MIT"
] | null | null | null | checks/root_path.py | Amourspirit/python-ooouno-ex | 523dd9b89a74aaf887edbcfe1dda316a04c7125b | [
"MIT"
] | 2 | 2022-03-28T19:03:21.000Z | 2022-03-29T00:03:34.000Z | checks/root_path.py | Amourspirit/python-ooouno-ex | 523dd9b89a74aaf887edbcfe1dda316a04c7125b | [
"MIT"
] | null | null | null | # coding: utf-8
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent)) | 26.25 | 53 | 0.771429 |
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent)) | true | true |
f71a9a81693d0910320d55fb9df477edf8edac0a | 209,992 | py | Python | cinder/tests/unit/volume/drivers/huawei/test_huawei_drivers.py | 2020human/cinder | 04528318848620e4ce2639ea2dd5323783dc7a1f | [
"Apache-2.0"
] | null | null | null | cinder/tests/unit/volume/drivers/huawei/test_huawei_drivers.py | 2020human/cinder | 04528318848620e4ce2639ea2dd5323783dc7a1f | [
"Apache-2.0"
] | null | null | null | cinder/tests/unit/volume/drivers/huawei/test_huawei_drivers.py | 2020human/cinder | 04528318848620e4ce2639ea2dd5323783dc7a1f | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for huawei drivers."""
import collections
import copy
import ddt
import json
import mock
import re
import tempfile
import unittest
from xml.dom import minidom
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit.consistencygroup import fake_cgsnapshot
from cinder.tests.unit.consistencygroup import fake_consistencygroup
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit import utils
from cinder.volume import configuration as conf
from cinder.volume.drivers.huawei import constants
from cinder.volume.drivers.huawei import fc_zone_helper
from cinder.volume.drivers.huawei import huawei_conf
from cinder.volume.drivers.huawei import huawei_driver
from cinder.volume.drivers.huawei import huawei_utils
from cinder.volume.drivers.huawei import hypermetro
from cinder.volume.drivers.huawei import replication
from cinder.volume.drivers.huawei import rest_client
from cinder.volume.drivers.huawei import smartx
from cinder.volume import qos_specs
from cinder.volume import volume_types
admin_contex = context.get_admin_context()
vol_attrs = ('id', 'lun_type', 'provider_location', 'metadata')
Volume = collections.namedtuple('Volume', vol_attrs)
PROVIDER_LOCATION = '11'
HOST = 'ubuntu001@backend001#OpenStack_Pool'
ID = '21ec7341-9256-497b-97d9-ef48edcf0635'
ENCODE_NAME = huawei_utils.encode_name(ID)
ADMIN_METADATA = {'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'}
TEST_PAIR_ID = "3400a30d844d0004"
REPLICA_DRIVER_DATA = '{"pair_id": "%s", "rmt_lun_id": "1"}' % TEST_PAIR_ID
VOL_METADATA = [{'key': 'hypermetro_id', 'value': '11'},
{'key': 'remote_lun_id', 'value': '1'}]
hypermetro_devices = """{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName": "admin",
"UserPassword": "Admin@storage1",
"StoragePool": "OpenStack_Pool",
"domain_name": "hypermetro-domain",
"remote_target_ip": "192.0.2.241"
}
}
"""
fake_smartx_value = {'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': False,
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test',
}
fake_hypermetro_opts = {'hypermetro': 'true',
'smarttier': False,
'smartcache': False,
'smartpartition': False,
'thin_provisioning_support': False,
'thick_provisioning_support': False,
}
sync_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> sync'}
async_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> async'}
replica_hypermetro_specs = {'hypermetro': '<is> True',
'replication_enabled': '<is> True'}
test_host = {'host': 'ubuntu001@backend001#OpenStack_Pool',
'capabilities': {'smartcache': True,
'location_info': '210235G7J20000000000',
'QoS_support': True,
'pool_name': 'OpenStack_Pool',
'timestamp': '2015-07-13T11:41:00.513549',
'smartpartition': True,
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'free_capacity_gb': 20.0,
'driver_version': '1.1.0',
'total_capacity_gb': 20.0,
'smarttier': True,
'hypermetro': True,
'reserved_percentage': 0,
'vendor_name': None,
'thick_provisioning_support': False,
'thin_provisioning_support': True,
'storage_protocol': 'FC',
}
}
test_new_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'smarttier': '<is> true',
'smartcache': '<is> true',
'smartpartition': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'smartpartition:partitionname': 'partition-test',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_new_replication_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'replication_enabled': '<is> True',
'replication_type': '<in> sync',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_hypermetro_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'hypermetro': '<is> True'
},
'is_public': True,
'deleted_at': None,
'id': u'550c089b-bfdd-4f7f-86e1-3ba88125555c',
'description': None,
}
hypermetro_devices = """
{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName":"admin",
"UserPassword":"Admin@storage2",
"StoragePool":"OpenStack_Pool",
"domain_name":"hypermetro_test"}
}
"""
FAKE_FIND_POOL_RESPONSE = {'CAPACITY': '985661440',
'ID': '0',
'TOTALCAPACITY': '985661440'}
FAKE_CREATE_VOLUME_RESPONSE = {"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": '6643e8c1004c5f6723e9f454003'}
FakeConnector = {'initiator': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'multipath': False,
'wwpns': ['10000090fa0d6754'],
'wwnns': ['10000090fa0d6755'],
'host': 'ubuntuc',
}
smarttier_opts = {'smarttier': 'true',
'smartpartition': False,
'smartcache': False,
'thin_provisioning_support': True,
'thick_provisioning_support': False,
'policy': '3',
'readcachepolicy': '1',
'writecachepolicy': None,
}
fake_fabric_mapping = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_ports = {
'swd1': {
'target_port_wwn_list': [],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_wwn = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': []
}
}
CHANGE_OPTS = {'policy': ('1', '2'),
'partitionid': (['1', 'partition001'], ['2', 'partition002']),
'cacheid': (['1', 'cache001'], ['2', 'cache002']),
'qos': (['11', {'MAXIOPS': '100', 'IOType': '1'}],
{'MAXIOPS': '100', 'IOType': '2',
'MIN': 1, 'LATENCY': 1}),
'host': ('ubuntu@huawei#OpenStack_Pool',
'ubuntu@huawei#OpenStack_Pool'),
'LUNType': ('0', '1'),
}
# A fake response of create a host
FAKE_CREATE_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1"}
}
"""
FAKE_GET_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1",
"ISADD2HOSTGROUP": "true"}
}
"""
# A fake response of success response storage
FAKE_COMMON_SUCCESS_RESPONSE = """
{
"error": {
"code": 0,
"description": "None"
},
"data":{}
}
"""
# A fake response of fail response storage
FAKE_COMMON_FAIL_RESPONSE = """
{
"error": {
"code": 50331651,
"description": "An error occurs to the parameter."
},
"data":{}
}
"""
# A fake response of login huawei storage
FAKE_GET_LOGIN_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": 2
}
}
"""
# A fake response of login out huawei storage
FAKE_LOGIN_OUT_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11
}
}
"""
# A fake response of mock storage pool info
FAKE_STORAGE_POOL_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"USERFREECAPACITY": "985661440",
"ID": "0",
"NAME": "OpenStack_Pool",
"USERTOTALCAPACITY": "985661440",
"TIER0CAPACITY": "100",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "0"
}]
}
"""
# A fake response of lun or lungroup response
FAKE_LUN_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": "6643e8c1004c5f6723e9f454003",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"ALLOCTYPE": "1",
"CAPACITY": "2097152"
}
}
"""
# A fake report of mock storage pool info
FAKE_POOLS_UNSUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': False,
'smartcache': False,
'thick_provisioning_support': False,
'splitmirror': False,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': False,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': False
}
FAKE_POOLS_SUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': True,
'smartcache': True,
'thick_provisioning_support': True,
'splitmirror': True,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': True,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': True,
'hypermetro': True,
'consistencygroup_support': True
}
FAKE_LUN_GET_SUCCESS_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "11",
"IOCLASSID": "11",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"RUNNINGSTATUS": "10",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"LUNLIST": "",
"ALLOCTYPE": "1",
"CAPACITY": "2097152",
"WRITEPOLICY": "1",
"MIRRORPOLICY": "0",
"PREFETCHPOLICY": "1",
"PREFETCHVALUE": "20",
"DATATRANSFERPOLICY": "1",
"READCACHEPOLICY": "2",
"WRITECACHEPOLICY": "5",
"OWNINGCONTROLLER": "0B",
"SMARTCACHEPARTITIONID": "",
"CACHEPARTITIONID": "",
"WWN": "6643e8c1004c5f6723e9f454003",
"PARENTNAME": "OpenStack_Pool"
}
}
"""
FAKE_QUERY_ALL_LUN_RESPONSE = {
"error": {
"code": 0
},
"data": [{
"ID": "1",
"NAME": ENCODE_NAME
}]
}
FAKE_LUN_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"11"
}]
}
"""
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE = """
{
"error": {
"code":0
},
"data":[{
"NAME":"OpenStack_LunGroup_1",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}]
}
"""
FAKE_QUERY_LUN_GROUP_RESPONSE = """
{
"error": {
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_LUN_COUNT_RESPONSE = """
{
"data":{
"COUNT":"0"
},
"error":{
"code":0,
"description":"0"
}
}
"""
# A fake response of snapshot list response
FAKE_SNAPSHOT_LIST_INFO_RESPONSE = {
"error": {
"code": 0,
"description": "0"
},
"data": [{
"ID": 11,
"NAME": ENCODE_NAME
}, ]
}
# A fake response of create snapshot response
FAKE_CREATE_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
# A fake response of get snapshot response
FAKE_GET_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0,
"description": "0"
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
FAKE_SNAPSHOT_COUNT_RESPONSE = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
# A fake response of get iscsi response
FAKE_GET_ISCSI_INFO_RESPONSE = """
{
"data": [{
"ETHPORTID": "139267",
"ID": "0+iqn.oceanstor:21004846fb8ca15f::22004:192.0.2.1,t,0x2005",
"TPGT": "8197",
"TYPE": 249
},
{
"ETHPORTID": "139268",
"ID": "1+iqn.oceanstor:21004846fb8ca15f::22003:192.0.2.2,t,0x2004",
"TPGT": "8196",
"TYPE": 249
}
],
"error": {
"code": 0,
"description": "0"
}
}
"""
# A fake response of get eth info response
FAKE_GET_ETH_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.2",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P0",
"MTU": "1500",
"PARENTID": "1.5"
},
{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.1",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P3",
"MTU": "1500",
"PARENTID": "1.5"
}]
}
"""
FAKE_GET_ETH_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"IPV4ADDR": "192.0.2.1",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
},
{
"IPV4ADDR": "192.0.2.2",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
}
]
}
"""
# A fake response of get iscsi device info response
FAKE_GET_ISCSI_DEVICE_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"CMO_ISCSI_DEVICE_NAME": "iqn.2006-08.com.huawei:oceanstor:21000022a:"
}]
}
"""
# A fake response of get iscsi device info response
FAKE_GET_ALL_HOST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 245,
"NAME": "ubuntuc",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "1",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
},
{
"PARENTTYPE": 245,
"NAME": "ubuntu",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "2",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
}]
}
"""
# A fake response of get host or hostgroup info response
FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
},
{"NAME":"OpenStack_HostGroup_1",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
]
}
"""
FAKE_GET_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data":{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
}
"""
# A fake response of lun copy info response
FAKE_GET_LUN_COPY_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"COPYSTOPTIME": "-1",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "36",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "0",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "-1"
}
}
"""
# A fake response of lun copy list info response
FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"COPYSTOPTIME": "1372209335",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "40",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "100",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "1372209329"
}]
}
"""
# A fake response of mappingview info response
FAKE_GET_MAPPING_VIEW_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"OpenStack_Mapping_View_1",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245
},
{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"YheUoRwbSX2BxN767nvLSw",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"2",
"INBANDLUNWWN": "",
"TYPE": 245
}]
}
"""
FAKE_GET_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"11",
"INBANDLUNWWN":"",
"TYPE": 245,
"AVAILABLEHOSTLUNIDLIST": ""
}]
}
"""
FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245,
"AVAILABLEHOSTLUNIDLIST": "[1]"
}
}
"""
FAKE_FC_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6754",
"OPERATIONSYSTEM":"255",
"TYPE":223
},
{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6755",
"OPERATIONSYSTEM":"255",
"TYPE":223
}]
}
"""
FAKE_ISCSI_INITIATOR_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"CHAPNAME":"mm-user",
"HEALTHSTATUS":"1",
"ID":"iqn.1993-08.org.debian:01:9073aba6c6f",
"ISFREE":"true",
"MULTIPATHTYPE":"1",
"NAME":"",
"OPERATIONSYSTEM":"255",
"RUNNINGSTATUS":"28",
"TYPE":222,
"USECHAP":"true"
},
{
"ISFREE":"true",
"ID":"ini-1"
},
{
"ISFREE":"false",
"ID":"ini-2",
"PARENTNAME":"Host2",
"PARENTID":"2"
}]
}
"""
FAKE_HOST_LINK_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"PARENTTYPE":21,
"TARGET_ID":"0000000000000000",
"INITIATOR_NODE_WWN":"20000090fa0d6754",
"INITIATOR_TYPE":"223",
"RUNNINGSTATUS":"27",
"PARENTNAME":"ubuntuc",
"INITIATOR_ID":"10000090fa0d6754",
"TARGET_PORT_WWN":"24000022a10a2a39",
"HEALTHSTATUS":"1",
"INITIATOR_PORT_WWN":"10000090fa0d6754",
"ID":"010000090fa0d675-0000000000110400",
"TARGET_NODE_WWN":"21000022a10a2a39",
"PARENTID":"1",
"CTRL_ID":"0",
"TYPE":255,
"TARGET_TYPE":"212"
}]
}
"""
FAKE_PORT_GROUP_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":11,
"NAME": "portgroup-test"
}]
}
"""
FAKE_ERROR_INFO_RESPONSE = """
{
"error":{
"code":31755596
}
}
"""
FAKE_ERROR_CONNECT_RESPONSE = """
{
"error":{
"code":-403
}
}
"""
FAKE_ERROR_LUN_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"IOCLASSID":"11",
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"ALLOCTYPE": "0",
"DATATRANSFERPOLICY": "0",
"SMARTCACHEPARTITIONID": "0",
"CACHEPARTITIONID": "0"
}
}
"""
FAKE_GET_FC_INI_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"10000090fa0d6754",
"ISFREE":"true"
}]
}
"""
FAKE_SYSTEM_VERSION_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"wwn": "21003400a30d844d"
}
}
"""
FAKE_GET_LUN_MIGRATION_RESPONSE = """
{
"data":[{"ENDTIME":"1436816174",
"ID":"9",
"PARENTID":"11",
"PARENTNAME":"xmRBHMlVRruql5vwthpPXQ",
"PROCESS":"-1",
"RUNNINGSTATUS":"76",
"SPEED":"2",
"STARTTIME":"1436816111",
"TARGETLUNID":"1",
"TARGETLUNNAME":"4924891454902893639",
"TYPE":253,
"WORKMODE":"0"
}],
"error":{"code":0,
"description":"0"}
}
"""
FAKE_HYPERMETRODOMAIN_RESPONSE = """
{
"error":{
"code": 0
},
"data":[{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "0"
}]
}
"""
FAKE_HYPERMETRO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_QOS_INFO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"ID": "11"
}
}
"""
FAKE_GET_FC_PORT_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f66",
"PARENTID":"0A.1",
"ID": "1114368",
"RUNSPEED": "16000"
},
{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f67",
"PARENTID":"0A.1",
"ID": "1114369",
"RUNSPEED": "16000"
}]
}
"""
FAKE_SMARTCACHEPARTITION_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"NAME":"cache-name"
}
}
"""
FAKE_CONNECT_FC_RESPONSE = {
"driver_volume_type": 'fibre_channel',
"data": {
"target_wwn": ["10000090fa0d6754"],
"target_lun": "1",
"volume_id": ID
}
}
FAKE_METRO_INFO_RESPONSE = {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "42",
"HEALTHSTATUS": "0"
}
FAKE_METRO_INFO_NEW_RESPONSE = """{
"error": {
"code": 0
},
"data": {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_CREATE_METROROUP_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "3400a30d844d8002",
"ISEMPTY": "true",
"NAME": "6F7kdHZcQJ2zbzxHmBl4FQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
FAKE_GET_METROROUP_RESPONSE = {
"data": [{
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "true",
"NAME": huawei_utils.encode_name(ID),
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
}],
"error": {
"code": 0,
"description": "0"
},
}
FAKE_GET_METROROUP_ID_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "false",
"NAME": "IexzQZJWSXuX2e9I7c8GNQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "1",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
# mock login info map
MAP_COMMAND_TO_FAKE_RESPONSE = {}
MAP_COMMAND_TO_FAKE_RESPONSE['/xx/sessions'] = (
FAKE_GET_LOGIN_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/sessions'] = (
FAKE_LOGIN_OUT_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/POST'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION?range=[0-256]/GET'] = (
FAKE_GET_LUN_MIGRATION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
# mock storage info map
MAP_COMMAND_TO_FAKE_RESPONSE['/storagepool'] = (
FAKE_STORAGE_POOL_RESPONSE)
# mock lun info map
MAP_COMMAND_TO_FAKE_RESPONSE['/lun'] = (
FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun?filter=NAME::%s/GET' % ENCODE_NAME] = (
json.dumps(FAKE_QUERY_ALL_LUN_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=12/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?ID=1&TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=0/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition?ID=1'
'&ASSOCIATEOBJTYPE=11&ASSOCIATEOBJID=11'
'/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup?range=[0-8191]/GET'] = (
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup'] = (
FAKE_QUERY_LUN_GROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate'] = (
FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNGroup/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=27'
'&ASSOCIATEOBJID=11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=27'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/expand/PUT'] = (
FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=12&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=12/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
# mock snapshot info map
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot'] = (
FAKE_CREATE_SNAPSHOT_INFO_RESPONSE)
# mock snapshot info map
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/GET'] = (
FAKE_GET_SNAPSHOT_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/activate'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/stop/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot?filter=NAME::%s/GET' % ENCODE_NAME] = (
json.dumps(FAKE_SNAPSHOT_LIST_INFO_RESPONSE))
# mock QoS info map
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/active/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/'] = (
FAKE_QOS_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
# mock iscsi info map
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_tgt_port/GET'] = (
FAKE_GET_ISCSI_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/GET'] = (
FAKE_GET_ETH_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/associate?TYPE=213&ASSOCIATEOBJTYPE'
'=257&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_ETH_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsidevicename'] = (
FAKE_GET_ISCSI_DEVICE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?range=[0-256]/GET'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/POST'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/PUT'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?PARENTTYPE=21&PARENTID'
'=1/GET'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/remove_iscsi_from_host/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'
'iqn.1993-08.debian:01:ec2bff7ac3a3/PUT'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
# mock host info map
MAP_COMMAND_TO_FAKE_RESPONSE['/host?range=[0-65535]/GET'] = (
FAKE_GET_ALL_HOST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/GET'] = (
FAKE_GET_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host'] = (
FAKE_CREATE_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup?range=[0-8191]/GET'] = (
FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup'] = (
FAKE_GET_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
'&ASSOCIATEOBJTYPE=21&ASSOCIATEOBJID=1'
'/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
'&ASSOCIATEOBJID=0/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=21&'
'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/0/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/associate'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
# mock copy info map
MAP_COMMAND_TO_FAKE_RESPONSE['/luncopy'] = (
FAKE_GET_LUN_COPY_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY?range=[0-1023]/GET'] = (
FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/start/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/0/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
# mock mapping view info map
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview?range=[0-8191]/GET'] = (
FAKE_GET_MAPPING_VIEW_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/PUT'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/1/GET'] = (
FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/REMOVE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/lungroup?TYPE=256&'
'ASSOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=256&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
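# mock storage engine info map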
FAKE_GET_ENGINES_RESPONSE = """
{
"error":{
"code": 0
},
"data":[{
"NODELIST": "[]",
"ID": "0"
}]
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/storageengine/GET'] = (
FAKE_GET_ENGINES_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate?ASSOCIATEOBJTYPE=245&'
'ASSOCIATEOBJID=1&range=[0-8191]/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/CREATE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
# mock FC info map
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?ISFREE=true&'
'range=[0-8191]/GET'] = (
FAKE_FC_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/GET'] = (
FAKE_FC_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host_link?INITIATOR_TYPE=223'
'&INITIATOR_PORT_WWN=10000090fa0d6754/GET'] = (
FAKE_HOST_LINK_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup?range=[0-8191]&TYPE=257/GET'] = (
FAKE_PORT_GROUP_RESPONSE)
# mock system info map
MAP_COMMAND_TO_FAKE_RESPONSE['/system//GET'] = (
FAKE_SYSTEM_VERSION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]/GET'] = (
FAKE_GET_FC_INI_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['fc_initiator?range=[0-256]/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?PARENTTYPE=21&PARENTID=1/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition/POST'] = (
FAKE_SYSTEM_VERSION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]&PARENTID=1/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/0/GET'] = (
FAKE_SMARTCACHEPARTITION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/REMOVE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/cachepartition/0/GET'] = (
FAKE_SMARTCACHEPARTITION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroDomain?range=[0-32]/GET'] = (
FAKE_HYPERMETRODOMAIN_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/POST'] = (
FAKE_HYPERMETRO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/3400a30d844d0007/GET'] = (
FAKE_METRO_INFO_NEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/disable_hcpair/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/POST'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/GET'] = (
FAKE_HYPERMETRO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair?range=[0-4095]/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/synchronize_hcpair/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror?range=[0-8191]/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/smartcachepool/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
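# mock port group info map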
FAKE_GET_PORTG_BY_VIEW = """
{
"data": [{
"DESCRIPTION": "Please do NOT modify this. Engine ID: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
}],
"error": {
"code": 0
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/mappingview?TYPE=257&AS'
'SOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
FAKE_GET_PORTG_BY_VIEW)
FAKE_GET_PORT_BY_PORTG = """
{
"data":[{
"CONFSPEED":"0","FCCONFMODE":"3",
"FCRUNMODE":"0","HEALTHSTATUS":"1","ID":"2000643e8c4c5f66",
"MAXSUPPORTSPEED":"16000","NAME":"P0","PARENTID":"0B.1",
"PARENTTYPE":209,"RUNNINGSTATUS":"10","RUNSPEED":"8000",
"WWN":"2000643e8c4c5f66"
}],
"error":{
"code":0,"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate/portgroup?TYPE=212&ASSOCI'
'ATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_PORT_BY_PORTG)
FAKE_GET_PORTG = """
{
"data": {
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "Please DO NOT change thefollowing message: 0",
"ID": "0"
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/GET'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/PUT'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup/POST'] = (
FAKE_GET_PORT_BY_PORTG)
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup?ID=0&TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=2000643e8c4c5f66/DE'
'LETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_CREATE_PORTG = """
{
"data": {
"DESCRIPTION": "Please DO NOT change the following message: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/POST'] = FAKE_CREATE_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_GET_PORTG_FROM_PORT = """
{
"data": [{
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "PleaseDONOTchangethefollowingmessage: 0",
"ID": "0"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=1114368/GET'] = (
FAKE_GET_PORTG_FROM_PORT)
FAKE_GET_VIEW_BY_PORTG = """
{
"data": [{
"ASSOCIATEOBJID": "0",
"COUNT": "0",
"ASSOCIATEOBJTYPE": "0",
"INBANDLUNWWN": "",
"FORFILESYSTEM": "false",
"ID": "2",
"ENABLEINBANDCOMMAND": "false",
"NAME": "OpenStack_Mapping_View_1",
"WORKMODE": "0",
"TYPE": 245,
"HOSTLUNID": "0",
"DESCRIPTION": ""
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASS'
'OCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_VIEW_BY_PORTG)
FAKE_GET_LUNG_BY_VIEW = """
{
"data": [{
"TYPE": 256,
"NAME": "OpenStack_LunGroup_1",
"DESCRIPTION": "OpenStack_LunGroup_1",
"ID": "1"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate/mappingview?TYPE=256&ASSO'
'CIATEOBJTYPE=245&ASSOCIATEOBJID=2/GET'] = (
FAKE_GET_LUNG_BY_VIEW)
FAKE_LUN_COUNT_RESPONSE_1 = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOB'
'JTYPE=256&ASSOCIATEOBJID=1/GET'] = (
FAKE_LUN_COUNT_RESPONSE_1)
FAKE_PORTS_IN_PG_RESPONSE = """
{
"data": [{
"ID": "1114114",
"WWN": "2002643e8c4c5f66"
},
{
"ID": "1114113",
"WWN": "2001643e8c4c5f66"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE='
'257&ASSOCIATEOBJID=0/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
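# mock HyperMetro consistency group info map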
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetro_ConsistentGroup/POST'] = (
FAKE_CREATE_METROROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup?type"
"='15364'/GET"] = (
json.dumps(FAKE_GET_METROROUP_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/GET"] = (
FAKE_GET_METROROUP_ID_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/DELETE"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/stop/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/sync/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
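# mock remote device info map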
FAKE_GET_REMOTEDEV_RESPONSE = """
{
"data":[{
"ARRAYTYPE":"1",
"HEALTHSTATUS":"1",
"ID":"0",
"NAME":"Huawei.Storage",
"RUNNINGSTATUS":"1",
"WWN":"21003400a30d844d"
}],
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/remote_device/GET'] = (
FAKE_GET_REMOTEDEV_RESPONSE)
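# mock replication pair info map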
FAKE_CREATE_PAIR_RESPONSE = """
{
"data":{
"ID":"%s"
},
"error":{
"code":0,
"description":"0"
}
}
""" % TEST_PAIR_ID
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/POST'] = (
FAKE_CREATE_PAIR_RESPONSE)
FAKE_DELETE_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/DELETE' % TEST_PAIR_ID] = (
FAKE_DELETE_PAIR_RESPONSE)
FAKE_SET_PAIR_ACCESS_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/PUT' % TEST_PAIR_ID] = (
FAKE_SET_PAIR_ACCESS_RESPONSE)
FAKE_GET_PAIR_NORMAL_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "1",
"SECRESACCESS": "2",
"HEALTHSTATUS": "1",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SPLIT_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "26",
"SECRESACCESS": "2",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SYNC_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "23",
"SECRESACCESS": "2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/GET' % TEST_PAIR_ID] = (
FAKE_GET_PAIR_NORMAL_RESPONSE)
FAKE_SYNC_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/sync/PUT'] = (
FAKE_SYNC_PAIR_RESPONSE)
FAKE_SPLIT_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/split/PUT'] = (
FAKE_SPLIT_PAIR_RESPONSE)
FAKE_SWITCH_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/switch/PUT'] = (
FAKE_SWITCH_PAIR_RESPONSE)
FAKE_PORTS_IN_PG_RESPONSE = """
{
"data": [{
"ID": "1114114",
"WWN": "2002643e8c4c5f66"
},
{
"ID": "1114113",
"WWN": "2001643e8c4c5f66"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE='
'257&ASSOCIATEOBJID=0/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=1114369/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
'IATEOBJTYPE=257&ASSOCIATEOBJID=1114114/GET'] = (
FAKE_SWITCH_PAIR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
'IATEOBJTYPE=257&ASSOCIATEOBJID=1114113/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
REPLICA_BACKEND_ID = 'huawei-replica-1'
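# The fakes below replace the driver's collaborators so that no live array
# is needed: FakeHuaweiConf supplies a canned configuration, FakeClient
# serves REST calls from MAP_COMMAND_TO_FAKE_RESPONSE, and the fake drivers
# wire the pieces together.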
class FakeHuaweiConf(huawei_conf.HuaweiConf):
def __init__(self, conf, protocol):
self.conf = conf
self.protocol = protocol
def safe_get(self, key):
try:
return getattr(self.conf, key)
except Exception:
return
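    # Install a canned configuration on self.conf. The 192.0.2.x addresses
    # are documentation (TEST-NET-1) addresses, not a reachable array.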
def update_config_value(self):
setattr(self.conf, 'volume_backend_name', 'huawei_storage')
setattr(self.conf, 'san_address',
['http://192.0.2.69:8082/deviceManager/rest/'])
setattr(self.conf, 'san_user', 'admin')
setattr(self.conf, 'san_password', 'Admin@storage')
setattr(self.conf, 'san_product', 'V3')
setattr(self.conf, 'san_protocol', self.protocol)
setattr(self.conf, 'lun_type', constants.THICK_LUNTYPE)
setattr(self.conf, 'lun_ready_wait_interval', 2)
setattr(self.conf, 'lun_copy_wait_interval', 2)
setattr(self.conf, 'lun_timeout', 43200)
setattr(self.conf, 'lun_write_type', '1')
setattr(self.conf, 'lun_mirror_switch', '1')
setattr(self.conf, 'lun_prefetch_type', '1')
setattr(self.conf, 'lun_prefetch_value', '0')
setattr(self.conf, 'lun_policy', '0')
setattr(self.conf, 'lun_read_cache_policy', '2')
setattr(self.conf, 'lun_write_cache_policy', '5')
setattr(self.conf, 'storage_pools', ['OpenStack_Pool'])
setattr(self.conf, 'iscsi_default_target_ip', ['192.0.2.68'])
setattr(self.conf, 'metro_san_address',
['https://192.0.2.240:8088/deviceManager/rest/'])
setattr(self.conf, 'metro_storage_pools', 'OpenStack_Pool')
setattr(self.conf, 'metro_san_user', 'admin')
setattr(self.conf, 'metro_san_password', 'Admin@storage1')
setattr(self.conf, 'metro_domain_name', 'hypermetro_test')
iscsi_info = {'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.2',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1',
'TargetPortGroup': 'portgroup-test', }
setattr(self.conf, 'iscsi_info', [iscsi_info])
rmt_iscsi_info = ('{ Name: iqn.1993-08.debian:01:ec2bff7acxxx;\n'
'TargetIP:1.1.1.1;CHAPinfo:mm-user#mm-user@storage;'
'ALUA:1; TargetPortGroup:portgroup-test};\t\n '
'{ Name: iqn.1993-08.debian:01:ec2bff7acyyy;\n'
'TargetIP:2.2.2.2;CHAPinfo:nn-user#nn-user@storage;'
'ALUA:0; TargetPortGroup:portgroup-test1}\t\n')
targets = [{'backend_id': REPLICA_BACKEND_ID,
'storage_pool': 'OpenStack_Pool',
'san_address':
'https://192.0.2.69:8088/deviceManager/rest/',
'san_user': 'admin',
'san_password': 'Admin@storage1',
'iscsi_info': rmt_iscsi_info}]
setattr(self.conf, 'replication_device', targets)
setattr(self.conf, 'safe_get', self.safe_get)
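# REST client that answers every call from MAP_COMMAND_TO_FAKE_RESPONSE
# instead of issuing HTTP requests. The test_fail, test_multi_url_flag,
# cache_not_exist and partition_not_exist attributes let individual tests
# inject error responses.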
class FakeClient(rest_client.RestClient):
def __init__(self, configuration):
san_address = configuration.san_address
san_user = configuration.san_user
san_password = configuration.san_password
rest_client.RestClient.__init__(self, configuration,
san_address,
san_user,
san_password)
self.test_fail = False
self.test_multi_url_flag = False
self.cache_not_exist = False
self.partition_not_exist = False
def _get_snapshotid_by_name(self, snapshot_name):
return "11"
def _check_snapshot_exist(self, snapshot_id):
return True
def get_partition_id_by_name(self, name):
if self.partition_not_exist:
return None
return "11"
def get_cache_id_by_name(self, name):
if self.cache_not_exist:
return None
return "11"
def add_lun_to_cache(self, lunid, cache_id):
pass
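    # Normalize the incoming REST URL into a key of
    # MAP_COMMAND_TO_FAKE_RESPONSE: strip the endpoint prefix and the device
    # id, then append the HTTP method. For example, a GET on
    # 'http://192.0.2.69:8082/deviceManager/rest/210235G7J20000000000/lun/11'
    # is looked up as 'lun/11/GET'; keys registered without a leading '/'
    # match such post-login URLs.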
def do_call(self, url=False, data=None, method=None, calltimeout=4,
log_filter_flag=False):
url = url.replace('http://192.0.2.69:8082/deviceManager/rest', '')
command = url.replace('/210235G7J20000000000/', '')
data = json.dumps(data) if data else None
if method:
command = command + "/" + method
        if command in MAP_COMMAND_TO_FAKE_RESPONSE:
            data = MAP_COMMAND_TO_FAKE_RESPONSE[command]
if self.test_fail:
data = FAKE_ERROR_INFO_RESPONSE
if command == 'lun/11/GET':
data = FAKE_ERROR_LUN_INFO_RESPONSE
self.test_fail = False
if self.test_multi_url_flag:
data = FAKE_ERROR_CONNECT_RESPONSE
self.test_multi_url_flag = False
return json.loads(data)
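# Replica pair manager whose remote client is also a FakeClient, keeping
# replication operations in-process.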
class FakeReplicaPairManager(replication.ReplicaPairManager):
def _init_rmt_client(self):
self.rmt_client = FakeClient(self.conf)
class FakeISCSIStorage(huawei_driver.HuaweiISCSIDriver):
"""Fake Huawei Storage, Rewrite some methods of HuaweiISCSIDriver."""
def __init__(self, configuration):
self.configuration = configuration
self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
self.active_backend_id = None
self.replica = None
self.support_func = None
def do_setup(self):
self.metro_flag = True
self.huawei_conf.update_config_value()
self.get_local_and_remote_dev_conf()
self.client = FakeClient(configuration=self.configuration)
self.rmt_client = FakeClient(configuration=self.configuration)
self.replica_client = FakeClient(configuration=self.configuration)
self.metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
self.replica = FakeReplicaPairManager(self.client,
self.replica_client,
self.configuration)
class FakeFCStorage(huawei_driver.HuaweiFCDriver):
"""Fake Huawei Storage, Rewrite some methods of HuaweiISCSIDriver."""
def __init__(self, configuration):
self.configuration = configuration
self.fcsan = None
self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
self.active_backend_id = None
self.replica = None
self.support_func = None
def do_setup(self):
self.metro_flag = True
self.huawei_conf.update_config_value()
self.get_local_and_remote_dev_conf()
self.client = FakeClient(configuration=self.configuration)
self.rmt_client = FakeClient(configuration=self.configuration)
self.replica_client = FakeClient(configuration=self.configuration)
self.metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
self.replica = FakeReplicaPairManager(self.client,
self.replica_client,
self.configuration)
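# Shared fixtures: a fake iSCSI driver plus the volume, snapshot, replica,
# hypermetro and consistency-group objects reused by the test cases below.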
@ddt.ddt
class HuaweiTestBase(test.TestCase):
"""Base class for Huawei test cases.
Implement common setup operations or test cases in this class.
"""
def setUp(self):
super(HuaweiTestBase, self).setUp()
self.configuration = mock.Mock(spec=conf.Configuration)
self.driver = FakeISCSIStorage(configuration=self.configuration)
self.driver.do_setup()
self.volume = fake_volume.fake_volume_obj(
admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
admin_metadata=ADMIN_METADATA, id=ID)
self.snapshot = fake_snapshot.fake_snapshot_obj(
admin_contex, provider_location=PROVIDER_LOCATION, id=ID)
self.snapshot.volume = self.volume
self.replica_volume = fake_volume.fake_volume_obj(
admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
admin_metadata=ADMIN_METADATA, replication_status='disabled',
replication_driver_data=REPLICA_DRIVER_DATA, id=ID)
self.hyper_volume = fake_volume.fake_volume_obj(
admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
volume_metadata=VOL_METADATA, id=ID)
self.original_volume = fake_volume.fake_volume_obj(admin_contex,
id=ID)
self.current_volume = fake_volume.fake_volume_obj(
admin_contex, id=ID, provider_location=PROVIDER_LOCATION,
name_id=ID)
self.cgsnapshot = fake_cgsnapshot.fake_cgsnapshot_obj(
admin_contex, id=ID, consistencygroup_id=ID, status='available')
self.cg = fake_consistencygroup.fake_consistencyobject_obj(
admin_contex, id=ID, status='available')
def test_encode_name(self):
lun_name = huawei_utils.encode_name(self.volume.id)
# The hash value is different between py27 and py34.
# So we use assertIn.
self.assertIn(lun_name, ('21ec7341-4687000622165227970',
'21ec7341-7953146827712520106'))
@mock.patch.object(rest_client, 'RestClient')
def test_create_snapshot_success(self, mock_client):
lun_info = self.driver.create_snapshot(self.snapshot)
self.assertEqual(11, lun_info['provider_location'])
self.snapshot.volume_id = ID
self.snapshot.volume = self.volume
lun_info = self.driver.create_snapshot(self.snapshot)
self.assertEqual(11, lun_info['provider_location'])
@ddt.data('1', '', '0')
def test_copy_volume(self, input_speed):
self.driver.configuration.lun_copy_wait_interval = 0
self.volume.metadata = {'copyspeed': input_speed}
mocker = self.mock_object(
self.driver.client, 'create_luncopy',
mock.Mock(wraps=self.driver.client.create_luncopy))
self.driver._copy_volume(self.volume,
'fake_copy_name',
'fake_src_lun',
'fake_tgt_lun')
mocker.assert_called_once_with('fake_copy_name',
'fake_src_lun',
'fake_tgt_lun',
input_speed)
@ddt.data({'input_speed': '1',
'actual_speed': '1'},
{'input_speed': '',
'actual_speed': '2'},
{'input_speed': None,
'actual_speed': '2'},
{'input_speed': '5',
'actual_speed': '2'})
@ddt.unpack
def test_client_create_luncopy(self, input_speed, actual_speed):
mocker = self.mock_object(
self.driver.client, 'call',
mock.Mock(wraps=self.driver.client.call))
self.driver.client.create_luncopy('fake_copy_name',
'fake_src_lun',
'fake_tgt_lun',
input_speed)
mocker.assert_called_once_with(
mock.ANY,
{"TYPE": 219,
"NAME": 'fake_copy_name',
"DESCRIPTION": 'fake_copy_name',
"COPYSPEED": actual_speed,
"LUNCOPYTYPE": "1",
"SOURCELUN": "INVALID;fake_src_lun;INVALID;INVALID;INVALID",
"TARGETLUN": "INVALID;fake_tgt_lun;INVALID;INVALID;INVALID"}
)
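# iSCSI-specific tests; setUp rebuilds the fake driver with the hypermetro
# device configuration and logs the fake client in.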
@ddt.ddt
class HuaweiISCSIDriverTestCase(HuaweiTestBase):
def setUp(self):
super(HuaweiISCSIDriverTestCase, self).setUp()
self.configuration = mock.Mock(spec=conf.Configuration)
self.configuration.hypermetro_devices = hypermetro_devices
self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
self.driver = FakeISCSIStorage(configuration=self.configuration)
self.driver.do_setup()
self.portgroup = 'portgroup-test'
self.iscsi_iqns = ['iqn.2006-08.com.huawei:oceanstor:21000022a:'
':20503:192.0.2.1',
'iqn.2006-08.com.huawei:oceanstor:21000022a:'
':20500:192.0.2.2']
self.target_ips = ['192.0.2.1',
'192.0.2.2']
self.portgroup_id = 11
self.driver.client.login()
def test_parse_rmt_iscsi_info(self):
rmt_devs = self.driver.huawei_conf.get_replication_devices()
iscsi_info = rmt_devs[0]['iscsi_info']
expected_iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7acxxx',
'TargetIP': '1.1.1.1',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1',
'TargetPortGroup': 'portgroup-test'},
{'Name': 'iqn.1993-08.debian:01:ec2bff7acyyy',
'TargetIP': '2.2.2.2',
'CHAPinfo': 'nn-user;nn-user@storage',
'ALUA': '0',
'TargetPortGroup': 'portgroup-test1'}]
self.assertEqual(expected_iscsi_info, iscsi_info)
def test_parse_rmt_iscsi_info_without_iscsi_configuration(self):
self.configuration.replication_device[0]['iscsi_info'] = ''
rmt_devs = self.driver.huawei_conf.get_replication_devices()
iscsi_info = rmt_devs[0]['iscsi_info']
self.assertEqual([], iscsi_info)
def test_login_success(self):
device_id = self.driver.client.login()
self.assertEqual('210235G7J20000000000', device_id)
@ddt.data(constants.PWD_EXPIRED, constants.PWD_RESET)
def test_login_password_expires_and_reset_fail(self, state):
with mock.patch.object(self.driver.client, 'logout') as mock_logout:
self.mock_object(FakeClient, 'do_call',
return_value={"error": {"code": 0},
"data": {
"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": state}})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
mock_logout.assert_called_once_with()
def test_login_logout_fail(self):
login_info = {"error": {"code": 0},
"data": {"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": 3}}
logout_info = {"error": {"code": 1}, "data": {}}
self.mock_object(FakeClient, 'do_call',
side_effect=[login_info, logout_info])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_check_volume_exist_on_array(self):
self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None)
self.driver._check_volume_exist_on_array(
self.volume, constants.VOLUME_NOT_EXISTS_WARN)
def test_create_volume_success(self):
# Have pool info in the volume.
self.volume.host = 'ubuntu001@backend001#OpenStack_Pool'
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
# No pool info in the volume.
self.volume.host = 'ubuntu001@backend001'
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_replication_fail(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(replication.ReplicaCommonDriver, 'split')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(rest_client.RestClient,
'delete_lun',
side_effect=exception.VolumeBackendAPIException(
data='err'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume, self.replica_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_no_data(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
side_effect=[{}, task_info])
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_with_replication(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
moved, model_update = self.driver.migrate_volume(None,
self.replica_volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_fail_migration_fault(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "74",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume,
None, self.volume, test_host, None)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_fail_no_migrate_task(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "12",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume,
None, self.volume, test_host, None)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_with_type_id(self, pool_data):
self.driver.support_func = pool_data
self.volume.volume_type_id = '550c089b-bfdd-4f7f-86e1-3ba88125555c'
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
empty_dict = {}
self.mock_object(volume_types, 'get_volume_type',
return_value=test_new_type)
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_manage_existing_fail(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152, 'ALLOCTYPE': 1})
self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
self.mock_object(rest_client.RestClient, 'rename_lun')
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value={
'PARENTNAME': 'OpenStack_Pool',
'SNAPSHOTIDS': [],
'ID': 'ID1',
'HEALTHSTATUS': constants.STATUS_HEALTH,
'WWN': '6643e8c1004c5f6723e9f454003'})
self.mock_object(volume_types, 'get_volume_type',
return_value={'extra_specs': test_new_type})
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_check_needed_changes',
return_value={})
external_ref = {'source-name': 'test1',
'source-id': 'ID1'}
self.driver.manage_existing(self.volume, external_ref)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_volume_success(self, pool_data):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
def test_delete_snapshot_success(self):
self.driver.delete_snapshot(self.snapshot)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_create_volume_from_snapsuccess(self):
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(replication.ReplicaCommonDriver, 'sync')
model_update = self.driver.create_volume_from_snapshot(self.volume,
self.volume)
self.assertEqual('1', model_update['provider_location'])
driver_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': '1'}
driver_data = replication.to_string(driver_data)
self.assertEqual(driver_data, model_update['replication_driver_data'])
self.assertEqual('available', model_update['replication_status'])
@mock.patch.object(huawei_driver.HuaweiISCSIDriver,
'initialize_connection',
return_value={"data": {'target_lun': 1}})
def test_initialize_connection_snapshot_success(self, mock_iscsi_init):
iscsi_properties = self.driver.initialize_connection_snapshot(
self.snapshot, FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
mock_iscsi_init.assert_called_with(volume, FakeConnector)
def test_initialize_connection_success_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
        self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
                         return_value='11')
iscsi_properties = self.driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1, 1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
        self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
                         return_value='12')
        self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
                         return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.2',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.6',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.2']
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.6']
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_fail_no_port_in_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
return_value='11')
self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_fail_multipath_no_ip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = None
driver.client.iscsi_default_target_ip = None
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
@mock.patch.object(huawei_driver.HuaweiISCSIDriver,
'terminate_connection')
def test_terminate_connection_snapshot_success(self, mock_iscsi_term):
self.driver.terminate_connection_snapshot(self.snapshot,
FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
mock_iscsi_term.assert_called_with(volume, FakeConnector)
def test_terminate_connection_success(self):
self.driver.terminate_connection(self.volume, FakeConnector)
def test_get_volume_status(self):
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 6291456})
@mock.patch.object(rest_client.RestClient, 'extend_lun')
def test_extend_volume_size_equal(self, mock_extend, mock_lun_info):
self.driver.extend_volume(self.volume, 3)
self.assertEqual(0, mock_extend.call_count)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 5291456})
@mock.patch.object(rest_client.RestClient, 'extend_lun')
def test_extend_volume_success(self, mock_extend, mock_lun_info):
self.driver.extend_volume(self.volume, 3)
self.assertEqual(1, mock_extend.call_count)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 7291456})
def test_extend_volume_fail(self, mock_lun_info):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume, self.volume, 3)
def test_extend_nonexistent_volume(self):
self.volume = fake_volume.fake_volume_obj(admin_contex)
self.mock_object(rest_client.RestClient,
'get_lun_id_by_name',
return_value=None)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume,
self.volume, 3)
def test_get_admin_metadata(self):
metadata = [{'key': 'huawei_lun_wwn', 'value': '1'}]
tmp_volume = fake_volume.fake_volume_obj(
admin_contex, volume_admin_metadata=metadata)
expected_value = {'huawei_lun_wwn': '1'}
admin_metadata = huawei_utils.get_admin_metadata(tmp_volume)
self.assertEqual(expected_value, admin_metadata)
metadata = {'huawei_lun_wwn': '1'}
tmp_volume = fake_volume.fake_volume_obj(admin_contex)
tmp_volume.admin_metadata = metadata
admin_metadata = huawei_utils.get_admin_metadata(tmp_volume)
self.assertEqual(expected_value, admin_metadata)
def test_login_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_create_snapshot_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot, self.snapshot)
def test_create_volume_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
def test_delete_volume_fail(self):
self.driver.client.test_fail = True
self.driver.delete_volume(self.volume)
def test_delete_snapshot_fail(self):
self.driver.client.test_fail = True
self.driver.delete_snapshot(self.snapshot)
def test_delete_snapshot_with_snapshot_nonexistent(self):
self.snapshot.provider_location = None
self.driver.delete_snapshot(self.snapshot)
def test_initialize_connection_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_lun_is_associated_to_lungroup(self):
self.driver.client.associate_lun_to_lungroup('11', '11')
result = self.driver.client._is_lun_associated_to_lungroup('11',
'11')
self.assertTrue(result)
def test_lun_is_not_associated_to_lun_group(self):
self.driver.client.associate_lun_to_lungroup('12', '12')
self.driver.client.remove_lun_from_lungroup('12', '12')
result = self.driver.client._is_lun_associated_to_lungroup('12', '12')
self.assertFalse(result)
def test_get_tgtip(self):
portg_id = self.driver.client.get_tgt_port_group(self.portgroup)
target_ip = self.driver.client._get_tgt_ip_from_portgroup(portg_id)
self.assertEqual(self.target_ips, target_ip)
def test_find_chap_info(self):
tmp_dict = {}
tmp_dict['Name'] = 'iqn.1993-08.debian:01:ec2bff7ac3a3'
tmp_dict['CHAPinfo'] = 'mm-user;mm-user@storage'
iscsi_info = [tmp_dict]
initiator_name = FakeConnector['initiator']
chapinfo = self.driver.client.find_chap_info(iscsi_info,
initiator_name)
chap_username, chap_password = chapinfo.split(';')
self.assertEqual('mm-user', chap_username)
self.assertEqual('mm-user@storage', chap_password)
def test_find_alua_info(self):
tmp_dict = {}
tmp_dict['Name'] = 'iqn.1993-08.debian:01:ec2bff7ac3a3'
tmp_dict['ALUA'] = '1'
iscsi_info = [tmp_dict]
initiator_name = FakeConnector['initiator']
        alua_type = self.driver.client._find_alua_info(iscsi_info,
                                                       initiator_name)
        self.assertEqual('1', alua_type)
def test_get_pool_info(self):
pools = [{"NAME": "test001",
"ID": "0",
"USERFREECAPACITY": "36",
"USERTOTALCAPACITY": "48",
"USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
"TIER0CAPACITY": "48",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "0"},
{"NAME": "test002",
"ID": "1",
"USERFREECAPACITY": "37",
"USERTOTALCAPACITY": "49",
"USAGETYPE": constants.FILE_SYSTEM_POOL_TYPE,
"TIER0CAPACITY": "0",
"TIER1CAPACITY": "49",
"TIER2CAPACITY": "0"},
{"NAME": "test003",
"ID": "0",
"USERFREECAPACITY": "36",
"DATASPACE": "35",
"USERTOTALCAPACITY": "48",
"USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
"TIER0CAPACITY": "0",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "48"}]
pool_name = 'test001'
test_info = {'CAPACITY': '36', 'ID': '0', 'TOTALCAPACITY': '48',
'TIER0CAPACITY': '48', 'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0'}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test002'
test_info = {}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test000'
test_info = {}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test003'
test_info = {'CAPACITY': '35', 'ID': '0', 'TOTALCAPACITY': '48',
'TIER0CAPACITY': '0', 'TIER1CAPACITY': '0',
'TIER2CAPACITY': '48'}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
def test_get_smartx_specs_opts(self):
smartx_opts = smartx.SmartX().get_smartx_specs_opts(smarttier_opts)
self.assertEqual('3', smartx_opts['policy'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MAXIOPS': '100',
'IOType': '2'})
def test_create_smartqos(self, mock_qos_value, pool_data):
self.driver.support_func = pool_data
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
@mock.patch.object(qos_specs, 'get_qos_specs',
return_value={'specs': {'maxBandWidth': '100',
'IOType': '0'},
'consumer': 'back-end'})
def test_create_smartqos_success(self,
mock_qos_specs,
mock_value_type,
mock_volume_params):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data([{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
FAKE_POOLS_UNSUPPORT_REPORT],
[{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
FAKE_POOLS_SUPPORT_REPORT],
[{'specs': {'minBandWidth': '0', 'IOType': '2'}},
FAKE_POOLS_UNSUPPORT_REPORT],
[{'specs': {'minBandWidth': '0', 'IOType': '2'}},
FAKE_POOLS_SUPPORT_REPORT])
@ddt.unpack
def test_create_smartqos_failed(self, qos_specs_value, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
self.mock_object(qos_specs, 'get_qos_specs',
return_value=qos_specs_value)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_create_smartqos_without_huawei_type(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
self.mock_object(qos_specs, 'get_qos_specs',
return_value={'specs': {'fake_qos_type': '100',
'IOType': '2'}})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MAXIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=(None, []))
def test_create_smartqos_on_v3r3_with_no_qos(self,
mock_find_available_qos,
mock_qos_value,
mock_array_version):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=('11', u'["0", "2", "3"]'))
def test_create_smartqos_on_v3r3_with_qos(self,
mock_find_available_qos,
mock_qos_value,
mock_array_version):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=('11', u'["0", "2", "3"]'))
def test_create_smartqos_on_v3r3_with_unsupport_qos(
self, mock_find_available_qos,
mock_qos_value, mock_array_version):
self.driver.support_func = FAKE_POOLS_UNSUPPORT_REPORT
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=(None, []))
@mock.patch.object(rest_client.RestClient, 'activate_deactivate_qos')
def test_create_smartqos_on_v3r3_active_failed(self,
pool_data,
mock_activate_qos,
mock_find_available_qos,
mock_qos_value,
mock_array_version):
self.driver.support_func = pool_data
mock_activate_qos.side_effect = (
exception.VolumeBackendAPIException(data='Activate or deactivate '
'QoS error. '))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=(None, []))
@mock.patch.object(rest_client.RestClient, 'create_qos_policy')
def test_create_smartqos_on_v3r3_qos_failed(self,
pool_data,
mock_create_qos,
mock_find_available_qos,
mock_qos_value,
mock_array_version):
self.driver.support_func = pool_data
mock_create_qos.side_effect = (
exception.VolumeBackendAPIException(data='Create QoS policy '
'error.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_qos_info',
return_value={"LUNLIST": u'["1", "2", "3"]',
"RUNNINGSTATUS": "2"})
def test_delete_smartqos_with_lun_left(self, mock_qos_info, pool_data):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_qos_info',
return_value={"LUNLIST": u'["1"]',
"RUNNINGSTATUS": "2"})
def test_delete_smartqos_with_no_lun_left(self, mock_qos_info, pool_data):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
def test_create_smartx(self, mock_volume_types, mock_add_lun_to_partition):
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data([{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': None,
'partitionname': 'partition-test'},
FAKE_POOLS_UNSUPPORT_REPORT],
[{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': 'cache-test',
'partitionname': None},
FAKE_POOLS_SUPPORT_REPORT],
[{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': None,
'partitionname': 'partition-test'},
FAKE_POOLS_SUPPORT_REPORT],
[{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': 'cache-test',
'partitionname': None},
FAKE_POOLS_UNSUPPORT_REPORT])
@ddt.unpack
def test_create_smartCache_failed(self, opts, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_params',
return_value=opts)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
def test_create_smartCache_failed_with_no_cacheid(self,
mock_volume_type,
pool_data):
self.driver.client.cache_not_exist = True
self.driver.support_func = pool_data
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
def test_create_smartPartition_failed_with_no_partid(self,
mock_volume_type,
pool_data):
self.driver.client.partition_not_exist = True
self.driver.support_func = pool_data
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
def test_find_available_qos(self):
qos = {'MAXIOPS': '100', 'IOType': '2'}
fake_qos_info_response_equal = {
"error": {
"code": 0
},
"data": [{
"ID": "11",
"MAXIOPS": "100",
"LATENCY": "0",
"IOType": "2",
"FSLIST": u'[""]',
'RUNNINGSTATUS': "2",
"NAME": "OpenStack_57_20151225102851",
"LUNLIST": u'["1", "2", "3", "4", "5", "6", "7", "8", "9",\
"10", ,"11", "12", "13", "14", "15", "16", "17", "18", "19",\
"20", ,"21", "22", "23", "24", "25", "26", "27", "28", "29",\
"30", ,"31", "32", "33", "34", "35", "36", "37", "38", "39",\
"40", ,"41", "42", "43", "44", "45", "46", "47", "48", "49",\
"50", ,"51", "52", "53", "54", "55", "56", "57", "58", "59",\
"60", ,"61", "62", "63", "64"]'
}]
}
# Number of LUNs in QoS is equal to 64
with mock.patch.object(rest_client.RestClient, 'get_qos',
return_value=fake_qos_info_response_equal):
(qos_id, lun_list) = self.driver.client.find_available_qos(qos)
self.assertEqual((None, []), (qos_id, lun_list))
# Number of LUNs in QoS is less than 64
fake_qos_info_response_less = {
"error": {
"code": 0
},
"data": [{
"ID": "11",
"MAXIOPS": "100",
"LATENCY": "0",
"IOType": "2",
"FSLIST": u'[""]',
'RUNNINGSTATUS': "2",
"NAME": "OpenStack_57_20151225102851",
"LUNLIST": u'["0", "1", "2"]'
}]
}
with mock.patch.object(rest_client.RestClient, 'get_qos',
return_value=fake_qos_info_response_less):
(qos_id, lun_list) = self.driver.client.find_available_qos(qos)
self.assertEqual(("11", u'["0", "1", "2"]'), (qos_id, lun_list))
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
    def test_create_hypermetro_success(self,
                                       mock_volume_ready,
                                       mock_hyper_domain,
                                       mock_pool_info,
                                       mock_all_pool_info,
                                       mock_volume_params):
metadata = {"hypermetro_id": '11',
"remote_lun_id": '1'}
lun_info = self.driver.create_volume(self.hyper_volume)
self.assertEqual(metadata, lun_info['metadata'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(hypermetro.HuaweiHyperMetro,
'_create_hypermetro_pair')
@mock.patch.object(rest_client.RestClient, 'delete_lun')
def test_create_hypermetro_fail(self,
pool_data,
mock_delete_lun,
mock_hyper_pair_info,
mock_volume_ready,
mock_hyper_domain,
mock_pool_info,
mock_all_pool_info,
mock_hypermetro_opts
):
self.driver.client.login()
self.driver.support_func = pool_data
mock_hyper_pair_info.side_effect = exception.VolumeBackendAPIException(
data='Create hypermetro error.')
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.hyper_volume)
mock_delete_lun.assert_called_with('1')
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value={})
def test_create_hypermetro_remote_pool_none_fail(self,
mock_pool_info,
mock_all_pool_info):
param = {'TYPE': '11',
'PARENTID': ''}
self.driver.client.login()
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.create_hypermetro,
'2', param)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'create_lun',
return_value={'CAPACITY': '2097152',
'DESCRIPTION': '2f0635',
'HEALTHSTATUS': '1',
'ALLOCTYPE': '1',
'WWN': '6643e8c1004c5f6723e9f454003',
'ID': '1',
'RUNNINGSTATUS': '27',
'NAME': '5mFHcBv4RkCcD'})
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
def test_create_hypermetro_remote_pool_parentid(self,
mock_volume_ready,
mock_hyper_domain,
mock_create_lun,
mock_pool_info,
mock_all_pool_info):
param = {'TYPE': '11',
'PARENTID': ''}
self.driver.metro.create_hypermetro('2', param)
lun_PARENTID = mock_create_lun.call_args[0][0]['PARENTID']
self.assertEqual(FAKE_FIND_POOL_RESPONSE['ID'], lun_PARENTID)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
def test_hypermetro_none_map_info_fail(self, mock_metadata):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'check_lun_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'delete_hypermetro',
return_value=FAKE_COMMON_SUCCESS_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'delete_lun',
return_value=None)
    def test_delete_hypermetro_success(self,
                                       pool_data,
                                       mock_delete_lun,
                                       mock_delete_hypermetro,
                                       mock_check_hypermetro,
                                       mock_lun_exist):
self.driver.support_func = pool_data
self.driver.delete_volume(self.hyper_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'check_lun_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_by_id',
return_value=FAKE_METRO_INFO_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'delete_hypermetro')
@mock.patch.object(rest_client.RestClient, 'delete_lun',
return_value=None)
    def test_delete_hypermetro_fail(self,
                                    pool_data,
                                    mock_delete_lun,
                                    mock_delete_hypermetro,
                                    mock_metro_info,
                                    mock_check_hypermetro,
                                    mock_lun_exist):
self.driver.support_func = pool_data
mock_delete_hypermetro.side_effect = (
exception.VolumeBackendAPIException(data='Delete hypermetro '
'error.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume, self.hyper_volume)
mock_delete_lun.assert_called_with('11')
def test_manage_existing_get_size_invalid_reference(self):
# Can't find LUN by source-name.
external_ref = {'source-name': 'LUN1'}
with mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None):
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_get_size,
self.volume, external_ref)
self.assertIsNotNone(re.search('please check the source-name '
'or source-id', ex.msg))
# Can't find LUN by source-id.
external_ref = {'source-id': 'ID1'}
with mock.patch.object(rest_client.RestClient, 'get_lun_info') as m_gt:
m_gt.side_effect = exception.VolumeBackendAPIException(
data='Error')
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.manage_existing_get_size,
self.volume, external_ref)
@ddt.data({'source-id': 'ID1'}, {'source-name': 'LUN1'},
{'source-name': 'LUN1', 'source-id': 'ID1'})
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 3097152})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
    def test_manage_existing_get_size_success(self, external_ref,
                                              mock_get_lun_id_by_name,
                                              mock_get_lun_info):
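        # 3097152 sectors * 512 bytes is roughly 1.48 GB; the asserted size
        # of 2 assumes the driver rounds capacity up to whole GBs.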
size = self.driver.manage_existing_get_size(self.volume,
external_ref)
self.assertEqual(2, size)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_pool_mismatch(self, mock_get_by_name,
mock_get_info):
# LUN does not belong to the specified pool.
with mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value={'PARENTNAME': 'StoragePool'}):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('The specified LUN does not belong'
' to the given pool', ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_lun_abnormal(self, mock_get_by_name,
mock_get_info):
# Status is not normal.
ret = {'PARENTNAME': "OpenStack_Pool",
'HEALTHSTATUS': '2'}
with mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value=ret):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('LUN status is not normal', ex.msg))
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs',
return_value=[{'LOCALOBJID': 'ID1'}])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
    def test_manage_existing_with_hypermetro(self, pool_data,
                                             mock_get_by_name,
                                             mock_get_info,
                                             mock_get_hyper_pairs):
self.driver.support_func = pool_data
# Exists in a HyperMetroPair.
with mock.patch.object(rest_client.RestClient,
'get_hypermetro_pairs',
return_value=[{'LOCALOBJID': 'ID1'}]):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('HyperMetroPair', ex.msg))
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs')
@mock.patch.object(rest_client.RestClient, 'rename_lun')
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH,
'WWN': '6643e8c1004c5f6723e9f454003'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_with_lower_version(self, pool_data,
mock_get_by_name,
mock_get_info, mock_rename,
mock_get_hyper_pairs):
self.driver.support_func = pool_data
mock_get_hyper_pairs.side_effect = (
exception.VolumeBackendAPIException(data='err'))
external_ref = {'source-name': 'LUN1'}
model_update = self.driver.manage_existing(self.volume,
external_ref)
expected_val = {
'admin_metadata': {
'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
},
'provider_location': 'ID1'}
self.assertEqual(expected_val, model_update)
@ddt.data([[{'PRILUNID': 'ID1'}], []],
[[{'PRILUNID': 'ID2'}], ['ID1', 'ID2']])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_with_splitmirror(self, ddt_data,
mock_get_by_name,
mock_get_info):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
# Exists in a SplitMirror.
with mock.patch.object(rest_client.RestClient, 'get_split_mirrors',
return_value=ddt_data[0]), \
mock.patch.object(rest_client.RestClient, 'get_target_luns',
return_value=ddt_data[1]):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('SplitMirror', ex.msg))
@ddt.data([[{'PARENTID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
[[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
[[{'PARENTID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT],
[[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
@ddt.unpack
def test_manage_existing_under_migration(self, ddt_data, pool_data,
mock_get_by_name,
mock_get_info):
self.driver.support_func = pool_data
# Exists in a migration task.
with mock.patch.object(rest_client.RestClient, 'get_migration_task',
return_value=ddt_data):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('migration', ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'SNAPSHOTIDS': [],
'ISADD2LUNGROUP': 'true',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_with_lungroup(self, mock_get_by_name,
mock_get_info):
# Already in LUN group.
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('Already exists in a LUN group',
ex.msg))
@ddt.data([{'source-name': 'LUN1'}, FAKE_POOLS_UNSUPPORT_REPORT],
[{'source-name': 'LUN1'}, FAKE_POOLS_SUPPORT_REPORT],
[{'source-id': 'ID1'}, FAKE_POOLS_UNSUPPORT_REPORT],
[{'source-id': 'ID1'}, FAKE_POOLS_SUPPORT_REPORT])
@mock.patch.object(rest_client.RestClient, 'rename_lun')
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value={'PARENTNAME': 'OpenStack_Pool',
'SNAPSHOTIDS': [],
'ID': 'ID1',
'HEALTHSTATUS': constants.STATUS_HEALTH,
'WWN': '6643e8c1004c5f6723e9f454003'})
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ALLOCTYPE': 1})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
@ddt.unpack
    def test_manage_existing_success(self, external_ref, pool_data,
                                     mock_get_by_name, mock_get_info,
                                     mock_check_lun, mock_rename):
self.driver.support_func = pool_data
model_update = self.driver.manage_existing(self.volume,
external_ref)
expected_val = {
'admin_metadata': {
'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
},
'provider_location': 'ID1'}
self.assertEqual(expected_val, model_update)
def test_unmanage(self):
self.driver.unmanage(self.volume)
def test_manage_existing_snapshot_abnormal(self):
with mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_snapshot_info_by_ref',
return_value={'HEALTHSTATUS': '2',
'PARENTID': '11'}):
external_ref = {'source-name': 'test1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_snapshot,
self.snapshot, external_ref)
self.assertIsNotNone(re.search('Snapshot status is not normal',
ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
return_value={'ID': 'ID1',
'EXPOSEDTOINITIATOR': 'true',
'NAME': 'test1',
'PARENTID': '11',
'USERCAPACITY': 2097152,
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value='ID1')
def test_manage_existing_snapshot_with_lungroup(self, mock_get_by_name,
mock_get_info):
# Already in LUN group.
external_ref = {'source-name': 'test1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_snapshot,
self.snapshot, external_ref)
self.assertIsNotNone(re.search('Snapshot is exposed to initiator',
ex.msg))
@mock.patch.object(rest_client.RestClient, 'rename_snapshot')
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_snapshot_info_by_ref',
return_value={'ID': 'ID1',
'EXPOSEDTOINITIATOR': 'false',
'NAME': 'test1',
'PARENTID': '11',
'USERCAPACITY': 2097152,
'HEALTHSTATUS': constants.STATUS_HEALTH})
def test_manage_existing_snapshot_success(self, mock_get_info,
mock_rename):
external_ref = {'source-name': 'test1'}
model_update = self.driver.manage_existing_snapshot(self.snapshot,
external_ref)
self.assertEqual({'provider_location': 'ID1'}, model_update)
external_ref = {'source-id': 'ID1'}
model_update = self.driver.manage_existing_snapshot(self.snapshot,
external_ref)
self.assertEqual({'provider_location': 'ID1'}, model_update)
@mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
return_value={'ID': 'ID1',
'EXPOSEDTOINITIATOR': 'false',
'NAME': 'test1',
'USERCAPACITY': 2097152,
'PARENTID': '12',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value='ID1')
def test_manage_existing_snapshot_mismatch_lun(self, mock_get_by_name,
mock_get_info):
external_ref = {'source-name': 'test1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_snapshot,
self.snapshot, external_ref)
self.assertIsNotNone(re.search("Snapshot doesn't belong to volume",
ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
return_value={'USERCAPACITY': 3097152})
@mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value='ID1')
def test_manage_existing_snapshot_get_size_success(self,
mock_get_id_by_name,
mock_get_info):
external_ref = {'source-name': 'test1',
'source-id': 'ID1'}
size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
external_ref)
self.assertEqual(2, size)
external_ref = {'source-name': 'test1'}
size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
external_ref)
self.assertEqual(2, size)
external_ref = {'source-id': 'ID1'}
size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
external_ref)
self.assertEqual(2, size)
def test_unmanage_snapshot(self):
self.driver.unmanage_snapshot(self.snapshot)
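    # Replication tests: replication_driver_data carries the pair id and the
    # remote LUN id, serialized through replication.to_string().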
@ddt.data(sync_replica_specs, async_replica_specs)
def test_create_replication_success(self, mock_type):
self.mock_object(replication.ReplicaCommonDriver, 'sync')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': mock_type})
model_update = self.driver.create_volume(self.replica_volume)
driver_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': '1'}
driver_data = replication.to_string(driver_data)
self.assertEqual(driver_data, model_update['replication_driver_data'])
self.assertEqual('available', model_update['replication_status'])
@ddt.data(
[
rest_client.RestClient,
'get_array_info',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(return_value={}),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'wait_volume_online',
mock.Mock(side_effect=[
None,
exception.VolumeBackendAPIException(data='err')]),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'create_pair',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaCommonDriver,
'sync',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'get_array_info',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(return_value={}),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'wait_volume_online',
mock.Mock(side_effect=[
None,
exception.VolumeBackendAPIException(data='err')]),
FAKE_POOLS_SUPPORT_REPORT
],
[
rest_client.RestClient,
'create_pair',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaCommonDriver,
'sync',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
)
@ddt.unpack
def test_create_replication_fail(self, mock_module, mock_func,
mock_value, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(replication.ReplicaPairManager, '_delete_pair')
self.mock_object(mock_module, mock_func, mock_value)
self.assertRaises(
exception.VolumeBackendAPIException,
self.driver.create_volume, self.replica_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_replication_success(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(replication.ReplicaCommonDriver, 'split')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.driver.delete_volume(self.replica_volume)
self.mock_object(rest_client.RestClient, 'check_lun_exist',
return_value=False)
self.driver.delete_volume(self.replica_volume)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_volume_online(self):
replica = FakeReplicaPairManager(self.driver.client,
self.driver.replica_client,
self.configuration)
lun_info = {'ID': '11'}
replica.wait_volume_online(self.driver.client, lun_info)
        offline_status = {'RUNNINGSTATUS': '28'}
        with mock.patch.object(rest_client.RestClient, 'get_lun_info',
                               return_value=offline_status):
self.assertRaises(exception.VolumeBackendAPIException,
replica.wait_volume_online,
self.driver.client,
lun_info)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_second_access(self):
pair_id = '1'
access_ro = constants.REPLICA_SECOND_RO
access_rw = constants.REPLICA_SECOND_RW
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.mock_object(replication.PairOp, 'get_replica_info',
return_value={'SECRESACCESS': access_ro})
self.mock_object(huawei_utils.time, 'time',
side_effect=utils.generate_timeout_series(
constants.DEFAULT_REPLICA_WAIT_TIMEOUT))
common_driver.wait_second_access(pair_id, access_ro)
self.assertRaises(exception.VolumeBackendAPIException,
common_driver.wait_second_access, pair_id, access_rw)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_replica_ready(self):
normal_status = {
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
split_status = {
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SPLIT,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
sync_status = {
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SYNC,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
pair_id = '1'
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
with mock.patch.object(replication.PairOp, 'get_replica_info',
return_value=normal_status):
common_driver.wait_replica_ready(pair_id)
with mock.patch.object(
replication.PairOp,
'get_replica_info',
side_effect=[sync_status, normal_status]):
common_driver.wait_replica_ready(pair_id)
with mock.patch.object(replication.PairOp, 'get_replica_info',
return_value=split_status):
self.assertRaises(exception.VolumeBackendAPIException,
common_driver.wait_replica_ready, pair_id)
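    # The failover/failback tests below check that failover_host() swaps the
    # active and replica REST clients and returns per-volume status updates.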
def test_failover_to_current(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.client)
self.assertEqual(old_replica_client, driver.replica_client)
self.assertEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(0, len(volumes_update))
def test_failover_normal_volumes(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], REPLICA_BACKEND_ID)
self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.volume.id, v_id)
self.assertEqual('error', v_update['status'])
self.assertEqual(self.volume['status'],
v_update['metadata']['old_status'])
def test_failback_to_current(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], REPLICA_BACKEND_ID)
self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
self.assertEqual(old_client, driver.client)
self.assertEqual(old_replica_client, driver.replica_client)
self.assertEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(0, len(volumes_update))
def test_failback_normal_volumes(self):
self.volume.status = 'error'
self.volume.metadata = {'old_status': 'available'}
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.volume.id, v_id)
self.assertEqual('available', v_update['status'])
self.assertNotIn('old_status', v_update['metadata'])
def test_failover_replica_volumes(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
self.mock_object(replication.ReplicaCommonDriver, 'failover')
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
secondary_id, volumes_update = driver.failover_host(
None, [self.replica_volume], REPLICA_BACKEND_ID)
self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.replica_volume.id, v_id)
self.assertEqual('1', v_update['provider_location'])
self.assertEqual('failed-over', v_update['replication_status'])
new_drv_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': self.replica_volume.provider_location}
new_drv_data = replication.to_string(new_drv_data)
self.assertEqual(new_drv_data, v_update['replication_driver_data'])
@ddt.data({}, {'pair_id': TEST_PAIR_ID})
def test_failover_replica_volumes_invalid_drv_data(self, mock_drv_data):
volume = self.replica_volume
volume['replication_driver_data'] = replication.to_string(
mock_drv_data)
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
secondary_id, volumes_update = driver.failover_host(
None, [volume], REPLICA_BACKEND_ID)
        self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(volume.id, v_id)
self.assertEqual('error', v_update['replication_status'])
def test_failback_replica_volumes(self):
self.mock_object(replication.ReplicaCommonDriver, 'enable')
self.mock_object(replication.ReplicaCommonDriver, 'wait_replica_ready')
self.mock_object(replication.ReplicaCommonDriver, 'failover')
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
volume = self.replica_volume
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.replica_volume.id, v_id)
self.assertEqual('1', v_update['provider_location'])
self.assertEqual('available', v_update['replication_status'])
new_drv_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': self.replica_volume.provider_location}
new_drv_data = replication.to_string(new_drv_data)
self.assertEqual(new_drv_data, v_update['replication_driver_data'])
@ddt.data({}, {'pair_id': TEST_PAIR_ID})
def test_failback_replica_volumes_invalid_drv_data(self, mock_drv_data):
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
volume = self.replica_volume
volume['replication_driver_data'] = replication.to_string(
mock_drv_data)
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.replica_volume.id, v_id)
self.assertEqual('error', v_update['replication_status'])
@unittest.skip("Skip until bug #1578986 is fixed")
@mock.patch('oslo_service.loopingcall.FixedIntervalLoopingCall',
new=utils.ZeroIntervalLoopingCall)
@mock.patch.object(replication.PairOp, 'is_primary',
side_effect=[False, True])
@mock.patch.object(replication.ReplicaCommonDriver, 'split')
@mock.patch.object(replication.ReplicaCommonDriver, 'unprotect_second')
def test_replication_driver_enable_success(self,
mock_unprotect,
mock_split,
mock_is_primary):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
common_driver.enable(replica_id)
self.assertTrue(mock_unprotect.called)
self.assertTrue(mock_split.called)
self.assertTrue(mock_is_primary.called)
@mock.patch.object(replication.PairOp, 'is_primary', return_value=False)
@mock.patch.object(replication.ReplicaCommonDriver, 'split')
def test_replication_driver_failover_success(self,
mock_split,
mock_is_primary):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
common_driver.failover(replica_id)
self.assertTrue(mock_split.called)
self.assertTrue(mock_is_primary.called)
@mock.patch.object(replication.PairOp, 'is_primary', return_value=True)
def test_replication_driver_failover_fail(self, mock_is_primary):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.assertRaises(
exception.VolumeBackendAPIException,
common_driver.failover,
replica_id)
@ddt.data(constants.REPLICA_SECOND_RW, constants.REPLICA_SECOND_RO)
def test_replication_driver_protect_second(self, mock_access):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.mock_object(replication.ReplicaCommonDriver, 'wait_second_access')
self.mock_object(
replication.PairOp,
'get_replica_info',
return_value={'SECRESACCESS': mock_access})
common_driver.protect_second(replica_id)
common_driver.unprotect_second(replica_id)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_replication_driver_sync(self):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
async_normal_status = {
'REPLICATIONMODEL': constants.REPLICA_ASYNC_MODEL,
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
self.mock_object(replication.ReplicaCommonDriver, 'protect_second')
self.mock_object(replication.PairOp, 'get_replica_info',
return_value=async_normal_status)
common_driver.sync(replica_id, True)
common_driver.sync(replica_id, False)
def test_replication_driver_split(self):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.mock_object(replication.ReplicaCommonDriver, 'wait_expect_state')
self.mock_object(
replication.PairOp, 'split',
side_effect=exception.VolumeBackendAPIException(data='err'))
common_driver.split(replica_id)
@mock.patch.object(replication.PairOp, 'split')
@ddt.data(constants.REPLICA_RUNNING_STATUS_SPLIT,
constants.REPLICA_RUNNING_STATUS_INVALID,
constants.REPLICA_RUNNING_STATUS_ERRUPTED)
def test_replication_driver_split_already_disabled(self, mock_status,
mock_op_split):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
pair_info = json.loads(FAKE_GET_PAIR_NORMAL_RESPONSE)['data']
pair_info['RUNNINGSTATUS'] = mock_status
self.mock_object(rest_client.RestClient, 'get_pair_by_id',
return_value=pair_info)
common_driver.split(replica_id)
self.assertFalse(mock_op_split.called)
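    # AbsReplicaOp appears to provide default no-op hooks; calling each one
    # verifies the defaults are safe to invoke directly.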
def test_replication_base_op(self):
replica_id = '1'
op = replication.AbsReplicaOp(None)
op.create()
op.delete(replica_id)
op.protect_second(replica_id)
op.unprotect_second(replica_id)
op.sync(replica_id)
op.split(replica_id)
op.switch(replica_id)
op.is_primary({})
op.get_replica_info(replica_id)
        op._is_status(None, {'key': 'value'}, None)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"error": {"code": 0}})
def test_get_tgt_port_group_no_portg_exist(self, mock_call):
portg = self.driver.client.get_tgt_port_group('test_portg')
self.assertIsNone(portg)
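    # The target IQN is parsed out of the REST 'ID' field, which seems to
    # pack '<index>+<iqn>,t,<tpgt>'; only an entry whose IQN ends with the
    # requested IP should match.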
def test_get_tgt_iqn_from_rest_match(self):
match_res = {
'data': [{
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.19,t,0x01'
}, {
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.191,t,0x01'
}],
'error': {
'code': 0
}
}
ip = '111.111.111.19'
expected_iqn = 'iqn.2006-08.com: 210048cee9d: 111.111.111.19'
self.mock_object(rest_client.RestClient, 'call',
return_value=match_res)
iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
self.assertEqual(expected_iqn, iqn)
def test_get_tgt_iqn_from_rest_mismatch(self):
match_res = {
'data': [{
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.191,t,0x01'
}, {
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.192,t,0x01'
}],
'error': {
'code': 0
}
}
ip = '192.0.2.19'
self.mock_object(rest_client.RestClient, 'call',
return_value=match_res)
iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
self.assertIsNone(iqn)
def test_create_cgsnapshot(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
model, snapshots = self.driver.create_cgsnapshot(ctxt,
self.cgsnapshot,
test_snapshots)
snapshots_model_update = [{'id': '21ec7341-9256-497b-97d9'
'-ef48edcf0635',
'status': 'available',
'provider_location': 11}]
self.assertEqual(snapshots_model_update, snapshots)
self.assertEqual('available', model['status'])
def test_create_cgsnapshot_create_snapshot_fail(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
self.mock_object(rest_client.RestClient, 'create_snapshot',
side_effect=(
exception.VolumeBackendAPIException(data='err')))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cgsnapshot,
ctxt,
self.cgsnapshot,
test_snapshots)
def test_create_cgsnapshot_active_snapshot_fail(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
self.mock_object(rest_client.RestClient, 'activate_snapshot',
side_effect=(
exception.VolumeBackendAPIException(data='err')))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cgsnapshot,
ctxt,
self.cgsnapshot,
test_snapshots)
def test_delete_cgsnapshot(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
self.driver.delete_cgsnapshot(ctxt,
self.cgsnapshot,
test_snapshots)
class FCSanLookupService(object):
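    """Fake FC SAN lookup service that returns a canned fabric mapping."""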
def get_device_mapping_from_network(self, initiator_list,
target_list):
return fake_fabric_mapping
@ddt.ddt
class HuaweiFCDriverTestCase(HuaweiTestBase):
def setUp(self):
super(HuaweiFCDriverTestCase, self).setUp()
self.configuration = mock.Mock(spec=conf.Configuration)
self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
self.huawei_conf = FakeHuaweiConf(self.configuration, 'FC')
self.configuration.hypermetro_devices = hypermetro_devices
driver = FakeFCStorage(configuration=self.configuration)
self.driver = driver
self.driver.do_setup()
self.driver.client.login()
def test_login_success(self):
device_id = self.driver.client.login()
self.assertEqual('210235G7J20000000000', device_id)
def test_create_volume_success(self):
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_volume_success(self, pool_data):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
def test_delete_snapshot_success(self):
self.driver.delete_snapshot(self.snapshot)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_create_volume_from_snapsuccess(self):
lun_info = self.driver.create_volume_from_snapshot(self.volume,
self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(huawei_driver.HuaweiFCDriver,
'initialize_connection',
return_value={"data": {'target_lun': 1}})
def test_initialize_connection_snapshot_success(self, mock_fc_init):
iscsi_properties = self.driver.initialize_connection_snapshot(
self.snapshot, FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
mock_fc_init.assert_called_with(volume, FakeConnector)
def test_initialize_connection_success(self):
iscsi_properties = self.driver.initialize_connection(self.volume,
FakeConnector)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
def test_initialize_connection_fail_no_online_wwns_in_host(self):
self.mock_object(rest_client.RestClient, 'get_online_free_wwns',
return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_initialize_connection_no_local_ini_tgt_map(self):
self.mock_object(rest_client.RestClient, 'get_init_targ_map',
return_value=('', ''))
self.mock_object(huawei_driver.HuaweiFCDriver, '_get_same_hostid',
return_value='')
self.mock_object(rest_client.RestClient, 'change_hostlun_id',
return_value=None)
self.mock_object(rest_client.RestClient, 'do_mapping',
return_value={'lun_id': '1',
'view_id': '1',
'aval_luns': '[1]'})
self.driver.initialize_connection(self.hyper_volume, FakeConnector)
def test_hypermetro_connection_success(self):
self.mock_object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
fc_properties = self.driver.initialize_connection(self.hyper_volume,
FakeConnector)
self.assertEqual(1, fc_properties['data']['target_lun'])
@mock.patch.object(huawei_driver.HuaweiFCDriver,
'terminate_connection')
def test_terminate_connection_snapshot_success(self, mock_fc_term):
self.driver.terminate_connection_snapshot(self.snapshot,
FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
mock_fc_term.assert_called_with(volume, FakeConnector)
def test_terminate_connection_success(self):
self.driver.client.terminateFlag = True
self.driver.terminate_connection(self.volume, FakeConnector)
self.assertTrue(self.driver.client.terminateFlag)
def test_terminate_connection_portgroup_associated(self):
self.mock_object(rest_client.RestClient,
'is_portgroup_associated_to_view',
return_value=True)
self.mock_object(huawei_driver.HuaweiFCDriver,
'_delete_zone_and_remove_fc_initiators',
return_value=({}, 1))
self.driver.terminate_connection(self.volume, FakeConnector)
def test_terminate_connection_fc_initiators_exist_in_host(self):
self.mock_object(rest_client.RestClient,
'check_fc_initiators_exist_in_host',
return_value=True)
self.driver.terminate_connection(self.volume, FakeConnector)
def test_terminate_connection_hypermetro_in_metadata(self):
self.driver.terminate_connection(self.hyper_volume, FakeConnector)
def test_get_volume_status(self):
remote_device_info = {"ARRAYTYPE": "1",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"}
self.mock_object(
replication.ReplicaPairManager,
'get_remote_device_by_wwn',
return_value=remote_device_info)
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
self.assertTrue(data['pools'][0]['replication_enabled'])
self.assertListEqual(['sync', 'async'],
data['pools'][0]['replication_type'])
self.mock_object(
replication.ReplicaPairManager,
'get_remote_device_by_wwn',
return_value={})
data = self.driver.get_volume_stats()
self.assertNotIn('replication_enabled', data['pools'][0])
self.mock_object(
replication.ReplicaPairManager,
'try_get_remote_wwn',
return_value={})
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
self.assertNotIn('replication_enabled', data['pools'][0])
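    # disk_type is derived from the pool tier capacities: a single nonzero
    # tier maps to ssd/sas/nl_sas, several nonzero tiers map to 'mix', and
    # all-zero tiers leave disk_type unset.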
@ddt.data({'TIER0CAPACITY': '100',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0',
'disktype': 'ssd'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '100',
'TIER2CAPACITY': '0',
'disktype': 'sas'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '100',
'disktype': 'nl_sas'},
{'TIER0CAPACITY': '100',
'TIER1CAPACITY': '100',
'TIER2CAPACITY': '100',
'disktype': 'mix'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0',
'disktype': ''})
def test_get_volume_disk_type(self, disk_type_value):
response_dict = json.loads(FAKE_STORAGE_POOL_RESPONSE)
storage_pool_sas = copy.deepcopy(response_dict)
storage_pool_sas['data'][0]['TIER0CAPACITY'] = (
disk_type_value['TIER0CAPACITY'])
storage_pool_sas['data'][0]['TIER1CAPACITY'] = (
disk_type_value['TIER1CAPACITY'])
storage_pool_sas['data'][0]['TIER2CAPACITY'] = (
disk_type_value['TIER2CAPACITY'])
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.replica = None
self.mock_object(rest_client.RestClient, 'get_all_pools',
return_value=storage_pool_sas['data'])
data = driver.get_volume_stats()
if disk_type_value['disktype']:
self.assertEqual(disk_type_value['disktype'],
data['pools'][0]['disk_type'])
else:
self.assertIsNone(data['pools'][0].get('disk_type'))
def test_get_disk_type_pool_info_none(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.replica = None
self.mock_object(rest_client.RestClient, 'get_pool_info',
return_value=None)
data = driver.get_volume_stats()
self.assertIsNone(data['pools'][0].get('disk_type'))
def test_extend_volume(self):
self.driver.extend_volume(self.volume, 3)
def test_login_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_create_snapshot_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot, self.snapshot)
def test_create_volume_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
def test_delete_volume_fail(self):
self.driver.client.test_fail = True
self.driver.delete_volume(self.volume)
def test_delete_snapshot_fail(self):
self.driver.client.test_fail = True
self.driver.delete_snapshot(self.snapshot)
def test_initialize_connection_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_lun_is_associated_to_lungroup(self):
self.driver.client.associate_lun_to_lungroup('11', '11')
result = self.driver.client._is_lun_associated_to_lungroup('11',
'11')
self.assertTrue(result)
def test_lun_is_not_associated_to_lun_group(self):
self.driver.client.associate_lun_to_lungroup('12', '12')
self.driver.client.remove_lun_from_lungroup('12', '12')
result = self.driver.client._is_lun_associated_to_lungroup('12',
'12')
self.assertFalse(result)
@unittest.skip("Skip until bug #1578986 is fixed")
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client, 'RestClient')
    def test_migrate_volume_success(self, pool_data, mock_rest_client):
# Migrate volume without new type.
empty_dict = {}
self.driver.support_func = pool_data
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
# Migrate volume with new type.
empty_dict = {}
new_type = {'extra_specs':
{'smarttier': '<is> true',
'smartcache': '<is> true',
'smartpartition': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'smartpartition:partitionname': 'partition-test'}}
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
new_type)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
def test_migrate_volume_fail(self):
self.driver.client.test_fail = True
# Migrate volume without new type.
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume, None,
self.volume, test_host, None)
# Migrate volume with new type.
new_type = {'extra_specs':
{'smarttier': '<is> true',
'smartcache': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'partitionname': 'partition-test'}}
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume, None,
self.volume, test_host, new_type)
def test_check_migration_valid(self):
is_valid = self.driver._check_migration_valid(test_host,
self.volume)
self.assertTrue(is_valid)
# No pool_name in capabilities.
invalid_host1 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000000',
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host1,
self.volume)
self.assertFalse(is_valid)
# location_info in capabilities is not matched.
invalid_host2 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
self.volume)
self.assertFalse(is_valid)
# storage_protocol is not match current protocol and volume status is
# 'in-use'.
volume_in_use = {'name': 'volume-21ec7341-9256-497b-97d9-ef48edcf0635',
'size': 2,
'volume_name': 'vol1',
'id': ID,
'volume_id': '21ec7341-9256-497b-97d9-ef48edcf0635',
'volume_attachment': 'in-use',
'provider_location': '11'}
invalid_host2 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
volume_in_use)
self.assertFalse(is_valid)
# pool_name is empty.
invalid_host3 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': '',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host3,
self.volume)
self.assertFalse(is_valid)
@mock.patch.object(rest_client.RestClient, 'rename_lun')
def test_update_migrated_volume_success(self, mock_rename_lun):
model_update = self.driver.update_migrated_volume(None,
self.original_volume,
self.current_volume,
'available')
self.assertEqual({'_name_id': None}, model_update)
@mock.patch.object(rest_client.RestClient, 'rename_lun')
def test_update_migrated_volume_fail(self, mock_rename_lun):
mock_rename_lun.side_effect = exception.VolumeBackendAPIException(
data='Error occurred.')
model_update = self.driver.update_migrated_volume(None,
self.original_volume,
self.current_volume,
'available')
self.assertEqual(self.current_volume.name_id,
model_update['_name_id'])
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
def test_retype_volume_success(self, mock_add_lun_to_partition):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
retype = self.driver.retype(None, self.volume,
test_new_type, None, test_host)
self.assertTrue(retype)
@unittest.skip("Skip until bug #1578986 is fixed")
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client, 'RestClient')
@mock.patch.object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
    def test_retype_replication_volume_success(self, pool_data,
                                               mock_get_type,
                                               mock_rest_client):
self.driver.support_func = pool_data
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertTrue(retype)
@ddt.data(
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
)
@ddt.unpack
def test_retype_replication_volume_fail(self,
mock_module,
mock_func,
side_effect,
pool_data):
self.driver.support_func = pool_data
self.mock_object(mock_module, mock_func, side_effect=side_effect)
self.mock_object(rest_client.RestClient, 'add_lun_to_partition')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertFalse(retype)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_retype_volume_cache_fail(self, pool_data):
self.driver.client.cache_not_exist = True
self.driver.support_func = pool_data
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.retype, None,
self.volume, test_new_type, None, test_host)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_retype_volume_partition_fail(self, pool_data):
self.driver.support_func = pool_data
self.driver.client.partition_not_exist = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.retype, None,
self.volume, test_new_type, None, test_host)
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
def test_retype_volume_fail(self, mock_add_lun_to_partition):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
mock_add_lun_to_partition.side_effect = (
exception.VolumeBackendAPIException(data='Error occurred.'))
retype = self.driver.retype(None, self.volume,
test_new_type, None, test_host)
self.assertFalse(retype)
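    # The zone helper tests below build initiator-target maps from canned
    # engine and port data served by the fake client.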
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A","0B"]', 'ID': '0'}])
    def test_build_ini_targ_map_engine_recorded(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
target_port_wwns = ['2000643e8c4c5f66']
self.assertEqual(target_port_wwns, tgt_wwns)
self.assertEqual({}, init_targ_map)
@ddt.data(fake_fabric_mapping_no_ports, fake_fabric_mapping_no_wwn)
def test_filter_by_fabric_fail(self, ddt_map):
self.mock_object(
FCSanLookupService, 'get_device_mapping_from_network',
return_value=ddt_map)
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
self.assertRaises(exception.VolumeBackendAPIException,
zone_helper._filter_by_fabric, ['10000090fa0d6754'],
None)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A"]', 'ID': '0'},
{'NODELIST': '["0B"]', 'ID': '1'}])
@mock.patch.object(fc_zone_helper.FCZoneHelper, '_build_contr_port_map',
return_value={'0B': ['2000643e8c4c5f67']})
    def test_build_ini_targ_map_engine_not_recorded(self, mock_port_map,
                                                    mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
expected_wwns = ['2000643e8c4c5f67', '2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': expected_wwns}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
def test_build_ini_targ_map_no_map(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
@mock.patch.object(rest_client.RestClient, 'get_tgt_port_group',
return_value='0')
@mock.patch.object(rest_client.RestClient, 'delete_portgroup')
    def test_build_ini_targ_map_exist_portg(self, delete, portg, engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
self.assertEqual(1, delete.call_count)
def test_get_init_targ_map(self):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.get_init_targ_map(
['10000090fa0d6754'], '1')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
def test_get_init_targ_map_no_host(self):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
ret = zone_helper.get_init_targ_map(
['10000090fa0d6754'], None)
expected_ret = ([], None, {})
self.assertEqual(expected_ret, ret)
def test_multi_resturls_success(self):
self.driver.client.test_multi_url_flag = True
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
def test_get_id_from_result(self):
result = {}
name = 'test_name'
key = 'NAME'
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': {}}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'COUNT': 1, 'ID': '1'},
{'COUNT': 2, 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'NAME': 'test_name1', 'ID': '1'},
{'NAME': 'test_name2', 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'NAME': 'test_name', 'ID': '1'},
{'NAME': 'test_name2', 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertEqual('1', re)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value={'ID': 1,
'CAPACITY': 110362624,
'TOTALCAPACITY': 209715200})
def test_get_capacity(self, mock_get_pool_info):
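        # The array reports capacity in 512-byte sectors, so the expected
        # values below follow from the mocked pool info:
        # 209715200 / (2 * 1024 * 1024) = 100.0 GB total and
        # 110362624 / (2 * 1024 * 1024) = 52.625 GB free.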
expected_pool_capacity = {'total_capacity': 100.0,
'free_capacity': 52.625}
pool_capacity = self.driver.client._get_capacity(None,
None)
self.assertEqual(expected_pool_capacity, pool_capacity)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(hypermetro.HuaweiHyperMetro,
'_create_hypermetro_pair',
return_value={"ID": '11',
"NAME": 'hypermetro-pair'})
@mock.patch.object(rest_client.RestClient, 'logout',
return_value=None)
    def test_create_hypermetro_success(self, mock_logout,
                                       mock_hypermetro_pair,
                                       mock_volume_ready,
                                       mock_hyper_domain,
                                       mock_pool_info,
                                       mock_all_pool_info,
                                       mock_hypermetro_opts):
        # mock.patch decorators are applied bottom-up, so the first mock
        # argument corresponds to the innermost (lowest) decorator.
metadata = {"hypermetro_id": '11',
"remote_lun_id": '1'}
lun_info = self.driver.create_volume(self.hyper_volume)
self.assertEqual(metadata, lun_info['metadata'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'create_hypermetro')
    def test_create_hypermetro_fail(self,
                                    pool_data,
                                    mock_pair_info,
                                    mock_volume_ready,
                                    mock_hyper_domain,
                                    mock_pool_info,
                                    mock_all_pool_info,
                                    mock_hypermetro_opts):
self.driver.support_func = pool_data
mock_pair_info.side_effect = (
exception.VolumeBackendAPIException(data='Error occurred.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.create_hypermetro, "11", {})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'do_mapping',
return_value={'lun_id': '1',
'view_id': '1',
'aval_luns': '[1]'})
def test_hypermetro_connection_success_2(self, mock_map, mock_metadata):
fc_properties = self.driver.metro.connect_volume_fc(self.volume,
FakeConnector)
self.assertEqual(1, fc_properties['data']['target_lun'])
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
    def test_terminate_hypermetro_connection_success(self, mock_metadata):
self.driver.metro.disconnect_volume_fc(self.volume, FakeConnector)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': None})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None)
    def test_hypermetroid_none_fail(self, mock_lun_id_by_name, mock_metadata):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_volume_ready_success(self):
flag = self.driver.metro._wait_volume_ready("11")
self.assertIsNone(flag)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'get_online_free_wwns',
return_value=[])
@mock.patch.object(rest_client.RestClient, 'get_host_iscsi_initiators',
return_value=[])
    def test_hypermetro_connection_fail(self, mock_iscsi_initiators,
                                        mock_free_wwns,
                                        mock_metadata):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
def test_create_snapshot_fail_hypermetro(self):
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': replica_hypermetro_specs})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.volume, self.snapshot)
def test_create_snapshot_fail_no_snapshot_id(self):
self.snapshot.provider_location = None
self.mock_object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value=None)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.volume, self.snapshot)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": [{"RUNNINGSTATUS": "27",
"ID": '1'},
{"RUNNINGSTATUS": "26",
"ID": '2'}],
"error": {"code": 0}})
def test_get_online_free_wwns(self, mock_call):
wwns = self.driver.client.get_online_free_wwns()
self.assertEqual(['1'], wwns)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {"ID": 1}, "error": {"code": 0}})
def test_rename_lun(self, mock_call):
des = 'This LUN is renamed.'
new_name = 'test_name'
self.driver.client.rename_lun('1', new_name, des)
self.assertEqual(1, mock_call.call_count)
url = "/lun/1"
data = {"NAME": new_name, "DESCRIPTION": des}
mock_call.assert_called_once_with(url, data, "PUT")
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {}})
def test_is_host_associated_to_hostgroup_no_data(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertFalse(res)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {'ISADD2HOSTGROUP': 'true'}})
def test_is_host_associated_to_hostgroup_true(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertTrue(res)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {'ISADD2HOSTGROUP': 'false'}})
def test_is_host_associated_to_hostgroup_false(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertFalse(res)
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_create_hypermetro_consistencygroup_success(self, mock_grouptype):
"""Test that create_consistencygroup return successfully."""
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
        self.assertEqual('available',
                         model_update['status'],
                         "Consistency Group creation failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
def test_create_normal_consistencygroup_success(self,
mock_grouptype):
"""Test that create_consistencygroup return successfully."""
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
        self.assertEqual('available',
                         model_update['status'],
                         "Consistency Group creation failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_delete_hypermetro_consistencygroup_success(self, mock_grouptype):
"""Test that create_consistencygroup return successfully."""
test_volumes = [self.volume]
ctxt = context.get_admin_context()
        # Delete consistency group
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
        self.assertEqual('available',
                         model['status'],
                         "Consistency Group deletion failed")
def test_delete_normal_consistencygroup_success(self):
ctxt = context.get_admin_context()
test_volumes = [self.volume]
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
        self.assertEqual('available',
                         model['status'],
                         "Consistency Group deletion failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '59'})
def test_update_consistencygroup_success(self,
mock_grouptype,
mock_metadata):
"""Test that create_consistencygroup return successfully."""
ctxt = context.get_admin_context()
add_volumes = [self.volume]
remove_volumes = [self.volume]
        # Update consistency group
model_update = self.driver.update_consistencygroup(ctxt,
self.cg,
add_volumes,
remove_volumes)
self.assertEqual('available',
model_update[0]['status'],
"Consistency Group update failed")
def test_create_hypermetro_consistencygroup_success_2(self):
ctxt = context.get_admin_context()
# Create consistency group
temp_cg = copy.deepcopy(self.cg)
temp_cg['volume_type_id'] = '550c089b-bfdd-4f7f-86e1-3ba88125555c,'
self.mock_object(volume_types, 'get_volume_type',
return_value=test_hypermetro_type)
model_update = self.driver.create_consistencygroup(ctxt, temp_cg)
        self.assertEqual('available',
                         model_update['status'],
                         "Consistency Group creation failed")
def test_is_initiator_associated_to_host_raise(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.is_initiator_associated_to_host,
'ini-2', '1')
def test_is_initiator_associated_to_host_true(self):
ret = self.driver.client.is_initiator_associated_to_host('ini-1', '1')
self.assertFalse(ret)
ret = self.driver.client.is_initiator_associated_to_host('ini-2', '2')
self.assertTrue(ret)
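# The HuaweiConf tests below exercise the XML configuration parser against a
# temporary cinder_huawei_conf.xml generated by _create_fake_conf_file().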
class HuaweiConfTestCase(test.TestCase):
def setUp(self):
super(HuaweiConfTestCase, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.fake_xml_file = self.tmp_dir + '/cinder_huawei_conf.xml'
self.conf = mock.Mock()
self.conf.cinder_huawei_conf_file = self.fake_xml_file
self.huawei_conf = huawei_conf.HuaweiConf(self.conf)
def _create_fake_conf_file(self):
"""Create a fake Config file.
Huawei storage customize a XML configuration file, the configuration
file is used to set the Huawei storage custom parameters, therefore,
in the UT test we need to simulate such a configuration file.
"""
doc = minidom.Document()
config = doc.createElement('config')
doc.appendChild(config)
storage = doc.createElement('Storage')
config.appendChild(storage)
url = doc.createElement('RestURL')
url_text = doc.createTextNode('http://192.0.2.69:8082/'
'deviceManager/rest/')
url.appendChild(url_text)
storage.appendChild(url)
username = doc.createElement('UserName')
username_text = doc.createTextNode('admin')
username.appendChild(username_text)
storage.appendChild(username)
password = doc.createElement('UserPassword')
password_text = doc.createTextNode('Admin@storage')
password.appendChild(password_text)
storage.appendChild(password)
product = doc.createElement('Product')
product_text = doc.createTextNode('V3')
product.appendChild(product_text)
storage.appendChild(product)
protocol = doc.createElement('Protocol')
protocol_text = doc.createTextNode('iSCSI')
protocol.appendChild(protocol_text)
storage.appendChild(protocol)
lun = doc.createElement('LUN')
config.appendChild(lun)
luntype = doc.createElement('LUNType')
luntype_text = doc.createTextNode('Thick')
luntype.appendChild(luntype_text)
lun.appendChild(luntype)
lun_ready_wait_interval = doc.createElement('LUNReadyWaitInterval')
lun_ready_wait_interval_text = doc.createTextNode('2')
lun_ready_wait_interval.appendChild(lun_ready_wait_interval_text)
lun.appendChild(lun_ready_wait_interval)
lun_copy_wait_interval = doc.createElement('LUNcopyWaitInterval')
lun_copy_wait_interval_text = doc.createTextNode('2')
lun_copy_wait_interval.appendChild(lun_copy_wait_interval_text)
lun.appendChild(lun_copy_wait_interval)
timeout = doc.createElement('Timeout')
timeout_text = doc.createTextNode('43200')
timeout.appendChild(timeout_text)
lun.appendChild(timeout)
write_type = doc.createElement('WriteType')
write_type_text = doc.createTextNode('1')
write_type.appendChild(write_type_text)
lun.appendChild(write_type)
mirror_switch = doc.createElement('MirrorSwitch')
mirror_switch_text = doc.createTextNode('1')
mirror_switch.appendChild(mirror_switch_text)
lun.appendChild(mirror_switch)
prefetch = doc.createElement('Prefetch')
prefetch.setAttribute('Type', '1')
prefetch.setAttribute('Value', '0')
lun.appendChild(prefetch)
pool = doc.createElement('StoragePool')
pool_text = doc.createTextNode('OpenStack_Pool')
pool.appendChild(pool_text)
lun.appendChild(pool)
iscsi = doc.createElement('iSCSI')
config.appendChild(iscsi)
defaulttargetip = doc.createElement('DefaultTargetIP')
defaulttargetip_text = doc.createTextNode('192.0.2.68')
defaulttargetip.appendChild(defaulttargetip_text)
iscsi.appendChild(defaulttargetip)
initiator = doc.createElement('Initiator')
initiator.setAttribute('Name', 'iqn.1993-08.debian:01:ec2bff7ac3a3')
initiator.setAttribute('TargetIP', '192.0.2.2')
initiator.setAttribute('CHAPinfo', 'mm-user;mm-user@storage')
initiator.setAttribute('ALUA', '1')
initiator.setAttribute('TargetPortGroup', 'PortGroup001')
iscsi.appendChild(initiator)
        with open(self.conf.cinder_huawei_conf_file, 'w') as fakefile:
            fakefile.write(doc.toprettyxml(indent=''))
import collections
import copy
import ddt
import json
import mock
import re
import tempfile
import unittest
from xml.dom import minidom
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests.unit.consistencygroup import fake_cgsnapshot
from cinder.tests.unit.consistencygroup import fake_consistencygroup
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit import utils
from cinder.volume import configuration as conf
from cinder.volume.drivers.huawei import constants
from cinder.volume.drivers.huawei import fc_zone_helper
from cinder.volume.drivers.huawei import huawei_conf
from cinder.volume.drivers.huawei import huawei_driver
from cinder.volume.drivers.huawei import huawei_utils
from cinder.volume.drivers.huawei import hypermetro
from cinder.volume.drivers.huawei import replication
from cinder.volume.drivers.huawei import rest_client
from cinder.volume.drivers.huawei import smartx
from cinder.volume import qos_specs
from cinder.volume import volume_types
admin_contex = context.get_admin_context()
vol_attrs = ('id', 'lun_type', 'provider_location', 'metadata')
Volume = collections.namedtuple('Volume', vol_attrs)
PROVIDER_LOCATION = '11'
HOST = 'ubuntu001@backend001#OpenStack_Pool'
ID = '21ec7341-9256-497b-97d9-ef48edcf0635'
ENCODE_NAME = huawei_utils.encode_name(ID)
ADMIN_METADATA = {'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'}
TEST_PAIR_ID = "3400a30d844d0004"
REPLICA_DRIVER_DATA = '{"pair_id": "%s", "rmt_lun_id": "1"}' % TEST_PAIR_ID
VOL_METADATA = [{'key': 'hypermetro_id', 'value': '11'},
{'key': 'remote_lun_id', 'value': '1'}]
hypermetro_devices = """{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName": "admin",
"UserPassword": "Admin@storage1",
"StoragePool": "OpenStack_Pool",
"domain_name": "hypermetro-domain",
"remote_target_ip": "192.0.2.241"
}
}
"""
fake_smartx_value = {'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': False,
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test',
}
fake_hypermetro_opts = {'hypermetro': 'true',
'smarttier': False,
'smartcache': False,
'smartpartition': False,
'thin_provisioning_support': False,
'thick_provisioning_support': False,
}
sync_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> sync'}
async_replica_specs = {'replication_enabled': '<is> True',
'replication_type': '<in> async'}
replica_hypermetro_specs = {'hypermetro': '<is> True',
'replication_enabled': '<is> True'}
test_host = {'host': 'ubuntu001@backend001#OpenStack_Pool',
'capabilities': {'smartcache': True,
'location_info': '210235G7J20000000000',
'QoS_support': True,
'pool_name': 'OpenStack_Pool',
'timestamp': '2015-07-13T11:41:00.513549',
'smartpartition': True,
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'free_capacity_gb': 20.0,
'driver_version': '1.1.0',
'total_capacity_gb': 20.0,
'smarttier': True,
'hypermetro': True,
'reserved_percentage': 0,
'vendor_name': None,
'thick_provisioning_support': False,
'thin_provisioning_support': True,
'storage_protocol': 'FC',
}
}
test_new_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'smarttier': '<is> true',
'smartcache': '<is> true',
'smartpartition': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'smartpartition:partitionname': 'partition-test',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_new_replication_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'replication_enabled': '<is> True',
'replication_type': '<in> sync',
},
'is_public': True,
'deleted_at': None,
'id': u'530a56e1-a1a4-49f3-ab6c-779a6e5d999f',
'description': None,
}
test_hypermetro_type = {
'name': u'new_type',
'qos_specs_id': None,
'deleted': False,
'created_at': None,
'updated_at': None,
'extra_specs': {
'hypermetro': '<is> True'
},
'is_public': True,
'deleted_at': None,
'id': u'550c089b-bfdd-4f7f-86e1-3ba88125555c',
'description': None,
}
hypermetro_devices = """
{
"remote_device": {
"RestURL": "http://192.0.2.69:8082/deviceManager/rest",
"UserName":"admin",
"UserPassword":"Admin@storage2",
"StoragePool":"OpenStack_Pool",
"domain_name":"hypermetro_test"}
}
"""
FAKE_FIND_POOL_RESPONSE = {'CAPACITY': '985661440',
'ID': '0',
'TOTALCAPACITY': '985661440'}
FAKE_CREATE_VOLUME_RESPONSE = {"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": '6643e8c1004c5f6723e9f454003'}
FakeConnector = {'initiator': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'multipath': False,
'wwpns': ['10000090fa0d6754'],
'wwnns': ['10000090fa0d6755'],
'host': 'ubuntuc',
}
smarttier_opts = {'smarttier': 'true',
'smartpartition': False,
'smartcache': False,
'thin_provisioning_support': True,
'thick_provisioning_support': False,
'policy': '3',
'readcachepolicy': '1',
'writecachepolicy': None,
}
fake_fabric_mapping = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_ports = {
'swd1': {
'target_port_wwn_list': [],
'initiator_port_wwn_list': ['10000090fa0d6754']
}
}
fake_fabric_mapping_no_wwn = {
'swd1': {
'target_port_wwn_list': ['2000643e8c4c5f66'],
'initiator_port_wwn_list': []
}
}
CHANGE_OPTS = {'policy': ('1', '2'),
'partitionid': (['1', 'partition001'], ['2', 'partition002']),
'cacheid': (['1', 'cache001'], ['2', 'cache002']),
'qos': (['11', {'MAXIOPS': '100', 'IOType': '1'}],
{'MAXIOPS': '100', 'IOType': '2',
'MIN': 1, 'LATENCY': 1}),
'host': ('ubuntu@huawei#OpenStack_Pool',
'ubuntu@huawei#OpenStack_Pool'),
'LUNType': ('0', '1'),
}
FAKE_CREATE_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1"}
}
"""
FAKE_GET_HOST_RESPONSE = """
{
"error": {
"code": 0
},
"data":{"NAME": "ubuntuc001",
"ID": "1",
"ISADD2HOSTGROUP": "true"}
}
"""
FAKE_COMMON_SUCCESS_RESPONSE = """
{
"error": {
"code": 0,
"description": "None"
},
"data":{}
}
"""
FAKE_COMMON_FAIL_RESPONSE = """
{
"error": {
"code": 50331651,
"description": "An error occurs to the parameter."
},
"data":{}
}
"""
FAKE_GET_LOGIN_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": 2
}
}
"""
FAKE_LOGIN_OUT_STORAGE_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11
}
}
"""
FAKE_STORAGE_POOL_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"USERFREECAPACITY": "985661440",
"ID": "0",
"NAME": "OpenStack_Pool",
"USERTOTALCAPACITY": "985661440",
"TIER0CAPACITY": "100",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "0"
}]
}
"""
FAKE_LUN_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "1",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"WWN": "6643e8c1004c5f6723e9f454003",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"ALLOCTYPE": "1",
"CAPACITY": "2097152"
}
}
"""
FAKE_POOLS_UNSUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': False,
'smartcache': False,
'thick_provisioning_support': False,
'splitmirror': False,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': False,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': False
}
FAKE_POOLS_SUPPORT_REPORT = {
'pool_name': 'StoragePool',
'location_info': '2102350BVB10F2000020',
'QoS_support': True,
'smartcache': True,
'thick_provisioning_support': True,
'splitmirror': True,
'allocated_capacity_gb': 7,
'thin_provisioning_support': True,
'free_capacity_gb': 400.0,
'smartpartition': True,
'total_capacity_gb': 400.0,
'reserved_percentage': 0,
'max_over_subscription_ratio': 20.0,
'luncopy': True,
'hypermetro': True,
'consistencygroup_support': True
}
FAKE_LUN_GET_SUCCESS_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": "11",
"IOCLASSID": "11",
"NAME": "5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION": "21ec7341-9256-497b-97d9-ef48edcf0635",
"RUNNINGSTATUS": "10",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "27",
"LUNLIST": "",
"ALLOCTYPE": "1",
"CAPACITY": "2097152",
"WRITEPOLICY": "1",
"MIRRORPOLICY": "0",
"PREFETCHPOLICY": "1",
"PREFETCHVALUE": "20",
"DATATRANSFERPOLICY": "1",
"READCACHEPOLICY": "2",
"WRITECACHEPOLICY": "5",
"OWNINGCONTROLLER": "0B",
"SMARTCACHEPARTITIONID": "",
"CACHEPARTITIONID": "",
"WWN": "6643e8c1004c5f6723e9f454003",
"PARENTNAME": "OpenStack_Pool"
}
}
"""
FAKE_QUERY_ALL_LUN_RESPONSE = {
"error": {
"code": 0
},
"data": [{
"ID": "1",
"NAME": ENCODE_NAME
}]
}
FAKE_LUN_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"11"
}]
}
"""
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE = """
{
"error": {
"code":0
},
"data":[{
"NAME":"OpenStack_LunGroup_1",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}]
}
"""
FAKE_QUERY_LUN_GROUP_RESPONSE = """
{
"error": {
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"DESCRIPTION":"5mFHcBv4RkCcD+JyrWc0SA",
"ID":"11",
"TYPE":256
}
}
"""
FAKE_LUN_COUNT_RESPONSE = """
{
"data":{
"COUNT":"0"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_SNAPSHOT_LIST_INFO_RESPONSE = {
"error": {
"code": 0,
"description": "0"
},
"data": [{
"ID": 11,
"NAME": ENCODE_NAME
}, ]
}
FAKE_CREATE_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
FAKE_GET_SNAPSHOT_INFO_RESPONSE = """
{
"error": {
"code": 0,
"description": "0"
},
"data": {
"ID": 11,
"NAME": "YheUoRwbSX2BxN7"
}
}
"""
FAKE_SNAPSHOT_COUNT_RESPONSE = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_ISCSI_INFO_RESPONSE = """
{
"data": [{
"ETHPORTID": "139267",
"ID": "0+iqn.oceanstor:21004846fb8ca15f::22004:192.0.2.1,t,0x2005",
"TPGT": "8197",
"TYPE": 249
},
{
"ETHPORTID": "139268",
"ID": "1+iqn.oceanstor:21004846fb8ca15f::22003:192.0.2.2,t,0x2004",
"TPGT": "8196",
"TYPE": 249
}
],
"error": {
"code": 0,
"description": "0"
}
}
"""
FAKE_GET_ETH_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.2",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P0",
"MTU": "1500",
"PARENTID": "1.5"
},
{
"PARENTTYPE": 209,
"MACADDRESS": "00:22:a1:0a:79:57",
"ETHNEGOTIATE": "-1",
"ERRORPACKETS": "0",
"IPV4ADDR": "192.0.2.1",
"IPV6GATEWAY": "",
"IPV6MASK": "0",
"OVERFLOWEDPACKETS": "0",
"ISCSINAME": "P0",
"HEALTHSTATUS": "1",
"ETHDUPLEX": "2",
"ID": "16909568",
"LOSTPACKETS": "0",
"TYPE": 213,
"NAME": "P0",
"INIORTGT": "4",
"RUNNINGSTATUS": "10",
"IPV4GATEWAY": "",
"BONDNAME": "",
"STARTTIME": "1371684218",
"SPEED": "1000",
"ISCSITCPPORT": "0",
"IPV4MASK": "255.255.0.0",
"IPV6ADDR": "",
"LOGICTYPE": "0",
"LOCATION": "ENG0.A5.P3",
"MTU": "1500",
"PARENTID": "1.5"
}]
}
"""
FAKE_GET_ETH_ASSOCIATE_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"IPV4ADDR": "192.0.2.1",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
},
{
"IPV4ADDR": "192.0.2.2",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"
}
]
}
"""
FAKE_GET_ISCSI_DEVICE_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"CMO_ISCSI_DEVICE_NAME": "iqn.2006-08.com.huawei:oceanstor:21000022a:"
}]
}
"""
FAKE_GET_ALL_HOST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"PARENTTYPE": 245,
"NAME": "ubuntuc",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "1",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
},
{
"PARENTTYPE": 245,
"NAME": "ubuntu",
"DESCRIPTION": "",
"RUNNINGSTATUS": "1",
"IP": "",
"PARENTNAME": "",
"OPERATIONSYSTEM": "0",
"LOCATION": "",
"HEALTHSTATUS": "1",
"MODEL": "",
"ID": "2",
"PARENTID": "",
"NETWORKNAME": "",
"TYPE": 21
}]
}
"""
FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
},
{"NAME":"OpenStack_HostGroup_1",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
]
}
"""
FAKE_GET_HOST_GROUP_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data":{
"NAME":"ubuntuc",
"DESCRIPTION":"",
"ID":"0",
"TYPE":14
}
}
"""
FAKE_GET_LUN_COPY_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": {
"COPYSTOPTIME": "-1",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "36",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "0",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "-1"
}
}
"""
FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE = """
{
"error": {
"code": 0
},
"data": [{
"COPYSTOPTIME": "1372209335",
"HEALTHSTATUS": "1",
"NAME": "w1PSNvu6RumcZMmSh4/l+Q==",
"RUNNINGSTATUS": "40",
"DESCRIPTION": "w1PSNvu6RumcZMmSh4/l+Q==",
"ID": "0",
"LUNCOPYTYPE": "1",
"COPYPROGRESS": "100",
"COPYSPEED": "2",
"TYPE": 219,
"COPYSTARTTIME": "1372209329"
}]
}
"""
FAKE_GET_MAPPING_VIEW_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"OpenStack_Mapping_View_1",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245
},
{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"YheUoRwbSX2BxN767nvLSw",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"2",
"INBANDLUNWWN": "",
"TYPE": 245
}]
}
"""
FAKE_GET_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"11",
"INBANDLUNWWN":"",
"TYPE": 245,
"AVAILABLEHOSTLUNIDLIST": ""
}]
}
"""
FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"WORKMODE":"255",
"HEALTHSTATUS":"1",
"NAME":"mOWtSXnaQKi3hpB3tdFRIQ",
"RUNNINGSTATUS":"27",
"DESCRIPTION":"",
"ENABLEINBANDCOMMAND":"true",
"ID":"1",
"INBANDLUNWWN":"",
"TYPE":245,
"AVAILABLEHOSTLUNIDLIST": "[1]"
}
}
"""
FAKE_FC_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6754",
"OPERATIONSYSTEM":"255",
"TYPE":223
},
{
"HEALTHSTATUS":"1",
"NAME":"",
"MULTIPATHTYPE":"1",
"ISFREE":"true",
"RUNNINGSTATUS":"27",
"ID":"10000090fa0d6755",
"OPERATIONSYSTEM":"255",
"TYPE":223
}]
}
"""
FAKE_ISCSI_INITIATOR_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"CHAPNAME":"mm-user",
"HEALTHSTATUS":"1",
"ID":"iqn.1993-08.org.debian:01:9073aba6c6f",
"ISFREE":"true",
"MULTIPATHTYPE":"1",
"NAME":"",
"OPERATIONSYSTEM":"255",
"RUNNINGSTATUS":"28",
"TYPE":222,
"USECHAP":"true"
},
{
"ISFREE":"true",
"ID":"ini-1"
},
{
"ISFREE":"false",
"ID":"ini-2",
"PARENTNAME":"Host2",
"PARENTID":"2"
}]
}
"""
FAKE_HOST_LINK_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"PARENTTYPE":21,
"TARGET_ID":"0000000000000000",
"INITIATOR_NODE_WWN":"20000090fa0d6754",
"INITIATOR_TYPE":"223",
"RUNNINGSTATUS":"27",
"PARENTNAME":"ubuntuc",
"INITIATOR_ID":"10000090fa0d6754",
"TARGET_PORT_WWN":"24000022a10a2a39",
"HEALTHSTATUS":"1",
"INITIATOR_PORT_WWN":"10000090fa0d6754",
"ID":"010000090fa0d675-0000000000110400",
"TARGET_NODE_WWN":"21000022a10a2a39",
"PARENTID":"1",
"CTRL_ID":"0",
"TYPE":255,
"TARGET_TYPE":"212"
}]
}
"""
FAKE_PORT_GROUP_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":11,
"NAME": "portgroup-test"
}]
}
"""
FAKE_ERROR_INFO_RESPONSE = """
{
"error":{
"code":31755596
}
}
"""
FAKE_ERROR_CONNECT_RESPONSE = """
{
"error":{
"code":-403
}
}
"""
FAKE_ERROR_LUN_INFO_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"IOCLASSID":"11",
"NAME":"5mFHcBv4RkCcD+JyrWc0SA",
"ALLOCTYPE": "0",
"DATATRANSFERPOLICY": "0",
"SMARTCACHEPARTITIONID": "0",
"CACHEPARTITIONID": "0"
}
}
"""
FAKE_GET_FC_INI_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"ID":"10000090fa0d6754",
"ISFREE":"true"
}]
}
"""
FAKE_SYSTEM_VERSION_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"wwn": "21003400a30d844d"
}
}
"""
FAKE_GET_LUN_MIGRATION_RESPONSE = """
{
"data":[{"ENDTIME":"1436816174",
"ID":"9",
"PARENTID":"11",
"PARENTNAME":"xmRBHMlVRruql5vwthpPXQ",
"PROCESS":"-1",
"RUNNINGSTATUS":"76",
"SPEED":"2",
"STARTTIME":"1436816111",
"TARGETLUNID":"1",
"TARGETLUNNAME":"4924891454902893639",
"TYPE":253,
"WORKMODE":"0"
}],
"error":{"code":0,
"description":"0"}
}
"""
FAKE_HYPERMETRODOMAIN_RESPONSE = """
{
"error":{
"code": 0
},
"data":[{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "0"
}]
}
"""
FAKE_HYPERMETRO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_QOS_INFO_RESPONSE = """
{
"error":{
"code": 0
},
"data":{
"ID": "11"
}
}
"""
FAKE_GET_FC_PORT_RESPONSE = """
{
"error":{
"code":0
},
"data":[{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f66",
"PARENTID":"0A.1",
"ID": "1114368",
"RUNSPEED": "16000"
},
{
"RUNNINGSTATUS":"10",
"WWN":"2000643e8c4c5f67",
"PARENTID":"0A.1",
"ID": "1114369",
"RUNSPEED": "16000"
}]
}
"""
FAKE_SMARTCACHEPARTITION_RESPONSE = """
{
"error":{
"code":0
},
"data":{
"ID":"11",
"NAME":"cache-name"
}
}
"""
FAKE_CONNECT_FC_RESPONSE = {
"driver_volume_type": 'fibre_channel',
"data": {
"target_wwn": ["10000090fa0d6754"],
"target_lun": "1",
"volume_id": ID
}
}
FAKE_METRO_INFO_RESPONSE = {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "42",
"HEALTHSTATUS": "0"
}
FAKE_METRO_INFO_NEW_RESPONSE = """{
"error": {
"code": 0
},
"data": {
"PRODUCTVERSION": "V100R001C10",
"ID": "11",
"NAME": "hypermetro_test",
"RUNNINGSTATUS": "1",
"HEALTHSTATUS": "1"
}
}
"""
FAKE_CREATE_METROROUP_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "3400a30d844d8002",
"ISEMPTY": "true",
"NAME": "6F7kdHZcQJ2zbzxHmBl4FQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
FAKE_GET_METROROUP_RESPONSE = {
"data": [{
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "true",
"NAME": huawei_utils.encode_name(ID),
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "41",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
}],
"error": {
"code": 0,
"description": "0"
},
}
FAKE_GET_METROROUP_ID_RESPONSE = """
{
"data": {
"DESCRIPTION": "",
"DOMAINID": "643e8c4c5f670100",
"DOMAINNAME": "hypermetro-domain",
"HEALTHSTATUS": "1",
"ID": "11",
"ISEMPTY": "false",
"NAME": "IexzQZJWSXuX2e9I7c8GNQ",
"PRIORITYSTATIONTYPE": "0",
"RECOVERYPOLICY": "1",
"RESOURCETYPE": "11",
"RUNNINGSTATUS": "1",
"SPEED": "2",
"SYNCDIRECTION": "1",
"TYPE": 15364
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE = {}
MAP_COMMAND_TO_FAKE_RESPONSE['/xx/sessions'] = (
FAKE_GET_LOGIN_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/sessions'] = (
FAKE_LOGIN_OUT_STORAGE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/POST'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION?range=[0-256]/GET'] = (
FAKE_GET_LUN_MIGRATION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUN_MIGRATION/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/storagepool'] = (
FAKE_STORAGE_POOL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun'] = (
FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/1/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun?filter=NAME::%s/GET' % ENCODE_NAME] = (
json.dumps(FAKE_QUERY_ALL_LUN_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=12/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?ID=1&TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=0/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate?TYPE=11&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition?ID=1'
'&ASSOCIATEOBJTYPE=11&ASSOCIATEOBJID=11'
'/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=21'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/associate?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup?range=[0-8191]/GET'] = (
FAKE_QUERY_LUN_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup'] = (
FAKE_QUERY_LUN_GROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate'] = (
FAKE_QUERY_LUN_GROUP_ASSOCIAT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNGroup/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=11&ASSOCIATEOBJTYPE=27'
'&ASSOCIATEOBJID=11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=1/GET'] = (
FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/count?TYPE=27&ASSOCIATEOBJTYPE=256'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_SNAPSHOT_COUNT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?TYPE=256&ASSOCIATEOBJTYPE=27'
'&ASSOCIATEOBJID=11/GET'] = (
FAKE_LUN_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/expand/PUT'] = (
FAKE_LUN_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate?ID=12&ASSOCIATEOBJTYPE=11'
'&ASSOCIATEOBJID=12/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot'] = (
FAKE_CREATE_SNAPSHOT_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/GET'] = (
FAKE_GET_SNAPSHOT_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/activate'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/stop/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/snapshot?filter=NAME::%s/GET' % ENCODE_NAME] = (
json.dumps(FAKE_SNAPSHOT_LIST_INFO_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/GET'] = (
FAKE_LUN_GET_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/active/11/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/'] = (
FAKE_QOS_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/ioclass/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_tgt_port/GET'] = (
FAKE_GET_ISCSI_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/GET'] = (
FAKE_GET_ETH_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/eth_port/associate?TYPE=213&ASSOCIATEOBJTYPE'
'=257&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_ETH_ASSOCIATE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsidevicename'] = (
FAKE_GET_ISCSI_DEVICE_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?range=[0-256]/GET'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/POST'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/PUT'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator?PARENTTYPE=21&PARENTID'
'=1/GET'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/remove_iscsi_from_host/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/iscsi_initiator/'
'iqn.1993-08.debian:01:ec2bff7ac3a3/PUT'] = (
FAKE_ISCSI_INITIATOR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host?range=[0-65535]/GET'] = (
FAKE_GET_ALL_HOST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/1/GET'] = (
FAKE_GET_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host'] = (
FAKE_CREATE_HOST_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup?range=[0-8191]/GET'] = (
FAKE_GET_ALL_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup'] = (
FAKE_GET_HOST_GROUP_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
'&ASSOCIATEOBJTYPE=21&ASSOCIATEOBJID=1'
'/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=14&ID=0'
'&ASSOCIATEOBJID=0/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host/associate?TYPE=21&'
'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/0/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hostgroup/associate'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/luncopy'] = (
FAKE_GET_LUN_COPY_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY?range=[0-1023]/GET'] = (
FAKE_GET_LUN_COPY_LIST_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/start/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/LUNCOPY/0/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview?range=[0-8191]/GET'] = (
FAKE_GET_MAPPING_VIEW_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/PUT'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/1/GET'] = (
FAKE_GET_SPEC_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/REMOVE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/lungroup?TYPE=256&'
'ASSOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=14&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=256&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate?TYPE=245&'
'ASSOCIATEOBJTYPE=257&ASSOCIATEOBJID=11/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
FAKE_GET_ENGINES_RESPONSE = """
{
"error":{
"code": 0
},
"data":[{
"NODELIST": "[]",
"ID": "0"
}]
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/storageengine/GET'] = (
FAKE_GET_ENGINES_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate?ASSOCIATEOBJTYPE=245&'
'ASSOCIATEOBJID=1&range=[0-8191]/GET'] = (
FAKE_GET_MAPPING_VIEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/MAPPINGVIEW/CREATE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?ISFREE=true&'
'range=[0-8191]/GET'] = (
FAKE_FC_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/GET'] = (
FAKE_FC_INFO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/10000090fa0d6754/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/host_link?INITIATOR_TYPE=223'
'&INITIATOR_PORT_WWN=10000090fa0d6754/GET'] = (
FAKE_HOST_LINK_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup?range=[0-8191]&TYPE=257/GET'] = (
FAKE_PORT_GROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/system//GET'] = (
FAKE_SYSTEM_VERSION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]/GET'] = (
FAKE_GET_FC_INI_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['fc_initiator?range=[0-256]/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?PARENTTYPE=21&PARENTID=1/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/associate/cachepartition/POST'] = (
FAKE_SYSTEM_VERSION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_initiator?range=[0-256]&PARENTID=1/GET'] = (
FAKE_GET_FC_PORT_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/0/GET'] = (
FAKE_SMARTCACHEPARTITION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/REMOVE_ASSOCIATE/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/SMARTCACHEPARTITION/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/cachepartition/0/GET'] = (
FAKE_SMARTCACHEPARTITION_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroDomain?range=[0-32]/GET'] = (
FAKE_HYPERMETRODOMAIN_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/POST'] = (
FAKE_HYPERMETRO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/3400a30d844d0007/GET'] = (
FAKE_METRO_INFO_NEW_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/disable_hcpair/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/POST'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/hyperMetro/associate/pair/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/11/GET'] = (
FAKE_HYPERMETRO_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair?range=[0-4095]/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetroPair/synchronize_hcpair/PUT'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror?range=[0-8191]/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/splitmirror/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/smartcachepool/count'] = (
FAKE_COMMON_FAIL_RESPONSE)
FAKE_GET_PORTG_BY_VIEW = """
{
"data": [{
"DESCRIPTION": "Please do NOT modify this. Engine ID: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
}],
"error": {
"code": 0
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/mappingview?TYPE=257&AS'
'SOCIATEOBJTYPE=245&ASSOCIATEOBJID=1/GET'] = (
FAKE_GET_PORTG_BY_VIEW)
FAKE_GET_PORT_BY_PORTG = """
{
"data":[{
"CONFSPEED":"0","FCCONFMODE":"3",
"FCRUNMODE":"0","HEALTHSTATUS":"1","ID":"2000643e8c4c5f66",
"MAXSUPPORTSPEED":"16000","NAME":"P0","PARENTID":"0B.1",
"PARENTTYPE":209,"RUNNINGSTATUS":"10","RUNSPEED":"8000",
"WWN":"2000643e8c4c5f66"
}],
"error":{
"code":0,"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate/portgroup?TYPE=212&ASSOCI'
'ATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_PORT_BY_PORTG)
FAKE_GET_PORTG = """
{
"data": {
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "Please DO NOT change thefollowing message: 0",
"ID": "0"
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/GET'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/0/PUT'] = FAKE_GET_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup/POST'] = (
FAKE_GET_PORT_BY_PORTG)
MAP_COMMAND_TO_FAKE_RESPONSE['/port/associate/portgroup?ID=0&TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=2000643e8c4c5f66/DE'
'LETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_CREATE_PORTG = """
{
"data": {
"DESCRIPTION": "Please DO NOT change the following message: 0",
"ID": "0",
"NAME": "OpenStack_PortGroup_1",
"TYPE": 257
},
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/POST'] = FAKE_CREATE_PORTG
MAP_COMMAND_TO_FAKE_RESPONSE['/PortGroup/1/DELETE'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_GET_PORTG_FROM_PORT = """
{
"data": [{
"TYPE": 257,
"NAME": "OpenStack_PortGroup_1",
"DESCRIPTION": "PleaseDONOTchangethefollowingmessage: 0",
"ID": "0"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=1114368/GET'] = (
FAKE_GET_PORTG_FROM_PORT)
FAKE_GET_VIEW_BY_PORTG = """
{
"data": [{
"ASSOCIATEOBJID": "0",
"COUNT": "0",
"ASSOCIATEOBJTYPE": "0",
"INBANDLUNWWN": "",
"FORFILESYSTEM": "false",
"ID": "2",
"ENABLEINBANDCOMMAND": "false",
"NAME": "OpenStack_Mapping_View_1",
"WORKMODE": "0",
"TYPE": 245,
"HOSTLUNID": "0",
"DESCRIPTION": ""
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASS'
'OCIATEOBJTYPE=257&ASSOCIATEOBJID=0/GET'] = (
FAKE_GET_VIEW_BY_PORTG)
FAKE_GET_LUNG_BY_VIEW = """
{
"data": [{
"TYPE": 256,
"NAME": "OpenStack_LunGroup_1",
"DESCRIPTION": "OpenStack_LunGroup_1",
"ID": "1"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lungroup/associate/mappingview?TYPE=256&ASSO'
'CIATEOBJTYPE=245&ASSOCIATEOBJID=2/GET'] = (
FAKE_GET_LUNG_BY_VIEW)
FAKE_LUN_COUNT_RESPONSE_1 = """
{
"data":{
"COUNT":"2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/lun/count?TYPE=11&ASSOCIATEOB'
'JTYPE=256&ASSOCIATEOBJID=1/GET'] = (
FAKE_LUN_COUNT_RESPONSE_1)
FAKE_PORTS_IN_PG_RESPONSE = """
{
"data": [{
"ID": "1114114",
"WWN": "2002643e8c4c5f66"
},
{
"ID": "1114113",
"WWN": "2001643e8c4c5f66"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE='
'257&ASSOCIATEOBJID=0/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/HyperMetro_ConsistentGroup/POST'] = (
FAKE_CREATE_METROROUP_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup?type"
"='15364'/GET"] = (
json.dumps(FAKE_GET_METROROUP_RESPONSE))
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/GET"] = (
FAKE_GET_METROROUP_ID_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/11/DELETE"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/stop/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE["/HyperMetro_ConsistentGroup/sync/PUT"] = (
FAKE_COMMON_SUCCESS_RESPONSE)
FAKE_GET_REMOTEDEV_RESPONSE = """
{
"data":[{
"ARRAYTYPE":"1",
"HEALTHSTATUS":"1",
"ID":"0",
"NAME":"Huawei.Storage",
"RUNNINGSTATUS":"1",
"WWN":"21003400a30d844d"
}],
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/remote_device/GET'] = (
FAKE_GET_REMOTEDEV_RESPONSE)
FAKE_CREATE_PAIR_RESPONSE = """
{
"data":{
"ID":"%s"
},
"error":{
"code":0,
"description":"0"
}
}
""" % TEST_PAIR_ID
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/POST'] = (
FAKE_CREATE_PAIR_RESPONSE)
FAKE_DELETE_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/DELETE' % TEST_PAIR_ID] = (
FAKE_DELETE_PAIR_RESPONSE)
FAKE_SET_PAIR_ACCESS_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/PUT' % TEST_PAIR_ID] = (
FAKE_SET_PAIR_ACCESS_RESPONSE)
FAKE_GET_PAIR_NORMAL_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "1",
"SECRESACCESS": "2",
"HEALTHSTATUS": "1",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SPLIT_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "26",
"SECRESACCESS": "2",
"ISPRIMARY": "true"
},
"error":{
"code":0,
"description":"0"
}
}
"""
FAKE_GET_PAIR_SYNC_RESPONSE = """
{
"data":{
"REPLICATIONMODEL": "1",
"RUNNINGSTATUS": "23",
"SECRESACCESS": "2"
},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/%s/GET' % TEST_PAIR_ID] = (
FAKE_GET_PAIR_NORMAL_RESPONSE)
FAKE_SYNC_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/sync/PUT'] = (
FAKE_SYNC_PAIR_RESPONSE)
FAKE_SPLIT_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/split/PUT'] = (
FAKE_SPLIT_PAIR_RESPONSE)
FAKE_SWITCH_PAIR_RESPONSE = """
{
"data":{},
"error":{
"code":0,
"description":"0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/REPLICATIONPAIR/switch/PUT'] = (
FAKE_SWITCH_PAIR_RESPONSE)
FAKE_PORTS_IN_PG_RESPONSE = """
{
"data": [{
"ID": "1114114",
"WWN": "2002643e8c4c5f66"
},
{
"ID": "1114113",
"WWN": "2001643e8c4c5f66"
}],
"error": {
"code": 0,
"description": "0"
}
}
"""
MAP_COMMAND_TO_FAKE_RESPONSE['/fc_port/associate?TYPE=213&ASSOCIATEOBJTYPE='
'257&ASSOCIATEOBJID=0/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/portgroup/associate/fc_port?TYPE=257&ASSOCIA'
'TEOBJTYPE=212&ASSOCIATEOBJID=1114369/GET'] = (
FAKE_PORTS_IN_PG_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
'IATEOBJTYPE=257&ASSOCIATEOBJID=1114114/GET'] = (
FAKE_SWITCH_PAIR_RESPONSE)
MAP_COMMAND_TO_FAKE_RESPONSE['/mappingview/associate/portgroup?TYPE=245&ASSOC'
'IATEOBJTYPE=257&ASSOCIATEOBJID=1114113/GET'] = (
FAKE_COMMON_SUCCESS_RESPONSE)
REPLICA_BACKEND_ID = 'huawei-replica-1'
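# FakeHuaweiConf bypasses the XML configuration file entirely and injects the
# settings the driver expects directly onto the conf object.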
class FakeHuaweiConf(huawei_conf.HuaweiConf):
def __init__(self, conf, protocol):
self.conf = conf
self.protocol = protocol
def safe_get(self, key):
try:
return getattr(self.conf, key)
except Exception:
return
def update_config_value(self):
setattr(self.conf, 'volume_backend_name', 'huawei_storage')
setattr(self.conf, 'san_address',
['http://192.0.2.69:8082/deviceManager/rest/'])
setattr(self.conf, 'san_user', 'admin')
setattr(self.conf, 'san_password', 'Admin@storage')
setattr(self.conf, 'san_product', 'V3')
setattr(self.conf, 'san_protocol', self.protocol)
setattr(self.conf, 'lun_type', constants.THICK_LUNTYPE)
setattr(self.conf, 'lun_ready_wait_interval', 2)
setattr(self.conf, 'lun_copy_wait_interval', 2)
setattr(self.conf, 'lun_timeout', 43200)
setattr(self.conf, 'lun_write_type', '1')
setattr(self.conf, 'lun_mirror_switch', '1')
setattr(self.conf, 'lun_prefetch_type', '1')
setattr(self.conf, 'lun_prefetch_value', '0')
setattr(self.conf, 'lun_policy', '0')
setattr(self.conf, 'lun_read_cache_policy', '2')
setattr(self.conf, 'lun_write_cache_policy', '5')
setattr(self.conf, 'storage_pools', ['OpenStack_Pool'])
setattr(self.conf, 'iscsi_default_target_ip', ['192.0.2.68'])
setattr(self.conf, 'metro_san_address',
['https://192.0.2.240:8088/deviceManager/rest/'])
setattr(self.conf, 'metro_storage_pools', 'OpenStack_Pool')
setattr(self.conf, 'metro_san_user', 'admin')
setattr(self.conf, 'metro_san_password', 'Admin@storage1')
setattr(self.conf, 'metro_domain_name', 'hypermetro_test')
iscsi_info = {'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.2',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1',
'TargetPortGroup': 'portgroup-test', }
setattr(self.conf, 'iscsi_info', [iscsi_info])
rmt_iscsi_info = ('{ Name: iqn.1993-08.debian:01:ec2bff7acxxx;\n'
'TargetIP:1.1.1.1;CHAPinfo:mm-user#mm-user@storage;'
'ALUA:1; TargetPortGroup:portgroup-test};\t\n '
'{ Name: iqn.1993-08.debian:01:ec2bff7acyyy;\n'
'TargetIP:2.2.2.2;CHAPinfo:nn-user#nn-user@storage;'
'ALUA:0; TargetPortGroup:portgroup-test1}\t\n')
targets = [{'backend_id': REPLICA_BACKEND_ID,
'storage_pool': 'OpenStack_Pool',
'san_address':
'https://192.0.2.69:8088/deviceManager/rest/',
'san_user': 'admin',
'san_password': 'Admin@storage1',
'iscsi_info': rmt_iscsi_info}]
setattr(self.conf, 'replication_device', targets)
setattr(self.conf, 'safe_get', self.safe_get)
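# FakeClient overrides do_call() so that no real HTTP requests are issued;
# every REST command is answered from MAP_COMMAND_TO_FAKE_RESPONSE, with the
# test_fail/test_multi_url_flag switches forcing error responses on demand.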
class FakeClient(rest_client.RestClient):
def __init__(self, configuration):
san_address = configuration.san_address
san_user = configuration.san_user
san_password = configuration.san_password
rest_client.RestClient.__init__(self, configuration,
san_address,
san_user,
san_password)
self.test_fail = False
self.test_multi_url_flag = False
self.cache_not_exist = False
self.partition_not_exist = False
def _get_snapshotid_by_name(self, snapshot_name):
return "11"
def _check_snapshot_exist(self, snapshot_id):
return True
def get_partition_id_by_name(self, name):
if self.partition_not_exist:
return None
return "11"
def get_cache_id_by_name(self, name):
if self.cache_not_exist:
return None
return "11"
def add_lun_to_cache(self, lunid, cache_id):
pass
def do_call(self, url=False, data=None, method=None, calltimeout=4,
log_filter_flag=False):
url = url.replace('http://192.0.2.69:8082/deviceManager/rest', '')
command = url.replace('/210235G7J20000000000/', '')
data = json.dumps(data) if data else None
if method:
command = command + "/" + method
for item in MAP_COMMAND_TO_FAKE_RESPONSE.keys():
if command == item:
data = MAP_COMMAND_TO_FAKE_RESPONSE[item]
if self.test_fail:
data = FAKE_ERROR_INFO_RESPONSE
if command == 'lun/11/GET':
data = FAKE_ERROR_LUN_INFO_RESPONSE
self.test_fail = False
if self.test_multi_url_flag:
data = FAKE_ERROR_CONNECT_RESPONSE
self.test_multi_url_flag = False
return json.loads(data)
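# Replica pair manager whose remote-side client is also a FakeClient.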
class FakeReplicaPairManager(replication.ReplicaPairManager):
def _init_rmt_client(self):
self.rmt_client = FakeClient(self.conf)
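# iSCSI driver double: do_setup() installs FakeClient instances for the
# local, remote (hypermetro) and replication connections.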
class FakeISCSIStorage(huawei_driver.HuaweiISCSIDriver):
def __init__(self, configuration):
self.configuration = configuration
self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
self.active_backend_id = None
self.replica = None
self.support_func = None
def do_setup(self):
self.metro_flag = True
self.huawei_conf.update_config_value()
self.get_local_and_remote_dev_conf()
self.client = FakeClient(configuration=self.configuration)
self.rmt_client = FakeClient(configuration=self.configuration)
self.replica_client = FakeClient(configuration=self.configuration)
self.metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
self.replica = FakeReplicaPairManager(self.client,
self.replica_client,
self.configuration)
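# FC driver double, wired up the same way as FakeISCSIStorage.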
class FakeFCStorage(huawei_driver.HuaweiFCDriver):
def __init__(self, configuration):
self.configuration = configuration
self.fcsan = None
self.huawei_conf = FakeHuaweiConf(self.configuration, 'iSCSI')
self.active_backend_id = None
self.replica = None
self.support_func = None
def do_setup(self):
self.metro_flag = True
self.huawei_conf.update_config_value()
self.get_local_and_remote_dev_conf()
self.client = FakeClient(configuration=self.configuration)
self.rmt_client = FakeClient(configuration=self.configuration)
self.replica_client = FakeClient(configuration=self.configuration)
self.metro = hypermetro.HuaweiHyperMetro(self.client,
self.rmt_client,
self.configuration)
self.replica = FakeReplicaPairManager(self.client,
self.replica_client,
self.configuration)
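# Base test case that builds the fake driver and the volume, snapshot and
# consistency-group fixtures shared by the protocol-specific suites below.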
@ddt.ddt
class HuaweiTestBase(test.TestCase):
def setUp(self):
super(HuaweiTestBase, self).setUp()
self.configuration = mock.Mock(spec=conf.Configuration)
self.driver = FakeISCSIStorage(configuration=self.configuration)
self.driver.do_setup()
self.volume = fake_volume.fake_volume_obj(
admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
admin_metadata=ADMIN_METADATA, id=ID)
self.snapshot = fake_snapshot.fake_snapshot_obj(
admin_contex, provider_location=PROVIDER_LOCATION, id=ID)
self.snapshot.volume = self.volume
self.replica_volume = fake_volume.fake_volume_obj(
admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
admin_metadata=ADMIN_METADATA, replication_status='disabled',
replication_driver_data=REPLICA_DRIVER_DATA, id=ID)
self.hyper_volume = fake_volume.fake_volume_obj(
admin_contex, host=HOST, provider_location=PROVIDER_LOCATION,
volume_metadata=VOL_METADATA, id=ID)
self.original_volume = fake_volume.fake_volume_obj(admin_contex,
id=ID)
self.current_volume = fake_volume.fake_volume_obj(
admin_contex, id=ID, provider_location=PROVIDER_LOCATION,
name_id=ID)
self.cgsnapshot = fake_cgsnapshot.fake_cgsnapshot_obj(
admin_contex, id=ID, consistencygroup_id=ID, status='available')
self.cg = fake_consistencygroup.fake_consistencyobject_obj(
admin_contex, id=ID, status='available')
def test_encode_name(self):
lun_name = huawei_utils.encode_name(self.volume.id)
self.assertIn(lun_name, ('21ec7341-4687000622165227970',
'21ec7341-7953146827712520106'))
@mock.patch.object(rest_client, 'RestClient')
def test_create_snapshot_success(self, mock_client):
lun_info = self.driver.create_snapshot(self.snapshot)
self.assertEqual(11, lun_info['provider_location'])
self.snapshot.volume_id = ID
self.snapshot.volume = self.volume
lun_info = self.driver.create_snapshot(self.snapshot)
self.assertEqual(11, lun_info['provider_location'])
@ddt.data('1', '', '0')
def test_copy_volume(self, input_speed):
self.driver.configuration.lun_copy_wait_interval = 0
self.volume.metadata = {'copyspeed': input_speed}
mocker = self.mock_object(
self.driver.client, 'create_luncopy',
mock.Mock(wraps=self.driver.client.create_luncopy))
self.driver._copy_volume(self.volume,
'fake_copy_name',
'fake_src_lun',
'fake_tgt_lun')
mocker.assert_called_once_with('fake_copy_name',
'fake_src_lun',
'fake_tgt_lun',
input_speed)
@ddt.data({'input_speed': '1',
'actual_speed': '1'},
{'input_speed': '',
'actual_speed': '2'},
{'input_speed': None,
'actual_speed': '2'},
{'input_speed': '5',
'actual_speed': '2'})
@ddt.unpack
def test_client_create_luncopy(self, input_speed, actual_speed):
mocker = self.mock_object(
self.driver.client, 'call',
mock.Mock(wraps=self.driver.client.call))
self.driver.client.create_luncopy('fake_copy_name',
'fake_src_lun',
'fake_tgt_lun',
input_speed)
mocker.assert_called_once_with(
mock.ANY,
{"TYPE": 219,
"NAME": 'fake_copy_name',
"DESCRIPTION": 'fake_copy_name',
"COPYSPEED": actual_speed,
"LUNCOPYTYPE": "1",
"SOURCELUN": "INVALID;fake_src_lun;INVALID;INVALID;INVALID",
"TARGETLUN": "INVALID;fake_tgt_lun;INVALID;INVALID;INVALID"}
)
@ddt.ddt
class HuaweiISCSIDriverTestCase(HuaweiTestBase):
def setUp(self):
super(HuaweiISCSIDriverTestCase, self).setUp()
self.configuration = mock.Mock(spec=conf.Configuration)
self.configuration.hypermetro_devices = hypermetro_devices
self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
self.driver = FakeISCSIStorage(configuration=self.configuration)
self.driver.do_setup()
self.portgroup = 'portgroup-test'
self.iscsi_iqns = ['iqn.2006-08.com.huawei:oceanstor:21000022a:'
':20503:192.0.2.1',
'iqn.2006-08.com.huawei:oceanstor:21000022a:'
':20500:192.0.2.2']
self.target_ips = ['192.0.2.1',
'192.0.2.2']
self.portgroup_id = 11
self.driver.client.login()
def test_parse_rmt_iscsi_info(self):
rmt_devs = self.driver.huawei_conf.get_replication_devices()
iscsi_info = rmt_devs[0]['iscsi_info']
expected_iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7acxxx',
'TargetIP': '1.1.1.1',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1',
'TargetPortGroup': 'portgroup-test'},
{'Name': 'iqn.1993-08.debian:01:ec2bff7acyyy',
'TargetIP': '2.2.2.2',
'CHAPinfo': 'nn-user;nn-user@storage',
'ALUA': '0',
'TargetPortGroup': 'portgroup-test1'}]
self.assertEqual(expected_iscsi_info, iscsi_info)
def test_parse_rmt_iscsi_info_without_iscsi_configuration(self):
self.configuration.replication_device[0]['iscsi_info'] = ''
rmt_devs = self.driver.huawei_conf.get_replication_devices()
iscsi_info = rmt_devs[0]['iscsi_info']
self.assertEqual([], iscsi_info)
def test_login_success(self):
device_id = self.driver.client.login()
self.assertEqual('210235G7J20000000000', device_id)
@ddt.data(constants.PWD_EXPIRED, constants.PWD_RESET)
def test_login_password_expires_and_reset_fail(self, state):
with mock.patch.object(self.driver.client, 'logout') as mock_logout:
self.mock_object(FakeClient, 'do_call',
return_value={"error": {"code": 0},
"data": {
"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": state}})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
mock_logout.assert_called_once_with()
def test_login_logout_fail(self):
login_info = {"error": {"code": 0},
"data": {"username": "admin",
"iBaseToken": "2001031430",
"deviceid": "210235G7J20000000000",
"accountstate": 3}}
logout_info = {"error": {"code": 1}, "data": {}}
self.mock_object(FakeClient, 'do_call',
side_effect=[login_info, logout_info])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_check_volume_exist_on_array(self):
self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None)
self.driver._check_volume_exist_on_array(
self.volume, constants.VOLUME_NOT_EXISTS_WARN)
def test_create_volume_success(self):
self.volume.host = 'ubuntu001@backend001#OpenStack_Pool'
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
self.volume.host = 'ubuntu001@backend001'
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_replication_fail(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(replication.ReplicaCommonDriver, 'split')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(rest_client.RestClient,
'delete_lun',
side_effect=exception.VolumeBackendAPIException(
data='err'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume, self.replica_volume)
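    # The fake migration tasks below use RUNNINGSTATUS '76' for a migration
    # that completed and '74' for one that ended in fault.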
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_no_data(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
side_effect=[{}, task_info])
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_success_with_replication(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
moved = False
empty_dict = {}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
moved, model_update = self.driver.migrate_volume(None,
self.replica_volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_fail_migration_fault(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "74",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume,
None, self.volume, test_host, None)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_fail_no_migrate_task(self, pool_data):
self.driver.support_func = pool_data
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "12",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume,
None, self.volume, test_host, None)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_migrate_volume_with_type_id(self, pool_data):
self.driver.support_func = pool_data
self.volume.volume_type_id = '550c089b-bfdd-4f7f-86e1-3ba88125555c'
task_info = {"data": [{"ENDTIME": "1436816174",
"ID": "9",
"PARENTID": "11",
"PARENTNAME": "xmRBHMlVRruql5vwthpPXQ",
"PROCESS": "-1",
"RUNNINGSTATUS": "76",
"SPEED": "2",
"STARTTIME": "1436816111",
"TARGETLUNID": "1",
"TARGETLUNNAME": "4924891454902893639",
"TYPE": 253,
"WORKMODE": "0"
}],
"error": {"code": 0,
"description": "0"}
}
empty_dict = {}
self.mock_object(volume_types, 'get_volume_type',
return_value=test_new_type)
self.mock_object(rest_client.RestClient, 'get_lun_migration_task',
return_value=task_info)
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_manage_existing_fail(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152, 'ALLOCTYPE': 1})
self.mock_object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
self.mock_object(rest_client.RestClient, 'rename_lun')
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value={
'PARENTNAME': 'OpenStack_Pool',
'SNAPSHOTIDS': [],
'ID': 'ID1',
'HEALTHSTATUS': constants.STATUS_HEALTH,
'WWN': '6643e8c1004c5f6723e9f454003'})
self.mock_object(volume_types, 'get_volume_type',
return_value={'extra_specs': test_new_type})
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_check_needed_changes',
return_value={})
external_ref = {'source-name': 'test1',
'source-id': 'ID1'}
self.driver.manage_existing(self.volume, external_ref)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_volume_success(self, pool_data):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
def test_delete_snapshot_success(self):
self.driver.delete_snapshot(self.snapshot)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_create_volume_from_snapsuccess(self):
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(replication.ReplicaCommonDriver, 'sync')
model_update = self.driver.create_volume_from_snapshot(self.volume,
self.volume)
self.assertEqual('1', model_update['provider_location'])
driver_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': '1'}
driver_data = replication.to_string(driver_data)
self.assertEqual(driver_data, model_update['replication_driver_data'])
self.assertEqual('available', model_update['replication_status'])
@mock.patch.object(huawei_driver.HuaweiISCSIDriver,
'initialize_connection',
return_value={"data": {'target_lun': 1}})
def test_initialize_connection_snapshot_success(self, mock_iscsi_init):
iscsi_properties = self.driver.initialize_connection_snapshot(
self.snapshot, FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
mock_iscsi_init.assert_called_with(volume, FakeConnector)
def test_initialize_connection_success_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
        self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
                         return_value='11')
iscsi_properties = self.driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1, 1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
        self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
                         return_value='12')
        self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
                         return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.2',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_targetip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'TargetIP': '192.0.2.6',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_success_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.2']
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
iscsi_properties = driver.initialize_connection(self.volume,
temp_connector)
self.assertEqual([1], iscsi_properties['data']['target_luns'])
def test_initialize_connection_fail_multipath_defaultip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
default_target_ip = ['192.0.2.6']
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = default_target_ip
driver.client.iscsi_default_target_ip = default_target_ip
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_fail_no_port_in_portgroup(self):
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.mock_object(rest_client.RestClient, 'get_tgt_port_group',
return_value='11')
self.mock_object(rest_client.RestClient, '_get_tgt_ip_from_portgroup',
return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, temp_connector)
def test_initialize_connection_fail_multipath_no_ip(self):
iscsi_info = [{'Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'CHAPinfo': 'mm-user;mm-user@storage',
'ALUA': '1'}]
        configuration = mock.Mock(spec=conf.Configuration)
        configuration.hypermetro_devices = hypermetro_devices
        driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.configuration.iscsi_info = iscsi_info
driver.client.iscsi_info = iscsi_info
driver.configuration.iscsi_default_target_ip = None
driver.client.iscsi_default_target_ip = None
temp_connector = copy.deepcopy(FakeConnector)
temp_connector['multipath'] = True
self.assertRaises(exception.VolumeBackendAPIException,
driver.initialize_connection,
self.volume, temp_connector)
@mock.patch.object(huawei_driver.HuaweiISCSIDriver,
'terminate_connection')
def test_terminate_connection_snapshot_success(self, mock_iscsi_term):
self.driver.terminate_connection_snapshot(self.snapshot,
FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
mock_iscsi_term.assert_called_with(volume, FakeConnector)
def test_terminate_connection_success(self):
self.driver.terminate_connection(self.volume, FakeConnector)
def test_get_volume_status(self):
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 6291456})
@mock.patch.object(rest_client.RestClient, 'extend_lun')
def test_extend_volume_size_equal(self, mock_extend, mock_lun_info):
self.driver.extend_volume(self.volume, 3)
self.assertEqual(0, mock_extend.call_count)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 5291456})
@mock.patch.object(rest_client.RestClient, 'extend_lun')
def test_extend_volume_success(self, mock_extend, mock_lun_info):
self.driver.extend_volume(self.volume, 3)
self.assertEqual(1, mock_extend.call_count)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={"CAPACITY": 7291456})
def test_extend_volume_fail(self, mock_lun_info):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume, self.volume, 3)
def test_extend_nonexistent_volume(self):
self.volume = fake_volume.fake_volume_obj(admin_contex)
self.mock_object(rest_client.RestClient,
'get_lun_id_by_name',
return_value=None)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.extend_volume,
self.volume, 3)
def test_get_admin_metadata(self):
metadata = [{'key': 'huawei_lun_wwn', 'value': '1'}]
tmp_volume = fake_volume.fake_volume_obj(
admin_contex, volume_admin_metadata=metadata)
expected_value = {'huawei_lun_wwn': '1'}
admin_metadata = huawei_utils.get_admin_metadata(tmp_volume)
self.assertEqual(expected_value, admin_metadata)
metadata = {'huawei_lun_wwn': '1'}
tmp_volume = fake_volume.fake_volume_obj(admin_contex)
tmp_volume.admin_metadata = metadata
admin_metadata = huawei_utils.get_admin_metadata(tmp_volume)
self.assertEqual(expected_value, admin_metadata)
def test_login_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_create_snapshot_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot, self.snapshot)
def test_create_volume_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
def test_delete_volume_fail(self):
self.driver.client.test_fail = True
self.driver.delete_volume(self.volume)
def test_delete_snapshot_fail(self):
self.driver.client.test_fail = True
self.driver.delete_snapshot(self.snapshot)
def test_delete_snapshot_with_snapshot_nonexistent(self):
self.snapshot.provider_location = None
self.driver.delete_snapshot(self.snapshot)
def test_initialize_connection_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_lun_is_associated_to_lungroup(self):
self.driver.client.associate_lun_to_lungroup('11', '11')
result = self.driver.client._is_lun_associated_to_lungroup('11',
'11')
self.assertTrue(result)
def test_lun_is_not_associated_to_lun_group(self):
self.driver.client.associate_lun_to_lungroup('12', '12')
self.driver.client.remove_lun_from_lungroup('12', '12')
result = self.driver.client._is_lun_associated_to_lungroup('12', '12')
self.assertFalse(result)
def test_get_tgtip(self):
portg_id = self.driver.client.get_tgt_port_group(self.portgroup)
target_ip = self.driver.client._get_tgt_ip_from_portgroup(portg_id)
self.assertEqual(self.target_ips, target_ip)
def test_find_chap_info(self):
tmp_dict = {}
tmp_dict['Name'] = 'iqn.1993-08.debian:01:ec2bff7ac3a3'
tmp_dict['CHAPinfo'] = 'mm-user;mm-user@storage'
iscsi_info = [tmp_dict]
initiator_name = FakeConnector['initiator']
chapinfo = self.driver.client.find_chap_info(iscsi_info,
initiator_name)
chap_username, chap_password = chapinfo.split(';')
self.assertEqual('mm-user', chap_username)
self.assertEqual('mm-user@storage', chap_password)
def test_find_alua_info(self):
tmp_dict = {}
tmp_dict['Name'] = 'iqn.1993-08.debian:01:ec2bff7ac3a3'
tmp_dict['ALUA'] = '1'
iscsi_info = [tmp_dict]
initiator_name = FakeConnector['initiator']
        alua_type = self.driver.client._find_alua_info(iscsi_info,
                                                       initiator_name)
        self.assertEqual('1', alua_type)
def test_get_pool_info(self):
pools = [{"NAME": "test001",
"ID": "0",
"USERFREECAPACITY": "36",
"USERTOTALCAPACITY": "48",
"USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
"TIER0CAPACITY": "48",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "0"},
{"NAME": "test002",
"ID": "1",
"USERFREECAPACITY": "37",
"USERTOTALCAPACITY": "49",
"USAGETYPE": constants.FILE_SYSTEM_POOL_TYPE,
"TIER0CAPACITY": "0",
"TIER1CAPACITY": "49",
"TIER2CAPACITY": "0"},
{"NAME": "test003",
"ID": "0",
"USERFREECAPACITY": "36",
"DATASPACE": "35",
"USERTOTALCAPACITY": "48",
"USAGETYPE": constants.BLOCK_STORAGE_POOL_TYPE,
"TIER0CAPACITY": "0",
"TIER1CAPACITY": "0",
"TIER2CAPACITY": "48"}]
pool_name = 'test001'
test_info = {'CAPACITY': '36', 'ID': '0', 'TOTALCAPACITY': '48',
'TIER0CAPACITY': '48', 'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0'}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test002'
test_info = {}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test000'
test_info = {}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
pool_name = 'test003'
test_info = {'CAPACITY': '35', 'ID': '0', 'TOTALCAPACITY': '48',
'TIER0CAPACITY': '0', 'TIER1CAPACITY': '0',
'TIER2CAPACITY': '48'}
pool_info = self.driver.client.get_pool_info(pool_name, pools)
self.assertEqual(test_info, pool_info)
def test_get_smartx_specs_opts(self):
smartx_opts = smartx.SmartX().get_smartx_specs_opts(smarttier_opts)
self.assertEqual('3', smartx_opts['policy'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MAXIOPS': '100',
'IOType': '2'})
    def test_create_smartqos(self, pool_data, mock_qos_value):
self.driver.support_func = pool_data
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
@mock.patch.object(qos_specs, 'get_qos_specs',
return_value={'specs': {'maxBandWidth': '100',
'IOType': '0'},
'consumer': 'back-end'})
def test_create_smartqos_success(self,
mock_qos_specs,
mock_value_type,
mock_volume_params):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data([{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
FAKE_POOLS_UNSUPPORT_REPORT],
[{'specs': {'maxBandWidth': '100', 'IOType': '3'}},
FAKE_POOLS_SUPPORT_REPORT],
[{'specs': {'minBandWidth': '0', 'IOType': '2'}},
FAKE_POOLS_UNSUPPORT_REPORT],
[{'specs': {'minBandWidth': '0', 'IOType': '2'}},
FAKE_POOLS_SUPPORT_REPORT])
@ddt.unpack
def test_create_smartqos_failed(self, qos_specs_value, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
self.mock_object(qos_specs, 'get_qos_specs',
return_value=qos_specs_value)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_create_smartqos_without_huawei_type(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_type',
return_value={'qos_specs_id': u'025ce295-15e9-41a7'})
self.mock_object(qos_specs, 'get_qos_specs',
return_value={'specs': {'fake_qos_type': '100',
'IOType': '2'}})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MAXIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=(None, []))
    def test_create_smartqos_on_v3r3_with_no_qos(self,
                                                 mock_find_available_qos,
                                                 mock_array_version,
                                                 mock_qos_value):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=('11', u'["0", "2", "3"]'))
    def test_create_smartqos_on_v3r3_with_qos(self,
                                              mock_find_available_qos,
                                              mock_array_version,
                                              mock_qos_value):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=('11', u'["0", "2", "3"]'))
    def test_create_smartqos_on_v3r3_with_unsupport_qos(
            self, mock_find_available_qos,
            mock_array_version, mock_qos_value):
self.driver.support_func = FAKE_POOLS_UNSUPPORT_REPORT
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=(None, []))
@mock.patch.object(rest_client.RestClient, 'activate_deactivate_qos')
    def test_create_smartqos_on_v3r3_active_failed(self,
                                                   pool_data,
                                                   mock_activate_qos,
                                                   mock_find_available_qos,
                                                   mock_array_version,
                                                   mock_qos_value):
self.driver.support_func = pool_data
mock_activate_qos.side_effect = (
exception.VolumeBackendAPIException(data='Activate or deactivate '
'QoS error. '))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(smartx.SmartQos, 'get_qos_by_volume_type',
return_value={'MINIOPS': '100',
'IOType': '2'})
@mock.patch.object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
@mock.patch.object(rest_client.RestClient, 'find_available_qos',
return_value=(None, []))
@mock.patch.object(rest_client.RestClient, 'create_qos_policy')
    def test_create_smartqos_on_v3r3_qos_failed(self,
                                                pool_data,
                                                mock_create_qos,
                                                mock_find_available_qos,
                                                mock_array_version,
                                                mock_qos_value):
self.driver.support_func = pool_data
mock_create_qos.side_effect = (
exception.VolumeBackendAPIException(data='Create QoS policy '
'error.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_qos_info',
return_value={"LUNLIST": u'["1", "2", "3"]',
"RUNNINGSTATUS": "2"})
    def test_delete_smartqos_with_lun_left(self, pool_data, mock_qos_info):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_qos_info',
return_value={"LUNLIST": u'["1"]',
"RUNNINGSTATUS": "2"})
    def test_delete_smartqos_with_no_lun_left(self, pool_data,
                                              mock_qos_info):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
def test_create_smartx(self, mock_volume_types, mock_add_lun_to_partition):
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data([{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': None,
'partitionname': 'partition-test'},
FAKE_POOLS_UNSUPPORT_REPORT],
[{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': 'cache-test',
'partitionname': None},
FAKE_POOLS_SUPPORT_REPORT],
[{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': None,
'partitionname': 'partition-test'},
FAKE_POOLS_SUPPORT_REPORT],
[{'smarttier': 'true', 'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2', 'cachename': 'cache-test',
'partitionname': None},
FAKE_POOLS_UNSUPPORT_REPORT])
@ddt.unpack
def test_create_smartCache_failed(self, opts, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_params',
return_value=opts)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
    def test_create_smartCache_failed_with_no_cacheid(self,
                                                      pool_data,
                                                      mock_volume_type):
self.driver.client.cache_not_exist = True
self.driver.support_func = pool_data
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'smarttier': 'true',
'smartcache': 'true',
'smartpartition': 'true',
'thin_provisioning_support': 'true',
'thick_provisioning_support': 'false',
'policy': '2',
'cachename': 'cache-test',
'partitionname': 'partition-test'})
    def test_create_smartPartition_failed_with_no_partid(self,
                                                         pool_data,
                                                         mock_volume_type):
self.driver.client.partition_not_exist = True
self.driver.support_func = pool_data
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
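    # find_available_qos() should only reuse a QoS policy with room for more
    # LUNs: the full 64-entry LUN list yields (None, []), while the shorter
    # list yields the existing policy ID and its LUN list.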
def test_find_available_qos(self):
qos = {'MAXIOPS': '100', 'IOType': '2'}
fake_qos_info_response_equal = {
"error": {
"code": 0
},
"data": [{
"ID": "11",
"MAXIOPS": "100",
"LATENCY": "0",
"IOType": "2",
"FSLIST": u'[""]',
'RUNNINGSTATUS': "2",
"NAME": "OpenStack_57_20151225102851",
"LUNLIST": u'["1", "2", "3", "4", "5", "6", "7", "8", "9",\
"10", ,"11", "12", "13", "14", "15", "16", "17", "18", "19",\
"20", ,"21", "22", "23", "24", "25", "26", "27", "28", "29",\
"30", ,"31", "32", "33", "34", "35", "36", "37", "38", "39",\
"40", ,"41", "42", "43", "44", "45", "46", "47", "48", "49",\
"50", ,"51", "52", "53", "54", "55", "56", "57", "58", "59",\
"60", ,"61", "62", "63", "64"]'
}]
}
with mock.patch.object(rest_client.RestClient, 'get_qos',
return_value=fake_qos_info_response_equal):
(qos_id, lun_list) = self.driver.client.find_available_qos(qos)
self.assertEqual((None, []), (qos_id, lun_list))
fake_qos_info_response_less = {
"error": {
"code": 0
},
"data": [{
"ID": "11",
"MAXIOPS": "100",
"LATENCY": "0",
"IOType": "2",
"FSLIST": u'[""]',
'RUNNINGSTATUS': "2",
"NAME": "OpenStack_57_20151225102851",
"LUNLIST": u'["0", "1", "2"]'
}]
}
with mock.patch.object(rest_client.RestClient, 'get_qos',
return_value=fake_qos_info_response_less):
(qos_id, lun_list) = self.driver.client.find_available_qos(qos)
self.assertEqual(("11", u'["0", "1", "2"]'), (qos_id, lun_list))
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
    def test_create_hypermetro_success(self,
                                       mock_volume_ready,
                                       mock_hyper_domain,
                                       mock_pool_info,
                                       mock_all_pool_info,
                                       mock_hypermetro_opts):
metadata = {"hypermetro_id": '11',
"remote_lun_id": '1'}
lun_info = self.driver.create_volume(self.hyper_volume)
self.assertEqual(metadata, lun_info['metadata'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(hypermetro.HuaweiHyperMetro,
'_create_hypermetro_pair')
@mock.patch.object(rest_client.RestClient, 'delete_lun')
def test_create_hypermetro_fail(self,
pool_data,
mock_delete_lun,
mock_hyper_pair_info,
mock_volume_ready,
mock_hyper_domain,
mock_pool_info,
mock_all_pool_info,
mock_hypermetro_opts
):
self.driver.client.login()
self.driver.support_func = pool_data
mock_hyper_pair_info.side_effect = exception.VolumeBackendAPIException(
data='Create hypermetro error.')
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.hyper_volume)
mock_delete_lun.assert_called_with('1')
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value={})
def test_create_hypermetro_remote_pool_none_fail(self,
mock_pool_info,
mock_all_pool_info):
param = {'TYPE': '11',
'PARENTID': ''}
self.driver.client.login()
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.create_hypermetro,
'2', param)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'create_lun',
return_value={'CAPACITY': '2097152',
'DESCRIPTION': '2f0635',
'HEALTHSTATUS': '1',
'ALLOCTYPE': '1',
'WWN': '6643e8c1004c5f6723e9f454003',
'ID': '1',
'RUNNINGSTATUS': '27',
'NAME': '5mFHcBv4RkCcD'})
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
def test_create_hypermetro_remote_pool_parentid(self,
mock_volume_ready,
mock_hyper_domain,
mock_create_lun,
mock_pool_info,
mock_all_pool_info):
param = {'TYPE': '11',
'PARENTID': ''}
self.driver.metro.create_hypermetro('2', param)
lun_PARENTID = mock_create_lun.call_args[0][0]['PARENTID']
self.assertEqual(FAKE_FIND_POOL_RESPONSE['ID'], lun_PARENTID)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
def test_hypermetro_none_map_info_fail(self, mock_metadata):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'check_lun_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'delete_hypermetro',
return_value=FAKE_COMMON_SUCCESS_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'delete_lun',
return_value=None)
    def test_delete_hypermetro_success(self,
                                       pool_data,
                                       mock_delete_lun,
                                       mock_delete_hypermetro,
                                       mock_check_hypermetro,
                                       mock_lun_exist):
self.driver.support_func = pool_data
self.driver.delete_volume(self.hyper_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'check_lun_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'check_hypermetro_exist',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_by_id',
return_value=FAKE_METRO_INFO_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'delete_hypermetro')
@mock.patch.object(rest_client.RestClient, 'delete_lun',
return_value=None)
    def test_delete_hypermetro_fail(self,
                                    pool_data,
                                    mock_delete_lun,
                                    mock_delete_hypermetro,
                                    mock_metro_info,
                                    mock_check_hypermetro,
                                    mock_lun_exist):
self.driver.support_func = pool_data
mock_delete_hypermetro.side_effect = (
exception.VolumeBackendAPIException(data='Delete hypermetro '
'error.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume, self.hyper_volume)
mock_delete_lun.assert_called_with('11')
def test_manage_existing_get_size_invalid_reference(self):
external_ref = {'source-name': 'LUN1'}
with mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None):
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_get_size,
self.volume, external_ref)
self.assertIsNotNone(re.search('please check the source-name '
'or source-id', ex.msg))
        # Can't find LUN by source-id.
        external_ref = {'source-id': 'ID1'}
        with mock.patch.object(rest_client.RestClient, 'get_lun_info') as m_gt:
            m_gt.side_effect = exception.VolumeBackendAPIException(
                data='Error')
            self.assertRaises(exception.VolumeBackendAPIException,
                              self.driver.manage_existing_get_size,
                              self.volume, external_ref)
@ddt.data({'source-id': 'ID1'}, {'source-name': 'LUN1'},
{'source-name': 'LUN1', 'source-id': 'ID1'})
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 3097152})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
    def test_manage_existing_get_size_success(self, external_ref,
                                              mock_get_lun_id_by_name,
                                              mock_get_lun_info):
size = self.driver.manage_existing_get_size(self.volume,
external_ref)
self.assertEqual(2, size)
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_pool_mismatch(self, mock_get_by_name,
mock_get_info):
with mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value={'PARENTNAME': 'StoragePool'}):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('The specified LUN does not belong'
' to the given pool', ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_lun_abnormal(self, mock_get_by_name,
mock_get_info):
ret = {'PARENTNAME': "OpenStack_Pool",
'HEALTHSTATUS': '2'}
with mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value=ret):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('LUN status is not normal', ex.msg))
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs',
return_value=[{'LOCALOBJID': 'ID1'}])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
    def test_manage_existing_with_hypermetro(self, pool_data,
                                             mock_get_by_name,
                                             mock_get_info,
                                             mock_get_hyper_pairs):
self.driver.support_func = pool_data
with mock.patch.object(rest_client.RestClient,
'get_hypermetro_pairs',
return_value=[{'LOCALOBJID': 'ID1'}]):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('HyperMetroPair', ex.msg))
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client.RestClient, 'get_hypermetro_pairs')
@mock.patch.object(rest_client.RestClient, 'rename_lun')
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH,
'WWN': '6643e8c1004c5f6723e9f454003'})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_with_lower_version(self, pool_data,
mock_get_by_name,
mock_get_info, mock_rename,
mock_get_hyper_pairs):
self.driver.support_func = pool_data
mock_get_hyper_pairs.side_effect = (
exception.VolumeBackendAPIException(data='err'))
external_ref = {'source-name': 'LUN1'}
model_update = self.driver.manage_existing(self.volume,
external_ref)
expected_val = {
'admin_metadata': {
'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
},
'provider_location': 'ID1'}
self.assertEqual(expected_val, model_update)
@ddt.data([[{'PRILUNID': 'ID1'}], []],
[[{'PRILUNID': 'ID2'}], ['ID1', 'ID2']])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_with_splitmirror(self, ddt_data,
mock_get_by_name,
mock_get_info):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
with mock.patch.object(rest_client.RestClient, 'get_split_mirrors',
return_value=ddt_data[0]), \
mock.patch.object(rest_client.RestClient, 'get_target_luns',
return_value=ddt_data[1]):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('SplitMirror', ex.msg))
@ddt.data([[{'PARENTID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
[[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_UNSUPPORT_REPORT],
[[{'PARENTID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT],
[[{'TARGETLUNID': 'ID1'}], FAKE_POOLS_SUPPORT_REPORT])
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
@ddt.unpack
def test_manage_existing_under_migration(self, ddt_data, pool_data,
mock_get_by_name,
mock_get_info):
self.driver.support_func = pool_data
with mock.patch.object(rest_client.RestClient, 'get_migration_task',
return_value=ddt_data):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('migration', ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ID': 'ID1',
'PARENTNAME': 'OpenStack_Pool',
'SNAPSHOTIDS': [],
'ISADD2LUNGROUP': 'true',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
def test_manage_existing_with_lungroup(self, mock_get_by_name,
mock_get_info):
external_ref = {'source-name': 'LUN1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing,
self.volume, external_ref)
self.assertIsNotNone(re.search('Already exists in a LUN group',
ex.msg))
@ddt.data([{'source-name': 'LUN1'}, FAKE_POOLS_UNSUPPORT_REPORT],
[{'source-name': 'LUN1'}, FAKE_POOLS_SUPPORT_REPORT],
[{'source-id': 'ID1'}, FAKE_POOLS_UNSUPPORT_REPORT],
[{'source-id': 'ID1'}, FAKE_POOLS_SUPPORT_REPORT])
@mock.patch.object(rest_client.RestClient, 'rename_lun')
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_lun_info_by_ref',
return_value={'PARENTNAME': 'OpenStack_Pool',
'SNAPSHOTIDS': [],
'ID': 'ID1',
'HEALTHSTATUS': constants.STATUS_HEALTH,
'WWN': '6643e8c1004c5f6723e9f454003'})
@mock.patch.object(rest_client.RestClient, 'get_lun_info',
return_value={'CAPACITY': 2097152,
'ALLOCTYPE': 1})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value='ID1')
@ddt.unpack
    def test_manage_existing_success(self, external_ref, pool_data,
                                     mock_get_by_name, mock_get_info,
                                     mock_check_lun, mock_rename):
self.driver.support_func = pool_data
model_update = self.driver.manage_existing(self.volume,
external_ref)
expected_val = {
'admin_metadata': {
'huawei_lun_wwn': '6643e8c1004c5f6723e9f454003'
},
'provider_location': 'ID1'}
self.assertEqual(expected_val, model_update)
def test_unmanage(self):
self.driver.unmanage(self.volume)
def test_manage_existing_snapshot_abnormal(self):
with mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_snapshot_info_by_ref',
return_value={'HEALTHSTATUS': '2',
'PARENTID': '11'}):
external_ref = {'source-name': 'test1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_snapshot,
self.snapshot, external_ref)
self.assertIsNotNone(re.search('Snapshot status is not normal',
ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
return_value={'ID': 'ID1',
'EXPOSEDTOINITIATOR': 'true',
'NAME': 'test1',
'PARENTID': '11',
'USERCAPACITY': 2097152,
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value='ID1')
def test_manage_existing_snapshot_with_lungroup(self, mock_get_by_name,
mock_get_info):
external_ref = {'source-name': 'test1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_snapshot,
self.snapshot, external_ref)
self.assertIsNotNone(re.search('Snapshot is exposed to initiator',
ex.msg))
@mock.patch.object(rest_client.RestClient, 'rename_snapshot')
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_snapshot_info_by_ref',
return_value={'ID': 'ID1',
'EXPOSEDTOINITIATOR': 'false',
'NAME': 'test1',
'PARENTID': '11',
'USERCAPACITY': 2097152,
'HEALTHSTATUS': constants.STATUS_HEALTH})
def test_manage_existing_snapshot_success(self, mock_get_info,
mock_rename):
external_ref = {'source-name': 'test1'}
model_update = self.driver.manage_existing_snapshot(self.snapshot,
external_ref)
self.assertEqual({'provider_location': 'ID1'}, model_update)
external_ref = {'source-id': 'ID1'}
model_update = self.driver.manage_existing_snapshot(self.snapshot,
external_ref)
self.assertEqual({'provider_location': 'ID1'}, model_update)
@mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
return_value={'ID': 'ID1',
'EXPOSEDTOINITIATOR': 'false',
'NAME': 'test1',
'USERCAPACITY': 2097152,
'PARENTID': '12',
'HEALTHSTATUS': constants.STATUS_HEALTH})
@mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value='ID1')
def test_manage_existing_snapshot_mismatch_lun(self, mock_get_by_name,
mock_get_info):
external_ref = {'source-name': 'test1'}
ex = self.assertRaises(exception.ManageExistingInvalidReference,
self.driver.manage_existing_snapshot,
self.snapshot, external_ref)
self.assertIsNotNone(re.search("Snapshot doesn't belong to volume",
ex.msg))
@mock.patch.object(rest_client.RestClient, 'get_snapshot_info',
return_value={'USERCAPACITY': 3097152})
@mock.patch.object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value='ID1')
def test_manage_existing_snapshot_get_size_success(self,
mock_get_id_by_name,
mock_get_info):
external_ref = {'source-name': 'test1',
'source-id': 'ID1'}
size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
external_ref)
self.assertEqual(2, size)
external_ref = {'source-name': 'test1'}
size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
external_ref)
self.assertEqual(2, size)
external_ref = {'source-id': 'ID1'}
size = self.driver.manage_existing_snapshot_get_size(self.snapshot,
external_ref)
self.assertEqual(2, size)
def test_unmanage_snapshot(self):
self.driver.unmanage_snapshot(self.snapshot)
@ddt.data(sync_replica_specs, async_replica_specs)
def test_create_replication_success(self, mock_type):
self.mock_object(replication.ReplicaCommonDriver, 'sync')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': mock_type})
model_update = self.driver.create_volume(self.replica_volume)
driver_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': '1'}
driver_data = replication.to_string(driver_data)
self.assertEqual(driver_data, model_update['replication_driver_data'])
self.assertEqual('available', model_update['replication_status'])
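    # Each ddt entry below patches one step of replica pair creation to fail
    # (array info, remote devices, volume-online wait, pair creation or the
    # initial sync) and expects create_volume to surface the error.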
@ddt.data(
[
rest_client.RestClient,
'get_array_info',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(return_value={}),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'wait_volume_online',
mock.Mock(side_effect=[
None,
exception.VolumeBackendAPIException(data='err')]),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'create_pair',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaCommonDriver,
'sync',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
rest_client.RestClient,
'get_array_info',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
[
rest_client.RestClient,
'get_remote_devices',
mock.Mock(return_value={}),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'wait_volume_online',
mock.Mock(side_effect=[
None,
exception.VolumeBackendAPIException(data='err')]),
FAKE_POOLS_SUPPORT_REPORT
],
[
rest_client.RestClient,
'create_pair',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaCommonDriver,
'sync',
mock.Mock(
side_effect=exception.VolumeBackendAPIException(data='err')),
FAKE_POOLS_SUPPORT_REPORT
],
)
@ddt.unpack
def test_create_replication_fail(self, mock_module, mock_func,
mock_value, pool_data):
self.driver.support_func = pool_data
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.mock_object(replication.ReplicaPairManager, '_delete_pair')
self.mock_object(mock_module, mock_func, mock_value)
self.assertRaises(
exception.VolumeBackendAPIException,
self.driver.create_volume, self.replica_volume)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_replication_success(self, pool_data):
self.driver.support_func = pool_data
self.mock_object(replication.ReplicaCommonDriver, 'split')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
self.driver.delete_volume(self.replica_volume)
self.mock_object(rest_client.RestClient, 'check_lun_exist',
return_value=False)
self.driver.delete_volume(self.replica_volume)
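    # wait_volume_online presumably polls get_lun_info until RUNNINGSTATUS
    # leaves the offline value ('28' in this fixture); the skipped test
    # below patches an offline status in to exercise the failure path.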
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_volume_online(self):
replica = FakeReplicaPairManager(self.driver.client,
self.driver.replica_client,
self.configuration)
lun_info = {'ID': '11'}
replica.wait_volume_online(self.driver.client, lun_info)
offline_status = {'RUNNINGSTATUS': '28'}
replica.wait_volume_online(self.driver.client, lun_info)
        with mock.patch.object(rest_client.RestClient, 'get_lun_info',
                               return_value=offline_status):
self.assertRaises(exception.VolumeBackendAPIException,
replica.wait_volume_online,
self.driver.client,
lun_info)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_second_access(self):
pair_id = '1'
access_ro = constants.REPLICA_SECOND_RO
access_rw = constants.REPLICA_SECOND_RW
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.mock_object(replication.PairOp, 'get_replica_info',
return_value={'SECRESACCESS': access_ro})
self.mock_object(huawei_utils.time, 'time',
side_effect=utils.generate_timeout_series(
constants.DEFAULT_REPLICA_WAIT_TIMEOUT))
common_driver.wait_second_access(pair_id, access_ro)
self.assertRaises(exception.VolumeBackendAPIException,
common_driver.wait_second_access, pair_id, access_rw)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_replica_ready(self):
normal_status = {
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
split_status = {
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SPLIT,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
sync_status = {
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_SYNC,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
pair_id = '1'
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
with mock.patch.object(replication.PairOp, 'get_replica_info',
return_value=normal_status):
common_driver.wait_replica_ready(pair_id)
with mock.patch.object(
replication.PairOp,
'get_replica_info',
side_effect=[sync_status, normal_status]):
common_driver.wait_replica_ready(pair_id)
with mock.patch.object(replication.PairOp, 'get_replica_info',
return_value=split_status):
self.assertRaises(exception.VolumeBackendAPIException,
common_driver.wait_replica_ready, pair_id)
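    # failover_host contract exercised by the tests below: it returns
    # (secondary_id, volumes_update) and, on a real failover, swaps
    # driver.client with driver.replica_client; failing over to the
    # already-active backend is expected to be a no-op.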
def test_failover_to_current(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.client)
self.assertEqual(old_replica_client, driver.replica_client)
self.assertEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(0, len(volumes_update))
def test_failover_normal_volumes(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], REPLICA_BACKEND_ID)
self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.volume.id, v_id)
self.assertEqual('error', v_update['status'])
self.assertEqual(self.volume['status'],
v_update['metadata']['old_status'])
def test_failback_to_current(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], REPLICA_BACKEND_ID)
self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
self.assertEqual(old_client, driver.client)
self.assertEqual(old_replica_client, driver.replica_client)
self.assertEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(0, len(volumes_update))
def test_failback_normal_volumes(self):
self.volume.status = 'error'
self.volume.metadata = {'old_status': 'available'}
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [self.volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.volume.id, v_id)
self.assertEqual('available', v_update['status'])
self.assertNotIn('old_status', v_update['metadata'])
def test_failover_replica_volumes(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
self.mock_object(replication.ReplicaCommonDriver, 'failover')
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
secondary_id, volumes_update = driver.failover_host(
None, [self.replica_volume], REPLICA_BACKEND_ID)
self.assertEqual(REPLICA_BACKEND_ID, driver.active_backend_id)
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.replica_volume.id, v_id)
self.assertEqual('1', v_update['provider_location'])
self.assertEqual('failed-over', v_update['replication_status'])
new_drv_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': self.replica_volume.provider_location}
new_drv_data = replication.to_string(new_drv_data)
self.assertEqual(new_drv_data, v_update['replication_driver_data'])
@ddt.data({}, {'pair_id': TEST_PAIR_ID})
def test_failover_replica_volumes_invalid_drv_data(self, mock_drv_data):
volume = self.replica_volume
volume['replication_driver_data'] = replication.to_string(
mock_drv_data)
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
secondary_id, volumes_update = driver.failover_host(
None, [volume], REPLICA_BACKEND_ID)
self.assertEqual(driver.active_backend_id, REPLICA_BACKEND_ID)
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual(REPLICA_BACKEND_ID, secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(volume.id, v_id)
self.assertEqual('error', v_update['replication_status'])
def test_failback_replica_volumes(self):
self.mock_object(replication.ReplicaCommonDriver, 'enable')
self.mock_object(replication.ReplicaCommonDriver, 'wait_replica_ready')
self.mock_object(replication.ReplicaCommonDriver, 'failover')
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
volume = self.replica_volume
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.replica_volume.id, v_id)
self.assertEqual('1', v_update['provider_location'])
self.assertEqual('available', v_update['replication_status'])
new_drv_data = {'pair_id': TEST_PAIR_ID,
'rmt_lun_id': self.replica_volume.provider_location}
new_drv_data = replication.to_string(new_drv_data)
self.assertEqual(new_drv_data, v_update['replication_driver_data'])
@ddt.data({}, {'pair_id': TEST_PAIR_ID})
def test_failback_replica_volumes_invalid_drv_data(self, mock_drv_data):
self.mock_object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value={'replication_enabled': 'true'})
volume = self.replica_volume
volume['replication_driver_data'] = replication.to_string(
mock_drv_data)
driver = FakeISCSIStorage(configuration=self.configuration)
driver.active_backend_id = REPLICA_BACKEND_ID
driver.do_setup()
old_client = driver.client
old_replica_client = driver.replica_client
old_replica = driver.replica
secondary_id, volumes_update = driver.failover_host(
None, [volume], 'default')
self.assertIn(driver.active_backend_id, ('', None))
self.assertEqual(old_client, driver.replica_client)
self.assertEqual(old_replica_client, driver.client)
self.assertNotEqual(old_replica, driver.replica)
self.assertEqual('default', secondary_id)
self.assertEqual(1, len(volumes_update))
v_id = volumes_update[0]['volume_id']
v_update = volumes_update[0]['updates']
self.assertEqual(self.replica_volume.id, v_id)
self.assertEqual('error', v_update['replication_status'])
@unittest.skip("Skip until bug #1578986 is fixed")
@mock.patch('oslo_service.loopingcall.FixedIntervalLoopingCall',
new=utils.ZeroIntervalLoopingCall)
@mock.patch.object(replication.PairOp, 'is_primary',
side_effect=[False, True])
@mock.patch.object(replication.ReplicaCommonDriver, 'split')
@mock.patch.object(replication.ReplicaCommonDriver, 'unprotect_second')
def test_replication_driver_enable_success(self,
mock_unprotect,
mock_split,
mock_is_primary):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
common_driver.enable(replica_id)
self.assertTrue(mock_unprotect.called)
self.assertTrue(mock_split.called)
self.assertTrue(mock_is_primary.called)
@mock.patch.object(replication.PairOp, 'is_primary', return_value=False)
@mock.patch.object(replication.ReplicaCommonDriver, 'split')
def test_replication_driver_failover_success(self,
mock_split,
mock_is_primary):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
common_driver.failover(replica_id)
self.assertTrue(mock_split.called)
self.assertTrue(mock_is_primary.called)
@mock.patch.object(replication.PairOp, 'is_primary', return_value=True)
def test_replication_driver_failover_fail(self, mock_is_primary):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.assertRaises(
exception.VolumeBackendAPIException,
common_driver.failover,
replica_id)
@ddt.data(constants.REPLICA_SECOND_RW, constants.REPLICA_SECOND_RO)
def test_replication_driver_protect_second(self, mock_access):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.mock_object(replication.ReplicaCommonDriver, 'wait_second_access')
self.mock_object(
replication.PairOp,
'get_replica_info',
return_value={'SECRESACCESS': mock_access})
common_driver.protect_second(replica_id)
common_driver.unprotect_second(replica_id)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_replication_driver_sync(self):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
async_normal_status = {
'REPLICATIONMODEL': constants.REPLICA_ASYNC_MODEL,
'RUNNINGSTATUS': constants.REPLICA_RUNNING_STATUS_NORMAL,
'HEALTHSTATUS': constants.REPLICA_HEALTH_STATUS_NORMAL
}
self.mock_object(replication.ReplicaCommonDriver, 'protect_second')
self.mock_object(replication.PairOp, 'get_replica_info',
return_value=async_normal_status)
common_driver.sync(replica_id, True)
common_driver.sync(replica_id, False)
def test_replication_driver_split(self):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
self.mock_object(replication.ReplicaCommonDriver, 'wait_expect_state')
self.mock_object(
replication.PairOp, 'split',
side_effect=exception.VolumeBackendAPIException(data='err'))
common_driver.split(replica_id)
@mock.patch.object(replication.PairOp, 'split')
@ddt.data(constants.REPLICA_RUNNING_STATUS_SPLIT,
constants.REPLICA_RUNNING_STATUS_INVALID,
constants.REPLICA_RUNNING_STATUS_ERRUPTED)
def test_replication_driver_split_already_disabled(self, mock_status,
mock_op_split):
replica_id = TEST_PAIR_ID
op = replication.PairOp(self.driver.client)
common_driver = replication.ReplicaCommonDriver(self.configuration, op)
pair_info = json.loads(FAKE_GET_PAIR_NORMAL_RESPONSE)['data']
pair_info['RUNNINGSTATUS'] = mock_status
self.mock_object(rest_client.RestClient, 'get_pair_by_id',
return_value=pair_info)
common_driver.split(replica_id)
self.assertFalse(mock_op_split.called)
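    # AbsReplicaOp is the no-op abstract base for replica operations;
    # invoking each method once below simply keeps the interface covered.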
def test_replication_base_op(self):
replica_id = '1'
op = replication.AbsReplicaOp(None)
op.create()
op.delete(replica_id)
op.protect_second(replica_id)
op.unprotect_second(replica_id)
op.sync(replica_id)
op.split(replica_id)
op.switch(replica_id)
op.is_primary({})
op.get_replica_info(replica_id)
        op._is_status(None, {'key': 'value'}, None)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"error": {"code": 0}})
def test_get_tgt_port_group_no_portg_exist(self, mock_call):
portg = self.driver.client.get_tgt_port_group('test_portg')
self.assertIsNone(portg)
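    # The two tests below verify that _get_tgt_iqn_from_rest matches the
    # target IP exactly: a prefix such as '192.0.2.19' must not be taken
    # for '192.0.2.191' or '192.0.2.192'.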
def test_get_tgt_iqn_from_rest_match(self):
match_res = {
'data': [{
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.19,t,0x01'
}, {
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 111.111.111.191,t,0x01'
}],
'error': {
'code': 0
}
}
ip = '111.111.111.19'
expected_iqn = 'iqn.2006-08.com: 210048cee9d: 111.111.111.19'
self.mock_object(rest_client.RestClient, 'call',
return_value=match_res)
iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
self.assertEqual(expected_iqn, iqn)
def test_get_tgt_iqn_from_rest_mismatch(self):
match_res = {
'data': [{
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.191,t,0x01'
}, {
'TYPE': 249,
'ID': '0+iqn.2006-08.com: 210048cee9d: 192.0.2.192,t,0x01'
}],
'error': {
'code': 0
}
}
ip = '192.0.2.19'
self.mock_object(rest_client.RestClient, 'call',
return_value=match_res)
iqn = self.driver.client._get_tgt_iqn_from_rest(ip)
self.assertIsNone(iqn)
def test_create_cgsnapshot(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
model, snapshots = self.driver.create_cgsnapshot(ctxt,
self.cgsnapshot,
test_snapshots)
snapshots_model_update = [{'id': '21ec7341-9256-497b-97d9'
'-ef48edcf0635',
'status': 'available',
'provider_location': 11}]
self.assertEqual(snapshots_model_update, snapshots)
self.assertEqual('available', model['status'])
def test_create_cgsnapshot_create_snapshot_fail(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
self.mock_object(rest_client.RestClient, 'create_snapshot',
side_effect=(
exception.VolumeBackendAPIException(data='err')))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cgsnapshot,
ctxt,
self.cgsnapshot,
test_snapshots)
def test_create_cgsnapshot_active_snapshot_fail(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
self.mock_object(rest_client.RestClient, 'activate_snapshot',
side_effect=(
exception.VolumeBackendAPIException(data='err')))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_cgsnapshot,
ctxt,
self.cgsnapshot,
test_snapshots)
def test_delete_cgsnapshot(self):
test_snapshots = [self.snapshot]
ctxt = context.get_admin_context()
self.driver.delete_cgsnapshot(ctxt,
self.cgsnapshot,
test_snapshots)
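# Minimal stand-in for an FC SAN lookup service (e.g. a Brocade/Cisco zone
# manager); it simply returns the module-level fake_fabric_mapping fixture.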
class FCSanLookupService(object):
def get_device_mapping_from_network(self, initiator_list,
target_list):
return fake_fabric_mapping
@ddt.ddt
class HuaweiFCDriverTestCase(HuaweiTestBase):
def setUp(self):
super(HuaweiFCDriverTestCase, self).setUp()
self.configuration = mock.Mock(spec=conf.Configuration)
self.flags(rpc_backend='oslo_messaging._drivers.impl_fake')
self.huawei_conf = FakeHuaweiConf(self.configuration, 'FC')
self.configuration.hypermetro_devices = hypermetro_devices
driver = FakeFCStorage(configuration=self.configuration)
self.driver = driver
self.driver.do_setup()
self.driver.client.login()
def test_login_success(self):
device_id = self.driver.client.login()
self.assertEqual('210235G7J20000000000', device_id)
def test_create_volume_success(self):
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_delete_volume_success(self, pool_data):
self.driver.support_func = pool_data
self.driver.delete_volume(self.volume)
def test_delete_snapshot_success(self):
self.driver.delete_snapshot(self.snapshot)
@unittest.skip("Skip until bug #1578986 is fixed")
    def test_create_volume_from_snapshot_success(self):
lun_info = self.driver.create_volume_from_snapshot(self.volume,
self.volume)
self.assertEqual('1', lun_info['provider_location'])
@mock.patch.object(huawei_driver.HuaweiFCDriver,
'initialize_connection',
return_value={"data": {'target_lun': 1}})
def test_initialize_connection_snapshot_success(self, mock_fc_init):
iscsi_properties = self.driver.initialize_connection_snapshot(
self.snapshot, FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
mock_fc_init.assert_called_with(volume, FakeConnector)
def test_initialize_connection_success(self):
iscsi_properties = self.driver.initialize_connection(self.volume,
FakeConnector)
self.assertEqual(1, iscsi_properties['data']['target_lun'])
def test_initialize_connection_fail_no_online_wwns_in_host(self):
self.mock_object(rest_client.RestClient, 'get_online_free_wwns',
return_value=[])
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_initialize_connection_no_local_ini_tgt_map(self):
self.mock_object(rest_client.RestClient, 'get_init_targ_map',
return_value=('', ''))
self.mock_object(huawei_driver.HuaweiFCDriver, '_get_same_hostid',
return_value='')
self.mock_object(rest_client.RestClient, 'change_hostlun_id',
return_value=None)
self.mock_object(rest_client.RestClient, 'do_mapping',
return_value={'lun_id': '1',
'view_id': '1',
'aval_luns': '[1]'})
self.driver.initialize_connection(self.hyper_volume, FakeConnector)
def test_hypermetro_connection_success(self):
self.mock_object(rest_client.RestClient, 'find_array_version',
return_value='V300R003C00')
fc_properties = self.driver.initialize_connection(self.hyper_volume,
FakeConnector)
self.assertEqual(1, fc_properties['data']['target_lun'])
@mock.patch.object(huawei_driver.HuaweiFCDriver,
'terminate_connection')
def test_terminate_connection_snapshot_success(self, mock_fc_term):
self.driver.terminate_connection_snapshot(self.snapshot,
FakeConnector)
volume = Volume(id=self.snapshot.id,
provider_location=self.snapshot.provider_location,
lun_type='27',
metadata=None)
mock_fc_term.assert_called_with(volume, FakeConnector)
def test_terminate_connection_success(self):
self.driver.client.terminateFlag = True
self.driver.terminate_connection(self.volume, FakeConnector)
self.assertTrue(self.driver.client.terminateFlag)
def test_terminate_connection_portgroup_associated(self):
self.mock_object(rest_client.RestClient,
'is_portgroup_associated_to_view',
return_value=True)
self.mock_object(huawei_driver.HuaweiFCDriver,
'_delete_zone_and_remove_fc_initiators',
return_value=({}, 1))
self.driver.terminate_connection(self.volume, FakeConnector)
def test_terminate_connection_fc_initiators_exist_in_host(self):
self.mock_object(rest_client.RestClient,
'check_fc_initiators_exist_in_host',
return_value=True)
self.driver.terminate_connection(self.volume, FakeConnector)
def test_terminate_connection_hypermetro_in_metadata(self):
self.driver.terminate_connection(self.hyper_volume, FakeConnector)
def test_get_volume_status(self):
remote_device_info = {"ARRAYTYPE": "1",
"HEALTHSTATUS": "1",
"RUNNINGSTATUS": "10"}
self.mock_object(
replication.ReplicaPairManager,
'get_remote_device_by_wwn',
return_value=remote_device_info)
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
self.assertTrue(data['pools'][0]['replication_enabled'])
self.assertListEqual(['sync', 'async'],
data['pools'][0]['replication_type'])
self.mock_object(
replication.ReplicaPairManager,
'get_remote_device_by_wwn',
return_value={})
data = self.driver.get_volume_stats()
self.assertNotIn('replication_enabled', data['pools'][0])
self.mock_object(
replication.ReplicaPairManager,
'try_get_remote_wwn',
return_value={})
data = self.driver.get_volume_stats()
self.assertEqual(self.driver.VERSION, data['driver_version'])
self.assertNotIn('replication_enabled', data['pools'][0])
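    # The ddt entries below map pool tier capacities to the reported
    # disk_type: only TIER0 -> 'ssd', only TIER1 -> 'sas',
    # only TIER2 -> 'nl_sas', several tiers -> 'mix', all zero -> none.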
@ddt.data({'TIER0CAPACITY': '100',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0',
'disktype': 'ssd'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '100',
'TIER2CAPACITY': '0',
'disktype': 'sas'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '100',
'disktype': 'nl_sas'},
{'TIER0CAPACITY': '100',
'TIER1CAPACITY': '100',
'TIER2CAPACITY': '100',
'disktype': 'mix'},
{'TIER0CAPACITY': '0',
'TIER1CAPACITY': '0',
'TIER2CAPACITY': '0',
'disktype': ''})
def test_get_volume_disk_type(self, disk_type_value):
response_dict = json.loads(FAKE_STORAGE_POOL_RESPONSE)
storage_pool_sas = copy.deepcopy(response_dict)
storage_pool_sas['data'][0]['TIER0CAPACITY'] = (
disk_type_value['TIER0CAPACITY'])
storage_pool_sas['data'][0]['TIER1CAPACITY'] = (
disk_type_value['TIER1CAPACITY'])
storage_pool_sas['data'][0]['TIER2CAPACITY'] = (
disk_type_value['TIER2CAPACITY'])
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.replica = None
self.mock_object(rest_client.RestClient, 'get_all_pools',
return_value=storage_pool_sas['data'])
data = driver.get_volume_stats()
if disk_type_value['disktype']:
self.assertEqual(disk_type_value['disktype'],
data['pools'][0]['disk_type'])
else:
self.assertIsNone(data['pools'][0].get('disk_type'))
def test_get_disk_type_pool_info_none(self):
driver = FakeISCSIStorage(configuration=self.configuration)
driver.do_setup()
driver.replica = None
self.mock_object(rest_client.RestClient, 'get_pool_info',
return_value=None)
data = driver.get_volume_stats()
self.assertIsNone(data['pools'][0].get('disk_type'))
def test_extend_volume(self):
self.driver.extend_volume(self.volume, 3)
def test_login_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.login)
def test_create_snapshot_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot, self.snapshot)
def test_create_volume_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume, self.volume)
def test_delete_volume_fail(self):
self.driver.client.test_fail = True
self.driver.delete_volume(self.volume)
def test_delete_snapshot_fail(self):
self.driver.client.test_fail = True
self.driver.delete_snapshot(self.snapshot)
def test_initialize_connection_fail(self):
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
self.volume, FakeConnector)
def test_lun_is_associated_to_lungroup(self):
self.driver.client.associate_lun_to_lungroup('11', '11')
result = self.driver.client._is_lun_associated_to_lungroup('11',
'11')
self.assertTrue(result)
def test_lun_is_not_associated_to_lun_group(self):
self.driver.client.associate_lun_to_lungroup('12', '12')
self.driver.client.remove_lun_from_lungroup('12', '12')
result = self.driver.client._is_lun_associated_to_lungroup('12',
'12')
self.assertFalse(result)
@unittest.skip("Skip until bug #1578986 is fixed")
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client, 'RestClient')
def test_migrate_volume_success(self, mock_add_lun_to_partition,
pool_data):
# Migrate volume without new type.
empty_dict = {}
self.driver.support_func = pool_data
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
None)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
# Migrate volume with new type.
empty_dict = {}
new_type = {'extra_specs':
{'smarttier': '<is> true',
'smartcache': '<is> true',
'smartpartition': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'smartpartition:partitionname': 'partition-test'}}
moved, model_update = self.driver.migrate_volume(None,
self.volume,
test_host,
new_type)
self.assertTrue(moved)
self.assertEqual(empty_dict, model_update)
def test_migrate_volume_fail(self):
self.driver.client.test_fail = True
# Migrate volume without new type.
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume, None,
self.volume, test_host, None)
# Migrate volume with new type.
new_type = {'extra_specs':
{'smarttier': '<is> true',
'smartcache': '<is> true',
'thin_provisioning_support': '<is> true',
'thick_provisioning_support': '<is> False',
'policy': '2',
'smartcache:cachename': 'cache-test',
'partitionname': 'partition-test'}}
self.driver.client.test_fail = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.migrate_volume, None,
self.volume, test_host, new_type)
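    # Cinder host strings follow the 'host@backend#pool' convention; each
    # invalid host below drops or mismatches one of pool_name,
    # location_info or storage_protocol to hit a distinct rejection branch
    # of _check_migration_valid.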
def test_check_migration_valid(self):
is_valid = self.driver._check_migration_valid(test_host,
self.volume)
self.assertTrue(is_valid)
# No pool_name in capabilities.
        invalid_host1 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000000',
'allocated_capacity_gb': 0,
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host1,
self.volume)
self.assertFalse(is_valid)
# location_info in capabilities is not matched.
        invalid_host2 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'FC'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
self.volume)
self.assertFalse(is_valid)
# storage_protocol is not match current protocol and volume status is
# 'in-use'.
volume_in_use = {'name': 'volume-21ec7341-9256-497b-97d9-ef48edcf0635',
'size': 2,
'volume_name': 'vol1',
'id': ID,
'volume_id': '21ec7341-9256-497b-97d9-ef48edcf0635',
'volume_attachment': 'in-use',
'provider_location': '11'}
        invalid_host2 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': 'OpenStack_Pool',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host2,
volume_in_use)
self.assertFalse(is_valid)
# pool_name is empty.
        invalid_host3 = {'host': 'ubuntu001@backend002#OpenStack_Pool',
'capabilities':
{'location_info': '210235G7J20000000001',
'allocated_capacity_gb': 0,
'pool_name': '',
'volume_backend_name': 'HuaweiFCDriver',
'storage_protocol': 'iSCSI'}}
is_valid = self.driver._check_migration_valid(invalid_host3,
self.volume)
self.assertFalse(is_valid)
@mock.patch.object(rest_client.RestClient, 'rename_lun')
def test_update_migrated_volume_success(self, mock_rename_lun):
model_update = self.driver.update_migrated_volume(None,
self.original_volume,
self.current_volume,
'available')
self.assertEqual({'_name_id': None}, model_update)
@mock.patch.object(rest_client.RestClient, 'rename_lun')
def test_update_migrated_volume_fail(self, mock_rename_lun):
mock_rename_lun.side_effect = exception.VolumeBackendAPIException(
data='Error occurred.')
model_update = self.driver.update_migrated_volume(None,
self.original_volume,
self.current_volume,
'available')
self.assertEqual(self.current_volume.name_id,
model_update['_name_id'])
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
def test_retype_volume_success(self, mock_add_lun_to_partition):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
retype = self.driver.retype(None, self.volume,
test_new_type, None, test_host)
self.assertTrue(retype)
@unittest.skip("Skip until bug #1578986 is fixed")
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(rest_client, 'RestClient')
@mock.patch.object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
def test_retype_replication_volume_success(self, mock_get_type,
mock_add_lun_to_partition,
pool_data):
self.driver.support_func = pool_data
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertTrue(retype)
@ddt.data(
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'create_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_SUPPORT_REPORT
],
[
replication.ReplicaPairManager,
'delete_replica',
exception.VolumeBackendAPIException(
data='Can\'t support smarttier on the array.'),
FAKE_POOLS_UNSUPPORT_REPORT
],
)
@ddt.unpack
def test_retype_replication_volume_fail(self,
mock_module,
mock_func,
side_effect,
pool_data):
self.driver.support_func = pool_data
self.mock_object(mock_module, mock_func, side_effect=side_effect)
self.mock_object(rest_client.RestClient, 'add_lun_to_partition')
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': sync_replica_specs})
retype = self.driver.retype(None, self.volume,
test_new_replication_type, None, test_host)
self.assertFalse(retype)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_retype_volume_cache_fail(self, pool_data):
self.driver.client.cache_not_exist = True
self.driver.support_func = pool_data
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.retype, None,
self.volume, test_new_type, None, test_host)
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
def test_retype_volume_partition_fail(self, pool_data):
self.driver.support_func = pool_data
self.driver.client.partition_not_exist = True
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.retype, None,
self.volume, test_new_type, None, test_host)
@mock.patch.object(rest_client.RestClient, 'add_lun_to_partition')
def test_retype_volume_fail(self, mock_add_lun_to_partition):
self.driver.support_func = FAKE_POOLS_SUPPORT_REPORT
mock_add_lun_to_partition.side_effect = (
exception.VolumeBackendAPIException(data='Error occurred.'))
retype = self.driver.retype(None, self.volume,
test_new_type, None, test_host)
self.assertFalse(retype)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A","0B"]', 'ID': '0'}])
    def test_build_ini_targ_map_engine_recorded(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
target_port_wwns = ['2000643e8c4c5f66']
self.assertEqual(target_port_wwns, tgt_wwns)
self.assertEqual({}, init_targ_map)
@ddt.data(fake_fabric_mapping_no_ports, fake_fabric_mapping_no_wwn)
def test_filter_by_fabric_fail(self, ddt_map):
self.mock_object(
FCSanLookupService, 'get_device_mapping_from_network',
return_value=ddt_map)
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
self.assertRaises(exception.VolumeBackendAPIException,
zone_helper._filter_by_fabric, ['10000090fa0d6754'],
None)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A"]', 'ID': '0'},
{'NODELIST': '["0B"]', 'ID': '1'}])
@mock.patch.object(fc_zone_helper.FCZoneHelper, '_build_contr_port_map',
return_value={'0B': ['2000643e8c4c5f67']})
    def test_build_ini_targ_map_engine_not_recorded(self, mock_engines,
                                                    mock_map):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '1', '11')
expected_wwns = ['2000643e8c4c5f67', '2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': expected_wwns}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
def test_build_ini_targ_map_no_map(self, mock_engines):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
@mock.patch.object(rest_client.RestClient, 'get_all_engines',
return_value=[{'NODELIST': '["0A", "0B"]', 'ID': '0'}])
@mock.patch.object(rest_client.RestClient, 'get_tgt_port_group',
return_value='0')
@mock.patch.object(rest_client.RestClient, 'delete_portgroup')
def test_build_ini_targ_map_exist_portg(self, delete, engines, portg):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
# Host with id '5' has no map on the array.
(tgt_wwns, portg_id, init_targ_map) = zone_helper.build_ini_targ_map(
['10000090fa0d6754'], '5', '11')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
self.assertEqual(1, delete.call_count)
def test_get_init_targ_map(self):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
(tgt_wwns, portg_id, init_targ_map) = zone_helper.get_init_targ_map(
['10000090fa0d6754'], '1')
expected_wwns = ['2000643e8c4c5f66']
expected_map = {'10000090fa0d6754': ['2000643e8c4c5f66']}
self.assertEqual(expected_wwns, tgt_wwns)
self.assertEqual(expected_map, init_targ_map)
def test_get_init_targ_map_no_host(self):
fake_lookup_service = FCSanLookupService()
zone_helper = fc_zone_helper.FCZoneHelper(
fake_lookup_service, self.driver.client)
ret = zone_helper.get_init_targ_map(
['10000090fa0d6754'], None)
expected_ret = ([], None, {})
self.assertEqual(expected_ret, ret)
def test_multi_resturls_success(self):
self.driver.client.test_multi_url_flag = True
lun_info = self.driver.create_volume(self.volume)
self.assertEqual('1', lun_info['provider_location'])
def test_get_id_from_result(self):
result = {}
name = 'test_name'
key = 'NAME'
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': {}}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'COUNT': 1, 'ID': '1'},
{'COUNT': 2, 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'NAME': 'test_name1', 'ID': '1'},
{'NAME': 'test_name2', 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertIsNone(re)
result = {'data': [{'NAME': 'test_name', 'ID': '1'},
{'NAME': 'test_name2', 'ID': '2'}]}
re = self.driver.client._get_id_from_result(result, name, key)
self.assertEqual('1', re)
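    # Capacity arithmetic behind the expected values below (the mocked
    # figures are apparently 512-byte sectors): 209715200 * 512 B = 100 GiB
    # total and 110362624 * 512 B = 52.625 GiB free.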
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value={'ID': 1,
'CAPACITY': 110362624,
'TOTALCAPACITY': 209715200})
def test_get_capacity(self, mock_get_pool_info):
expected_pool_capacity = {'total_capacity': 100.0,
'free_capacity': 52.625}
pool_capacity = self.driver.client._get_capacity(None,
None)
self.assertEqual(expected_pool_capacity, pool_capacity)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(hypermetro.HuaweiHyperMetro,
'_create_hypermetro_pair',
return_value={"ID": '11',
"NAME": 'hypermetro-pair'})
@mock.patch.object(rest_client.RestClient, 'logout',
return_value=None)
def test_create_hypermetro_success(self, mock_hypermetro_opts,
mock_login_return,
mock_all_pool_info,
mock_pool_info,
mock_hyper_domain,
mock_volume_ready,
mock_logout):
metadata = {"hypermetro_id": '11',
"remote_lun_id": '1'}
lun_info = self.driver.create_volume(self.hyper_volume)
self.assertEqual(metadata, lun_info['metadata'])
@ddt.data(FAKE_POOLS_UNSUPPORT_REPORT, FAKE_POOLS_SUPPORT_REPORT)
@mock.patch.object(huawei_driver.HuaweiBaseDriver, '_get_volume_params',
return_value=fake_hypermetro_opts)
@mock.patch.object(rest_client.RestClient, 'get_all_pools',
return_value=FAKE_STORAGE_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_pool_info',
return_value=FAKE_FIND_POOL_RESPONSE)
@mock.patch.object(rest_client.RestClient, 'get_hyper_domain_id',
return_value='11')
@mock.patch.object(hypermetro.HuaweiHyperMetro, '_wait_volume_ready',
return_value=True)
@mock.patch.object(rest_client.RestClient, 'create_hypermetro')
def test_create_hypermetro_fail(self,
pool_data,
mock_pair_info,
mock_hypermetro_opts,
mock_all_pool_info,
mock_pool_info,
mock_hyper_domain,
mock_volume_ready
):
self.driver.support_func = pool_data
mock_pair_info.side_effect = (
exception.VolumeBackendAPIException(data='Error occurred.'))
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.create_hypermetro, "11", {})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'do_mapping',
return_value={'lun_id': '1',
'view_id': '1',
'aval_luns': '[1]'})
def test_hypermetro_connection_success_2(self, mock_map, mock_metadata):
fc_properties = self.driver.metro.connect_volume_fc(self.volume,
FakeConnector)
self.assertEqual(1, fc_properties['data']['target_lun'])
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
    def test_terminate_hypermetro_connection_success(self, mock_metadata):
self.driver.metro.disconnect_volume_fc(self.volume, FakeConnector)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': None})
@mock.patch.object(rest_client.RestClient, 'get_lun_id_by_name',
return_value=None)
    def test_hypermetro_id_none_fail(self, mock_metadata, mock_metro_name):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
@unittest.skip("Skip until bug #1578986 is fixed")
def test_wait_volume_ready_success(self):
flag = self.driver.metro._wait_volume_ready("11")
self.assertIsNone(flag)
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '1'})
@mock.patch.object(rest_client.RestClient, 'get_online_free_wwns',
return_value=[])
@mock.patch.object(rest_client.RestClient, 'get_host_iscsi_initiators',
return_value=[])
def test_hypermetro_connection_fail(self, mock_metadata,
mock_fc_initiator,
mock_host_initiators):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.metro.connect_volume_fc,
self.volume,
FakeConnector)
def test_create_snapshot_fail_hypermetro(self):
self.mock_object(
huawei_driver.HuaweiBaseDriver,
'_get_volume_type',
return_value={'extra_specs': replica_hypermetro_specs})
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.volume, self.snapshot)
def test_create_snapshot_fail_no_snapshot_id(self):
self.snapshot.provider_location = None
self.mock_object(rest_client.RestClient, 'get_snapshot_id_by_name',
return_value=None)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
self.volume, self.snapshot)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": [{"RUNNINGSTATUS": "27",
"ID": '1'},
{"RUNNINGSTATUS": "26",
"ID": '2'}],
"error": {"code": 0}})
def test_get_online_free_wwns(self, mock_call):
wwns = self.driver.client.get_online_free_wwns()
self.assertEqual(['1'], wwns)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {"ID": 1}, "error": {"code": 0}})
def test_rename_lun(self, mock_call):
des = 'This LUN is renamed.'
new_name = 'test_name'
self.driver.client.rename_lun('1', new_name, des)
self.assertEqual(1, mock_call.call_count)
url = "/lun/1"
data = {"NAME": new_name, "DESCRIPTION": des}
mock_call.assert_called_once_with(url, data, "PUT")
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {}})
def test_is_host_associated_to_hostgroup_no_data(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertFalse(res)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {'ISADD2HOSTGROUP': 'true'}})
def test_is_host_associated_to_hostgroup_true(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertTrue(res)
@mock.patch.object(rest_client.RestClient, 'call',
return_value={"data": {'ISADD2HOSTGROUP': 'false'}})
def test_is_host_associated_to_hostgroup_false(self, mock_call):
res = self.driver.client.is_host_associated_to_hostgroup('1')
self.assertFalse(res)
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_create_hypermetro_consistencygroup_success(self, mock_grouptype):
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
def test_create_normal_consistencygroup_success(self,
mock_grouptype):
ctxt = context.get_admin_context()
# Create consistency group
model_update = self.driver.create_consistencygroup(ctxt, self.cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
def test_delete_hypermetro_consistencygroup_success(self, mock_grouptype):
test_volumes = [self.volume]
ctxt = context.get_admin_context()
        # Delete consistency group
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
self.assertEqual('available',
model['status'],
"Consistency Group created failed")
def test_delete_normal_consistencygroup_success(self):
ctxt = context.get_admin_context()
test_volumes = [self.volume]
self.mock_object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "false"})
model, volumes = self.driver.delete_consistencygroup(ctxt,
self.cg,
test_volumes)
self.assertEqual('available',
model['status'],
"Consistency Group created failed")
@mock.patch.object(huawei_driver.HuaweiBaseDriver,
'_get_consistencygroup_type',
return_value={"hypermetro": "true"})
@mock.patch.object(huawei_driver.huawei_utils, 'get_volume_metadata',
return_value={'hypermetro_id': '3400a30d844d0007',
'remote_lun_id': '59'})
def test_update_consistencygroup_success(self,
mock_grouptype,
mock_metadata):
ctxt = context.get_admin_context()
add_volumes = [self.volume]
remove_volumes = [self.volume]
        # Update consistency group
model_update = self.driver.update_consistencygroup(ctxt,
self.cg,
add_volumes,
remove_volumes)
self.assertEqual('available',
model_update[0]['status'],
"Consistency Group update failed")
def test_create_hypermetro_consistencygroup_success_2(self):
ctxt = context.get_admin_context()
# Create consistency group
temp_cg = copy.deepcopy(self.cg)
temp_cg['volume_type_id'] = '550c089b-bfdd-4f7f-86e1-3ba88125555c,'
self.mock_object(volume_types, 'get_volume_type',
return_value=test_hypermetro_type)
model_update = self.driver.create_consistencygroup(ctxt, temp_cg)
self.assertEqual('available',
model_update['status'],
"Consistency Group created failed")
def test_is_initiator_associated_to_host_raise(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.client.is_initiator_associated_to_host,
'ini-2', '1')
def test_is_initiator_associated_to_host_true(self):
ret = self.driver.client.is_initiator_associated_to_host('ini-1', '1')
self.assertFalse(ret)
ret = self.driver.client.is_initiator_associated_to_host('ini-2', '2')
self.assertTrue(ret)
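# Config-loader tests: _create_fake_conf_file builds a minimal
# cinder_huawei_conf.xml (RestURL, credentials, LUN and iSCSI sections)
# with xml.dom.minidom for HuaweiConf to parse.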
class HuaweiConfTestCase(test.TestCase):
def setUp(self):
super(HuaweiConfTestCase, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.fake_xml_file = self.tmp_dir + '/cinder_huawei_conf.xml'
self.conf = mock.Mock()
self.conf.cinder_huawei_conf_file = self.fake_xml_file
self.huawei_conf = huawei_conf.HuaweiConf(self.conf)
def _create_fake_conf_file(self):
doc = minidom.Document()
config = doc.createElement('config')
doc.appendChild(config)
storage = doc.createElement('Storage')
config.appendChild(storage)
url = doc.createElement('RestURL')
url_text = doc.createTextNode('http://192.0.2.69:8082/'
'deviceManager/rest/')
url.appendChild(url_text)
storage.appendChild(url)
username = doc.createElement('UserName')
username_text = doc.createTextNode('admin')
username.appendChild(username_text)
storage.appendChild(username)
password = doc.createElement('UserPassword')
password_text = doc.createTextNode('Admin@storage')
password.appendChild(password_text)
storage.appendChild(password)
product = doc.createElement('Product')
product_text = doc.createTextNode('V3')
product.appendChild(product_text)
storage.appendChild(product)
protocol = doc.createElement('Protocol')
protocol_text = doc.createTextNode('iSCSI')
protocol.appendChild(protocol_text)
storage.appendChild(protocol)
lun = doc.createElement('LUN')
config.appendChild(lun)
luntype = doc.createElement('LUNType')
luntype_text = doc.createTextNode('Thick')
luntype.appendChild(luntype_text)
lun.appendChild(luntype)
lun_ready_wait_interval = doc.createElement('LUNReadyWaitInterval')
lun_ready_wait_interval_text = doc.createTextNode('2')
lun_ready_wait_interval.appendChild(lun_ready_wait_interval_text)
lun.appendChild(lun_ready_wait_interval)
lun_copy_wait_interval = doc.createElement('LUNcopyWaitInterval')
lun_copy_wait_interval_text = doc.createTextNode('2')
lun_copy_wait_interval.appendChild(lun_copy_wait_interval_text)
lun.appendChild(lun_copy_wait_interval)
timeout = doc.createElement('Timeout')
timeout_text = doc.createTextNode('43200')
timeout.appendChild(timeout_text)
lun.appendChild(timeout)
write_type = doc.createElement('WriteType')
write_type_text = doc.createTextNode('1')
write_type.appendChild(write_type_text)
lun.appendChild(write_type)
mirror_switch = doc.createElement('MirrorSwitch')
mirror_switch_text = doc.createTextNode('1')
mirror_switch.appendChild(mirror_switch_text)
lun.appendChild(mirror_switch)
prefetch = doc.createElement('Prefetch')
prefetch.setAttribute('Type', '1')
prefetch.setAttribute('Value', '0')
lun.appendChild(prefetch)
pool = doc.createElement('StoragePool')
pool_text = doc.createTextNode('OpenStack_Pool')
pool.appendChild(pool_text)
lun.appendChild(pool)
iscsi = doc.createElement('iSCSI')
config.appendChild(iscsi)
defaulttargetip = doc.createElement('DefaultTargetIP')
defaulttargetip_text = doc.createTextNode('192.0.2.68')
defaulttargetip.appendChild(defaulttargetip_text)
iscsi.appendChild(defaulttargetip)
initiator = doc.createElement('Initiator')
initiator.setAttribute('Name', 'iqn.1993-08.debian:01:ec2bff7ac3a3')
initiator.setAttribute('TargetIP', '192.0.2.2')
initiator.setAttribute('CHAPinfo', 'mm-user;mm-user@storage')
initiator.setAttribute('ALUA', '1')
initiator.setAttribute('TargetPortGroup', 'PortGroup001')
iscsi.appendChild(initiator)
        with open(self.conf.cinder_huawei_conf_file, 'w') as fakefile:
            fakefile.write(doc.toprettyxml(indent=''))
| true | true |
f71a9b3881862c5eb958a16f5a70f95f5060726c | 5,616 | py | Python | Briefly/api/Punc/punctuator/tests.py | q815101630/Briefly2.0 | d92ba52308ef8c644fe8fb453169d0bee1a7f47e | [
"MIT"
] | 20 | 2019-12-03T06:06:58.000Z | 2022-02-23T21:49:03.000Z | Briefly/api/Punc/punctuator/tests.py | q815101630/Briefly2.0 | d92ba52308ef8c644fe8fb453169d0bee1a7f47e | [
"MIT"
] | 9 | 2020-06-15T14:56:38.000Z | 2022-02-12T13:09:38.000Z | Briefly/api/Punc/punctuator/tests.py | q815101630/Briefly2.0 | d92ba52308ef8c644fe8fb453169d0bee1a7f47e | [
"MIT"
] | 8 | 2020-07-27T14:00:37.000Z | 2022-02-20T17:59:04.000Z | from __future__ import absolute_import
import time
import os
import unittest
import tempfile
import shutil
from io import StringIO
from . import punc
from .punc import Punctuator, download_model
class Tests(unittest.TestCase):
samples = [
(
'mary had a little lamb its fleece was white as snow and anywhere that mary went the lamb was sure to go',
'Mary had a little lamb, its fleece was white as snow and anywhere that mary went, the lamb was sure to go.'
),
(
"they say it's only as cold as it feels in your mind i don't buy into that theory much what do you think",
"They say it's only as cold as it feels in your mind. I don't buy into that theory much. What do you think."
),
(
"he's a do me a favor go home to your wife",
"He's a do me: a favor go home to your wife.",
),
(
"they'll even negotiate your rate with the insurance company",
"They'll even negotiate your rate with the insurance company.",
),
(
"for me i wanted to get into commentary some sort of way i didn't know how to do that so i left the firm and i started a business",
"For me, I wanted to get into commentary some sort of way. I didn't know how to do that. So I left the firm and I started a business."
),
]
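    # Each sample pairs raw, unpunctuated ASR-style text with the
    # punctuated and capitalized output expected from the pre-trained
    # Demo-Europarl-EN model.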
def test_punctuate(self):
# Create temp directory for downloading data.
d = tempfile.mkdtemp()
os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
print('Temp dir:', d)
os.chdir(d)
try:
# Download pre-trained model.
if not os.path.isfile(model_file):
model_file = download_model()
print('Model file:', model_file)
# Create punctuator.
t0 = time.time()
p = Punctuator(model_file=model_file)
td = time.time() - t0
print('Loaded in %s seconds from path.' % td)
# Add punctuation.
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
# Serialize the entire punctuator, not just the model.
print('Writing...')
t0 = time.time()
fn = 'data.pickle'
p.save(fn)
td = time.time() - t0
print('Wrote in %s seconds.' % td)
            # Load punctuator.
print('Loading...')
t0 = time.time()
p2 = Punctuator.load(fn)
td = time.time() - t0
print('Loaded in %s seconds.' % td)
# Confirm punctuations match previous.
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p2.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
finally:
shutil.rmtree(d)
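    # Same round-trip as test_punctuate, except the model is handed to
    # Punctuator as raw bytes read from disk instead of a file path.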
def test_punctuate_stream(self):
# Create temp directory for downloading data.
d = tempfile.mkdtemp()
os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
print('Temp dir:', d)
os.chdir(d)
try:
# Download pre-trained model.
if not os.path.isfile(model_file):
model_file = download_model()
print('Model file:', model_file)
# Check if file can be read in as bytes
            with open(model_file, 'rb') as infile:
                data = infile.read()
t0 = time.time()
p = Punctuator(data)
td = time.time() - t0
print('Loaded in %s seconds as bytes.' % td)
# Add punctuation.
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
# Serialize the entire punctuator, not just the model.
print('Writing...')
t0 = time.time()
fn = 'data.pickle'
p.save(fn)
td = time.time() - t0
print('Wrote in %s seconds.' % td)
            # Load punctuator.
print('Loading...')
t0 = time.time()
p2 = Punctuator.load(fn)
td = time.time() - t0
print('Loaded in %s seconds.' % td)
# Confirm punctuations match previous.
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p2.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
finally:
shutil.rmtree(d)
if __name__ == '__main__':
unittest.main()
| 36.705882 | 146 | 0.570691 | from __future__ import absolute_import
import time
import os
import unittest
import tempfile
import shutil
from io import StringIO
from . import punc
from .punc import Punctuator, download_model
class Tests(unittest.TestCase):
samples = [
(
'mary had a little lamb its fleece was white as snow and anywhere that mary went the lamb was sure to go',
'Mary had a little lamb, its fleece was white as snow and anywhere that mary went, the lamb was sure to go.'
),
(
"they say it's only as cold as it feels in your mind i don't buy into that theory much what do you think",
"They say it's only as cold as it feels in your mind. I don't buy into that theory much. What do you think."
),
(
"he's a do me a favor go home to your wife",
"He's a do me: a favor go home to your wife.",
),
(
"they'll even negotiate your rate with the insurance company",
"They'll even negotiate your rate with the insurance company.",
),
(
"for me i wanted to get into commentary some sort of way i didn't know how to do that so i left the firm and i started a business",
"For me, I wanted to get into commentary some sort of way. I didn't know how to do that. So I left the firm and I started a business."
),
]
def test_punctuate(self):
d = tempfile.mkdtemp()
os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
print('Temp dir:', d)
os.chdir(d)
try:
if not os.path.isfile(model_file):
model_file = download_model()
print('Model file:', model_file)
t0 = time.time()
p = Punctuator(model_file=model_file)
td = time.time() - t0
print('Loaded in %s seconds from path.' % td)
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
print('Writing...')
t0 = time.time()
fn = 'data.pickle'
p.save(fn)
td = time.time() - t0
print('Wrote in %s seconds.' % td)
print('Loading...')
t0 = time.time()
p2 = Punctuator.load(fn)
td = time.time() - t0
print('Loaded in %s seconds.' % td)
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p2.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
finally:
shutil.rmtree(d)
def test_punctuate_stream(self):
d = tempfile.mkdtemp()
os.makedirs(punc.PUNCTUATOR_DATA_DIR, exist_ok=True)
model_file = os.path.join(punc.PUNCTUATOR_DATA_DIR, 'Demo-Europarl-EN.pcl')
print('Temp dir:', d)
os.chdir(d)
try:
if not os.path.isfile(model_file):
model_file = download_model()
print('Model file:', model_file)
infile = open(model_file, 'rb')
data = infile.read()
t0 = time.time()
p = Punctuator(data)
td = time.time() - t0
print('Loaded in %s seconds as bytes.' % td)
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
print('Writing...')
t0 = time.time()
fn = 'data.pickle'
p.save(fn)
td = time.time() - t0
print('Wrote in %s seconds.' % td)
print('Loading...')
t0 = time.time()
p2 = Punctuator.load(fn)
td = time.time() - t0
print('Loaded in %s seconds.' % td)
for input_text, expect_output_text in self.samples:
fout = StringIO()
actual_output_text = p2.punctuate(input_text)
print('expect_output_text:', expect_output_text)
print('actual_output_text:', actual_output_text)
self.assertEqual(actual_output_text, expect_output_text)
finally:
shutil.rmtree(d)
if __name__ == '__main__':
unittest.main()
| true | true |
f71a9c2559fc2833e574b56aa245554739a58e09 | 8,913 | py | Python | sleekxmpp/features/feature_mechanisms/mechanisms.py | RedbackThomson/LoLShadow | c47dd2826b43f47663eed55bb3f8a6866609c5b4 | [
"MIT"
] | 1 | 2015-09-04T05:52:45.000Z | 2015-09-04T05:52:45.000Z | sleekxmpp/features/feature_mechanisms/mechanisms.py | RedbackThomson/LoLShadow | c47dd2826b43f47663eed55bb3f8a6866609c5b4 | [
"MIT"
] | null | null | null | sleekxmpp/features/feature_mechanisms/mechanisms.py | RedbackThomson/LoLShadow | c47dd2826b43f47663eed55bb3f8a6866609c5b4 | [
"MIT"
] | null | null | null | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import ssl
import logging
from sleekxmpp.util import sasl
from sleekxmpp.util.stringprep_profiles import StringPrepError
from sleekxmpp.stanza import StreamFeatures
from sleekxmpp.xmlstream import RestartStream, register_stanza_plugin
from sleekxmpp.plugins import BasePlugin
from sleekxmpp.xmlstream.matcher import MatchXPath
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.features.feature_mechanisms import stanza
log = logging.getLogger(__name__)
class FeatureMechanisms(BasePlugin):
name = 'feature_mechanisms'
description = 'RFC 6120: Stream Feature: SASL'
dependencies = set()
stanza = stanza
default_config = {
'use_mech': None,
'use_mechs': None,
'min_mech': None,
'sasl_callback': None,
'security_callback': None,
'encrypted_plain': True,
'unencrypted_plain': False,
'unencrypted_digest': False,
'unencrypted_cram': False,
'unencrypted_scram': True,
'order': 100
}
def plugin_init(self):
if self.sasl_callback is None:
self.sasl_callback = self._default_credentials
if self.security_callback is None:
self.security_callback = self._default_security
creds = self.sasl_callback(set(['username']), set())
if not self.use_mech and not creds['username']:
self.use_mech = 'ANONYMOUS'
self.mech = None
self.mech_list = set()
self.attempted_mechs = set()
register_stanza_plugin(StreamFeatures, stanza.Mechanisms)
self.xmpp.register_stanza(stanza.Success)
self.xmpp.register_stanza(stanza.Failure)
self.xmpp.register_stanza(stanza.Auth)
self.xmpp.register_stanza(stanza.Challenge)
self.xmpp.register_stanza(stanza.Response)
self.xmpp.register_stanza(stanza.Abort)
self.xmpp.register_handler(
Callback('SASL Success',
MatchXPath(stanza.Success.tag_name()),
self._handle_success,
instream=True))
self.xmpp.register_handler(
Callback('SASL Failure',
MatchXPath(stanza.Failure.tag_name()),
self._handle_fail,
instream=True))
self.xmpp.register_handler(
Callback('SASL Challenge',
MatchXPath(stanza.Challenge.tag_name()),
self._handle_challenge))
self.xmpp.register_feature('mechanisms',
self._handle_sasl_auth,
restart=True,
order=self.order)
def _default_credentials(self, required_values, optional_values):
creds = self.xmpp.credentials
result = {}
values = required_values.union(optional_values)
for value in values:
if value == 'username':
result[value] = creds.get('username', self.xmpp.requested_jid.user)
elif value == 'email':
jid = self.xmpp.requested_jid.bare
result[value] = creds.get('email', jid)
elif value == 'channel_binding':
if hasattr(self.xmpp.socket, 'get_channel_binding'):
result[value] = self.xmpp.socket.get_channel_binding()
else:
log.debug("Channel binding not supported.")
log.debug("Use Python 3.3+ for channel binding and " + \
"SCRAM-SHA-1-PLUS support")
result[value] = None
elif value == 'host':
result[value] = creds.get('host', self.xmpp.requested_jid.domain)
elif value == 'realm':
result[value] = creds.get('realm', self.xmpp.requested_jid.domain)
elif value == 'service-name':
result[value] = creds.get('service-name', self.xmpp._service_name)
elif value == 'service':
result[value] = creds.get('service', 'xmpp')
elif value in creds:
result[value] = creds[value]
return result
def _default_security(self, values):
result = {}
for value in values:
if value == 'encrypted':
if 'starttls' in self.xmpp.features:
result[value] = True
elif isinstance(self.xmpp.socket, ssl.SSLSocket):
result[value] = True
else:
result[value] = False
else:
result[value] = self.config.get(value, False)
return result
def _handle_sasl_auth(self, features):
"""
Handle authenticating using SASL.
Arguments:
features -- The stream features stanza.
"""
if 'mechanisms' in self.xmpp.features:
# SASL authentication has already succeeded, but the
# server has incorrectly offered it again.
return False
enforce_limit = False
limited_mechs = self.use_mechs
if limited_mechs is None:
limited_mechs = set()
elif limited_mechs and not isinstance(limited_mechs, set):
limited_mechs = set(limited_mechs)
enforce_limit = True
if self.use_mech:
limited_mechs.add(self.use_mech)
enforce_limit = True
if enforce_limit:
self.use_mechs = limited_mechs
self.mech_list = set(features['mechanisms'])
return self._send_auth()
def _send_auth(self):
mech_list = self.mech_list - self.attempted_mechs
try:
self.mech = sasl.choose(mech_list,
self.sasl_callback,
self.security_callback,
limit=self.use_mechs,
min_mech=self.min_mech)
except sasl.SASLNoAppropriateMechanism:
log.error("No appropriate login method.")
self.xmpp.event("no_auth", direct=True)
self.xmpp.event("failed_auth", direct=True)
self.attempted_mechs = set()
return self.xmpp.disconnect()
except StringPrepError:
log.exception("A credential value did not pass SASLprep.")
self.xmpp.disconnect()
resp = stanza.Auth(self.xmpp)
resp['mechanism'] = self.mech.name
try:
resp['value'] = self.mech.process()
except sasl.SASLCancelled:
self.attempted_mechs.add(self.mech.name)
self._send_auth()
except sasl.SASLFailed:
self.attempted_mechs.add(self.mech.name)
self._send_auth()
except sasl.SASLMutualAuthFailed:
log.error("Mutual authentication failed! " + \
"A security breach is possible.")
self.attempted_mechs.add(self.mech.name)
self.xmpp.disconnect()
else:
resp.send(now=True)
return True
def _handle_challenge(self, stanza):
"""SASL challenge received. Process and send response."""
resp = self.stanza.Response(self.xmpp)
try:
resp['value'] = self.mech.process(stanza['value'])
except sasl.SASLCancelled:
self.stanza.Abort(self.xmpp).send()
except sasl.SASLFailed:
self.stanza.Abort(self.xmpp).send()
except sasl.SASLMutualAuthFailed:
log.error("Mutual authentication failed! " + \
"A security breach is possible.")
self.attempted_mechs.add(self.mech.name)
self.xmpp.disconnect()
else:
resp.send(now=True)
def _handle_success(self, stanza):
"""SASL authentication succeeded. Restart the stream."""
try:
final = self.mech.process(stanza['value'])
except sasl.SASLMutualAuthFailed:
log.error("Mutual authentication failed! " + \
"A security breach is possible.")
self.attempted_mechs.add(self.mech.name)
self.xmpp.disconnect()
else:
self.attempted_mechs = set()
self.xmpp.authenticated = True
self.xmpp.features.add('mechanisms')
self.xmpp.event('auth_success', stanza, direct=True)
raise RestartStream()
def _handle_fail(self, stanza):
"""SASL authentication failed. Disconnect and shutdown."""
self.attempted_mechs.add(self.mech.name)
log.info("Authentication failed: %s", stanza['condition'])
self.xmpp.event("failed_auth", stanza, direct=True)
self._send_auth()
return True
| 36.679012 | 83 | 0.58151 |
import ssl
import logging
from sleekxmpp.util import sasl
from sleekxmpp.util.stringprep_profiles import StringPrepError
from sleekxmpp.stanza import StreamFeatures
from sleekxmpp.xmlstream import RestartStream, register_stanza_plugin
from sleekxmpp.plugins import BasePlugin
from sleekxmpp.xmlstream.matcher import MatchXPath
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.features.feature_mechanisms import stanza
log = logging.getLogger(__name__)
class FeatureMechanisms(BasePlugin):
name = 'feature_mechanisms'
description = 'RFC 6120: Stream Feature: SASL'
dependencies = set()
stanza = stanza
default_config = {
'use_mech': None,
'use_mechs': None,
'min_mech': None,
'sasl_callback': None,
'security_callback': None,
'encrypted_plain': True,
'unencrypted_plain': False,
'unencrypted_digest': False,
'unencrypted_cram': False,
'unencrypted_scram': True,
'order': 100
}
def plugin_init(self):
if self.sasl_callback is None:
self.sasl_callback = self._default_credentials
if self.security_callback is None:
self.security_callback = self._default_security
creds = self.sasl_callback(set(['username']), set())
if not self.use_mech and not creds['username']:
self.use_mech = 'ANONYMOUS'
self.mech = None
self.mech_list = set()
self.attempted_mechs = set()
register_stanza_plugin(StreamFeatures, stanza.Mechanisms)
self.xmpp.register_stanza(stanza.Success)
self.xmpp.register_stanza(stanza.Failure)
self.xmpp.register_stanza(stanza.Auth)
self.xmpp.register_stanza(stanza.Challenge)
self.xmpp.register_stanza(stanza.Response)
self.xmpp.register_stanza(stanza.Abort)
self.xmpp.register_handler(
Callback('SASL Success',
MatchXPath(stanza.Success.tag_name()),
self._handle_success,
instream=True))
self.xmpp.register_handler(
Callback('SASL Failure',
MatchXPath(stanza.Failure.tag_name()),
self._handle_fail,
instream=True))
self.xmpp.register_handler(
Callback('SASL Challenge',
MatchXPath(stanza.Challenge.tag_name()),
self._handle_challenge))
self.xmpp.register_feature('mechanisms',
self._handle_sasl_auth,
restart=True,
order=self.order)
def _default_credentials(self, required_values, optional_values):
creds = self.xmpp.credentials
result = {}
values = required_values.union(optional_values)
for value in values:
if value == 'username':
result[value] = creds.get('username', self.xmpp.requested_jid.user)
elif value == 'email':
jid = self.xmpp.requested_jid.bare
result[value] = creds.get('email', jid)
elif value == 'channel_binding':
if hasattr(self.xmpp.socket, 'get_channel_binding'):
result[value] = self.xmpp.socket.get_channel_binding()
else:
log.debug("Channel binding not supported.")
log.debug("Use Python 3.3+ for channel binding and " + \
"SCRAM-SHA-1-PLUS support")
result[value] = None
elif value == 'host':
result[value] = creds.get('host', self.xmpp.requested_jid.domain)
elif value == 'realm':
result[value] = creds.get('realm', self.xmpp.requested_jid.domain)
elif value == 'service-name':
result[value] = creds.get('service-name', self.xmpp._service_name)
elif value == 'service':
result[value] = creds.get('service', 'xmpp')
elif value in creds:
result[value] = creds[value]
return result
def _default_security(self, values):
result = {}
for value in values:
if value == 'encrypted':
if 'starttls' in self.xmpp.features:
result[value] = True
elif isinstance(self.xmpp.socket, ssl.SSLSocket):
result[value] = True
else:
result[value] = False
else:
result[value] = self.config.get(value, False)
return result
def _handle_sasl_auth(self, features):
if 'mechanisms' in self.xmpp.features:
return False
enforce_limit = False
limited_mechs = self.use_mechs
if limited_mechs is None:
limited_mechs = set()
elif limited_mechs and not isinstance(limited_mechs, set):
limited_mechs = set(limited_mechs)
enforce_limit = True
if self.use_mech:
limited_mechs.add(self.use_mech)
enforce_limit = True
if enforce_limit:
self.use_mechs = limited_mechs
self.mech_list = set(features['mechanisms'])
return self._send_auth()
def _send_auth(self):
mech_list = self.mech_list - self.attempted_mechs
try:
self.mech = sasl.choose(mech_list,
self.sasl_callback,
self.security_callback,
limit=self.use_mechs,
min_mech=self.min_mech)
except sasl.SASLNoAppropriateMechanism:
log.error("No appropriate login method.")
self.xmpp.event("no_auth", direct=True)
self.xmpp.event("failed_auth", direct=True)
self.attempted_mechs = set()
return self.xmpp.disconnect()
except StringPrepError:
log.exception("A credential value did not pass SASLprep.")
self.xmpp.disconnect()
resp = stanza.Auth(self.xmpp)
resp['mechanism'] = self.mech.name
try:
resp['value'] = self.mech.process()
except sasl.SASLCancelled:
self.attempted_mechs.add(self.mech.name)
self._send_auth()
except sasl.SASLFailed:
self.attempted_mechs.add(self.mech.name)
self._send_auth()
except sasl.SASLMutualAuthFailed:
log.error("Mutual authentication failed! " + \
"A security breach is possible.")
self.attempted_mechs.add(self.mech.name)
self.xmpp.disconnect()
else:
resp.send(now=True)
return True
def _handle_challenge(self, stanza):
resp = self.stanza.Response(self.xmpp)
try:
resp['value'] = self.mech.process(stanza['value'])
except sasl.SASLCancelled:
self.stanza.Abort(self.xmpp).send()
except sasl.SASLFailed:
self.stanza.Abort(self.xmpp).send()
except sasl.SASLMutualAuthFailed:
log.error("Mutual authentication failed! " + \
"A security breach is possible.")
self.attempted_mechs.add(self.mech.name)
self.xmpp.disconnect()
else:
resp.send(now=True)
def _handle_success(self, stanza):
try:
final = self.mech.process(stanza['value'])
except sasl.SASLMutualAuthFailed:
log.error("Mutual authentication failed! " + \
"A security breach is possible.")
self.attempted_mechs.add(self.mech.name)
self.xmpp.disconnect()
else:
self.attempted_mechs = set()
self.xmpp.authenticated = True
self.xmpp.features.add('mechanisms')
self.xmpp.event('auth_success', stanza, direct=True)
raise RestartStream()
def _handle_fail(self, stanza):
self.attempted_mechs.add(self.mech.name)
log.info("Authentication failed: %s", stanza['condition'])
self.xmpp.event("failed_auth", stanza, direct=True)
self._send_auth()
return True
| true | true |
f71a9c42ba701b954c3fcb36fd4b72ea81d1eb78 | 7,255 | py | Python | duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/20_features/numtrees_8/rule_1.py | apcarrik/kaggle | 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf | [
"MIT"
] | null | null | null | duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/20_features/numtrees_8/rule_1.py | apcarrik/kaggle | 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf | [
"MIT"
] | null | null | null | duke-cs671-fall21-coupon-recommendation/outputs/rules/RF/20_features/numtrees_8/rule_1.py | apcarrik/kaggle | 6e2d4db58017323e7ba5510bcc2598e01a4ee7bf | [
"MIT"
] | null | null | null | def findDecision(obj): #obj[0]: Driving_to, obj[1]: Passanger, obj[2]: Weather, obj[3]: Temperature, obj[4]: Time, obj[5]: Coupon, obj[6]: Coupon_validity, obj[7]: Gender, obj[8]: Age, obj[9]: Maritalstatus, obj[10]: Children, obj[11]: Education, obj[12]: Occupation, obj[13]: Income, obj[14]: Bar, obj[15]: Coffeehouse, obj[16]: Restaurantlessthan20, obj[17]: Restaurant20to50, obj[18]: Direction_same, obj[19]: Distance
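    # Auto-generated decision tree (a single tree of a random-forest run, per the file path);
    # the JSON-style comments below record each split's feature, instance count and metric value.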
# {"feature": "Age", "instances": 127, "metric_value": 0.9978, "depth": 1}
if obj[8]>1:
# {"feature": "Education", "instances": 88, "metric_value": 0.9865, "depth": 2}
if obj[11]<=3:
# {"feature": "Coupon", "instances": 84, "metric_value": 0.9737, "depth": 3}
if obj[5]>0:
# {"feature": "Direction_same", "instances": 73, "metric_value": 0.9934, "depth": 4}
if obj[18]<=0:
# {"feature": "Occupation", "instances": 63, "metric_value": 0.9691, "depth": 5}
if obj[12]>1:
# {"feature": "Bar", "instances": 57, "metric_value": 0.9348, "depth": 6}
if obj[14]<=2.0:
# {"feature": "Restaurantlessthan20", "instances": 52, "metric_value": 0.8905, "depth": 7}
if obj[16]>1.0:
# {"feature": "Income", "instances": 46, "metric_value": 0.8281, "depth": 8}
if obj[13]<=6:
# {"feature": "Restaurant20to50", "instances": 43, "metric_value": 0.8542, "depth": 9}
if obj[17]<=1.0:
# {"feature": "Driving_to", "instances": 28, "metric_value": 0.7496, "depth": 10}
if obj[0]<=1:
# {"feature": "Maritalstatus", "instances": 21, "metric_value": 0.5917, "depth": 11}
if obj[9]>0:
return 'False'
elif obj[9]<=0:
# {"feature": "Passanger", "instances": 8, "metric_value": 0.9544, "depth": 12}
if obj[1]>0:
# {"feature": "Coupon_validity", "instances": 7, "metric_value": 0.8631, "depth": 13}
if obj[6]>0:
# {"feature": "Temperature", "instances": 4, "metric_value": 1.0, "depth": 14}
if obj[3]>55:
# {"feature": "Coffeehouse", "instances": 3, "metric_value": 0.9183, "depth": 15}
if obj[15]>1.0:
return 'False'
elif obj[15]<=1.0:
return 'True'
else: return 'True'
elif obj[3]<=55:
return 'True'
else: return 'True'
elif obj[6]<=0:
return 'False'
else: return 'False'
elif obj[1]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[0]>1:
# {"feature": "Coupon_validity", "instances": 7, "metric_value": 0.9852, "depth": 11}
if obj[6]>0:
return 'False'
elif obj[6]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[17]>1.0:
# {"feature": "Time", "instances": 15, "metric_value": 0.971, "depth": 10}
if obj[4]<=1:
# {"feature": "Maritalstatus", "instances": 8, "metric_value": 0.5436, "depth": 11}
if obj[9]<=1:
return 'False'
elif obj[9]>1:
# {"feature": "Weather", "instances": 2, "metric_value": 1.0, "depth": 12}
if obj[2]<=0:
return 'False'
elif obj[2]>0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[4]>1:
# {"feature": "Coffeehouse", "instances": 7, "metric_value": 0.8631, "depth": 11}
if obj[15]>0.0:
return 'True'
elif obj[15]<=0.0:
# {"feature": "Coupon_validity", "instances": 3, "metric_value": 0.9183, "depth": 12}
if obj[6]>0:
return 'False'
elif obj[6]<=0:
return 'True'
else: return 'True'
else: return 'False'
else: return 'True'
else: return 'False'
elif obj[13]>6:
return 'False'
else: return 'False'
elif obj[16]<=1.0:
# {"feature": "Maritalstatus", "instances": 6, "metric_value": 0.9183, "depth": 8}
if obj[9]<=0:
return 'True'
elif obj[9]>0:
# {"feature": "Temperature", "instances": 3, "metric_value": 0.9183, "depth": 9}
if obj[3]>30:
return 'False'
elif obj[3]<=30:
return 'True'
else: return 'True'
else: return 'False'
else: return 'True'
elif obj[14]>2.0:
# {"feature": "Time", "instances": 5, "metric_value": 0.7219, "depth": 7}
if obj[4]<=2:
return 'True'
elif obj[4]>2:
return 'False'
else: return 'False'
else: return 'True'
elif obj[12]<=1:
# {"feature": "Children", "instances": 6, "metric_value": 0.65, "depth": 6}
if obj[10]>0:
return 'True'
elif obj[10]<=0:
return 'False'
else: return 'False'
else: return 'True'
elif obj[18]>0:
# {"feature": "Occupation", "instances": 10, "metric_value": 0.7219, "depth": 5}
if obj[12]>5:
return 'True'
elif obj[12]<=5:
# {"feature": "Driving_to", "instances": 4, "metric_value": 1.0, "depth": 6}
if obj[0]<=1:
# {"feature": "Maritalstatus", "instances": 3, "metric_value": 0.9183, "depth": 7}
if obj[9]<=1:
return 'False'
elif obj[9]>1:
return 'True'
else: return 'True'
elif obj[0]>1:
return 'True'
else: return 'True'
else: return 'True'
else: return 'True'
elif obj[5]<=0:
# {"feature": "Passanger", "instances": 11, "metric_value": 0.4395, "depth": 4}
if obj[1]>0:
return 'False'
elif obj[1]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[11]>3:
return 'True'
else: return 'True'
elif obj[8]<=1:
# {"feature": "Restaurant20to50", "instances": 39, "metric_value": 0.8213, "depth": 2}
if obj[17]<=1.0:
# {"feature": "Occupation", "instances": 25, "metric_value": 0.5294, "depth": 3}
if obj[12]<=20:
# {"feature": "Income", "instances": 22, "metric_value": 0.2668, "depth": 4}
if obj[13]<=6:
return 'True'
elif obj[13]>6:
return 'False'
else: return 'False'
elif obj[12]>20:
# {"feature": "Time", "instances": 3, "metric_value": 0.9183, "depth": 4}
if obj[4]>0:
return 'False'
elif obj[4]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[17]>1.0:
# {"feature": "Passanger", "instances": 14, "metric_value": 1.0, "depth": 3}
if obj[1]<=2:
# {"feature": "Income", "instances": 11, "metric_value": 0.9457, "depth": 4}
if obj[13]>2:
# {"feature": "Coupon", "instances": 6, "metric_value": 0.9183, "depth": 5}
if obj[5]>2:
return 'False'
elif obj[5]<=2:
# {"feature": "Weather", "instances": 3, "metric_value": 0.9183, "depth": 6}
if obj[2]<=1:
return 'True'
elif obj[2]>1:
return 'False'
else: return 'False'
else: return 'True'
elif obj[13]<=2:
return 'True'
else: return 'True'
elif obj[1]>2:
return 'False'
else: return 'False'
else: return 'True'
else: return 'True'
| 38.590426 | 421 | 0.513853 | def findDecision(obj):
if obj[8]>1:
if obj[11]<=3:
if obj[5]>0:
if obj[18]<=0:
if obj[12]>1:
if obj[14]<=2.0:
if obj[16]>1.0:
if obj[13]<=6:
if obj[17]<=1.0:
if obj[0]<=1:
if obj[9]>0:
return 'False'
elif obj[9]<=0:
if obj[1]>0:
if obj[6]>0:
if obj[3]>55:
if obj[15]>1.0:
return 'False'
elif obj[15]<=1.0:
return 'True'
else: return 'True'
elif obj[3]<=55:
return 'True'
else: return 'True'
elif obj[6]<=0:
return 'False'
else: return 'False'
elif obj[1]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[0]>1:
if obj[6]>0:
return 'False'
elif obj[6]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[17]>1.0:
if obj[4]<=1:
if obj[9]<=1:
return 'False'
elif obj[9]>1:
if obj[2]<=0:
return 'False'
elif obj[2]>0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[4]>1:
if obj[15]>0.0:
return 'True'
elif obj[15]<=0.0:
if obj[6]>0:
return 'False'
elif obj[6]<=0:
return 'True'
else: return 'True'
else: return 'False'
else: return 'True'
else: return 'False'
elif obj[13]>6:
return 'False'
else: return 'False'
elif obj[16]<=1.0:
if obj[9]<=0:
return 'True'
elif obj[9]>0:
if obj[3]>30:
return 'False'
elif obj[3]<=30:
return 'True'
else: return 'True'
else: return 'False'
else: return 'True'
elif obj[14]>2.0:
if obj[4]<=2:
return 'True'
elif obj[4]>2:
return 'False'
else: return 'False'
else: return 'True'
elif obj[12]<=1:
if obj[10]>0:
return 'True'
elif obj[10]<=0:
return 'False'
else: return 'False'
else: return 'True'
elif obj[18]>0:
if obj[12]>5:
return 'True'
elif obj[12]<=5:
if obj[0]<=1:
if obj[9]<=1:
return 'False'
elif obj[9]>1:
return 'True'
else: return 'True'
elif obj[0]>1:
return 'True'
else: return 'True'
else: return 'True'
else: return 'True'
elif obj[5]<=0:
if obj[1]>0:
return 'False'
elif obj[1]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[11]>3:
return 'True'
else: return 'True'
elif obj[8]<=1:
if obj[17]<=1.0:
if obj[12]<=20:
if obj[13]<=6:
return 'True'
elif obj[13]>6:
return 'False'
else: return 'False'
elif obj[12]>20:
if obj[4]>0:
return 'False'
elif obj[4]<=0:
return 'True'
else: return 'True'
else: return 'False'
elif obj[17]>1.0:
if obj[1]<=2:
if obj[13]>2:
if obj[5]>2:
return 'False'
elif obj[5]<=2:
if obj[2]<=1:
return 'True'
elif obj[2]>1:
return 'False'
else: return 'False'
else: return 'True'
elif obj[13]<=2:
return 'True'
else: return 'True'
elif obj[1]>2:
return 'False'
else: return 'False'
else: return 'True'
else: return 'True'
| true | true |
f71a9c674644f0d53c2687dddfa077e5ece93d13 | 62 | py | Python | acq4/modules/TaskRunner/analysisModules/Photostim/__init__.py | aleonlein/acq4 | 4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555 | [
"MIT"
] | 47 | 2015-01-05T16:18:10.000Z | 2022-03-16T13:09:30.000Z | acq4/modules/TaskRunner/analysisModules/Photostim/__init__.py | aleonlein/acq4 | 4b1fcb9ad2c5e8d4595a2b9cf99d50ece0c0f555 | [
"MIT"
] | 48 | 2015-04-19T16:51:41.000Z | 2022-03-31T14:48:16.000Z | acq4/modules/TaskRunner/analysisModules/Photostim/__init__.py | sensapex/acq4 | 9561ba73caff42c609bd02270527858433862ad8 | [
"MIT"
] | 32 | 2015-01-15T14:11:49.000Z | 2021-07-15T13:44:52.000Z | from __future__ import print_function
from .Photostim import * | 31 | 37 | 0.854839 | from __future__ import print_function
from .Photostim import * | true | true |
f71a9cbf524b1e94c7bb76e86a3a25344ade1dab | 21,169 | py | Python | cages/.shared/protocol_xmlrpc.py | targeted/pythomnic3k | c59f8c11302c0a568f45ec626ec6a0065527aa79 | [
"BSD-3-Clause"
] | null | null | null | cages/.shared/protocol_xmlrpc.py | targeted/pythomnic3k | c59f8c11302c0a568f45ec626ec6a0065527aa79 | [
"BSD-3-Clause"
] | 7 | 2019-06-06T15:47:56.000Z | 2019-06-15T18:09:30.000Z | cages/.shared/protocol_xmlrpc.py | targeted/pythomnic3k | c59f8c11302c0a568f45ec626ec6a0065527aa79 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
#-*- coding: iso-8859-1 -*-
################################################################################
#
# This module contains an implementation of XMLRPC interface/resource.
#
# Sample XMLRPC interface configuration (config_interface_xmlrpc_1.py):
#
# config = dict \
# (
# protocol = "xmlrpc", # meta
# request_timeout = None, # meta, optional
# listener_address = ("127.0.0.1", 8000), # tcp
# max_connections = 100, # tcp
# ssl_key_cert_file = None, # ssl, optional filename
# ssl_ca_cert_file = None, # ssl, optional filename
# ssl_ciphers = None, # ssl, optional str
# ssl_protocol = None, # ssl, optional "SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2" or "TLS"
# response_encoding = "windows-1251", # http
# original_ip_header_fields = ("X-Forwarded-For", ), # http
# keep_alive_support = True, # http
# keep_alive_idle_timeout = 120.0, # http
# keep_alive_max_requests = 10, # http
# allow_none = False, # xmlrpc, Python-specific, optional
# )
#
# Sample processing module (interface_xmlrpc_1.py):
#
# def process_request(request, response):
# module, method = request["method"].split(".")
# args = request["args"]
# result = pmnc.__getattr__(module).__getattr__(method)(*args)
# response["result"] = result
#
# Sample XMLRPC resource configuration (config_resource_xmlrpc_1.py)
#
# config = dict \
# (
# protocol = "xmlrpc", # meta
# server_address = ("127.0.0.1", 8000), # tcp
# connect_timeout = 3.0, # tcp
# ssl_key_cert_file = None, # ssl, optional filename
# ssl_ca_cert_file = None, # ssl, optional filename
# ssl_ciphers = None, # ssl, optional str
# ssl_protocol = None, # ssl, optional "SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2" or "TLS"
# ssl_server_hostname = None, # ssl, optional str
# ssl_ignore_hostname = False, # ssl, ignore certificate common/alt name name mismatch
# extra_headers = { "Authorization": "Basic dXNlcjpwYXNz" }, # http
# http_version = "HTTP/1.1", # http
# server_uri = "/xmlrpc", # xmlrpc
# request_encoding = "windows-1251", # xmlrpc
# allow_none = False, # xmlrpc, Python-specific, optional
# )
#
# Sample resource usage (anywhere):
#
# xa = pmnc.transaction.create()
# xa.xmlrpc_1.Module.Method(*args)
# result = xa.execute()[0]
#
# or if the only transaction participant:
#
# result = pmnc.transaction.xmlrpc_1.Module.Method(*args)
#
# Pythomnic3k project
# (c) 2005-2019, Dmitry Dvoinikov <dmitry@targeted.org>
# Distributed under BSD license
#
###############################################################################
__all__ = [ "Interface", "Resource", "process_http_request" ]
###############################################################################
import os; from os import path as os_path
import xmlrpc.client; from xmlrpc.client import loads, dumps, Fault
if __name__ == "__main__": # add pythomnic/lib to sys.path
import os; import sys
main_module_dir = os.path.dirname(sys.modules["__main__"].__file__) or os.getcwd()
sys.path.insert(0, os.path.normpath(os.path.join(main_module_dir, "..", "..", "lib")))
import typecheck; from typecheck import typecheck, typecheck_with_exceptions, \
optional, tuple_of, dict_of, callable, one_of
import exc_string; from exc_string import exc_string
import pmnc.resource_pool; from pmnc.resource_pool import TransactionalResource, ResourceError
###############################################################################
class Interface: # XMLRPC interface built on top of HTTP interface
@typecheck
def __init__(self, name: str, *,
listener_address: (str, int),
max_connections: int,
ssl_key_cert_file: optional(os_path.isfile),
ssl_ca_cert_file: optional(os_path.isfile),
ssl_ciphers: optional(str) = None,
ssl_protocol: optional(one_of("SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2", "TLS")) = None,
response_encoding: str,
original_ip_header_fields: tuple_of(str),
keep_alive_support: bool,
keep_alive_idle_timeout: float,
keep_alive_max_requests: int,
request_timeout: optional(float) = None,
allow_none: optional(bool) = False,
**kwargs): # this kwargs allows for extra application-specific
# settings in config_interface_xmlrpc_X.py
# create an instance of underlying HTTP interface
request_timeout = request_timeout or \
pmnc.config_interfaces.get("request_timeout") # this is now static
self._http_interface = \
pmnc.protocol_http.Interface(name,
listener_address = listener_address,
max_connections = max_connections,
ssl_key_cert_file = ssl_key_cert_file,
ssl_ca_cert_file = ssl_ca_cert_file,
ssl_ciphers = ssl_ciphers,
ssl_protocol = ssl_protocol,
response_encoding = response_encoding,
original_ip_header_fields = original_ip_header_fields,
allowed_methods = ("POST", ),
keep_alive_support = keep_alive_support,
keep_alive_idle_timeout = keep_alive_idle_timeout,
keep_alive_max_requests = keep_alive_max_requests,
gzip_content_types = (),
request_timeout = request_timeout)
# override the default process_http_request method of the created HTTP interface,
# having the HTTP handler method to be called through a pmnc call allows
# online modifications to this module, when it is reloaded
if pmnc.request.self_test == __name__: # self-test
self.process_xmlrpc_request = kwargs["process_xmlrpc_request"]
self._http_interface.process_http_request = \
lambda http_request, http_response: \
pmnc.__getattr__(__name__).process_http_request(http_request, http_response,
self.process_xmlrpc_request,
response_encoding = response_encoding,
allow_none = allow_none or False)
name = property(lambda self: self._http_interface.name)
listener_address = property(lambda self: self._http_interface.listener_address)
###################################
def start(self):
self._http_interface.start()
def cease(self):
self._http_interface.cease()
def stop(self):
self._http_interface.stop()
###################################
def process_xmlrpc_request(self, request, response):
handler_module_name = "interface_{0:s}".format(self.name)
pmnc.__getattr__(handler_module_name).process_request(request, response)
###############################################################################
def process_http_request(http_request: dict, http_response: dict,
process_xmlrpc_request: callable, *,
response_encoding: str, allow_none: bool):
assert http_request["method"] == "POST"
headers = http_request["headers"]
content = http_request["content"]
content_type = headers.get("content-type", "application/octet-stream")
if not content_type.startswith("text/xml"):
http_response["status_code"] = 415 # unsupported media type
return
# extract xmlrpc request from http request content, the parser
# will deduce the bytes encoding from the <?xml encoding attribute
try:
args, method = loads(content)
except:
raise Exception("invalid XMLRPC request: {0:s}".format(exc_string()))
# now we know more about the request
auth_tokens = pmnc.request.parameters["auth_tokens"]
pmnc.request.describe("XMLRPC{0:s} request {1:s} from {2:s}".\
format(auth_tokens["encrypted"] and "S" or "",
method, auth_tokens["peer_ip"]))
# the request contained a valid xmlrpc packet,
# it would be polite to respond with one as well
try:
# populate the request parameters with XMLRPC-specific values
pmnc.request.protocol = "xmlrpc"
xmlrpc_request = dict(method = method, args = args)
xmlrpc_response = dict(result = None)
# invoke the application handler
process_xmlrpc_request(xmlrpc_request, xmlrpc_response)
# fetch the XMLRPC call result
result = xmlrpc_response["result"]
if result is None:
result = ()
# marshal the result in an XMLRPC packet
content = dumps((result, ), methodresponse = True,
encoding = response_encoding, allow_none = allow_none)
except:
error = exc_string()
content = dumps(Fault(500, error), methodresponse = True, # 500 as in "Internal Server Error"
encoding = response_encoding, allow_none = allow_none)
pmnc.log.error("returning XMLRPC fault: {0:s}".format(error))
else:
if pmnc.log.debug:
pmnc.log.debug("returning XMLRPC response")
http_response["headers"]["content-type"] = "text/xml"
http_response["content"] = content
###############################################################################
class Resource(TransactionalResource): # XMLRPC resource
@typecheck
def __init__(self, name, *,
server_address: (str, int),
connect_timeout: float,
ssl_key_cert_file: optional(os_path.isfile),
ssl_ca_cert_file: optional(os_path.isfile),
ssl_ciphers: optional(str) = None,
ssl_protocol: optional(one_of("SSLv23", "TLSv1", "TLSv1_1", "TLSv1_2", "TLS")) = None,
ssl_server_hostname: optional(str) = None,
ssl_ignore_hostname: optional(bool) = False,
extra_headers: dict_of(str, str),
http_version: str,
server_uri: str,
request_encoding: str,
allow_none: optional(bool) = False):
TransactionalResource.__init__(self, name)
self._server_uri = server_uri
self._request_encoding = request_encoding
self._allow_none = allow_none
self._http_resource = \
pmnc.protocol_http.Resource(name,
server_address = server_address,
connect_timeout = connect_timeout,
ssl_key_cert_file = ssl_key_cert_file,
ssl_ca_cert_file = ssl_ca_cert_file,
ssl_ciphers = ssl_ciphers,
ssl_protocol = ssl_protocol,
ssl_server_hostname = ssl_server_hostname,
ssl_ignore_hostname = ssl_ignore_hostname,
extra_headers = extra_headers,
http_version = http_version)
###################################
def connect(self):
TransactionalResource.connect(self)
self._attrs = []
self._http_resource.connect()
def disconnect(self):
try:
self._http_resource.disconnect()
finally:
TransactionalResource.disconnect(self)
###################################
# overriding the following methods allows the contained HTTP
# resource to time out at the same time with this resource
def set_idle_timeout(self, idle_timeout):
self._http_resource.set_idle_timeout(idle_timeout)
TransactionalResource.set_idle_timeout(self, idle_timeout)
def reset_idle_timeout(self):
self._http_resource.reset_idle_timeout()
TransactionalResource.reset_idle_timeout(self)
def set_max_age(self, max_age):
self._http_resource.set_max_age(max_age)
TransactionalResource.set_max_age(self, max_age)
def _expired(self):
return self._http_resource.expired or \
TransactionalResource._expired(self)
###################################
def __getattr__(self, name):
self._attrs.append(name)
return self
###################################
def __call__(self, *args):
try:
method, self._attrs = ".".join(self._attrs), []
request = dumps(args, methodname = method,
encoding = self._request_encoding, allow_none = self._allow_none)
request_description = "XMLRPC request {0:s} to {1:s}".\
format(method, self._http_resource.server_info)
except:
ResourceError.rethrow(recoverable = True)
pmnc.log.info("sending {0:s}".format(request_description))
try:
status_code, headers, content = \
self._http_resource.post(self._server_uri, request.encode(self._request_encoding),
{ "Content-Type": "text/xml" })
if status_code != 200:
raise Exception("HTTP request returned code {0:d}".format(status_code))
result = loads(content)[0][0]
except Fault as e:
pmnc.log.warning("{0:s} returned fault {1:d}: {2:s}".\
format(request_description, e.faultCode, e.faultString))
ResourceError.rethrow(code = e.faultCode,
description = e.faultString, terminal = False)
except:
pmnc.log.warning("{0:s} failed: {1:s}".\
format(request_description, exc_string()))
raise
else:
pmnc.log.info("XMLRPC request returned successfully")
return result
###############################################################################
def self_test():
from socket import socket, AF_INET, SOCK_STREAM
from pmnc.request import fake_request
from pmnc.self_test import active_interface
def sendall(ifc, data):
s = socket(AF_INET, SOCK_STREAM)
s.connect(ifc.listener_address)
s.sendall(data)
return s
def recvall(s):
result = b""
data = s.recv(1024)
while data:
result += data
data = s.recv(1024)
return result
rus = "\u0410\u0411\u0412\u0413\u0414\u0415\u0401\u0416\u0417\u0418\u0419" \
"\u041a\u041b\u041c\u041d\u041e\u041f\u0420\u0421\u0422\u0423\u0424" \
"\u0425\u0426\u0427\u0428\u0429\u042c\u042b\u042a\u042d\u042e\u042f" \
"\u0430\u0431\u0432\u0433\u0434\u0435\u0451\u0436\u0437\u0438\u0439" \
"\u043a\u043b\u043c\u043d\u043e\u043f\u0440\u0441\u0442\u0443\u0444" \
"\u0445\u0446\u0447\u0448\u0449\u044c\u044b\u044a\u044d\u044e\u044f"
def post_string(ifc, method, s, request_encoding):
req = "<?xml version=\"1.0\" encoding=\"{0:s}\"?>" \
"<methodCall><methodName>{1:s}</methodName>" \
"<params><param><value><string>{2:s}</string>" \
"</value></param></params></methodCall>".format(request_encoding, method, s).encode(request_encoding)
hdr = "POST / HTTP/1.0\nContent-Type: text/xml\nContent-Length: {0:d}\n\n".format(len(req))
s = sendall(ifc, hdr.encode(request_encoding) + req)
resp = recvall(s)
assert resp.startswith(b"HTTP/1.1 200 OK\r\n")
resp = resp.split(b"\r\n\r\n", 1)[1]
return loads(resp)[0][0]
###################################
test_interface_config = dict \
(
protocol = "xmlrpc",
listener_address = ("127.0.0.1", 23673),
max_connections = 100,
ssl_key_cert_file = None,
ssl_ca_cert_file = None,
ssl_ciphers = None,
ssl_protocol = None,
response_encoding = "windows-1251",
original_ip_header_fields = ("X-Forwarded-For", ),
keep_alive_support = True,
keep_alive_idle_timeout = 3.0,
keep_alive_max_requests = 3,
allow_none = True
)
def interface_config(**kwargs):
result = test_interface_config.copy()
result.update(kwargs)
return result
###################################
def test_interface_start_stop():
def process_xmlrpc_request(request, response):
pass
with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)):
pass
test_interface_start_stop()
###################################
def test_interface_broken_requests():
def process_xmlrpc_request(request, response):
pass
with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)) as ifc:
s = sendall(ifc, b"POST / HTTP/1.0\nContent-Type: text/plain\n\n")
resp = recvall(s)
assert resp.startswith(b"HTTP/1.1 415 Unsupported Media Type\r\n")
s = sendall(ifc, b"POST / HTTP/1.0\nContent-Type: text/xml\nContent-Length: 3\n\nfoo")
resp = recvall(s)
assert resp.startswith(b"HTTP/1.1 500 Internal Server Error\r\n")
assert b"invalid XMLRPC request" in resp
test_interface_broken_requests()
###################################
def test_interface_marshaling():
def process_xmlrpc_request(request, response):
if request["method"] == "raise":
raise Exception(request["args"][0])
response["result"] = [request["method"], request["args"]]
with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)) as ifc:
assert post_string(ifc, "MethodName", "foo", "utf-8") == ["MethodName", ["foo"]]
assert post_string(ifc, rus, rus, "cp866") == [rus, [rus]]
try:
post_string(ifc, "raise", "foo", "iso-8859-5")
except Fault as e:
assert e.faultCode == 500 and e.faultString.startswith("Exception(\"foo\")")
else:
assert False
try:
post_string(ifc, "raise", rus, "utf-8")
except Fault as e:
assert e.faultCode == 500 and e.faultString.startswith("Exception(\"" + rus + "\")")
else:
assert False
test_interface_marshaling()
################################### TESTING RESOURCE
def test_resource():
def process_xmlrpc_request(request, response):
if request["method"] == "ShouldBe.Failing":
raise Exception(request["args"][0])
else:
response["result"] = request, pmnc.request.parameters["auth_tokens"]
with active_interface("xmlrpc_1", **interface_config(process_xmlrpc_request = process_xmlrpc_request)):
fake_request(10.0)
for i in range(16):
s = "*" * 2 ** i
n = "n" + str(i)
result = pmnc.transaction.xmlrpc_1.Module.Method(i, s, [ s ], { s: i, n: None })
assert result == [ { "method": "Module.Method", "args": [ i, s, [ s ], { s: i, n: None } ] },
{ "username": "user", "peer_ip": "127.0.0.1", "password": "pass", "encrypted": False } ]
try:
pmnc.transaction.xmlrpc_1.ShouldBe.Failing("some error")
except ResourceError as e:
assert e.code == 500 and e.description.startswith("Exception(\"some error\")")
assert not e.recoverable and not e.terminal
test_resource()
###################################
if __name__ == "__main__": import pmnc.self_test; pmnc.self_test.run()
###############################################################################
# EOF
| 41.184825 | 125 | 0.544003 | true | true | |
f71a9d6ee5b89554965ee5cfb0da2b1898c17923 | 529 | py | Python | examples/ethernet/eth_connection.py | ingeniamc/ingenialink-python | 6011931697e48456f5638c2848303aac2e5bcb75 | [
"MIT"
] | 15 | 2017-08-30T13:43:14.000Z | 2022-03-29T07:04:30.000Z | examples/ethernet/eth_connection.py | ingeniamc/ingenialink-python | 6011931697e48456f5638c2848303aac2e5bcb75 | [
"MIT"
] | 11 | 2017-08-28T11:23:18.000Z | 2022-03-28T23:48:11.000Z | examples/ethernet/eth_connection.py | ingeniamc/ingenialink-python | 6011931697e48456f5638c2848303aac2e5bcb75 | [
"MIT"
] | 9 | 2017-09-30T08:28:42.000Z | 2022-03-12T19:11:43.000Z | import sys
from ingenialink.ethernet.network import EthernetNetwork, NET_TRANS_PROT
def connection_example():
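    # Connect to the drive at 192.168.2.22 over UDP using its XDF dictionary file;
    # the 1061 argument is forwarded unchanged to connect_to_slave.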
net = EthernetNetwork()
servo = net.connect_to_slave("192.168.2.22",
"../../resources/dictionaries/eve-net-c_eth_1.8.1.xdf",
1061,
NET_TRANS_PROT.UDP)
print(servo.read('DRV_ID_SOFTWARE_VERSION'))
net.disconnect_from_slave(servo)
if __name__ == '__main__':
connection_example()
sys.exit(0)
| 25.190476 | 88 | 0.597353 | import sys
from ingenialink.ethernet.network import EthernetNetwork, NET_TRANS_PROT
def connection_example():
net = EthernetNetwork()
servo = net.connect_to_slave("192.168.2.22",
"../../resources/dictionaries/eve-net-c_eth_1.8.1.xdf",
1061,
NET_TRANS_PROT.UDP)
print(servo.read('DRV_ID_SOFTWARE_VERSION'))
net.disconnect_from_slave(servo)
if __name__ == '__main__':
connection_example()
sys.exit(0)
| true | true |
f71a9e181df7b219ef25d20d2a8f66302f4a6696 | 355 | py | Python | experiments/heat-3d/tmp_files/9010.py | LoopTilingBenchmark/benchmark | 52a3d2e70216552a498fd91de02a2fa9cb62122c | [
"BSD-2-Clause"
] | null | null | null | experiments/heat-3d/tmp_files/9010.py | LoopTilingBenchmark/benchmark | 52a3d2e70216552a498fd91de02a2fa9cb62122c | [
"BSD-2-Clause"
] | null | null | null | experiments/heat-3d/tmp_files/9010.py | LoopTilingBenchmark/benchmark | 52a3d2e70216552a498fd91de02a2fa9cb62122c | [
"BSD-2-Clause"
] | null | null | null | from chill import *
source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/heat-3d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/heat-3d/tmp_files/9010.c')
procedure('kernel_heat_3d')
loop(0)
known('n>3')
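# Apply two levels of loop tiling to statements 0 and 1 of the heat-3d kernel (tile sizes 8 and 64).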
tile(0,2,8,2)
tile(0,4,64,3)
tile(1,2,8,2)
tile(1,4,64,3)
| 25.357143 | 116 | 0.752113 | from chill import *
source('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/polybench/polybench-code/stencils/heat-3d/kernel.c')
destination('/uufs/chpc.utah.edu/common/home/u1142914/lib/ytopt_vinu/experiments/heat-3d/tmp_files/9010.c')
procedure('kernel_heat_3d')
loop(0)
known('n>3')
tile(0,2,8,2)
tile(0,4,64,3)
tile(1,2,8,2)
tile(1,4,64,3)
| true | true |
f71aa1575b68c457900ef0939ac431d1293e82a4 | 2,733 | py | Python | tensorflow_datasets/testing/starcraft.py | haideraltahan/datasets | aad5c7ea705949d20817fcc49a892bb2a21532f0 | [
"Apache-2.0"
] | 14 | 2019-03-30T02:11:29.000Z | 2021-11-16T12:06:32.000Z | tensorflow_datasets/testing/starcraft.py | haideraltahan/datasets | aad5c7ea705949d20817fcc49a892bb2a21532f0 | [
"Apache-2.0"
] | 1 | 2019-09-13T15:10:18.000Z | 2019-09-13T21:05:46.000Z | tensorflow_datasets/testing/starcraft.py | haideraltahan/datasets | aad5c7ea705949d20817fcc49a892bb2a21532f0 | [
"Apache-2.0"
] | 10 | 2019-03-31T08:35:29.000Z | 2021-09-01T06:28:43.000Z | # coding=utf-8
# Copyright 2019 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool for preparing test example of Starcraft dataset.
./starcraft --resolution=64 --output_file=test.tfrecords
./starcraft --resolution=64 --output_file=train_0.tfrecords
./starcraft --resolution=64 --output_file=train_1.tfrecords
./starcraft --resolution=64 --output_file=valid.tfrecords
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import numpy as np
import png
import six
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_integer("resolution", 64, "Resolution of the video.")
flags.DEFINE_string("output_file", None, "Path to the output file.")
def main(argv):
if len(argv) > 1:
raise tf.app.UsageError("Too many command-line arguments.")
writer = tf.io.TFRecordWriter(FLAGS.output_file)
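  # Each output file holds a single SequenceExample carrying 20 random RGB frames.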
feature_list = {}
frame_list = []
for _ in range(20):
# generate 20 frames.
png_image = six.StringIO()
png.from_array(
np.random.randint(
low=0,
high=255,
size=(FLAGS.resolution, FLAGS.resolution, 3),
dtype=np.uint8), "RGB").save(png_image)
frame_list.append(
tf.train.Feature(
bytes_list=tf.train.BytesList(value=[png_image.getvalue()])))
png_image.close()
feature_list["rgb_screen"] = tf.train.FeatureList(feature=frame_list)
context_feature = {}
context_feature["game_duration_loops"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[20]))
context_feature["game_duration_seconds"] = tf.train.Feature(
float_list=tf.train.FloatList(value=[20.0]))
context_feature["n_steps"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[20]))
context_feature["screen_size"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[FLAGS.resolution, FLAGS.resolution]))
example = tf.train.SequenceExample(
feature_lists=tf.train.FeatureLists(feature_list=feature_list),
context=tf.train.Features(feature=context_feature))
writer.write(example.SerializeToString())
writer.close()
if __name__ == "__main__":
app.run(main)
| 32.152941 | 80 | 0.726674 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import flags
import numpy as np
import png
import six
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_integer("resolution", 64, "Resolution of the video.")
flags.DEFINE_string("output_file", None, "Path to the output file.")
def main(argv):
if len(argv) > 1:
raise tf.app.UsageError("Too many command-line arguments.")
writer = tf.io.TFRecordWriter(FLAGS.output_file)
feature_list = {}
frame_list = []
for _ in range(20):
png_image = six.StringIO()
png.from_array(
np.random.randint(
low=0,
high=255,
size=(FLAGS.resolution, FLAGS.resolution, 3),
dtype=np.uint8), "RGB").save(png_image)
frame_list.append(
tf.train.Feature(
bytes_list=tf.train.BytesList(value=[png_image.getvalue()])))
png_image.close()
feature_list["rgb_screen"] = tf.train.FeatureList(feature=frame_list)
context_feature = {}
context_feature["game_duration_loops"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[20]))
context_feature["game_duration_seconds"] = tf.train.Feature(
float_list=tf.train.FloatList(value=[20.0]))
context_feature["n_steps"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[20]))
context_feature["screen_size"] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[FLAGS.resolution, FLAGS.resolution]))
example = tf.train.SequenceExample(
feature_lists=tf.train.FeatureLists(feature_list=feature_list),
context=tf.train.Features(feature=context_feature))
writer.write(example.SerializeToString())
writer.close()
if __name__ == "__main__":
app.run(main)
| true | true |
f71aa2e9fb55e8ff5df09593abc82b3ea64662a2 | 3,133 | py | Python | core/storage/recommendations/gae_models.py | kaylahardie/oppia | e93ed02dfc7f654ef4fb62268c1a9b9d9ded30ec | [
"Apache-2.0"
] | 1 | 2021-06-26T00:31:08.000Z | 2021-06-26T00:31:08.000Z | core/storage/recommendations/gae_models.py | kaylahardie/oppia | e93ed02dfc7f654ef4fb62268c1a9b9d9ded30ec | [
"Apache-2.0"
] | 1 | 2020-03-02T21:05:42.000Z | 2020-03-03T07:09:51.000Z | core/storage/recommendations/gae_models.py | kaylahardie/oppia | e93ed02dfc7f654ef4fb62268c1a9b9d9ded30ec | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2015 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for Oppia recommendations."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.platform import models
from google.appengine.ext import ndb
(base_models,) = models.Registry.import_models([models.NAMES.base_model])
TOPIC_SIMILARITIES_ID = 'topics'
class ExplorationRecommendationsModel(
base_models.BaseMapReduceBatchResultsModel):
"""A list of recommended explorations similar to an exploration.
Instances of this class are keyed by exploration id.
"""
# Ids of recommended explorations.
recommended_exploration_ids = ndb.StringProperty(
repeated=True, indexed=False)
@staticmethod
def get_deletion_policy():
"""Exploration recommendations are deleted only if the corresponding
exploration is not public.
"""
return base_models.DELETION_POLICY.KEEP_IF_PUBLIC
@staticmethod
def get_export_policy():
"""Model does not contain user data."""
return base_models.EXPORT_POLICY.NOT_APPLICABLE
@classmethod
def has_reference_to_user_id(cls, unused_user_id):
"""ExplorationRecommendationsModel doesn't reference any user_id
directly.
Args:
unused_user_id: str. The (unused) ID of the user whose data
should be checked.
Returns:
bool. Whether any models refer to the given user ID.
"""
return False
class TopicSimilaritiesModel(base_models.BaseModel):
"""This model stores the similarity between any two topics. The topic
similarities are stored as a JSON object, representing a 2D dict where the
keys are topic names and the values are the similarities. The dict should
be symmetric. A similarity value is a real number between 0.0 and 1.0.
There should only be one instance of this class, and it is keyed by
TOPIC_SIMILARITIES_ID.
Currently, topics are the same as the default categories. However, this may
change in the future.
"""
content = ndb.JsonProperty(required=True)
@staticmethod
def get_deletion_policy():
"""There is only a single TopicSimilaritiesModel in the entire
codebase.
"""
return base_models.DELETION_POLICY.NOT_APPLICABLE
@staticmethod
def get_export_policy():
"""Model does not contain user data."""
return base_models.EXPORT_POLICY.NOT_APPLICABLE
| 32.978947 | 79 | 0.719757 |
from __future__ import absolute_import
from __future__ import unicode_literals
from core.platform import models
from google.appengine.ext import ndb
(base_models,) = models.Registry.import_models([models.NAMES.base_model])
TOPIC_SIMILARITIES_ID = 'topics'
class ExplorationRecommendationsModel(
base_models.BaseMapReduceBatchResultsModel):
recommended_exploration_ids = ndb.StringProperty(
repeated=True, indexed=False)
@staticmethod
def get_deletion_policy():
return base_models.DELETION_POLICY.KEEP_IF_PUBLIC
@staticmethod
def get_export_policy():
return base_models.EXPORT_POLICY.NOT_APPLICABLE
@classmethod
def has_reference_to_user_id(cls, unused_user_id):
return False
class TopicSimilaritiesModel(base_models.BaseModel):
content = ndb.JsonProperty(required=True)
@staticmethod
def get_deletion_policy():
return base_models.DELETION_POLICY.NOT_APPLICABLE
@staticmethod
def get_export_policy():
return base_models.EXPORT_POLICY.NOT_APPLICABLE
| true | true |
f71aa357327a98795cb190e3909dda5f261e7b6a | 25,206 | py | Python | acore/classifier_cov_pow_toy_pvalue.py | zhao-david/ACORE-LFI | 91de88b77f0be110e42ed91bbb7a50b7ca83319a | [
"MIT"
] | null | null | null | acore/classifier_cov_pow_toy_pvalue.py | zhao-david/ACORE-LFI | 91de88b77f0be110e42ed91bbb7a50b7ca83319a | [
"MIT"
] | null | null | null | acore/classifier_cov_pow_toy_pvalue.py | zhao-david/ACORE-LFI | 91de88b77f0be110e42ed91bbb7a50b7ca83319a | [
"MIT"
] | null | null | null | from warnings import simplefilter
simplefilter(action='ignore', category=FutureWarning)
import numpy as np
import argparse
import pandas as pd
from tqdm.auto import tqdm
from datetime import datetime
from sklearn.metrics import log_loss
import seaborn as sns
import matplotlib.pyplot as plt
from utils.functions import train_clf, compute_statistics_single_t0, clf_prob_value, compute_bayesfactor_single_t0, \
odds_ratio_loss, train_pvalue_clf
from models.toy_poisson import ToyPoissonLoader
from models.toy_gmm import ToyGMMLoader
from models.toy_gamma import ToyGammaLoader
from or_classifiers.toy_example_list import classifier_dict, classifier_dict_mlpcomp, classifier_pvalue_dict
model_dict = {
'poisson': ToyPoissonLoader,
'gmm': ToyGMMLoader,
'gamma': ToyGammaLoader
}
def main(run, rep, b, b_prime, alpha, t0_val, sample_size_obs, test_statistic, mlp_comp=False,
monte_carlo_samples=500, debug=False, seed=7, size_check=1000, verbose=False, marginal=False,
size_marginal=1000, guided_sim=False, guided_sample=1000, empirical_marginal=True):
    # Shrink sample sizes and repetition counts when debugging
b = b if not debug else 100
b_prime = b_prime if not debug else 100
size_check = size_check if not debug else 100
rep = rep if not debug else 2
model_obj = model_dict[run](marginal=marginal, size_marginal=size_marginal, empirical_marginal=empirical_marginal)
classifier_dict_run = classifier_dict_mlpcomp if mlp_comp else classifier_dict
# Get the correct functions
msnh_sampling_func = model_obj.sample_msnh_algo5
grid_param = model_obj.grid
gen_obs_func = model_obj.sample_sim
gen_sample_func = model_obj.generate_sample
gen_param_fun = model_obj.sample_param_values
t0_grid = model_obj.pred_grid
tp_func = model_obj.compute_exact_prob
    # Create a reference sample used to estimate the cross-entropy of each classifier
np.random.seed(seed)
sample_check = gen_sample_func(sample_size=size_check, marginal=marginal)
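    # Columns of sample_check: theta (first d columns), Bernoulli label (column d), x (remaining d_obs columns)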
theta_vec = sample_check[:, :model_obj.d]
x_vec = sample_check[:, (model_obj.d + 1):]
bern_vec = sample_check[:, model_obj.d]
true_prob_vec = tp_func(theta_vec=theta_vec, x_vec=x_vec)
entropy_est = -np.average([np.log(true_prob_vec[kk]) if el == 1
else np.log(1 - true_prob_vec[kk])
for kk, el in enumerate(bern_vec)])
# Loop over repetitions and classifiers
# Each time we train the different classifiers, we build the intervals and we record
# whether the point is in or not.
out_val = []
out_cols = ['test_statistic', 'b_prime', 'b', 'classifier', 'classifier_pvalue', 'run', 'rep', 'sample_size_obs',
'cross_entropy_loss', 'cross_entropy_loss_pvalue', 't0_true_val', 'theta_0_current', 'on_true_t0',
'estimated_pvalue', 'in_confint', 'out_confint', 'size_CI', 'true_entropy', 'or_loss_value',
'monte_carlo_samples', 'guided_sim', 'empirical_marginal', 'guided_sample']
pbar = tqdm(total=rep, desc='Toy Example for Simulations, n=%s, b=%s' % (sample_size_obs, b))
rep_counter = 0
not_update_flag = False
while rep_counter < rep:
# Generates samples for each t0 values, so to be able to check both coverage and power
x_obs = gen_obs_func(sample_size=sample_size_obs, true_param=t0_val)
# Train the classifier for the odds
clf_odds_fitted = {}
clf_pvalue_fitted = {}
for clf_name, clf_model in sorted(classifier_dict_run.items(), key=lambda x: x[0]):
clf_odds = train_clf(sample_size=b, clf_model=clf_model, gen_function=gen_sample_func,
clf_name=clf_name, nn_square_root=True)
if verbose:
print('----- %s Trained' % clf_name)
if test_statistic == 'acore':
tau_obs = np.array([
compute_statistics_single_t0(
clf=clf_odds, obs_sample=x_obs, t0=theta_0, grid_param_t1=grid_param,
d=model_obj.d, d_obs=model_obj.d_obs) for theta_0 in t0_grid])
elif test_statistic == 'avgacore':
tau_obs = np.array([
compute_bayesfactor_single_t0(
clf=clf_odds, obs_sample=x_obs, t0=theta_0, gen_param_fun=gen_param_fun,
d=model_obj.d, d_obs=model_obj.d_obs, log_out=False) for theta_0 in t0_grid])
elif test_statistic == 'logavgacore':
tau_obs = np.array([
compute_bayesfactor_single_t0(
clf=clf_odds, obs_sample=x_obs, t0=theta_0, gen_param_fun=gen_param_fun,
d=model_obj.d, d_obs=model_obj.d_obs, log_out=True) for theta_0 in t0_grid])
else:
raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
' Currently %s' % test_statistic)
# Calculating cross-entropy
est_prob_vec = clf_prob_value(clf=clf_odds, x_vec=x_vec, theta_vec=theta_vec, d=model_obj.d,
d_obs=model_obj.d_obs)
loss_value = log_loss(y_true=bern_vec, y_pred=est_prob_vec)
# Calculating or loss
or_loss_value = odds_ratio_loss(clf=clf_odds, x_vec=x_vec, theta_vec=theta_vec,
bern_vec=bern_vec, d=1, d_obs=1)
clf_odds_fitted[clf_name] = (tau_obs, loss_value, or_loss_value)
# Train the P-value regression algorithm for confidence levels
if guided_sim:
# Commenting the above -- we now sample a set of thetas from the parameter (of size guided_sample)
# budget, then resample them according to the odds values, fit a gaussian and then sample the
# datasets from that.
theta_mat_sample = gen_param_fun(sample_size=guided_sample)
if test_statistic == 'acore':
stats_sample = np.apply_along_axis(arr=theta_mat_sample.reshape(-1, 1), axis=1,
func1d=lambda row: compute_statistics_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row,
grid_param_t1=grid_param,
d=model_obj.d,
d_obs=model_obj.d_obs
))
elif test_statistic == 'avgacore':
stats_sample = np.apply_along_axis(arr=theta_mat_sample.reshape(-1, 1), axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row,
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples
))
elif test_statistic == 'logavgacore':
stats_sample = np.apply_along_axis(arr=theta_mat_sample.reshape(-1, 1), axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row,
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples,
log_out=True
))
else:
raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
' Currently %s' % test_statistic)
# If there are log-odds, then some of the values might be negative, so we need to exponentiate them
# so to make sure that the large negative numbers are counted correctly (i.e. as very low probability,
# not probabilities with large magnitudes).
if test_statistic in ['acore', 'logavgacore']:
stats_sample = np.exp(stats_sample)
stats_sample = stats_sample/np.sum(stats_sample)
theta_mat_gaussian_fit = np.random.choice(a=theta_mat_sample, p=stats_sample.reshape(-1, ),
size=guided_sample)
                # Fall back to a unit standard deviation when the resampled
                # thetas are degenerate (zero spread).
                std_gaussian_fit = np.std(theta_mat_gaussian_fit) if np.std(theta_mat_gaussian_fit) > 0.0 else 1.0
theta_mat = np.clip(
a=np.random.normal(size=b_prime, loc=np.mean(theta_mat_gaussian_fit),
scale=std_gaussian_fit),
a_min=model_obj.low_int, a_max=model_obj.high_int)
sample_mat = np.apply_along_axis(arr=theta_mat.reshape(-1, 1), axis=1,
func1d=lambda row: gen_obs_func(sample_size=sample_size_obs,
true_param=row))
else:
# Generate a matrix with values for both the sampled thetas as the actual samples
theta_mat, sample_mat = msnh_sampling_func(b_prime=b_prime, sample_size=sample_size_obs)
full_mat = np.hstack((theta_mat.reshape(-1, 1), sample_mat))
if test_statistic == 'acore':
stats_mat_generated = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_statistics_single_t0(
clf=clf_odds,
obs_sample=row[model_obj.d:],
t0=row[:model_obj.d],
grid_param_t1=grid_param,
d=model_obj.d,
d_obs=model_obj.d_obs
))
stats_mat_observed = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_statistics_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row[:model_obj.d],
grid_param_t1=grid_param,
d=model_obj.d,
d_obs=model_obj.d_obs
))
elif test_statistic == 'avgacore':
stats_mat_generated = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=row[model_obj.d:],
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples
))
stats_mat_observed = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples
))
elif test_statistic == 'logavgacore':
stats_mat_generated = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=row[model_obj.d:],
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples,
log_out=True
))
stats_mat_observed = np.apply_along_axis(arr=full_mat, axis=1,
func1d=lambda row: compute_bayesfactor_single_t0(
clf=clf_odds,
obs_sample=x_obs,
t0=row[:model_obj.d],
gen_param_fun=gen_param_fun,
d=model_obj.d,
d_obs=model_obj.d_obs,
monte_carlo_samples=monte_carlo_samples,
log_out=True
))
else:
raise ValueError('The variable test_statistic needs to be either acore, avgacore, logavgacore.'
' Currently %s' % test_statistic)
if np.any(np.isnan(stats_mat_generated)) or not np.all(np.isfinite(stats_mat_generated)) or \
np.any(np.isnan(stats_mat_observed)) or not np.all(np.isfinite(stats_mat_observed)):
not_update_flag = True
break
# Comparing the two vectors of values
clf_pvalue_fitted[clf_name] = {}
indicator_vec = np.greater(stats_mat_observed, stats_mat_generated).astype(int)
for clf_name_pvalue, clf_model_pvalue in sorted(classifier_pvalue_dict.items(), key=lambda x: x[0]):
# If there the indicator_vec is either all 0 or all 1, do not fit a classifier or sklearn will throw
# an error out. Just return the class.
if sum(indicator_vec) <= 1 or sum(indicator_vec) >= len(indicator_vec) - 1:
pval_pred = np.repeat(sum(indicator_vec) / len(indicator_vec), b_prime)
loss_value_pval = np.nan
else:
clf_pvalue = train_pvalue_clf(clf_model=clf_model_pvalue, X=theta_mat.reshape(-1, model_obj.d),
y=indicator_vec.reshape(-1, ), clf_name=clf_name_pvalue,
nn_square_root=True)
pval_pred = clf_pvalue.predict_proba(t0_grid.reshape(-1, model_obj.d))[:, 1]
theta_mat_pred = clf_pvalue.predict_proba(theta_mat.reshape(-1, model_obj.d))[:, 1]
loss_value_pval = log_loss(y_true=indicator_vec, y_pred=theta_mat_pred)
clf_pvalue_fitted[clf_name][clf_name_pvalue] = (pval_pred, loss_value_pval)
# If there were some problems in calculating the statistics, get out of the loop
if not_update_flag:
not_update_flag = False
continue
# At this point all it's left is to record
for clf_name, (tau_obs_val, cross_ent_loss, or_loss_value) in clf_odds_fitted.items():
for clf_name_qr, (pvalue_val, pvalue_celoss_val) in clf_pvalue_fitted[clf_name].items():
size_temp = np.mean((pvalue_val > alpha).astype(int))
for kk, theta_0_current in enumerate(t0_grid):
out_val.append([
test_statistic, b_prime, b, clf_name, clf_name_qr, run, rep_counter, sample_size_obs,
cross_ent_loss, pvalue_celoss_val, t0_val, theta_0_current, int(t0_val == theta_0_current),
pvalue_val[kk], int(pvalue_val[kk] > alpha),
int(pvalue_val[kk] <= alpha), size_temp, entropy_est, or_loss_value,
monte_carlo_samples, int(guided_sim), int(empirical_marginal), guided_sample
])
pbar.update(1)
rep_counter += 1
# Saving the results
out_df = pd.DataFrame.from_records(data=out_val, index=range(len(out_val)), columns=out_cols)
out_dir = 'sims/classifier_cov_pow_toy/'
out_filename = 'classifier_reps_cov_pow_toy_pvalues_%steststats_%s_%sB_%sBprime_%s_%srep_alpha%s_sampleobs%s_t0val%s%s_%s.csv' % (
test_statistic, 'mlp_comp' if mlp_comp else 'toyclassifiers', b, b_prime, run, rep,
str(alpha).replace('.', '-'), sample_size_obs,
str(t0_val).replace('.', '-'),
'_empirmarg' if empirical_marginal else '',
datetime.strftime(datetime.today(), '%Y-%m-%d-%H-%M')
)
out_df.to_csv(out_dir + out_filename)
# Print results
cov_df = out_df[out_df['on_true_t0'] == 1][['classifier', 'classifier_pvalue', 'in_confint',
'cross_entropy_loss', 'cross_entropy_loss_pvalue', 'size_CI']]
print(cov_df.groupby(['classifier', 'classifier_pvalue']).agg({'in_confint': [np.average],
'size_CI': [np.average, np.std],
'cross_entropy_loss': [np.average],
'cross_entropy_loss_pvalue': [np.average]}))
# Power plots
    out_df['class_combo'] = out_df[['classifier', 'classifier_pvalue']].apply(lambda x: x[0] + '---' + x[1], axis=1)
plot_df = out_df[['class_combo', 'theta_0_current', 'out_confint']].groupby(
['class_combo', 'theta_0_current']).mean().reset_index()
fig = plt.figure(figsize=(20, 10))
sns.lineplot(x='theta_0_current', y='out_confint', hue='class_combo', data=plot_df, palette='cubehelix')
plt.legend(loc='best', fontsize=25)
plt.xlabel(r'$\theta$', fontsize=25)
plt.ylabel('Power', fontsize=25)
plt.title("Power of Hypothesis Test, B=%s, B'=%s, n=%s, %s" % (
b, b_prime, sample_size_obs, run.title()), fontsize=25)
out_dir = 'images/classifier_cov_pow_toy/'
outfile_name = 'power_classifier_reps_pvalue_%steststats_%sB_%sBprime_%s_%srep_alpha%s_sampleobs%s_t0val%s_%s.pdf' % (
test_statistic, b, b_prime, run, rep, str(alpha).replace('.', '-'), sample_size_obs,
str(t0_val).replace('.', '-'),
datetime.strftime(datetime.today(), '%Y-%m-%d')
)
plt.tight_layout()
plt.savefig(out_dir + outfile_name)
plt.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--seed', action="store", type=int, default=7,
help='Random State')
parser.add_argument('--rep', action="store", type=int, default=10,
help='Number of Repetitions for calculating the Pinball loss')
parser.add_argument('--b', action="store", type=int, default=5000,
help='Sample size to train the classifier for calculating odds')
parser.add_argument('--b_prime', action="store", type=int, default=1000,
help='Sample size to train the quantile regression algorithm')
parser.add_argument('--marginal', action='store_true', default=False,
help='Whether we are using a parametric approximation of the marginal or'
'the baseline reference G')
parser.add_argument('--alpha', action="store", type=float, default=0.1,
help='Statistical confidence level')
parser.add_argument('--run', action="store", type=str, default='poisson',
help='Problem to run')
parser.add_argument('--debug', action='store_true', default=False,
help='If true, a very small value for the sample sizes is fit to make sure the'
'file can run quickly for debugging purposes')
parser.add_argument('--verbose', action='store_true', default=False,
help='If true, logs are printed to the terminal')
parser.add_argument('--sample_size_obs', action="store", type=int, default=10,
help='Sample size of the actual observed data.')
parser.add_argument('--t0_val', action="store", type=float, default=10.0,
help='True parameter which generates the observed dataset')
parser.add_argument('--size_marginal', action="store", type=int, default=1000,
help='Sample size of the actual marginal distribution, if marginal is True.')
parser.add_argument('--monte_carlo_samples', action="store", type=int, default=500,
help='Sample size for the calculation of the avgacore and logavgacore statistic.')
parser.add_argument('--test_statistic', action="store", type=str, default='acore',
help='Test statistic to compute confidence intervals. Can be acore|avgacore|logavgacore')
parser.add_argument('--mlp_comp', action='store_true', default=False,
help='If true, we compare different MLP training algorithm.')
parser.add_argument('--empirical_marginal', action='store_true', default=False,
help='Whether we are sampling directly from the empirical marginal for G')
parser.add_argument('--guided_sim', action='store_true', default=False,
help='If true, we guided the sampling for the B prime in order to get meaningful results.')
parser.add_argument('--guided_sample', action="store", type=int, default=2500,
help='The sample size to be used for the guided simulation. Only used if guided_sim is True.')
argument_parsed = parser.parse_args()
# b_vec = [100, 500, 1000]
# for b_val in b_vec:
main(
run=argument_parsed.run,
rep=argument_parsed.rep,
marginal=argument_parsed.marginal,
b=argument_parsed.b,
b_prime=argument_parsed.b_prime,
alpha=argument_parsed.alpha,
debug=argument_parsed.debug,
sample_size_obs=argument_parsed.sample_size_obs,
t0_val=argument_parsed.t0_val,
seed=argument_parsed.seed,
verbose=argument_parsed.verbose,
size_marginal=argument_parsed.size_marginal,
monte_carlo_samples=argument_parsed.monte_carlo_samples,
test_statistic=argument_parsed.test_statistic,
mlp_comp=argument_parsed.mlp_comp,
empirical_marginal=argument_parsed.empirical_marginal,
guided_sim=argument_parsed.guided_sim,
guided_sample=argument_parsed.guided_sample
)
f71aa447e93126ff1ef79e05d8bb36f39e9bc2a4 | 4210 | py | Python | openshift/test/test_v1_load_balancer_ingress.py | flaper87/openshift-restclient-python | 13d5d86ca89035b9f596032e7a34f3cc33bf8f18 | ["Apache-2.0"] | null | null | null | openshift/test/test_v1_load_balancer_ingress.py | flaper87/openshift-restclient-python | 13d5d86ca89035b9f596032e7a34f3cc33bf8f18 | ["Apache-2.0"] | null | null | null | openshift/test/test_v1_load_balancer_ingress.py | flaper87/openshift-restclient-python | 13d5d86ca89035b9f596032e7a34f3cc33bf8f18 | ["Apache-2.0"] | null | null | null
# coding: utf-8
"""
OpenShift API (with Kubernetes)
OpenShift provides builds, application lifecycle, image content management, and administrative policy on top of Kubernetes. The API allows consistent management of those objects. All API operations are authenticated via an Authorization bearer token that is provided for service accounts as a generated secret (in JWT form) or via the native OAuth endpoint located at /oauth/authorize. Core infrastructure components may use openshift.client certificates that require no authentication. All API operations return a 'resourceVersion' string that represents the version of the object in the underlying storage. The standard LIST operation performs a snapshot read of the underlying objects, returning a resourceVersion representing a consistent version of the listed objects. The WATCH operation allows all updates to a set of objects after the provided resourceVersion to be observed by a openshift.client. By listing and beginning a watch from the returned resourceVersion, openshift.clients may observe a consistent view of the state of one or more objects. Note that WATCH always returns the update after the provided resourceVersion. Watch may be extended a limited time in the past - using etcd 2 the watch window is 1000 events (which on a large cluster may only be a few tens of seconds) so openshift.clients must explicitly handle the \"watch to old error\" by re-listing. Objects are divided into two rough categories - those that have a lifecycle and must reflect the state of the cluster, and those that have no state. Objects with lifecycle typically have three main sections: * 'metadata' common to all objects * a 'spec' that represents the desired state * a 'status' that represents how much of the desired state is reflected on the cluster at the current time Objects that have no state have 'metadata' but may lack a 'spec' or 'status' section. Objects are divided into those that are namespace scoped (only exist inside of a namespace) and those that are cluster scoped (exist outside of a namespace). A namespace scoped resource will be deleted when the namespace is deleted and cannot be created if the namespace has not yet been created or is in the process of deletion. Cluster scoped resources are typically only accessible to admins - resources like nodes, persistent volumes, and cluster policy. All objects have a schema that is a combination of the 'kind' and 'apiVersion' fields. This schema is additive only for any given version - no backwards incompatible changes are allowed without incrementing the apiVersion. The server will return and accept a number of standard responses that share a common schema - for instance, the common error type is 'unversioned.Status' (described below) and will be returned on any error from the API server. The API is available in multiple serialization formats - the default is JSON (Accept: application/json and Content-Type: application/json) but openshift.clients may also use YAML (application/yaml) or the native Protobuf schema (application/vnd.kubernetes.protobuf). Note that the format of the WATCH API call is slightly different - for JSON it returns newline delimited objects while for Protobuf it returns length-delimited frames (4 bytes in network-order) that contain a 'versioned.Watch' Protobuf object. See the OpenShift documentation at https://docs.openshift.org for more information.
OpenAPI spec version: v3.6.0-alpha.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import openshift.client
from kubernetes.client.rest import ApiException
from openshift.client.models.v1_load_balancer_ingress import V1LoadBalancerIngress
class TestV1LoadBalancerIngress(unittest.TestCase):
""" V1LoadBalancerIngress unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1LoadBalancerIngress(self):
"""
Test V1LoadBalancerIngress
"""
model = openshift.client.models.v1_load_balancer_ingress.V1LoadBalancerIngress()
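# Hedged sketch (not produced by swagger-codegen): the stub above only
# constructs the model, so a slightly stronger check might assert its
# default attributes. `ip`/`hostname` are assumed to be the fields of this
# Kubernetes model, as in the upstream kubernetes client.
def _demo_default_ingress():
    ingress = V1LoadBalancerIngress()
    return ingress.ip is None and ingress.hostname is None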
if __name__ == '__main__':
unittest.main()
f71aa5297e2e652741a2be68088de722b87d9713 | 3419 | py | Python | openGaussBase/testcase/TOOLS/INTERNAL_TOOLS/gaussdb/Opengauss_Function_Tools_Gaussdb_Case0014.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | ["MulanPSL-1.0"] | null | null | null | openGaussBase/testcase/TOOLS/INTERNAL_TOOLS/gaussdb/Opengauss_Function_Tools_Gaussdb_Case0014.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | ["MulanPSL-1.0"] | null | null | null | openGaussBase/testcase/TOOLS/INTERNAL_TOOLS/gaussdb/Opengauss_Function_Tools_Gaussdb_Case0014.py | opengauss-mirror/Yat | aef107a8304b94e5d99b4f1f36eb46755eb8919e | ["MulanPSL-1.0"] | null | null | null
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type   : tools
Case Name   : When starting the gaussdb process, verify that the -e option
              sets the default date style to "European"
Description :
    1. Check the current date style
       show datestyle;
    2. Stop the running database
       gs_ctl stop -D /opt/openGauss_zl/cluster/dn1
    3. Check the process list to confirm the shutdown succeeded
       ps -ef|grep zl
    4. Run the process in the background with the gaussdb tool, setting the
       default date style to "European"
       gaussdb -D /opt/openGauss_zl/cluster/dn1 -p 19701 -e -M primary &
    5. Check whether the current date style is now European
       show datestyle;
Expect      :
    1. The current date style is shown successfully as: ISO, MDY
    2. The running database is stopped successfully
    3. The process check succeeds and confirms the database is down
    4. gaussdb runs in the background with the default date style set to
       "European" successfully
    5. The current date style is European, shown as: ISO, DMY
       show datestyle;
History :
"""
import unittest
from testcase.utils.ComThread import ComThread
from yat.test import Node
from yat.test import macro
from testcase.utils.Common import Common
from testcase.utils.CommonSH import CommonSH
from testcase.utils.Logger import Logger
class Tools(unittest.TestCase):
def setUp(self):
self.logger = Logger()
self.logger.info('--Opengauss_Function_Tools_Gaussdb_Case0014 start--')
self.userNode = Node('PrimaryDbUser')
self.userNode2 = Node('PrimaryDbUser')
self.DB_ENV_PATH = macro.DB_ENV_PATH
self.DB_INSTANCE_PATH = macro.DB_INSTANCE_PATH
self.sh_primy = CommonSH('PrimaryDbUser')
self.common = Common()
def test_systools(self):
        self.logger.info('--------Stop the running database--------')
excute_cmd1 = f'source {self.DB_ENV_PATH};' \
f'gs_ctl stop -D {self.DB_INSTANCE_PATH}'
self.logger.info(excute_cmd1)
msg1 = self.userNode.sh(excute_cmd1).result()
self.logger.info(msg1)
        self.logger.info('--------Check the process list to confirm shutdown--------')
excute_cmd2 = f'ps -ef|grep {self.userNode.ssh_user}'
self.logger.info(excute_cmd2)
msg2 = self.userNode.sh(excute_cmd2).result()
self.logger.info(msg2)
self.assertFalse(self.DB_INSTANCE_PATH in msg2)
        self.logger.info('Run gaussdb in the background with the default date style set to European')
excute_cmd3 = f'source {self.DB_ENV_PATH};' \
f'gaussdb -D {self.DB_INSTANCE_PATH} -p ' \
f'{self.userNode.db_port} -e -M primary'
self.logger.info(excute_cmd3)
thread_2 = ComThread(self.userNode2.sh, args=(excute_cmd3,))
thread_2.setDaemon(True)
thread_2.start()
thread_2.join(10)
msg_result_2 = thread_2.get_result()
self.logger.info(msg_result_2)
        self.logger.info('--------Check whether the current date style is European--------')
        sql_cmd3 = 'show datestyle;'
        self.logger.info(sql_cmd3)
msg3 = self.sh_primy.execut_db_sql(sql_cmd3)
self.logger.info(msg3)
self.common.equal_sql_mdg(msg3, 'DateStyle', 'ISO, DMY', '(1 row)',
flag='1')
def tearDown(self):
self.logger.info('-Opengauss_Function_Tools_Gaussdb_Case0014 finish-')
f71aa56817ca77eba5df4a2dd11cb0c4a9a7ea1c | 3699 | py | Python | tqdm/_monitor.py | insilications/tqdm-clr | b09a24af7ffe5c85ed0e8e64b33059b43b1be020 | ["MIT"] | 22617 | 2015-06-03T20:26:05.000Z | 2022-03-31T22:25:42.000Z | tqdm/_monitor.py | insilications/tqdm-clr | b09a24af7ffe5c85ed0e8e64b33059b43b1be020 | ["MIT"] | 1230 | 2015-06-03T13:56:41.000Z | 2022-03-30T06:03:12.000Z | tqdm/_monitor.py | insilications/tqdm-clr | b09a24af7ffe5c85ed0e8e64b33059b43b1be020 | ["MIT"] | 1445 | 2015-06-03T14:01:33.000Z | 2022-03-29T14:41:52.000Z
import atexit
from threading import Event, Thread, current_thread
from time import time
from warnings import warn
__all__ = ["TMonitor", "TqdmSynchronisationWarning"]
class TqdmSynchronisationWarning(RuntimeWarning):
"""tqdm multi-thread/-process errors which may cause incorrect nesting
but otherwise no adverse effects"""
pass
class TMonitor(Thread):
"""
Monitoring thread for tqdm bars.
Monitors if tqdm bars are taking too much time to display
and readjusts miniters automatically if necessary.
Parameters
----------
tqdm_cls : class
tqdm class to use (can be core tqdm or a submodule).
sleep_interval : float
Time to sleep between monitoring checks.
"""
_test = {} # internal vars for unit testing
def __init__(self, tqdm_cls, sleep_interval):
Thread.__init__(self)
self.daemon = True # kill thread when main killed (KeyboardInterrupt)
self.woken = 0 # last time woken up, to sync with monitor
self.tqdm_cls = tqdm_cls
self.sleep_interval = sleep_interval
self._time = self._test.get("time", time)
self.was_killed = self._test.get("Event", Event)()
atexit.register(self.exit)
self.start()
def exit(self):
self.was_killed.set()
if self is not current_thread():
self.join()
return self.report()
def get_instances(self):
# returns a copy of started `tqdm_cls` instances
return [i for i in self.tqdm_cls._instances.copy()
# Avoid race by checking that the instance started
if hasattr(i, 'start_t')]
def run(self):
cur_t = self._time()
while True:
# After processing and before sleeping, notify that we woke
# Need to be done just before sleeping
self.woken = cur_t
# Sleep some time...
self.was_killed.wait(self.sleep_interval)
# Quit if killed
if self.was_killed.is_set():
return
# Then monitor!
# Acquire lock (to access _instances)
with self.tqdm_cls.get_lock():
cur_t = self._time()
# Check tqdm instances are waiting too long to print
instances = self.get_instances()
for instance in instances:
# Check event in loop to reduce blocking time on exit
if self.was_killed.is_set():
return
# Only if mininterval > 1 (else iterations are just slow)
# and last refresh exceeded maxinterval
if (
instance.miniters > 1
and (cur_t - instance.last_print_t) >= instance.maxinterval
):
# force bypassing miniters on next iteration
# (dynamic_miniters adjusts mininterval automatically)
instance.miniters = 1
# Refresh now! (works only for manual tqdm)
instance.refresh(nolock=True)
# Remove accidental long-lived strong reference
del instance
if instances != self.get_instances(): # pragma: nocover
warn("Set changed size during iteration" +
" (see https://github.com/tqdm/tqdm/issues/481)",
TqdmSynchronisationWarning, stacklevel=2)
# Remove accidental long-lived strong references
del instances
def report(self):
return not self.was_killed.is_set()
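# Minimal usage sketch (not part of tqdm itself): drive TMonitor with a stub
# "tqdm" class so the thread starts, finds no bars to adjust, and shuts down
# cleanly. The stub only carries the attributes TMonitor actually touches.
def _demo_monitor_lifecycle():
    from threading import RLock

    class _StubTqdm:
        _instances = set()  # no live bars, so run() only sleeps
        _lock = RLock()

        @classmethod
        def get_lock(cls):
            return cls._lock

    monitor = TMonitor(_StubTqdm, sleep_interval=0.01)
    alive = monitor.report()  # True while the kill event is unset
    monitor.exit()            # sets the kill event and joins the thread
    return alive and not monitor.report()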
f71aa6cce65ae0f1ec42a02146d24feaa44f2307 | 98 | py | Python | alg4.py | devilnotcry77/devil_not_cry | a9d342d053c788ec6db2d1c5967ed55104b40045 | ["Apache-2.0"] | null | null | null | alg4.py | devilnotcry77/devil_not_cry | a9d342d053c788ec6db2d1c5967ed55104b40045 | ["Apache-2.0"] | null | null | null | alg4.py | devilnotcry77/devil_not_cry | a9d342d053c788ec6db2d1c5967ed55104b40045 | ["Apache-2.0"] | null | null | null
n = 100
for i in range(n):
for j in range(10):
print("*", end="")
print()
f71aa81665c674b5cc3278ea94c533b98549fe90 | 935 | py | Python | Swap Nodes in Pairs.py | H-isaac23/Data-Structures | 2a860549ebc87155cdcf98ca951f1e345dd40499 | ["MIT"] | null | null | null | Swap Nodes in Pairs.py | H-isaac23/Data-Structures | 2a860549ebc87155cdcf98ca951f1e345dd40499 | ["MIT"] | null | null | null | Swap Nodes in Pairs.py | H-isaac23/Data-Structures | 2a860549ebc87155cdcf98ca951f1e345dd40499 | ["MIT"] | null | null | null
"""Given a linked list, swap every two adjacent nodes and return its head.
Example 1:
Input: head = [1,2,3,4]
Output: [2,1,4,3]
Example 2:
Input: head = []
Output: []
Example 3:
Input: head = [1]
Output: [1]
Constraints:
The number of nodes in the list is in the range [0, 100].
0 <= Node.val <= 100
Follow up: Can you solve the problem without modifying the values in the list's nodes? (i.e., Only nodes themselves may
be changed.)"""
# Definition for singly-linked list.
class ListNode:
def __init__(self, val=0, next=None):
self.val = val
self.next = next
class Solution:
def swapPairs(self, head: ListNode) -> ListNode:
if head is None or head.next is None:
return head
first = head.next
second = head.next.next
first.next = head
head.next = self.swapPairs(second)
return first
# Submission Details:
# Runtime: >85.13%
# Memory: >50.67%
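# Quick usage sketch (not part of the original submission): build 1->2->3->4,
# swap adjacent pairs, and read the values back; the expected order is 2,1,4,3.
def _demo_swap_pairs():
    head = ListNode(1, ListNode(2, ListNode(3, ListNode(4))))
    node = Solution().swapPairs(head)
    values = []
    while node:
        values.append(node.val)
        node = node.next
    return values  # [2, 1, 4, 3]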
f71aa89acd39eaae1c4ded0a372a3dc7b494d67c | 189 | py | Python | blueapps/account/components/bk_token/forms.py | jin-cc/bastion-test | 9feecbe927e5446213ab25b4da4a5eca23cf6bae | ["Apache-2.0"] | 42 | 2021-06-16T12:06:03.000Z | 2022-03-29T13:18:00.000Z | blueapps/account/components/bk_token/forms.py | jin-cc/bastion-test | 9feecbe927e5446213ab25b4da4a5eca23cf6bae | ["Apache-2.0"] | 3 | 2020-06-05T20:56:09.000Z | 2021-06-10T21:29:05.000Z | blueapps/account/components/bk_token/forms.py | wangzishuo111/bk_prometheus | c6aa16d8a547a3d00fbca317f6846ad35b1297ea | ["MIT"] | 16 | 2021-07-13T01:17:57.000Z | 2022-03-01T12:39:32.000Z
# -*- coding: utf-8 -*-
from django import forms
class AuthenticationForm(forms.Form):
# bk_token format: KH7P4-VSFi_nOEoV3kj0ytcs0uZnGOegIBLV-eM3rw8
bk_token = forms.CharField()
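# Usage sketch (an assumption, not part of blueapps): validate a raw token
# string through the form above. The token value is a made-up example in the
# documented format.
def _demo_validate_token(raw_token='KH7P4-VSFi_nOEoV3kj0ytcs0uZnGOegIBLV-eM3rw8'):
    form = AuthenticationForm({'bk_token': raw_token})
    # is_valid() populates cleaned_data when the field passes validation
    return form.cleaned_data['bk_token'] if form.is_valid() else None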
f71aa8c11ea59751ae59caa6184f21489f218f12 | 422 | py | Python | CookieTTS/_2_ttm/GANTTS/run_every_epoch.py | AstraliteHeart/cookietts | c871f5f7b5790656d5b57bcd9e63946a2da52f0f | ["BSD-3-Clause"] | 25 | 2020-07-07T20:07:41.000Z | 2021-12-17T11:27:36.000Z | CookieTTS/_2_ttm/GANTTS/run_every_epoch.py | AstraliteHeart/cookietts | c871f5f7b5790656d5b57bcd9e63946a2da52f0f | ["BSD-3-Clause"] | 26 | 2020-07-04T00:06:25.000Z | 2022-02-10T03:28:35.000Z | CookieTTS/_2_ttm/GANTTS/run_every_epoch.py | AstraliteHeart/cookietts | c871f5f7b5790656d5b57bcd9e63946a2da52f0f | ["BSD-3-Clause"] | 11 | 2020-07-02T21:39:59.000Z | 2022-01-17T22:09:46.000Z
current_iteration = iteration
##########################################################################
### GAN-TTS : HIGH FIDELITY SPEECH SYNTHESIS WITH ADVERSARIAL NETWORKS ###
##########################################################################
# Learning Rate / Optimization
decay_start = 99999999
A_ = 0.2e-5
B_ = 40000
C_ = 0e-5
min_learning_rate = 1e-6
grad_clip_thresh = 75
descriminator_loss_scale = 0.1 | 28.133333 | 74 | 0.490521 | current_iteration = iteration
| true | true |
f71aa8d7c382bafc56b06793ddb3976f1a195ca1 | 11480 | py | Python | StructVBERT/tasks/vqa.py | onlyrico/AliceMind | a6a070b1610e4c4bfe84ee6c4195b2bc4f725ded | ["Apache-2.0"] | 1 | 2021-08-05T05:41:50.000Z | 2021-08-05T05:41:50.000Z | StructVBERT/tasks/vqa.py | onlyrico/AliceMind | a6a070b1610e4c4bfe84ee6c4195b2bc4f725ded | ["Apache-2.0"] | null | null | null | StructVBERT/tasks/vqa.py | onlyrico/AliceMind | a6a070b1610e4c4bfe84ee6c4195b2bc4f725ded | ["Apache-2.0"] | 1 | 2021-07-10T09:50:47.000Z | 2021-07-10T09:50:47.000Z
# coding=utf-8
# Copyleft 2019 project LXRT.
import os
import collections
import torch
import torch.nn as nn
import logging
from torch.utils.data.dataloader import DataLoader
from tqdm import tqdm
from param import args
from lxrt.qa_answer_table import load_lxmert_qa
from tasks.vqa_model import VQAModel
from tasks.vqa_data import VQADataset, VQATorchDataset, VQAEvaluator
DataTuple = collections.namedtuple("DataTuple", 'dataset loader evaluator')
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
def get_data_tuple(splits: str, bs:int, shuffle=False, drop_last=False) -> DataTuple:
dset = VQADataset(splits)
tset = VQATorchDataset(dset)
evaluator = VQAEvaluator(dset)
data_loader = DataLoader(
tset, batch_size=bs,
shuffle=shuffle, num_workers=args.num_workers,
drop_last=drop_last, pin_memory=True
)
return DataTuple(dataset=dset, loader=data_loader, evaluator=evaluator)
class WarmupOptimizer(object):
def __init__(self, _lr_base, optimizer, _data_size, _batch_size):
self.optimizer = optimizer
self._step = 0
self._lr_base = _lr_base
self._rate = 0
self._data_size = _data_size
self._batch_size = _batch_size
def step(self):
self._step += 1
rate = self.rate()
for p in self.optimizer.param_groups:
p['lr'] = rate
self._rate = rate
self.optimizer.step()
def zero_grad(self):
self.optimizer.zero_grad()
def rate(self, step=None):
if step is None:
step = self._step
if step <= int(self._data_size / self._batch_size * 1):
r = self._lr_base * 1/4.
elif step <= int(self._data_size / self._batch_size * 2):
r = self._lr_base * 2/4.
elif step <= int(self._data_size / self._batch_size * 3):
r = self._lr_base * 3/4.
else:
r = self._lr_base
return r
def adjust_learning_rate(optimizer, decay_rate):
optimizer._lr_base *= decay_rate
class VQA:
def __init__(self):
# Datasets
self.train_tuple = get_data_tuple(
args.train, bs=args.batch_size, shuffle=True, drop_last=True
)
if args.valid != "":
self.valid_tuple = get_data_tuple(
args.valid, bs=256, # for large model
shuffle=False, drop_last=False
)
else:
self.valid_tuple = None
# Model
self.model = VQAModel(self.train_tuple.dataset.num_answers)
self._lr_decay_epoch_list = [8, 10]
self._lr_decay_rate = 0.2
# Load pre-trained weights
if args.load_lxmert is not None:
self.model.lxrt_encoder.load(args.load_lxmert)
if args.load_lxmert_qa is not None:
load_lxmert_qa(args.load_lxmert_qa, self.model,
label2ans=self.train_tuple.dataset.label2ans)
if args.fix_language_bert:
assert args.patial_load
state_dict = torch.load(args.patial_load)
for k in state_dict.copy():
if not k.startswith('bert.'):
state_dict['bert.' + k.replace('gamma', 'weight').replace('beta', 'bias')] = state_dict.pop(k)
# fix bert parameters
for name, param in self.model.lxrt_encoder.model.named_parameters():
# if 'pooler' in name: # pooler not fixed
# continue
if name in state_dict:
logger.info('fix param for: {}'.format(name))
param.requires_grad = False
# GPU options
self.model = self.model.cuda()
# Loss and Optimizer
self.bce_loss = nn.BCEWithLogitsLoss()
if 'bert' in args.optim:
batch_per_epoch = len(self.train_tuple.loader)
t_total = int(batch_per_epoch * args.epochs)
logger.info("BertAdam Total Iters: %d" % t_total)
from lxrt.optimization import BertAdam
self.optim = BertAdam(list(self.model.parameters()),
lr=args.lr,
warmup=0.1,
t_total=t_total)
elif 'adam' in args.optim:
batch_per_epoch = len(self.train_tuple.loader)
optim = args.optimizer(filter(lambda p: p.requires_grad, self.model.parameters()), lr=0, betas=(0.9, 0.98), eps=1e-9)
self.optim = WarmupOptimizer(args.lr, optim, batch_per_epoch * args.batch_size, args.batch_size)
else:
self.optim = args.optimizer(self.model.parameters(), args.lr)
if args.amp_type is not None:
try:
from apex import amp
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to run this example.")
self.model, self.optim = amp.initialize(self.model, self.optim, opt_level=args.amp_type)
if args.multiGPU:
self.model.lxrt_encoder.multi_gpu()
# Output Directory
self.output = args.output
os.makedirs(self.output, exist_ok=True)
def train(self, train_tuple, eval_tuple):
dset, loader, evaluator = train_tuple
iter_wrapper = (lambda x: tqdm(x, total=len(loader))) if args.tqdm else (lambda x: x)
best_valid = 0.
for epoch in range(args.epochs):
quesid2ans = {}
if 'adam' in args.optim and epoch in self._lr_decay_epoch_list:
adjust_learning_rate(self.optim, self._lr_decay_rate)
for i, (ques_id, feats, boxes, sent, target) in iter_wrapper(enumerate(loader)):
self.model.train()
self.optim.zero_grad()
feats, boxes, target = feats.cuda(), boxes.cuda(), target.cuda()
logit = self.model(feats, boxes, sent)
assert logit.dim() == target.dim() == 2
loss = self.bce_loss(logit, target)
loss = loss * logit.size(1)
if args.multiGPU:
loss = loss.mean() # mean() to average on multi-gpu.
if args.amp_type is not None:
from apex import amp
with amp.scale_loss(loss, self.optim) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
nn.utils.clip_grad_norm_(self.model.parameters(), args.clip_norm)
self.optim.step()
score, label = logit.max(1)
for qid, l in zip(ques_id, label.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = ans
log_str = "\nEpoch %d: Train %0.2f\n" % (epoch, evaluator.evaluate(quesid2ans) * 100.)
if self.valid_tuple is not None: # Do Validation
valid_score = self.evaluate(eval_tuple)
if valid_score > best_valid:
best_valid = valid_score
self.save("BEST")
log_str += "Epoch %d: Valid %0.2f\n" % (epoch, valid_score * 100.) + \
"Epoch %d: Best %0.2f\n" % (epoch, best_valid * 100.)
logger.info(log_str)
with open(self.output + "/log.log", 'a') as f:
f.write(log_str)
f.flush()
self.save("LAST")
def predict(self, eval_tuple: DataTuple, dump=None):
"""
Predict the answers to questions in a data split.
:param eval_tuple: The data tuple to be evaluated.
:param dump: The path of saved file to dump results.
:return: A dict of question_id to answer.
"""
self.model.eval()
dset, loader, evaluator = eval_tuple
quesid2ans = {}
for i, datum_tuple in enumerate(loader):
ques_id, feats, boxes, sent = datum_tuple[:4] # Avoid seeing ground truth
with torch.no_grad():
feats, boxes = feats.cuda(), boxes.cuda()
logit = self.model(feats, boxes, sent)
if args.with_score:
logit = nn.Softmax(dim=1)(logit)
score, label = logit.max(1)
if args.with_score:
for qid, l, s in zip(ques_id, label.cpu().numpy(), score.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = (ans, str(s))
else:
for qid, l in zip(ques_id, label.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = ans
if dump is not None:
evaluator.dump_result(quesid2ans, dump)
return quesid2ans
def evaluate(self, eval_tuple: DataTuple, dump=None):
"""Evaluate all data in data_tuple."""
quesid2ans = self.predict(eval_tuple, dump)
return eval_tuple.evaluator.evaluate(quesid2ans)
@staticmethod
def oracle_score(data_tuple):
dset, loader, evaluator = data_tuple
quesid2ans = {}
for i, (ques_id, feats, boxes, sent, target) in enumerate(loader):
_, label = target.max(1)
for qid, l in zip(ques_id, label.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = ans
return evaluator.evaluate(quesid2ans)
def save(self, name):
torch.save(self.model.state_dict(),
os.path.join(self.output, "%s.pth" % name))
def load(self, path):
logger.info("Load model from %s" % path)
state_dict = torch.load("%s.pth" % path)
self.model.load_state_dict(state_dict)
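        # Note: `path` is expected without the ".pth" suffix, e.g.
        # vqa.load("snap/vqa/BEST") reads "snap/vqa/BEST.pth" (path illustrative).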
if __name__ == "__main__":
# Build Class
vqa = VQA()
# Load VQA model weights
if args.load is not None:
vqa.load(args.load)
# Test or Train
if args.test is not None:
args.fast = args.tiny = False # Always loading all data in test
if 'test' in args.test:
vqa.predict(
get_data_tuple(args.test, bs=950,
shuffle=False, drop_last=False),
dump=os.path.join(args.output, 'test_predict.json')
)
elif 'val' in args.test:
            # Since part of the validation data is used in pre-training/fine-tuning,
# only validate on the minival set.
result = vqa.evaluate(
get_data_tuple('minival', bs=950,
shuffle=False, drop_last=False),
dump=os.path.join(args.output, 'minival_predict.json')
)
logger.info(result)
else:
assert False, "No such test option for %s" % args.test
else:
logger.info('Splits in Train data: {}'.format(vqa.train_tuple.dataset.splits))
if vqa.valid_tuple is not None:
logger.info('Splits in Valid data: {}'.format(vqa.valid_tuple.dataset.splits))
logger.info("Valid Oracle: %0.2f" % (vqa.oracle_score(vqa.valid_tuple) * 100))
else:
logger.info("DO NOT USE VALIDATION")
vqa.train(vqa.train_tuple, vqa.valid_tuple)
| 38.394649 | 129 | 0.567334 |
import os
import collections
import torch
import torch.nn as nn
import logging
from torch.utils.data.dataloader import DataLoader
from tqdm import tqdm
from param import args
from lxrt.qa_answer_table import load_lxmert_qa
from tasks.vqa_model import VQAModel
from tasks.vqa_data import VQADataset, VQATorchDataset, VQAEvaluator
DataTuple = collections.namedtuple("DataTuple", 'dataset loader evaluator')
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO)
logger = logging.getLogger(__name__)
def get_data_tuple(splits: str, bs:int, shuffle=False, drop_last=False) -> DataTuple:
dset = VQADataset(splits)
tset = VQATorchDataset(dset)
evaluator = VQAEvaluator(dset)
data_loader = DataLoader(
tset, batch_size=bs,
shuffle=shuffle, num_workers=args.num_workers,
drop_last=drop_last, pin_memory=True
)
return DataTuple(dataset=dset, loader=data_loader, evaluator=evaluator)
class WarmupOptimizer(object):
def __init__(self, _lr_base, optimizer, _data_size, _batch_size):
self.optimizer = optimizer
self._step = 0
self._lr_base = _lr_base
self._rate = 0
self._data_size = _data_size
self._batch_size = _batch_size
def step(self):
self._step += 1
rate = self.rate()
for p in self.optimizer.param_groups:
p['lr'] = rate
self._rate = rate
self.optimizer.step()
def zero_grad(self):
self.optimizer.zero_grad()
def rate(self, step=None):
if step is None:
step = self._step
if step <= int(self._data_size / self._batch_size * 1):
r = self._lr_base * 1/4.
elif step <= int(self._data_size / self._batch_size * 2):
r = self._lr_base * 2/4.
elif step <= int(self._data_size / self._batch_size * 3):
r = self._lr_base * 3/4.
else:
r = self._lr_base
return r
def adjust_learning_rate(optimizer, decay_rate):
optimizer._lr_base *= decay_rate
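# Illustrative example (not part of the original script): with a base lr of
# 5e-5 and 1000 optimizer steps per epoch (_data_size / _batch_size), rate()
# ramps through 1/4, 2/4 and 3/4 of the base lr before holding it; `model`
# below stands for any nn.Module:
#
#   sched = WarmupOptimizer(5e-5, torch.optim.Adam(model.parameters(), lr=0),
#                           _data_size=32000, _batch_size=32)
#   [sched.rate(step) for step in (500, 1500, 2500, 3500)]
#   # -> [1.25e-05, 2.5e-05, 3.75e-05, 5e-05]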
class VQA:
def __init__(self):
self.train_tuple = get_data_tuple(
args.train, bs=args.batch_size, shuffle=True, drop_last=True
)
if args.valid != "":
self.valid_tuple = get_data_tuple(
args.valid, bs=256,
shuffle=False, drop_last=False
)
else:
self.valid_tuple = None
self.model = VQAModel(self.train_tuple.dataset.num_answers)
self._lr_decay_epoch_list = [8, 10]
self._lr_decay_rate = 0.2
if args.load_lxmert is not None:
self.model.lxrt_encoder.load(args.load_lxmert)
if args.load_lxmert_qa is not None:
load_lxmert_qa(args.load_lxmert_qa, self.model,
label2ans=self.train_tuple.dataset.label2ans)
if args.fix_language_bert:
assert args.patial_load
state_dict = torch.load(args.patial_load)
for k in state_dict.copy():
if not k.startswith('bert.'):
state_dict['bert.' + k.replace('gamma', 'weight').replace('beta', 'bias')] = state_dict.pop(k)
for name, param in self.model.lxrt_encoder.model.named_parameters():
if name in state_dict:
logger.info('fix param for: {}'.format(name))
param.requires_grad = False
self.model = self.model.cuda()
self.bce_loss = nn.BCEWithLogitsLoss()
if 'bert' in args.optim:
batch_per_epoch = len(self.train_tuple.loader)
t_total = int(batch_per_epoch * args.epochs)
logger.info("BertAdam Total Iters: %d" % t_total)
from lxrt.optimization import BertAdam
self.optim = BertAdam(list(self.model.parameters()),
lr=args.lr,
warmup=0.1,
t_total=t_total)
elif 'adam' in args.optim:
batch_per_epoch = len(self.train_tuple.loader)
optim = args.optimizer(filter(lambda p: p.requires_grad, self.model.parameters()), lr=0, betas=(0.9, 0.98), eps=1e-9)
self.optim = WarmupOptimizer(args.lr, optim, batch_per_epoch * args.batch_size, args.batch_size)
else:
self.optim = args.optimizer(self.model.parameters(), args.lr)
if args.amp_type is not None:
try:
from apex import amp
except ImportError:
raise ImportError("Please install apex from https://www.github.com/nvidia/apex to run this example.")
self.model, self.optim = amp.initialize(self.model, self.optim, opt_level=args.amp_type)
if args.multiGPU:
self.model.lxrt_encoder.multi_gpu()
self.output = args.output
os.makedirs(self.output, exist_ok=True)
def train(self, train_tuple, eval_tuple):
dset, loader, evaluator = train_tuple
iter_wrapper = (lambda x: tqdm(x, total=len(loader))) if args.tqdm else (lambda x: x)
best_valid = 0.
for epoch in range(args.epochs):
quesid2ans = {}
if 'adam' in args.optim and epoch in self._lr_decay_epoch_list:
adjust_learning_rate(self.optim, self._lr_decay_rate)
for i, (ques_id, feats, boxes, sent, target) in iter_wrapper(enumerate(loader)):
self.model.train()
self.optim.zero_grad()
feats, boxes, target = feats.cuda(), boxes.cuda(), target.cuda()
logit = self.model(feats, boxes, sent)
assert logit.dim() == target.dim() == 2
loss = self.bce_loss(logit, target)
loss = loss * logit.size(1)
if args.multiGPU:
loss = loss.mean()
if args.amp_type is not None:
from apex import amp
with amp.scale_loss(loss, self.optim) as scaled_loss:
scaled_loss.backward()
else:
loss.backward()
nn.utils.clip_grad_norm_(self.model.parameters(), args.clip_norm)
self.optim.step()
score, label = logit.max(1)
for qid, l in zip(ques_id, label.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = ans
log_str = "\nEpoch %d: Train %0.2f\n" % (epoch, evaluator.evaluate(quesid2ans) * 100.)
if self.valid_tuple is not None:
valid_score = self.evaluate(eval_tuple)
if valid_score > best_valid:
best_valid = valid_score
self.save("BEST")
log_str += "Epoch %d: Valid %0.2f\n" % (epoch, valid_score * 100.) + \
"Epoch %d: Best %0.2f\n" % (epoch, best_valid * 100.)
logger.info(log_str)
with open(self.output + "/log.log", 'a') as f:
f.write(log_str)
f.flush()
self.save("LAST")
def predict(self, eval_tuple: DataTuple, dump=None):
self.model.eval()
dset, loader, evaluator = eval_tuple
quesid2ans = {}
for i, datum_tuple in enumerate(loader):
ques_id, feats, boxes, sent = datum_tuple[:4]
with torch.no_grad():
feats, boxes = feats.cuda(), boxes.cuda()
logit = self.model(feats, boxes, sent)
if args.with_score:
logit = nn.Softmax(dim=1)(logit)
score, label = logit.max(1)
if args.with_score:
for qid, l, s in zip(ques_id, label.cpu().numpy(), score.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = (ans, str(s))
else:
for qid, l in zip(ques_id, label.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = ans
if dump is not None:
evaluator.dump_result(quesid2ans, dump)
return quesid2ans
def evaluate(self, eval_tuple: DataTuple, dump=None):
quesid2ans = self.predict(eval_tuple, dump)
return eval_tuple.evaluator.evaluate(quesid2ans)
@staticmethod
def oracle_score(data_tuple):
dset, loader, evaluator = data_tuple
quesid2ans = {}
for i, (ques_id, feats, boxes, sent, target) in enumerate(loader):
_, label = target.max(1)
for qid, l in zip(ques_id, label.cpu().numpy()):
ans = dset.label2ans[l]
quesid2ans[qid.item()] = ans
return evaluator.evaluate(quesid2ans)
def save(self, name):
torch.save(self.model.state_dict(),
os.path.join(self.output, "%s.pth" % name))
def load(self, path):
logger.info("Load model from %s" % path)
state_dict = torch.load("%s.pth" % path)
self.model.load_state_dict(state_dict)
if __name__ == "__main__":
vqa = VQA()
if args.load is not None:
vqa.load(args.load)
if args.test is not None:
args.fast = args.tiny = False
if 'test' in args.test:
vqa.predict(
get_data_tuple(args.test, bs=950,
shuffle=False, drop_last=False),
dump=os.path.join(args.output, 'test_predict.json')
)
elif 'val' in args.test:
result = vqa.evaluate(
get_data_tuple('minival', bs=950,
shuffle=False, drop_last=False),
dump=os.path.join(args.output, 'minival_predict.json')
)
logger.info(result)
else:
assert False, "No such test option for %s" % args.test
else:
logger.info('Splits in Train data: {}'.format(vqa.train_tuple.dataset.splits))
if vqa.valid_tuple is not None:
logger.info('Splits in Valid data: {}'.format(vqa.valid_tuple.dataset.splits))
logger.info("Valid Oracle: %0.2f" % (vqa.oracle_score(vqa.valid_tuple) * 100))
else:
logger.info("DO NOT USE VALIDATION")
vqa.train(vqa.train_tuple, vqa.valid_tuple)
| true | true |
f71aa988a5098b28bbada6d39c5173f2c7f1034c | 1,683 | py | Python | python/ctci/1_arrays_strings/6_Compression.py | othonreyes/code_problems | 6e65b26120b0b9d6e5ac7342a4d964696b7bd5bf | [
"MIT"
] | null | null | null | python/ctci/1_arrays_strings/6_Compression.py | othonreyes/code_problems | 6e65b26120b0b9d6e5ac7342a4d964696b7bd5bf | [
"MIT"
] | null | null | null | python/ctci/1_arrays_strings/6_Compression.py | othonreyes/code_problems | 6e65b26120b0b9d6e5ac7342a4d964696b7bd5bf | [
"MIT"
] | null | null | null | # Create a function that implements a basic compression algorithm by counting the chars
# that are present in a string; if the result string is longer than the input,
# then return the original input.
#
# Examples:
# aaabcccccaaa: a3b1c5a3
# abcdef: abcdef
# aaaaaaaaaaba: a10b1a1
### Note: Don't use extra space
import unittest
def compress2(s1):
newStr = []
count = 0
for i in range(len(s1)):
# Explanation
# the i != 0 is used to deal with the first character.
            # we could have done this instead, but it requires extra code:
# char = s1[0] # requires to check if the s1 is not empty
# - or -
# char = '' # requires to check if char != ''
if i != 0 and s1[i] != s1[i-1]:
newStr.append(s1[i-1] + str(count))
count = 0
count += 1
newStr.append(s1[-1] + str(count)) # we do this to deal with the last characters
return min(s1, ''.join(newStr), key=len)
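# Quick check (illustrative): compress2 falls back to the original string
# whenever the run-length form is not strictly shorter.
#
#   compress2('aaabcccccaaa')  # -> 'a3b1c5a3'
#   compress2('abcdef')        # -> 'abcdef' ('a1b1c1d1e1f1' would be longer)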
def compress(s1):
newStr = ''
char = ''
count = 0
for i in range(len(s1)):
if char != s1[i]:
if char != '': # we do this to deal with the initial case
newStr += char + str(count)
char = s1[i]
count = 1
else:
count += 1
newStr += char + str(count) # we do this to deal with the last characters
if len(newStr) > len(s1):
return s1
return newStr
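# An equivalent formulation (a sketch, not part of the original solution)
# using itertools.groupby, which yields each character with its run directly:
#
#   from itertools import groupby
#
#   def compress_groupby(s1):
#       out = ''.join(c + str(len(list(g))) for c, g in groupby(s1))
#       return s1 if len(out) > len(s1) else out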
class Test(unittest.TestCase):
valid = (
('aaabcccccaaa', 'a3b1c5a3'),
('abcdef', 'abcdef'),
('aaaaaaaaaaba', 'a10b1a1')
)
def test(self):
        for text, expected in self.valid:
            print(text, ' vs ', expected)
            result = compress(text)
            self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main() | 25.892308 | 87 | 0.618538 |
def compress2(s1):
newStr = []
count = 0
for i in range(len(s1)):
# Explanation
# the i != 0 is used to deal with the first character.
            # we could have done this instead, but it requires extra code:
# char = s1[0] # requires to check if the s1 is not empty
# - or -
# char = '' # requires to check if char != ''
if i != 0 and s1[i] != s1[i-1]:
newStr.append(s1[i-1] + str(count))
count = 0
count += 1
newStr.append(s1[-1] + str(count)) # we do this to deal with the last characters
return min(s1, ''.join(newStr), key=len)
def compress(s1):
newStr = ''
char = ''
count = 0
for i in range(len(s1)):
if char != s1[i]:
if char != '': # we do this to deal with the initial case
newStr += char + str(count)
char = s1[i]
count = 1
else:
count += 1
newStr += char + str(count) # we do this to deal with the last characters
if len(newStr) > len(s1):
return s1
return newStr
class Test(unittest.TestCase):
valid = (
('aaabcccccaaa', 'a3b1c5a3'),
('abcdef', 'abcdef'),
('aaaaaaaaaaba', 'a10b1a1')
)
def test(self):
        for text, expected in self.valid:
            print(text, ' vs ', expected)
            result = compress(text)
            self.assertEqual(result, expected)
if __name__ == "__main__":
unittest.main() | true | true |
f71aaa4225770dc4b16e09cec972c3086fd80ff7 | 291 | py | Python | subsets/subsets.py | YasinEhsan/interview-prep | ed9f95af5a37b05304e45b41511068b6f72533e7 | [
"Apache-2.0"
] | 11 | 2019-05-02T22:27:01.000Z | 2020-10-30T08:43:02.000Z | subsets/subsets.py | YasinEhsan/interview-prep | ed9f95af5a37b05304e45b41511068b6f72533e7 | [
"Apache-2.0"
] | null | null | null | subsets/subsets.py | YasinEhsan/interview-prep | ed9f95af5a37b05304e45b41511068b6f72533e7 | [
"Apache-2.0"
] | 3 | 2019-11-01T01:35:01.000Z | 2020-01-11T18:00:39.000Z | def find_subsets(nums):
subsets = []
    # start from the empty set and extend every existing subset with nums[i]
subsets.append([])
for i in range(len(nums)):
storeLen = len(subsets)
for j in range(0,storeLen):
currSet = list(subsets[j])
currSet.append(nums[i])
subsets.append(currSet)
return subsets
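# Example (illustrative): each element doubles the number of subsets, so a
# 3-element input yields 2**3 = 8 subsets, generated breadth-first:
#
#   find_subsets([1, 5, 3])
#   # -> [[], [1], [5], [1, 5], [3], [1, 3], [5, 3], [1, 5, 3]]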
| 22.384615 | 32 | 0.639175 | def find_subsets(nums):
subsets = []
subsets.append([])
for i in range(len(nums)):
storeLen = len(subsets)
for j in range(0,storeLen):
currSet = list(subsets[j])
currSet.append(nums[i])
subsets.append(currSet)
return subsets
| true | true |
f71aaa5221fcf2fa717ae33f34cf3b565947d0e8 | 6,099 | py | Python | lib/models/spin.py | ziniuwan/maed | 9e1f1c37eba81da86c8d9c62dc9be41a01abff5b | [
"MIT"
] | 145 | 2021-08-15T13:22:08.000Z | 2022-03-29T13:37:19.000Z | lib/models/spin.py | vkirilenko/maed | 9e1f1c37eba81da86c8d9c62dc9be41a01abff5b | [
"MIT"
] | 9 | 2021-09-17T14:58:15.000Z | 2022-03-29T07:43:08.000Z | lib/models/spin.py | vkirilenko/maed | 9e1f1c37eba81da86c8d9c62dc9be41a01abff5b | [
"MIT"
] | 17 | 2021-08-15T13:22:10.000Z | 2022-01-17T02:34:14.000Z | """
This script is adapted from https://github.com/nkolot/SPIN
Adhere to their licence to use this script
"""
import math
import torch
import numpy as np
import os.path as osp
import torch.nn as nn
from lib.core.config import DATA_DIR
from lib.utils.geometry import rotation_matrix_to_angle_axis, rot6d_to_rotmat
from lib.models.smpl import SMPL, SMPL_MODEL_DIR, H36M_TO_J17, SMPL_MEAN_PARAMS
class Regressor(nn.Module):
def __init__(self, smpl_mean_params=SMPL_MEAN_PARAMS, feat_dim=2048, hidden_dim=1024, **kwargs):
super(Regressor, self).__init__()
self.smpl = SMPL(
SMPL_MODEL_DIR,
create_transl=False,
create_global_orient=False,
create_body_pose=False,
create_betas=False,
)
npose = 24 * 6
nshape = 10
self.fc1 = nn.Linear(feat_dim + npose + nshape + 3, hidden_dim)
self.drop1 = nn.Dropout()
self.fc2 = nn.Linear(hidden_dim, hidden_dim)
self.drop2 = nn.Dropout()
self.decpose = nn.Linear(hidden_dim, npose)
self.decshape = nn.Linear(hidden_dim, nshape)
self.deccam = nn.Linear(hidden_dim, 3)
nn.init.xavier_uniform_(self.decpose.weight, gain=0.01)
nn.init.xavier_uniform_(self.decshape.weight, gain=0.01)
nn.init.xavier_uniform_(self.deccam.weight, gain=0.01)
mean_params = np.load(smpl_mean_params)
init_pose = torch.from_numpy(mean_params['pose'][:]).unsqueeze(0)
init_shape = torch.from_numpy(mean_params['shape'][:].astype('float32')).unsqueeze(0)
init_cam = torch.from_numpy(mean_params['cam']).unsqueeze(0)
self.register_buffer('init_pose', init_pose)
self.register_buffer('init_shape', init_shape)
self.register_buffer('init_cam', init_cam)
def iterative_regress(self, x, init_pose=None, init_shape=None, init_cam=None, n_iter=3):
nt = x.shape[0]
if init_pose is None:
init_pose = self.init_pose.expand(nt, -1)
if init_shape is None:
init_shape = self.init_shape.expand(nt, -1)
if init_cam is None:
init_cam = self.init_cam.expand(nt, -1)
pred_pose = init_pose
pred_shape = init_shape
pred_cam = init_cam
for i in range(n_iter):
xc = torch.cat([x, pred_pose, pred_shape, pred_cam], 1)
xc = self.fc1(xc)
xc = self.drop1(xc)
xc = self.fc2(xc)
xc = self.drop2(xc)
pred_pose = self.decpose(xc) + pred_pose
pred_shape = self.decshape(xc) + pred_shape
pred_cam = self.deccam(xc) + pred_cam
return pred_pose, pred_shape, pred_cam
def forward(self, x, seqlen, J_regressor=None,
init_pose=None, init_shape=None, init_cam=None, n_iter=3, **kwargs):
nt = x.shape[0]
N = nt//seqlen
        pred_pose, pred_shape, pred_cam = self.iterative_regress(x, init_pose, init_shape, init_cam, n_iter=n_iter)
output_regress = self.get_output(pred_pose, pred_shape, pred_cam, J_regressor)
return output_regress
def get_output(self, pred_pose, pred_shape, pred_cam, J_regressor):
output = {}
nt = pred_pose.shape[0]
pred_rotmat = rot6d_to_rotmat(pred_pose).reshape(nt, -1, 3, 3)
pred_output = self.smpl(
betas=pred_shape,
body_pose=pred_rotmat[:, 1:],
global_orient=pred_rotmat[:, 0].unsqueeze(1),
pose2rot=False
)
pred_vertices = pred_output.vertices[:nt]
pred_joints = pred_output.joints[:nt]
if J_regressor is not None:
J_regressor_batch = J_regressor[None, :].expand(pred_vertices.shape[0], -1, -1).to(pred_vertices.device)
pred_joints = torch.matmul(J_regressor_batch, pred_vertices)
pred_keypoints_2d = projection(pred_joints, pred_cam)
pose = rotation_matrix_to_angle_axis(pred_rotmat.reshape(-1, 3, 3)).reshape(nt, -1)
output['theta'] = torch.cat([pred_cam, pose, pred_shape], dim=1)
output['verts'] = pred_vertices
output['kp_2d'] = pred_keypoints_2d
output['kp_3d'] = pred_joints
output['rotmat'] = pred_rotmat
return output
def projection(pred_joints, pred_camera):
pred_cam_t = torch.stack([pred_camera[:, 1],
pred_camera[:, 2],
2 * 5000. / (224. * pred_camera[:, 0] + 1e-9)], dim=-1)
batch_size = pred_joints.shape[0]
camera_center = torch.zeros(batch_size, 2)
pred_keypoints_2d = perspective_projection(pred_joints,
rotation=torch.eye(3).unsqueeze(0).expand(batch_size, -1, -1).to(pred_joints.device),
translation=pred_cam_t,
focal_length=5000.,
camera_center=camera_center)
# Normalize keypoints to [-1,1]
pred_keypoints_2d = pred_keypoints_2d / (224. / 2.)
return pred_keypoints_2d
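# The stack above lifts the weak-perspective camera (s, tx, ty) to a full
# translation with depth z = 2 * f / (img_res * s), with f = 5000 and
# img_res = 224 here. Illustrative check (not part of the original module):
#
#   s = 1.0
#   z = 2 * 5000. / (224. * s + 1e-9)   # ~44.64 units in front of the camera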
def perspective_projection(points, rotation, translation,
focal_length, camera_center):
"""
This function computes the perspective projection of a set of points.
Input:
points (bs, N, 3): 3D points
rotation (bs, 3, 3): Camera rotation
translation (bs, 3): Camera translation
focal_length (bs,) or scalar: Focal length
camera_center (bs, 2): Camera center
"""
batch_size = points.shape[0]
K = torch.zeros([batch_size, 3, 3], device=points.device)
K[:,0,0] = focal_length
K[:,1,1] = focal_length
K[:,2,2] = 1.
K[:,:-1, -1] = camera_center
# Transform points
points = torch.einsum('bij,bkj->bki', rotation, points)
points = points + translation.unsqueeze(1)
# Apply perspective distortion
projected_points = points / points[:,:,-1].unsqueeze(-1)
# Apply camera intrinsics
projected_points = torch.einsum('bij,bkj->bki', K, projected_points)
return projected_points[:, :, :-1]
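# Minimal usage sketch (illustrative tensors, not part of the original API):
#
#   pts = torch.randn(2, 49, 3) + torch.tensor([0., 0., 5.])  # keep z > 0
#   rot = torch.eye(3).expand(2, 3, 3)
#   uv = perspective_projection(pts, rot, translation=torch.zeros(2, 3),
#                               focal_length=5000., camera_center=torch.zeros(2, 2))
#   # uv has shape (2, 49, 2): image-plane coordinates of each 3D point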
| 38.601266 | 132 | 0.620102 |
import math
import torch
import numpy as np
import os.path as osp
import torch.nn as nn
from lib.core.config import DATA_DIR
from lib.utils.geometry import rotation_matrix_to_angle_axis, rot6d_to_rotmat
from lib.models.smpl import SMPL, SMPL_MODEL_DIR, H36M_TO_J17, SMPL_MEAN_PARAMS
class Regressor(nn.Module):
def __init__(self, smpl_mean_params=SMPL_MEAN_PARAMS, feat_dim=2048, hidden_dim=1024, **kwargs):
super(Regressor, self).__init__()
self.smpl = SMPL(
SMPL_MODEL_DIR,
create_transl=False,
create_global_orient=False,
create_body_pose=False,
create_betas=False,
)
npose = 24 * 6
nshape = 10
self.fc1 = nn.Linear(feat_dim + npose + nshape + 3, hidden_dim)
self.drop1 = nn.Dropout()
self.fc2 = nn.Linear(hidden_dim, hidden_dim)
self.drop2 = nn.Dropout()
self.decpose = nn.Linear(hidden_dim, npose)
self.decshape = nn.Linear(hidden_dim, nshape)
self.deccam = nn.Linear(hidden_dim, 3)
nn.init.xavier_uniform_(self.decpose.weight, gain=0.01)
nn.init.xavier_uniform_(self.decshape.weight, gain=0.01)
nn.init.xavier_uniform_(self.deccam.weight, gain=0.01)
mean_params = np.load(smpl_mean_params)
init_pose = torch.from_numpy(mean_params['pose'][:]).unsqueeze(0)
init_shape = torch.from_numpy(mean_params['shape'][:].astype('float32')).unsqueeze(0)
init_cam = torch.from_numpy(mean_params['cam']).unsqueeze(0)
self.register_buffer('init_pose', init_pose)
self.register_buffer('init_shape', init_shape)
self.register_buffer('init_cam', init_cam)
def iterative_regress(self, x, init_pose=None, init_shape=None, init_cam=None, n_iter=3):
nt = x.shape[0]
if init_pose is None:
init_pose = self.init_pose.expand(nt, -1)
if init_shape is None:
init_shape = self.init_shape.expand(nt, -1)
if init_cam is None:
init_cam = self.init_cam.expand(nt, -1)
pred_pose = init_pose
pred_shape = init_shape
pred_cam = init_cam
for i in range(n_iter):
xc = torch.cat([x, pred_pose, pred_shape, pred_cam], 1)
xc = self.fc1(xc)
xc = self.drop1(xc)
xc = self.fc2(xc)
xc = self.drop2(xc)
pred_pose = self.decpose(xc) + pred_pose
pred_shape = self.decshape(xc) + pred_shape
pred_cam = self.deccam(xc) + pred_cam
return pred_pose, pred_shape, pred_cam
def forward(self, x, seqlen, J_regressor=None,
init_pose=None, init_shape=None, init_cam=None, n_iter=3, **kwargs):
nt = x.shape[0]
N = nt//seqlen
        pred_pose, pred_shape, pred_cam = self.iterative_regress(x, init_pose, init_shape, init_cam, n_iter=n_iter)
output_regress = self.get_output(pred_pose, pred_shape, pred_cam, J_regressor)
return output_regress
def get_output(self, pred_pose, pred_shape, pred_cam, J_regressor):
output = {}
nt = pred_pose.shape[0]
pred_rotmat = rot6d_to_rotmat(pred_pose).reshape(nt, -1, 3, 3)
pred_output = self.smpl(
betas=pred_shape,
body_pose=pred_rotmat[:, 1:],
global_orient=pred_rotmat[:, 0].unsqueeze(1),
pose2rot=False
)
pred_vertices = pred_output.vertices[:nt]
pred_joints = pred_output.joints[:nt]
if J_regressor is not None:
J_regressor_batch = J_regressor[None, :].expand(pred_vertices.shape[0], -1, -1).to(pred_vertices.device)
pred_joints = torch.matmul(J_regressor_batch, pred_vertices)
pred_keypoints_2d = projection(pred_joints, pred_cam)
pose = rotation_matrix_to_angle_axis(pred_rotmat.reshape(-1, 3, 3)).reshape(nt, -1)
output['theta'] = torch.cat([pred_cam, pose, pred_shape], dim=1)
output['verts'] = pred_vertices
output['kp_2d'] = pred_keypoints_2d
output['kp_3d'] = pred_joints
output['rotmat'] = pred_rotmat
return output
def projection(pred_joints, pred_camera):
pred_cam_t = torch.stack([pred_camera[:, 1],
pred_camera[:, 2],
2 * 5000. / (224. * pred_camera[:, 0] + 1e-9)], dim=-1)
batch_size = pred_joints.shape[0]
camera_center = torch.zeros(batch_size, 2)
pred_keypoints_2d = perspective_projection(pred_joints,
rotation=torch.eye(3).unsqueeze(0).expand(batch_size, -1, -1).to(pred_joints.device),
translation=pred_cam_t,
focal_length=5000.,
camera_center=camera_center)
pred_keypoints_2d = pred_keypoints_2d / (224. / 2.)
return pred_keypoints_2d
def perspective_projection(points, rotation, translation,
focal_length, camera_center):
batch_size = points.shape[0]
K = torch.zeros([batch_size, 3, 3], device=points.device)
K[:,0,0] = focal_length
K[:,1,1] = focal_length
K[:,2,2] = 1.
K[:,:-1, -1] = camera_center
points = torch.einsum('bij,bkj->bki', rotation, points)
points = points + translation.unsqueeze(1)
projected_points = points / points[:,:,-1].unsqueeze(-1)
projected_points = torch.einsum('bij,bkj->bki', K, projected_points)
return projected_points[:, :, :-1]
| true | true |
f71aabf71da050ef5d5829467e28176e4164c3ea | 8,924 | py | Python | sk_typing/decomposition.py | thomasjpfan/sk_typing | e6aacfedbce44d7748cf7c49cd2b949952f2e427 | [
"MIT"
] | 1 | 2021-02-19T20:57:36.000Z | 2021-02-19T20:57:36.000Z | sk_typing/decomposition.py | thomasjpfan/sk_typing | e6aacfedbce44d7748cf7c49cd2b949952f2e427 | [
"MIT"
] | null | null | null | sk_typing/decomposition.py | thomasjpfan/sk_typing | e6aacfedbce44d7748cf7c49cd2b949952f2e427 | [
"MIT"
] | null | null | null | from typing import Optional
from typing import Union
from collections.abc import Callable
import numpy as np
from .typing import RandomStateType
from .typing import Literal
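# These classes are typed stubs mirroring scikit-learn's decomposition API:
# the annotated class attributes are the fitted attributes and the __init__
# signatures mirror the estimator constructors. A usage sketch (illustrative;
# the stubs themselves are not executable estimators):
#
#   def fit_reduce(pca: "PCA", X: np.ndarray) -> np.ndarray:
#       ...  # a type checker can now verify accesses such as pca.components_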
class DictionaryLearning:
components_: np.ndarray
error_: np.ndarray
n_iter_: int
def __init__(
self,
n_components: Optional[int] = None,
alpha: float = 1,
max_iter: int = 1000,
tol: float = 1e-08,
fit_algorithm: Literal["lars", "cd"] = "lars",
transform_algorithm: Literal[
"lasso_lars", "lasso_cd", "lars", "omp", "threshold"
] = "omp",
transform_n_nonzero_coefs: Optional[int] = None,
transform_alpha: Optional[float] = None,
n_jobs: Optional[int] = None,
code_init: Optional[np.ndarray] = None,
dict_init: Optional[np.ndarray] = None,
verbose: bool = False,
split_sign: bool = False,
random_state: RandomStateType = None,
positive_code: bool = False,
positive_dict: bool = False,
transform_max_iter: int = 1000,
):
...
class FactorAnalysis:
components_: np.ndarray
loglike_: list
noise_variance_: np.ndarray
n_iter_: int
mean_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
tol: float = 0.01,
copy: bool = True,
max_iter: int = 1000,
noise_variance_init: Optional[np.ndarray] = None,
svd_method: Literal["lapack", "randomized"] = "randomized",
iterated_power: int = 3,
random_state: RandomStateType = 0,
):
...
class FastICA:
components_: np.ndarray
mixing_: np.ndarray
mean_: np.ndarray
n_iter_: int
whitening_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
algorithm: Literal["parallel", "deflation"] = "parallel",
whiten: bool = True,
fun: Union[Literal["logcosh", "exp", "cube"], Callable] = "logcosh",
fun_args: Optional[dict] = None,
max_iter: int = 200,
tol: float = 0.0001,
w_init: Optional[np.ndarray] = None,
random_state: RandomStateType = None,
):
...
class IncrementalPCA:
components_: np.ndarray
explained_variance_: np.ndarray
explained_variance_ratio_: np.ndarray
singular_values_: np.ndarray
mean_: np.ndarray
var_: np.ndarray
noise_variance_: float
n_components_: int
n_samples_seen_: int
def __init__(
self,
n_components: Optional[int] = None,
whiten: bool = False,
copy: bool = True,
batch_size: Optional[int] = None,
):
...
class KernelPCA:
lambdas_: np.ndarray
alphas_: np.ndarray
dual_coef_: np.ndarray
X_transformed_fit_: np.ndarray
X_fit_: np.ndarray
def __init__(
self,
        n_components: Optional[int] = None,
kernel: Literal[
"linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"
] = "linear",
gamma: Optional[float] = None,
degree: int = 3,
coef0: float = 1,
kernel_params: Optional[dict] = None,
alpha: float = 1.0,
fit_inverse_transform: bool = False,
eigen_solver: Literal["auto", "dense", "arpack"] = "auto",
tol: float = 0,
        max_iter: Optional[int] = None,
remove_zero_eig: bool = False,
random_state: RandomStateType = None,
copy_X: bool = True,
n_jobs: Optional[int] = None,
):
...
class LatentDirichletAllocation:
components_: np.ndarray
n_batch_iter_: int
n_iter_: int
bound_: float
doc_topic_prior_: float
topic_word_prior_: float
def __init__(
self,
n_components: int = 10,
doc_topic_prior: Optional[float] = None,
topic_word_prior: Optional[float] = None,
learning_method: Literal["batch", "online"] = "batch",
learning_decay: float = 0.7,
learning_offset: float = 10.0,
max_iter: int = 10,
batch_size: int = 128,
evaluate_every: int = -1,
total_samples: int = 1_000_000,
perp_tol: float = 0.1,
mean_change_tol: float = 0.001,
max_doc_update_iter: int = 100,
n_jobs: Optional[int] = None,
verbose: int = 0,
random_state: RandomStateType = None,
):
...
class MiniBatchDictionaryLearning:
components_: np.ndarray
inner_stats_: tuple
n_iter_: int
iter_offset_: int
random_state_: np.random.RandomState
def __init__(
self,
        n_components: Optional[int] = None,
alpha: float = 1,
n_iter: int = 1000,
fit_algorithm: Literal["lars", "cd"] = "lars",
n_jobs: Optional[int] = None,
batch_size: int = 3,
shuffle: bool = True,
dict_init: Optional[np.ndarray] = None,
transform_algorithm: Literal[
"lasso_lars", "lasso_cd", "lars", "omp", "threshold"
] = "omp",
transform_n_nonzero_coefs: Optional[int] = None,
transform_alpha: Optional[float] = None,
verbose: bool = False,
split_sign: bool = False,
random_state: RandomStateType = None,
positive_code: bool = False,
positive_dict: bool = False,
transform_max_iter: int = 1000,
):
...
class MiniBatchSparsePCA:
components_: np.ndarray
n_iter_: int
mean_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
alpha: int = 1,
ridge_alpha: float = 0.01,
n_iter: int = 100,
callback: Optional[Callable] = None,
batch_size: int = 3,
verbose: Union[int, bool] = False,
shuffle: bool = True,
n_jobs: Optional[int] = None,
method: Literal["lars", "cd"] = "lars",
random_state: RandomStateType = None,
normalize_components: str = "deprecated",
):
...
class NMF:
components_: np.ndarray
n_components_: int
reconstruction_err_: float
n_iter_: int
def __init__(
self,
n_components: Optional[int] = None,
init: Optional[
Literal["random", "nndsvd", "nndsvda", "nndsvdar", "custom", "warn"]
] = None,
solver: Literal["cd", "mu"] = "cd",
beta_loss: Union[
float, Literal["frobenius", "kullback-leibler", "itakura-saito"]
] = "frobenius",
tol: float = 0.0001,
max_iter: int = 200,
random_state: RandomStateType = None,
alpha: float = 0.0,
l1_ratio: float = 0.0,
verbose: int = 0,
shuffle: bool = False,
):
...
class PCA:
components_: np.ndarray
explained_variance_: np.ndarray
explained_variance_ratio_: np.ndarray
singular_values_: np.ndarray
mean_: np.ndarray
n_components_: np.ndarray
n_features_: int
n_samples_: int
noise_variance_: float
def __init__(
self,
n_components: Union[int, float, None, Literal["mle"]] = None,
copy: bool = True,
whiten: bool = False,
svd_solver: Literal["auto", "full", "arpack", "randomized"] = "auto",
tol: float = 0.0,
iterated_power: Union[int, Literal["auto"]] = "auto",
random_state: RandomStateType = None,
):
...
class SparseCoder:
components_: np.ndarray
def __init__(
self,
dictionary: np.ndarray,
transform_algorithm: Literal[
"lasso_lars", "lasso_cd", "lars", "omp", "threshold"
] = "omp",
transform_n_nonzero_coefs: Optional[int] = None,
transform_alpha: Optional[float] = None,
split_sign: bool = False,
n_jobs: Optional[int] = None,
positive_code: bool = False,
transform_max_iter: int = 1000,
):
...
class SparsePCA:
components_: np.ndarray
error_: np.ndarray
n_iter_: int
mean_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
alpha: float = 1,
ridge_alpha: float = 0.01,
max_iter: int = 1000,
tol: float = 1e-08,
method: Literal["lars", "cd"] = "lars",
n_jobs: Optional[int] = None,
U_init: Optional[np.ndarray] = None,
V_init: Optional[np.ndarray] = None,
verbose: Union[int, bool] = False,
random_state: RandomStateType = None,
normalize_components: str = "deprecated",
):
...
class TruncatedSVD:
components_: np.ndarray
explained_variance_: np.ndarray
explained_variance_ratio_: np.ndarray
singular_values_: np.ndarray
def __init__(
self,
n_components: int = 2,
algorithm: Literal["arpack", "randomized"] = "randomized",
n_iter: int = 5,
random_state: RandomStateType = None,
tol: float = 0.0,
):
...
| 27.12462 | 80 | 0.584043 | from typing import Optional
from typing import Union
from collections.abc import Callable
import numpy as np
from .typing import RandomStateType
from .typing import Literal
class DictionaryLearning:
components_: np.ndarray
error_: np.ndarray
n_iter_: int
def __init__(
self,
n_components: Optional[int] = None,
alpha: float = 1,
max_iter: int = 1000,
tol: float = 1e-08,
fit_algorithm: Literal["lars", "cd"] = "lars",
transform_algorithm: Literal[
"lasso_lars", "lasso_cd", "lars", "omp", "threshold"
] = "omp",
transform_n_nonzero_coefs: Optional[int] = None,
transform_alpha: Optional[float] = None,
n_jobs: Optional[int] = None,
code_init: Optional[np.ndarray] = None,
dict_init: Optional[np.ndarray] = None,
verbose: bool = False,
split_sign: bool = False,
random_state: RandomStateType = None,
positive_code: bool = False,
positive_dict: bool = False,
transform_max_iter: int = 1000,
):
...
class FactorAnalysis:
components_: np.ndarray
loglike_: list
noise_variance_: np.ndarray
n_iter_: int
mean_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
tol: float = 0.01,
copy: bool = True,
max_iter: int = 1000,
noise_variance_init: Optional[np.ndarray] = None,
svd_method: Literal["lapack", "randomized"] = "randomized",
iterated_power: int = 3,
random_state: RandomStateType = 0,
):
...
class FastICA:
components_: np.ndarray
mixing_: np.ndarray
mean_: np.ndarray
n_iter_: int
whitening_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
algorithm: Literal["parallel", "deflation"] = "parallel",
whiten: bool = True,
fun: Union[Literal["logcosh", "exp", "cube"], Callable] = "logcosh",
fun_args: Optional[dict] = None,
max_iter: int = 200,
tol: float = 0.0001,
w_init: Optional[np.ndarray] = None,
random_state: RandomStateType = None,
):
...
class IncrementalPCA:
components_: np.ndarray
explained_variance_: np.ndarray
explained_variance_ratio_: np.ndarray
singular_values_: np.ndarray
mean_: np.ndarray
var_: np.ndarray
noise_variance_: float
n_components_: int
n_samples_seen_: int
def __init__(
self,
n_components: Optional[int] = None,
whiten: bool = False,
copy: bool = True,
batch_size: Optional[int] = None,
):
...
class KernelPCA:
lambdas_: np.ndarray
alphas_: np.ndarray
dual_coef_: np.ndarray
X_transformed_fit_: np.ndarray
X_fit_: np.ndarray
def __init__(
self,
        n_components: Optional[int] = None,
kernel: Literal[
"linear", "poly", "rbf", "sigmoid", "cosine", "precomputed"
] = "linear",
gamma: Optional[float] = None,
degree: int = 3,
coef0: float = 1,
kernel_params: Optional[dict] = None,
alpha: float = 1.0,
fit_inverse_transform: bool = False,
eigen_solver: Literal["auto", "dense", "arpack"] = "auto",
tol: float = 0,
        max_iter: Optional[int] = None,
remove_zero_eig: bool = False,
random_state: RandomStateType = None,
copy_X: bool = True,
n_jobs: Optional[int] = None,
):
...
class LatentDirichletAllocation:
components_: np.ndarray
n_batch_iter_: int
n_iter_: int
bound_: float
doc_topic_prior_: float
topic_word_prior_: float
def __init__(
self,
n_components: int = 10,
doc_topic_prior: Optional[float] = None,
topic_word_prior: Optional[float] = None,
learning_method: Literal["batch", "online"] = "batch",
learning_decay: float = 0.7,
learning_offset: float = 10.0,
max_iter: int = 10,
batch_size: int = 128,
evaluate_every: int = -1,
total_samples: int = 1_000_000,
perp_tol: float = 0.1,
mean_change_tol: float = 0.001,
max_doc_update_iter: int = 100,
n_jobs: Optional[int] = None,
verbose: int = 0,
random_state: RandomStateType = None,
):
...
class MiniBatchDictionaryLearning:
components_: np.ndarray
inner_stats_: tuple
n_iter_: int
iter_offset_: int
random_state_: np.random.RandomState
def __init__(
self,
        n_components: Optional[int] = None,
alpha: float = 1,
n_iter: int = 1000,
fit_algorithm: Literal["lars", "cd"] = "lars",
n_jobs: Optional[int] = None,
batch_size: int = 3,
shuffle: bool = True,
dict_init: Optional[np.ndarray] = None,
transform_algorithm: Literal[
"lasso_lars", "lasso_cd", "lars", "omp", "threshold"
] = "omp",
transform_n_nonzero_coefs: Optional[int] = None,
transform_alpha: Optional[float] = None,
verbose: bool = False,
split_sign: bool = False,
random_state: RandomStateType = None,
positive_code: bool = False,
positive_dict: bool = False,
transform_max_iter: int = 1000,
):
...
class MiniBatchSparsePCA:
components_: np.ndarray
n_iter_: int
mean_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
alpha: int = 1,
ridge_alpha: float = 0.01,
n_iter: int = 100,
callback: Optional[Callable] = None,
batch_size: int = 3,
verbose: Union[int, bool] = False,
shuffle: bool = True,
n_jobs: Optional[int] = None,
method: Literal["lars", "cd"] = "lars",
random_state: RandomStateType = None,
normalize_components: str = "deprecated",
):
...
class NMF:
components_: np.ndarray
n_components_: int
reconstruction_err_: float
n_iter_: int
def __init__(
self,
n_components: Optional[int] = None,
init: Optional[
Literal["random", "nndsvd", "nndsvda", "nndsvdar", "custom", "warn"]
] = None,
solver: Literal["cd", "mu"] = "cd",
beta_loss: Union[
float, Literal["frobenius", "kullback-leibler", "itakura-saito"]
] = "frobenius",
tol: float = 0.0001,
max_iter: int = 200,
random_state: RandomStateType = None,
alpha: float = 0.0,
l1_ratio: float = 0.0,
verbose: int = 0,
shuffle: bool = False,
):
...
class PCA:
components_: np.ndarray
explained_variance_: np.ndarray
explained_variance_ratio_: np.ndarray
singular_values_: np.ndarray
mean_: np.ndarray
n_components_: np.ndarray
n_features_: int
n_samples_: int
noise_variance_: float
def __init__(
self,
n_components: Union[int, float, None, Literal["mle"]] = None,
copy: bool = True,
whiten: bool = False,
svd_solver: Literal["auto", "full", "arpack", "randomized"] = "auto",
tol: float = 0.0,
iterated_power: Union[int, Literal["auto"]] = "auto",
random_state: RandomStateType = None,
):
...
class SparseCoder:
components_: np.ndarray
def __init__(
self,
dictionary: np.ndarray,
transform_algorithm: Literal[
"lasso_lars", "lasso_cd", "lars", "omp", "threshold"
] = "omp",
transform_n_nonzero_coefs: Optional[int] = None,
transform_alpha: Optional[float] = None,
split_sign: bool = False,
n_jobs: Optional[int] = None,
positive_code: bool = False,
transform_max_iter: int = 1000,
):
...
class SparsePCA:
components_: np.ndarray
error_: np.ndarray
n_iter_: int
mean_: np.ndarray
def __init__(
self,
n_components: Optional[int] = None,
alpha: float = 1,
ridge_alpha: float = 0.01,
max_iter: int = 1000,
tol: float = 1e-08,
method: Literal["lars", "cd"] = "lars",
n_jobs: Optional[int] = None,
U_init: Optional[np.ndarray] = None,
V_init: Optional[np.ndarray] = None,
verbose: Union[int, bool] = False,
random_state: RandomStateType = None,
normalize_components: str = "deprecated",
):
...
class TruncatedSVD:
components_: np.ndarray
explained_variance_: np.ndarray
explained_variance_ratio_: np.ndarray
singular_values_: np.ndarray
def __init__(
self,
n_components: int = 2,
algorithm: Literal["arpack", "randomized"] = "randomized",
n_iter: int = 5,
random_state: RandomStateType = None,
tol: float = 0.0,
):
...
| true | true |
f71aac40a529a6f8ae2786769f649c443c11c279 | 8,926 | py | Python | Model Monitoring.py | MSJemutai/DSCC202-402-Forecasting-Flight-Delay-Final-Project | e6fc287ebfac59fd2edbc7d19241b61787ce14fb | [
"MIT"
] | null | null | null | Model Monitoring.py | MSJemutai/DSCC202-402-Forecasting-Flight-Delay-Final-Project | e6fc287ebfac59fd2edbc7d19241b61787ce14fb | [
"MIT"
] | null | null | null | Model Monitoring.py | MSJemutai/DSCC202-402-Forecasting-Flight-Delay-Final-Project | e6fc287ebfac59fd2edbc7d19241b61787ce14fb | [
"MIT"
] | null | null | null | # Databricks notebook source
# MAGIC %md
# MAGIC ## Model Monitoring
# COMMAND ----------
# MAGIC %run ./includes/utilities
# COMMAND ----------
# MAGIC %run ./includes/configuration
# COMMAND ----------
# grab the station information (system wide)
stationDF=get_bike_stations()[['name','station_id','lat','lon']]
# grab the stations of interest
stationsOfInterestDF = spark.sql("""select distinct(station_id) from citibike.forecast_regression_timeweather;""").toPandas()
stationDF = stationDF[stationDF['station_id'].apply(lambda x: int(x) in list(stationsOfInterestDF.values.flatten()))]
# COMMAND ----------
from datetime import datetime as dt
from datetime import timedelta
dbutils.widgets.removeAll()
dbutils.widgets.dropdown("00.Airport_Code", "JFK", ["JFK","SEA","BOS","ATL","LAX","SFO","DEN","DFW","ORD","CVG","CLT","DCA","IAH"])
dbutils.widgets.text('01.training_start_date', "2018-01-01")
dbutils.widgets.text('02.training_end_date', "2019-03-15")
dbutils.widgets.text('03.inference_date', (dt.strptime(str(dbutils.widgets.get('02.training_end_date')), "%Y-%m-%d") + timedelta(days=1)).strftime("%Y-%m-%d"))
dbutils.widgets.text('04.promote_model', "No")
training_start_date = str(dbutils.widgets.get('01.training_start_date'))
training_end_date = str(dbutils.widgets.get('02.training_end_date'))
inference_date = str(dbutils.widgets.get('03.inference_date'))
airport_code = str(dbutils.widgets.get('00.Airport_Code'))
if dbutils.widgets.get("05.promote_model")=='Yes':
promote_model = True
else:
promote_model = False
print(airport_code,training_start_date,training_end_date,inference_date,promote_model)
# COMMAND ----------
# MAGIC %md
# MAGIC ## Forecast flight delay at selected airport
# COMMAND ----------
import mlflow
from pprint import pprint
from mlflow.tracking import MlflowClient
import pandas as pd
import plotly.express as px
from datetime import timedelta, datetime
client = MlflowClient()
# COMMAND ----------
# assemble dataset for forecasting
fdf = spark.sql('''
SELECT
a.hour as ds,
EXTRACT(year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format(end_date, (datetime.strptime(end_date, '%Y-%m-%d') + timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), station_id)
)
# COMMAND ----------
# Forecast using the production and staging models
df1=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df1['model']='Production'
df1['yhat']=prod_model.predict(df1.drop(["ds","model"], axis=1).values)
df2=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df2['model']='Staging'
df2['yhat']=stage_model.predict(df2.drop(["ds","model"], axis=1).values)
# COMMAND ----------
df = pd.concat([df1,df2]).reset_index()
labels={
"ds": "Forecast Time",
"yhat": "Forecasted Delay",
"model": "Model Stage"
}
fig = px.line(df, x="ds", y="yhat", color='model', title=f"{airport_code} delay forecast by model stage", labels=labels)
fig.show()
# COMMAND ----------
# MAGIC %md
# MAGIC ## Monitoring the model performance
# COMMAND ----------
train_df = spark.sql('''
SELECT
a.hour as ds,
EXTRACT(year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format((datetime.strptime(end_date, '%Y-%m-%d') - timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), end_date, station_id)
)
# COMMAND ----------
airport = dbutils.widgets.get('00.Airport_Code')
airport_id = stationDF[stationDF['name']==airport]['station_id'].values[0]
model_name = "{}-reg-rf-model".format(airport_id)
prod_version = None
stage_version = None
# get the respective versions
for mv in client.search_model_versions(f"name='{model_name}'"):
if dict(mv)['current_stage'] == 'Staging':
stage_version=dict(mv)['version']
elif dict(mv)['current_stage'] == 'Production':
prod_version=dict(mv)['version']
if prod_version is not None:
# load the training data associated with the production model
prod_model = mlflow.sklearn.load_model(f"models:/{model_name}/Production")
pdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{prod_version}';""").toPandas()
if stage_version is not None:
    # load the training data associated with the staging model
stage_model = mlflow.sklearn.load_model(f"models:/{model_name}/Staging")
sdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{stage_version}';""").toPandas()
# COMMAND ----------
pdf['stage']="prod"
pdf['residual']=pdf['y']-pdf['yhat']
sdf['stage']="staging"
sdf['residual']=sdf['y']-sdf['yhat']
df=pd.concat([pdf,sdf])
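# A numeric companion to the residual plot below (a sketch; the 'stage' and
# 'residual' columns come from the frames assembled above):
mae_by_stage = df.groupby('stage')['residual'].apply(lambda r: r.abs().mean())
print(mae_by_stage)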
# COMMAND ----------
fig = px.scatter(
df, x='yhat', y='residual',
marginal_y='violin',
color='stage', trendline='ols',
title=f"{airport} delay forecast model performance comparison"
)
fig.show()
# COMMAND ----------
# MAGIC %md
# MAGIC ## Use Tensorflow Validation Library
# MAGIC - check schema between the training and serving periods of time
# MAGIC - check for data drift and skew between training and serving
# COMMAND ----------
from sklearn.model_selection import train_test_split
import tensorflow_data_validation as tfdv
from tensorflow_data_validation.utils.display_util import get_statistics_html
import warnings
warnings.filterwarnings("ignore", message=r"Passing", category=FutureWarning)
stats_train=tfdv.generate_statistics_from_dataframe(dataframe=train_df.toPandas())
stats_serve=tfdv.generate_statistics_from_dataframe(dataframe=fdf.toPandas())
schema = tfdv.infer_schema(statistics=stats_train)
tfdv.display_schema(schema=schema)
# COMMAND ----------
# Compare evaluation data with training data
displayHTML(get_statistics_html(lhs_statistics=stats_serve, rhs_statistics=stats_train,
lhs_name='SERVE_DATASET', rhs_name='TRAIN_DATASET'))
# COMMAND ----------
anomalies = tfdv.validate_statistics(statistics=stats_serve, schema=schema)
tfdv.display_anomalies(anomalies)
# COMMAND ----------
# Add skew and drift comparators
temp_f = tfdv.get_feature(schema, 'temp_f')
temp_f.skew_comparator.jensen_shannon_divergence.threshold = 0
temp_f.drift_comparator.jensen_shannon_divergence.threshold = 0
precip_mm = tfdv.get_feature(schema, 'precip_mm')
precip_mm.skew_comparator.jensen_shannon_divergence.threshold = 0
precip_mm.drift_comparator.jensen_shannon_divergence.threshold = 0
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
hour = tfdv.get_feature(schema, 'hour')
hour.skew_comparator.jensen_shannon_divergence.threshold = 0
hour.drift_comparator.jensen_shannon_divergence.threshold = 0
dayofweek = tfdv.get_feature(schema, 'dayofweek')
dayofweek.skew_comparator.jensen_shannon_divergence.threshold = 0
dayofweek.drift_comparator.jensen_shannon_divergence.threshold = 0
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
tfdv.display_anomalies(_anomalies)
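# COMMAND ----------

# Note: a Jensen-Shannon threshold of 0 flags any measurable train/serve
# shift. A more forgiving setup would use a small positive threshold
# (value illustrative):
#
#   temp_f.drift_comparator.jensen_shannon_divergence.threshold = 0.05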
# COMMAND ----------
# MAGIC %md
# MAGIC ## Promote model if selected
# COMMAND ----------
# promote staging to production
if promote_model and stage_version is not None and prod_version is not None:
# Archive the production model
client.transition_model_version_stage(
name=model_name,
version=prod_version,
stage="Archived"
)
# Staging --> Production
client.transition_model_version_stage(
name=model_name,
version=stage_version,
stage="Production"
)
# COMMAND ----------
import json
# Return Success
dbutils.notebook.exit(json.dumps({"exit_code": "Success"})) | 31.652482 | 164 | 0.720816 |
stationDF=get_bike_stations()[['name','station_id','lat','lon']]
stationsOfInterestDF = spark.sql("""select distinct(station_id) from citibike.forecast_regression_timeweather;""").toPandas()
stationDF = stationDF[stationDF['station_id'].apply(lambda x: int(x) in list(stationsOfInterestDF.values.flatten()))]
from datetime import datetime as dt
from datetime import timedelta
dbutils.widgets.removeAll()
dbutils.widgets.dropdown("00.Airport_Code", "JFK", ["JFK","SEA","BOS","ATL","LAX","SFO","DEN","DFW","ORD","CVG","CLT","DCA","IAH"])
dbutils.widgets.text('01.training_start_date', "2018-01-01")
dbutils.widgets.text('02.training_end_date', "2019-03-15")
dbutils.widgets.text('03.inference_date', (dt.strptime(str(dbutils.widgets.get('02.training_end_date')), "%Y-%m-%d") + timedelta(days=1)).strftime("%Y-%m-%d"))
dbutils.widgets.text('04.promote_model', "No")
training_start_date = str(dbutils.widgets.get('01.training_start_date'))
training_end_date = str(dbutils.widgets.get('02.training_end_date'))
inference_date = str(dbutils.widgets.get('03.inference_date'))
airport_code = str(dbutils.widgets.get('00.Airport_Code'))
if dbutils.widgets.get("05.promote_model")=='Yes':
promote_model = True
else:
promote_model = False
print(airport_code,training_start_date,training_end_date,inference_date,promote_model)
import mlflow
from mlflow.tracking import MlflowClient
import pandas as pd
import plotly.express as px
from datetime import timedelta, datetime
client = MlflowClient()
fdf = spark.sql('''
SELECT
a.hour as ds,
EXTRACT(year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format(end_date, (datetime.strptime(end_date, '%Y-%m-%d') + timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), station_id)
)
df1=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df1['model']='Production'
df1['yhat']=prod_model.predict(df1.drop(["ds","model"], axis=1).values)
df2=fdf.toPandas().fillna(method='ffill').fillna(method='bfill')
df2['model']='Staging'
df2['yhat']=stage_model.predict(df2.drop(["ds","model"], axis=1).values)
df = pd.concat([df1,df2]).reset_index()
labels={
"ds": "Forecast Time",
"yhat": "Forecasted Delay",
"model": "Model Stage"
}
fig = px.line(df, x="ds", y="yhat", color='model', title=f"{airport_code} delay forecast by model stage", labels=labels)
fig.show()
train_df = spark.sql('''
SELECT
a.hour as ds,
EXTRACT(year from a.hour) as year,
EXTRACT(dayofweek from a.hour) as dayofweek,
EXTRACT(hour from a.hour) as hour,
CASE WHEN d.date IS NULL THEN 0 ELSE 1 END as is_holiday,
COALESCE(c.tot_precip_mm,0) as precip_mm,
c.avg_temp_f as temp_f
FROM ( -- all rental hours by currently active stations
SELECT
y.station_id,
x.hour
FROM citibike.periods x
INNER JOIN citibike.stations_most_active y
ON x.hour BETWEEN '{0}' AND '{1}'
) a
LEFT OUTER JOIN citibike.rentals b
ON a.station_id=b.station_id AND a.hour=b.hour
LEFT OUTER JOIN citibike.weather c
ON a.hour=c.time
LEFT OUTER JOIN citibike.holidays d
ON TO_DATE(a.hour)=d.date
WHERE a.station_id = '{2}'
'''.format((datetime.strptime(end_date, '%Y-%m-%d') - timedelta(hours=int(hours_to_forecast))).strftime("%Y-%m-%d %H:%M:%S"), end_date, station_id)
)
airport = dbutils.widgets.get('00.Airport_Code')
airport_id = stationDF[stationDF['name']==airport]['station_id'].values[0]
model_name = "{}-reg-rf-model".format(airport_id)
prod_version = None
stage_version = None
for mv in client.search_model_versions(f"name='{model_name}'"):
if dict(mv)['current_stage'] == 'Staging':
stage_version=dict(mv)['version']
elif dict(mv)['current_stage'] == 'Production':
prod_version=dict(mv)['version']
if prod_version is not None:
prod_model = mlflow.sklearn.load_model(f"models:/{model_name}/Production")
pdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{prod_version}';""").toPandas()
if stage_version is not None:
stage_model = mlflow.sklearn.load_model(f"models:/{model_name}/Staging")
sdf = spark.sql(f"""SELECT * from citibike.forecast_regression_timeweather WHERE station_id = '{station_id}' and model_version = '{stage_version}';""").toPandas()
pdf['stage']="prod"
pdf['residual']=pdf['y']-pdf['yhat']
sdf['stage']="staging"
sdf['residual']=sdf['y']-sdf['yhat']
df=pd.concat([pdf,sdf])
fig = px.scatter(
df, x='yhat', y='residual',
marginal_y='violin',
color='stage', trendline='ols',
title=f"{airport} delay forecast model performance comparison"
)
fig.show()
from sklearn.model_selection import train_test_split
import tensorflow_data_validation as tfdv
from tensorflow_data_validation.utils.display_util import get_statistics_html
import warnings
warnings.filterwarnings("ignore", message=r"Passing", category=FutureWarning)
stats_train=tfdv.generate_statistics_from_dataframe(dataframe=train_df.toPandas())
stats_serve=tfdv.generate_statistics_from_dataframe(dataframe=fdf.toPandas())
schema = tfdv.infer_schema(statistics=stats_train)
tfdv.display_schema(schema=schema)
displayHTML(get_statistics_html(lhs_statistics=stats_serve, rhs_statistics=stats_train,
lhs_name='SERVE_DATASET', rhs_name='TRAIN_DATASET'))
anomalies = tfdv.validate_statistics(statistics=stats_serve, schema=schema)
tfdv.display_anomalies(anomalies)
temp_f = tfdv.get_feature(schema, 'temp_f')
temp_f.skew_comparator.jensen_shannon_divergence.threshold = 0
temp_f.drift_comparator.jensen_shannon_divergence.threshold = 0
precip_mm = tfdv.get_feature(schema, 'precip_mm')
precip_mm.skew_comparator.jensen_shannon_divergence.threshold = 0
precip_mm.drift_comparator.jensen_shannon_divergence.threshold = 0
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
hour = tfdv.get_feature(schema, 'hour')
hour.skew_comparator.jensen_shannon_divergence.threshold = 0
hour.drift_comparator.jensen_shannon_divergence.threshold = 0
dayofweek = tfdv.get_feature(schema, 'dayofweek')
dayofweek.skew_comparator.jensen_shannon_divergence.threshold = 0
dayofweek.drift_comparator.jensen_shannon_divergence.threshold = 0
_anomalies = tfdv.validate_statistics(stats_train, schema, serving_statistics=stats_serve)
tfdv.display_anomalies(_anomalies)
if promote_model and stage_version is not None and prod_version is not None:
client.transition_model_version_stage(
name=model_name,
version=prod_version,
stage="Archived"
)
client.transition_model_version_stage(
name=model_name,
version=stage_version,
stage="Production"
)
import json
dbutils.notebook.exit(json.dumps({"exit_code": "Success"})) | true | true |
f71aac54f88d8ccd203f824b5e35a7cfb34c929b | 15,156 | py | Python | colour/models/rgb/transfer_functions/canon_log.py | soma2000-lang/colour | bb7ee23ac65e09613af78bd18dd98dffb1a2904a | [
"BSD-3-Clause"
] | 1 | 2022-02-12T06:28:15.000Z | 2022-02-12T06:28:15.000Z | colour/models/rgb/transfer_functions/canon_log.py | soma2000-lang/colour | bb7ee23ac65e09613af78bd18dd98dffb1a2904a | [
"BSD-3-Clause"
] | null | null | null | colour/models/rgb/transfer_functions/canon_log.py | soma2000-lang/colour | bb7ee23ac65e09613af78bd18dd98dffb1a2904a | [
"BSD-3-Clause"
] | null | null | null | """
Canon Log Encodings
===================
Defines the *Canon Log* encodings:
- :func:`colour.models.log_encoding_CanonLog`
- :func:`colour.models.log_decoding_CanonLog`
- :func:`colour.models.log_encoding_CanonLog2`
- :func:`colour.models.log_decoding_CanonLog2`
- :func:`colour.models.log_encoding_CanonLog3`
- :func:`colour.models.log_decoding_CanonLog3`
Notes
-----
- :cite:`Canona` is available as a *Drivers & Downloads* *Software* for
Windows 10 (x64) *Operating System*, a copy of the archive is hosted at
this url: https://drive.google.com/open?id=0B_IQZQdc4Vy8ZGYyY29pMEVwZU0
References
----------
- :cite:`Canona` : Canon. (2016). EOS C300 Mark II - EOS C300 Mark II Input
Transform Version 2.0 (for Cinema Gamut / BT.2020). Retrieved August 23,
2016, from
https://www.usa.canon.com/internet/portal/us/home/support/details/cameras/cinema-eos/eos-c300-mark-ii
- :cite:`Thorpe2012a` : Thorpe, L. (2012). CANON-LOG TRANSFER CHARACTERISTIC.
Retrieved September 25, 2014, from
http://downloads.canon.com/CDLC/Canon-Log_Transfer_Characteristic_6-20-2012.pdf
"""
from __future__ import annotations
import numpy as np
from colour.hints import (
Boolean,
FloatingOrArrayLike,
FloatingOrNDArray,
Integer,
)
from colour.models.rgb.transfer_functions import full_to_legal, legal_to_full
from colour.utilities import (
as_float,
domain_range_scale,
from_range_1,
to_domain_1,
)
__author__ = "Colour Developers"
__copyright__ = "Copyright (C) 2013-2022 - Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"log_encoding_CanonLog",
"log_decoding_CanonLog",
"log_encoding_CanonLog2",
"log_decoding_CanonLog2",
"log_encoding_CanonLog3",
"log_decoding_CanonLog3",
]
def log_encoding_CanonLog(
x: FloatingOrArrayLike,
bit_depth: Integer = 10,
out_normalised_code_value: Boolean = True,
in_reflection: Boolean = True,
) -> FloatingOrNDArray:
"""
Defines the *Canon Log* log encoding curve / opto-electronic transfer
function.
Parameters
----------
x
Linear data :math:`x`.
bit_depth
Bit depth used for conversion.
out_normalised_code_value
Whether the *Canon Log* non-linear data is encoded as normalised code
values.
in_reflection
Whether the light level :math:`x` to a camera is reflection.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Canon Log* non-linear data.
References
----------
:cite:`Thorpe2012a`
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``clog`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
Examples
--------
>>> log_encoding_CanonLog(0.18) * 100 # doctest: +ELLIPSIS
34.3389651...
The values of *Table 2 Canon-Log Code Values* table in :cite:`Thorpe2012a`
are obtained as follows:
>>> x = np.array([0, 2, 18, 90, 720]) / 100
    >>> np.around(log_encoding_CanonLog(x) * (2 ** 10 - 1)).astype(int)
    array([ 128,  169,  351,  614, 1016])
>>> np.around(log_encoding_CanonLog(x, 10, False) * 100, 1)
array([ 7.3, 12. , 32.8, 62.7, 108.7])
"""
x = to_domain_1(x)
if in_reflection:
x = x / 0.9
with domain_range_scale("ignore"):
clog = np.where(
x < log_decoding_CanonLog(0.0730597, bit_depth, False),
-(0.529136 * (np.log10(-x * 10.1596 + 1)) - 0.0730597),
0.529136 * np.log10(10.1596 * x + 1) + 0.0730597,
)
clog_cv = (
full_to_legal(clog, bit_depth) if out_normalised_code_value else clog
)
return as_float(from_range_1(clog_cv))
def log_decoding_CanonLog(
clog: FloatingOrArrayLike,
bit_depth: Integer = 10,
in_normalised_code_value: Boolean = True,
out_reflection: Boolean = True,
) -> FloatingOrNDArray:
"""
Defines the *Canon Log* log decoding curve / electro-optical transfer
function.
Parameters
----------
clog
*Canon Log* non-linear data.
bit_depth
Bit depth used for conversion.
in_normalised_code_value
Whether the *Canon Log* non-linear data is encoded with normalised
code values.
out_reflection
Whether the light level :math:`x` to a camera is reflection.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Linear data :math:`x`.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``clog`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`Thorpe2012a`
Examples
--------
>>> log_decoding_CanonLog(34.338965172606912 / 100) # doctest: +ELLIPSIS
0.17999999...
"""
clog = to_domain_1(clog)
clog = legal_to_full(clog, bit_depth) if in_normalised_code_value else clog
x = np.where(
clog < 0.0730597,
-(10 ** ((0.0730597 - clog) / 0.529136) - 1) / 10.1596,
(10 ** ((clog - 0.0730597) / 0.529136) - 1) / 10.1596,
)
if out_reflection:
x = x * 0.9
return as_float(from_range_1(x))
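def _canon_log_roundtrip_check():
    # A minimal sanity sketch (hypothetical helper, not part of the original
    # module): with matching defaults, decoding an encoded value recovers the
    # input, consistent with the doctests above.
    assert np.isclose(log_decoding_CanonLog(log_encoding_CanonLog(0.18)), 0.18)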
def log_encoding_CanonLog2(
x: FloatingOrArrayLike,
bit_depth: Integer = 10,
out_normalised_code_value: Boolean = True,
in_reflection: Boolean = True,
) -> FloatingOrNDArray:
"""
Defines the *Canon Log 2* log encoding curve / opto-electronic transfer
function.
Parameters
----------
x
Linear data :math:`x`.
bit_depth
Bit depth used for conversion.
out_normalised_code_value
Whether the *Canon Log 2* non-linear data is encoded as normalised
code values.
in_reflection
Whether the light level :math:`x` to a camera is reflection.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Canon Log 2* non-linear data.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``clog2`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`Canona`
Examples
--------
>>> log_encoding_CanonLog2(0.18) * 100 # doctest: +ELLIPSIS
39.8254694...
"""
x = to_domain_1(x)
if in_reflection:
x = x / 0.9
with domain_range_scale("ignore"):
clog2 = np.where(
x < log_decoding_CanonLog2(0.035388128, bit_depth, False),
-(0.281863093 * (np.log10(-x * 87.09937546 + 1)) - 0.035388128),
0.281863093 * np.log10(x * 87.09937546 + 1) + 0.035388128,
)
clog2_cv = (
full_to_legal(clog2, bit_depth) if out_normalised_code_value else clog2
)
return as_float(from_range_1(clog2_cv))
def log_decoding_CanonLog2(
clog2: FloatingOrArrayLike,
bit_depth: Integer = 10,
in_normalised_code_value: Boolean = True,
out_reflection: Boolean = True,
) -> FloatingOrNDArray:
"""
Defines the *Canon Log 2* log decoding curve / electro-optical transfer
function.
Parameters
----------
clog2
*Canon Log 2* non-linear data.
bit_depth
Bit depth used for conversion.
in_normalised_code_value
Whether the *Canon Log 2* non-linear data is encoded with normalised
code values.
out_reflection
Whether the light level :math:`x` to a camera is reflection.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Linear data :math:`x`.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``clog2`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`Canona`
Examples
--------
>>> log_decoding_CanonLog2(39.825469498316735 / 100) # doctest: +ELLIPSIS
0.1799999...
"""
clog2 = to_domain_1(clog2)
clog2 = (
legal_to_full(clog2, bit_depth) if in_normalised_code_value else clog2
)
x = np.where(
clog2 < 0.035388128,
-(10 ** ((0.035388128 - clog2) / 0.281863093) - 1) / 87.09937546,
(10 ** ((clog2 - 0.035388128) / 0.281863093) - 1) / 87.09937546,
)
if out_reflection:
x = x * 0.9
return as_float(from_range_1(x))
def log_encoding_CanonLog3(
x: FloatingOrArrayLike,
bit_depth: Integer = 10,
out_normalised_code_value: Boolean = True,
in_reflection: Boolean = True,
) -> FloatingOrNDArray:
"""
Defines the *Canon Log 3* log encoding curve / opto-electronic transfer
function.
Parameters
----------
x
Linear data :math:`x`.
bit_depth
Bit depth used for conversion.
out_normalised_code_value
Whether the *Canon Log 3* non-linear data is encoded as normalised code
values.
in_reflection
Whether the light level :math:`x` to a camera is reflection.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
*Canon Log 3* non-linear data.
Notes
-----
- Introspection of the grafting points by Shaw, N. (2018) shows that the
*Canon Log 3* IDT was likely derived from its encoding curve as the
        latter is grafted at *+/-0.014*::
>>> clog3 = 0.04076162
>>> (clog3 - 0.073059361) / 2.3069815
-0.014000000000000002
>>> clog3 = 0.105357102
>>> (clog3 - 0.073059361) / 2.3069815
0.013999999999999997
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``clog3`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`Canona`
Examples
--------
>>> log_encoding_CanonLog3(0.18) * 100 # doctest: +ELLIPSIS
34.3389369...
"""
x = to_domain_1(x)
if in_reflection:
x = x / 0.9
with domain_range_scale("ignore"):
clog3 = np.select(
(
x
< log_decoding_CanonLog3(0.04076162, bit_depth, False, False),
x
<= log_decoding_CanonLog3(
0.105357102, bit_depth, False, False
),
x
> log_decoding_CanonLog3(0.105357102, bit_depth, False, False),
),
(
-0.42889912 * np.log10(-x * 14.98325 + 1) + 0.07623209,
2.3069815 * x + 0.073059361,
0.42889912 * np.log10(x * 14.98325 + 1) + 0.069886632,
),
)
clog3_cv = (
full_to_legal(clog3, bit_depth) if out_normalised_code_value else clog3
)
return as_float(from_range_1(clog3_cv))
def log_decoding_CanonLog3(
clog3: FloatingOrArrayLike,
bit_depth: Integer = 10,
in_normalised_code_value: Boolean = True,
out_reflection: Boolean = True,
) -> FloatingOrNDArray:
"""
Defines the *Canon Log 3* log decoding curve / electro-optical transfer
function.
Parameters
----------
clog3
*Canon Log 3* non-linear data.
bit_depth
Bit depth used for conversion.
in_normalised_code_value
Whether the *Canon Log 3* non-linear data is encoded with normalised
code values.
out_reflection
Whether the light level :math:`x` to a camera is reflection.
Returns
-------
:class:`numpy.floating` or :class:`numpy.ndarray`
Linear data :math:`x`.
Notes
-----
+------------+-----------------------+---------------+
| **Domain** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``clog3`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
+------------+-----------------------+---------------+
| **Range** | **Scale - Reference** | **Scale - 1** |
+============+=======================+===============+
| ``x`` | [0, 1] | [0, 1] |
+------------+-----------------------+---------------+
References
----------
:cite:`Canona`
Examples
--------
>>> log_decoding_CanonLog3(34.338936938868677 / 100) # doctest: +ELLIPSIS
0.1800000...
"""
clog3 = to_domain_1(clog3)
clog3 = (
legal_to_full(clog3, bit_depth) if in_normalised_code_value else clog3
)
x = np.select(
(clog3 < 0.04076162, clog3 <= 0.105357102, clog3 > 0.105357102),
(
-(10 ** ((0.07623209 - clog3) / 0.42889912) - 1) / 14.98325,
(clog3 - 0.073059361) / 2.3069815,
(10 ** ((clog3 - 0.069886632) / 0.42889912) - 1) / 14.98325,
),
)
if out_reflection:
x = x * 0.9
return as_float(from_range_1(x))
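def _canon_log3_graft_check():
    # A minimal sketch (hypothetical helper, not part of the original module)
    # verifying the grafting note in the ``log_encoding_CanonLog3`` docstring:
    # the linear segment meets the log segments at clog3 values 0.04076162 and
    # 0.105357102, i.e. at linear values of roughly -0.014 and +0.014.
    for clog3 in (0.04076162, 0.105357102):
        x = log_decoding_CanonLog3(
            clog3, in_normalised_code_value=False, out_reflection=False)
        assert np.isclose(abs(x), 0.014)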
| 29.202312 | 105 | 0.490433 |
from __future__ import annotations
import numpy as np
from colour.hints import (
Boolean,
FloatingOrArrayLike,
FloatingOrNDArray,
Integer,
)
from colour.models.rgb.transfer_functions import full_to_legal, legal_to_full
from colour.utilities import (
as_float,
domain_range_scale,
from_range_1,
to_domain_1,
)
__author__ = "Colour Developers"
__copyright__ = "Copyright (C) 2013-2022 - Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"log_encoding_CanonLog",
"log_decoding_CanonLog",
"log_encoding_CanonLog2",
"log_decoding_CanonLog2",
"log_encoding_CanonLog3",
"log_decoding_CanonLog3",
]
def log_encoding_CanonLog(
x: FloatingOrArrayLike,
bit_depth: Integer = 10,
out_normalised_code_value: Boolean = True,
in_reflection: Boolean = True,
) -> FloatingOrNDArray:
x = to_domain_1(x)
if in_reflection:
x = x / 0.9
with domain_range_scale("ignore"):
clog = np.where(
x < log_decoding_CanonLog(0.0730597, bit_depth, False),
-(0.529136 * (np.log10(-x * 10.1596 + 1)) - 0.0730597),
0.529136 * np.log10(10.1596 * x + 1) + 0.0730597,
)
clog_cv = (
full_to_legal(clog, bit_depth) if out_normalised_code_value else clog
)
return as_float(from_range_1(clog_cv))
def log_decoding_CanonLog(
clog: FloatingOrArrayLike,
bit_depth: Integer = 10,
in_normalised_code_value: Boolean = True,
out_reflection: Boolean = True,
) -> FloatingOrNDArray:
clog = to_domain_1(clog)
clog = legal_to_full(clog, bit_depth) if in_normalised_code_value else clog
x = np.where(
clog < 0.0730597,
-(10 ** ((0.0730597 - clog) / 0.529136) - 1) / 10.1596,
(10 ** ((clog - 0.0730597) / 0.529136) - 1) / 10.1596,
)
if out_reflection:
x = x * 0.9
return as_float(from_range_1(x))
def log_encoding_CanonLog2(
x: FloatingOrArrayLike,
bit_depth: Integer = 10,
out_normalised_code_value: Boolean = True,
in_reflection: Boolean = True,
) -> FloatingOrNDArray:
x = to_domain_1(x)
if in_reflection:
x = x / 0.9
with domain_range_scale("ignore"):
clog2 = np.where(
x < log_decoding_CanonLog2(0.035388128, bit_depth, False),
-(0.281863093 * (np.log10(-x * 87.09937546 + 1)) - 0.035388128),
0.281863093 * np.log10(x * 87.09937546 + 1) + 0.035388128,
)
clog2_cv = (
full_to_legal(clog2, bit_depth) if out_normalised_code_value else clog2
)
return as_float(from_range_1(clog2_cv))
def log_decoding_CanonLog2(
clog2: FloatingOrArrayLike,
bit_depth: Integer = 10,
in_normalised_code_value: Boolean = True,
out_reflection: Boolean = True,
) -> FloatingOrNDArray:
clog2 = to_domain_1(clog2)
clog2 = (
legal_to_full(clog2, bit_depth) if in_normalised_code_value else clog2
)
x = np.where(
clog2 < 0.035388128,
-(10 ** ((0.035388128 - clog2) / 0.281863093) - 1) / 87.09937546,
(10 ** ((clog2 - 0.035388128) / 0.281863093) - 1) / 87.09937546,
)
if out_reflection:
x = x * 0.9
return as_float(from_range_1(x))
def log_encoding_CanonLog3(
x: FloatingOrArrayLike,
bit_depth: Integer = 10,
out_normalised_code_value: Boolean = True,
in_reflection: Boolean = True,
) -> FloatingOrNDArray:
x = to_domain_1(x)
if in_reflection:
x = x / 0.9
with domain_range_scale("ignore"):
clog3 = np.select(
(
x
< log_decoding_CanonLog3(0.04076162, bit_depth, False, False),
x
<= log_decoding_CanonLog3(
0.105357102, bit_depth, False, False
),
x
> log_decoding_CanonLog3(0.105357102, bit_depth, False, False),
),
(
-0.42889912 * np.log10(-x * 14.98325 + 1) + 0.07623209,
2.3069815 * x + 0.073059361,
0.42889912 * np.log10(x * 14.98325 + 1) + 0.069886632,
),
)
clog3_cv = (
full_to_legal(clog3, bit_depth) if out_normalised_code_value else clog3
)
return as_float(from_range_1(clog3_cv))
def log_decoding_CanonLog3(
clog3: FloatingOrArrayLike,
bit_depth: Integer = 10,
in_normalised_code_value: Boolean = True,
out_reflection: Boolean = True,
) -> FloatingOrNDArray:
clog3 = to_domain_1(clog3)
clog3 = (
legal_to_full(clog3, bit_depth) if in_normalised_code_value else clog3
)
x = np.select(
(clog3 < 0.04076162, clog3 <= 0.105357102, clog3 > 0.105357102),
(
-(10 ** ((0.07623209 - clog3) / 0.42889912) - 1) / 14.98325,
(clog3 - 0.073059361) / 2.3069815,
(10 ** ((clog3 - 0.069886632) / 0.42889912) - 1) / 14.98325,
),
)
if out_reflection:
x = x * 0.9
return as_float(from_range_1(x))
| true | true |
f71aad03581521af34e46f4263fc80abdb4a99c3 | 6,135 | py | Python | asposewordscloud/models/requests/insert_list_online_request.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 14 | 2018-07-15T17:01:52.000Z | 2018-11-29T06:15:33.000Z | asposewordscloud/models/requests/insert_list_online_request.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 1 | 2018-09-28T12:59:34.000Z | 2019-10-08T08:42:59.000Z | asposewordscloud/models/requests/insert_list_online_request.py | aspose-words-cloud/aspose-words-cloud-python | 65c7b55fa4aac69b60d41e7f54aed231df285479 | [
"MIT"
] | 2 | 2020-12-21T07:59:17.000Z | 2022-02-16T21:41:25.000Z | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose" file="insert_list_online_request.py">
# Copyright (c) 2021 Aspose.Words for Cloud
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import json
from six.moves.urllib.parse import quote
from asposewordscloud import *
from asposewordscloud.models import *
from asposewordscloud.models.requests import *
from asposewordscloud.models.responses import *
class InsertListOnlineRequest(BaseRequestObject):
"""
Request model for insert_list_online operation.
Initializes a new instance.
:param document The document.
:param list_insert List object.
:param load_encoding Encoding that will be used to load an HTML (or TXT) document if the encoding is not specified in HTML.
:param password Password for opening an encrypted document.
:param dest_file_name Result path of the document after the operation. If this parameter is omitted then result of the operation will be saved as the source document.
:param revision_author Initials of the author to use for revisions.If you set this parameter and then make some changes to the document programmatically, save the document and later open the document in MS Word you will see these changes as revisions.
:param revision_date_time The date and time to use for revisions.
"""
def __init__(self, document, list_insert, load_encoding=None, password=None, dest_file_name=None, revision_author=None, revision_date_time=None):
self.document = document
self.list_insert = list_insert
self.load_encoding = load_encoding
self.password = password
self.dest_file_name = dest_file_name
self.revision_author = revision_author
self.revision_date_time = revision_date_time
def create_http_request(self, api_client):
# verify the required parameter 'document' is set
if self.document is None:
raise ValueError("Missing the required parameter `document` when calling `insert_list_online`") # noqa: E501
# verify the required parameter 'list_insert' is set
if self.list_insert is None:
raise ValueError("Missing the required parameter `list_insert` when calling `insert_list_online`") # noqa: E501
path = '/v4.0/words/online/post/lists'
path_params = {}
# path parameters
collection_formats = {}
if path_params:
path_params = api_client.sanitize_for_serialization(path_params)
path_params = api_client.parameters_to_tuples(path_params, collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
path = path.replace(
'{%s}' % k,
quote(str(v), safe=api_client.configuration.safe_chars_for_path_param)
)
# remove optional path parameters
path = path.replace('//', '/')
query_params = []
if self.load_encoding is not None:
query_params.append(('loadEncoding', self.load_encoding)) # noqa: E501
if self.password is not None:
query_params.append(('password', self.password)) # noqa: E501
if self.dest_file_name is not None:
query_params.append(('destFileName', self.dest_file_name)) # noqa: E501
if self.revision_author is not None:
query_params.append(('revisionAuthor', self.revision_author)) # noqa: E501
if self.revision_date_time is not None:
query_params.append(('revisionDateTime', self.revision_date_time)) # noqa: E501
header_params = {}
# HTTP header `Content-Type`
header_params['Content-Type'] = api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
form_params = []
if self.document is not None:
form_params.append(['document', self.document, 'file']) # noqa: E501
if self.list_insert is not None:
form_params.append(['listInsert', self.list_insert.to_json(), 'string']) # noqa: E501
body_params = None
return {
"method": "PUT",
"path": path,
"query_params": query_params,
"header_params": header_params,
"form_params": form_params,
"body": body_params,
"collection_formats": collection_formats,
"response_type": 'InsertListOnlineResponse' # noqa: E501
}
def get_response_type(self):
return 'InsertListOnlineResponse' # noqa: E501
def deserialize_response(self, api_client, response):
multipart = self.getparts(response)
return InsertListOnlineResponse(
self.deserialize(json.loads(multipart[0].text), ListResponse, api_client),
self.deserialize_file(multipart[1].content, multipart[1].headers, api_client))
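# A minimal usage sketch (hypothetical, not part of the generated client):
# build the request with a document stream and a ListInsert model, then hand
# it to an authenticated api_client exposing the RestClient interface used in
# create_http_request() above. The ListInsert template value is illustrative.
#
# with open('document.docx', 'rb') as document:
#     request = InsertListOnlineRequest(
#         document=document,
#         list_insert=ListInsert(template='OutlineLegal'),
#         revision_author='editor')
#     http_request = request.create_http_request(api_client)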
| 49.08 | 255 | 0.669927 |
import json
from six.moves.urllib.parse import quote
from asposewordscloud import *
from asposewordscloud.models import *
from asposewordscloud.models.requests import *
from asposewordscloud.models.responses import *
class InsertListOnlineRequest(BaseRequestObject):
def __init__(self, document, list_insert, load_encoding=None, password=None, dest_file_name=None, revision_author=None, revision_date_time=None):
self.document = document
self.list_insert = list_insert
self.load_encoding = load_encoding
self.password = password
self.dest_file_name = dest_file_name
self.revision_author = revision_author
self.revision_date_time = revision_date_time
def create_http_request(self, api_client):
if self.document is None:
raise ValueError("Missing the required parameter `document` when calling `insert_list_online`")
if self.list_insert is None:
raise ValueError("Missing the required parameter `list_insert` when calling `insert_list_online`")
path = '/v4.0/words/online/post/lists'
path_params = {}
collection_formats = {}
if path_params:
path_params = api_client.sanitize_for_serialization(path_params)
path_params = api_client.parameters_to_tuples(path_params, collection_formats)
for k, v in path_params:
path = path.replace(
'{%s}' % k,
quote(str(v), safe=api_client.configuration.safe_chars_for_path_param)
)
path = path.replace('//', '/')
query_params = []
if self.load_encoding is not None:
query_params.append(('loadEncoding', self.load_encoding))
if self.password is not None:
query_params.append(('password', self.password))
if self.dest_file_name is not None:
query_params.append(('destFileName', self.dest_file_name))
if self.revision_author is not None:
query_params.append(('revisionAuthor', self.revision_author))
if self.revision_date_time is not None:
query_params.append(('revisionDateTime', self.revision_date_time))
header_params = {}
header_params['Content-Type'] = api_client.select_header_content_type(
['multipart/form-data'])
form_params = []
if self.document is not None:
form_params.append(['document', self.document, 'file'])
if self.list_insert is not None:
form_params.append(['listInsert', self.list_insert.to_json(), 'string'])
body_params = None
return {
"method": "PUT",
"path": path,
"query_params": query_params,
"header_params": header_params,
"form_params": form_params,
"body": body_params,
"collection_formats": collection_formats,
"response_type": 'InsertListOnlineResponse'
}
def get_response_type(self):
return 'InsertListOnlineResponse'
def deserialize_response(self, api_client, response):
multipart = self.getparts(response)
return InsertListOnlineResponse(
self.deserialize(json.loads(multipart[0].text), ListResponse, api_client),
self.deserialize_file(multipart[1].content, multipart[1].headers, api_client))
| true | true |
f71aad2d5eeb4c38a35396239e2ecb41a34883a8 | 1,177 | py | Python | test/test_execute_python.py | RuneLjungmann/excelbind | 29522ec43ce691dfd591b0452d63b7e1b36ad875 | [
"MIT"
] | 8 | 2020-09-25T08:57:31.000Z | 2022-02-02T18:52:09.000Z | test/test_execute_python.py | RuneLjungmann/excelbind | 29522ec43ce691dfd591b0452d63b7e1b36ad875 | [
"MIT"
] | 2 | 2021-09-05T11:19:36.000Z | 2021-09-08T00:13:48.000Z | test/test_execute_python.py | RuneLjungmann/excelbind | 29522ec43ce691dfd591b0452d63b7e1b36ad875 | [
"MIT"
] | 1 | 2020-09-25T08:56:25.000Z | 2020-09-25T08:56:25.000Z | from test.utilities.env_vars import set_env_vars
from test.utilities.excel import Excel
def test_simple_script_for_addition(xll_addin_path):
with set_env_vars('basic_functions'):
with Excel() as excel:
excel.register_xll(xll_addin_path)
(
excel.new_workbook()
.range('A1').set(3.0)
.range('A2').set(4.0)
.range('B1').set_formula('=excelbind.execute_python("return arg0 + arg1", A1, A2)')
.calculate()
)
assert excel.range('B1').value == 7.0
print("done testing")
def test_combination_str_n_float(xll_addin_path):
with set_env_vars('basic_functions'):
with Excel() as excel:
excel.register_xll(xll_addin_path)
(
excel.new_workbook()
.range('A1').set("Hello times ")
.range('A2').set(3.0)
.range('B1').set_formula('=excelbind.execute_python("return arg0 + str(arg1)", A1, A2)')
.calculate()
)
assert excel.range('B1').value == 'Hello times 3.0'
print("done testing")
| 31.810811 | 104 | 0.548853 | from test.utilities.env_vars import set_env_vars
from test.utilities.excel import Excel
def test_simple_script_for_addition(xll_addin_path):
with set_env_vars('basic_functions'):
with Excel() as excel:
excel.register_xll(xll_addin_path)
(
excel.new_workbook()
.range('A1').set(3.0)
.range('A2').set(4.0)
.range('B1').set_formula('=excelbind.execute_python("return arg0 + arg1", A1, A2)')
.calculate()
)
assert excel.range('B1').value == 7.0
print("done testing")
def test_combination_str_n_float(xll_addin_path):
with set_env_vars('basic_functions'):
with Excel() as excel:
excel.register_xll(xll_addin_path)
(
excel.new_workbook()
.range('A1').set("Hello times ")
.range('A2').set(3.0)
.range('B1').set_formula('=excelbind.execute_python("return arg0 + str(arg1)", A1, A2)')
.calculate()
)
assert excel.range('B1').value == 'Hello times 3.0'
print("done testing")
| true | true |
f71aad9b00e3ad94ed69d13f4f8b2c42d39eda6d | 2,324 | py | Python | tempest/tests/lib/services/compute/test_tenant_networks_client.py | mail2nsrajesh/tempest | 1a3b3dc50b418d3a15839830d7d1ff88c8c76cff | [
"Apache-2.0"
] | 2 | 2015-08-13T00:07:49.000Z | 2020-08-07T06:38:44.000Z | tempest/tests/lib/services/compute/test_tenant_networks_client.py | mail2nsrajesh/tempest | 1a3b3dc50b418d3a15839830d7d1ff88c8c76cff | [
"Apache-2.0"
] | 1 | 2019-08-08T10:36:44.000Z | 2019-08-09T05:58:23.000Z | tempest/tests/lib/services/compute/test_tenant_networks_client.py | mail2nsrajesh/tempest | 1a3b3dc50b418d3a15839830d7d1ff88c8c76cff | [
"Apache-2.0"
] | 5 | 2016-06-24T20:03:52.000Z | 2020-02-05T10:14:54.000Z | # Copyright 2015 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.compute import tenant_networks_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestTenantNetworksClient(base.BaseServiceTest):
FAKE_NETWORK = {
"cidr": "None",
"id": "c2329eb4-cc8e-4439-ac4c-932369309e36",
"label": u'\u30d7'
}
FAKE_NETWORKS = [FAKE_NETWORK]
NETWORK_ID = FAKE_NETWORK['id']
def setUp(self):
super(TestTenantNetworksClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = tenant_networks_client.TenantNetworksClient(
fake_auth, 'compute', 'regionOne')
def _test_list_tenant_networks(self, bytes_body=False):
self.check_service_client_function(
self.client.list_tenant_networks,
'tempest.lib.common.rest_client.RestClient.get',
{"networks": self.FAKE_NETWORKS},
bytes_body)
def test_list_tenant_networks_with_str_body(self):
self._test_list_tenant_networks()
def test_list_tenant_networks_with_bytes_body(self):
self._test_list_tenant_networks(bytes_body=True)
def _test_show_tenant_network(self, bytes_body=False):
self.check_service_client_function(
self.client.show_tenant_network,
'tempest.lib.common.rest_client.RestClient.get',
{"network": self.FAKE_NETWORK},
bytes_body,
network_id=self.NETWORK_ID)
def test_show_tenant_network_with_str_body(self):
self._test_show_tenant_network()
def test_show_tenant_network_with_bytes_body(self):
self._test_show_tenant_network(bytes_body=True)
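# The str/bytes variants above exercise both response-body deserialization
# paths in BaseServiceTest.check_service_client_function, which patches the
# named RestClient method and compares the client's parsed result against
# the FAKE_* fixtures.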
| 36.3125 | 78 | 0.711274 |
from tempest.lib.services.compute import tenant_networks_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestTenantNetworksClient(base.BaseServiceTest):
FAKE_NETWORK = {
"cidr": "None",
"id": "c2329eb4-cc8e-4439-ac4c-932369309e36",
"label": u'\u30d7'
}
FAKE_NETWORKS = [FAKE_NETWORK]
NETWORK_ID = FAKE_NETWORK['id']
def setUp(self):
super(TestTenantNetworksClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = tenant_networks_client.TenantNetworksClient(
fake_auth, 'compute', 'regionOne')
def _test_list_tenant_networks(self, bytes_body=False):
self.check_service_client_function(
self.client.list_tenant_networks,
'tempest.lib.common.rest_client.RestClient.get',
{"networks": self.FAKE_NETWORKS},
bytes_body)
def test_list_tenant_networks_with_str_body(self):
self._test_list_tenant_networks()
def test_list_tenant_networks_with_bytes_body(self):
self._test_list_tenant_networks(bytes_body=True)
def _test_show_tenant_network(self, bytes_body=False):
self.check_service_client_function(
self.client.show_tenant_network,
'tempest.lib.common.rest_client.RestClient.get',
{"network": self.FAKE_NETWORK},
bytes_body,
network_id=self.NETWORK_ID)
def test_show_tenant_network_with_str_body(self):
self._test_show_tenant_network()
def test_show_tenant_network_with_bytes_body(self):
self._test_show_tenant_network(bytes_body=True)
| true | true |
f71aadd3961afa04dc66e19d75c3c36540a1b948 | 1,264 | py | Python | bilalcoin/flatpages_main/migrations/0001_initial.py | jphaser/bilalcoin | 31d8b466912e009c31615b0b1df1afe68ab4bdb8 | [
"MIT"
] | null | null | null | bilalcoin/flatpages_main/migrations/0001_initial.py | jphaser/bilalcoin | 31d8b466912e009c31615b0b1df1afe68ab4bdb8 | [
"MIT"
] | 1 | 2022-03-31T03:16:16.000Z | 2022-03-31T03:16:16.000Z | bilalcoin/flatpages_main/migrations/0001_initial.py | jphaser/bilalcoin | 31d8b466912e009c31615b0b1df1afe68ab4bdb8 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.3 on 2021-05-21 04:17
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='FAQ',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('question', models.CharField(blank=True, max_length=500, null=True, unique=True, verbose_name='FAQ Question')),
('answer', models.TextField(blank=True, null=True, unique=True, verbose_name='FAQ Answer')),
('active', models.BooleanField(default=False, verbose_name='FAQ Active?')),
],
options={
'verbose_name': 'FAQ',
'verbose_name_plural': 'FAQs',
'ordering': ['created'],
},
),
]
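# A minimal sketch (not part of the migration) of the model this migration
# creates; it assumes model_utils.models.TimeStampedModel supplies the
# created/modified fields seen above.
#
# from django.db import models
# from model_utils.models import TimeStampedModel
#
# class FAQ(TimeStampedModel):
#     question = models.CharField('FAQ Question', max_length=500, unique=True,
#                                 null=True, blank=True)
#     answer = models.TextField('FAQ Answer', unique=True, null=True, blank=True)
#     active = models.BooleanField('FAQ Active?', default=False)
#
#     class Meta:
#         verbose_name = 'FAQ'
#         verbose_name_plural = 'FAQs'
#         ordering = ['created']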
| 38.30303 | 147 | 0.613924 |
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='FAQ',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('question', models.CharField(blank=True, max_length=500, null=True, unique=True, verbose_name='FAQ Question')),
('answer', models.TextField(blank=True, null=True, unique=True, verbose_name='FAQ Answer')),
('active', models.BooleanField(default=False, verbose_name='FAQ Active?')),
],
options={
'verbose_name': 'FAQ',
'verbose_name_plural': 'FAQs',
'ordering': ['created'],
},
),
]
| true | true |
f71aaddfd333847ace11e0163cb2a3644b0168e0 | 49,007 | py | Python | salt/crypt.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | null | null | null | salt/crypt.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | null | null | null | salt/crypt.py | preoctopus/salt | aceaaa0e2f2f2ff29a694393bd82bba0d88fa44d | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
'''
The crypt module manages all of the cryptography functions for minions and
masters, encrypting and decrypting payloads, preparing messages, and
authenticating peers
'''
# Import python libs
from __future__ import absolute_import, print_function
import os
import sys
import copy
import time
import hmac
import base64
import hashlib
import logging
import stat
import traceback
import binascii
import weakref
# Import third party libs
import salt.ext.six as six
from salt.ext.six.moves import zip # pylint: disable=import-error,redefined-builtin
try:
from Crypto.Cipher import AES, PKCS1_OAEP
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_v1_5
# let this be imported, if possible
import Crypto.Random # pylint: disable=W0611
except ImportError:
# No need for crypt in local mode
pass
# Import salt libs
import salt.defaults.exitcodes
import salt.utils
import salt.payload
import salt.transport.client
import salt.utils.rsax931
import salt.utils.verify
import salt.version
from salt.exceptions import (
AuthenticationError, SaltClientError, SaltReqTimeoutError, SaltSystemExit
)
import tornado.gen
log = logging.getLogger(__name__)
def dropfile(cachedir, user=None):
'''
Set an AES dropfile to request the master update the publish session key
'''
dfn = os.path.join(cachedir, '.dfn')
# set a mask (to avoid a race condition on file creation) and store original.
mask = os.umask(191)
try:
log.info('Rotating AES key')
if os.path.isfile(dfn):
log.info('AES key rotation already requested')
return
if os.path.isfile(dfn) and not os.access(dfn, os.W_OK):
os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR)
with salt.utils.fopen(dfn, 'wb+') as fp_:
fp_.write('')
os.chmod(dfn, stat.S_IRUSR)
if user:
try:
import pwd
uid = pwd.getpwnam(user).pw_uid
os.chown(dfn, uid, -1)
except (KeyError, ImportError, OSError, IOError):
pass
finally:
os.umask(mask) # restore original umask
def gen_keys(keydir, keyname, keysize, user=None):
'''
Generate a RSA public keypair for use with salt
:param str keydir: The directory to write the keypair to
:param str keyname: The type of salt server for whom this key should be written. (i.e. 'master' or 'minion')
:param int keysize: The number of bits in the key
:param str user: The user on the system who should own this keypair
:rtype: str
:return: Path on the filesystem to the RSA private key
'''
base = os.path.join(keydir, keyname)
priv = '{0}.pem'.format(base)
pub = '{0}.pub'.format(base)
salt.utils.reinit_crypto()
gen = RSA.generate(bits=keysize, e=65537)
if os.path.isfile(priv):
# Between first checking and the generation another process has made
# a key! Use the winner's key
return priv
cumask = os.umask(191)
with salt.utils.fopen(priv, 'wb+') as f:
f.write(gen.exportKey('PEM'))
os.umask(cumask)
with salt.utils.fopen(pub, 'wb+') as f:
f.write(gen.publickey().exportKey('PEM'))
os.chmod(priv, 256)
if user:
try:
import pwd
uid = pwd.getpwnam(user).pw_uid
os.chown(priv, uid, -1)
os.chown(pub, uid, -1)
except (KeyError, ImportError, OSError):
# The specified user was not found, allow the backup systems to
# report the error
pass
return priv
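def _example_gen_keys():
    # A minimal usage sketch (hypothetical helper, not part of this module):
    # generate a throwaway 2048-bit keypair in a temporary pki directory.
    import tempfile
    keydir = tempfile.mkdtemp()
    priv = gen_keys(keydir, 'minion', 2048)
    return priv  # '<keydir>/minion.pem'; '<keydir>/minion.pub' sits beside it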
def sign_message(privkey_path, message):
'''
Use Crypto.Signature.PKCS1_v1_5 to sign a message. Returns the signature.
'''
log.debug('salt.crypt.sign_message: Loading private key')
with salt.utils.fopen(privkey_path) as f:
key = RSA.importKey(f.read())
log.debug('salt.crypt.sign_message: Signing message.')
signer = PKCS1_v1_5.new(key)
return signer.sign(SHA.new(message))
def verify_signature(pubkey_path, message, signature):
'''
Use Crypto.Signature.PKCS1_v1_5 to verify the signature on a message.
Returns True for valid signature.
'''
log.debug('salt.crypt.verify_signature: Loading public key')
with salt.utils.fopen(pubkey_path) as f:
pubkey = RSA.importKey(f.read())
log.debug('salt.crypt.verify_signature: Verifying signature')
verifier = PKCS1_v1_5.new(pubkey)
return verifier.verify(SHA.new(message), signature)
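def _example_sign_and_verify(priv_path, pub_path):
    # A minimal round-trip sketch (hypothetical helper, not part of this
    # module): sign a message with the private key, then verify it against
    # the matching public key.
    sig = sign_message(priv_path, b'example payload')
    return verify_signature(pub_path, b'example payload', sig)  # True on match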
def gen_signature(priv_path, pub_path, sign_path):
'''
creates a signature for the given public-key with
the given private key and writes it to sign_path
'''
with salt.utils.fopen(pub_path) as fp_:
mpub_64 = fp_.read()
mpub_sig = sign_message(priv_path, mpub_64)
mpub_sig_64 = binascii.b2a_base64(mpub_sig)
if os.path.isfile(sign_path):
return False
log.trace('Calculating signature for {0} with {1}'
.format(os.path.basename(pub_path),
os.path.basename(priv_path)))
if os.path.isfile(sign_path):
log.trace('Signature file {0} already exists, please '
'remove it first and try again'.format(sign_path))
else:
with salt.utils.fopen(sign_path, 'wb+') as sig_f:
sig_f.write(mpub_sig_64)
log.trace('Wrote signature to {0}'.format(sign_path))
return True
def private_encrypt(key, message):
'''
Generate an M2Crypto-compatible signature
:param Crypto.PublicKey.RSA._RSAobj key: The RSA key object
:param str message: The message to sign
:rtype: str
:return: The signature, or an empty string if the signature operation failed
'''
signer = salt.utils.rsax931.RSAX931Signer(key.exportKey('PEM'))
return signer.sign(message)
def public_decrypt(pub, message):
'''
Verify an M2Crypto-compatible signature
:param Crypto.PublicKey.RSA._RSAobj key: The RSA public key object
:param str message: The signed message to verify
:rtype: str
:return: The message (or digest) recovered from the signature, or an
empty string if the verification failed
'''
verifier = salt.utils.rsax931.RSAX931Verifier(pub.exportKey('PEM'))
return verifier.verify(message)
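def _example_digest_signature(key):
    # A minimal sketch (hypothetical helper) of the digest-signature pattern
    # used in AsyncAuth.decrypt_aes below: the sender signs a sha256 hexdigest
    # with private_encrypt() and the receiver recovers it with public_decrypt()
    # for comparison. ``key`` is a Crypto.PublicKey.RSA key object.
    digest = hashlib.sha256(b'payload').hexdigest()
    signature = private_encrypt(key, digest)
    return public_decrypt(key.publickey(), signature) == digest  # True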
class MasterKeys(dict):
'''
The Master Keys class is used to manage the RSA public key pair used for
authentication by the master.
It also generates a signing key-pair if enabled with master_sign_key_name.
'''
def __init__(self, opts):
super(MasterKeys, self).__init__()
self.opts = opts
self.pub_path = os.path.join(self.opts['pki_dir'], 'master.pub')
self.rsa_path = os.path.join(self.opts['pki_dir'], 'master.pem')
self.key = self.__get_keys()
self.pub_signature = None
# set names for the signing key-pairs
if opts['master_sign_pubkey']:
# if only the signature is available, use that
if opts['master_use_pubkey_signature']:
self.sig_path = os.path.join(self.opts['pki_dir'],
opts['master_pubkey_signature'])
if os.path.isfile(self.sig_path):
self.pub_signature = salt.utils.fopen(self.sig_path).read()
log.info('Read {0}\'s signature from {1}'
''.format(os.path.basename(self.pub_path),
self.opts['master_pubkey_signature']))
else:
log.error('Signing the master.pub key with a signature is enabled '
'but no signature file found at the defined location '
'{0}'.format(self.sig_path))
log.error('The signature-file may be either named differently '
'or has to be created with \'salt-key --gen-signature\'')
sys.exit(1)
# create a new signing key-pair to sign the masters
# auth-replies when a minion tries to connect
else:
self.pub_sign_path = os.path.join(self.opts['pki_dir'],
opts['master_sign_key_name'] + '.pub')
self.rsa_sign_path = os.path.join(self.opts['pki_dir'],
opts['master_sign_key_name'] + '.pem')
self.sign_key = self.__get_keys(name=opts['master_sign_key_name'])
# We need __setstate__ and __getstate__ to avoid pickling errors since
# some of the member variables correspond to Cython objects which are
# not picklable.
# These methods are only used when pickling so will not be used on
# non-Windows platforms.
def __setstate__(self, state):
self.__init__(state['opts'])
def __getstate__(self):
return {'opts': self.opts}
def __get_keys(self, name='master'):
'''
Returns a key object for a key in the pki-dir
'''
path = os.path.join(self.opts['pki_dir'],
name + '.pem')
if os.path.exists(path):
with salt.utils.fopen(path) as f:
key = RSA.importKey(f.read())
log.debug('Loaded {0} key: {1}'.format(name, path))
else:
log.info('Generating {0} keys: {1}'.format(name, self.opts['pki_dir']))
gen_keys(self.opts['pki_dir'],
name,
self.opts['keysize'],
self.opts.get('user'))
with salt.utils.fopen(self.rsa_path) as f:
key = RSA.importKey(f.read())
return key
def get_pub_str(self, name='master'):
'''
Return the string representation of a public key
in the pki-directory
'''
path = os.path.join(self.opts['pki_dir'],
name + '.pub')
if not os.path.isfile(path):
key = self.__get_keys()
with salt.utils.fopen(path, 'wb+') as f:
f.write(key.publickey().exportKey('PEM'))
return salt.utils.fopen(path).read()
def get_mkey_paths(self):
return self.pub_path, self.rsa_path
def get_sign_paths(self):
return self.pub_sign_path, self.rsa_sign_path
def pubkey_signature(self):
'''
returns the base64 encoded signature from the signature file
or None if the master has its own signing keys
'''
return self.pub_signature
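# A minimal usage sketch (hypothetical option values): MasterKeys only needs
# the pki- and signing-related options from the master config.
#
# keys = MasterKeys({'pki_dir': '/etc/salt/pki/master',
#                    'keysize': 2048,
#                    'master_sign_pubkey': False})
# print(keys.get_pub_str())  # PEM text of master.pub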
class AsyncAuth(object):
'''
Set up an Async object to maintain authentication with the salt master
'''
# This class is only a singleton per minion/master pair
# mapping of io_loop -> {key -> auth}
instance_map = weakref.WeakKeyDictionary()
# mapping of key -> creds
creds_map = {}
def __new__(cls, opts, io_loop=None):
'''
Only create one instance of SAuth per __key()
'''
# do we have any mapping for this io_loop
io_loop = io_loop or tornado.ioloop.IOLoop.current()
if io_loop not in AsyncAuth.instance_map:
AsyncAuth.instance_map[io_loop] = weakref.WeakValueDictionary()
loop_instance_map = AsyncAuth.instance_map[io_loop]
key = cls.__key(opts)
if key not in loop_instance_map:
log.debug('Initializing new SAuth for {0}'.format(key))
# we need to make a local variable for this, as we are going to store
# it in a WeakValueDictionary-- which will remove the item if no one
# references it-- this forces a reference while we return to the caller
new_auth = object.__new__(cls)
new_auth.__singleton_init__(opts, io_loop=io_loop)
loop_instance_map[key] = new_auth
else:
log.debug('Re-using SAuth for {0}'.format(key))
return loop_instance_map[key]
@classmethod
def __key(cls, opts, io_loop=None):
return (opts['pki_dir'], # where the keys are stored
opts['id'], # minion ID
opts['master_uri'], # master ID
)
# has to remain empty for singletons, since __init__ will *always* be called
def __init__(self, opts, io_loop=None):
pass
# an init for the singleton instance to call
def __singleton_init__(self, opts, io_loop=None):
'''
Init an Auth instance
:param dict opts: Options for this server
:return: Auth instance
:rtype: Auth
'''
self.opts = opts
self.token = Crypticle.generate_key_string()
self.serial = salt.payload.Serial(self.opts)
self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
if 'syndic_master' in self.opts:
self.mpub = 'syndic_master.pub'
elif 'alert_master' in self.opts:
self.mpub = 'monitor_master.pub'
else:
self.mpub = 'minion_master.pub'
if not os.path.isfile(self.pub_path):
self.get_keys()
self.io_loop = io_loop or tornado.ioloop.IOLoop.current()
salt.utils.reinit_crypto()
key = self.__key(self.opts)
# TODO: if we already have creds for this key, lets just re-use
if key in AsyncAuth.creds_map:
creds = AsyncAuth.creds_map[key]
self._creds = creds
self._crypticle = Crypticle(self.opts, creds['aes'])
self._authenticate_future = tornado.concurrent.Future()
self._authenticate_future.set_result(True)
else:
self.authenticate()
def __deepcopy__(self, memo):
cls = self.__class__
result = cls.__new__(cls, copy.deepcopy(self.opts, memo), io_loop=None)
memo[id(self)] = result
for key in self.__dict__:
if key in ('io_loop',):
# The io_loop has a thread Lock which will fail to be deep
# copied. Skip it because it will just be recreated on the
# new copy.
continue
setattr(result, key, copy.deepcopy(self.__dict__[key], memo))
return result
@property
def creds(self):
return self._creds
@property
def crypticle(self):
return self._crypticle
@property
def authenticated(self):
return hasattr(self, '_authenticate_future') and \
self._authenticate_future.done() and \
self._authenticate_future.exception() is None
def invalidate(self):
if self.authenticated:
del self._authenticate_future
key = self.__key(self.opts)
if key in AsyncAuth.creds_map:
del AsyncAuth.creds_map[key]
def authenticate(self, callback=None):
'''
Ask for this client to reconnect to the origin
This function will de-dupe all calls here and return a *single* future
for the sign-in-- whis way callers can all assume there aren't others
'''
# if an auth is in flight-- and not done-- just pass that back as the future to wait on
if hasattr(self, '_authenticate_future') and not self._authenticate_future.done():
future = self._authenticate_future
else:
future = tornado.concurrent.Future()
self._authenticate_future = future
self.io_loop.add_callback(self._authenticate)
if callback is not None:
def handle_future(future):
response = future.result()
self.io_loop.add_callback(callback, response)
future.add_done_callback(handle_future)
return future
@tornado.gen.coroutine
def _authenticate(self):
'''
Authenticate with the master, this method breaks the functional
paradigm, it will update the master information from a fresh sign
in, signing in can occur as often as needed to keep up with the
revolving master AES key.
:rtype: Crypticle
:returns: A crypticle used for encryption operations
'''
acceptance_wait_time = self.opts['acceptance_wait_time']
acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
if not acceptance_wait_time_max:
acceptance_wait_time_max = acceptance_wait_time
creds = None
while True:
try:
creds = yield self.sign_in()
except SaltClientError:
break
if creds == 'retry':
if self.opts.get('caller'):
print('Minion failed to authenticate with the master, '
'has the minion key been accepted?')
sys.exit(2)
if acceptance_wait_time:
log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
yield tornado.gen.sleep(acceptance_wait_time)
if acceptance_wait_time < acceptance_wait_time_max:
acceptance_wait_time += acceptance_wait_time
log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
continue
break
if not isinstance(creds, dict) or 'aes' not in creds:
try:
del AsyncAuth.creds_map[self.__key(self.opts)]
except KeyError:
pass
self._authenticate_future.set_exception(
SaltClientError('Attempt to authenticate with the salt master failed')
)
else:
AsyncAuth.creds_map[self.__key(self.opts)] = creds
self._creds = creds
self._crypticle = Crypticle(self.opts, creds['aes'])
self._authenticate_future.set_result(True) # mark the sign-in as complete
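    # Note on the retry loop above: 'retry' responses back off by doubling
    # acceptance_wait_time up to acceptance_wait_time_max, and successful
    # credentials are cached in AsyncAuth.creds_map keyed by
    # (pki_dir, minion id, master_uri) so later instances can reuse them.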
@tornado.gen.coroutine
def sign_in(self, timeout=60, safe=True, tries=1):
'''
Send a sign in request to the master, sets the key information and
returns a dict containing the master publish interface to bind to
and the decrypted aes key for transport decryption.
:param int timeout: Number of seconds to wait before timing out the sign-in request
:param bool safe: If True, do not raise an exception on timeout. Retry instead.
:param int tries: The number of times to try to authenticate before giving up.
:raises SaltReqTimeoutError: If the sign-in request has timed out and :param safe: is not set
:return: Return a string on failure indicating the reason for failure. On success, return a dictionary
with the publication port and the shared AES key.
'''
auth = {}
auth_timeout = self.opts.get('auth_timeout', None)
if auth_timeout is not None:
timeout = auth_timeout
auth_safemode = self.opts.get('auth_safemode', None)
if auth_safemode is not None:
safe = auth_safemode
auth_tries = self.opts.get('auth_tries', None)
if auth_tries is not None:
tries = auth_tries
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
auth['master_uri'] = self.opts['master_uri']
channel = salt.transport.client.AsyncReqChannel.factory(self.opts,
crypt='clear',
io_loop=self.io_loop)
try:
payload = yield channel.send(
self.minion_sign_in_payload(),
tries=tries,
timeout=timeout
)
except SaltReqTimeoutError as e:
if safe:
log.warning('SaltReqTimeoutError: {0}'.format(e))
raise tornado.gen.Return('retry')
raise SaltClientError('Attempt to authenticate with the salt master failed with timeout error')
if 'load' in payload:
if 'ret' in payload['load']:
if not payload['load']['ret']:
if self.opts['rejected_retry']:
log.error(
'The Salt Master has rejected this minion\'s public '
'key.\nTo repair this issue, delete the public key '
'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to re-authenticate.'
)
raise tornado.gen.Return('retry')
else:
log.critical(
'The Salt Master has rejected this minion\'s public '
'key!\nTo repair this issue, delete the public key '
'for this minion on the Salt Master and restart this '
'minion.\nOr restart the Salt Master in open mode to '
'clean out the keys. The Salt Minion will now exit.'
)
sys.exit(salt.defaults.exitcodes.EX_OK)
# has the master returned that its maxed out with minions?
elif payload['load']['ret'] == 'full':
raise tornado.gen.Return('full')
else:
log.error(
'The Salt Master has cached the public key for this '
'node, this salt minion will wait for {0} seconds '
'before attempting to re-authenticate'.format(
self.opts['acceptance_wait_time']
)
)
raise tornado.gen.Return('retry')
auth['aes'] = self.verify_master(payload)
if not auth['aes']:
log.critical(
'The Salt Master server\'s public key did not authenticate!\n'
'The master may need to be updated if it is a version of Salt '
'lower than {0}, or\n'
'If you are confident that you are connecting to a valid Salt '
'Master, then remove the master public key and restart the '
'Salt Minion.\nThe master public key can be found '
'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
)
raise SaltSystemExit('Invalid master key')
if self.opts.get('syndic_master', False): # Is syndic
syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
if syndic_finger:
if salt.utils.pem_finger(m_pub_fn) != syndic_finger:
self._finger_fail(syndic_finger, m_pub_fn)
else:
if self.opts.get('master_finger', False):
if salt.utils.pem_finger(m_pub_fn) != self.opts['master_finger']:
self._finger_fail(self.opts['master_finger'], m_pub_fn)
auth['publish_port'] = payload['publish_port']
raise tornado.gen.Return(auth)
def get_keys(self):
'''
Return keypair object for the minion.
:rtype: Crypto.PublicKey.RSA._RSAobj
:return: The RSA keypair
'''
# Make sure all key parent directories are accessible
user = self.opts.get('user', 'root')
salt.utils.verify.check_path_traversal(self.opts['pki_dir'], user)
if os.path.exists(self.rsa_path):
with salt.utils.fopen(self.rsa_path) as f:
key = RSA.importKey(f.read())
log.debug('Loaded minion key: {0}'.format(self.rsa_path))
else:
log.info('Generating keys: {0}'.format(self.opts['pki_dir']))
gen_keys(self.opts['pki_dir'],
'minion',
self.opts['keysize'],
self.opts.get('user'))
with salt.utils.fopen(self.rsa_path) as f:
key = RSA.importKey(f.read())
return key
def gen_token(self, clear_tok):
'''
Encrypt a string with the minion private key to verify identity
with the master.
:param str clear_tok: A plaintext token to encrypt
:return: Encrypted token
:rtype: str
'''
return private_encrypt(self.get_keys(), clear_tok)
def minion_sign_in_payload(self):
'''
Generates the payload used to authenticate with the master
server. This payload consists of the passed in id_ and the ssh
public key to encrypt the AES key sent back from the master.
:return: Payload dictionary
:rtype: dict
'''
payload = {}
payload['cmd'] = '_auth'
payload['id'] = self.opts['id']
try:
pubkey_path = os.path.join(self.opts['pki_dir'], self.mpub)
with salt.utils.fopen(pubkey_path) as f:
pub = RSA.importKey(f.read())
cipher = PKCS1_OAEP.new(pub)
payload['token'] = cipher.encrypt(self.token)
except Exception:
pass
with salt.utils.fopen(self.pub_path) as f:
payload['pub'] = f.read()
return payload
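    # The encrypted token above round-trips for mutual verification: the
    # master decrypts it with its private key and returns it re-encrypted
    # under the minion's public key, and extract_aes() below compares the
    # decrypted value against self.token.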
def decrypt_aes(self, payload, master_pub=True):
'''
This function is used to decrypt the AES seed phrase returned from
the master server. The seed phrase is decrypted with the SSH RSA
host key.
Pass in the encrypted AES key.
Returns the decrypted AES seed key, a string
:param dict payload: The incoming payload. This is a dictionary which may have the following keys:
'aes': The shared AES key
'enc': The format of the message. ('clear', 'pub', etc)
'sig': The message signature
'publish_port': The TCP port which published the message
'token': The encrypted token used to verify the message.
'pub_key': The public key of the sender.
:rtype: str
:return: The decrypted token that was provided, with padding.
:rtype: str
:return: The decrypted AES seed key
'''
if self.opts.get('auth_trb', False):
log.warning(
'Auth Called: {0}'.format(
''.join(traceback.format_stack())
)
)
else:
log.debug('Decrypting the current master AES key')
key = self.get_keys()
cipher = PKCS1_OAEP.new(key)
key_str = cipher.decrypt(payload['aes'])
if 'sig' in payload:
m_path = os.path.join(self.opts['pki_dir'], self.mpub)
if os.path.exists(m_path):
try:
with salt.utils.fopen(m_path) as f:
mkey = RSA.importKey(f.read())
except Exception:
return '', ''
digest = hashlib.sha256(key_str).hexdigest()
m_digest = public_decrypt(mkey.publickey(), payload['sig'])
if m_digest != digest:
return '', ''
else:
return '', ''
if '_|-' in key_str:
return key_str.split('_|-')
else:
if 'token' in payload:
token = cipher.decrypt(payload['token'])
return key_str, token
elif not master_pub:
return key_str, ''
return '', ''
def verify_pubkey_sig(self, message, sig):
'''
Wraps the verify_signature method so we have
additional checks.
:rtype: bool
:return: Success or failure of public key verification
'''
if self.opts['master_sign_key_name']:
path = os.path.join(self.opts['pki_dir'],
self.opts['master_sign_key_name'] + '.pub')
if os.path.isfile(path):
res = verify_signature(path,
message,
binascii.a2b_base64(sig))
else:
log.error('Verification public key {0} does not exist. You '
'need to copy it from the master to the minions '
'pki directory'.format(os.path.basename(path)))
return False
if res:
log.debug('Successfully verified signature of master '
'public key with verification public key '
'{0}'.format(self.opts['master_sign_key_name'] + '.pub'))
return True
else:
log.debug('Failed to verify signature of public key')
return False
else:
log.error('Failed to verify the signature of the message because '
'the verification key-pairs name is not defined. Please '
'make sure that master_sign_key_name is defined.')
return False
def verify_signing_master(self, payload):
try:
if self.verify_pubkey_sig(payload['pub_key'],
payload['pub_sig']):
log.info('Received signed and verified master pubkey '
'from master {0}'.format(self.opts['master']))
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
uid = salt.utils.get_uid(self.opts.get('user', None))
with salt.utils.fpopen(m_pub_fn, 'wb+', uid=uid) as wfh:
wfh.write(payload['pub_key'])
return True
else:
log.error('Received signed public-key from master {0} '
'but signature verification failed!'.format(self.opts['master']))
return False
except Exception as sign_exc:
log.error('There was an error while verifying the masters public-key signature')
raise Exception(sign_exc)
def check_auth_deps(self, payload):
'''
Checks if both master and minion either sign (master) and
verify (minion). If one side does not, it should fail.
:param dict payload: The incoming payload. This is a dictionary which may have the following keys:
'aes': The shared AES key
'enc': The format of the message. ('clear', 'pub', 'aes')
'publish_port': The TCP port which published the message
'token': The encrypted token used to verify the message.
'pub_key': The RSA public key of the sender.
'''
# master and minion sign and verify
if 'pub_sig' in payload and self.opts['verify_master_pubkey_sign']:
return True
# master and minion do NOT sign and do NOT verify
elif 'pub_sig' not in payload and not self.opts['verify_master_pubkey_sign']:
return True
# master signs, but minion does NOT verify
elif 'pub_sig' in payload and not self.opts['verify_master_pubkey_sign']:
            log.error('The master sent its public-key signature, but signature '
'verification is not enabled on the minion. Either enable '
'signature verification on the minion or disable signing '
'the public key on the master!')
return False
# master does NOT sign but minion wants to verify
elif 'pub_sig' not in payload and self.opts['verify_master_pubkey_sign']:
log.error('The master did not send its public-key signature, but '
'signature verification is enabled on the minion. Either '
'disable signature verification on the minion or enable '
                      'signing the public key on the master!')
return False
def extract_aes(self, payload, master_pub=True):
'''
Return the AES key received from the master after the minion has been
successfully authenticated.
:param dict payload: The incoming payload. This is a dictionary which may have the following keys:
'aes': The shared AES key
'enc': The format of the message. ('clear', 'pub', etc)
'publish_port': The TCP port which published the message
'token': The encrypted token used to verify the message.
'pub_key': The RSA public key of the sender.
:rtype: str
:return: The shared AES key received from the master.
'''
if master_pub:
try:
aes, token = self.decrypt_aes(payload, master_pub)
if token != self.token:
log.error(
'The master failed to decrypt the random minion token'
)
return ''
except Exception:
log.error(
'The master failed to decrypt the random minion token'
)
return ''
return aes
else:
aes, token = self.decrypt_aes(payload, master_pub)
return aes
def verify_master(self, payload):
'''
Verify that the master is the same one that was previously accepted.
:param dict payload: The incoming payload. This is a dictionary which may have the following keys:
'aes': The shared AES key
'enc': The format of the message. ('clear', 'pub', etc)
'publish_port': The TCP port which published the message
'token': The encrypted token used to verify the message.
'pub_key': The RSA public key of the sender.
:rtype: str
        :return: An empty string on verification failure. On success, the decrypted AES key from the payload.
'''
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
if os.path.isfile(m_pub_fn) and not self.opts['open_mode']:
local_master_pub = salt.utils.fopen(m_pub_fn).read()
if payload['pub_key'].replace('\n', '').replace('\r', '') != \
local_master_pub.replace('\n', '').replace('\r', ''):
if not self.check_auth_deps(payload):
return ''
if self.opts['verify_master_pubkey_sign']:
if self.verify_signing_master(payload):
return self.extract_aes(payload, master_pub=False)
else:
return ''
else:
# This is not the last master we connected to
log.error('The master key has changed, the salt master could '
'have been subverted, verify salt master\'s public '
'key')
return ''
else:
if not self.check_auth_deps(payload):
return ''
# verify the signature of the pubkey even if it has
# not changed compared with the one we already have
if self.opts['always_verify_signature']:
if self.verify_signing_master(payload):
return self.extract_aes(payload)
else:
                        log.error('The master\'s public key could not be verified. Is the '
'verification pubkey {0} up to date?'
''.format(self.opts['master_sign_key_name'] + '.pub'))
return ''
else:
return self.extract_aes(payload)
else:
if not self.check_auth_deps(payload):
return ''
# verify the masters pubkey signature if the minion
# has not received any masters pubkey before
if self.opts['verify_master_pubkey_sign']:
if self.verify_signing_master(payload):
return self.extract_aes(payload, master_pub=False)
else:
return ''
# the minion has not received any masters pubkey yet, write
# the newly received pubkey to minion_master.pub
else:
salt.utils.fopen(m_pub_fn, 'wb+').write(payload['pub_key'])
return self.extract_aes(payload, master_pub=False)
# TODO: remove, we should just return a sync wrapper of AsyncAuth
class SAuth(AsyncAuth):
'''
Set up an object to maintain authentication with the salt master
'''
# This class is only a singleton per minion/master pair
instances = weakref.WeakValueDictionary()
def __new__(cls, opts, io_loop=None):
'''
Only create one instance of SAuth per __key()
'''
key = cls.__key(opts)
if key not in SAuth.instances:
log.debug('Initializing new SAuth for {0}'.format(key))
new_auth = object.__new__(cls)
new_auth.__singleton_init__(opts)
SAuth.instances[key] = new_auth
else:
log.debug('Re-using SAuth for {0}'.format(key))
return SAuth.instances[key]
@classmethod
def __key(cls, opts, io_loop=None):
return (opts['pki_dir'], # where the keys are stored
opts['id'], # minion ID
opts['master_uri'], # master ID
)
# has to remain empty for singletons, since __init__ will *always* be called
def __init__(self, opts, io_loop=None):
super(SAuth, self).__init__(opts, io_loop=io_loop)
# an init for the singleton instance to call
def __singleton_init__(self, opts, io_loop=None):
'''
Init an Auth instance
:param dict opts: Options for this server
:return: Auth instance
:rtype: Auth
'''
self.opts = opts
self.token = Crypticle.generate_key_string()
self.serial = salt.payload.Serial(self.opts)
self.pub_path = os.path.join(self.opts['pki_dir'], 'minion.pub')
self.rsa_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
if 'syndic_master' in self.opts:
self.mpub = 'syndic_master.pub'
elif 'alert_master' in self.opts:
self.mpub = 'monitor_master.pub'
else:
self.mpub = 'minion_master.pub'
if not os.path.isfile(self.pub_path):
self.get_keys()
@property
def creds(self):
if not hasattr(self, '_creds'):
self.authenticate()
return self._creds
@property
def crypticle(self):
if not hasattr(self, '_crypticle'):
self.authenticate()
return self._crypticle
def authenticate(self, _=None): # TODO: remove unused var
        '''
        Authenticate with the master. This method breaks the functional
        paradigm: it updates the master information from a fresh sign-in,
        and signing in can occur as often as needed to keep up with the
        rotating master AES key.

        :rtype: Crypticle
        :returns: A crypticle used for encryption operations
        '''
acceptance_wait_time = self.opts['acceptance_wait_time']
acceptance_wait_time_max = self.opts['acceptance_wait_time_max']
if not acceptance_wait_time_max:
acceptance_wait_time_max = acceptance_wait_time
while True:
creds = self.sign_in()
if creds == 'retry':
if self.opts.get('caller'):
print('Minion failed to authenticate with the master, '
'has the minion key been accepted?')
sys.exit(2)
if acceptance_wait_time:
log.info('Waiting {0} seconds before retry.'.format(acceptance_wait_time))
time.sleep(acceptance_wait_time)
if acceptance_wait_time < acceptance_wait_time_max:
acceptance_wait_time += acceptance_wait_time
log.debug('Authentication wait time is {0}'.format(acceptance_wait_time))
continue
break
self._creds = creds
self._crypticle = Crypticle(self.opts, creds['aes'])
def sign_in(self, timeout=60, safe=True, tries=1):
'''
Send a sign in request to the master, sets the key information and
returns a dict containing the master publish interface to bind to
and the decrypted aes key for transport decryption.
:param int timeout: Number of seconds to wait before timing out the sign-in request
:param bool safe: If True, do not raise an exception on timeout. Retry instead.
:param int tries: The number of times to try to authenticate before giving up.
        :raises SaltReqTimeoutError: If the sign-in request times out and ``safe`` is not set
:return: Return a string on failure indicating the reason for failure. On success, return a dictionary
with the publication port and the shared AES key.
'''
auth = {}
auth_timeout = self.opts.get('auth_timeout', None)
if auth_timeout is not None:
timeout = auth_timeout
auth_safemode = self.opts.get('auth_safemode', None)
if auth_safemode is not None:
safe = auth_safemode
auth_tries = self.opts.get('auth_tries', None)
if auth_tries is not None:
tries = auth_tries
m_pub_fn = os.path.join(self.opts['pki_dir'], self.mpub)
auth['master_uri'] = self.opts['master_uri']
channel = salt.transport.client.ReqChannel.factory(self.opts, crypt='clear')
try:
payload = channel.send(
self.minion_sign_in_payload(),
tries=tries,
timeout=timeout
)
except SaltReqTimeoutError as e:
if safe:
log.warning('SaltReqTimeoutError: {0}'.format(e))
return 'retry'
raise SaltClientError('Attempt to authenticate with the salt master failed')
if 'load' in payload:
if 'ret' in payload['load']:
if not payload['load']['ret']:
if self.opts['rejected_retry']:
log.error(
'The Salt Master has rejected this minion\'s public '
'key.\nTo repair this issue, delete the public key '
'for this minion on the Salt Master.\nThe Salt '
                            'Minion will attempt to re-authenticate.'
)
return 'retry'
else:
log.critical(
'The Salt Master has rejected this minion\'s public '
'key!\nTo repair this issue, delete the public key '
'for this minion on the Salt Master and restart this '
'minion.\nOr restart the Salt Master in open mode to '
'clean out the keys. The Salt Minion will now exit.'
)
sys.exit(salt.defaults.exitcodes.EX_OK)
                # has the master returned that it's maxed out with minions?
elif payload['load']['ret'] == 'full':
return 'full'
else:
log.error(
'The Salt Master has cached the public key for this '
'node. If this is the first time connecting to this master '
'then this key may need to be accepted using \'salt-key -a {0}\' on '
'the salt master. This salt minion will wait for {1} seconds '
'before attempting to re-authenticate.'.format(
self.opts['id'],
self.opts['acceptance_wait_time']
)
)
return 'retry'
auth['aes'] = self.verify_master(payload)
if not auth['aes']:
log.critical(
'The Salt Master server\'s public key did not authenticate!\n'
'The master may need to be updated if it is a version of Salt '
'lower than {0}, or\n'
'If you are confident that you are connecting to a valid Salt '
'Master, then remove the master public key and restart the '
'Salt Minion.\nThe master public key can be found '
'at:\n{1}'.format(salt.version.__version__, m_pub_fn)
)
sys.exit(42)
if self.opts.get('syndic_master', False): # Is syndic
syndic_finger = self.opts.get('syndic_finger', self.opts.get('master_finger', False))
if syndic_finger:
if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != syndic_finger:
self._finger_fail(syndic_finger, m_pub_fn)
else:
if self.opts.get('master_finger', False):
if salt.utils.pem_finger(m_pub_fn, sum_type=self.opts['hash_type']) != self.opts['master_finger']:
self._finger_fail(self.opts['master_finger'], m_pub_fn)
auth['publish_port'] = payload['publish_port']
return auth
def _finger_fail(self, finger, master_key):
log.critical(
'The specified fingerprint in the master configuration '
'file:\n{0}\nDoes not match the authenticating master\'s '
'key:\n{1}\nVerify that the configured fingerprint '
'matches the fingerprint of the correct master and that '
'this minion is not subject to a man-in-the-middle attack.'
.format(
finger,
salt.utils.pem_finger(master_key, sum_type=self.opts['hash_type'])
)
)
sys.exit(42)
class Crypticle(object):
'''
Authenticated encryption class
Encryption algorithm: AES-CBC
Signing algorithm: HMAC-SHA256
'''
PICKLE_PAD = 'pickle::'
AES_BLOCK_SIZE = 16
SIG_SIZE = hashlib.sha256().digest_size
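    # Key-string layout: generate_key_string() emits key_size / 8 bytes of
    # AES key followed by SIG_SIZE bytes of HMAC key, base64-encoded into a
    # single string; extract_keys() splits the two back apart.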
def __init__(self, opts, key_string, key_size=192):
self.key_string = key_string
self.keys = self.extract_keys(self.key_string, key_size)
self.key_size = key_size
self.serial = salt.payload.Serial(opts)
@classmethod
def generate_key_string(cls, key_size=192):
key = os.urandom(key_size // 8 + cls.SIG_SIZE)
b64key = base64.b64encode(key)
if six.PY3:
b64key = b64key.decode('utf-8')
return b64key.replace('\n', '')
@classmethod
def extract_keys(cls, key_string, key_size):
        key = base64.b64decode(key_string)
assert len(key) == key_size / 8 + cls.SIG_SIZE, 'invalid key'
return key[:-cls.SIG_SIZE], key[-cls.SIG_SIZE:]
def encrypt(self, data):
'''
encrypt data with AES-CBC and sign it with HMAC-SHA256
'''
aes_key, hmac_key = self.keys
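        # PKCS#7-style padding: append `pad` bytes that each hold the pad
        # length, so decrypt() can strip them by reading the final byte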
pad = self.AES_BLOCK_SIZE - len(data) % self.AES_BLOCK_SIZE
data = data + pad * chr(pad)
iv_bytes = os.urandom(self.AES_BLOCK_SIZE)
cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
data = iv_bytes + cypher.encrypt(data)
sig = hmac.new(hmac_key, data, hashlib.sha256).digest()
return data + sig
def decrypt(self, data):
'''
verify HMAC-SHA256 signature and decrypt data with AES-CBC
'''
aes_key, hmac_key = self.keys
sig = data[-self.SIG_SIZE:]
data = data[:-self.SIG_SIZE]
mac_bytes = hmac.new(hmac_key, data, hashlib.sha256).digest()
if len(mac_bytes) != len(sig):
log.debug('Failed to authenticate message')
raise AuthenticationError('message authentication failed')
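        # compare the digests byte-by-byte, OR-ing together the XOR of each
        # pair so the loop runs in constant time and leaks no timing
        # information about where the first mismatch occurs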
result = 0
for zipped_x, zipped_y in zip(mac_bytes, sig):
result |= ord(zipped_x) ^ ord(zipped_y)
if result != 0:
log.debug('Failed to authenticate message')
raise AuthenticationError('message authentication failed')
iv_bytes = data[:self.AES_BLOCK_SIZE]
data = data[self.AES_BLOCK_SIZE:]
cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes)
data = cypher.decrypt(data)
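        # the final byte records the pad length written by encrypt()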
return data[:-ord(data[-1])]
def dumps(self, obj):
'''
Serialize and encrypt a python object
'''
return self.encrypt(self.PICKLE_PAD + self.serial.dumps(obj))
def loads(self, data):
'''
Decrypt and un-serialize a python object
'''
data = self.decrypt(data)
# simple integrity check to verify that we got meaningful data
if not data.startswith(self.PICKLE_PAD):
return {}
return self.serial.loads(data[len(self.PICKLE_PAD):])
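# A minimal round-trip sketch of Crypticle (hypothetical usage; assumes
# `opts` is a valid Salt options dict accepted by salt.payload.Serial):
#
#     key_str = Crypticle.generate_key_string()
#     crypt = Crypticle(opts, key_str)
#     blob = crypt.dumps({'fun': 'test.ping'})
#     assert crypt.loads(blob) == {'fun': 'test.ping'}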
# --- booktrade/booktrade/wsgi.py (repo: rocity/dj-booktrade, license: Apache-2.0) ---
"""
WSGI config for booktrade project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "booktrade.settings")
application = get_wsgi_application()
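# Deployment sketch (assumption, not part of this file): point any WSGI
# server at the `application` object, e.g.
#
#     gunicorn booktrade.wsgi:application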
# --- utils.py (repo: chaitanyamalaviya/NeuralFactorGraph, license: MIT) ---
from __future__ import division, print_function
from conllu.parser import parse, parse_tree
from tags import Tags, Tag, Label
import os
import re
import math
import numpy as np
import itertools
import pdb
import pickle
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import torch
from torch.autograd import Variable
import torch.nn.functional as F
np.set_printoptions(threshold=np.inf)  # always print full arrays
FROZEN_TAG = "__frozen__"
def freeze_dict(obj):
if isinstance(obj, dict):
dict_items = list(obj.items())
dict_items.append((FROZEN_TAG, True))
return tuple([(k, freeze_dict(v)) for k, v in dict_items])
return obj
def unfreeze_dict(obj):
if isinstance(obj, tuple):
if (FROZEN_TAG, True) in obj:
out = dict((k, unfreeze_dict(v)) for k, v in obj)
del out[FROZEN_TAG]
return out
return obj
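# Example (sketch): freezing turns a (possibly nested) tag dict into a
# hashable tuple so it can be deduplicated or used as a dict key; unfreezing
# restores the original dict:
#
#     frozen = freeze_dict({'POS': 'NOUN', 'Number': 'Sing'})
#     assert unfreeze_dict(frozen) == {'POS': 'NOUN', 'Number': 'Sing'}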
def get_lang_code_dicts():
"""
Returns lang_to_code, code_to_lang dictionaries
"""
lang_to_code = {}
code_to_lang = {}
bad_chars = ",''"
rgx = re.compile('[%s]' % bad_chars)
with open("data/lang_codes.txt") as f:
data = f.read()
lines = data.split("\n")
split_line = [line.split() for line in lines]
for line in split_line[:-2]:
lang = rgx.sub('', line[0])
code = rgx.sub('', line[2])
lang_to_code[lang] = code
    code_to_lang = {v: k for k, v in lang_to_code.items()}
return lang_to_code, code_to_lang
def read_conll(treebank_path, langs, code_to_lang, train_or_dev, tgt_size=None, test=False):
"""
Reads conll formatted file
langs: list of languages
train: read training data
returns: dict with data for each language
as list of tuples of sentences and morph-tags
"""
annot_sents = {}
unique = []
for lang in langs:
train = train_or_dev if not test else "test"
if not test:
for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
if file.endswith("train.conllu"):
filepath = os.path.join(treebank_path + "UD_" + code_to_lang[lang], file)
break
else:
for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
if file.endswith("dev.conllu"):
filepath = os.path.join(treebank_path+ "UD_" + code_to_lang[lang], file)
break
with open(filepath) as f:
data = f.readlines()[:-1]
data = [line for line in data if line[0]!='#']
split_data = " ".join(data).split("\n \n")
ud = [parse(sent)[0] for sent in split_data]
all_text = []
all_tags = []
if langs[-1]==lang and tgt_size:
tgt_size = min(tgt_size, len(ud))
ud = ud[:tgt_size]
for sent in ud:
sent_text = []
sent_tags = []
for word in sent:
word_tags = {}
if word['feats']:
word_tags = dict(word['feats'])
if word['upostag']:
if word_tags:
word_tags.update({'POS':word['upostag']})
else:
word_tags = {'POS':word['upostag']}
if word_tags:
word_tags = freeze_dict(word_tags)
if word_tags not in unique:
unique.append(word_tags)
sent_text.append(word['form'])
sent_tags.append(freeze_dict(word_tags))
all_text.append(sent_text)
all_tags.append(sent_tags)
annot_sents[lang] = [(w, m) for w, m in zip(all_text, all_tags)]
return annot_sents, unique
def addNullLabels(annot_sents, langs, unique_tags):
for lang in langs:
i = 0
for w, m in annot_sents[lang]:
new_tags = []
for tags in m:
tag_dict = unfreeze_dict(tags)
for tag in unique_tags:
if tag.name not in tag_dict:
tag_dict[tag.name] = "NULL"
new_tags.append(freeze_dict(tag_dict))
annot_sents[lang][i] = (w, new_tags)
i += 1
return annot_sents
def sortbylength(data, lang_ids, maxlen=500):
"""
:param data: List of tuples of source sentences and morph tags
:param lang_ids: List of lang IDs for each sentence
:param maxlen: Maximum sentence length permitted
:return: Sorted data and sorted langIDs
"""
src = [elem[0] for elem in data]
tgt = [elem[1] for elem in data]
indexed_src = [(i,src[i]) for i in range(len(src))]
sorted_indexed_src = sorted(indexed_src, key=lambda x: -len(x[1]))
sorted_src = [item[1] for item in sorted_indexed_src if len(item[1])<maxlen]
sort_order = [item[0] for item in sorted_indexed_src if len(item[1])<maxlen]
sorted_tgt = [tgt[i] for i in sort_order]
sorted_lang_ids = [lang_ids[i] for i in sort_order]
sorted_data = [(src, tgt) for src, tgt in zip(sorted_src, sorted_tgt)]
return sorted_data, sorted_lang_ids
def get_train_order(training_data, batch_size, startIdx=0):
"""
:param data: List of tuples of source sentences and morph tags
:return: start idxs of batches
"""
lengths = [len(elem[0]) for elem in training_data]
start_idxs = []
end_idxs = []
prev_length=-1
batch_counter = 0
for i, length in enumerate(lengths, start=startIdx):
if length!=prev_length or batch_counter>batch_size:
start_idxs.append(i)
if prev_length!=-1:
end_idxs.append(i-1)
batch_counter = 1
batch_counter += 1
prev_length = length
end_idxs.append(startIdx + len(lengths)-1)
return [(s,e) for s,e in zip(start_idxs, end_idxs)]
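# Example (sketch): for sentence lengths [5, 5, 5, 3, 3] and batch_size=2,
# get_train_order returns [(0, 1), (2, 2), (3, 4)] -- a batch never mixes
# lengths and never holds more than batch_size sentences.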
def find_unique_tags(train_data_tags, null_label=False):
unique_tags = Tags()
for tags in train_data_tags:
for tag, label in unfreeze_dict(tags).items():
if not unique_tags.tagExists(tag):
unique_tags.addTag(tag)
curTag = unique_tags.getTagbyName(tag)
if not curTag.labelExists(label):
curTag.addLabel(label)
# Add null labels to unseen tags in each tag set
if null_label:
for tag in unique_tags:
tag.addLabel("NULL")
return unique_tags
def plot_heatmap(uniqueTags, weights, kind):
font = {'family' : 'normal',
'size' : 14,
'weight' : 'bold'}
matplotlib.rc('font', **font)
pairs = list(itertools.combinations(range(uniqueTags.size()), 2))
# weights is a ParameterList
for k, weight in enumerate(weights):
if kind=="pair":
i, j = pairs[k]
tag1 = uniqueTags.getTagbyIdx(i)
tag2 = uniqueTags.getTagbyIdx(j)
tag1_labels = [label.name for label in tag1.labels]
tag2_labels = [label.name for label in tag2.labels]
plt.figure(figsize=(20, 18), dpi=80)
plt.xticks(range(0, len(tag2_labels)), tag2_labels)
plt.yticks(range(0, len(tag1_labels)), tag1_labels)
plt.tick_params(labelsize=25)
plt.xlabel(tag2.name, fontsize=40)
plt.ylabel(tag1.name, fontsize=50)
plt.imshow(weight.data.cpu().numpy(), cmap='Reds', interpolation='nearest')
plt.savefig("figures/" + tag1.name + "_" + tag2.name + ".png", bbox_inches='tight')
plt.close()
elif kind=="trans":
tag = uniqueTags.getTagbyIdx(k)
tag_labels = [label.name for label in tag.labels]
plt.figure(figsize=(20, 18), dpi=80)
plt.xticks(range(0, len(tag_labels)), tag_labels, rotation=45)
plt.yticks(range(0, len(tag_labels)), tag_labels)
plt.tick_params(labelsize=40)
plt.xlabel(tag.name, fontsize=50)
plt.ylabel(tag.name, fontsize=50)
plt.imshow(weight.data.cpu().numpy(), cmap='Greys', interpolation='nearest')
plt.savefig("figures/" + tag.name + "_" + tag.name + ".png", bbox_inches='tight')
plt.close()
def get_var(x, gpu=False, volatile=False):
x = Variable(x, volatile=volatile)
if gpu:
x = x.cuda()
return x
def prepare_sequence(seq, to_ix, gpu=False):
if isinstance(to_ix, dict):
idxs = [to_ix[w] if w in to_ix else to_ix["UNK"] for w in seq]
elif isinstance(to_ix, list):
idxs = [to_ix.index(w) if w in to_ix else to_ix.index("UNK") for w in seq]
tensor = torch.LongTensor(idxs)
return get_var(tensor, gpu)
def to_scalar(var):
# returns a python float
return var.view(-1).data.tolist()[0]
def argmax(vec):
# return the argmax as a python int
_, idx = torch.max(vec, 1)
return to_scalar(idx)
def logSumExp(a, b):
maxi = np.maximum(a, b)
aexp = a - maxi
bexp = b - maxi
sumOfExp = np.exp(aexp) + np.exp(bexp)
return maxi + np.log(sumOfExp)
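# logSumExp above and the tensor variants below all subtract the running
# maximum before exponentiating -- the standard stability trick
#     log(e^a + e^b) = m + log(e^(a-m) + e^(b-m)),  m = max(a, b)
# -- so exp() only ever sees non-positive arguments and cannot overflow.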
def logSumExpTensor(vec):
# vec -> 16, tag_size
batch_size = vec.size()[0]
vec = vec.view(batch_size, -1)
max_score = torch.max(vec, 1)[0]
max_score_broadcast = max_score.view(-1, 1).expand(-1, vec.size()[1])
return max_score + \
torch.log(torch.sum(torch.exp(vec - max_score_broadcast), 1))
def logSumExpTensors(a, b):
maxi = torch.max(a, b)
aexp = a - maxi
bexp = b - maxi
sumOfExp = torch.exp(aexp) + torch.exp(bexp)
return maxi + torch.log(sumOfExp)
def logDot(a, b, redAxis=None):
if redAxis==1:
b = b.transpose()
max_a = np.amax(a)
max_b = np.amax(b)
C = np.dot(np.exp(a - max_a), np.exp(b - max_b))
np.log(C, out=C)
# else:
# np.log(C + 1e-300, out=C)
C += max_a + max_b
return C.transpose() if redAxis==1 else C
def logMax(a, b, redAxis=None):
if redAxis==1:
b = b.transpose()
max_a = np.amax(a)
max_b = np.amax(b)
C = np.max(np.exp(a[:, :, None]-max_a) * np.exp(b[None, :, :]-max_b), axis=1)
# if np.isfinite(C).all():
np.log(C, out=C)
# else:
# np.log(C + 1e-300, out=C)
C += max_a + max_b
return C.transpose() if redAxis==1 else C
def logNormalize(a):
denom = np.logaddexp.reduce(a, 1)
return (a.transpose()- denom).transpose()
def logNormalizeTensor(a):
denom = logSumExpTensor(a)
if len(a.size())==2:
denom = denom.view(-1, 1).expand(-1, a.size()[1])
elif len(a.size())==3:
denom = denom.view(a.size()[0], 1, 1).expand(-1, a.size()[1], a.size()[2])
return (a-denom)
def computeF1(hyps, golds, prefix, labels_to_ix=None, baseline=False, write_results=False):
"""
hyps: List of dicts for predicted morphological tags
golds: List of dicts for gold morphological tags
"""
f1_precision_scores = {}
f1_precision_total = {}
f1_recall_scores = {}
f1_recall_total = {}
f1_average = 0.0
if baseline:
hyps = [unfreeze_dict(h) for h in hyps]
golds = [unfreeze_dict(t) for t in golds]
# calculate precision
for i, word_tags in enumerate(hyps, start=0):
for k, v in word_tags.items():
if v=="NULL":
continue
if k not in f1_precision_scores:
f1_precision_scores[k] = 0
f1_precision_total[k] = 0
if k in golds[i]:
if v==golds[i][k]:
f1_precision_scores[k] += 1
f1_precision_total[k] += 1
f1_micro_precision = sum(f1_precision_scores.values())/sum(f1_precision_total.values())
for k in f1_precision_scores.keys():
f1_precision_scores[k] = f1_precision_scores[k]/f1_precision_total[k]
# calculate recall
for i, word_tags in enumerate(golds, start=0):
for k, v in word_tags.items():
if v=="NULL":
continue
if k not in f1_recall_scores:
f1_recall_scores[k] = 0
f1_recall_total[k] = 0
if k in hyps[i]:
if v==hyps[i][k]:
f1_recall_scores[k] += 1
f1_recall_total[k] += 1
f1_micro_recall = sum(f1_recall_scores.values())/sum(f1_recall_total.values())
f1_scores = {}
for k in f1_recall_scores.keys():
f1_recall_scores[k] = f1_recall_scores[k]/f1_recall_total[k]
if f1_recall_scores[k]==0 or k not in f1_precision_scores:
f1_scores[k] = 0
else:
f1_scores[k] = 2 * (f1_precision_scores[k] * f1_recall_scores[k]) / (f1_precision_scores[k] + f1_recall_scores[k])
f1_average += f1_recall_total[k] * f1_scores[k]
f1_average /= sum(f1_recall_total.values())
f1_micro_score = 2 * (f1_micro_precision * f1_micro_recall) / (f1_micro_precision + f1_micro_recall)
if write_results:
print("Writing F1 scores...")
        with open(prefix + '_results_f1.txt', 'ab') as file:
            file.write(pickle.dumps(f1_scores))
            file.write(("\nMacro-averaged F1 Score: " + str(f1_average)).encode())
            file.write(("\nMicro-averaged F1 Score: " + str(f1_micro_score)).encode())
return f1_average, f1_micro_score
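# Note: f1_average is a frequency-weighted macro F1 (each tag's F1 weighted
# by its gold count), while f1_micro_score pools counts across all tags
# before computing precision and recall.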
def getCorrectCount(golds, hyps):
correct = 0
for i, word_tags in enumerate(golds, start=0):
allCorrect = True
for k, v in word_tags.items():
if k in hyps[i]:
if v!=hyps[i][k]:
allCorrect = False
break
if allCorrect==True:
correct += 1
return correct
| 27.769912 | 120 | 0.634242 | from __future__ import division, print_function
from conllu.parser import parse, parse_tree
from tags import Tags, Tag, Label
import os
import re
import math
import numpy as np
import itertools
import pdb
import pickle
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import torch
from torch.autograd import Variable
import torch.nn.functional as F
np.set_printoptions(threshold=np.nan)
FROZEN_TAG = "__frozen__"
def freeze_dict(obj):
if isinstance(obj, dict):
dict_items = list(obj.items())
dict_items.append((FROZEN_TAG, True))
return tuple([(k, freeze_dict(v)) for k, v in dict_items])
return obj
def unfreeze_dict(obj):
if isinstance(obj, tuple):
if (FROZEN_TAG, True) in obj:
out = dict((k, unfreeze_dict(v)) for k, v in obj)
del out[FROZEN_TAG]
return out
return obj
def get_lang_code_dicts():
lang_to_code = {}
code_to_lang = {}
bad_chars = ",''"
rgx = re.compile('[%s]' % bad_chars)
with open("data/lang_codes.txt") as f:
data = f.read()
lines = data.split("\n")
split_line = [line.split() for line in lines]
for line in split_line[:-2]:
lang = rgx.sub('', line[0])
code = rgx.sub('', line[2])
lang_to_code[lang] = code
code_to_lang = {v: k for k, v in lang_to_code.iteritems()}
return lang_to_code, code_to_lang
def read_conll(treebank_path, langs, code_to_lang, train_or_dev, tgt_size=None, test=False):
annot_sents = {}
unique = []
for lang in langs:
train = train_or_dev if not test else "test"
if not test:
for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
if file.endswith("train.conllu"):
filepath = os.path.join(treebank_path + "UD_" + code_to_lang[lang], file)
break
else:
for file in os.listdir(treebank_path + "UD_" + code_to_lang[lang]):
if file.endswith("dev.conllu"):
filepath = os.path.join(treebank_path+ "UD_" + code_to_lang[lang], file)
break
with open(filepath) as f:
data = f.readlines()[:-1]
data = [line for line in data if line[0]!='#']
split_data = " ".join(data).split("\n \n")
ud = [parse(sent)[0] for sent in split_data]
all_text = []
all_tags = []
if langs[-1]==lang and tgt_size:
tgt_size = min(tgt_size, len(ud))
ud = ud[:tgt_size]
for sent in ud:
sent_text = []
sent_tags = []
for word in sent:
word_tags = {}
if word['feats']:
word_tags = dict(word['feats'])
if word['upostag']:
if word_tags:
word_tags.update({'POS':word['upostag']})
else:
word_tags = {'POS':word['upostag']}
if word_tags:
word_tags = freeze_dict(word_tags)
if word_tags not in unique:
unique.append(word_tags)
sent_text.append(word['form'])
sent_tags.append(freeze_dict(word_tags))
all_text.append(sent_text)
all_tags.append(sent_tags)
annot_sents[lang] = [(w, m) for w, m in zip(all_text, all_tags)]
return annot_sents, unique
def addNullLabels(annot_sents, langs, unique_tags):
for lang in langs:
i = 0
for w, m in annot_sents[lang]:
new_tags = []
for tags in m:
tag_dict = unfreeze_dict(tags)
for tag in unique_tags:
if tag.name not in tag_dict:
tag_dict[tag.name] = "NULL"
new_tags.append(freeze_dict(tag_dict))
annot_sents[lang][i] = (w, new_tags)
i += 1
return annot_sents
def sortbylength(data, lang_ids, maxlen=500):
src = [elem[0] for elem in data]
tgt = [elem[1] for elem in data]
indexed_src = [(i,src[i]) for i in range(len(src))]
sorted_indexed_src = sorted(indexed_src, key=lambda x: -len(x[1]))
sorted_src = [item[1] for item in sorted_indexed_src if len(item[1])<maxlen]
sort_order = [item[0] for item in sorted_indexed_src if len(item[1])<maxlen]
sorted_tgt = [tgt[i] for i in sort_order]
sorted_lang_ids = [lang_ids[i] for i in sort_order]
sorted_data = [(src, tgt) for src, tgt in zip(sorted_src, sorted_tgt)]
return sorted_data, sorted_lang_ids
def get_train_order(training_data, batch_size, startIdx=0):
lengths = [len(elem[0]) for elem in training_data]
start_idxs = []
end_idxs = []
prev_length=-1
batch_counter = 0
for i, length in enumerate(lengths, start=startIdx):
if length!=prev_length or batch_counter>batch_size:
start_idxs.append(i)
if prev_length!=-1:
end_idxs.append(i-1)
batch_counter = 1
batch_counter += 1
prev_length = length
end_idxs.append(startIdx + len(lengths)-1)
return [(s,e) for s,e in zip(start_idxs, end_idxs)]
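# Example: for already length-sorted data with lengths [5, 5, 5, 3, 3] and
# batch_size=2 this returns [(0, 1), (2, 2), (3, 4)] -- each (start, end)
# range covers at most batch_size items of a single sentence length.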
def find_unique_tags(train_data_tags, null_label=False):
unique_tags = Tags()
for tags in train_data_tags:
for tag, label in unfreeze_dict(tags).items():
if not unique_tags.tagExists(tag):
unique_tags.addTag(tag)
curTag = unique_tags.getTagbyName(tag)
if not curTag.labelExists(label):
curTag.addLabel(label)
if null_label:
for tag in unique_tags:
tag.addLabel("NULL")
return unique_tags
def plot_heatmap(uniqueTags, weights, kind):
    font = {'family' : 'sans-serif',
'size' : 14,
'weight' : 'bold'}
matplotlib.rc('font', **font)
pairs = list(itertools.combinations(range(uniqueTags.size()), 2))
for k, weight in enumerate(weights):
if kind=="pair":
i, j = pairs[k]
tag1 = uniqueTags.getTagbyIdx(i)
tag2 = uniqueTags.getTagbyIdx(j)
tag1_labels = [label.name for label in tag1.labels]
tag2_labels = [label.name for label in tag2.labels]
plt.figure(figsize=(20, 18), dpi=80)
plt.xticks(range(0, len(tag2_labels)), tag2_labels)
plt.yticks(range(0, len(tag1_labels)), tag1_labels)
plt.tick_params(labelsize=25)
plt.xlabel(tag2.name, fontsize=40)
plt.ylabel(tag1.name, fontsize=50)
plt.imshow(weight.data.cpu().numpy(), cmap='Reds', interpolation='nearest')
plt.savefig("figures/" + tag1.name + "_" + tag2.name + ".png", bbox_inches='tight')
plt.close()
elif kind=="trans":
tag = uniqueTags.getTagbyIdx(k)
tag_labels = [label.name for label in tag.labels]
plt.figure(figsize=(20, 18), dpi=80)
plt.xticks(range(0, len(tag_labels)), tag_labels, rotation=45)
plt.yticks(range(0, len(tag_labels)), tag_labels)
plt.tick_params(labelsize=40)
plt.xlabel(tag.name, fontsize=50)
plt.ylabel(tag.name, fontsize=50)
plt.imshow(weight.data.cpu().numpy(), cmap='Greys', interpolation='nearest')
plt.savefig("figures/" + tag.name + "_" + tag.name + ".png", bbox_inches='tight')
plt.close()
def get_var(x, gpu=False, volatile=False):
x = Variable(x, volatile=volatile)
if gpu:
x = x.cuda()
return x
def prepare_sequence(seq, to_ix, gpu=False):
if isinstance(to_ix, dict):
idxs = [to_ix[w] if w in to_ix else to_ix["UNK"] for w in seq]
elif isinstance(to_ix, list):
idxs = [to_ix.index(w) if w in to_ix else to_ix.index("UNK") for w in seq]
tensor = torch.LongTensor(idxs)
return get_var(tensor, gpu)
def to_scalar(var):
return var.view(-1).data.tolist()[0]
def argmax(vec):
_, idx = torch.max(vec, 1)
return to_scalar(idx)
def logSumExp(a, b):
maxi = np.maximum(a, b)
aexp = a - maxi
bexp = b - maxi
sumOfExp = np.exp(aexp) + np.exp(bexp)
return maxi + np.log(sumOfExp)
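# The max is factored out for numerical stability, so e.g.
# logSumExp(1000., 1000.) gives 1000. + log(2), where the naive
# np.log(np.exp(1000.) + np.exp(1000.)) would overflow to inf.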
def logSumExpTensor(vec):
batch_size = vec.size()[0]
vec = vec.view(batch_size, -1)
max_score = torch.max(vec, 1)[0]
max_score_broadcast = max_score.view(-1, 1).expand(-1, vec.size()[1])
return max_score + \
torch.log(torch.sum(torch.exp(vec - max_score_broadcast), 1))
def logSumExpTensors(a, b):
maxi = torch.max(a, b)
aexp = a - maxi
bexp = b - maxi
sumOfExp = torch.exp(aexp) + torch.exp(bexp)
return maxi + torch.log(sumOfExp)
def logDot(a, b, redAxis=None):
if redAxis==1:
b = b.transpose()
max_a = np.amax(a)
max_b = np.amax(b)
C = np.dot(np.exp(a - max_a), np.exp(b - max_b))
np.log(C, out=C)
C += max_a + max_b
return C.transpose() if redAxis==1 else C
def logMax(a, b, redAxis=None):
if redAxis==1:
b = b.transpose()
max_a = np.amax(a)
max_b = np.amax(b)
C = np.max(np.exp(a[:, :, None]-max_a) * np.exp(b[None, :, :]-max_b), axis=1)
np.log(C, out=C)
C += max_a + max_b
return C.transpose() if redAxis==1 else C
def logNormalize(a):
denom = np.logaddexp.reduce(a, 1)
return (a.transpose()- denom).transpose()
def logNormalizeTensor(a):
denom = logSumExpTensor(a)
if len(a.size())==2:
denom = denom.view(-1, 1).expand(-1, a.size()[1])
elif len(a.size())==3:
denom = denom.view(a.size()[0], 1, 1).expand(-1, a.size()[1], a.size()[2])
return (a-denom)
def computeF1(hyps, golds, prefix, labels_to_ix=None, baseline=False, write_results=False):
f1_precision_scores = {}
f1_precision_total = {}
f1_recall_scores = {}
f1_recall_total = {}
f1_average = 0.0
if baseline:
hyps = [unfreeze_dict(h) for h in hyps]
golds = [unfreeze_dict(t) for t in golds]
for i, word_tags in enumerate(hyps, start=0):
for k, v in word_tags.items():
if v=="NULL":
continue
if k not in f1_precision_scores:
f1_precision_scores[k] = 0
f1_precision_total[k] = 0
if k in golds[i]:
if v==golds[i][k]:
f1_precision_scores[k] += 1
f1_precision_total[k] += 1
f1_micro_precision = sum(f1_precision_scores.values())/sum(f1_precision_total.values())
for k in f1_precision_scores.keys():
f1_precision_scores[k] = f1_precision_scores[k]/f1_precision_total[k]
for i, word_tags in enumerate(golds, start=0):
for k, v in word_tags.items():
if v=="NULL":
continue
if k not in f1_recall_scores:
f1_recall_scores[k] = 0
f1_recall_total[k] = 0
if k in hyps[i]:
if v==hyps[i][k]:
f1_recall_scores[k] += 1
f1_recall_total[k] += 1
f1_micro_recall = sum(f1_recall_scores.values())/sum(f1_recall_total.values())
f1_scores = {}
for k in f1_recall_scores.keys():
f1_recall_scores[k] = f1_recall_scores[k]/f1_recall_total[k]
if f1_recall_scores[k]==0 or k not in f1_precision_scores:
f1_scores[k] = 0
else:
f1_scores[k] = 2 * (f1_precision_scores[k] * f1_recall_scores[k]) / (f1_precision_scores[k] + f1_recall_scores[k])
f1_average += f1_recall_total[k] * f1_scores[k]
f1_average /= sum(f1_recall_total.values())
f1_micro_score = 2 * (f1_micro_precision * f1_micro_recall) / (f1_micro_precision + f1_micro_recall)
if write_results:
print("Writing F1 scores...")
with open(prefix + '_results_f1.txt', 'ab') as file:
file.write(pickle.dumps(f1_scores))
            # the file is opened in binary mode for pickle, so encode the text
            file.write(("\nMacro-averaged F1 Score: " + str(f1_average)).encode())
            file.write(("\nMicro-averaged F1 Score: " + str(f1_micro_score)).encode())
return f1_average, f1_micro_score
def getCorrectCount(golds, hyps):
correct = 0
for i, word_tags in enumerate(golds, start=0):
allCorrect = True
for k, v in word_tags.items():
if k in hyps[i]:
if v!=hyps[i][k]:
allCorrect = False
break
        if allCorrect:
correct += 1
return correct
| true | true |
f71aaf4aad518c6d9db764a08f3d7f8432489eb7 | 32,580 | py | Python | prody/utilities/catchall.py | bwingert/ProDy | 7377a20b4a4841ec59dccaa93fa58e2ee0fe89bc | [
"MIT"
] | null | null | null | prody/utilities/catchall.py | bwingert/ProDy | 7377a20b4a4841ec59dccaa93fa58e2ee0fe89bc | [
"MIT"
] | null | null | null | prody/utilities/catchall.py | bwingert/ProDy | 7377a20b4a4841ec59dccaa93fa58e2ee0fe89bc | [
"MIT"
] | null | null | null | """This module defines miscellaneous utility functions that is public to users."""
import numpy as np
from numpy import unique, linalg, diag, sqrt, dot
from Bio.Phylo.BaseTree import Tree, Clade
from prody import PY3K
from .misctools import addEnds, interpY, index, isListLike
from .checkers import checkCoords
from .logger import LOGGER
__all__ = ['calcTree', 'clusterMatrix', 'showLines', 'showMatrix',
'reorderMatrix', 'findSubgroups', 'getCoords',
'getLinkage', 'getTreeFromLinkage', 'clusterSubfamilies']
class LinkageError(Exception):
pass
def clusterSubfamilies(similarities, n_clusters=0, linkage='all', method='tsne', cutoff=0.0, **kwargs):
"""Perform clustering based on members of the *ensemble* projected into lower a reduced
dimension.
:arg similarities: a matrix of similarities for each structure in the ensemble, such as
RMSD-matrix, dynamics-based spectral overlap, sequence similarity
:type similarities: :class:`~numpy.ndarray`
:arg n_clusters: the number of clusters to generate. If **0**, will scan a range of
number of clusters and return the best one based on highest
silhouette score. Default is **0**.
:type n_clusters: int
:arg linkage: if **all**, will test all linkage types (ward, average, complete,
single). Otherwise will use only the one(s) given as input. Default is
**all**.
:type linkage: str, list, tuple, :class:`~numpy.ndarray`
    :arg method: if set to **spectral**, will generate a Kirchhoff matrix based on the
                 given cutoff value and use that as input for clustering instead of
                 the values themselves. Default is **tsne**.
    :type method: str
    :arg cutoff: only used if *method* is set to **spectral**. This value is used for
                 generating the Kirchhoff matrix that spectral clustering operates on.
                 Default is **0.0**.
:type cutoff: float
"""
# Import necessary packages
try:
from sklearn.manifold import SpectralEmbedding
from sklearn.cluster import AgglomerativeClustering
from sklearn.metrics import silhouette_score
from sklearn.manifold import TSNE
except ImportError:
raise ImportError('need sklearn module')
'''
try:
import Bio
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
'''
# Check inputs to make sure are of valid types/values
if not isinstance(similarities, np.ndarray):
raise TypeError('similarities should be a numpy ndarray')
dim = similarities.shape
if dim[0] != dim[1]:
raise ValueError('similarities must be a square matrix')
if n_clusters != 0:
if not isinstance(n_clusters, int):
raise TypeError('clusters must be an instance of int')
if n_clusters < 1:
raise ValueError('clusters must be a positive integer')
elif n_clusters > similarities.shape[0]:
raise ValueError('clusters can\'t be longer than similarities matrix')
nclusts = range(n_clusters,n_clusters+1)
else:
nclusts = range(2,10,1)
if linkage != 'all':
# Check if given input for linkage is list-like
if isListLike(linkage):
for val in linkage:
if val.lower() not in ['ward', 'average', 'complete', 'single']:
raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
if len(linkage) > 4:
raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
linkages = [ x.lower() for x in linkage ]
# If not, check if it is a valid string and method name
else:
if not isinstance(linkage, str):
raise TypeError('linkage must be an instance of str or list-like of strs')
if linkage not in ['ward', 'average', 'complete', 'single']:
                raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
linkages = [linkage]
else:
linkages = ['ward', 'average', 'complete', 'single']
if method != 'tsne':
if not isinstance(method, str):
raise TypeError('method must be an instance of str')
if method != 'spectral':
raise ValueError('method must be either \'tsne\' or \'spectral\'')
if not isinstance(cutoff, float):
raise TypeError('cutoff must be an instance of float')
best_score = -1
best_nclust = 0
best_link = ''
best_labels = []
# Scan over range of clusters
for x in nclusts:
if method == 'tsne':
embedding = TSNE(n_components=2)
transform = embedding.fit_transform(similarities)
else:
kirchhoff = np.where(similarities > cutoff, 0, -1)
embedding = SpectralEmbedding(n_components=2)
transform = embedding.fit_transform(kirchhoff)
for link in linkages:
clustering = AgglomerativeClustering(linkage=link, n_clusters=x)
clustering.fit(transform)
silhouette_avg = silhouette_score(transform, clustering.labels_)
if silhouette_avg > best_score:
best_score = silhouette_avg
best_nclust = x
best_link = link
best_labels = clustering.labels_
return best_labels
def getCoords(data):
try:
data = (data._getCoords() if hasattr(data, '_getCoords') else
data.getCoords())
except AttributeError:
try:
checkCoords(data)
except TypeError:
raise TypeError('data must be a Numpy array or an object '
'with `getCoords` method')
return data
def getLinkage(names, tree):
""" Obtain the :func:`~scipy.cluster.hierarchy.linkage` matrix encoding
``tree``.
:arg names: a list of names, the order determines the values in the
linkage matrix
:type names: list, :class:`~numpy.ndarray`
:arg tree: tree to be converted
:type tree: :class:`~Bio.Phylo.BaseTree.Tree`
"""
tree_terminals = tree.get_terminals()
if len(tree_terminals) != len(names):
raise ValueError('inconsistent number of terminals in tree and names')
terminals = [None] * len(names)
for clade in tree_terminals:
i = index(names, clade.name)
terminals[i] = clade
n = len(terminals)
nonterminals = [c for c in reversed(tree.get_nonterminals())]
if len(nonterminals) != n-1:
raise LinkageError('wrong number of terminal clades')
Z = np.zeros((n-1, 4))
root = tree.root
def _indexOfClade(clade):
if clade.is_terminal():
i = index(terminals, clade)
else:
i = index(nonterminals, clade) + n
return i
def _height_of(clade):
if clade.is_terminal():
height = 0
else:
height = max(_height_of(c) + c.branch_length for c in clade.clades)
return height
def _dfs(clade):
if clade.is_terminal():
return
i = _indexOfClade(clade)
clade_a = clade.clades[0]
clade_b = clade.clades[1]
a = _indexOfClade(clade_a)
b = _indexOfClade(clade_b)
l = min(a, b)
r = max(a, b)
Z[i-n, 0] = l
Z[i-n, 1] = r
Z[i-n, 2] = _height_of(clade) * 2.
Z[i-n, 3] = clade.count_terminals()
_dfs(clade_a)
_dfs(clade_b)
_dfs(root)
return Z
def getTreeFromLinkage(names, linkage):
""" Obtain the tree encoded by ``linkage``.
:arg names: a list of names, the order should correspond to the values in
linkage
:type names: list, :class:`~numpy.ndarray`
:arg linkage: linkage matrix
:type linkage: :class:`~numpy.ndarray`
"""
try:
import Bio
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
from Bio.Phylo.BaseTree import Tree, Clade
if not isinstance(linkage, np.ndarray):
raise TypeError('linkage must be a numpy.ndarray instance')
if linkage.ndim != 2:
raise LinkageError('linkage must be a 2-dimensional matrix')
if linkage.shape[1] != 4:
raise LinkageError('linkage must have exactly 4 columns')
n_terms = len(names)
if linkage.shape[0] != n_terms-1:
raise LinkageError('linkage must have exactly len(names)-1 rows')
clades = []
heights = []
for name in names:
clade = Clade(None, name)
clades.append(clade)
heights.append(0.)
for link in linkage:
l = int(link[0])
r = int(link[1])
height = link[2]
left = clades[l]
right = clades[r]
lh = heights[l]
rh = heights[r]
left.branch_length = height - lh
right.branch_length = height - rh
clade = Clade(None, None)
clade.clades.append(left)
clade.clades.append(right)
clades.append(clade)
heights.append(height)
return Tree(clade)
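# Sketch of the expected linkage format (a hypothetical 3-leaf example in
# scipy's convention: [left, right, height, leaf count]):
#   Z = np.array([[0., 1., 2., 2.],
#                 [2., 3., 4., 3.]])
#   tree = getTreeFromLinkage(['a', 'b', 'c'], Z)
# Leaves 'a' and 'b' merge first; branch lengths are the height differences
# between a clade and its children.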
def calcTree(names, distance_matrix, method='upgma', linkage=False):
""" Given a distance matrix, it creates an returns a tree structure.
:arg names: a list of names
:type names: list, :class:`~numpy.ndarray`
    :arg distance_matrix: a square matrix whose size matches the length of *names*. If the
                          sizes do not match, an error is raised
:type distance_matrix: :class:`~numpy.ndarray`
:arg method: method used for constructing the tree. Acceptable options are ``"upgma"``, ``"nj"``,
or methods supported by :func:`~scipy.cluster.hierarchy.linkage` such as ``"single"``,
``"average"``, ``"ward"``, etc. Default is ``"upgma"``
:type method: str
:arg linkage: whether the linkage matrix is returned. Note that NJ trees do not support linkage
:type linkage: bool
"""
try:
import Bio
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
from .TreeConstruction import DistanceMatrix, DistanceTreeConstructor
if len(names) != distance_matrix.shape[0] or len(names) != distance_matrix.shape[1]:
raise ValueError("Mismatch between the sizes of matrix and names.")
method = method.lower().strip()
if method in ['ward', 'single', 'average', 'weighted', 'centroid', 'median']:
from scipy.cluster.hierarchy import linkage as hlinkage
from scipy.spatial.distance import squareform
Z = hlinkage(squareform(distance_matrix), method=method)
tree = getTreeFromLinkage(names, Z)
else:
matrix = []
k = 1
Z = None
for row in distance_matrix:
matrix.append(list(row[:k]))
k = k + 1
if isinstance(names, np.ndarray):
names = names.tolist()
dm = DistanceMatrix(names, matrix)
constructor = DistanceTreeConstructor()
method = method.strip().lower()
if method == 'nj':
tree = constructor.nj(dm)
elif method == 'upgma':
tree = constructor.upgma(dm)
if linkage:
Z = getLinkage(names, tree)
else:
raise ValueError('Method can be only either "nj", "upgma" or '
'hierarchical clustering such as "single", "average", etc.')
for node in tree.get_nonterminals():
node.name = None
if linkage:
return tree, Z
else:
return tree
def writeTree(filename, tree, format_str='newick'):
""" Write a tree to file using Biopython.
:arg filename: name for output file
:type filename: str
    :arg tree: the tree to be written
:type tree: :class:`~Bio.Phylo.BaseTree.Tree`
:arg format_str: a string specifying the format for the tree
:type format_str: str
"""
try:
from Bio import Phylo
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
if not isinstance(filename, str):
raise TypeError('filename should be a string')
if not isinstance(tree, Phylo.BaseTree.Tree):
raise TypeError('tree should be a Biopython.Phylo Tree object')
if not isinstance(format_str, str):
raise TypeError('format_str should be a string')
Phylo.write(tree, filename, format_str)
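# Typical usage sketch (names and distance_matrix are placeholders):
#   tree = calcTree(names, distance_matrix, method='upgma')
#   writeTree('tree.nwk', tree, format_str='newick')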
def clusterMatrix(distance_matrix=None, similarity_matrix=None, labels=None, return_linkage=None, **kwargs):
"""
Cluster a distance matrix using scipy.cluster.hierarchy and
return the sorted matrix, indices used for sorting, sorted labels (if **labels** are passed),
    and linkage matrix (if **return_linkage** is **True**). Pass **similarity_matrix** for clustering a similarity matrix
:arg distance_matrix: an N-by-N matrix containing some measure of distance
such as 1. - seqid_matrix, rmsds, or distances in PCA space
    :type distance_matrix: :class:`~numpy.ndarray`
:arg similarity_matrix: an N-by-N matrix containing some measure of similarity
such as sequence identity, mode-mode overlap, or spectral overlap
:type similarity_matrix: :class:`~numpy.ndarray`
:arg labels: labels for each matrix row that can be returned sorted
:type labels: list
:arg no_plot: if **True**, don't plot the dendrogram.
                  Default is **True**
:type no_plot: bool
:arg reversed: if set to **True**, then the sorting indices will be reversed.
:type reversed: bool
Other arguments for :func:`~scipy.hierarchy.linkage` and :func:`~scipy.hierarchy.dendrogram`
can also be provided and will be taken as **kwargs**.
"""
import scipy.cluster.hierarchy as sch
from scipy import spatial
if similarity_matrix is None and distance_matrix is None:
raise ValueError('Please provide a distance matrix or a similarity matrix')
    orientation = kwargs.pop('orientation', 'right')
reversed = kwargs.pop('reversed', False)
no_plot = kwargs.pop('no_plot', True)
if distance_matrix is None:
matrix = similarity_matrix
distance_matrix = 1. - similarity_matrix
else:
matrix = distance_matrix
formatted_distance_matrix = spatial.distance.squareform(distance_matrix)
linkage_matrix = sch.linkage(formatted_distance_matrix, **kwargs)
sorting_dendrogram = sch.dendrogram(linkage_matrix, orientation=orientation, labels=labels, no_plot=no_plot)
indices = sorting_dendrogram['leaves']
sorted_labels = sorting_dendrogram['ivl']
if reversed:
indices = indices[::-1]
sorted_labels = sorted_labels[::-1]
sorted_matrix = matrix[indices, :]
sorted_matrix = sorted_matrix[:, indices]
return_vals = [sorted_matrix, indices]
if labels is not None:
return_vals.append(sorted_labels)
if return_linkage:
return_vals.append(linkage_matrix)
return tuple(return_vals) # convert to tuple to avoid [pylint] E0632:Possible unbalanced tuple unpacking
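# Usage sketch (rmsd_matrix is a placeholder square matrix):
#   sorted_matrix, indices = clusterMatrix(distance_matrix=rmsd_matrix)
#   sorted_matrix, indices, sorted_labels, Z = clusterMatrix(
#       distance_matrix=rmsd_matrix, labels=names, return_linkage=True)
# The returned `indices` can reorder any array aligned with the matrix rows.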
def showLines(*args, **kwargs):
"""
Show 1-D data using :func:`~matplotlib.axes.Axes.plot`.
:arg x: (optional) x coordinates. *x* can be an 1-D array or a 2-D matrix of
column vectors.
:type x: :class:`~numpy.ndarray`
:arg y: data array. *y* can be an 1-D array or a 2-D matrix of
column vectors.
:type y: :class:`~numpy.ndarray`
    :arg dy: an array of variances of *y* which will be plotted as a
             band along *y*. It should have the same shape as *y*.
    :type dy: :class:`~numpy.ndarray`
    :arg lower: an array of lower bounds which will be plotted as a
                band along *y*. It should have the same shape as *y* and should be
                paired with *upper*.
    :type lower: :class:`~numpy.ndarray`
    :arg upper: an array of upper bounds which will be plotted as a
                band along *y*. It should have the same shape as *y* and should be
                paired with *lower*.
:type upper: :class:`~numpy.ndarray`
:arg alpha: the transparency of the band(s) for plotting *dy*.
:type alpha: float
:arg beta: the transparency of the band(s) for plotting *miny* and *maxy*.
:type beta: float
:arg ticklabels: user-defined tick labels for x-axis.
:type ticklabels: list
"""
# note for developers: this function serves as a low-level
# plotting function which provides basic utilities for other
# plotting functions. Therefore showFigure is not handled
# in this function as it should be already handled in the caller.
ticklabels = kwargs.pop('ticklabels', None)
dy = kwargs.pop('dy', None)
miny = kwargs.pop('lower', None)
maxy = kwargs.pop('upper', None)
alpha = kwargs.pop('alpha', 0.5)
beta = kwargs.pop('beta', 0.25)
gap = kwargs.pop('gap', False)
labels = kwargs.pop('label', None)
from matplotlib import cm, ticker
from matplotlib.pyplot import figure, gca, xlim
ax = gca()
lines = ax.plot(*args, **kwargs)
polys = []
for i, line in enumerate(lines):
color = line.get_color()
x, y = line.get_data()
if gap:
x_new, y_new = addEnds(x, y)
line.set_data(x_new, y_new)
else:
x_new, y_new = x, y
if labels is not None:
if np.isscalar(labels):
line.set_label(labels)
else:
try:
line.set_label(labels[i])
except IndexError:
raise ValueError('The number of labels ({0}) and that of y ({1}) do not match.'
                                     .format(len(labels), len(lines)))
# the following function needs to be here so that line exists
def sub_array(a, i, tag='a'):
ndim = 0
if a is not None:
if np.isscalar(a[0]):
ndim = 1 # a plain list (array)
else:
ndim = 2 # a nested list (array)
else:
return None
if ndim == 1:
_a = a
else:
try:
_a = a[i]
except IndexError:
raise ValueError('The number of {2} ({0}) and that of y ({1}) do not match.'
                                    .format(len(a), len(lines), tag))
if len(_a) != len(y):
raise ValueError('The shapes of {2} ({0}) and y ({1}) do not match.'
                                .format(len(_a), len(y), tag))
return _a
if miny is not None and maxy is not None:
_miny = sub_array(miny, i)
_maxy = sub_array(maxy, i)
if gap:
_, _miny = addEnds(x, _miny)
_, _maxy = addEnds(x, _maxy)
poly = ax.fill_between(x_new, _miny, _maxy,
alpha=beta, facecolor=color, edgecolor=None,
linewidth=1, antialiased=True)
polys.append(poly)
if dy is not None:
_dy = sub_array(dy, i)
if gap:
_, _dy = addEnds(x, _dy)
poly = ax.fill_between(x_new, y_new-_dy, y_new+_dy,
alpha=alpha, facecolor=color, edgecolor=None,
linewidth=1, antialiased=True)
polys.append(poly)
ax.margins(x=0)
if ticklabels is not None:
if callable(ticklabels):
ax.get_xaxis().set_major_formatter(ticker.FuncFormatter(ticklabels))
else:
ax.get_xaxis().set_major_formatter(ticker.IndexFormatter(ticklabels))
ax.xaxis.set_major_locator(ticker.AutoLocator())
ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())
return lines, polys
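# Usage sketch (y and dy are placeholder 1-D arrays of equal length):
#   lines, polys = showLines(y, dy=dy, label='mobility', gap=True)
# `lines` holds the Line2D objects and `polys` the shaded variance bands.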
def showMatrix(matrix, x_array=None, y_array=None, **kwargs):
"""Show a matrix using :meth:`~matplotlib.axes.Axes.imshow`. Curves on x- and y-axis can be added.
:arg matrix: matrix to be displayed
:type matrix: :class:`~numpy.ndarray`
:arg x_array: data to be plotted above the matrix
:type x_array: :class:`~numpy.ndarray`
:arg y_array: data to be plotted on the left side of the matrix
:type y_array: :class:`~numpy.ndarray`
:arg percentile: a percentile threshold to remove outliers, i.e. only showing data within *p*-th
to *100-p*-th percentile
:type percentile: float
:arg interactive: turn on or off the interactive options
:type interactive: bool
:arg xtickrotation: how much to rotate the xticklabels in degrees
                        Default is 0
:type xtickrotation: float
"""
from matplotlib import ticker
from matplotlib.gridspec import GridSpec
from matplotlib.collections import LineCollection
from matplotlib.pyplot import gca, sca, sci, colorbar, subplot
from .drawtools import drawTree
p = kwargs.pop('percentile', None)
vmin = vmax = None
if p is not None:
vmin = np.percentile(matrix, p)
vmax = np.percentile(matrix, 100-p)
vmin = kwargs.pop('vmin', vmin)
vmax = kwargs.pop('vmax', vmax)
vcenter = kwargs.pop('vcenter', None)
norm = kwargs.pop('norm', None)
if vcenter is not None and norm is None:
if PY3K:
try:
from matplotlib.colors import DivergingNorm
except ImportError:
from matplotlib.colors import TwoSlopeNorm as DivergingNorm
norm = DivergingNorm(vmin=vmin, vcenter=0., vmax=vmax)
else:
LOGGER.warn('vcenter cannot be used in Python 2 so norm remains None')
lw = kwargs.pop('linewidth', 1)
W = H = kwargs.pop('ratio', 6)
ticklabels = kwargs.pop('ticklabels', None)
xticklabels = kwargs.pop('xticklabels', ticklabels)
yticklabels = kwargs.pop('yticklabels', ticklabels)
xtickrotation = kwargs.pop('xtickrotation', 0.)
show_colorbar = kwargs.pop('colorbar', True)
cb_extend = kwargs.pop('cb_extend', 'neither')
allticks = kwargs.pop('allticks', False) # this argument is temporary and will be replaced by better implementation
interactive = kwargs.pop('interactive', True)
cmap = kwargs.pop('cmap', 'jet')
origin = kwargs.pop('origin', 'lower')
try:
from Bio import Phylo
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
tree_mode_y = isinstance(y_array, Phylo.BaseTree.Tree)
tree_mode_x = isinstance(x_array, Phylo.BaseTree.Tree)
if x_array is not None and y_array is not None:
nrow = 2; ncol = 2
i = 1; j = 1
width_ratios = [1, W]
height_ratios = [1, H]
aspect = 'auto'
elif x_array is not None and y_array is None:
nrow = 2; ncol = 1
i = 1; j = 0
width_ratios = [W]
height_ratios = [1, H]
aspect = 'auto'
elif x_array is None and y_array is not None:
nrow = 1; ncol = 2
i = 0; j = 1
width_ratios = [1, W]
height_ratios = [H]
aspect = 'auto'
else:
nrow = 1; ncol = 1
i = 0; j = 0
width_ratios = [W]
height_ratios = [H]
aspect = kwargs.pop('aspect', None)
main_index = (i, j)
upper_index = (i-1, j)
left_index = (i, j-1)
complex_layout = nrow > 1 or ncol > 1
ax1 = ax2 = ax3 = None
if complex_layout:
gs = GridSpec(nrow, ncol, width_ratios=width_ratios,
height_ratios=height_ratios, hspace=0., wspace=0.)
## draw matrix
if complex_layout:
ax3 = subplot(gs[main_index])
else:
ax3 = gca()
im = ax3.imshow(matrix, aspect=aspect, vmin=vmin, vmax=vmax,
norm=norm, cmap=cmap, origin=origin, **kwargs)
#ax3.set_xlim([-0.5, matrix.shape[0]+0.5])
#ax3.set_ylim([-0.5, matrix.shape[1]+0.5])
if xticklabels is not None:
ax3.xaxis.set_major_formatter(ticker.IndexFormatter(xticklabels))
if yticklabels is not None and ncol == 1:
ax3.yaxis.set_major_formatter(ticker.IndexFormatter(yticklabels))
if allticks:
ax3.xaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
ax3.yaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
else:
locator = ticker.AutoLocator()
locator.set_params(integer=True)
minor_locator = ticker.AutoMinorLocator()
ax3.xaxis.set_major_locator(locator)
ax3.xaxis.set_minor_locator(minor_locator)
locator = ticker.AutoLocator()
locator.set_params(integer=True)
minor_locator = ticker.AutoMinorLocator()
ax3.yaxis.set_major_locator(locator)
ax3.yaxis.set_minor_locator(minor_locator)
if ncol > 1:
ax3.yaxis.set_major_formatter(ticker.NullFormatter())
## draw x_ and y_array
lines = []
if nrow > 1:
ax1 = subplot(gs[upper_index])
if tree_mode_x:
Y, X = drawTree(x_array, label_func=None, orientation='vertical',
inverted=True)
miny = min(Y.values())
maxy = max(Y.values())
minx = min(X.values())
maxx = max(X.values())
ax1.set_xlim(minx-.5, maxx+.5)
ax1.set_ylim(miny, 1.05*maxy)
else:
ax1.set_xticklabels([])
y = x_array
xp, yp = interpY(y)
points = np.array([xp, yp]).T.reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
lcy = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
lines.append(lcy)
ax1.add_collection(lcy)
ax1.set_xlim(xp.min()-.5, xp.max()+.5)
ax1.set_ylim(yp.min(), yp.max())
if ax3.xaxis_inverted():
                ax1.invert_xaxis()  # flip the top curve to match the matrix x-axis
ax1.axis('off')
if ncol > 1:
ax2 = subplot(gs[left_index])
if tree_mode_y:
X, Y = drawTree(y_array, label_func=None, inverted=True)
miny = min(Y.values())
maxy = max(Y.values())
minx = min(X.values())
maxx = max(X.values())
ax2.set_ylim(miny-.5, maxy+.5)
ax2.set_xlim(minx, 1.05*maxx)
else:
ax2.set_xticklabels([])
y = y_array
xp, yp = interpY(y)
points = np.array([yp, xp]).T.reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
lcx = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
lines.append(lcx)
ax2.add_collection(lcx)
ax2.set_xlim(yp.min(), yp.max())
ax2.set_ylim(xp.min()-.5, xp.max()+.5)
ax2.invert_xaxis()
if ax3.yaxis_inverted():
ax2.invert_yaxis()
ax2.axis('off')
## draw colorbar
sca(ax3)
cb = None
if show_colorbar:
if nrow > 1:
axes = [ax1, ax2, ax3]
while None in axes:
axes.remove(None)
s = H / (H + 1.)
cb = colorbar(mappable=im, ax=axes, anchor=(0, 0), shrink=s, extend=cb_extend)
else:
cb = colorbar(mappable=im, extend=cb_extend)
sca(ax3)
sci(im)
if interactive:
from prody.utilities import ImageCursor
from matplotlib.pyplot import connect
cursor = ImageCursor(ax3, im)
connect('button_press_event', cursor.onClick)
ax3.tick_params(axis='x', rotation=xtickrotation)
return im, lines, cb
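# Usage sketch (matrix is a placeholder 2-D array; tree from calcTree):
#   im, lines, cb = showMatrix(matrix, cmap='viridis')
#   im, lines, cb = showMatrix(matrix, y_array=tree)   # tree drawn on the left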
def reorderMatrix(names, matrix, tree, axis=None):
"""
Reorder a matrix based on a tree and return the reordered matrix
and indices for reordering other things.
:arg names: a list of names associated with the rows of the matrix
These names must match the ones used to generate the tree
:type names: list
:arg matrix: any square matrix
:type matrix: :class:`~numpy.ndarray`
:arg tree: any tree from :func:`calcTree`
:type tree: :class:`~Bio.Phylo.BaseTree.Tree`
:arg axis: along which axis the matrix should be reordered.
               Default is **None** which reorders along all the axes
:type axis: int
"""
try:
from Bio import Phylo
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
try:
if matrix.ndim != 2:
raise ValueError('matrix should be a 2D matrix.')
except AttributeError:
raise TypeError('matrix should be a numpy array.')
if np.shape(matrix)[0] != np.shape(matrix)[1]:
raise ValueError('matrix should be a square matrix')
names = np.asarray(names)
if np.isscalar(names):
raise TypeError('names should be list-like')
if not len(names):
raise TypeError('names is empty')
if not isinstance(tree, Phylo.BaseTree.Tree):
raise TypeError('tree should be a BioPython Tree')
if len(names) != len(matrix):
raise ValueError('names should have entries for each matrix row/column')
terminals = tree.get_terminals()
if len(names) != len(terminals):
raise ValueError('names should have entries for each tree terminal')
if len(terminals) != len(matrix):
raise ValueError('matrix should have a row for each tree terminal')
indices = []
for terminal in terminals:
name = terminal.name
locs = np.where(names == name)[0]
if not len(locs):
raise ValueError('inconsistent names and tree: %s not in names'%name)
if len(locs) > 1:
raise ValueError('inconsistent names and tree: duplicate name %s in names'%name)
indices.append(locs[0])
# rmatrix = matrix[:, indices]
# rmatrix = rmatrix[indices, :]
if axis is not None:
I = [np.arange(s) for s in matrix.shape]
axes = [axis] if np.isscalar(axis) else axis
for ax in axes:
I[ax] = indices
else:
I = [indices] * matrix.ndim
rmatrix = matrix[np.ix_(*I)]
return rmatrix, indices
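# Usage sketch: reorder a matrix so its rows/columns follow the tree's
# terminal order, then reuse `indices` for aligned label lists:
#   rmatrix, indices = reorderMatrix(names, matrix, tree)
#   reordered_names = [names[i] for i in indices]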
def findSubgroups(tree, c, method='naive', **kwargs):
"""
Divide a tree into subgroups using a criterion and a cutoff.
Returns a list of lists with labels divided into subgroups.
"""
method = method.lower().strip()
terminals = tree.get_terminals()
names = [clade.name for clade in terminals]
Z = None
if method != 'naive':
try:
Z = getLinkage(names, tree)
except LinkageError:
print('Failed to build linkage; fall back to naive criterion')
method = 'naive'
if method == 'naive':
subgroups = [[names[0]]]
for i in range(len(terminals)-1):
curr_clade = terminals[i]
next_clade = terminals[i + 1]
d = tree.distance(curr_clade, next_clade)
if d > c:
subgroups.append([])
subgroups[-1].append(next_clade.name)
else:
from scipy.cluster.hierarchy import fcluster
T = fcluster(Z, c, criterion=method, **kwargs)
labels = np.unique(T)
subgroups = [[] for _ in range(len(labels))]
for i, t in enumerate(T):
subgroups[t-1].append(names[i])
return subgroups
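# Usage sketch: with the default 'naive' criterion, a new subgroup starts
# whenever the tree distance between consecutive terminals exceeds c:
#   subgroups = findSubgroups(tree, c=1.5)
#   subgroups = findSubgroups(tree, c=4, method='maxclust')  # via scipy fcluster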
| 33.449692 | 122 | 0.594045 |
import numpy as np
from numpy import unique, linalg, diag, sqrt, dot
from Bio.Phylo.BaseTree import Tree, Clade
from prody import PY3K
from .misctools import addEnds, interpY, index, isListLike
from .checkers import checkCoords
from .logger import LOGGER
__all__ = ['calcTree', 'clusterMatrix', 'showLines', 'showMatrix',
'reorderMatrix', 'findSubgroups', 'getCoords',
'getLinkage', 'getTreeFromLinkage', 'clusterSubfamilies']
class LinkageError(Exception):
pass
def clusterSubfamilies(similarities, n_clusters=0, linkage='all', method='tsne', cutoff=0.0, **kwargs):
try:
from sklearn.manifold import SpectralEmbedding
from sklearn.cluster import AgglomerativeClustering
from sklearn.metrics import silhouette_score
from sklearn.manifold import TSNE
except ImportError:
raise ImportError('need sklearn module')
'''
try:
import Bio
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
'''
if not isinstance(similarities, np.ndarray):
raise TypeError('similarities should be a numpy ndarray')
dim = similarities.shape
if dim[0] != dim[1]:
raise ValueError('similarities must be a square matrix')
if n_clusters != 0:
if not isinstance(n_clusters, int):
raise TypeError('clusters must be an instance of int')
if n_clusters < 1:
raise ValueError('clusters must be a positive integer')
elif n_clusters > similarities.shape[0]:
raise ValueError('clusters can\'t be longer than similarities matrix')
nclusts = range(n_clusters,n_clusters+1)
else:
nclusts = range(2,10,1)
if linkage != 'all':
# Check if given input for linkage is list-like
if isListLike(linkage):
for val in linkage:
if val.lower() not in ['ward', 'average', 'complete', 'single']:
raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
if len(linkage) > 4:
raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
linkages = [ x.lower() for x in linkage ]
# If not, check if it is a valid string and method name
else:
if not isinstance(linkage, str):
raise TypeError('linkage must be an instance of str or list-like of strs')
if linkage not in ['ward', 'average', 'complete', 'single']:
                raise ValueError('linkage must be one or more of: \'ward\', \'average\', \'complete\', or \'single\'')
linkages = [linkage]
else:
linkages = ['ward', 'average', 'complete', 'single']
if method != 'tsne':
if not isinstance(method, str):
raise TypeError('method must be an instance of str')
if method != 'spectral':
raise ValueError('method must be either \'tsne\' or \'spectral\'')
if not isinstance(cutoff, float):
raise TypeError('cutoff must be an instance of float')
best_score = -1
best_nclust = 0
best_link = ''
best_labels = []
# Scan over range of clusters
for x in nclusts:
if method == 'tsne':
embedding = TSNE(n_components=2)
transform = embedding.fit_transform(similarities)
else:
kirchhoff = np.where(similarities > cutoff, 0, -1)
embedding = SpectralEmbedding(n_components=2)
transform = embedding.fit_transform(kirchhoff)
for link in linkages:
clustering = AgglomerativeClustering(linkage=link, n_clusters=x)
clustering.fit(transform)
silhouette_avg = silhouette_score(transform, clustering.labels_)
if silhouette_avg > best_score:
best_score = silhouette_avg
best_nclust = x
best_link = link
best_labels = clustering.labels_
return best_labels
def getCoords(data):
try:
data = (data._getCoords() if hasattr(data, '_getCoords') else
data.getCoords())
except AttributeError:
try:
checkCoords(data)
except TypeError:
raise TypeError('data must be a Numpy array or an object '
'with `getCoords` method')
return data
def getLinkage(names, tree):
tree_terminals = tree.get_terminals()
if len(tree_terminals) != len(names):
raise ValueError('inconsistent number of terminals in tree and names')
terminals = [None] * len(names)
for clade in tree_terminals:
i = index(names, clade.name)
terminals[i] = clade
n = len(terminals)
nonterminals = [c for c in reversed(tree.get_nonterminals())]
if len(nonterminals) != n-1:
raise LinkageError('wrong number of terminal clades')
Z = np.zeros((n-1, 4))
root = tree.root
def _indexOfClade(clade):
if clade.is_terminal():
i = index(terminals, clade)
else:
i = index(nonterminals, clade) + n
return i
def _height_of(clade):
if clade.is_terminal():
height = 0
else:
height = max(_height_of(c) + c.branch_length for c in clade.clades)
return height
def _dfs(clade):
if clade.is_terminal():
return
i = _indexOfClade(clade)
clade_a = clade.clades[0]
clade_b = clade.clades[1]
a = _indexOfClade(clade_a)
b = _indexOfClade(clade_b)
l = min(a, b)
r = max(a, b)
Z[i-n, 0] = l
Z[i-n, 1] = r
Z[i-n, 2] = _height_of(clade) * 2.
Z[i-n, 3] = clade.count_terminals()
_dfs(clade_a)
_dfs(clade_b)
_dfs(root)
return Z
def getTreeFromLinkage(names, linkage):
try:
import Bio
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
from Bio.Phylo.BaseTree import Tree, Clade
if not isinstance(linkage, np.ndarray):
raise TypeError('linkage must be a numpy.ndarray instance')
if linkage.ndim != 2:
raise LinkageError('linkage must be a 2-dimensional matrix')
if linkage.shape[1] != 4:
raise LinkageError('linkage must have exactly 4 columns')
n_terms = len(names)
if linkage.shape[0] != n_terms-1:
raise LinkageError('linkage must have exactly len(names)-1 rows')
clades = []
heights = []
for name in names:
clade = Clade(None, name)
clades.append(clade)
heights.append(0.)
for link in linkage:
l = int(link[0])
r = int(link[1])
height = link[2]
left = clades[l]
right = clades[r]
lh = heights[l]
rh = heights[r]
left.branch_length = height - lh
right.branch_length = height - rh
clade = Clade(None, None)
clade.clades.append(left)
clade.clades.append(right)
clades.append(clade)
heights.append(height)
return Tree(clade)
def calcTree(names, distance_matrix, method='upgma', linkage=False):
try:
import Bio
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
from .TreeConstruction import DistanceMatrix, DistanceTreeConstructor
if len(names) != distance_matrix.shape[0] or len(names) != distance_matrix.shape[1]:
raise ValueError("Mismatch between the sizes of matrix and names.")
method = method.lower().strip()
if method in ['ward', 'single', 'average', 'weighted', 'centroid', 'median']:
from scipy.cluster.hierarchy import linkage as hlinkage
from scipy.spatial.distance import squareform
Z = hlinkage(squareform(distance_matrix), method=method)
tree = getTreeFromLinkage(names, Z)
else:
matrix = []
k = 1
Z = None
for row in distance_matrix:
matrix.append(list(row[:k]))
k = k + 1
if isinstance(names, np.ndarray):
names = names.tolist()
dm = DistanceMatrix(names, matrix)
constructor = DistanceTreeConstructor()
method = method.strip().lower()
if method == 'nj':
tree = constructor.nj(dm)
elif method == 'upgma':
tree = constructor.upgma(dm)
if linkage:
Z = getLinkage(names, tree)
else:
raise ValueError('Method can be only either "nj", "upgma" or '
'hierarchical clustering such as "single", "average", etc.')
for node in tree.get_nonterminals():
node.name = None
if linkage:
return tree, Z
else:
return tree
def writeTree(filename, tree, format_str='newick'):
try:
from Bio import Phylo
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
if not isinstance(filename, str):
raise TypeError('filename should be a string')
if not isinstance(tree, Phylo.BaseTree.Tree):
raise TypeError('tree should be a Biopython.Phylo Tree object')
if not isinstance(format_str, str):
raise TypeError('format_str should be a string')
Phylo.write(tree, filename, format_str)
def clusterMatrix(distance_matrix=None, similarity_matrix=None, labels=None, return_linkage=None, **kwargs):
import scipy.cluster.hierarchy as sch
from scipy import spatial
if similarity_matrix is None and distance_matrix is None:
raise ValueError('Please provide a distance matrix or a similarity matrix')
    orientation = kwargs.pop('orientation', 'right')
reversed = kwargs.pop('reversed', False)
no_plot = kwargs.pop('no_plot', True)
if distance_matrix is None:
matrix = similarity_matrix
distance_matrix = 1. - similarity_matrix
else:
matrix = distance_matrix
formatted_distance_matrix = spatial.distance.squareform(distance_matrix)
linkage_matrix = sch.linkage(formatted_distance_matrix, **kwargs)
sorting_dendrogram = sch.dendrogram(linkage_matrix, orientation=orientation, labels=labels, no_plot=no_plot)
indices = sorting_dendrogram['leaves']
sorted_labels = sorting_dendrogram['ivl']
if reversed:
indices = indices[::-1]
sorted_labels = sorted_labels[::-1]
sorted_matrix = matrix[indices, :]
sorted_matrix = sorted_matrix[:, indices]
return_vals = [sorted_matrix, indices]
if labels is not None:
return_vals.append(sorted_labels)
if return_linkage:
return_vals.append(linkage_matrix)
return tuple(return_vals) # convert to tuple to avoid [pylint] E0632:Possible unbalanced tuple unpacking
def showLines(*args, **kwargs):
# note for developers: this function serves as a low-level
# plotting function which provides basic utilities for other
# plotting functions. Therefore showFigure is not handled
# in this function as it should be already handled in the caller.
ticklabels = kwargs.pop('ticklabels', None)
dy = kwargs.pop('dy', None)
miny = kwargs.pop('lower', None)
maxy = kwargs.pop('upper', None)
alpha = kwargs.pop('alpha', 0.5)
beta = kwargs.pop('beta', 0.25)
gap = kwargs.pop('gap', False)
labels = kwargs.pop('label', None)
from matplotlib import cm, ticker
from matplotlib.pyplot import figure, gca, xlim
ax = gca()
lines = ax.plot(*args, **kwargs)
polys = []
for i, line in enumerate(lines):
color = line.get_color()
x, y = line.get_data()
if gap:
x_new, y_new = addEnds(x, y)
line.set_data(x_new, y_new)
else:
x_new, y_new = x, y
if labels is not None:
if np.isscalar(labels):
line.set_label(labels)
else:
try:
line.set_label(labels[i])
except IndexError:
raise ValueError('The number of labels ({0}) and that of y ({1}) do not match.'
                                    .format(len(labels), len(lines)))
# the following function needs to be here so that line exists
def sub_array(a, i, tag='a'):
ndim = 0
if a is not None:
if np.isscalar(a[0]):
ndim = 1 # a plain list (array)
else:
ndim = 2 # a nested list (array)
else:
return None
if ndim == 1:
_a = a
else:
try:
_a = a[i]
except IndexError:
raise ValueError('The number of {2} ({0}) and that of y ({1}) do not match.'
                                    .format(len(a), len(lines), tag))
if len(_a) != len(y):
raise ValueError('The shapes of {2} ({0}) and y ({1}) do not match.'
                                .format(len(_a), len(y), tag))
return _a
if miny is not None and maxy is not None:
_miny = sub_array(miny, i)
_maxy = sub_array(maxy, i)
if gap:
_, _miny = addEnds(x, _miny)
_, _maxy = addEnds(x, _maxy)
poly = ax.fill_between(x_new, _miny, _maxy,
alpha=beta, facecolor=color, edgecolor=None,
linewidth=1, antialiased=True)
polys.append(poly)
if dy is not None:
_dy = sub_array(dy, i)
if gap:
_, _dy = addEnds(x, _dy)
poly = ax.fill_between(x_new, y_new-_dy, y_new+_dy,
alpha=alpha, facecolor=color, edgecolor=None,
linewidth=1, antialiased=True)
polys.append(poly)
ax.margins(x=0)
if ticklabels is not None:
if callable(ticklabels):
ax.get_xaxis().set_major_formatter(ticker.FuncFormatter(ticklabels))
else:
ax.get_xaxis().set_major_formatter(ticker.IndexFormatter(ticklabels))
ax.xaxis.set_major_locator(ticker.AutoLocator())
ax.xaxis.set_minor_locator(ticker.AutoMinorLocator())
return lines, polys
def showMatrix(matrix, x_array=None, y_array=None, **kwargs):
from matplotlib import ticker
from matplotlib.gridspec import GridSpec
from matplotlib.collections import LineCollection
from matplotlib.pyplot import gca, sca, sci, colorbar, subplot
from .drawtools import drawTree
p = kwargs.pop('percentile', None)
vmin = vmax = None
if p is not None:
vmin = np.percentile(matrix, p)
vmax = np.percentile(matrix, 100-p)
vmin = kwargs.pop('vmin', vmin)
vmax = kwargs.pop('vmax', vmax)
vcenter = kwargs.pop('vcenter', None)
norm = kwargs.pop('norm', None)
if vcenter is not None and norm is None:
if PY3K:
try:
from matplotlib.colors import DivergingNorm
except ImportError:
from matplotlib.colors import TwoSlopeNorm as DivergingNorm
norm = DivergingNorm(vmin=vmin, vcenter=0., vmax=vmax)
else:
LOGGER.warn('vcenter cannot be used in Python 2 so norm remains None')
lw = kwargs.pop('linewidth', 1)
W = H = kwargs.pop('ratio', 6)
ticklabels = kwargs.pop('ticklabels', None)
xticklabels = kwargs.pop('xticklabels', ticklabels)
yticklabels = kwargs.pop('yticklabels', ticklabels)
xtickrotation = kwargs.pop('xtickrotation', 0.)
show_colorbar = kwargs.pop('colorbar', True)
cb_extend = kwargs.pop('cb_extend', 'neither')
allticks = kwargs.pop('allticks', False) # this argument is temporary and will be replaced by better implementation
interactive = kwargs.pop('interactive', True)
cmap = kwargs.pop('cmap', 'jet')
origin = kwargs.pop('origin', 'lower')
try:
from Bio import Phylo
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
tree_mode_y = isinstance(y_array, Phylo.BaseTree.Tree)
tree_mode_x = isinstance(x_array, Phylo.BaseTree.Tree)
if x_array is not None and y_array is not None:
nrow = 2; ncol = 2
i = 1; j = 1
width_ratios = [1, W]
height_ratios = [1, H]
aspect = 'auto'
elif x_array is not None and y_array is None:
nrow = 2; ncol = 1
i = 1; j = 0
width_ratios = [W]
height_ratios = [1, H]
aspect = 'auto'
elif x_array is None and y_array is not None:
nrow = 1; ncol = 2
i = 0; j = 1
width_ratios = [1, W]
height_ratios = [H]
aspect = 'auto'
else:
nrow = 1; ncol = 1
i = 0; j = 0
width_ratios = [W]
height_ratios = [H]
aspect = kwargs.pop('aspect', None)
main_index = (i, j)
upper_index = (i-1, j)
left_index = (i, j-1)
complex_layout = nrow > 1 or ncol > 1
ax1 = ax2 = ax3 = None
if complex_layout:
gs = GridSpec(nrow, ncol, width_ratios=width_ratios,
height_ratios=height_ratios, hspace=0., wspace=0.)
## draw matrix
if complex_layout:
ax3 = subplot(gs[main_index])
else:
ax3 = gca()
im = ax3.imshow(matrix, aspect=aspect, vmin=vmin, vmax=vmax,
norm=norm, cmap=cmap, origin=origin, **kwargs)
#ax3.set_xlim([-0.5, matrix.shape[0]+0.5])
#ax3.set_ylim([-0.5, matrix.shape[1]+0.5])
if xticklabels is not None:
ax3.xaxis.set_major_formatter(ticker.IndexFormatter(xticklabels))
if yticklabels is not None and ncol == 1:
ax3.yaxis.set_major_formatter(ticker.IndexFormatter(yticklabels))
if allticks:
ax3.xaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
ax3.yaxis.set_major_locator(ticker.IndexLocator(offset=0.5, base=1.))
else:
locator = ticker.AutoLocator()
locator.set_params(integer=True)
minor_locator = ticker.AutoMinorLocator()
ax3.xaxis.set_major_locator(locator)
ax3.xaxis.set_minor_locator(minor_locator)
locator = ticker.AutoLocator()
locator.set_params(integer=True)
minor_locator = ticker.AutoMinorLocator()
ax3.yaxis.set_major_locator(locator)
ax3.yaxis.set_minor_locator(minor_locator)
if ncol > 1:
ax3.yaxis.set_major_formatter(ticker.NullFormatter())
## draw x_ and y_array
lines = []
if nrow > 1:
ax1 = subplot(gs[upper_index])
if tree_mode_x:
Y, X = drawTree(x_array, label_func=None, orientation='vertical',
inverted=True)
miny = min(Y.values())
maxy = max(Y.values())
minx = min(X.values())
maxx = max(X.values())
ax1.set_xlim(minx-.5, maxx+.5)
ax1.set_ylim(miny, 1.05*maxy)
else:
ax1.set_xticklabels([])
y = x_array
xp, yp = interpY(y)
points = np.array([xp, yp]).T.reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
lcy = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
lines.append(lcy)
ax1.add_collection(lcy)
ax1.set_xlim(xp.min()-.5, xp.max()+.5)
ax1.set_ylim(yp.min(), yp.max())
if ax3.xaxis_inverted():
                ax1.invert_xaxis()
ax1.axis('off')
if ncol > 1:
ax2 = subplot(gs[left_index])
if tree_mode_y:
X, Y = drawTree(y_array, label_func=None, inverted=True)
miny = min(Y.values())
maxy = max(Y.values())
minx = min(X.values())
maxx = max(X.values())
ax2.set_ylim(miny-.5, maxy+.5)
ax2.set_xlim(minx, 1.05*maxx)
else:
ax2.set_xticklabels([])
y = y_array
xp, yp = interpY(y)
points = np.array([yp, xp]).T.reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
lcx = LineCollection(segments, array=yp, linewidths=lw, cmap=cmap)
lines.append(lcx)
ax2.add_collection(lcx)
ax2.set_xlim(yp.min(), yp.max())
ax2.set_ylim(xp.min()-.5, xp.max()+.5)
ax2.invert_xaxis()
if ax3.yaxis_inverted():
ax2.invert_yaxis()
ax2.axis('off')
## draw colorbar
sca(ax3)
cb = None
if show_colorbar:
if nrow > 1:
axes = [ax1, ax2, ax3]
while None in axes:
axes.remove(None)
s = H / (H + 1.)
cb = colorbar(mappable=im, ax=axes, anchor=(0, 0), shrink=s, extend=cb_extend)
else:
cb = colorbar(mappable=im, extend=cb_extend)
sca(ax3)
sci(im)
if interactive:
from prody.utilities import ImageCursor
from matplotlib.pyplot import connect
cursor = ImageCursor(ax3, im)
connect('button_press_event', cursor.onClick)
ax3.tick_params(axis='x', rotation=xtickrotation)
return im, lines, cb
def reorderMatrix(names, matrix, tree, axis=None):
try:
from Bio import Phylo
except ImportError:
raise ImportError('Phylo module could not be imported. '
'Reinstall ProDy or install Biopython '
'to solve the problem.')
try:
if matrix.ndim != 2:
raise ValueError('matrix should be a 2D matrix.')
except AttributeError:
raise TypeError('matrix should be a numpy array.')
if np.shape(matrix)[0] != np.shape(matrix)[1]:
raise ValueError('matrix should be a square matrix')
names = np.asarray(names)
if np.isscalar(names):
raise TypeError('names should be list-like')
if not len(names):
raise TypeError('names is empty')
if not isinstance(tree, Phylo.BaseTree.Tree):
raise TypeError('tree should be a BioPython Tree')
if len(names) != len(matrix):
raise ValueError('names should have entries for each matrix row/column')
terminals = tree.get_terminals()
if len(names) != len(terminals):
raise ValueError('names should have entries for each tree terminal')
if len(terminals) != len(matrix):
raise ValueError('matrix should have a row for each tree terminal')
indices = []
for terminal in terminals:
name = terminal.name
locs = np.where(names == name)[0]
if not len(locs):
raise ValueError('inconsistent names and tree: %s not in names'%name)
if len(locs) > 1:
raise ValueError('inconsistent names and tree: duplicate name %s in names'%name)
indices.append(locs[0])
# rmatrix = matrix[:, indices]
# rmatrix = rmatrix[indices, :]
if axis is not None:
I = [np.arange(s) for s in matrix.shape]
axes = [axis] if np.isscalar(axis) else axis
for ax in axes:
I[ax] = indices
else:
I = [indices] * matrix.ndim
rmatrix = matrix[np.ix_(*I)]
return rmatrix, indices
def findSubgroups(tree, c, method='naive', **kwargs):
method = method.lower().strip()
terminals = tree.get_terminals()
names = [clade.name for clade in terminals]
Z = None
if method != 'naive':
try:
Z = getLinkage(names, tree)
except LinkageError:
print('Failed to build linkage; fall back to naive criterion')
method = 'naive'
if method == 'naive':
subgroups = [[names[0]]]
for i in range(len(terminals)-1):
curr_clade = terminals[i]
next_clade = terminals[i + 1]
d = tree.distance(curr_clade, next_clade)
if d > c:
subgroups.append([])
subgroups[-1].append(next_clade.name)
else:
from scipy.cluster.hierarchy import fcluster
T = fcluster(Z, c, criterion=method, **kwargs)
labels = np.unique(T)
subgroups = [[] for _ in range(len(labels))]
for i, t in enumerate(T):
subgroups[t-1].append(names[i])
return subgroups
| true | true |
f71ab0cfdecb4656998e375e331065ba5d5988ae | 15,809 | py | Python | fkie_iop_rqt_access_control/src/fkie_iop_rqt_access_control/robot.py | fkie/iop_gui | 918353b5767c6ff4a42b294316a03e08501fed28 | [
"BSD-3-Clause"
] | null | null | null | fkie_iop_rqt_access_control/src/fkie_iop_rqt_access_control/robot.py | fkie/iop_gui | 918353b5767c6ff4a42b294316a03e08501fed28 | [
"BSD-3-Clause"
] | null | null | null | fkie_iop_rqt_access_control/src/fkie_iop_rqt_access_control/robot.py | fkie/iop_gui | 918353b5767c6ff4a42b294316a03e08501fed28 | [
"BSD-3-Clause"
] | 1 | 2018-11-27T03:39:41.000Z | 2018-11-27T03:39:41.000Z | # ROS/IOP Bridge
# Copyright (c) 2017 Fraunhofer
#
# This program is dual licensed; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2 as published by the Free Software Foundation, or
# enter into a proprietary license agreement with the copyright
# holder.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; or you can read the full license at
# <http://www.gnu.de/documents/gpl-2.0.html>
#
# :author: Alexander Tiderko
import os
from python_qt_binding import loadUi
from python_qt_binding.QtCore import QObject, Signal, Qt
from python_qt_binding.QtGui import QIcon
try:
from python_qt_binding.QtGui import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
except ImportError:
from python_qt_binding.QtWidgets import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
import rospy
from .address import Address
from fkie_iop_msgs.msg import OcuCmdEntry, JausAddress
from .handoff_dialog import HandoffDialog
class Robot(QObject):
MAX_AGE = 30
control_activated = Signal(Address)
control_deactivated = Signal(Address)
view_activated = Signal(Address)
view_deactivated = Signal(Address)
def __init__(self, subsystem, settings, authority=205):
QObject.__init__(self)
self._subsystem = subsystem
self._settings = settings
self._authority = authority
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'robot.ui')
self._widget = QWidget()
loadUi(ui_file, self._widget)
self._last_update = rospy.Time.now()
self._component_names = dict()
self._warnings = []
self._feedback_warnings = dict()
self._ocu_client = None
# address reported by access control client
self._control_addr = Address(JausAddress())
self._warning_dialog = self._create_warning_dialog()
self._detailed_dialog = self._create_detailed_dialog()
self.handoff_dialog = HandoffDialog(self.name, self.subsystem_id, self._settings, self._widget)
self.handoff_dialog.button_blink.connect(self._widget.button_handoff.setEnabled)
self._widget.button_view.clicked.connect(self._on_robot_view)
self._widget.button_control.setText("%s - %d" % (subsystem.ident.name, self._subsystem.ident.address.subsystem_id))
self._widget.button_control.clicked.connect(self._on_robot_control)
self._widget.button_control.setObjectName(subsystem.ident.name)
self._widget.button_handoff.setEnabled(False)
self._widget.button_handoff.clicked.connect(self.on_show_handoff)
self._widget.button_warnings.setEnabled(False)
self._widget.button_warnings.clicked.connect(self.on_show_warnings)
self._widget.button_details.clicked.connect(self.on_show_details)
def __del__(self):
self.handoff_dialog.setParent(None)
self.handoff_dialog.shutdown()
self.handoff_dialog = None
self._detailed_dialog = None
self._warning_dialog = None
self._ocu_client = None
self._feedback_warnings.clear()
self._component_names.clear()
del self._warnings[:]
@property
def name(self):
return self._subsystem.ident.name
@property
def subsystem_id(self):
# return the subsystem_id of the robot
return self._subsystem.ident.address.subsystem_id
@property
def ocu_client(self):
return self._ocu_client
@ocu_client.setter
def ocu_client(self, ocu_client):
self.set_warnings([])
if self._ocu_client is not None:
self._ocu_client.control_subsystem = -1
self._ocu_client = ocu_client
if self._ocu_client is not None:
self._ocu_client.control_subsystem = self.subsystem_id
if ocu_client.subsystem_restricted == self.subsystem_id:
self._widget.button_control.setEnabled(not ocu_client.only_monitor)
self.handoff_dialog.set_client(self._ocu_client)
self.update_feedback_warnings()
elif self.has_view() or self.has_control():
self.set_warnings(["No free OCU client available!", "Start an ocu_client with different nodeID to be able to listen for sensors on second robot."])
self.handoff_dialog.set_client(None)
if self._ocu_client is not None:
self._widget.button_handoff.setVisible(self._ocu_client.has_handoff_publisher())
else:
self._widget.button_handoff.setVisible(True)
@property
def ocu_client_restricted(self):
if self._ocu_client is not None:
if self._ocu_client.subsystem_restricted == self.subsystem_id:
return self._ocu_client
return None
@property
def control_addr(self):
return self._control_addr
@control_addr.setter
def control_addr(self, address):
self._control_addr = address
self._update_warnings_button()
def set_control_active(self, state):
self._widget.button_control.setEnabled(state)
def _on_robot_control(self, checked=False):
        '''
        Click on the control robot button. Switches to the controlled or
        the released state. Publishes the signals: control_activated or
        control_deactivated.
        '''
addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
if checked:
self._widget.button_view.setChecked(checked)
self.control_activated.emit(addr)
self.handoff_dialog.on_access = True
else:
self.release_control()
self.control_deactivated.emit(addr)
self.handoff_dialog.cancel_handoff()
self.handoff_dialog.on_access = False
# if self.has_view():
# self.view_activated.emit(addr)
def _on_robot_view(self, checked=False):
        '''
        Click on the view robot button. Switches to the monitored or the
        not controlled state. Publishes the signals: view_activated or
        view_deactivated (and control_deactivated if control was active).
        '''
addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
if checked:
self._widget.button_view.setChecked(checked)
self.view_activated.emit(addr)
else:
if self.has_control():
self._widget.button_control.setChecked(False)
self.control_deactivated.emit(addr)
self.view_deactivated.emit(addr)
def has_control(self):
return self._widget.button_control.isChecked()
def has_view(self):
return self._widget.button_view.isChecked()
def release_control(self):
self._widget.button_view.setChecked(False)
self._widget.button_control.setChecked(False)
def activate_view(self):
self._widget.button_view.setChecked(True)
def state_to_cmd(self):
cmd = OcuCmdEntry()
cmd.authority = self._settings.authority
cmd.name = self.name
cmd.address.subsystem_id = self._subsystem.ident.address.subsystem_id
cmd.address.node_id = 255
cmd.address.component_id = 255
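        # access_control codes written below (assumption, inferred from the
        # button states in this file): 12 = request exclusive control,
        # 11 = monitor/view only, 10 = release / no access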
if self._widget.button_control.isChecked():
cmd.access_control = 12
elif self._widget.button_view.isChecked():
cmd.access_control = 11
else:
cmd.access_control = 10
if self.ocu_client is not None:
cmd.ocu_client = self.ocu_client.address
else:
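            # no OCU client assigned: fill in wildcard/broadcast values
            # (65535 for the subsystem, 255 for node and component)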
cmd.ocu_client.subsystem_id = 65535
cmd.ocu_client.node_id = 255
cmd.ocu_client.component_id = 255
return cmd
def update(self, subsystem):
        '''
        Applies the updated description of the subsystem.
        :type subsystem: fkie_iop_msgs/System
        '''
if self._subsystem.ident.address.subsystem_id != subsystem.ident.address.subsystem_id:
return False
# if self._subsystem.ident.node_id != subsystem.ident.node_id:
# return False
if self._subsystem.ident.name != subsystem.ident.name:
return False
self._subsystem = subsystem
# self._last_update = rospy.Time.now()
return True
def on_show_handoff(self):
self.handoff_dialog.setVisible(not self.handoff_dialog.isVisible())
def on_show_details(self):
'''
Shows the subsystem in a new dialog as tree view.
'''
twc = self._detailed_dialog.treewidget_components
twc.clear()
client_info = "OCU client: ---"
if self._ocu_client is not None:
add_info = ''
if self.ocu_client.subsystem_restricted == self.subsystem_id:
if self.ocu_client.only_monitor:
add_info = ' [restricted, only monitor]'
else:
add_info = ' [restricted]'
client_info = "OCU client: %s%s" % (self.ocu_client.address, add_info)
elif self.control_addr.subsystem_id != 0:
client_info = 'Controlled by other OCU: %s' % self.control_addr
self._detailed_dialog.label_info.setText(client_info)
if self.name == self._subsystem.ident.name:
for node in self._subsystem.nodes:
node_item = QTreeWidgetItem(twc)
node_name = node.ident.name if node.ident.name else "NODE"
node_item.setText(0, "%s [id: %d]" % (node_name, node.ident.address.node_id))
for comp in node.components:
cmp_item = QTreeWidgetItem(node_item)
cmp_name = self._get_component_name(comp.address)
cmp_item.setText(0, "%s [%d.%d.%d]" % (cmp_name, comp.address.subsystem_id, comp.address.node_id, comp.address.component_id))
twc.expandItem(node_item)
for srv in comp.services:
srv_item = QTreeWidgetItem(cmp_item)
srv_item.setText(0, "%s v%d.%d" % (srv.uri, srv.major_version, srv.minor_version))
if self._detailed_dialog.isVisible():
self._detailed_dialog.setFocus(Qt.ActiveWindowFocusReason)
else:
self._detailed_dialog.show()
def on_show_warnings(self):
'''
Shows warning received by feedback.
'''
text_browser = self._warning_dialog.warnings
text_browser.clear()
if not self._warnings and not self._feedback_warnings:
text_browser.append('No known warnings!')
else:
for msg in self._warnings:
text_browser.append(msg)
if self._feedback_warnings:
text_browser.append('Services with warning state:')
for client, service_infos in self._feedback_warnings.items():
text_browser.append("Client %s:" % client)
for service_info in service_infos:
text_browser.append(" %s[%s]: %s" % (service_info.uri, Address(service_info.addr_control), self.access_state_to_str(service_info.access_state)))
self._warning_dialog.show()
def update_feedback_warnings(self):
        '''
        Collects the current warnings of the assigned OCU client as
        dict(Address of the OCU client: ServiceInfo), forwards
        insufficient-authority reports to the handoff dialog and refreshes
        the warnings button.
        '''
# get all warnings for each subsystem
warnings = dict()
if self._ocu_client is not None:
cw = self._ocu_client.get_warnings(self.subsystem_id, self.has_control())
warnings.update(cw)
# get insufficient authority reports to update handoff state button
insathority = dict()
cw = self._ocu_client.get_srvs_ins_authority(self.subsystem_id)
insathority.update(cw)
# update insufficient authority to activate handoff dialog
self.handoff_dialog.update_authority_problems(insathority)
self._feedback_warnings = warnings
self._update_warnings_button()
def set_warnings(self, warnings):
        '''
        :type warnings: list of strings
        '''
self._warnings = warnings
self._update_warnings_button()
def _update_warnings_button(self):
has_warning = (len(self._warnings) + len(self._feedback_warnings)) > 0
if has_warning and self.has_control():
self._widget.button_control.setStyleSheet("QPushButton { background-color: #FE9A2E;}")
elif self.has_control():
self._widget.button_control.setStyleSheet("QPushButton { background-color: #98FB98;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
elif self.has_view():
self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
elif self.control_addr.subsystem_id != 0 and (self._ocu_client is None or self.control_addr.subsystem_id != self._ocu_client.subsystem_id):
self._widget.button_control.setStyleSheet("QPushButton { background-color: #A9A9A9;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
else:
self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_warnings.setEnabled(has_warning)
def update_ident(self, ident):
if Address(ident.address) == Address(self._subsystem.ident.address):
self._last_update = rospy.Time.now()
if ident.system_type == 60001 or ident.request_type == 4:
if ident.address.subsystem_id == self._subsystem.ident.address.subsystem_id:
self._component_names[Address(ident.address)] = ident.name
return False
def _get_component_name(self, msg_address):
addr = Address(msg_address)
try:
return self._component_names[addr]
except Exception:
pass
return "Component"
def is_old(self):
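        # stale if no ident update was received within the last MAX_AGE seconds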
return rospy.Time.now() - self._last_update > rospy.Duration(self.MAX_AGE)
def get_widget(self):
return self._widget
def _create_warning_dialog(self):
diag = QDialog(self._widget)
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'warning_info.ui')
loadUi(ui_file, diag)
diag.resize(600, 250)
diag.setWindowTitle("Warning for %s[%d]" % (self.name, self.subsystem_id))
diag.setWindowIcon(QIcon.fromTheme("dialog-warning"))
return diag
def _create_detailed_dialog(self):
diag = QDialog(self._widget)
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'system_info.ui')
loadUi(ui_file, diag)
diag.treewidget_components.setHeaderLabel("%s [%d]" % (self.name, self.subsystem_id))
diag.resize(500, 300)
diag.setWindowTitle("subsystem %s[%d]" % (self.name, self.subsystem_id))
diag.setWindowIcon(QIcon.fromTheme("help-about"))
return diag
def access_state_to_str(self, state):
if state == 0:
return 'NOT_AVAILABLE'
if state == 1:
return 'NOT_CONTROLLED'
if state == 2:
return 'CONTROL_RELEASED'
if state == 3:
return 'CONTROL_ACCEPTED'
if state == 4:
return 'TIMEOUT'
if state == 5:
return 'INSUFFICIENT_AUTHORITY'
if state == 6:
return 'MONITORING'
return 'UNKNOWN'
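    # The if-chain above maps the numeric access states one-to-one to their
    # names; a minimal dict-based sketch of the same mapping (illustration
    # only, not part of the original class):
    #
    #     _ACCESS_STATE_NAMES = {
    #         0: 'NOT_AVAILABLE', 1: 'NOT_CONTROLLED', 2: 'CONTROL_RELEASED',
    #         3: 'CONTROL_ACCEPTED', 4: 'TIMEOUT',
    #         5: 'INSUFFICIENT_AUTHORITY', 6: 'MONITORING',
    #     }
    #
    #     def access_state_to_str(self, state):
    #         return self._ACCESS_STATE_NAMES.get(state, 'UNKNOWN')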
| 41.712401 | 171 | 0.656335 |
import os
from python_qt_binding import loadUi
from python_qt_binding.QtCore import QObject, Signal, Qt
from python_qt_binding.QtGui import QIcon
try:
from python_qt_binding.QtGui import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
except ImportError:
from python_qt_binding.QtWidgets import QWidget, QDialog, QTreeWidget, QTreeWidgetItem
import rospy
from .address import Address
from fkie_iop_msgs.msg import OcuCmdEntry, JausAddress
from .handoff_dialog import HandoffDialog
class Robot(QObject):
MAX_AGE = 30
control_activated = Signal(Address)
control_deactivated = Signal(Address)
view_activated = Signal(Address)
view_deactivated = Signal(Address)
def __init__(self, subsystem, settings, authority=205):
QObject.__init__(self)
self._subsystem = subsystem
self._settings = settings
self._authority = authority
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'robot.ui')
self._widget = QWidget()
loadUi(ui_file, self._widget)
self._last_update = rospy.Time.now()
self._component_names = dict()
self._warnings = []
self._feedback_warnings = dict()
self._ocu_client = None
self._control_addr = Address(JausAddress())
self._warning_dialog = self._create_warning_dialog()
self._detailed_dialog = self._create_detailed_dialog()
self.handoff_dialog = HandoffDialog(self.name, self.subsystem_id, self._settings, self._widget)
self.handoff_dialog.button_blink.connect(self._widget.button_handoff.setEnabled)
self._widget.button_view.clicked.connect(self._on_robot_view)
self._widget.button_control.setText("%s - %d" % (subsystem.ident.name, self._subsystem.ident.address.subsystem_id))
self._widget.button_control.clicked.connect(self._on_robot_control)
self._widget.button_control.setObjectName(subsystem.ident.name)
self._widget.button_handoff.setEnabled(False)
self._widget.button_handoff.clicked.connect(self.on_show_handoff)
self._widget.button_warnings.setEnabled(False)
self._widget.button_warnings.clicked.connect(self.on_show_warnings)
self._widget.button_details.clicked.connect(self.on_show_details)
def __del__(self):
self.handoff_dialog.setParent(None)
self.handoff_dialog.shutdown()
self.handoff_dialog = None
self._detailed_dialog = None
self._warning_dialog = None
self._ocu_client = None
self._feedback_warnings.clear()
self._component_names.clear()
del self._warnings[:]
@property
def name(self):
return self._subsystem.ident.name
@property
def subsystem_id(self):
return self._subsystem.ident.address.subsystem_id
@property
def ocu_client(self):
return self._ocu_client
@ocu_client.setter
def ocu_client(self, ocu_client):
self.set_warnings([])
if self._ocu_client is not None:
self._ocu_client.control_subsystem = -1
self._ocu_client = ocu_client
if self._ocu_client is not None:
self._ocu_client.control_subsystem = self.subsystem_id
if ocu_client.subsystem_restricted == self.subsystem_id:
self._widget.button_control.setEnabled(not ocu_client.only_monitor)
self.handoff_dialog.set_client(self._ocu_client)
self.update_feedback_warnings()
elif self.has_view() or self.has_control():
self.set_warnings(["No free OCU client available!", "Start an ocu_client with different nodeID to be able to listen for sensors on second robot."])
self.handoff_dialog.set_client(None)
if self._ocu_client is not None:
self._widget.button_handoff.setVisible(self._ocu_client.has_handoff_publisher())
else:
self._widget.button_handoff.setVisible(True)
@property
def ocu_client_restricted(self):
if self._ocu_client is not None:
if self._ocu_client.subsystem_restricted == self.subsystem_id:
return self._ocu_client
return None
@property
def control_addr(self):
return self._control_addr
@control_addr.setter
def control_addr(self, address):
self._control_addr = address
self._update_warnings_button()
def set_control_active(self, state):
self._widget.button_control.setEnabled(state)
def _on_robot_control(self, checked=False):
addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
if checked:
self._widget.button_view.setChecked(checked)
self.control_activated.emit(addr)
self.handoff_dialog.on_access = True
else:
self.release_control()
self.control_deactivated.emit(addr)
self.handoff_dialog.cancel_handoff()
self.handoff_dialog.on_access = False
def _on_robot_view(self, checked=False):
addr = Address(JausAddress(self._subsystem.ident.address.subsystem_id, 255, 255))
if checked:
self._widget.button_view.setChecked(checked)
self.view_activated.emit(addr)
else:
if self.has_control():
self._widget.button_control.setChecked(False)
self.control_deactivated.emit(addr)
self.view_deactivated.emit(addr)
def has_control(self):
return self._widget.button_control.isChecked()
def has_view(self):
return self._widget.button_view.isChecked()
def release_control(self):
self._widget.button_view.setChecked(False)
self._widget.button_control.setChecked(False)
def activate_view(self):
self._widget.button_view.setChecked(True)
def state_to_cmd(self):
cmd = OcuCmdEntry()
cmd.authority = self._settings.authority
cmd.name = self.name
cmd.address.subsystem_id = self._subsystem.ident.address.subsystem_id
cmd.address.node_id = 255
cmd.address.component_id = 255
if self._widget.button_control.isChecked():
cmd.access_control = 12
elif self._widget.button_view.isChecked():
cmd.access_control = 11
else:
cmd.access_control = 10
if self.ocu_client is not None:
cmd.ocu_client = self.ocu_client.address
else:
cmd.ocu_client.subsystem_id = 65535
cmd.ocu_client.node_id = 255
cmd.ocu_client.component_id = 255
return cmd
def update(self, subsystem):
if self._subsystem.ident.address.subsystem_id != subsystem.ident.address.subsystem_id:
return False
if self._subsystem.ident.name != subsystem.ident.name:
return False
self._subsystem = subsystem
return True
def on_show_handoff(self):
self.handoff_dialog.setVisible(not self.handoff_dialog.isVisible())
def on_show_details(self):
twc = self._detailed_dialog.treewidget_components
twc.clear()
client_info = "OCU client: ---"
if self._ocu_client is not None:
add_info = ''
if self.ocu_client.subsystem_restricted == self.subsystem_id:
if self.ocu_client.only_monitor:
add_info = ' [restricted, only monitor]'
else:
add_info = ' [restricted]'
client_info = "OCU client: %s%s" % (self.ocu_client.address, add_info)
elif self.control_addr.subsystem_id != 0:
client_info = 'Controlled by other OCU: %s' % self.control_addr
self._detailed_dialog.label_info.setText(client_info)
if self.name == self._subsystem.ident.name:
for node in self._subsystem.nodes:
node_item = QTreeWidgetItem(twc)
node_name = node.ident.name if node.ident.name else "NODE"
node_item.setText(0, "%s [id: %d]" % (node_name, node.ident.address.node_id))
for comp in node.components:
cmp_item = QTreeWidgetItem(node_item)
cmp_name = self._get_component_name(comp.address)
cmp_item.setText(0, "%s [%d.%d.%d]" % (cmp_name, comp.address.subsystem_id, comp.address.node_id, comp.address.component_id))
twc.expandItem(node_item)
for srv in comp.services:
srv_item = QTreeWidgetItem(cmp_item)
srv_item.setText(0, "%s v%d.%d" % (srv.uri, srv.major_version, srv.minor_version))
if self._detailed_dialog.isVisible():
self._detailed_dialog.setFocus(Qt.ActiveWindowFocusReason)
else:
self._detailed_dialog.show()
def on_show_warnings(self):
text_browser = self._warning_dialog.warnings
text_browser.clear()
if not self._warnings and not self._feedback_warnings:
text_browser.append('No known warnings!')
else:
for msg in self._warnings:
text_browser.append(msg)
if self._feedback_warnings:
text_browser.append('Services with warning state:')
for client, service_infos in self._feedback_warnings.items():
text_browser.append("Client %s:" % client)
for service_info in service_infos:
text_browser.append(" %s[%s]: %s" % (service_info.uri, Address(service_info.addr_control), self.access_state_to_str(service_info.access_state)))
self._warning_dialog.show()
def update_feedback_warnings(self):
warnings = dict()
if self._ocu_client is not None:
cw = self._ocu_client.get_warnings(self.subsystem_id, self.has_control())
warnings.update(cw)
insathority = dict()
cw = self._ocu_client.get_srvs_ins_authority(self.subsystem_id)
insathority.update(cw)
self.handoff_dialog.update_authority_problems(insathority)
self._feedback_warnings = warnings
self._update_warnings_button()
def set_warnings(self, warnings):
self._warnings = warnings
self._update_warnings_button()
def _update_warnings_button(self):
has_warning = (len(self._warnings) + len(self._feedback_warnings)) > 0
if has_warning and self.has_control():
self._widget.button_control.setStyleSheet("QPushButton { background-color: #FE9A2E;}")
elif self.has_control():
self._widget.button_control.setStyleSheet("QPushButton { background-color: #98FB98;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
elif self.has_view():
self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: #98FB98;}")
elif self.control_addr.subsystem_id != 0 and (self._ocu_client is None or self.control_addr.subsystem_id != self._ocu_client.subsystem_id):
self._widget.button_control.setStyleSheet("QPushButton { background-color: #A9A9A9;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
else:
self._widget.button_control.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_view.setStyleSheet("QPushButton { background-color: None;}")
self._widget.button_warnings.setEnabled(has_warning)
def update_ident(self, ident):
if Address(ident.address) == Address(self._subsystem.ident.address):
self._last_update = rospy.Time.now()
if ident.system_type == 60001 or ident.request_type == 4:
if ident.address.subsystem_id == self._subsystem.ident.address.subsystem_id:
self._component_names[Address(ident.address)] = ident.name
return False
def _get_component_name(self, msg_address):
addr = Address(msg_address)
try:
return self._component_names[addr]
except Exception:
pass
return "Component"
def is_old(self):
return rospy.Time.now() - self._last_update > rospy.Duration(self.MAX_AGE)
def get_widget(self):
return self._widget
def _create_warning_dialog(self):
diag = QDialog(self._widget)
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'warning_info.ui')
loadUi(ui_file, diag)
diag.resize(600, 250)
diag.setWindowTitle("Warning for %s[%d]" % (self.name, self.subsystem_id))
diag.setWindowIcon(QIcon.fromTheme("dialog-warning"))
return diag
def _create_detailed_dialog(self):
diag = QDialog(self._widget)
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'system_info.ui')
loadUi(ui_file, diag)
diag.treewidget_components.setHeaderLabel("%s [%d]" % (self.name, self.subsystem_id))
diag.resize(500, 300)
diag.setWindowTitle("subsystem %s[%d]" % (self.name, self.subsystem_id))
diag.setWindowIcon(QIcon.fromTheme("help-about"))
return diag
def access_state_to_str(self, state):
if state == 0:
return 'NOT_AVAILABLE'
if state == 1:
return 'NOT_CONTROLLED'
if state == 2:
return 'CONTROL_RELEASED'
if state == 3:
return 'CONTROL_ACCEPTED'
if state == 4:
return 'TIMEOUT'
if state == 5:
return 'INSUFFICIENT_AUTHORITY'
if state == 6:
return 'MONITORING'
return 'UNKNOWN'
| true | true |
f71ab0e75e50d66af2bfe69ef2fd8400a56a4fd4 | 1,903 | py | Python | Assessments 1-8/Ass8/Q2_b_1.py | ZHANG-CAIQI/COMP1001 | abfad8101b4b58697dfbc8599eebf466beebb9ec | [
"MIT"
] | 1 | 2020-05-17T03:28:17.000Z | 2020-05-17T03:28:17.000Z | Assessments 1-8/Ass8/Q2_b_1.py | ZHANG-CAIQI/COMP1001 | abfad8101b4b58697dfbc8599eebf466beebb9ec | [
"MIT"
] | null | null | null | Assessments 1-8/Ass8/Q2_b_1.py | ZHANG-CAIQI/COMP1001 | abfad8101b4b58697dfbc8599eebf466beebb9ec | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import numpy as np
def stockUp(priceFile):
# read the file
infile = open(priceFile, "r")
date = []
stock = []
# store only the dates and closing price
day = 1
firstLine = True
for line in infile:
if firstLine:
firstLine = False
else:
count_item = 0
for item in line.split(","):
if count_item == 0:
date.append(day)
elif count_item == 4:
stock.append(float(item))
count_item += 1
day += 1
infile.close()
    # Compute the up periods: up[k] counts the consecutive days ending at
    # day k (the day itself included) whose closing price is less than or
    # equal to the closing price on day k
    up = len(date)*[0]
    for k in range(len(stock)):
        i = k # start at day k itself, then walk backwards
        while ((i >= 0) and stock[k] >= stock[i]):
            up[k] += 1
            i -= 1
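    # Worked example (hypothetical prices, not taken from GOOGL.csv): for
    # closing prices [3, 1, 2, 2, 5] the loop above yields
    # up = [1, 1, 2, 3, 5]; e.g. day 4 (price 5) is >= every earlier
    # closing price, so its up period spans all 5 days.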
fig, ax1 = plt.subplots()
color = 'tab:red'
ax1.set_xlabel('Days started from 11/13/2017 and end on 11/12/2018')
ax1.set_ylabel('Stock prices', color=color)
ax1.plot(date, stock, color=color)
ax1.tick_params(axis='y', labelcolor=color)
ax2 = ax1.twinx() # instantiate a second axes that shares the same x-axis
color = 'tab:blue'
ax2.set_ylabel('Up periods', color=color) # we already handled the x-label with ax1
ax2.plot(date, up, color=color)
ax2.tick_params(axis='y', labelcolor=color)
fig.tight_layout() # otherwise the right y-label is slightly clipped
plt.show()
return
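    # The triple-quoted block below is an unreachable alternative plot kept
    # for reference; it never runs because of the ``return`` above.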
"""
plt.plot(date, up, marker='x')
plt.plot(date, stock, marker='o')
plt.title('The up periods for 11/13/2017-11/12/2018')
plt.xlabel('Days started from 11/13/2017 and end on 11/12/2018')
plt.ylabel('The up periods of GOOGL at closing')
plt.show()
"""
stockUp("GOOGL.csv")
| 27.185714 | 89 | 0.543878 | import matplotlib.pyplot as plt
import numpy as np
def stockUp(priceFile):
infile = open(priceFile, "r")
date = []
stock = []
day = 1
firstLine = True
for line in infile:
if firstLine:
firstLine = False
else:
count_item = 0
for item in line.split(","):
if count_item == 0:
date.append(day)
elif count_item == 4:
stock.append(float(item))
count_item += 1
day += 1
infile.close()
up = len(date)*[0]
    for k in range(len(stock)):
        i = k
        while ((i >= 0) and stock[k] >= stock[i]):
            up[k] += 1
            i -= 1
fig, ax1 = plt.subplots()
color = 'tab:red'
ax1.set_xlabel('Days started from 11/13/2017 and end on 11/12/2018')
ax1.set_ylabel('Stock prices', color=color)
ax1.plot(date, stock, color=color)
ax1.tick_params(axis='y', labelcolor=color)
ax2 = ax1.twinx()
color = 'tab:blue'
ax2.set_ylabel('Up periods', color=color)
ax2.plot(date, up, color=color)
ax2.tick_params(axis='y', labelcolor=color)
fig.tight_layout()
plt.show()
return
stockUp("GOOGL.csv")
| true | true |
f71ab0f98895a9582d987bf35cfa556cbf1224e1 | 694 | py | Python | GENERAL/slots_manager.py | Couso99/EEG-Environment | d67de00c08c5892baebe5bf993cac0a5db6e70b1 | [
"MIT"
] | null | null | null | GENERAL/slots_manager.py | Couso99/EEG-Environment | d67de00c08c5892baebe5bf993cac0a5db6e70b1 | [
"MIT"
] | null | null | null | GENERAL/slots_manager.py | Couso99/EEG-Environment | d67de00c08c5892baebe5bf993cac0a5db6e70b1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
@author: %(Mikel Val Calvo)s
@email: %(mikel1982mail@gmail.com)
@institution: %(Dpto. de Inteligencia Artificial, Universidad Nacional de Educación a Distancia (UNED))
@DOI: 10.5281/zenodo.3759306
"""
#%%
class SlotsManager:
# Inicializa la lista de callbacks
def __init__(self):
self.callbacks = []
# Ejecuta los callbacks de la lista
def trigger(self):
for callback in self.callbacks:
callback()
print(callback)
# [callback() for callback in self.callbacks]
# Añade un slot a la lista de callbacks
def append(self, slot):
self.callbacks.append(slot)
print(slot)
| 23.931034 | 103 | 0.628242 |
class SlotsManager:
def __init__(self):
self.callbacks = []
def trigger(self):
for callback in self.callbacks:
callback()
print(callback)
def append(self, slot):
self.callbacks.append(slot)
print(slot)
| true | true |
f71ab3032781cd41199cec50632738defd8f52ca | 116,626 | py | Python | test/orm/test_joins.py | petit87/sqlalchemy | 67d674bd63ca36ac32b23f96e2b19e9dac6b0863 | [
"MIT"
] | null | null | null | test/orm/test_joins.py | petit87/sqlalchemy | 67d674bd63ca36ac32b23f96e2b19e9dac6b0863 | [
"MIT"
] | null | null | null | test/orm/test_joins.py | petit87/sqlalchemy | 67d674bd63ca36ac32b23f96e2b19e9dac6b0863 | [
"MIT"
] | null | null | null | import itertools
import sqlalchemy as sa
from sqlalchemy import and_
from sqlalchemy import desc
from sqlalchemy import exc as sa_exc
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import lateral
from sqlalchemy import literal_column
from sqlalchemy import MetaData
from sqlalchemy import not_
from sqlalchemy import or_
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import true
from sqlalchemy import union
from sqlalchemy.engine import default
from sqlalchemy.orm import aliased
from sqlalchemy.orm import backref
from sqlalchemy.orm import join
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import outerjoin
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import synonym
from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.assertions import expect_raises_message
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from test.orm import _fixtures
from .inheritance import _poly_fixtures
from .test_query import QueryTest
class InheritedTest(_poly_fixtures._Polymorphic):
run_setup_mappers = "once"
class InheritedJoinTest(InheritedTest, AssertsCompiledSQL):
def test_single_prop(self):
Company = self.classes.Company
sess = fixture_session()
self.assert_compile(
sess.query(Company).join(Company.employees),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN people "
"ON companies.company_id = people.company_id",
use_default_dialect=True,
)
def test_force_via_select_from(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company)
.filter(Company.company_id == Engineer.company_id)
.filter(Engineer.primary_language == "java"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies, people, engineers "
"WHERE companies.company_id = people.company_id "
"AND engineers.primary_language "
"= :primary_language_1",
use_default_dialect=True,
)
self.assert_compile(
sess.query(Company)
.select_from(Company, Engineer)
.filter(Company.company_id == Engineer.company_id)
.filter(Engineer.primary_language == "java"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies, people JOIN engineers "
"ON people.person_id = engineers.person_id "
"WHERE companies.company_id = people.company_id "
"AND engineers.primary_language ="
" :primary_language_1",
use_default_dialect=True,
)
def test_single_prop_of_type(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company).join(Company.employees.of_type(Engineer)),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN "
"(people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON companies.company_id = people.company_id",
use_default_dialect=True,
)
def test_explicit_polymorphic_join_one(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company)
.join(Engineer)
.filter(Engineer.engineer_name == "vlad"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN (people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON "
"companies.company_id = people.company_id "
"WHERE engineers.engineer_name = :engineer_name_1",
use_default_dialect=True,
)
def test_explicit_polymorphic_join_two(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company)
.join(Engineer, Company.company_id == Engineer.company_id)
.filter(Engineer.engineer_name == "vlad"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN "
"(people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON "
"companies.company_id = people.company_id "
"WHERE engineers.engineer_name = :engineer_name_1",
use_default_dialect=True,
)
def test_auto_aliasing_multi_link(self):
# test [ticket:2903]
sess = fixture_session()
Company, Engineer, Manager, Boss = (
self.classes.Company,
self.classes.Engineer,
self.classes.Manager,
self.classes.Boss,
)
q = (
sess.query(Company)
.join(Company.employees.of_type(Engineer))
.join(Company.employees.of_type(Manager))
.join(Company.employees.of_type(Boss))
)
with testing.expect_warnings(
"An alias is being generated automatically against joined entity "
r"Mapper\[Manager\(managers\)\] due to overlapping",
"An alias is being generated automatically against joined entity "
r"Mapper\[Boss\(boss\)\] due to overlapping",
raise_on_any_unexpected=True,
):
self.assert_compile(
q,
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name FROM companies "
"JOIN (people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON companies.company_id = people.company_id "
"JOIN (people AS people_1 JOIN managers AS managers_1 "
"ON people_1.person_id = managers_1.person_id) "
"ON companies.company_id = people_1.company_id "
"JOIN (people AS people_2 JOIN managers AS managers_2 "
"ON people_2.person_id = managers_2.person_id "
"JOIN boss AS boss_1 "
"ON managers_2.person_id = boss_1.boss_id) "
"ON companies.company_id = people_2.company_id",
use_default_dialect=True,
)
class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = "default"
@classmethod
def setup_mappers(cls):
User = cls.classes.User
Address = cls.classes.Address
users, addresses = (cls.tables.users, cls.tables.addresses)
cls.mapper_registry.map_imperatively(
User,
users,
properties={
"addresses": relationship(Address),
"ad_syn": synonym("addresses"),
},
)
cls.mapper_registry.map_imperatively(Address, addresses)
def test_join_on_synonym(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).join(User.ad_syn),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
class JoinTest(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
@testing.combinations_list(
set(
itertools.product(
[
"relationship",
"relationship_only",
"none",
"explicit",
"table_none",
"table_explicit",
],
[True, False],
)
),
argnames="onclause_type, use_legacy",
)
def test_filter_by_from_join(self, onclause_type, use_legacy):
User, Address = self.classes("User", "Address")
(address_table,) = self.tables("addresses")
(user_table,) = self.tables("users")
if use_legacy:
sess = fixture_session()
q = sess.query(User)
else:
q = select(User).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if onclause_type == "relationship":
q = q.join(Address, User.addresses)
elif onclause_type == "relationship_only":
q = q.join(User.addresses)
elif onclause_type == "none":
q = q.join(Address)
elif onclause_type == "explicit":
q = q.join(Address, User.id == Address.user_id)
elif onclause_type == "table_none":
q = q.join(address_table)
elif onclause_type == "table_explicit":
q = q.join(
address_table, user_table.c.id == address_table.c.user_id
)
else:
assert False
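        # every join form above is expected to render the same JOIN;
        # filter_by() should then act upon the most recently joined entity
        # (Address), as asserted below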
q2 = q.filter_by(email_address="foo")
self.assert_compile(
q2,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"WHERE addresses.email_address = :email_address_1",
)
if use_legacy:
q2 = q.reset_joinpoint().filter_by(name="user")
self.assert_compile(
q2,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"WHERE users.name = :name_1",
)
def test_join_relationship_propagate_attrs(self):
"""test #6558"""
User = self.classes.User
users = self.tables.users
stmt = select(users).join(User.addresses)
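        # joining a Core select() on an ORM relationship attribute should
        # tag the statement with ORM propagation info so it compiles
        # through the ORM plugin, as asserted below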
eq_(
stmt._propagate_attrs,
{"compile_state_plugin": "orm", "plugin_subject": inspect(User)},
)
self.assert_compile(
stmt,
"SELECT users.id, users.name FROM users "
"JOIN addresses ON users.id = addresses.user_id",
)
@testing.combinations((True,), (False,), argnames="legacy")
@testing.combinations((True,), (False,), argnames="threelevel")
def test_join_with_entities(self, legacy, threelevel):
"""test issue #6503"""
User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
if legacy:
sess = fixture_session()
stmt = sess.query(User).join(Address).with_entities(Address.id)
else:
stmt = select(User).join(Address).with_only_columns(Address.id)
stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if threelevel:
if legacy:
stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
else:
stmt = stmt.join(Address.dingaling).with_only_columns(
Dingaling.id
)
if threelevel:
self.assert_compile(
stmt,
"SELECT dingalings.id AS dingalings_id "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
else:
self.assert_compile(
stmt,
"SELECT addresses.id AS addresses_id FROM users "
"JOIN addresses ON users.id = addresses.user_id",
)
@testing.combinations((True,), (False,), argnames="legacy")
@testing.combinations((True,), (False,), argnames="threelevel")
def test_join_and_union_with_entities(self, legacy, threelevel):
"""test issue #6698, regression caused by #6503"""
User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
if legacy:
sess = fixture_session()
stmt = sess.query(User).join(Address).with_entities(Address.id)
else:
stmt = select(User).join(Address).with_only_columns(Address.id)
stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if threelevel:
if legacy:
stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
to_union = sess.query(Dingaling.id)
else:
stmt = stmt.join(Address.dingaling).with_only_columns(
Dingaling.id
)
to_union = select(Dingaling.id).set_label_style(
LABEL_STYLE_TABLENAME_PLUS_COL
)
else:
if legacy:
to_union = sess.query(Address.id)
else:
to_union = select(Address.id).set_label_style(
LABEL_STYLE_TABLENAME_PLUS_COL
)
if legacy:
stmt = stmt.union(to_union)
else:
stmt = (
union(stmt, to_union)
.subquery()
.select()
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
)
if threelevel:
self.assert_compile(
stmt,
"SELECT anon_1.dingalings_id AS anon_1_dingalings_id FROM "
"(SELECT dingalings.id AS dingalings_id "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"JOIN dingalings ON addresses.id = dingalings.address_id "
"UNION "
"SELECT dingalings.id AS dingalings_id FROM dingalings) "
"AS anon_1",
)
else:
self.assert_compile(
stmt,
"SELECT anon_1.addresses_id AS anon_1_addresses_id FROM "
"(SELECT addresses.id AS addresses_id FROM users "
"JOIN addresses ON users.id = addresses.user_id "
"UNION "
"SELECT addresses.id AS addresses_id FROM addresses) "
"AS anon_1",
)
def test_invalid_kwarg_join(self):
User = self.classes.User
sess = fixture_session()
assert_raises_message(
TypeError,
r".*join\(\) .*unexpected .*keyword",
sess.query(User).join,
"address",
foob="bar",
bar="bat",
)
assert_raises_message(
TypeError,
r".*outerjoin\(\) .*unexpected .*keyword",
sess.query(User).outerjoin,
"address",
foob="bar",
bar="bat",
)
def test_left_w_no_entity(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
self.assert_compile(
sess.query(User, literal_column("x")).join(Address),
"SELECT users.id AS users_id, users.name AS users_name, x "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
self.assert_compile(
sess.query(literal_column("x"), User).join(Address),
"SELECT x, users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
def test_left_is_none_and_query_has_no_entities(self):
Address = self.classes.Address
sess = fixture_session()
assert_raises_message(
sa_exc.InvalidRequestError,
r"No entities to join from; please use select_from\(\) to "
r"establish the left entity/selectable of this join",
sess.query().join(Address)._compile_context,
)
def test_isouter_flag(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).join(User.orders, isouter=True),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users LEFT OUTER JOIN orders ON users.id = orders.user_id",
)
def test_full_flag(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).outerjoin(User.orders, full=True),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users FULL OUTER JOIN orders ON users.id = orders.user_id",
)
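        # note: only the compiled string is checked here; FULL OUTER JOIN
        # is not available on every backend (e.g. SQLite before 3.39)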
def test_single_prop_1(self):
User = self.classes.User
sess = fixture_session()
self.assert_compile(
sess.query(User).join(User.orders),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id",
)
def test_single_prop_2(self):
Order, User = (self.classes.Order, self.classes.User)
sess = fixture_session()
self.assert_compile(
sess.query(User).join(Order.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders JOIN users ON users.id = orders.user_id",
)
def test_single_prop_3(self):
Order, User = (self.classes.Order, self.classes.User)
sess = fixture_session()
oalias1 = aliased(Order)
self.assert_compile(
sess.query(User).join(oalias1.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 JOIN users "
"ON users.id = orders_1.user_id",
)
def test_single_prop_4(self):
(
Order,
User,
) = (self.classes.Order, self.classes.User)
sess = fixture_session()
oalias1 = aliased(Order)
oalias2 = aliased(Order)
# another nonsensical query. (from [ticket:1537]).
# in this case, the contract of "left to right" is honored
self.assert_compile(
sess.query(User).join(oalias1.user).join(oalias2.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 JOIN users "
"ON users.id = orders_1.user_id, "
"orders AS orders_2 JOIN users ON users.id = orders_2.user_id",
)
def test_single_prop_6(self):
User = self.classes.User
sess = fixture_session()
ualias = aliased(User)
self.assert_compile(
sess.query(ualias).join(ualias.orders),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1 JOIN orders ON users_1.id = orders.user_id",
)
def test_single_prop_9(self):
User = self.classes.User
sess = fixture_session()
subq = (
sess.query(User)
.filter(User.name == "ed")
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
.subquery()
)
ua = aliased(User, subq)
self.assert_compile(
sess.query(ua).join(ua.orders),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name "
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
"WHERE users.name = :name_1) AS anon_1 JOIN orders "
"ON anon_1.users_id = orders.user_id",
)
def test_single_prop_12(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
oalias1 = aliased(Order)
# test #1 for [ticket:1706]
ualias = aliased(User)
self.assert_compile(
sess.query(ualias)
.join(oalias1, ualias.orders)
.join(Address, ualias.addresses),
"SELECT users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id JOIN addresses ON users_1.id "
"= addresses.user_id",
)
def test_single_prop_13(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
# test #2 for [ticket:1706]
ualias = aliased(User)
ualias2 = aliased(User)
self.assert_compile(
sess.query(ualias)
.join(Address, ualias.addresses)
.join(ualias2, Address.user)
.join(Order, ualias.orders),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users "
"AS users_1 JOIN addresses ON users_1.id = addresses.user_id "
"JOIN users AS users_2 "
"ON users_2.id = addresses.user_id JOIN orders "
"ON users_1.id = orders.user_id",
)
def test_overlapping_paths_one_legacy(self):
User = self.classes.User
Order = self.classes.Order
sess = fixture_session()
# test overlapping paths. User->orders is used by both joins, but
# rendered once.
self.assert_compile(
sess.query(User)
.join(User.orders)
.join(Order.items)
.join(User.orders)
.join(Order.address),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"JOIN orders "
"ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id JOIN addresses "
"ON addresses.id = orders.address_id",
)
def test_overlapping_paths_multilevel_legacy(self):
User = self.classes.User
Order = self.classes.Order
Address = self.classes.Address
s = fixture_session()
q = (
s.query(User)
.join(User.orders)
.join(User.addresses)
.join(User.orders)
.join(Order.items)
.join(User.addresses)
.join(Address.dingaling)
)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON users.id = addresses.user_id "
"JOIN order_items AS order_items_1 ON orders.id = "
"order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
def test_overlapping_paths_one_modern(self):
User = self.classes.User
Order = self.classes.Order
# test overlapping paths. User->orders is used by both joins, but
# rendered once.
# label style is for comparison to legacy version. 1.4 version
# of select().join() did not behave the same as Query.join()
self.assert_compile(
select(User)
.join(User.orders)
.join(Order.items)
.join(User.orders)
.join(Order.address)
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"JOIN orders "
"ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id JOIN addresses "
"ON addresses.id = orders.address_id",
)
def test_overlapping_paths_multilevel_modern(self):
User = self.classes.User
Order = self.classes.Order
Address = self.classes.Address
# label style is for comparison to legacy version. 1.4 version
# of select().join() did not behave the same as Query.join()
q = (
select(User)
.join(User.orders)
.join(User.addresses)
.join(User.orders)
.join(Order.items)
.join(User.addresses)
.join(Address.dingaling)
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON users.id = addresses.user_id "
"JOIN order_items AS order_items_1 ON orders.id = "
"order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
def test_join_nonmapped_column(self):
"""test that the search for a 'left' doesn't trip on non-mapped cols"""
Order, User = self.classes.Order, self.classes.User
sess = fixture_session()
# intentionally join() with a non-existent "left" side
self.assert_compile(
sess.query(User.id, literal_column("foo")).join(Order.user),
"SELECT users.id AS users_id, foo FROM "
"orders JOIN users ON users.id = orders.user_id",
)
def test_backwards_join(self):
User, Address = self.classes.User, self.classes.Address
# a more controversial feature. join from
# User->Address, but the onclause is Address.user.
sess = fixture_session()
eq_(
sess.query(User)
.join(Address.user)
.filter(Address.email_address == "ed@wood.com")
.all(),
[User(id=8, name="ed")],
)
# its actually not so controversial if you view it in terms
# of multiple entities.
eq_(
sess.query(User, Address)
.join(Address.user)
.filter(Address.email_address == "ed@wood.com")
.all(),
[(User(id=8, name="ed"), Address(email_address="ed@wood.com"))],
)
# this was the controversial part. now, raise an error if the feature
# is abused.
# before the error raise was added, this would silently work.....
assert_raises(
sa_exc.InvalidRequestError,
sess.query(User).join(Address, Address.user)._compile_context,
)
# but this one would silently fail
adalias = aliased(Address)
assert_raises(
sa_exc.InvalidRequestError,
sess.query(User).join(adalias, Address.user)._compile_context,
)
def test_multiple_with_aliases(self):
Order, User = self.classes.Order, self.classes.User
sess = fixture_session()
ualias = aliased(User)
oalias1 = aliased(Order)
oalias2 = aliased(Order)
self.assert_compile(
sess.query(ualias)
.join(oalias1, ualias.orders)
.join(oalias2, ualias.orders)
.filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1 "
"JOIN orders AS orders_1 ON users_1.id = orders_1.user_id "
"JOIN orders AS orders_2 ON "
"users_1.id = orders_2.user_id "
"WHERE orders_1.user_id = :user_id_1 "
"OR orders_2.user_id = :user_id_2",
use_default_dialect=True,
)
def test_select_from_orm_joins(self):
User, Order = self.classes.User, self.classes.Order
sess = fixture_session()
ualias = aliased(User)
oalias1 = aliased(Order)
oalias2 = aliased(Order)
self.assert_compile(
join(User, oalias2, User.id == oalias2.user_id),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias2, User.id == oalias2.user_id, full=True),
"users FULL OUTER JOIN orders AS orders_1 "
"ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias2, User.id == oalias2.user_id, isouter=True),
"users LEFT OUTER JOIN orders AS orders_1 "
"ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(
User,
oalias2,
User.id == oalias2.user_id,
isouter=True,
full=True,
),
"users FULL OUTER JOIN orders AS orders_1 "
"ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"JOIN orders AS orders_2 ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2, isouter=True),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"LEFT OUTER JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2, full=True),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"FULL OUTER JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2, full=True, isouter=True),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"FULL OUTER JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(ualias, oalias1, ualias.orders),
"users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
sess.query(ualias).select_from(
join(ualias, oalias1, ualias.orders)
),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1 "
"JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
sess.query(User, ualias).select_from(
join(ualias, oalias1, ualias.orders)
),
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, "
"users_1.name AS users_1_name FROM users, users AS users_1 "
"JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
use_default_dialect=True,
)
# this fails (and we can't quite fix right now).
if False:
self.assert_compile(
sess.query(User, ualias)
.join(oalias1, ualias.orders)
.join(oalias2, User.id == oalias2.user_id)
.filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id, "
"users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id "
"WHERE orders_1.user_id = :user_id_1 "
"OR orders_2.user_id = :user_id_2",
use_default_dialect=True,
)
# this is the same thing using explicit orm.join() (which now offers
# multiple again)
self.assert_compile(
sess.query(User, ualias)
.select_from(
join(ualias, oalias1, ualias.orders),
join(User, oalias2, User.id == oalias2.user_id),
)
.filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id, "
"users JOIN orders AS orders_2 ON users.id = orders_2.user_id "
"WHERE orders_1.user_id = :user_id_1 "
"OR orders_2.user_id = :user_id_2",
use_default_dialect=True,
)
def test_overlapping_backwards_joins(self):
User, Order = self.classes.User, self.classes.Order
sess = fixture_session()
oalias1 = aliased(Order)
oalias2 = aliased(Order)
# this is invalid SQL - joins from orders_1/orders_2 to User twice.
        # but that is what was asked for, so they get it!
self.assert_compile(
sess.query(User).join(oalias1.user).join(oalias2.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 "
"JOIN users ON users.id = orders_1.user_id, orders AS orders_2 "
"JOIN users ON users.id = orders_2.user_id",
use_default_dialect=True,
)
def test_replace_multiple_from_clause(self):
"""test adding joins onto multiple FROM clauses"""
User, Order, Address = (
self.classes.User,
self.classes.Order,
self.classes.Address,
)
sess = fixture_session()
self.assert_compile(
sess.query(Address, User)
.join(Address.dingaling)
.join(User.orders)
.join(Order.items),
"SELECT addresses.id AS addresses_id, "
"addresses.user_id AS addresses_user_id, "
"addresses.email_address AS addresses_email_address, "
"users.id AS users_id, "
"users.name AS users_name FROM addresses JOIN dingalings "
"ON addresses.id = dingalings.address_id, "
"users JOIN orders ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id JOIN items "
"ON items.id = order_items_1.item_id",
use_default_dialect=True,
)
def test_invalid_join_entity_from_single_from_clause(self):
Address, Item = (self.classes.Address, self.classes.Item)
sess = fixture_session()
q = sess.query(Address).select_from(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Don't know how to join to .*Item.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(Item)._compile_context,
)
def test_invalid_join_entity_from_no_from_clause(self):
Address, Item = (self.classes.Address, self.classes.Item)
sess = fixture_session()
q = sess.query(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Don't know how to join to .*Item.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(Item)._compile_context,
)
def test_invalid_join_entity_from_multiple_from_clause(self):
"""test adding joins onto multiple FROM clauses where
we still need to say there's nothing to JOIN from"""
User, Address, Item = (
self.classes.User,
self.classes.Address,
self.classes.Item,
)
sess = fixture_session()
q = sess.query(Address, User).join(Address.dingaling).join(User.orders)
assert_raises_message(
sa.exc.InvalidRequestError,
"Don't know how to join to .*Item.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(Item)._compile_context,
)
def test_join_explicit_left_multiple_from_clause(self):
"""test adding joins onto multiple FROM clauses where
it is ambiguous which FROM should be used when an
ON clause is given"""
User = self.classes.User
sess = fixture_session()
u1 = aliased(User)
# in this case, two FROM objects, one
# is users, the other is u1_alias.
# User.addresses looks for the "users" table and can match
# to both u1_alias and users if the match is not specific enough
q = sess.query(User, u1).select_from(User, u1).join(User.addresses)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1, "
"users JOIN addresses ON users.id = addresses.user_id",
)
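        # with the plain User.addresses onclause the un-aliased users table
        # was chosen as the left side; using u1.addresses below joins from
        # the alias instead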
q = sess.query(User, u1).select_from(User, u1).join(u1.addresses)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users, "
"users AS users_1 JOIN addresses "
"ON users_1.id = addresses.user_id",
)
def test_join_explicit_left_multiple_adapted(self):
"""test adding joins onto multiple FROM clauses where
it is ambiguous which FROM should be used when an
ON clause is given"""
User = self.classes.User
sess = fixture_session()
u1 = aliased(User)
u2 = aliased(User)
# in this case, two FROM objects, one
# is users, the other is u1_alias.
# User.addresses looks for the "users" table and can match
# to both u1_alias and users if the match is not specific enough
assert_raises_message(
sa_exc.InvalidRequestError,
"Can't identify which entity in which to assign the "
"left side of this join.",
sess.query(u1, u2)
.select_from(u1, u2)
.join(User.addresses)
._compile_context,
)
# more specific ON clause
self.assert_compile(
sess.query(u1, u2).select_from(u1, u2).join(u2.addresses),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name, "
"users_2.id AS users_2_id, users_2.name AS users_2_name "
"FROM users AS users_1, "
"users AS users_2 JOIN addresses "
"ON users_2.id = addresses.user_id",
)
def test_join_entity_from_multiple_from_clause(self):
"""test adding joins onto multiple FROM clauses where
it is ambiguous which FROM should be used"""
User, Order, Address, Dingaling = (
self.classes.User,
self.classes.Order,
self.classes.Address,
self.classes.Dingaling,
)
sess = fixture_session()
q = sess.query(Address, User).join(Address.dingaling).join(User.orders)
a1 = aliased(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Can't determine which FROM clause to join from, there are "
"multiple FROMS which can join to this entity. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(a1)._compile_context,
)
# to resolve, add an ON clause
# the user->orders join is chosen to join to a1
self.assert_compile(
q.join(a1, Order.address_id == a1.id),
"SELECT addresses.id AS addresses_id, "
"addresses.user_id AS addresses_user_id, "
"addresses.email_address AS addresses_email_address, "
"users.id AS users_id, users.name AS users_name "
"FROM addresses JOIN dingalings "
"ON addresses.id = dingalings.address_id, "
"users JOIN orders "
"ON users.id = orders.user_id "
"JOIN addresses AS addresses_1 "
"ON orders.address_id = addresses_1.id",
)
# the address->dingalings join is chosen to join to a1
self.assert_compile(
q.join(a1, Dingaling.address_id == a1.id),
"SELECT addresses.id AS addresses_id, "
"addresses.user_id AS addresses_user_id, "
"addresses.email_address AS addresses_email_address, "
"users.id AS users_id, users.name AS users_name "
"FROM addresses JOIN dingalings "
"ON addresses.id = dingalings.address_id "
"JOIN addresses AS addresses_1 "
"ON dingalings.address_id = addresses_1.id, "
"users JOIN orders ON users.id = orders.user_id",
)
def test_join_entity_from_multiple_entities(self):
"""test adding joins onto multiple FROM clauses where
it is ambiguous which FROM should be used"""
Order, Address, Dingaling = (
self.classes.Order,
self.classes.Address,
self.classes.Dingaling,
)
sess = fixture_session()
q = sess.query(Order, Dingaling)
a1 = aliased(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Can't determine which FROM clause to join from, there are "
"multiple FROMS which can join to this entity. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(a1)._compile_context,
)
# to resolve, add an ON clause
# Order is chosen to join to a1
self.assert_compile(
q.join(a1, Order.address_id == a1.id),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
"orders.address_id AS orders_address_id, "
"orders.description AS orders_description, "
"orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
"dingalings.address_id AS dingalings_address_id, "
"dingalings.data AS dingalings_data "
"FROM dingalings, orders "
"JOIN addresses AS addresses_1 "
"ON orders.address_id = addresses_1.id",
)
# Dingaling is chosen to join to a1
self.assert_compile(
q.join(a1, Dingaling.address_id == a1.id),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
"orders.address_id AS orders_address_id, "
"orders.description AS orders_description, "
"orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
"dingalings.address_id AS dingalings_address_id, "
"dingalings.data AS dingalings_data "
"FROM orders, dingalings JOIN addresses AS addresses_1 "
"ON dingalings.address_id = addresses_1.id",
)
def test_clause_present_in_froms_twice_w_onclause(self):
# test [ticket:4584]
Order, Address, User = (
self.classes.Order,
self.classes.Address,
self.classes.User,
)
sess = fixture_session()
a1 = aliased(Address)
q = sess.query(Order).select_from(Order, a1, User)
assert_raises_message(
sa.exc.InvalidRequestError,
"Can't determine which FROM clause to join from, there are "
"multiple FROMS which can join to this entity. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.outerjoin(a1)._compile_context,
)
# the condition which occurs here is: Query._from_obj contains both
# "a1" by itself as well as a join that "a1" is part of.
# find_left_clause_to_join_from() needs to include removal of froms
# that are in the _hide_froms of joins the same way
# Selectable._get_display_froms does.
q = sess.query(Order).select_from(Order, a1, User)
q = q.outerjoin(a1, a1.id == Order.address_id)
q = q.outerjoin(User, a1.user_id == User.id)
self.assert_compile(
q,
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
"orders.address_id AS orders_address_id, "
"orders.description AS orders_description, "
"orders.isopen AS orders_isopen "
"FROM orders "
"LEFT OUTER JOIN addresses AS addresses_1 "
"ON addresses_1.id = orders.address_id "
"LEFT OUTER JOIN users ON addresses_1.user_id = users.id",
)
def test_clause_present_in_froms_twice_wo_onclause(self):
# test [ticket:4584]
Address, Dingaling, User = (
self.classes.Address,
self.classes.Dingaling,
self.classes.User,
)
sess = fixture_session()
a1 = aliased(Address)
# the condition which occurs here is: Query._from_obj contains both
# "a1" by itself as well as a join that "a1" is part of.
# find_left_clause_to_join_from() needs to include removal of froms
# that are in the _hide_froms of joins the same way
# Selectable._get_display_froms does.
q = sess.query(User).select_from(Dingaling, a1, User)
q = q.outerjoin(a1, User.id == a1.user_id)
q = q.outerjoin(Dingaling)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users LEFT OUTER JOIN addresses AS addresses_1 "
"ON users.id = addresses_1.user_id "
"LEFT OUTER JOIN dingalings "
"ON addresses_1.id = dingalings.address_id",
)
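    # The scenario in the two [ticket:4584] tests above is that "a1"
    # appears in Query._from_obj both standalone and inside a JOIN; the
    # left-side lookup must discard FROMs listed in a join's _hide_froms,
    # mirroring Selectable._get_display_froms, so that only one candidate
    # remains.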
def test_pure_expression(self):
# this was actually false-passing due to the assertions
# fixture not following the regular codepath for Query
addresses, users = self.tables.addresses, self.tables.users
sess = fixture_session()
self.assert_compile(
sess.query(users).join(addresses),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
def test_no_onclause(self):
Item, User, Order = (
self.classes.Item,
self.classes.User,
self.classes.Order,
)
sess = fixture_session()
eq_(
sess.query(User)
.select_from(join(User, Order).join(Item, Order.items))
.filter(Item.description == "item 4")
.all(),
[User(name="jack")],
)
eq_(
sess.query(User.name)
.select_from(join(User, Order).join(Item, Order.items))
.filter(Item.description == "item 4")
.all(),
[("jack",)],
)
eq_(
sess.query(User)
.join(Order)
.join(Item, Order.items)
.filter(Item.description == "item 4")
.all(),
[User(name="jack")],
)
def test_clause_onclause(self):
Item, Order, order_items, User = (
self.classes.Item,
self.classes.Order,
self.tables.order_items,
self.classes.User,
)
sess = fixture_session()
eq_(
sess.query(User)
.join(Order, User.id == Order.user_id)
.join(order_items, Order.id == order_items.c.order_id)
.join(Item, order_items.c.item_id == Item.id)
.filter(Item.description == "item 4")
.all(),
[User(name="jack")],
)
eq_(
sess.query(User.name)
.join(Order, User.id == Order.user_id)
.join(order_items, Order.id == order_items.c.order_id)
.join(Item, order_items.c.item_id == Item.id)
.filter(Item.description == "item 4")
.all(),
[("jack",)],
)
ualias = aliased(User)
eq_(
sess.query(ualias.name)
.join(Order, ualias.id == Order.user_id)
.join(order_items, Order.id == order_items.c.order_id)
.join(Item, order_items.c.item_id == Item.id)
.filter(Item.description == "item 4")
.all(),
[("jack",)],
)
        # an explicit onclause used with the subquery-entity pattern
        # (formerly from_self()) means the onclause must be aliased
        # against the query's custom FROM object
subq = sess.query(User).order_by(User.id).offset(2).subquery()
ua = aliased(User, subq)
eq_(
sess.query(ua).join(Order, ua.id == Order.user_id).all(),
[User(name="fred")],
)
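    # A minimal sketch of the subquery-entity pattern used just above
    # (same names as the test; shown for illustration only):
    #
    #     subq = sess.query(User).order_by(User.id).offset(2).subquery()
    #     ua = aliased(User, subq)   # User rows drawn from subq
    #     sess.query(ua).join(Order, ua.id == Order.user_id)
    #
    # phrasing the ON clause against "ua" is what allows it to be adapted
    # to the subquery's columns rather than the base "users" table.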
def test_aliased_classes(self):
User, Address = self.classes.User, self.classes.Address
sess = fixture_session()
(user7, user8, user9, user10) = sess.query(User).all()
(address1, address2, address3, address4, address5) = sess.query(
Address
).all()
expected = [
(user7, address1),
(user8, address2),
(user8, address3),
(user8, address4),
(user9, address5),
(user10, None),
]
q = sess.query(User)
AdAlias = aliased(Address)
q = q.add_entity(AdAlias).select_from(outerjoin(User, AdAlias))
result = q.order_by(User.id, AdAlias.id).all()
eq_(result, expected)
sess.expunge_all()
q = sess.query(User).add_entity(AdAlias)
result = (
q.select_from(outerjoin(User, AdAlias))
.filter(AdAlias.email_address == "ed@bettyboop.com")
.all()
)
eq_(result, [(user8, address3)])
result = (
q.select_from(outerjoin(User, AdAlias, "addresses"))
.filter(AdAlias.email_address == "ed@bettyboop.com")
.all()
)
eq_(result, [(user8, address3)])
result = (
q.select_from(outerjoin(User, AdAlias, User.id == AdAlias.user_id))
.filter(AdAlias.email_address == "ed@bettyboop.com")
.all()
)
eq_(result, [(user8, address3)])
# this is the first test where we are joining "backwards" - from
# AdAlias to User even though
# the query is against User
q = sess.query(User, AdAlias)
result = (
q.join(AdAlias.user)
.filter(User.name == "ed")
.order_by(User.id, AdAlias.id)
)
eq_(
result.all(),
[(user8, address2), (user8, address3), (user8, address4)],
)
        q = (
            sess.query(User, AdAlias)
            .select_from(join(AdAlias, User, AdAlias.user))
            .filter(User.name == "ed")
            .order_by(User.id, AdAlias.id)
        )
        eq_(
            q.all(),
            [(user8, address2), (user8, address3), (user8, address4)],
        )
def test_expression_onclauses(self):
Order, User = self.classes.Order, self.classes.User
sess = fixture_session()
subq = sess.query(User).subquery()
self.assert_compile(
sess.query(User).join(subq, User.name == subq.c.name),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN (SELECT users.id AS id, users.name "
"AS name FROM users) AS anon_1 ON users.name = anon_1.name",
use_default_dialect=True,
)
subq = sess.query(Order).subquery()
self.assert_compile(
sess.query(User).join(subq, User.id == subq.c.user_id),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN (SELECT orders.id AS id, orders.user_id AS user_id, "
"orders.address_id AS address_id, orders.description AS "
"description, orders.isopen AS isopen FROM orders) AS "
"anon_1 ON users.id = anon_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
sess.query(User).join(Order, User.id == Order.user_id),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id",
use_default_dialect=True,
)
def test_aliased_classes_m2m(self):
Item, Order = self.classes.Item, self.classes.Order
sess = fixture_session()
(order1, order2, order3, order4, order5) = sess.query(Order).all()
(item1, item2, item3, item4, item5) = sess.query(Item).all()
expected = [
(order1, item1),
(order1, item2),
(order1, item3),
(order2, item1),
(order2, item2),
(order2, item3),
(order3, item3),
(order3, item4),
(order3, item5),
(order4, item1),
(order4, item5),
(order5, item5),
]
q = sess.query(Order)
q = (
q.add_entity(Item)
.select_from(join(Order, Item, "items"))
.order_by(Order.id, Item.id)
)
result = q.all()
eq_(result, expected)
IAlias = aliased(Item)
q = (
sess.query(Order, IAlias)
.select_from(join(Order, IAlias, "items"))
.filter(IAlias.description == "item 3")
)
result = q.all()
eq_(result, [(order1, item3), (order2, item3), (order3, item3)])
def test_joins_from_adapted_entities(self):
User = self.classes.User
# test for #1853
session = fixture_session()
first = session.query(User)
second = session.query(User)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
join = subquery, subquery.c.id == User.id
joined = unioned.outerjoin(*join)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS "
"anon_1_users_id, anon_1.users_name AS "
"anon_1_users_name FROM (SELECT users.id "
"AS users_id, users.name AS users_name "
"FROM users UNION SELECT users.id AS "
"users_id, users.name AS users_name FROM "
"users) AS anon_1 LEFT OUTER JOIN (SELECT "
"users.id AS id FROM users) AS anon_2 ON "
"anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
first = session.query(User.id)
second = session.query(User.id)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
join = subquery, subquery.c.id == User.id
joined = unioned.outerjoin(*join)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS anon_1_users_id "
"FROM (SELECT users.id AS users_id FROM "
"users UNION SELECT users.id AS users_id "
"FROM users) AS anon_1 LEFT OUTER JOIN "
"(SELECT users.id AS id FROM users) AS "
"anon_2 ON anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
def test_joins_from_adapted_entities_isouter(self):
User = self.classes.User
# test for #1853
session = fixture_session()
first = session.query(User)
second = session.query(User)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
join = subquery, subquery.c.id == User.id
joined = unioned.join(*join, isouter=True)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS "
"anon_1_users_id, anon_1.users_name AS "
"anon_1_users_name FROM (SELECT users.id "
"AS users_id, users.name AS users_name "
"FROM users UNION SELECT users.id AS "
"users_id, users.name AS users_name FROM "
"users) AS anon_1 LEFT OUTER JOIN (SELECT "
"users.id AS id FROM users) AS anon_2 ON "
"anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
first = session.query(User.id)
second = session.query(User.id)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
join = subquery, subquery.c.id == User.id
joined = unioned.join(*join, isouter=True)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS anon_1_users_id "
"FROM (SELECT users.id AS users_id FROM "
"users UNION SELECT users.id AS users_id "
"FROM users) AS anon_1 LEFT OUTER JOIN "
"(SELECT users.id AS id FROM users) AS "
"anon_2 ON anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
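    # Note that this test and test_joins_from_adapted_entities assert
    # identical SQL; join(*join, isouter=True) is simply the keyword form
    # of outerjoin(*join).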
def test_overlap_with_aliases(self):
orders, User, users = (
self.tables.orders,
self.classes.User,
self.tables.users,
)
Order = self.classes.Order
oalias = orders.alias("oalias")
result = (
fixture_session()
.query(User)
.select_from(users.join(oalias))
.filter(
oalias.c.description.in_(["order 1", "order 2", "order 3"])
)
.join(User.orders)
.join(Order.items)
.order_by(User.id)
.all()
)
assert [User(id=7, name="jack"), User(id=9, name="fred")] == result
result = (
fixture_session()
.query(User)
.select_from(users.join(oalias))
.filter(
oalias.c.description.in_(["order 1", "order 2", "order 3"])
)
.join(User.orders)
.join(Order.items)
.filter_by(id=4)
.all()
)
assert [User(id=7, name="jack")] == result
def test_aliased_order_by(self):
User = self.classes.User
sess = fixture_session()
ualias = aliased(User)
eq_(
sess.query(User, ualias)
.filter(User.id > ualias.id)
.order_by(desc(ualias.id), User.name)
.all(),
[
(User(id=10, name="chuck"), User(id=9, name="fred")),
(User(id=10, name="chuck"), User(id=8, name="ed")),
(User(id=9, name="fred"), User(id=8, name="ed")),
(User(id=10, name="chuck"), User(id=7, name="jack")),
(User(id=8, name="ed"), User(id=7, name="jack")),
(User(id=9, name="fred"), User(id=7, name="jack")),
],
)
def test_plain_table(self):
addresses, User = self.tables.addresses, self.classes.User
sess = fixture_session()
eq_(
sess.query(User.name)
.join(addresses, User.id == addresses.c.user_id)
.order_by(User.id)
.all(),
[("jack",), ("ed",), ("ed",), ("ed",), ("fred",)],
)
def test_no_joinpoint_expr(self):
User, users = self.classes.User, self.tables.users
sess = fixture_session()
# these are consistent regardless of
# select_from() being present.
assert_raises_message(
sa_exc.InvalidRequestError,
"Don't know how to join to .*User.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
sess.query(users.c.id).join(User)._compile_context,
)
assert_raises_message(
sa_exc.InvalidRequestError,
"Don't know how to join to .*User.* "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
sess.query(users.c.id)
.select_from(users)
.join(User)
._compile_context,
)
def test_on_clause_no_right_side_one(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
# coercions does not catch this due to the
# legacy=True flag for JoinTargetRole
with expect_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, or ORM relationship "
"attribute expected, got",
):
sess.query(User).join(User.id == Address.user_id)
def test_on_clause_no_right_side_one_future(self):
User = self.classes.User
Address = self.classes.Address
# future mode can raise a more specific error at the coercions level
assert_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, "
"or ORM relationship attribute expected",
select(User).join,
User.id == Address.user_id,
)
def test_no_legacy_multi_join_two_element(self):
User = self.classes.User
Order = self.classes.Order
sess = fixture_session()
with expect_raises_message(
sa_exc.InvalidRequestError,
"No 'on clause' argument may be passed when joining to a "
"relationship path as a target",
):
sess.query(User).join(User.orders, Order.items)._compile_context()
def test_no_modern_multi_join_two_element(self):
User = self.classes.User
Order = self.classes.Order
sess = fixture_session()
with expect_raises_message(
sa_exc.InvalidRequestError,
"No 'on clause' argument may be passed when joining to a "
"relationship path as a target",
):
sess.execute(select(User).join(User.orders, Order.items))
def test_kw_only_blocks_legacy_multi_join(self):
User = self.classes.User
Order = self.classes.Order
Item = self.classes.Item
sess = fixture_session()
with expect_raises_message(
TypeError,
r".*join\(\) takes from 2 to 3 positional arguments but "
"4 were given",
):
sess.query(User).join(User.orders, Order.items, Item.keywords)
def test_on_clause_no_right_side_two(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
assert_raises_message(
sa_exc.ArgumentError,
"Join target Address.user_id does not refer to a mapped entity",
sess.query(User).join(Address.user_id)._compile_context,
)
def test_on_clause_no_right_side_two_future(self):
User = self.classes.User
Address = self.classes.Address
stmt = select(User).join(Address.user_id)
assert_raises_message(
sa_exc.ArgumentError,
"Join target Address.user_id does not refer to a mapped entity",
stmt.compile,
)
def test_no_strings_for_single_onclause_legacy_query(self):
User = self.classes.User
sess = fixture_session()
with expect_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, or ORM relationship "
"attribute expected, got 'addresses'",
):
sess.query(User).join("addresses")
def test_no_strings_for_single_onclause_newstyle(self):
User = self.classes.User
with expect_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, or ORM relationship "
"attribute expected, got 'addresses'",
):
select(User).join("addresses")
def test_no_strings_for_dual_onclause_legacy_query(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
with expect_raises_message(
sa_exc.ArgumentError,
"ON clause, typically a SQL expression or ORM relationship "
"attribute expected, got 'addresses'",
):
sess.query(User).join(Address, "addresses")
def test_no_strings_for_dual_onclause_newstyle(self):
User = self.classes.User
Address = self.classes.Address
with expect_raises_message(
sa_exc.ArgumentError,
"ON clause, typically a SQL expression or ORM relationship "
"attribute expected, got 'addresses'.",
):
select(User).join(Address, "addresses")
def test_select_from(self):
"""Test that the left edge of the join can be set reliably with
select_from()."""
Item, Order, User = (
self.classes.Item,
self.classes.Order,
self.classes.User,
)
sess = fixture_session()
self.assert_compile(
sess.query(Item.id)
.select_from(User)
.join(User.orders)
.join(Order.items),
"SELECT items.id AS items_id FROM users JOIN orders ON "
"users.id = orders.user_id JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id JOIN items ON items.id = "
"order_items_1.item_id",
use_default_dialect=True,
)
# here, the join really wants to add a second FROM clause
        # for "Item", but select_from disallows that
self.assert_compile(
sess.query(Item.id)
.select_from(User)
.join(Item, User.id == Item.id),
"SELECT items.id AS items_id FROM users JOIN items "
"ON users.id = items.id",
use_default_dialect=True,
)
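    # As the docstring says, select_from(User) fixes the left edge of the
    # join; without it, a query whose column list mentions only Item.id
    # would otherwise be free to introduce "items" as a second,
    # comma-separated FROM entry.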
class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = "default"
run_setup_mappers = "once"
@classmethod
def define_tables(cls, metadata):
Table("table1", metadata, Column("id", Integer, primary_key=True))
Table(
"table2",
metadata,
Column("id", Integer, primary_key=True),
Column("t1_id", Integer),
)
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
class T2(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
table1, table2 = cls.tables.table1, cls.tables.table2
T1, T2 = cls.classes("T1", "T2")
cls.mapper_registry.map_imperatively(T1, table1)
cls.mapper_registry.map_imperatively(T2, table2)
def test_select_mapped_to_mapped_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(subq.c.count, T1.id)
.select_from(subq)
.join(T1, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 "
"GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
"ON anon_1.t1_id = table1.id",
)
def test_select_mapped_to_mapped_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(subq.c.count, T1.id).join(T1, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 "
"GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
"ON anon_1.t1_id = table1.id",
)
def test_select_mapped_to_select_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(subq.c.count, T1.id)
.select_from(T1)
.join(subq, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 GROUP BY table2.t1_id) "
"AS anon_1 ON anon_1.t1_id = table1.id",
)
def test_select_mapped_to_select_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
# without select_from
self.assert_compile(
sess.query(subq.c.count, T1.id).join(subq, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM table1 JOIN "
"(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) "
"AS anon_1 ON anon_1.t1_id = table1.id",
)
# with select_from, same query
self.assert_compile(
sess.query(subq.c.count, T1.id)
.select_from(T1)
.join(subq, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM table1 JOIN "
"(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) "
"AS anon_1 ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_mapped_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
# without select_from
self.assert_compile(
sess.query(T1.id, subq.c.count).join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"JOIN table1 ON anon_1.t1_id = table1.id",
)
# with select_from, same query
self.assert_compile(
sess.query(T1.id, subq.c.count)
.select_from(subq)
.join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"JOIN table1 ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_mapped_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(T1.id, subq.c.count)
.select_from(subq)
.join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
"ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_select_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(T1.id, subq.c.count)
.select_from(T1)
.join(subq, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_select_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(T1.id, subq.c.count).join(subq, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"ON anon_1.t1_id = table1.id",
)
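    # Taken together, the tests in this class cover the matrix of
    # (subquery-first vs. entity-first column list) x (explicit
    # select_from() vs. implicit left side); within each pairing, the
    # explicit and implicit forms compile to identical SQL.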
class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL):
run_setup_mappers = "once"
__dialect__ = default.DefaultDialect()
@classmethod
def define_tables(cls, metadata):
Table(
"nodes",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("parent_id", Integer, ForeignKey("nodes.id")),
)
Table(
"sub_table",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("node_id", Integer, ForeignKey("nodes.id")),
)
Table(
"assoc_table",
metadata,
Column("left_id", Integer, ForeignKey("nodes.id")),
Column("right_id", Integer, ForeignKey("nodes.id")),
)
@classmethod
def setup_classes(cls):
class Node(cls.Comparable):
pass
class Sub(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
nodes, assoc_table, sub_table = (
cls.tables.nodes,
cls.tables.assoc_table,
cls.tables.sub_table,
)
Node, Sub = cls.classes("Node", "Sub")
cls.mapper_registry.map_imperatively(
Node,
nodes,
properties={
"children": relationship(
Node,
lazy="select",
join_depth=3,
backref=backref("parent", remote_side=[nodes.c.id]),
),
"subs": relationship(Sub),
"assoc": relationship(
Node,
secondary=assoc_table,
primaryjoin=nodes.c.id == assoc_table.c.left_id,
secondaryjoin=nodes.c.id == assoc_table.c.right_id,
),
},
)
cls.mapper_registry.map_imperatively(Sub, sub_table)
def test_o2m_aliased_plus_o2m(self):
Node, Sub = self.classes.Node, self.classes.Sub
sess = fixture_session()
n1 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(Sub, n1.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN sub_table ON nodes_1.id = sub_table.node_id",
)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(Sub, Node.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN sub_table ON nodes.id = sub_table.node_id",
)
def test_m2m_aliased_plus_o2m(self):
Node, Sub = self.classes.Node, self.classes.Sub
sess = fixture_session()
n1 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.assoc).join(Sub, n1.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
"assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
"assoc_table_1.right_id JOIN sub_table "
"ON nodes_1.id = sub_table.node_id",
)
self.assert_compile(
sess.query(Node).join(n1, Node.assoc).join(Sub, Node.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
"assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
"assoc_table_1.right_id JOIN sub_table "
"ON nodes.id = sub_table.node_id",
)
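    # In both tests above, the paired assertions differ only in whether
    # the second join targets n1.subs or Node.subs; the entity that the
    # relationship attribute is bound to decides whether the ON clause
    # renders against nodes_1 or against the base "nodes" table.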
class CreateJoinsTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = "default"
def _inherits_fixture(self):
m = MetaData()
base = Table("base", m, Column("id", Integer, primary_key=True))
a = Table(
"a",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("b_id", Integer, ForeignKey("b.id")),
)
b = Table(
"b",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("c_id", Integer, ForeignKey("c.id")),
)
c = Table(
"c",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
)
class Base:
pass
class A(Base):
pass
class B(Base):
pass
class C(Base):
pass
self.mapper_registry.map_imperatively(Base, base)
self.mapper_registry.map_imperatively(
A,
a,
inherits=Base,
properties={"b": relationship(B, primaryjoin=a.c.b_id == b.c.id)},
)
self.mapper_registry.map_imperatively(
B,
b,
inherits=Base,
properties={"c": relationship(C, primaryjoin=b.c.c_id == c.c.id)},
)
self.mapper_registry.map_imperatively(C, c, inherits=Base)
return A, B, C, Base
def test_double_level_aliased_exists(self):
A, B, C, Base = self._inherits_fixture()
s = fixture_session()
self.assert_compile(
s.query(A).filter(A.b.has(B.c.has(C.id == 5))),
"SELECT a.id AS a_id, base.id AS base_id, a.b_id AS a_b_id "
"FROM base JOIN a ON base.id = a.id WHERE "
"EXISTS (SELECT 1 FROM (SELECT base.id AS base_id, b.id AS "
"b_id, b.c_id AS b_c_id FROM base JOIN b ON base.id = b.id) "
"AS anon_1 WHERE a.b_id = anon_1.b_id AND (EXISTS "
"(SELECT 1 FROM (SELECT base.id AS base_id, c.id AS c_id "
"FROM base JOIN c ON base.id = c.id) AS anon_2 "
"WHERE anon_1.b_c_id = anon_2.c_id AND anon_2.c_id = :id_1"
")))",
)
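    # Each .has() above renders as a correlated EXISTS against the
    # polymorphic selectable (base JOIN b, base JOIN c), so nesting
    # B.c.has(...) inside A.b.has(...) yields the nested EXISTS
    # (anon_1, anon_2) seen in the expected SQL.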
class JoinToNonPolyAliasesTest(fixtures.MappedTest, AssertsCompiledSQL):
"""test joins to an aliased selectable and that we can refer to that
    aliased selectable in filter criteria.

    Basically testing that the aliasing that Query applies to
    with_polymorphic targets doesn't leak into non-polymorphic mappers.
"""
__dialect__ = "default"
run_create_tables = None
run_deletes = None
@classmethod
def define_tables(cls, metadata):
Table(
"parent",
metadata,
Column("id", Integer, primary_key=True),
Column("data", String(50)),
)
Table(
"child",
metadata,
Column("id", Integer, primary_key=True),
Column("parent_id", Integer, ForeignKey("parent.id")),
Column("data", String(50)),
)
@classmethod
def setup_mappers(cls):
parent, child = cls.tables.parent, cls.tables.child
class Parent(cls.Comparable):
pass
class Child(cls.Comparable):
pass
mp = cls.mapper_registry.map_imperatively(Parent, parent)
cls.mapper_registry.map_imperatively(Child, child)
derived = select(child).alias()
npc = aliased(Child, derived)
cls.npc = npc
cls.derived = derived
mp.add_property("npc", relationship(npc))
def test_join_parent_child(self):
Parent = self.classes.Parent
sess = fixture_session()
self.assert_compile(
sess.query(Parent)
.join(Parent.npc)
.filter(self.derived.c.data == "x"),
"SELECT parent.id AS parent_id, parent.data AS parent_data "
"FROM parent JOIN (SELECT child.id AS id, "
"child.parent_id AS parent_id, "
"child.data AS data "
"FROM child) AS anon_1 ON parent.id = anon_1.parent_id "
"WHERE anon_1.data = :data_1",
)
def test_join_parent_child_select_from(self):
Parent = self.classes.Parent
npc = self.npc
sess = fixture_session()
self.assert_compile(
sess.query(npc)
.select_from(Parent)
.join(Parent.npc)
.filter(self.derived.c.data == "x"),
"SELECT anon_1.id AS anon_1_id, anon_1.parent_id "
"AS anon_1_parent_id, anon_1.data AS anon_1_data "
"FROM parent JOIN (SELECT child.id AS id, child.parent_id AS "
"parent_id, child.data AS data FROM child) AS anon_1 ON "
"parent.id = anon_1.parent_id WHERE anon_1.data = :data_1",
)
def test_join_select_parent_child(self):
Parent = self.classes.Parent
npc = self.npc
sess = fixture_session()
self.assert_compile(
sess.query(Parent, npc)
.join(Parent.npc)
.filter(self.derived.c.data == "x"),
"SELECT parent.id AS parent_id, parent.data AS parent_data, "
"anon_1.id AS anon_1_id, anon_1.parent_id AS anon_1_parent_id, "
"anon_1.data AS anon_1_data FROM parent JOIN "
"(SELECT child.id AS id, child.parent_id AS parent_id, "
"child.data AS data FROM child) AS anon_1 ON parent.id = "
"anon_1.parent_id WHERE anon_1.data = :data_1",
)
class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
run_setup_mappers = "once"
run_inserts = "once"
run_deletes = None
__dialect__ = "default"
@classmethod
def define_tables(cls, metadata):
Table(
"nodes",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("parent_id", Integer, ForeignKey("nodes.id")),
Column("data", String(30)),
)
@classmethod
def setup_classes(cls):
class Node(cls.Comparable):
def append(self, node):
self.children.append(node)
@classmethod
def setup_mappers(cls):
Node, nodes = cls.classes.Node, cls.tables.nodes
cls.mapper_registry.map_imperatively(
Node,
nodes,
properties={
"children": relationship(
Node,
lazy="select",
join_depth=3,
backref=backref("parent", remote_side=[nodes.c.id]),
)
},
)
@classmethod
def insert_data(cls, connection):
Node = cls.classes.Node
sess = Session(connection)
n1 = Node(data="n1")
n1.append(Node(data="n11"))
n1.append(Node(data="n12"))
n1.append(Node(data="n13"))
n1.children[1].append(Node(data="n121"))
n1.children[1].append(Node(data="n122"))
n1.children[1].append(Node(data="n123"))
sess.add(n1)
sess.flush()
sess.close()
def test_join_4_explicit_join(self):
Node = self.classes.Node
sess = fixture_session()
na = aliased(Node)
na2 = aliased(Node)
        # this one is a great example of how the API changes;
# while it requires the explicitness of aliased(Node), the whole
# guesswork of joinpoint / aliased goes away and the whole thing
# is simpler
#
# .join("parent", aliased=True)
# .filter(Node.data == "n12")
# .join("parent", aliased=True, from_joinpoint=True)
# .filter(Node.data == "n1")
#
# becomes:
#
# na = aliased(Node)
# na2 = aliased(Node)
#
# ...
# .join(na, Node.parent)
# .filter(na.data == "n12")
# .join(na2, na.parent)
# .filter(na2.data == "n1")
#
q = (
sess.query(Node)
.filter(Node.data == "n122")
.join(na, Node.parent)
.filter(na.data == "n12")
.join(na2, na.parent)
.filter(na2.data == "n1")
)
self.assert_compile(
q,
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
"ON nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
"ON nodes_2.id = nodes_1.parent_id WHERE nodes.data = :data_1 "
"AND nodes_1.data = :data_2 AND nodes_2.data = :data_3",
checkparams={"data_1": "n122", "data_2": "n12", "data_3": "n1"},
)
node = q.first()
eq_(node.data, "n122")
def test_from_self_inside_excludes_outside(self):
"""test the propagation of aliased() from inside to outside
        on a from_self().
"""
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
# n1 is not inside the from_self(), so all cols must be maintained
# on the outside
subq = (
sess.query(Node)
.filter(Node.data == "n122")
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
.subquery()
)
na = aliased(Node, subq)
self.assert_compile(
sess.query(n1, na.id),
"SELECT nodes_1.id AS nodes_1_id, "
"nodes_1.parent_id AS nodes_1_parent_id, "
"nodes_1.data AS nodes_1_data, anon_1.nodes_id AS anon_1_nodes_id "
"FROM nodes AS nodes_1, (SELECT nodes.id AS nodes_id, "
"nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM "
"nodes WHERE nodes.data = :data_1) AS anon_1",
use_default_dialect=True,
)
parent = aliased(Node)
grandparent = aliased(Node)
subq = (
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
q = sess.query(na, pa, ga).limit(1)
# parent, grandparent *are* inside the from_self(), so they
# should get aliased to the outside.
self.assert_compile(
q,
"SELECT anon_1.nodes_id AS anon_1_nodes_id, "
"anon_1.nodes_parent_id AS anon_1_nodes_parent_id, "
"anon_1.nodes_data AS anon_1_nodes_data, "
"anon_1.nodes_1_id AS anon_1_nodes_1_id, "
"anon_1.nodes_1_parent_id AS anon_1_nodes_1_parent_id, "
"anon_1.nodes_1_data AS anon_1_nodes_1_data, "
"anon_1.nodes_2_id AS anon_1_nodes_2_id, "
"anon_1.nodes_2_parent_id AS anon_1_nodes_2_parent_id, "
"anon_1.nodes_2_data AS anon_1_nodes_2_data "
"FROM (SELECT nodes.id AS nodes_id, nodes.parent_id "
"AS nodes_parent_id, nodes.data AS nodes_data, "
"nodes_1.id AS nodes_1_id, "
"nodes_1.parent_id AS nodes_1_parent_id, "
"nodes_1.data AS nodes_1_data, nodes_2.id AS nodes_2_id, "
"nodes_2.parent_id AS nodes_2_parent_id, nodes_2.data AS "
"nodes_2_data FROM nodes JOIN nodes AS nodes_1 ON "
"nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
"ON nodes_2.id = nodes_1.parent_id "
"WHERE nodes.data = :data_1 AND nodes_1.data = :data_2 AND "
"nodes_2.data = :data_3) AS anon_1 LIMIT :param_1",
{"param_1": 1},
use_default_dialect=True,
)
def test_join_to_self_no_aliases_raises(self):
Node = self.classes.Node
s = fixture_session()
assert_raises_message(
sa.exc.InvalidRequestError,
r"Can't construct a join from Mapper\[Node\(nodes\)\] to "
r"Mapper\[Node\(nodes\)\], they are the same entity",
s.query(Node).join(Node.children)._compile_context,
)
def test_explicit_join_1(self):
Node = self.classes.Node
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
join(Node, n1, "children").join(n2, "children"),
"nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_2(self):
Node = self.classes.Node
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
join(Node, n1, Node.children).join(n2, n1.children),
"nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_3(self):
Node = self.classes.Node
n1 = aliased(Node)
n2 = aliased(Node)
# the join_to_left=False here is unfortunate. the default on this
# flag should be False.
self.assert_compile(
join(Node, n1, Node.children).join(
n2, Node.children, join_to_left=False
),
"nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_4(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(n2, n1.children),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
"ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_5(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(n2, Node.children),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
"ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_6(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
node = (
sess.query(Node)
.select_from(join(Node, n1, "children"))
.filter(n1.data == "n122")
.first()
)
assert node.data == "n12"
def test_explicit_join_7(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
node = (
sess.query(Node)
.select_from(join(Node, n1, "children").join(n2, "children"))
.filter(n2.data == "n122")
.first()
)
assert node.data == "n1"
def test_explicit_join_8(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
# mix explicit and named onclauses
node = (
sess.query(Node)
.select_from(
join(Node, n1, Node.id == n1.parent_id).join(n2, "children")
)
.filter(n2.data == "n122")
.first()
)
assert node.data == "n1"
def test_explicit_join_9(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
node = (
sess.query(Node)
.select_from(join(Node, n1, "parent").join(n2, "parent"))
.filter(
and_(Node.data == "n122", n1.data == "n12", n2.data == "n1")
)
.first()
)
assert node.data == "n122"
def test_explicit_join_10(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
eq_(
list(
sess.query(Node)
.select_from(join(Node, n1, "parent").join(n2, "parent"))
.filter(
and_(
Node.data == "n122", n1.data == "n12", n2.data == "n1"
)
)
.with_entities(Node.data, n1.data, n2.data)
),
[("n122", "n12", "n1")],
)
def test_join_to_nonaliased(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
# using 'n1.parent' implicitly joins to unaliased Node
eq_(
sess.query(n1).join(n1.parent).filter(Node.data == "n1").all(),
[
Node(parent_id=1, data="n11", id=2),
Node(parent_id=1, data="n12", id=3),
Node(parent_id=1, data="n13", id=4),
],
)
# explicit (new syntax)
eq_(
sess.query(n1)
.join(Node, n1.parent)
.filter(Node.data == "n1")
.all(),
[
Node(parent_id=1, data="n11", id=2),
Node(parent_id=1, data="n12", id=3),
Node(parent_id=1, data="n13", id=4),
],
)
def test_multiple_explicit_entities_one(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_multiple_explicit_entities_two(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
subq = (
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
eq_(
sess.query(na, pa, ga).first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_multiple_explicit_entities_three(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
# same, change order around
subq = (
sess.query(parent, grandparent, Node)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
eq_(
sess.query(pa, ga, na).first(),
(Node(data="n12"), Node(data="n1"), Node(data="n122")),
)
def test_multiple_explicit_entities_four(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.options(joinedload(Node.children))
.first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_multiple_explicit_entities_five(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
subq = (
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
eq_(
sess.query(na, pa, ga).options(joinedload(na.children)).first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_any(self):
Node = self.classes.Node
sess = fixture_session()
eq_(
sess.query(Node)
.filter(Node.children.any(Node.data == "n1"))
.all(),
[],
)
eq_(
sess.query(Node)
.filter(Node.children.any(Node.data == "n12"))
.all(),
[Node(data="n1")],
)
eq_(
sess.query(Node)
.filter(~Node.children.any())
.order_by(Node.id)
.all(),
[
Node(data="n11"),
Node(data="n13"),
Node(data="n121"),
Node(data="n122"),
Node(data="n123"),
],
)
def test_has(self):
Node = self.classes.Node
sess = fixture_session()
eq_(
sess.query(Node)
.filter(Node.parent.has(Node.data == "n12"))
.order_by(Node.id)
.all(),
[Node(data="n121"), Node(data="n122"), Node(data="n123")],
)
eq_(
sess.query(Node)
.filter(Node.parent.has(Node.data == "n122"))
.all(),
[],
)
eq_(
sess.query(Node).filter(~Node.parent.has()).all(),
[Node(data="n1")],
)
def test_contains(self):
Node = self.classes.Node
sess = fixture_session()
n122 = sess.query(Node).filter(Node.data == "n122").one()
eq_(
sess.query(Node).filter(Node.children.contains(n122)).all(),
[Node(data="n12")],
)
n13 = sess.query(Node).filter(Node.data == "n13").one()
eq_(
sess.query(Node).filter(Node.children.contains(n13)).all(),
[Node(data="n1")],
)
def test_eq_ne(self):
Node = self.classes.Node
sess = fixture_session()
n12 = sess.query(Node).filter(Node.data == "n12").one()
eq_(
sess.query(Node).filter(Node.parent == n12).all(),
[Node(data="n121"), Node(data="n122"), Node(data="n123")],
)
eq_(
sess.query(Node).filter(Node.parent != n12).all(),
[
Node(data="n1"),
Node(data="n11"),
Node(data="n12"),
Node(data="n13"),
],
)
class SelfReferentialM2MTest(fixtures.MappedTest):
run_setup_mappers = "once"
run_inserts = "once"
run_deletes = None
@classmethod
def define_tables(cls, metadata):
Table(
"nodes",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("data", String(30)),
)
Table(
"node_to_nodes",
metadata,
Column(
"left_node_id",
Integer,
ForeignKey("nodes.id"),
primary_key=True,
),
Column(
"right_node_id",
Integer,
ForeignKey("nodes.id"),
primary_key=True,
),
)
@classmethod
def setup_classes(cls):
class Node(cls.Comparable):
pass
@classmethod
def insert_data(cls, connection):
Node, nodes, node_to_nodes = (
cls.classes.Node,
cls.tables.nodes,
cls.tables.node_to_nodes,
)
cls.mapper_registry.map_imperatively(
Node,
nodes,
properties={
"children": relationship(
Node,
lazy="select",
secondary=node_to_nodes,
primaryjoin=nodes.c.id == node_to_nodes.c.left_node_id,
secondaryjoin=nodes.c.id == node_to_nodes.c.right_node_id,
)
},
)
sess = Session(connection)
n1 = Node(data="n1")
n2 = Node(data="n2")
n3 = Node(data="n3")
n4 = Node(data="n4")
n5 = Node(data="n5")
n6 = Node(data="n6")
n7 = Node(data="n7")
n1.children = [n2, n3, n4]
n2.children = [n3, n6, n7]
n3.children = [n5, n4]
sess.add(n1)
sess.add(n2)
sess.add(n3)
sess.add(n4)
sess.flush()
sess.close()
def test_any(self):
Node = self.classes.Node
sess = fixture_session()
eq_(
sess.query(Node)
.filter(Node.children.any(Node.data == "n3"))
.order_by(Node.data)
.all(),
[Node(data="n1"), Node(data="n2")],
)
def test_contains(self):
Node = self.classes.Node
sess = fixture_session()
n4 = sess.query(Node).filter_by(data="n4").one()
eq_(
sess.query(Node)
.filter(Node.children.contains(n4))
.order_by(Node.data)
.all(),
[Node(data="n1"), Node(data="n3")],
)
eq_(
sess.query(Node)
.filter(not_(Node.children.contains(n4)))
.order_by(Node.data)
.all(),
[
Node(data="n2"),
Node(data="n4"),
Node(data="n5"),
Node(data="n6"),
Node(data="n7"),
],
)
def test_explicit_join(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
eq_(
sess.query(Node)
.select_from(join(Node, n1, "children"))
.filter(n1.data.in_(["n3", "n7"]))
.order_by(Node.id)
.all(),
[Node(data="n1"), Node(data="n2")],
)
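    # Sketch of what the explicit join above expands to (illustration
    # only): with the primaryjoin/secondaryjoin configured in
    # insert_data(), join(Node, n1, "children") renders roughly as
    #
    #     nodes JOIN node_to_nodes AS node_to_nodes_1
    #         ON nodes.id = node_to_nodes_1.left_node_id
    #         JOIN nodes AS nodes_1
    #         ON nodes_1.id = node_to_nodes_1.right_node_id
    #
    # so filtering on n1.data constrains the right-hand side of the
    # association.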
class JoinLateralTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = default.DefaultDialect(supports_native_boolean=True)
run_setup_bind = None
run_setup_mappers = "once"
run_create_tables = None
@classmethod
def define_tables(cls, metadata):
Table(
"people",
metadata,
Column("people_id", Integer, primary_key=True),
Column("age", Integer),
Column("name", String(30)),
)
Table(
"bookcases",
metadata,
Column("bookcase_id", Integer, primary_key=True),
Column(
"bookcase_owner_id", Integer, ForeignKey("people.people_id")
),
Column("bookcase_shelves", Integer),
Column("bookcase_width", Integer),
)
Table(
"books",
metadata,
Column("book_id", Integer, primary_key=True),
Column(
"bookcase_id", Integer, ForeignKey("bookcases.bookcase_id")
),
Column("book_owner_id", Integer, ForeignKey("people.people_id")),
Column("book_weight", Integer),
)
@classmethod
def setup_classes(cls):
class Person(cls.Comparable):
pass
class Bookcase(cls.Comparable):
pass
class Book(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
Person, Bookcase, Book = cls.classes("Person", "Bookcase", "Book")
people, bookcases, books = cls.tables("people", "bookcases", "books")
cls.mapper_registry.map_imperatively(Person, people)
cls.mapper_registry.map_imperatively(
Bookcase,
bookcases,
properties={
"owner": relationship(Person),
"books": relationship(Book),
},
)
cls.mapper_registry.map_imperatively(Book, books)
def test_select_subquery(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
subq = (
s.query(Book.book_id)
.correlate(Person)
.filter(Person.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(Person, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT people.people_id AS people_people_id, "
"people.age AS people_age, people.name AS people_name, "
"anon_1.book_id AS anon_1_book_id "
"FROM people JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE people.people_id = books.book_owner_id) AS anon_1 ON true",
)
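    # A rough sketch of the LATERAL pattern exercised above (same names as
    # the test; illustration only):
    #
    #     subq = (
    #         s.query(Book.book_id)
    #         .correlate(Person)          # refer to the enclosing Person
    #         .filter(Person.people_id == Book.book_owner_id)
    #         .subquery()
    #         .lateral()                  # mark the subquery as LATERAL
    #     )
    #     s.query(Person, subq.c.book_id).join(subq, true())
    #
    # LATERAL is what allows the subquery to reference "people" from the
    # enclosing FROM list; the join condition itself is just ON true.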
# "aas" == "aliased against select"
def test_select_subquery_aas_implicit_correlate(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
s.query(Book.book_id)
.filter(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_select_subquery_aas_implicit_correlate_coreonly(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
select(Book.book_id)
.where(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_select_subquery_aas_explicit_correlate_coreonly(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
select(Book.book_id)
.correlate(pa)
.where(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_select_subquery_aas_explicit_correlate(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
s.query(Book.book_id)
.correlate(pa)
.filter(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_from_function(self):
Bookcase = self.classes.Bookcase
s = fixture_session()
srf = lateral(func.generate_series(1, Bookcase.bookcase_shelves))
self.assert_compile(
s.query(Bookcase).join(srf, true()),
"SELECT bookcases.bookcase_id AS bookcases_bookcase_id, "
"bookcases.bookcase_owner_id AS bookcases_bookcase_owner_id, "
"bookcases.bookcase_shelves AS bookcases_bookcase_shelves, "
"bookcases.bookcase_width AS bookcases_bookcase_width "
"FROM bookcases JOIN "
"LATERAL generate_series(:generate_series_1, "
"bookcases.bookcase_shelves) AS anon_1 ON true",
)
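    # generate_series() stands in here for any table-valued function;
    # wrapping it in lateral() lets its arguments reference columns of
    # "bookcases" from the enclosing FROM list, rendering as
    # "JOIN LATERAL generate_series(...) AS anon_1 ON true".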
def test_from_function_aas(self):
Bookcase = self.classes.Bookcase
s = fixture_session()
subq = s.query(Bookcase).subquery()
ba = aliased(Bookcase, subq)
srf = lateral(func.generate_series(1, ba.bookcase_shelves))
self.assert_compile(
s.query(ba).join(srf, true()),
"SELECT anon_1.bookcase_id AS anon_1_bookcase_id, "
"anon_1.bookcase_owner_id AS anon_1_bookcase_owner_id, "
"anon_1.bookcase_shelves AS anon_1_bookcase_shelves, "
"anon_1.bookcase_width AS anon_1_bookcase_width "
"FROM (SELECT bookcases.bookcase_id AS bookcase_id, "
"bookcases.bookcase_owner_id AS bookcase_owner_id, "
"bookcases.bookcase_shelves AS bookcase_shelves, "
"bookcases.bookcase_width AS bookcase_width FROM bookcases) "
"AS anon_1 "
"JOIN LATERAL "
"generate_series(:generate_series_1, anon_1.bookcase_shelves) "
"AS anon_2 ON true",
)
class JoinRawTablesWLegacyTest(QueryTest, AssertsCompiledSQL):
"""test issue 6003 where creating a legacy query with only Core elements
    fails to account for the ORM context, thus producing a query
that ignores the "legacy" joins
"""
__dialect__ = "default"
@testing.combinations(
(
lambda sess, User, Address: sess.query(User).join(Address),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN addresses ON users.id = addresses.user_id",
),
(
lambda sess, user_table, address_table: sess.query(
user_table
).join(address_table),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN addresses ON users.id = addresses.user_id",
),
(
lambda sess, User, Address, Order: sess.query(User)
.outerjoin(Order)
.join(Address),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users LEFT OUTER JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON addresses.id = orders.address_id",
),
(
lambda sess, user_table, address_table, order_table: sess.query(
user_table
)
.outerjoin(order_table)
.join(address_table),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users LEFT OUTER JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON addresses.id = orders.address_id",
),
)
def test_join_render(self, spec, expected):
User, Address, Order = self.classes("User", "Address", "Order")
user_table, address_table, order_table = self.tables(
"users", "addresses", "orders"
)
sess = fixture_session()
q = testing.resolve_lambda(spec, **locals())
self.assert_compile(q, expected)
self.assert_compile(
q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).statement,
expected,
)
def test_core_round_trip(self):
user_table, address_table = self.tables("users", "addresses")
sess = fixture_session()
q = (
sess.query(user_table)
.join(address_table)
.where(address_table.c.email_address.startswith("ed"))
)
eq_(q.all(), [(8, "ed"), (8, "ed"), (8, "ed")])
| 34.041448 | 79 | 0.563099 | import itertools
import sqlalchemy as sa
from sqlalchemy import and_
from sqlalchemy import desc
from sqlalchemy import exc as sa_exc
from sqlalchemy import ForeignKey
from sqlalchemy import func
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import lateral
from sqlalchemy import literal_column
from sqlalchemy import MetaData
from sqlalchemy import not_
from sqlalchemy import or_
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import true
from sqlalchemy import union
from sqlalchemy.engine import default
from sqlalchemy.orm import aliased
from sqlalchemy.orm import backref
from sqlalchemy.orm import join
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import outerjoin
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import synonym
from sqlalchemy.sql.selectable import LABEL_STYLE_TABLENAME_PLUS_COL
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.assertions import expect_raises_message
from sqlalchemy.testing.fixtures import fixture_session
from sqlalchemy.testing.schema import Column
from test.orm import _fixtures
from .inheritance import _poly_fixtures
from .test_query import QueryTest
class InheritedTest(_poly_fixtures._Polymorphic):
run_setup_mappers = "once"
class InheritedJoinTest(InheritedTest, AssertsCompiledSQL):
def test_single_prop(self):
Company = self.classes.Company
sess = fixture_session()
self.assert_compile(
sess.query(Company).join(Company.employees),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN people "
"ON companies.company_id = people.company_id",
use_default_dialect=True,
)
def test_force_via_select_from(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company)
.filter(Company.company_id == Engineer.company_id)
.filter(Engineer.primary_language == "java"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies, people, engineers "
"WHERE companies.company_id = people.company_id "
"AND engineers.primary_language "
"= :primary_language_1",
use_default_dialect=True,
)
self.assert_compile(
sess.query(Company)
.select_from(Company, Engineer)
.filter(Company.company_id == Engineer.company_id)
.filter(Engineer.primary_language == "java"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies, people JOIN engineers "
"ON people.person_id = engineers.person_id "
"WHERE companies.company_id = people.company_id "
"AND engineers.primary_language ="
" :primary_language_1",
use_default_dialect=True,
)
def test_single_prop_of_type(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company).join(Company.employees.of_type(Engineer)),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN "
"(people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON companies.company_id = people.company_id",
use_default_dialect=True,
)
def test_explicit_polymorphic_join_one(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company)
.join(Engineer)
.filter(Engineer.engineer_name == "vlad"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN (people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON "
"companies.company_id = people.company_id "
"WHERE engineers.engineer_name = :engineer_name_1",
use_default_dialect=True,
)
def test_explicit_polymorphic_join_two(self):
Company, Engineer = self.classes.Company, self.classes.Engineer
sess = fixture_session()
self.assert_compile(
sess.query(Company)
.join(Engineer, Company.company_id == Engineer.company_id)
.filter(Engineer.engineer_name == "vlad"),
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name "
"FROM companies JOIN "
"(people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON "
"companies.company_id = people.company_id "
"WHERE engineers.engineer_name = :engineer_name_1",
use_default_dialect=True,
)
def test_auto_aliasing_multi_link(self):
sess = fixture_session()
Company, Engineer, Manager, Boss = (
self.classes.Company,
self.classes.Engineer,
self.classes.Manager,
self.classes.Boss,
)
q = (
sess.query(Company)
.join(Company.employees.of_type(Engineer))
.join(Company.employees.of_type(Manager))
.join(Company.employees.of_type(Boss))
)
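        # each successive of_type() join overlaps the base "people" table,
        # so the ORM aliases the later joins automatically and warns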
with testing.expect_warnings(
"An alias is being generated automatically against joined entity "
r"Mapper\[Manager\(managers\)\] due to overlapping",
"An alias is being generated automatically against joined entity "
r"Mapper\[Boss\(boss\)\] due to overlapping",
raise_on_any_unexpected=True,
):
self.assert_compile(
q,
"SELECT companies.company_id AS companies_company_id, "
"companies.name AS companies_name FROM companies "
"JOIN (people JOIN engineers "
"ON people.person_id = engineers.person_id) "
"ON companies.company_id = people.company_id "
"JOIN (people AS people_1 JOIN managers AS managers_1 "
"ON people_1.person_id = managers_1.person_id) "
"ON companies.company_id = people_1.company_id "
"JOIN (people AS people_2 JOIN managers AS managers_2 "
"ON people_2.person_id = managers_2.person_id "
"JOIN boss AS boss_1 "
"ON managers_2.person_id = boss_1.boss_id) "
"ON companies.company_id = people_2.company_id",
use_default_dialect=True,
)
class JoinOnSynonymTest(_fixtures.FixtureTest, AssertsCompiledSQL):
__dialect__ = "default"
@classmethod
def setup_mappers(cls):
User = cls.classes.User
Address = cls.classes.Address
users, addresses = (cls.tables.users, cls.tables.addresses)
cls.mapper_registry.map_imperatively(
User,
users,
properties={
"addresses": relationship(Address),
"ad_syn": synonym("addresses"),
},
)
cls.mapper_registry.map_imperatively(Address, addresses)
def test_join_on_synonym(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).join(User.ad_syn),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
class JoinTest(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
@testing.combinations_list(
set(
itertools.product(
[
"relationship",
"relationship_only",
"none",
"explicit",
"table_none",
"table_explicit",
],
[True, False],
)
),
argnames="onclause_type, use_legacy",
)
def test_filter_by_from_join(self, onclause_type, use_legacy):
User, Address = self.classes("User", "Address")
(address_table,) = self.tables("addresses")
(user_table,) = self.tables("users")
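        # exercise both the legacy Query API and the 2.0-style select() API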
if use_legacy:
sess = fixture_session()
q = sess.query(User)
else:
q = select(User).set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if onclause_type == "relationship":
q = q.join(Address, User.addresses)
elif onclause_type == "relationship_only":
q = q.join(User.addresses)
elif onclause_type == "none":
q = q.join(Address)
elif onclause_type == "explicit":
q = q.join(Address, User.id == Address.user_id)
elif onclause_type == "table_none":
q = q.join(address_table)
elif onclause_type == "table_explicit":
q = q.join(
address_table, user_table.c.id == address_table.c.user_id
)
else:
assert False
q2 = q.filter_by(email_address="foo")
self.assert_compile(
q2,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"WHERE addresses.email_address = :email_address_1",
)
if use_legacy:
q2 = q.reset_joinpoint().filter_by(name="user")
self.assert_compile(
q2,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"WHERE users.name = :name_1",
)
def test_join_relationship_propagate_attrs(self):
User = self.classes.User
users = self.tables.users
stmt = select(users).join(User.addresses)
eq_(
stmt._propagate_attrs,
{"compile_state_plugin": "orm", "plugin_subject": inspect(User)},
)
self.assert_compile(
stmt,
"SELECT users.id, users.name FROM users "
"JOIN addresses ON users.id = addresses.user_id",
)
@testing.combinations((True,), (False,), argnames="legacy")
@testing.combinations((True,), (False,), argnames="threelevel")
def test_join_with_entities(self, legacy, threelevel):
User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
if legacy:
sess = fixture_session()
stmt = sess.query(User).join(Address).with_entities(Address.id)
else:
stmt = select(User).join(Address).with_only_columns(Address.id)
stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if threelevel:
if legacy:
stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
else:
stmt = stmt.join(Address.dingaling).with_only_columns(
Dingaling.id
)
if threelevel:
self.assert_compile(
stmt,
"SELECT dingalings.id AS dingalings_id "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
else:
self.assert_compile(
stmt,
"SELECT addresses.id AS addresses_id FROM users "
"JOIN addresses ON users.id = addresses.user_id",
)
@testing.combinations((True,), (False,), argnames="legacy")
@testing.combinations((True,), (False,), argnames="threelevel")
def test_join_and_union_with_entities(self, legacy, threelevel):
User, Address, Dingaling = self.classes("User", "Address", "Dingaling")
if legacy:
sess = fixture_session()
stmt = sess.query(User).join(Address).with_entities(Address.id)
else:
stmt = select(User).join(Address).with_only_columns(Address.id)
stmt = stmt.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
if threelevel:
if legacy:
stmt = stmt.join(Address.dingaling).with_entities(Dingaling.id)
to_union = sess.query(Dingaling.id)
else:
stmt = stmt.join(Address.dingaling).with_only_columns(
Dingaling.id
)
to_union = select(Dingaling.id).set_label_style(
LABEL_STYLE_TABLENAME_PLUS_COL
)
else:
if legacy:
to_union = sess.query(Address.id)
else:
to_union = select(Address.id).set_label_style(
LABEL_STYLE_TABLENAME_PLUS_COL
)
if legacy:
stmt = stmt.union(to_union)
else:
stmt = (
union(stmt, to_union)
.subquery()
.select()
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
)
if threelevel:
self.assert_compile(
stmt,
"SELECT anon_1.dingalings_id AS anon_1_dingalings_id FROM "
"(SELECT dingalings.id AS dingalings_id "
"FROM users JOIN addresses ON users.id = addresses.user_id "
"JOIN dingalings ON addresses.id = dingalings.address_id "
"UNION "
"SELECT dingalings.id AS dingalings_id FROM dingalings) "
"AS anon_1",
)
else:
self.assert_compile(
stmt,
"SELECT anon_1.addresses_id AS anon_1_addresses_id FROM "
"(SELECT addresses.id AS addresses_id FROM users "
"JOIN addresses ON users.id = addresses.user_id "
"UNION "
"SELECT addresses.id AS addresses_id FROM addresses) "
"AS anon_1",
)
def test_invalid_kwarg_join(self):
User = self.classes.User
sess = fixture_session()
assert_raises_message(
TypeError,
r".*join\(\) .*unexpected .*keyword",
sess.query(User).join,
"address",
foob="bar",
bar="bat",
)
assert_raises_message(
TypeError,
r".*outerjoin\(\) .*unexpected .*keyword",
sess.query(User).outerjoin,
"address",
foob="bar",
bar="bat",
)
def test_left_w_no_entity(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
self.assert_compile(
sess.query(User, literal_column("x")).join(Address),
"SELECT users.id AS users_id, users.name AS users_name, x "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
self.assert_compile(
sess.query(literal_column("x"), User).join(Address),
"SELECT x, users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
def test_left_is_none_and_query_has_no_entities(self):
Address = self.classes.Address
sess = fixture_session()
assert_raises_message(
sa_exc.InvalidRequestError,
r"No entities to join from; please use select_from\(\) to "
r"establish the left entity/selectable of this join",
sess.query().join(Address)._compile_context,
)
def test_isouter_flag(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).join(User.orders, isouter=True),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users LEFT OUTER JOIN orders ON users.id = orders.user_id",
)
def test_full_flag(self):
User = self.classes.User
self.assert_compile(
fixture_session().query(User).outerjoin(User.orders, full=True),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users FULL OUTER JOIN orders ON users.id = orders.user_id",
)
def test_single_prop_1(self):
User = self.classes.User
sess = fixture_session()
self.assert_compile(
sess.query(User).join(User.orders),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id",
)
def test_single_prop_2(self):
Order, User = (self.classes.Order, self.classes.User)
sess = fixture_session()
self.assert_compile(
sess.query(User).join(Order.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders JOIN users ON users.id = orders.user_id",
)
def test_single_prop_3(self):
Order, User = (self.classes.Order, self.classes.User)
sess = fixture_session()
oalias1 = aliased(Order)
self.assert_compile(
sess.query(User).join(oalias1.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 JOIN users "
"ON users.id = orders_1.user_id",
)
def test_single_prop_4(self):
(
Order,
User,
) = (self.classes.Order, self.classes.User)
sess = fixture_session()
oalias1 = aliased(Order)
oalias2 = aliased(Order)
self.assert_compile(
sess.query(User).join(oalias1.user).join(oalias2.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 JOIN users "
"ON users.id = orders_1.user_id, "
"orders AS orders_2 JOIN users ON users.id = orders_2.user_id",
)
def test_single_prop_6(self):
User = self.classes.User
sess = fixture_session()
ualias = aliased(User)
self.assert_compile(
sess.query(ualias).join(ualias.orders),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1 JOIN orders ON users_1.id = orders.user_id",
)
def test_single_prop_9(self):
User = self.classes.User
sess = fixture_session()
subq = (
sess.query(User)
.filter(User.name == "ed")
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
.subquery()
)
ua = aliased(User, subq)
self.assert_compile(
sess.query(ua).join(ua.orders),
"SELECT anon_1.users_id AS anon_1_users_id, "
"anon_1.users_name AS anon_1_users_name "
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
"WHERE users.name = :name_1) AS anon_1 JOIN orders "
"ON anon_1.users_id = orders.user_id",
)
def test_single_prop_12(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
oalias1 = aliased(Order)
        ualias = aliased(User)
self.assert_compile(
sess.query(ualias)
.join(oalias1, ualias.orders)
.join(Address, ualias.addresses),
"SELECT users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id JOIN addresses ON users_1.id "
"= addresses.user_id",
)
def test_single_prop_13(self):
Order, User, Address = (
self.classes.Order,
self.classes.User,
self.classes.Address,
)
sess = fixture_session()
        ualias = aliased(User)
ualias2 = aliased(User)
self.assert_compile(
sess.query(ualias)
.join(Address, ualias.addresses)
.join(ualias2, Address.user)
.join(Order, ualias.orders),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users "
"AS users_1 JOIN addresses ON users_1.id = addresses.user_id "
"JOIN users AS users_2 "
"ON users_2.id = addresses.user_id JOIN orders "
"ON users_1.id = orders.user_id",
)
def test_overlapping_paths_one_legacy(self):
User = self.classes.User
Order = self.classes.Order
sess = fixture_session()
self.assert_compile(
sess.query(User)
.join(User.orders)
.join(Order.items)
.join(User.orders)
.join(Order.address),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"JOIN orders "
"ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id JOIN addresses "
"ON addresses.id = orders.address_id",
)
def test_overlapping_paths_multilevel_legacy(self):
User = self.classes.User
Order = self.classes.Order
Address = self.classes.Address
s = fixture_session()
q = (
s.query(User)
.join(User.orders)
.join(User.addresses)
.join(User.orders)
.join(Order.items)
.join(User.addresses)
.join(Address.dingaling)
)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON users.id = addresses.user_id "
"JOIN order_items AS order_items_1 ON orders.id = "
"order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
def test_overlapping_paths_one_modern(self):
User = self.classes.User
Order = self.classes.Order
self.assert_compile(
select(User)
.join(User.orders)
.join(Order.items)
.join(User.orders)
.join(Order.address)
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL),
"SELECT users.id AS users_id, users.name AS users_name FROM users "
"JOIN orders "
"ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id JOIN addresses "
"ON addresses.id = orders.address_id",
)
def test_overlapping_paths_multilevel_modern(self):
User = self.classes.User
Order = self.classes.Order
Address = self.classes.Address
q = (
select(User)
.join(User.orders)
.join(User.addresses)
.join(User.orders)
.join(Order.items)
.join(User.addresses)
.join(Address.dingaling)
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON users.id = addresses.user_id "
"JOIN order_items AS order_items_1 ON orders.id = "
"order_items_1.order_id "
"JOIN items ON items.id = order_items_1.item_id "
"JOIN dingalings ON addresses.id = dingalings.address_id",
)
def test_join_nonmapped_column(self):
Order, User = self.classes.Order, self.classes.User
sess = fixture_session()
self.assert_compile(
sess.query(User.id, literal_column("foo")).join(Order.user),
"SELECT users.id AS users_id, foo FROM "
"orders JOIN users ON users.id = orders.user_id",
)
def test_backwards_join(self):
User, Address = self.classes.User, self.classes.Address
sess = fixture_session()
eq_(
sess.query(User)
.join(Address.user)
.filter(Address.email_address == "ed@wood.com")
.all(),
[User(id=8, name="ed")],
)
eq_(
sess.query(User, Address)
.join(Address.user)
.filter(Address.email_address == "ed@wood.com")
.all(),
[(User(id=8, name="ed"), Address(email_address="ed@wood.com"))],
)
assert_raises(
sa_exc.InvalidRequestError,
sess.query(User).join(Address, Address.user)._compile_context,
)
adalias = aliased(Address)
assert_raises(
sa_exc.InvalidRequestError,
sess.query(User).join(adalias, Address.user)._compile_context,
)
def test_multiple_with_aliases(self):
Order, User = self.classes.Order, self.classes.User
sess = fixture_session()
ualias = aliased(User)
oalias1 = aliased(Order)
oalias2 = aliased(Order)
self.assert_compile(
sess.query(ualias)
.join(oalias1, ualias.orders)
.join(oalias2, ualias.orders)
.filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1 "
"JOIN orders AS orders_1 ON users_1.id = orders_1.user_id "
"JOIN orders AS orders_2 ON "
"users_1.id = orders_2.user_id "
"WHERE orders_1.user_id = :user_id_1 "
"OR orders_2.user_id = :user_id_2",
use_default_dialect=True,
)
def test_select_from_orm_joins(self):
User, Order = self.classes.User, self.classes.Order
sess = fixture_session()
ualias = aliased(User)
oalias1 = aliased(Order)
oalias2 = aliased(Order)
self.assert_compile(
join(User, oalias2, User.id == oalias2.user_id),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias2, User.id == oalias2.user_id, full=True),
"users FULL OUTER JOIN orders AS orders_1 "
"ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias2, User.id == oalias2.user_id, isouter=True),
"users LEFT OUTER JOIN orders AS orders_1 "
"ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(
User,
oalias2,
User.id == oalias2.user_id,
isouter=True,
full=True,
),
"users FULL OUTER JOIN orders AS orders_1 "
"ON users.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"JOIN orders AS orders_2 ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2, isouter=True),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"LEFT OUTER JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2, full=True),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"FULL OUTER JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(User, oalias1).join(oalias2, full=True, isouter=True),
"users JOIN orders AS orders_1 ON users.id = orders_1.user_id "
"FULL OUTER JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id",
use_default_dialect=True,
)
self.assert_compile(
join(ualias, oalias1, ualias.orders),
"users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
sess.query(ualias).select_from(
join(ualias, oalias1, ualias.orders)
),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1 "
"JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
sess.query(User, ualias).select_from(
join(ualias, oalias1, ualias.orders)
),
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, "
"users_1.name AS users_1_name FROM users, users AS users_1 "
"JOIN orders AS orders_1 ON users_1.id = orders_1.user_id",
use_default_dialect=True,
)
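        # disabled: the implicit multi-FROM .join() form of this query;
        # the select_from() version below expresses the same SQL explicitly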
if False:
self.assert_compile(
sess.query(User, ualias)
.join(oalias1, ualias.orders)
.join(oalias2, User.id == oalias2.user_id)
.filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users JOIN orders AS orders_2 "
"ON users.id = orders_2.user_id, "
"users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id "
"WHERE orders_1.user_id = :user_id_1 "
"OR orders_2.user_id = :user_id_2",
use_default_dialect=True,
)
# this is the same thing using explicit orm.join() (which now offers
# multiple again)
self.assert_compile(
sess.query(User, ualias)
.select_from(
join(ualias, oalias1, ualias.orders),
join(User, oalias2, User.id == oalias2.user_id),
)
.filter(or_(oalias1.user_id == 9, oalias2.user_id == 7)),
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS "
"users_1_name FROM users AS users_1 JOIN orders AS orders_1 "
"ON users_1.id = orders_1.user_id, "
"users JOIN orders AS orders_2 ON users.id = orders_2.user_id "
"WHERE orders_1.user_id = :user_id_1 "
"OR orders_2.user_id = :user_id_2",
use_default_dialect=True,
)
def test_overlapping_backwards_joins(self):
User, Order = self.classes.User, self.classes.Order
sess = fixture_session()
oalias1 = aliased(Order)
oalias2 = aliased(Order)
# this is invalid SQL - joins from orders_1/orders_2 to User twice.
# but that is what was asked for so they get it !
self.assert_compile(
sess.query(User).join(oalias1.user).join(oalias2.user),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM orders AS orders_1 "
"JOIN users ON users.id = orders_1.user_id, orders AS orders_2 "
"JOIN users ON users.id = orders_2.user_id",
use_default_dialect=True,
)
def test_replace_multiple_from_clause(self):
User, Order, Address = (
self.classes.User,
self.classes.Order,
self.classes.Address,
)
sess = fixture_session()
self.assert_compile(
sess.query(Address, User)
.join(Address.dingaling)
.join(User.orders)
.join(Order.items),
"SELECT addresses.id AS addresses_id, "
"addresses.user_id AS addresses_user_id, "
"addresses.email_address AS addresses_email_address, "
"users.id AS users_id, "
"users.name AS users_name FROM addresses JOIN dingalings "
"ON addresses.id = dingalings.address_id, "
"users JOIN orders ON users.id = orders.user_id "
"JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id JOIN items "
"ON items.id = order_items_1.item_id",
use_default_dialect=True,
)
def test_invalid_join_entity_from_single_from_clause(self):
Address, Item = (self.classes.Address, self.classes.Item)
sess = fixture_session()
q = sess.query(Address).select_from(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Don't know how to join to .*Item.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(Item)._compile_context,
)
def test_invalid_join_entity_from_no_from_clause(self):
Address, Item = (self.classes.Address, self.classes.Item)
sess = fixture_session()
q = sess.query(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Don't know how to join to .*Item.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(Item)._compile_context,
)
def test_invalid_join_entity_from_multiple_from_clause(self):
User, Address, Item = (
self.classes.User,
self.classes.Address,
self.classes.Item,
)
sess = fixture_session()
q = sess.query(Address, User).join(Address.dingaling).join(User.orders)
assert_raises_message(
sa.exc.InvalidRequestError,
"Don't know how to join to .*Item.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(Item)._compile_context,
)
def test_join_explicit_left_multiple_from_clause(self):
User = self.classes.User
sess = fixture_session()
u1 = aliased(User)
q = sess.query(User, u1).select_from(User, u1).join(User.addresses)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users AS users_1, "
"users JOIN addresses ON users.id = addresses.user_id",
)
q = sess.query(User, u1).select_from(User, u1).join(u1.addresses)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name, "
"users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM users, "
"users AS users_1 JOIN addresses "
"ON users_1.id = addresses.user_id",
)
def test_join_explicit_left_multiple_adapted(self):
User = self.classes.User
sess = fixture_session()
u1 = aliased(User)
u2 = aliased(User)
assert_raises_message(
sa_exc.InvalidRequestError,
"Can't identify which entity in which to assign the "
"left side of this join.",
sess.query(u1, u2)
.select_from(u1, u2)
.join(User.addresses)
._compile_context,
)
# more specific ON clause
self.assert_compile(
sess.query(u1, u2).select_from(u1, u2).join(u2.addresses),
"SELECT users_1.id AS users_1_id, users_1.name AS users_1_name, "
"users_2.id AS users_2_id, users_2.name AS users_2_name "
"FROM users AS users_1, "
"users AS users_2 JOIN addresses "
"ON users_2.id = addresses.user_id",
)
def test_join_entity_from_multiple_from_clause(self):
User, Order, Address, Dingaling = (
self.classes.User,
self.classes.Order,
self.classes.Address,
self.classes.Dingaling,
)
sess = fixture_session()
q = sess.query(Address, User).join(Address.dingaling).join(User.orders)
a1 = aliased(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Can't determine which FROM clause to join from, there are "
"multiple FROMS which can join to this entity. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(a1)._compile_context,
)
self.assert_compile(
q.join(a1, Order.address_id == a1.id),
"SELECT addresses.id AS addresses_id, "
"addresses.user_id AS addresses_user_id, "
"addresses.email_address AS addresses_email_address, "
"users.id AS users_id, users.name AS users_name "
"FROM addresses JOIN dingalings "
"ON addresses.id = dingalings.address_id, "
"users JOIN orders "
"ON users.id = orders.user_id "
"JOIN addresses AS addresses_1 "
"ON orders.address_id = addresses_1.id",
)
self.assert_compile(
q.join(a1, Dingaling.address_id == a1.id),
"SELECT addresses.id AS addresses_id, "
"addresses.user_id AS addresses_user_id, "
"addresses.email_address AS addresses_email_address, "
"users.id AS users_id, users.name AS users_name "
"FROM addresses JOIN dingalings "
"ON addresses.id = dingalings.address_id "
"JOIN addresses AS addresses_1 "
"ON dingalings.address_id = addresses_1.id, "
"users JOIN orders ON users.id = orders.user_id",
)
def test_join_entity_from_multiple_entities(self):
Order, Address, Dingaling = (
self.classes.Order,
self.classes.Address,
self.classes.Dingaling,
)
sess = fixture_session()
q = sess.query(Order, Dingaling)
a1 = aliased(Address)
assert_raises_message(
sa.exc.InvalidRequestError,
"Can't determine which FROM clause to join from, there are "
"multiple FROMS which can join to this entity. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.join(a1)._compile_context,
)
# to resolve, add an ON clause
# Order is chosen to join to a1
self.assert_compile(
q.join(a1, Order.address_id == a1.id),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
"orders.address_id AS orders_address_id, "
"orders.description AS orders_description, "
"orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
"dingalings.address_id AS dingalings_address_id, "
"dingalings.data AS dingalings_data "
"FROM dingalings, orders "
"JOIN addresses AS addresses_1 "
"ON orders.address_id = addresses_1.id",
)
# Dingaling is chosen to join to a1
self.assert_compile(
q.join(a1, Dingaling.address_id == a1.id),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
"orders.address_id AS orders_address_id, "
"orders.description AS orders_description, "
"orders.isopen AS orders_isopen, dingalings.id AS dingalings_id, "
"dingalings.address_id AS dingalings_address_id, "
"dingalings.data AS dingalings_data "
"FROM orders, dingalings JOIN addresses AS addresses_1 "
"ON dingalings.address_id = addresses_1.id",
)
def test_clause_present_in_froms_twice_w_onclause(self):
# test [ticket:4584]
Order, Address, User = (
self.classes.Order,
self.classes.Address,
self.classes.User,
)
sess = fixture_session()
a1 = aliased(Address)
q = sess.query(Order).select_from(Order, a1, User)
assert_raises_message(
sa.exc.InvalidRequestError,
"Can't determine which FROM clause to join from, there are "
"multiple FROMS which can join to this entity. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
q.outerjoin(a1)._compile_context,
)
q = sess.query(Order).select_from(Order, a1, User)
q = q.outerjoin(a1, a1.id == Order.address_id)
q = q.outerjoin(User, a1.user_id == User.id)
self.assert_compile(
q,
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
"orders.address_id AS orders_address_id, "
"orders.description AS orders_description, "
"orders.isopen AS orders_isopen "
"FROM orders "
"LEFT OUTER JOIN addresses AS addresses_1 "
"ON addresses_1.id = orders.address_id "
"LEFT OUTER JOIN users ON addresses_1.user_id = users.id",
)
def test_clause_present_in_froms_twice_wo_onclause(self):
Address, Dingaling, User = (
self.classes.Address,
self.classes.Dingaling,
self.classes.User,
)
sess = fixture_session()
a1 = aliased(Address)
q = sess.query(User).select_from(Dingaling, a1, User)
q = q.outerjoin(a1, User.id == a1.user_id)
q = q.outerjoin(Dingaling)
self.assert_compile(
q,
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users LEFT OUTER JOIN addresses AS addresses_1 "
"ON users.id = addresses_1.user_id "
"LEFT OUTER JOIN dingalings "
"ON addresses_1.id = dingalings.address_id",
)
def test_pure_expression(self):
addresses, users = self.tables.addresses, self.tables.users
sess = fixture_session()
self.assert_compile(
sess.query(users).join(addresses),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN addresses ON users.id = addresses.user_id",
)
def test_no_onclause(self):
Item, User, Order = (
self.classes.Item,
self.classes.User,
self.classes.Order,
)
sess = fixture_session()
eq_(
sess.query(User)
.select_from(join(User, Order).join(Item, Order.items))
.filter(Item.description == "item 4")
.all(),
[User(name="jack")],
)
eq_(
sess.query(User.name)
.select_from(join(User, Order).join(Item, Order.items))
.filter(Item.description == "item 4")
.all(),
[("jack",)],
)
eq_(
sess.query(User)
.join(Order)
.join(Item, Order.items)
.filter(Item.description == "item 4")
.all(),
[User(name="jack")],
)
def test_clause_onclause(self):
Item, Order, order_items, User = (
self.classes.Item,
self.classes.Order,
self.tables.order_items,
self.classes.User,
)
sess = fixture_session()
eq_(
sess.query(User)
.join(Order, User.id == Order.user_id)
.join(order_items, Order.id == order_items.c.order_id)
.join(Item, order_items.c.item_id == Item.id)
.filter(Item.description == "item 4")
.all(),
[User(name="jack")],
)
eq_(
sess.query(User.name)
.join(Order, User.id == Order.user_id)
.join(order_items, Order.id == order_items.c.order_id)
.join(Item, order_items.c.item_id == Item.id)
.filter(Item.description == "item 4")
.all(),
[("jack",)],
)
ualias = aliased(User)
eq_(
sess.query(ualias.name)
.join(Order, ualias.id == Order.user_id)
.join(order_items, Order.id == order_items.c.order_id)
.join(Item, order_items.c.item_id == Item.id)
.filter(Item.description == "item 4")
.all(),
[("jack",)],
)
# FROM object
subq = sess.query(User).order_by(User.id).offset(2).subquery()
ua = aliased(User, subq)
eq_(
sess.query(ua).join(Order, ua.id == Order.user_id).all(),
[User(name="fred")],
)
def test_aliased_classes(self):
User, Address = self.classes.User, self.classes.Address
sess = fixture_session()
(user7, user8, user9, user10) = sess.query(User).all()
(address1, address2, address3, address4, address5) = sess.query(
Address
).all()
expected = [
(user7, address1),
(user8, address2),
(user8, address3),
(user8, address4),
(user9, address5),
(user10, None),
]
q = sess.query(User)
AdAlias = aliased(Address)
q = q.add_entity(AdAlias).select_from(outerjoin(User, AdAlias))
result = q.order_by(User.id, AdAlias.id).all()
eq_(result, expected)
sess.expunge_all()
q = sess.query(User).add_entity(AdAlias)
result = (
q.select_from(outerjoin(User, AdAlias))
.filter(AdAlias.email_address == "ed@bettyboop.com")
.all()
)
eq_(result, [(user8, address3)])
result = (
q.select_from(outerjoin(User, AdAlias, "addresses"))
.filter(AdAlias.email_address == "ed@bettyboop.com")
.all()
)
eq_(result, [(user8, address3)])
result = (
q.select_from(outerjoin(User, AdAlias, User.id == AdAlias.user_id))
.filter(AdAlias.email_address == "ed@bettyboop.com")
.all()
)
eq_(result, [(user8, address3)])
# this is the first test where we are joining "backwards" - from
# AdAlias to User even though
# the query is against User
q = sess.query(User, AdAlias)
result = (
q.join(AdAlias.user)
.filter(User.name == "ed")
.order_by(User.id, AdAlias.id)
)
eq_(
result.all(),
[(user8, address2), (user8, address3), (user8, address4)],
)
q = (
sess.query(User, AdAlias)
.select_from(join(AdAlias, User, AdAlias.user))
.filter(User.name == "ed")
)
eq_(
            q.all(),
[(user8, address2), (user8, address3), (user8, address4)],
)
def test_expression_onclauses(self):
Order, User = self.classes.Order, self.classes.User
sess = fixture_session()
subq = sess.query(User).subquery()
self.assert_compile(
sess.query(User).join(subq, User.name == subq.c.name),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN (SELECT users.id AS id, users.name "
"AS name FROM users) AS anon_1 ON users.name = anon_1.name",
use_default_dialect=True,
)
subq = sess.query(Order).subquery()
self.assert_compile(
sess.query(User).join(subq, User.id == subq.c.user_id),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN (SELECT orders.id AS id, orders.user_id AS user_id, "
"orders.address_id AS address_id, orders.description AS "
"description, orders.isopen AS isopen FROM orders) AS "
"anon_1 ON users.id = anon_1.user_id",
use_default_dialect=True,
)
self.assert_compile(
sess.query(User).join(Order, User.id == Order.user_id),
"SELECT users.id AS users_id, users.name AS users_name "
"FROM users JOIN orders ON users.id = orders.user_id",
use_default_dialect=True,
)
def test_aliased_classes_m2m(self):
Item, Order = self.classes.Item, self.classes.Order
sess = fixture_session()
(order1, order2, order3, order4, order5) = sess.query(Order).all()
(item1, item2, item3, item4, item5) = sess.query(Item).all()
expected = [
(order1, item1),
(order1, item2),
(order1, item3),
(order2, item1),
(order2, item2),
(order2, item3),
(order3, item3),
(order3, item4),
(order3, item5),
(order4, item1),
(order4, item5),
(order5, item5),
]
q = sess.query(Order)
q = (
q.add_entity(Item)
.select_from(join(Order, Item, "items"))
.order_by(Order.id, Item.id)
)
result = q.all()
eq_(result, expected)
IAlias = aliased(Item)
q = (
sess.query(Order, IAlias)
.select_from(join(Order, IAlias, "items"))
.filter(IAlias.description == "item 3")
)
result = q.all()
eq_(result, [(order1, item3), (order2, item3), (order3, item3)])
def test_joins_from_adapted_entities(self):
User = self.classes.User
# test for #1853
session = fixture_session()
first = session.query(User)
second = session.query(User)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
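        # (target, onclause) pair, unpacked below as outerjoin(*join)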
join = subquery, subquery.c.id == User.id
joined = unioned.outerjoin(*join)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS "
"anon_1_users_id, anon_1.users_name AS "
"anon_1_users_name FROM (SELECT users.id "
"AS users_id, users.name AS users_name "
"FROM users UNION SELECT users.id AS "
"users_id, users.name AS users_name FROM "
"users) AS anon_1 LEFT OUTER JOIN (SELECT "
"users.id AS id FROM users) AS anon_2 ON "
"anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
first = session.query(User.id)
second = session.query(User.id)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
join = subquery, subquery.c.id == User.id
joined = unioned.outerjoin(*join)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS anon_1_users_id "
"FROM (SELECT users.id AS users_id FROM "
"users UNION SELECT users.id AS users_id "
"FROM users) AS anon_1 LEFT OUTER JOIN "
"(SELECT users.id AS id FROM users) AS "
"anon_2 ON anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
def test_joins_from_adapted_entities_isouter(self):
User = self.classes.User
# test for #1853
session = fixture_session()
first = session.query(User)
second = session.query(User)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
join = subquery, subquery.c.id == User.id
joined = unioned.join(*join, isouter=True)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS "
"anon_1_users_id, anon_1.users_name AS "
"anon_1_users_name FROM (SELECT users.id "
"AS users_id, users.name AS users_name "
"FROM users UNION SELECT users.id AS "
"users_id, users.name AS users_name FROM "
"users) AS anon_1 LEFT OUTER JOIN (SELECT "
"users.id AS id FROM users) AS anon_2 ON "
"anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
first = session.query(User.id)
second = session.query(User.id)
unioned = first.union(second)
subquery = session.query(User.id).subquery()
join = subquery, subquery.c.id == User.id
joined = unioned.join(*join, isouter=True)
self.assert_compile(
joined,
"SELECT anon_1.users_id AS anon_1_users_id "
"FROM (SELECT users.id AS users_id FROM "
"users UNION SELECT users.id AS users_id "
"FROM users) AS anon_1 LEFT OUTER JOIN "
"(SELECT users.id AS id FROM users) AS "
"anon_2 ON anon_2.id = anon_1.users_id",
use_default_dialect=True,
)
def test_overlap_with_aliases(self):
orders, User, users = (
self.tables.orders,
self.classes.User,
self.tables.users,
)
Order = self.classes.Order
oalias = orders.alias("oalias")
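        # plain Core alias of the orders table, joined via select_from();
        # the ORM joins along User.orders / Order.items are layered on top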
result = (
fixture_session()
.query(User)
.select_from(users.join(oalias))
.filter(
oalias.c.description.in_(["order 1", "order 2", "order 3"])
)
.join(User.orders)
.join(Order.items)
.order_by(User.id)
.all()
)
assert [User(id=7, name="jack"), User(id=9, name="fred")] == result
result = (
fixture_session()
.query(User)
.select_from(users.join(oalias))
.filter(
oalias.c.description.in_(["order 1", "order 2", "order 3"])
)
.join(User.orders)
.join(Order.items)
.filter_by(id=4)
.all()
)
assert [User(id=7, name="jack")] == result
def test_aliased_order_by(self):
User = self.classes.User
sess = fixture_session()
ualias = aliased(User)
eq_(
sess.query(User, ualias)
.filter(User.id > ualias.id)
.order_by(desc(ualias.id), User.name)
.all(),
[
(User(id=10, name="chuck"), User(id=9, name="fred")),
(User(id=10, name="chuck"), User(id=8, name="ed")),
(User(id=9, name="fred"), User(id=8, name="ed")),
(User(id=10, name="chuck"), User(id=7, name="jack")),
(User(id=8, name="ed"), User(id=7, name="jack")),
(User(id=9, name="fred"), User(id=7, name="jack")),
],
)
def test_plain_table(self):
addresses, User = self.tables.addresses, self.classes.User
sess = fixture_session()
eq_(
sess.query(User.name)
.join(addresses, User.id == addresses.c.user_id)
.order_by(User.id)
.all(),
[("jack",), ("ed",), ("ed",), ("ed",), ("fred",)],
)
def test_no_joinpoint_expr(self):
User, users = self.classes.User, self.tables.users
sess = fixture_session()
# these are consistent regardless of
# select_from() being present.
assert_raises_message(
sa_exc.InvalidRequestError,
"Don't know how to join to .*User.*. "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
sess.query(users.c.id).join(User)._compile_context,
)
assert_raises_message(
sa_exc.InvalidRequestError,
"Don't know how to join to .*User.* "
r"Please use the .select_from\(\) "
"method to establish an explicit left side, as well as",
sess.query(users.c.id)
.select_from(users)
.join(User)
._compile_context,
)
def test_on_clause_no_right_side_one(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
# coercions does not catch this due to the
# legacy=True flag for JoinTargetRole
with expect_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, or ORM relationship "
"attribute expected, got",
):
sess.query(User).join(User.id == Address.user_id)
def test_on_clause_no_right_side_one_future(self):
User = self.classes.User
Address = self.classes.Address
# future mode can raise a more specific error at the coercions level
assert_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, "
"or ORM relationship attribute expected",
select(User).join,
User.id == Address.user_id,
)
def test_no_legacy_multi_join_two_element(self):
User = self.classes.User
Order = self.classes.Order
sess = fixture_session()
with expect_raises_message(
sa_exc.InvalidRequestError,
"No 'on clause' argument may be passed when joining to a "
"relationship path as a target",
):
sess.query(User).join(User.orders, Order.items)._compile_context()
def test_no_modern_multi_join_two_element(self):
User = self.classes.User
Order = self.classes.Order
sess = fixture_session()
with expect_raises_message(
sa_exc.InvalidRequestError,
"No 'on clause' argument may be passed when joining to a "
"relationship path as a target",
):
sess.execute(select(User).join(User.orders, Order.items))
def test_kw_only_blocks_legacy_multi_join(self):
User = self.classes.User
Order = self.classes.Order
Item = self.classes.Item
sess = fixture_session()
with expect_raises_message(
TypeError,
r".*join\(\) takes from 2 to 3 positional arguments but "
"4 were given",
):
sess.query(User).join(User.orders, Order.items, Item.keywords)
def test_on_clause_no_right_side_two(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
assert_raises_message(
sa_exc.ArgumentError,
"Join target Address.user_id does not refer to a mapped entity",
sess.query(User).join(Address.user_id)._compile_context,
)
def test_on_clause_no_right_side_two_future(self):
User = self.classes.User
Address = self.classes.Address
stmt = select(User).join(Address.user_id)
assert_raises_message(
sa_exc.ArgumentError,
"Join target Address.user_id does not refer to a mapped entity",
stmt.compile,
)
def test_no_strings_for_single_onclause_legacy_query(self):
User = self.classes.User
sess = fixture_session()
with expect_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, or ORM relationship "
"attribute expected, got 'addresses'",
):
sess.query(User).join("addresses")
def test_no_strings_for_single_onclause_newstyle(self):
User = self.classes.User
with expect_raises_message(
sa_exc.ArgumentError,
"Join target, typically a FROM expression, or ORM relationship "
"attribute expected, got 'addresses'",
):
select(User).join("addresses")
def test_no_strings_for_dual_onclause_legacy_query(self):
User = self.classes.User
Address = self.classes.Address
sess = fixture_session()
with expect_raises_message(
sa_exc.ArgumentError,
"ON clause, typically a SQL expression or ORM relationship "
"attribute expected, got 'addresses'",
):
sess.query(User).join(Address, "addresses")
def test_no_strings_for_dual_onclause_newstyle(self):
User = self.classes.User
Address = self.classes.Address
with expect_raises_message(
sa_exc.ArgumentError,
"ON clause, typically a SQL expression or ORM relationship "
"attribute expected, got 'addresses'.",
):
select(User).join(Address, "addresses")
def test_select_from(self):
Item, Order, User = (
self.classes.Item,
self.classes.Order,
self.classes.User,
)
sess = fixture_session()
self.assert_compile(
sess.query(Item.id)
.select_from(User)
.join(User.orders)
.join(Order.items),
"SELECT items.id AS items_id FROM users JOIN orders ON "
"users.id = orders.user_id JOIN order_items AS order_items_1 "
"ON orders.id = order_items_1.order_id JOIN items ON items.id = "
"order_items_1.item_id",
use_default_dialect=True,
)
# here, the join really wants to add a second FROM clause
# for "Item". but select_from disallows that
self.assert_compile(
sess.query(Item.id)
.select_from(User)
.join(Item, User.id == Item.id),
"SELECT items.id AS items_id FROM users JOIN items "
"ON users.id = items.id",
use_default_dialect=True,
)
class JoinFromSelectableTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = "default"
run_setup_mappers = "once"
@classmethod
def define_tables(cls, metadata):
Table("table1", metadata, Column("id", Integer, primary_key=True))
Table(
"table2",
metadata,
Column("id", Integer, primary_key=True),
Column("t1_id", Integer),
)
@classmethod
def setup_classes(cls):
class T1(cls.Comparable):
pass
class T2(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
table1, table2 = cls.tables.table1, cls.tables.table2
T1, T2 = cls.classes("T1", "T2")
cls.mapper_registry.map_imperatively(T1, table1)
cls.mapper_registry.map_imperatively(T2, table2)
def test_select_mapped_to_mapped_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
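        # aggregate subquery: per-t1_id row counts from table2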
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(subq.c.count, T1.id)
.select_from(subq)
.join(T1, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 "
"GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
"ON anon_1.t1_id = table1.id",
)
def test_select_mapped_to_mapped_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(subq.c.count, T1.id).join(T1, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 "
"GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
"ON anon_1.t1_id = table1.id",
)
def test_select_mapped_to_select_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(subq.c.count, T1.id)
.select_from(T1)
.join(subq, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count FROM table2 GROUP BY table2.t1_id) "
"AS anon_1 ON anon_1.t1_id = table1.id",
)
def test_select_mapped_to_select_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
# without select_from
self.assert_compile(
sess.query(subq.c.count, T1.id).join(subq, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM table1 JOIN "
"(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) "
"AS anon_1 ON anon_1.t1_id = table1.id",
)
# with select_from, same query
self.assert_compile(
sess.query(subq.c.count, T1.id)
.select_from(T1)
.join(subq, subq.c.t1_id == T1.id),
"SELECT anon_1.count AS anon_1_count, table1.id AS table1_id "
"FROM table1 JOIN "
"(SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) "
"AS anon_1 ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_mapped_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
# without select_from
self.assert_compile(
sess.query(T1.id, subq.c.count).join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"JOIN table1 ON anon_1.t1_id = table1.id",
)
# with select_from, same query
self.assert_compile(
sess.query(T1.id, subq.c.count)
.select_from(subq)
.join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"JOIN table1 ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_mapped_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(T1.id, subq.c.count)
.select_from(subq)
.join(T1, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM (SELECT table2.t1_id AS t1_id, count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 JOIN table1 "
"ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_select_explicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(T1.id, subq.c.count)
.select_from(T1)
.join(subq, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"ON anon_1.t1_id = table1.id",
)
def test_mapped_select_to_select_implicit_left(self):
T1, T2 = self.classes.T1, self.classes.T2
sess = fixture_session()
subq = (
sess.query(T2.t1_id, func.count(T2.id).label("count"))
.group_by(T2.t1_id)
.subquery()
)
self.assert_compile(
sess.query(T1.id, subq.c.count).join(subq, subq.c.t1_id == T1.id),
"SELECT table1.id AS table1_id, anon_1.count AS anon_1_count "
"FROM table1 JOIN (SELECT table2.t1_id AS t1_id, "
"count(table2.id) AS count "
"FROM table2 GROUP BY table2.t1_id) AS anon_1 "
"ON anon_1.t1_id = table1.id",
)
class SelfRefMixedTest(fixtures.MappedTest, AssertsCompiledSQL):
run_setup_mappers = "once"
__dialect__ = default.DefaultDialect()
@classmethod
def define_tables(cls, metadata):
Table(
"nodes",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("parent_id", Integer, ForeignKey("nodes.id")),
)
Table(
"sub_table",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("node_id", Integer, ForeignKey("nodes.id")),
)
Table(
"assoc_table",
metadata,
Column("left_id", Integer, ForeignKey("nodes.id")),
Column("right_id", Integer, ForeignKey("nodes.id")),
)
@classmethod
def setup_classes(cls):
class Node(cls.Comparable):
pass
class Sub(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
nodes, assoc_table, sub_table = (
cls.tables.nodes,
cls.tables.assoc_table,
cls.tables.sub_table,
)
Node, Sub = cls.classes("Node", "Sub")
cls.mapper_registry.map_imperatively(
Node,
nodes,
properties={
"children": relationship(
Node,
lazy="select",
join_depth=3,
backref=backref("parent", remote_side=[nodes.c.id]),
),
"subs": relationship(Sub),
"assoc": relationship(
Node,
secondary=assoc_table,
primaryjoin=nodes.c.id == assoc_table.c.left_id,
secondaryjoin=nodes.c.id == assoc_table.c.right_id,
),
},
)
cls.mapper_registry.map_imperatively(Sub, sub_table)
def test_o2m_aliased_plus_o2m(self):
Node, Sub = self.classes.Node, self.classes.Sub
sess = fixture_session()
n1 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(Sub, n1.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN sub_table ON nodes_1.id = sub_table.node_id",
)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(Sub, Node.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN sub_table ON nodes.id = sub_table.node_id",
)
def test_m2m_aliased_plus_o2m(self):
Node, Sub = self.classes.Node, self.classes.Sub
sess = fixture_session()
n1 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.assoc).join(Sub, n1.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
"assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
"assoc_table_1.right_id JOIN sub_table "
"ON nodes_1.id = sub_table.node_id",
)
self.assert_compile(
sess.query(Node).join(n1, Node.assoc).join(Sub, Node.subs),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id "
"FROM nodes JOIN assoc_table AS assoc_table_1 ON nodes.id = "
"assoc_table_1.left_id JOIN nodes AS nodes_1 ON nodes_1.id = "
"assoc_table_1.right_id JOIN sub_table "
"ON nodes.id = sub_table.node_id",
)
class CreateJoinsTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = "default"
def _inherits_fixture(self):
m = MetaData()
base = Table("base", m, Column("id", Integer, primary_key=True))
a = Table(
"a",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("b_id", Integer, ForeignKey("b.id")),
)
b = Table(
"b",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
Column("c_id", Integer, ForeignKey("c.id")),
)
c = Table(
"c",
m,
Column("id", Integer, ForeignKey("base.id"), primary_key=True),
)
class Base:
pass
class A(Base):
pass
class B(Base):
pass
class C(Base):
pass
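        # joined-table inheritance fixture: a, b and c each share a
        # primary key with base via their FK primary key columns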
self.mapper_registry.map_imperatively(Base, base)
self.mapper_registry.map_imperatively(
A,
a,
inherits=Base,
properties={"b": relationship(B, primaryjoin=a.c.b_id == b.c.id)},
)
self.mapper_registry.map_imperatively(
B,
b,
inherits=Base,
properties={"c": relationship(C, primaryjoin=b.c.c_id == c.c.id)},
)
self.mapper_registry.map_imperatively(C, c, inherits=Base)
return A, B, C, Base
def test_double_level_aliased_exists(self):
A, B, C, Base = self._inherits_fixture()
s = fixture_session()
self.assert_compile(
s.query(A).filter(A.b.has(B.c.has(C.id == 5))),
"SELECT a.id AS a_id, base.id AS base_id, a.b_id AS a_b_id "
"FROM base JOIN a ON base.id = a.id WHERE "
"EXISTS (SELECT 1 FROM (SELECT base.id AS base_id, b.id AS "
"b_id, b.c_id AS b_c_id FROM base JOIN b ON base.id = b.id) "
"AS anon_1 WHERE a.b_id = anon_1.b_id AND (EXISTS "
"(SELECT 1 FROM (SELECT base.id AS base_id, c.id AS c_id "
"FROM base JOIN c ON base.id = c.id) AS anon_2 "
"WHERE anon_1.b_c_id = anon_2.c_id AND anon_2.c_id = :id_1"
")))",
)
class JoinToNonPolyAliasesTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = "default"
run_create_tables = None
run_deletes = None
@classmethod
def define_tables(cls, metadata):
Table(
"parent",
metadata,
Column("id", Integer, primary_key=True),
Column("data", String(50)),
)
Table(
"child",
metadata,
Column("id", Integer, primary_key=True),
Column("parent_id", Integer, ForeignKey("parent.id")),
Column("data", String(50)),
)
@classmethod
def setup_mappers(cls):
parent, child = cls.tables.parent, cls.tables.child
class Parent(cls.Comparable):
pass
class Child(cls.Comparable):
pass
mp = cls.mapper_registry.map_imperatively(Parent, parent)
cls.mapper_registry.map_imperatively(Child, child)
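        # "npc" is Child aliased against a derived SELECT; the relationship
        # on Parent targets that alias rather than the child table directly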
derived = select(child).alias()
npc = aliased(Child, derived)
cls.npc = npc
cls.derived = derived
mp.add_property("npc", relationship(npc))
def test_join_parent_child(self):
Parent = self.classes.Parent
sess = fixture_session()
self.assert_compile(
sess.query(Parent)
.join(Parent.npc)
.filter(self.derived.c.data == "x"),
"SELECT parent.id AS parent_id, parent.data AS parent_data "
"FROM parent JOIN (SELECT child.id AS id, "
"child.parent_id AS parent_id, "
"child.data AS data "
"FROM child) AS anon_1 ON parent.id = anon_1.parent_id "
"WHERE anon_1.data = :data_1",
)
def test_join_parent_child_select_from(self):
Parent = self.classes.Parent
npc = self.npc
sess = fixture_session()
self.assert_compile(
sess.query(npc)
.select_from(Parent)
.join(Parent.npc)
.filter(self.derived.c.data == "x"),
"SELECT anon_1.id AS anon_1_id, anon_1.parent_id "
"AS anon_1_parent_id, anon_1.data AS anon_1_data "
"FROM parent JOIN (SELECT child.id AS id, child.parent_id AS "
"parent_id, child.data AS data FROM child) AS anon_1 ON "
"parent.id = anon_1.parent_id WHERE anon_1.data = :data_1",
)
def test_join_select_parent_child(self):
Parent = self.classes.Parent
npc = self.npc
sess = fixture_session()
self.assert_compile(
sess.query(Parent, npc)
.join(Parent.npc)
.filter(self.derived.c.data == "x"),
"SELECT parent.id AS parent_id, parent.data AS parent_data, "
"anon_1.id AS anon_1_id, anon_1.parent_id AS anon_1_parent_id, "
"anon_1.data AS anon_1_data FROM parent JOIN "
"(SELECT child.id AS id, child.parent_id AS parent_id, "
"child.data AS data FROM child) AS anon_1 ON parent.id = "
"anon_1.parent_id WHERE anon_1.data = :data_1",
)
class SelfReferentialTest(fixtures.MappedTest, AssertsCompiledSQL):
run_setup_mappers = "once"
run_inserts = "once"
run_deletes = None
__dialect__ = "default"
@classmethod
def define_tables(cls, metadata):
Table(
"nodes",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("parent_id", Integer, ForeignKey("nodes.id")),
Column("data", String(30)),
)
@classmethod
def setup_classes(cls):
class Node(cls.Comparable):
def append(self, node):
self.children.append(node)
@classmethod
def setup_mappers(cls):
Node, nodes = cls.classes.Node, cls.tables.nodes
cls.mapper_registry.map_imperatively(
Node,
nodes,
properties={
"children": relationship(
Node,
lazy="select",
join_depth=3,
backref=backref("parent", remote_side=[nodes.c.id]),
)
},
)
@classmethod
def insert_data(cls, connection):
Node = cls.classes.Node
sess = Session(connection)
n1 = Node(data="n1")
n1.append(Node(data="n11"))
n1.append(Node(data="n12"))
n1.append(Node(data="n13"))
n1.children[1].append(Node(data="n121"))
n1.children[1].append(Node(data="n122"))
n1.children[1].append(Node(data="n123"))
sess.add(n1)
sess.flush()
sess.close()
def test_join_4_explicit_join(self):
Node = self.classes.Node
sess = fixture_session()
na = aliased(Node)
na2 = aliased(Node)
# this one is a great example of how to show how the API changes;
# while it requires the explicitness of aliased(Node), the whole
# guesswork of joinpoint / aliased goes away and the whole thing
# is simpler
#
# .join("parent", aliased=True)
# .filter(Node.data == "n12")
# .join("parent", aliased=True, from_joinpoint=True)
# .filter(Node.data == "n1")
#
# becomes:
#
# na = aliased(Node)
# na2 = aliased(Node)
#
# ...
# .join(na, Node.parent)
# .filter(na.data == "n12")
# .join(na2, na.parent)
# .filter(na2.data == "n1")
#
q = (
sess.query(Node)
.filter(Node.data == "n122")
.join(na, Node.parent)
.filter(na.data == "n12")
.join(na2, na.parent)
.filter(na2.data == "n1")
)
self.assert_compile(
q,
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
"ON nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
"ON nodes_2.id = nodes_1.parent_id WHERE nodes.data = :data_1 "
"AND nodes_1.data = :data_2 AND nodes_2.data = :data_3",
checkparams={"data_1": "n122", "data_2": "n12", "data_3": "n1"},
)
node = q.first()
eq_(node.data, "n122")
def test_from_self_inside_excludes_outside(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
# n1 is not inside the from_self(), so all cols must be maintained
# on the outside
subq = (
sess.query(Node)
.filter(Node.data == "n122")
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
.subquery()
)
na = aliased(Node, subq)
self.assert_compile(
sess.query(n1, na.id),
"SELECT nodes_1.id AS nodes_1_id, "
"nodes_1.parent_id AS nodes_1_parent_id, "
"nodes_1.data AS nodes_1_data, anon_1.nodes_id AS anon_1_nodes_id "
"FROM nodes AS nodes_1, (SELECT nodes.id AS nodes_id, "
"nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM "
"nodes WHERE nodes.data = :data_1) AS anon_1",
use_default_dialect=True,
)
parent = aliased(Node)
grandparent = aliased(Node)
subq = (
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL)
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
q = sess.query(na, pa, ga).limit(1)
# parent, grandparent *are* inside the from_self(), so they
# should get aliased to the outside.
self.assert_compile(
q,
"SELECT anon_1.nodes_id AS anon_1_nodes_id, "
"anon_1.nodes_parent_id AS anon_1_nodes_parent_id, "
"anon_1.nodes_data AS anon_1_nodes_data, "
"anon_1.nodes_1_id AS anon_1_nodes_1_id, "
"anon_1.nodes_1_parent_id AS anon_1_nodes_1_parent_id, "
"anon_1.nodes_1_data AS anon_1_nodes_1_data, "
"anon_1.nodes_2_id AS anon_1_nodes_2_id, "
"anon_1.nodes_2_parent_id AS anon_1_nodes_2_parent_id, "
"anon_1.nodes_2_data AS anon_1_nodes_2_data "
"FROM (SELECT nodes.id AS nodes_id, nodes.parent_id "
"AS nodes_parent_id, nodes.data AS nodes_data, "
"nodes_1.id AS nodes_1_id, "
"nodes_1.parent_id AS nodes_1_parent_id, "
"nodes_1.data AS nodes_1_data, nodes_2.id AS nodes_2_id, "
"nodes_2.parent_id AS nodes_2_parent_id, nodes_2.data AS "
"nodes_2_data FROM nodes JOIN nodes AS nodes_1 ON "
"nodes_1.id = nodes.parent_id JOIN nodes AS nodes_2 "
"ON nodes_2.id = nodes_1.parent_id "
"WHERE nodes.data = :data_1 AND nodes_1.data = :data_2 AND "
"nodes_2.data = :data_3) AS anon_1 LIMIT :param_1",
{"param_1": 1},
use_default_dialect=True,
)
def test_join_to_self_no_aliases_raises(self):
Node = self.classes.Node
s = fixture_session()
assert_raises_message(
sa.exc.InvalidRequestError,
r"Can't construct a join from Mapper\[Node\(nodes\)\] to "
r"Mapper\[Node\(nodes\)\], they are the same entity",
s.query(Node).join(Node.children)._compile_context,
)
def test_explicit_join_1(self):
Node = self.classes.Node
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
join(Node, n1, "children").join(n2, "children"),
"nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_2(self):
Node = self.classes.Node
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
join(Node, n1, Node.children).join(n2, n1.children),
"nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_3(self):
Node = self.classes.Node
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
join(Node, n1, Node.children).join(
n2, Node.children, join_to_left=False
),
"nodes JOIN nodes AS nodes_1 ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_4(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(n2, n1.children),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
"ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes_1.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_5(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
self.assert_compile(
sess.query(Node).join(n1, Node.children).join(n2, Node.children),
"SELECT nodes.id AS nodes_id, nodes.parent_id AS nodes_parent_id, "
"nodes.data AS nodes_data FROM nodes JOIN nodes AS nodes_1 "
"ON nodes.id = nodes_1.parent_id "
"JOIN nodes AS nodes_2 ON nodes.id = nodes_2.parent_id",
use_default_dialect=True,
)
def test_explicit_join_6(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
node = (
sess.query(Node)
.select_from(join(Node, n1, "children"))
.filter(n1.data == "n122")
.first()
)
assert node.data == "n12"
def test_explicit_join_7(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
node = (
sess.query(Node)
.select_from(join(Node, n1, "children").join(n2, "children"))
.filter(n2.data == "n122")
.first()
)
assert node.data == "n1"
def test_explicit_join_8(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
node = (
sess.query(Node)
.select_from(
join(Node, n1, Node.id == n1.parent_id).join(n2, "children")
)
.filter(n2.data == "n122")
.first()
)
assert node.data == "n1"
def test_explicit_join_9(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
node = (
sess.query(Node)
.select_from(join(Node, n1, "parent").join(n2, "parent"))
.filter(
and_(Node.data == "n122", n1.data == "n12", n2.data == "n1")
)
.first()
)
assert node.data == "n122"
def test_explicit_join_10(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
n2 = aliased(Node)
eq_(
list(
sess.query(Node)
.select_from(join(Node, n1, "parent").join(n2, "parent"))
.filter(
and_(
Node.data == "n122", n1.data == "n12", n2.data == "n1"
)
)
.with_entities(Node.data, n1.data, n2.data)
),
[("n122", "n12", "n1")],
)
def test_join_to_nonaliased(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
eq_(
sess.query(n1).join(n1.parent).filter(Node.data == "n1").all(),
[
Node(parent_id=1, data="n11", id=2),
Node(parent_id=1, data="n12", id=3),
Node(parent_id=1, data="n13", id=4),
],
)
eq_(
sess.query(n1)
.join(Node, n1.parent)
.filter(Node.data == "n1")
.all(),
[
Node(parent_id=1, data="n11", id=2),
Node(parent_id=1, data="n12", id=3),
Node(parent_id=1, data="n13", id=4),
],
)
def test_multiple_explicit_entities_one(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_multiple_explicit_entities_two(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
subq = (
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
eq_(
sess.query(na, pa, ga).first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_multiple_explicit_entities_three(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
subq = (
sess.query(parent, grandparent, Node)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
eq_(
sess.query(pa, ga, na).first(),
(Node(data="n12"), Node(data="n1"), Node(data="n122")),
)
def test_multiple_explicit_entities_four(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
eq_(
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.options(joinedload(Node.children))
.first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_multiple_explicit_entities_five(self):
Node = self.classes.Node
sess = fixture_session()
parent = aliased(Node)
grandparent = aliased(Node)
subq = (
sess.query(Node, parent, grandparent)
.join(parent, Node.parent)
.join(grandparent, parent.parent)
.filter(Node.data == "n122")
.filter(parent.data == "n12")
.filter(grandparent.data == "n1")
.subquery()
)
na = aliased(Node, subq)
pa = aliased(parent, subq)
ga = aliased(grandparent, subq)
eq_(
sess.query(na, pa, ga).options(joinedload(na.children)).first(),
(Node(data="n122"), Node(data="n12"), Node(data="n1")),
)
def test_any(self):
Node = self.classes.Node
sess = fixture_session()
eq_(
sess.query(Node)
.filter(Node.children.any(Node.data == "n1"))
.all(),
[],
)
eq_(
sess.query(Node)
.filter(Node.children.any(Node.data == "n12"))
.all(),
[Node(data="n1")],
)
eq_(
sess.query(Node)
.filter(~Node.children.any())
.order_by(Node.id)
.all(),
[
Node(data="n11"),
Node(data="n13"),
Node(data="n121"),
Node(data="n122"),
Node(data="n123"),
],
)
def test_has(self):
Node = self.classes.Node
sess = fixture_session()
eq_(
sess.query(Node)
.filter(Node.parent.has(Node.data == "n12"))
.order_by(Node.id)
.all(),
[Node(data="n121"), Node(data="n122"), Node(data="n123")],
)
eq_(
sess.query(Node)
.filter(Node.parent.has(Node.data == "n122"))
.all(),
[],
)
eq_(
sess.query(Node).filter(~Node.parent.has()).all(),
[Node(data="n1")],
)
def test_contains(self):
Node = self.classes.Node
sess = fixture_session()
n122 = sess.query(Node).filter(Node.data == "n122").one()
eq_(
sess.query(Node).filter(Node.children.contains(n122)).all(),
[Node(data="n12")],
)
n13 = sess.query(Node).filter(Node.data == "n13").one()
eq_(
sess.query(Node).filter(Node.children.contains(n13)).all(),
[Node(data="n1")],
)
def test_eq_ne(self):
Node = self.classes.Node
sess = fixture_session()
n12 = sess.query(Node).filter(Node.data == "n12").one()
eq_(
sess.query(Node).filter(Node.parent == n12).all(),
[Node(data="n121"), Node(data="n122"), Node(data="n123")],
)
eq_(
sess.query(Node).filter(Node.parent != n12).all(),
[
Node(data="n1"),
Node(data="n11"),
Node(data="n12"),
Node(data="n13"),
],
)
class SelfReferentialM2MTest(fixtures.MappedTest):
run_setup_mappers = "once"
run_inserts = "once"
run_deletes = None
@classmethod
def define_tables(cls, metadata):
Table(
"nodes",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("data", String(30)),
)
Table(
"node_to_nodes",
metadata,
Column(
"left_node_id",
Integer,
ForeignKey("nodes.id"),
primary_key=True,
),
Column(
"right_node_id",
Integer,
ForeignKey("nodes.id"),
primary_key=True,
),
)
@classmethod
def setup_classes(cls):
class Node(cls.Comparable):
pass
@classmethod
def insert_data(cls, connection):
Node, nodes, node_to_nodes = (
cls.classes.Node,
cls.tables.nodes,
cls.tables.node_to_nodes,
)
cls.mapper_registry.map_imperatively(
Node,
nodes,
properties={
"children": relationship(
Node,
lazy="select",
secondary=node_to_nodes,
primaryjoin=nodes.c.id == node_to_nodes.c.left_node_id,
secondaryjoin=nodes.c.id == node_to_nodes.c.right_node_id,
)
},
)
sess = Session(connection)
n1 = Node(data="n1")
n2 = Node(data="n2")
n3 = Node(data="n3")
n4 = Node(data="n4")
n5 = Node(data="n5")
n6 = Node(data="n6")
n7 = Node(data="n7")
n1.children = [n2, n3, n4]
n2.children = [n3, n6, n7]
n3.children = [n5, n4]
sess.add(n1)
sess.add(n2)
sess.add(n3)
sess.add(n4)
sess.flush()
sess.close()
def test_any(self):
Node = self.classes.Node
sess = fixture_session()
eq_(
sess.query(Node)
.filter(Node.children.any(Node.data == "n3"))
.order_by(Node.data)
.all(),
[Node(data="n1"), Node(data="n2")],
)
def test_contains(self):
Node = self.classes.Node
sess = fixture_session()
n4 = sess.query(Node).filter_by(data="n4").one()
eq_(
sess.query(Node)
.filter(Node.children.contains(n4))
.order_by(Node.data)
.all(),
[Node(data="n1"), Node(data="n3")],
)
eq_(
sess.query(Node)
.filter(not_(Node.children.contains(n4)))
.order_by(Node.data)
.all(),
[
Node(data="n2"),
Node(data="n4"),
Node(data="n5"),
Node(data="n6"),
Node(data="n7"),
],
)
def test_explicit_join(self):
Node = self.classes.Node
sess = fixture_session()
n1 = aliased(Node)
eq_(
sess.query(Node)
.select_from(join(Node, n1, "children"))
.filter(n1.data.in_(["n3", "n7"]))
.order_by(Node.id)
.all(),
[Node(data="n1"), Node(data="n2")],
)
class JoinLateralTest(fixtures.MappedTest, AssertsCompiledSQL):
__dialect__ = default.DefaultDialect(supports_native_boolean=True)
run_setup_bind = None
run_setup_mappers = "once"
run_create_tables = None
@classmethod
def define_tables(cls, metadata):
Table(
"people",
metadata,
Column("people_id", Integer, primary_key=True),
Column("age", Integer),
Column("name", String(30)),
)
Table(
"bookcases",
metadata,
Column("bookcase_id", Integer, primary_key=True),
Column(
"bookcase_owner_id", Integer, ForeignKey("people.people_id")
),
Column("bookcase_shelves", Integer),
Column("bookcase_width", Integer),
)
Table(
"books",
metadata,
Column("book_id", Integer, primary_key=True),
Column(
"bookcase_id", Integer, ForeignKey("bookcases.bookcase_id")
),
Column("book_owner_id", Integer, ForeignKey("people.people_id")),
Column("book_weight", Integer),
)
@classmethod
def setup_classes(cls):
class Person(cls.Comparable):
pass
class Bookcase(cls.Comparable):
pass
class Book(cls.Comparable):
pass
@classmethod
def setup_mappers(cls):
Person, Bookcase, Book = cls.classes("Person", "Bookcase", "Book")
people, bookcases, books = cls.tables("people", "bookcases", "books")
cls.mapper_registry.map_imperatively(Person, people)
cls.mapper_registry.map_imperatively(
Bookcase,
bookcases,
properties={
"owner": relationship(Person),
"books": relationship(Book),
},
)
cls.mapper_registry.map_imperatively(Book, books)
def test_select_subquery(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
subq = (
s.query(Book.book_id)
.correlate(Person)
.filter(Person.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(Person, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT people.people_id AS people_people_id, "
"people.age AS people_age, people.name AS people_name, "
"anon_1.book_id AS anon_1_book_id "
"FROM people JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE people.people_id = books.book_owner_id) AS anon_1 ON true",
)
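        # LATERAL allows the subquery to reference columns of the preceding
        # FROM item (people.people_id here); joining "ON true" then yields one
        # row per correlated book_id.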
def test_select_subquery_aas_implicit_correlate(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
s.query(Book.book_id)
.filter(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_select_subquery_aas_implicit_correlate_coreonly(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
select(Book.book_id)
.where(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_select_subquery_aas_explicit_correlate_coreonly(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
select(Book.book_id)
.correlate(pa)
.where(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_select_subquery_aas_explicit_correlate(self):
Person, Book = self.classes("Person", "Book")
s = fixture_session()
stmt = s.query(Person).subquery()
pa = aliased(Person, stmt)
subq = (
s.query(Book.book_id)
.correlate(pa)
.filter(pa.people_id == Book.book_owner_id)
.subquery()
.lateral()
)
stmt = s.query(pa, subq.c.book_id).join(subq, true())
self.assert_compile(
stmt,
"SELECT anon_1.people_id AS anon_1_people_id, "
"anon_1.age AS anon_1_age, anon_1.name AS anon_1_name, "
"anon_2.book_id AS anon_2_book_id "
"FROM "
"(SELECT people.people_id AS people_id, people.age AS age, "
"people.name AS name FROM people) AS anon_1 "
"JOIN LATERAL "
"(SELECT books.book_id AS book_id FROM books "
"WHERE anon_1.people_id = books.book_owner_id) AS anon_2 ON true",
)
def test_from_function(self):
Bookcase = self.classes.Bookcase
s = fixture_session()
srf = lateral(func.generate_series(1, Bookcase.bookcase_shelves))
self.assert_compile(
s.query(Bookcase).join(srf, true()),
"SELECT bookcases.bookcase_id AS bookcases_bookcase_id, "
"bookcases.bookcase_owner_id AS bookcases_bookcase_owner_id, "
"bookcases.bookcase_shelves AS bookcases_bookcase_shelves, "
"bookcases.bookcase_width AS bookcases_bookcase_width "
"FROM bookcases JOIN "
"LATERAL generate_series(:generate_series_1, "
"bookcases.bookcase_shelves) AS anon_1 ON true",
)
def test_from_function_aas(self):
Bookcase = self.classes.Bookcase
s = fixture_session()
subq = s.query(Bookcase).subquery()
ba = aliased(Bookcase, subq)
srf = lateral(func.generate_series(1, ba.bookcase_shelves))
self.assert_compile(
s.query(ba).join(srf, true()),
"SELECT anon_1.bookcase_id AS anon_1_bookcase_id, "
"anon_1.bookcase_owner_id AS anon_1_bookcase_owner_id, "
"anon_1.bookcase_shelves AS anon_1_bookcase_shelves, "
"anon_1.bookcase_width AS anon_1_bookcase_width "
"FROM (SELECT bookcases.bookcase_id AS bookcase_id, "
"bookcases.bookcase_owner_id AS bookcase_owner_id, "
"bookcases.bookcase_shelves AS bookcase_shelves, "
"bookcases.bookcase_width AS bookcase_width FROM bookcases) "
"AS anon_1 "
"JOIN LATERAL "
"generate_series(:generate_series_1, anon_1.bookcase_shelves) "
"AS anon_2 ON true",
)
class JoinRawTablesWLegacyTest(QueryTest, AssertsCompiledSQL):
__dialect__ = "default"
@testing.combinations(
(
lambda sess, User, Address: sess.query(User).join(Address),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN addresses ON users.id = addresses.user_id",
),
(
lambda sess, user_table, address_table: sess.query(
user_table
).join(address_table),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users JOIN addresses ON users.id = addresses.user_id",
),
(
lambda sess, User, Address, Order: sess.query(User)
.outerjoin(Order)
.join(Address),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users LEFT OUTER JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON addresses.id = orders.address_id",
),
(
lambda sess, user_table, address_table, order_table: sess.query(
user_table
)
.outerjoin(order_table)
.join(address_table),
"SELECT users.id AS users_id, users.name AS users_name FROM "
"users LEFT OUTER JOIN orders ON users.id = orders.user_id "
"JOIN addresses ON addresses.id = orders.address_id",
),
)
def test_join_render(self, spec, expected):
User, Address, Order = self.classes("User", "Address", "Order")
user_table, address_table, order_table = self.tables(
"users", "addresses", "orders"
)
sess = fixture_session()
q = testing.resolve_lambda(spec, **locals())
self.assert_compile(q, expected)
self.assert_compile(
q.set_label_style(LABEL_STYLE_TABLENAME_PLUS_COL).statement,
expected,
)
def test_core_round_trip(self):
user_table, address_table = self.tables("users", "addresses")
sess = fixture_session()
q = (
sess.query(user_table)
.join(address_table)
.where(address_table.c.email_address.startswith("ed"))
)
eq_(q.all(), [(8, "ed"), (8, "ed"), (8, "ed")])
f71ab4470632fb3e14e414c8dba8614f764a6ebe | 8,218 | py | Python | bokeh_root_cmd/main.py | ideonate/bokeh-root-cmd | c26eee1414d3305749a8724b8740d9a4eaca0cf7 | ["Apache-2.0"] | stars: 1 | issues: 4 | forks: 2
"""Command line wrapper to serve one or more named Bokeh scripts or folders."""
import logging
import os
import re
import pathlib
import tempfile
from typing import Any, Dict, Tuple
import bokeh.server.views
import click
from bokeh.application.application import Application
from bokeh.command.util import build_single_handler_application
from bokeh.server.server import Server as _BkServer
from bokeh.server.views.root_handler import RootHandler
from .readycheck import create_ready_app
FORMAT = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
logging.basicConfig(format=FORMAT)
root_logger = logging.getLogger()
root_logger.setLevel(logging.INFO)
logger = logging.getLogger('bokeh_root_cmd')
class BokehServer:
def __init__(self, prefix=''):
self.prefix = prefix
if self.prefix != '':
self.html_file = None
def __del__(self):
if self.prefix != '' and self.html_file is not None:
self.html_file.close()
def _get_default_index_html(self):
return str(pathlib.Path(bokeh.server.views.__file__).parent / "app_index.html")
def _get_index_html(self):
"""
Where there is a prefix (e.g. /user/dan/dash-test) supplied, Bokeh/Panel's server doesn't work for us.
It doesn't distinguish between server-side and client-side URLs.
We want it to serve sub-apps at the URL /PanelNotebook
(so accessible at /user/dan/dash-test/PanelNotebook behind the cdsdashboards reverse proxy)
but for URLs on the index page to point the browser to /user/dan/dash-test/PanelNotebook.
Setting prefix in Bokeh results in correct client-side behavior, but unhelpfully also
serves at the prefix (So, combined with cdsdashboards reverse proxy it is only accessible at
/user/dan/dash-test/user/dan/dash-test/PanelNotebook).
"""
if hasattr(self, 'html_file'):
if self.html_file is None:
self.html_file = tempfile.NamedTemporaryFile("wt", suffix='.html')
with open(self._get_default_index_html(), "rt") as f:
for r in f.readlines():
r = re.sub(r'\{\{\s*prefix\s*\}\}', self.prefix, r)
self.html_file.write(r)
self.html_file.flush()
return self.html_file.name
return self._get_default_index_html()
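    # For example, with prefix "/user/dan/dash-test", a template line containing
    # "{{ prefix }}/app" is rewritten to "/user/dan/dash-test/app" before the
    # temporary index file is served (illustrative values).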
@staticmethod
def _get_server_class():
return _BkServer
@staticmethod
def _make_app(command: str, url: str = "/", debug: bool = False) -> Application:
cwd_original = os.getcwd()
# Command can be absolute, or could be relative to cwd
app_py_path = os.path.join(os.getcwd(), command)
if os.path.isdir(app_py_path):
dirname = app_py_path
else:
dirname = os.path.dirname(app_py_path)
        if app_py_path == dirname:
logger.debug("Fetching folder {}".format(app_py_path))
else:
logger.debug("Fetching script {}".format(app_py_path))
if os.path.isdir(dirname):
logger.debug("Changing working dir to {}".format(dirname))
os.chdir(dirname)
app = build_single_handler_application(app_py_path, [url])
os.chdir(cwd_original)
logger.debug("Changing working dir back to {}".format(cwd_original))
return app
@classmethod
def _is_single_app(cls, cmd: str):
"""
Return True if the path specified in `cmd` is exactly one app: either a single py/ipynb file
or a folder containing a main.py or main.ipynb file.
"""
cmd_path = pathlib.Path(cmd)
return cmd_path.is_file() or (cmd_path / "main.py").is_file() or (cmd_path / "main.ipynb").is_file()
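    # e.g. "dashboard.py", "notebook.ipynb", or a folder "myapp/" containing a
    # main.py/main.ipynb each count as a single app (illustrative paths).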
@classmethod
def _get_applications(cls, command: Tuple[str], debug=False) -> Dict[str, Application]:
if len(command) == 1 and cls._is_single_app(command[0]):
return {"/": cls._make_app(command[0], debug)}
apps = {}
for cmd in command:
if cls._is_single_app(cmd):
cmds = [cmd]
else:
cmd_path = pathlib.Path(cmd)
cmds = list(cmd_path.glob("*.ipynb")) + list(cmd_path.glob("*.py"))
for singlecmd in cmds:
                application = cls._make_app(singlecmd, debug=debug)
route = application.handlers[0].url_path()
apps[route] = application
return apps
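    # For example (hypothetical layout): command=("apps",) where apps/ holds
    # a.py and b.ipynb yields routes like {"/a": <Application>, "/b": <Application>}.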
def _get_server_kwargs(self, port, ip, allow_websocket_origin, is_single_app) -> Dict[str, Any]:
server_kwargs = {"port": port, "ip": ip}
if allow_websocket_origin:
server_kwargs["allow_websocket_origin"] = list(allow_websocket_origin)
if not is_single_app:
index_html = self._get_index_html()
logger.debug("Using HTML template %s", index_html)
server_kwargs.update(
{"use_index": True, "redirect_root": True, "index": index_html}
)
return server_kwargs
def run(self, port, ip, debug, allow_websocket_origin, prefix, command):
logger.info("Starting %s", type(self).__name__)
if debug:
root_logger.setLevel(logging.DEBUG)
logger.debug("ip = %s", ip)
logger.debug("port = %s", port)
logger.debug("debug = %s", debug)
logger.debug("allow_websocket_origin = %s", allow_websocket_origin)
logger.debug("prefix = %s", prefix)
logger.debug("command = %s", command)
applications = self._get_applications(command, debug)
applications["/ready-check"] = create_ready_app()
logger.debug("applications = %s", list(applications.keys()))
server_kwargs = self._get_server_kwargs(port, ip, allow_websocket_origin, len(applications) <= 2)
if debug:
server_kwargs["log_level"]="debug"
server_kwargs["log_format"]=FORMAT
logger.debug("server_kwargs = %s", server_kwargs)
server = self._get_server_class()(applications, **server_kwargs)
server.run_until_shutdown()
class PanelServer(BokehServer):
@staticmethod
def _get_server_class():
from panel.io.server import Server as _PnServer
return _PnServer
def _get_default_index_html(self):
from panel.io.server import INDEX_HTML as _PANEL_INDEX_HTML
return _PANEL_INDEX_HTML
@click.command()
@click.option("--port", default=8888, type=click.INT, help="port for the proxy server to listen on")
@click.option("--ip", default=None, help="Address to listen on")
@click.option(
"--allow-websocket-origin", default=None, multiple=True, help="Web socket origins allowed"
)
@click.option("--debug/--no-debug", default=False, help="To display debug level logs")
@click.option(
"--server", default="bokeh", type=click.STRING, help="The server to use. One of bokeh or panel. Default is bokeh."
)
@click.option(
"--prefix", default="", type=click.STRING, help="URL prefix (for"
)
@click.argument("command", nargs=-1, required=True)
def run(port, ip, debug, allow_websocket_origin, server, prefix, command):
if server=="panel":
server = PanelServer(prefix)
else:
server = BokehServer(prefix)
server.run(
port=port,
ip=ip,
debug=debug,
allow_websocket_origin=allow_websocket_origin,
prefix=prefix,
command=command,
)
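# Example invocations (illustrative script name, paths and ports):
#   bokeh-root-cmd --port 8888 dashboards/
#   bokeh-root-cmd --server panel --prefix /user/dan/dash-test PanelNotebook.ipynb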
# Bokeh/ Panel can serve an index page with a list of applications at "/"
# The below is a workaround to avoid including the 'ready-check' application
def _root_handler_initialize_without_ready_check(self, *args, **kw):
kw["applications"]=kw["applications"].copy()
if "/ready-check" in kw["applications"]:
kw["applications"].pop("/ready-check")
self.applications = kw["applications"]
self.prefix = kw["prefix"]
self.index = kw["index"]
self.use_redirect = kw["use_redirect"]
RootHandler.initialize = _root_handler_initialize_without_ready_check
if __name__ == "__main__":
try:
run()
except SystemExit as se:
logger.error("Caught SystemExit {}".format(se))
f71ab48c915466e77fb663ba45f13600446b8c5f | 1,481 | py | Python | invoices/api/viewsets.py | elcolie/zero-to-deploy | 6191a33ef55af7c550c0e529a4e373bfe40bc014 | ["MIT"] | stars: null | issues: 6 | forks: null
from django_filters import rest_framework as filters
from rest_framework import viewsets
from rest_framework.filters import SearchFilter, OrderingFilter
from rest_framework.permissions import IsAuthenticated, BasePermission
from invoices.api.serializers import InvoiceSerializer
from invoices.models import Invoice
class IsStaffPermission(BasePermission):
def has_permission(self, request, view):
return request.user.is_staff
class InvoiceFilter(filters.FilterSet):
customer_username = filters.CharFilter(name='order__customer__username', lookup_expr='icontains')
customer_first_name = filters.CharFilter(name='order__customer__first_name', lookup_expr='icontains')
created_at = filters.DateTimeFilter(name='created_at', lookup_expr='gte')
updated_at = filters.DateTimeFilter(name='updated_at', lookup_expr='gte')
class Meta:
model = Invoice
fields = [
'customer_username',
'customer_first_name',
'created_at',
'updated_at',
]
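# Example request using these filters (illustrative URL and values):
#   GET /api/invoices/?customer_username=ann&created_at=2020-01-01T00:00:00Z
# created_at/updated_at use lookup_expr='gte', so they match "on or after".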
class InvoiceViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated, IsStaffPermission)
queryset = Invoice.objects.all()
serializer_class = InvoiceSerializer
filter_backends = (filters.DjangoFilterBackend, SearchFilter, OrderingFilter)
filter_class = InvoiceFilter
search_fields = (
'order__customer__username',
'order__customer__first_name',
'order__customer__last_name',
)
f71ab63c599cebc6ea849c3b34a83ddb2a9b964d | 1,266 | py | Python | tests/test_experimental.py | daoluan/pandaSDMX | 2efcb5a429a5306efd89bed4cd55946d1ad5067b | ["Apache-2.0"] | stars: null | issues: null | forks: null
"""Tests for experimental code using pandas objects for internal storage.
See pandasdmx.experimental for more information.
"""
from pandasdmx.experimental import DataSet as PandasDataSet
from pandasdmx.model import (
AttributeValue,
DataAttribute,
DataSet,
Key,
Observation,
)
import pytest
pytestmark = pytest.mark.experimental
# Run the tests on both the standard DataSet class, and the experimental,
# PandasDataSet version
@pytest.mark.parametrize('DataSetType', [DataSet, PandasDataSet])
def test_add_obs(DataSetType):
# Create a Key and Attributes
key = Key(CURRENCY='NZD', CURRENCY_DENOM='EUR',
TIME_PERIOD='2018-01-01')
obs_status = DataAttribute(id='OBS_STATUS')
attr = {'OBS_STATUS': AttributeValue(value_for=obs_status, value='A')}
obs = []
for day, value in enumerate([5, 6, 7]):
key = key.copy(TIME_PERIOD='2018-01-{:02d}'.format(day))
obs.append(Observation(dimension=key, value=value,
attached_attribute=attr))
ds = DataSetType()
ds.add_obs(obs)
# PandasDataSet does not store Observation objects internally, but should
# emit them when the .obs property is accessed
assert all(a == b for a, b in zip(ds.obs, obs))
f71ab6ca83c0cccdc98f7bc0e6a9815f90dc10b0 | 4,878 | py | Python | bamboo/unit_tests/test_unit_layer_gather.py | steffi7574/lbann | 6a6b86d3cbcf4ca50730c652a5014f7cb3afa5e6 | ["Apache-2.0"] | stars: null | issues: 5 | forks: null
import functools
import operator
import os
import os.path
import sys
import numpy as np
# Bamboo utilities
current_file = os.path.realpath(__file__)
current_dir = os.path.dirname(current_file)
sys.path.insert(0, os.path.join(os.path.dirname(current_dir), 'common_python'))
import tools
# ==============================================
# Objects for Python data reader
# ==============================================
# Note: The Python data reader imports this file as a module and calls
# the functions below to ingest data.
# Data
input_size = 23
output_size = 15
seed = 202101280
# Sample access functions
def get_sample(index):
np.random.seed(seed+index)
values = [np.random.normal() for _ in range(input_size)]
indices = [
np.random.uniform(-1, input_size+1)
for _ in range(output_size)
]
return values + indices
def num_samples():
return 25
def sample_dims():
return (input_size+output_size,)
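# Reference gather semantics that this test assumes (a minimal numpy sketch,
# not part of LBANN itself): out-of-range indices yield zeros, mirroring the
# expected-value loop in construct_model below.
def _reference_gather(values, indices):
    out = np.zeros(len(indices))
    for j, idx in enumerate(indices):
        if 0 <= idx < len(values):  # only in-range (possibly fractional) indices hit
            out[j] = values[int(idx)]
    return out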
# ==============================================
# Setup LBANN experiment
# ==============================================
def setup_experiment(lbann):
"""Construct LBANN experiment.
Args:
lbann (module): Module for LBANN Python frontend
"""
mini_batch_size = num_samples() // 2
trainer = lbann.Trainer(mini_batch_size)
model = construct_model(lbann)
data_reader = construct_data_reader(lbann)
optimizer = lbann.NoOptimizer()
return trainer, model, data_reader, optimizer
def construct_model(lbann):
"""Construct LBANN model.
Args:
lbann (module): Module for LBANN Python frontend
"""
# Input data
# Note: Sum with a weights layer so that gradient checking will
# verify that error signals are correct.
x = lbann.Identity(lbann.Input())
x_slice = lbann.Slice(
x,
slice_points=tools.str_list([0,input_size,input_size+output_size]),
)
x0_weights = lbann.Weights(
optimizer=lbann.SGD(),
initializer=lbann.ConstantInitializer(value=0.0),
name='input_weights',
)
x0 = lbann.Sum(
lbann.Identity(x_slice),
lbann.WeightsLayer(weights=x0_weights, dims=tools.str_list(input_size)),
)
x1 = lbann.Identity(x_slice)
# Apply gather
y0 = lbann.Gather(x0, x1)
y1 = lbann.Concatenation([
lbann.Constant(value=i+1, num_neurons='1')
for i in range(output_size)
])
y = lbann.Multiply(y0, y1)
z = lbann.L2Norm2(y)
# Objects for LBANN model
layers = list(lbann.traverse_layer_graph(x))
metric = lbann.Metric(z, name='obj')
obj = lbann.ObjectiveFunction(z)
callbacks = []
# Compute expected metric value
vals = []
for i in range(num_samples()):
x = get_sample(i)
x0 = x[:input_size]
x1 = x[input_size:]
y0 = np.zeros(output_size)
for i in range(output_size):
if 0 <= x1[i] < input_size:
y0[i] = x0[int(x1[i])]
z = 0
for i in range(output_size):
z += ((i+1)*y0[i]) ** 2
vals.append(z)
val = np.mean(vals)
tol = 8 * val * np.finfo(np.float32).eps
callbacks.append(lbann.CallbackCheckMetric(
metric=metric.name,
lower_bound=val-tol,
upper_bound=val+tol,
error_on_failure=True,
execution_modes='test'))
# Gradient checking
callbacks.append(lbann.CallbackCheckGradients(error_on_failure=True))
# Construct model
num_epochs = 0
return lbann.Model(num_epochs,
layers=layers,
objective_function=obj,
metrics=[metric],
callbacks=callbacks)
def construct_data_reader(lbann):
"""Construct Protobuf message for Python data reader.
The Python data reader will import the current Python file to
access the sample access functions.
Args:
lbann (module): Module for LBANN Python frontend
"""
# Note: The training data reader should be removed when
# https://github.com/LLNL/lbann/issues/1098 is resolved.
message = lbann.reader_pb2.DataReader()
message.reader.extend([
tools.create_python_data_reader(
lbann,
current_file,
'get_sample',
'num_samples',
'sample_dims',
'train'
)
])
message.reader.extend([
tools.create_python_data_reader(
lbann,
current_file,
'get_sample',
'num_samples',
'sample_dims',
'test'
)
])
return message
# ==============================================
# Setup PyTest
# ==============================================
# Create test functions that can interact with PyTest
for _test_func in tools.create_tests(setup_experiment, __file__):
globals()[_test_func.__name__] = _test_func
f71ab703aedaaca8057f1f775130036f5d78f355 | 1,470 | py | Python | datasets/imagename_dataset.py | bigvideoresearch/SCC | f26cdb6aaf248b5112812dbdac1f1b5086aebccc | ["MIT"] | stars: 5 | issues: null | forks: 1
from runner_master import runner
import os
import io
import torch
import logging
from PIL import Image, ImageFile
from runner_master.runner.data import datasets
# to fix "OSError: image file is truncated"
ImageFile.LOAD_TRUNCATED_IMAGES = True
class ImagenameDataset(datasets.ImglistDatasetV2):
def getitem(self, index):
line = self.imglist[index].strip('\n')
tokens = line.split(' ', maxsplit=1)
#if len(tokens) != 2:
# raise RuntimeError('split tokens < 2')
image_name, extra_str = tokens[0], tokens[1]
if self.root != '' and image_name.startswith('/'):
raise RuntimeError('root not empty but image_name starts with "/"')
path = os.path.join(self.root, image_name)
sample = dict()
sample['image_name'] = image_name
try:
if not self.dummy_read:
filebytes = self.reader(path)
buff = io.BytesIO(filebytes)
if self.dummy_size is not None:
sample['data'] = torch.rand(self.dummy_size)
else:
image = Image.open(buff)
sample['data'] = self.transform_image(image)
for key, value in self.transform_extra(extra_str).items():
sample[key] = value
except Exception as e:
logging.error('[{}] broken'.format(path))
raise e
return sample
runner.patch_dataset('ImagenameDataset', ImagenameDataset)
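# Each imglist line is expected to look like (illustrative example):
#   relative/path/to/image.jpg label_or_extra_string
# i.e. an image name and an extra string split on the first space; the extra
# string is decoded by self.transform_extra into additional sample keys.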
f71ab7463ba7c30d460e7f06958ca0812996c4f2 | 1,439 | py | Python | setup.py | genevera/slack-backup | 0ffb9f940608c364249d027c0f96ecf08dd7e59a | [
"BSD-3-Clause"
] | null | null | null | setup.py | genevera/slack-backup | 0ffb9f940608c364249d027c0f96ecf08dd7e59a | [
"BSD-3-Clause"
] | null | null | null | setup.py | genevera/slack-backup | 0ffb9f940608c364249d027c0f96ecf08dd7e59a | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
"""
Setup for the slack-backup project
"""
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(name="slack-backup",
packages=["slack_backup"],
version="0.7",
description="Make copy of slack converstaions",
author="Roman Dobosz",
author_email="gryf73@gmail.com",
url="https://github.com/gryf/slack-backup",
download_url="https://github.com/gryf/slack-backup",
keywords=["chat", "backup", "history", "slack"],
install_requires=["sqlalchemy", "slackclient"],
scripts=["scripts/slack-backup"],
classifiers=["Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Database :: Front-Ends",
"Topic :: Communications :: Chat",
"Topic :: Text Processing :: Markup",
"Topic :: Text Processing :: Markup :: HTML"],
long_description=open("README.rst").read(),
options={'test': {'verbose': False,
'coverage': False}})
f71ab74bbb37c06ec87292445a3616dd3669f146 | 7,850 | py | Python | openprompt/prompts/one2one_verbalizer.py | hlzhang109/OpenPrompt | 8a1ec1ceac3805a11b09dda9b96ad7406d222f26 | [
"Apache-2.0"
] | null | null | null | openprompt/prompts/one2one_verbalizer.py | hlzhang109/OpenPrompt | 8a1ec1ceac3805a11b09dda9b96ad7406d222f26 | [
"Apache-2.0"
] | null | null | null | openprompt/prompts/one2one_verbalizer.py | hlzhang109/OpenPrompt | 8a1ec1ceac3805a11b09dda9b96ad7406d222f26 | [
"Apache-2.0"
] | null | null | null | import json
from transformers.tokenization_utils import PreTrainedTokenizer
from yacs.config import CfgNode
from openprompt.data_utils.data_utils import InputFeatures
import re
from openprompt import Verbalizer
from typing import *
import torch
import torch.nn as nn
import torch.nn.functional as F
from openprompt.utils.logging import logger
class One2oneVerbalizer(Verbalizer):
r"""
    The basic manually defined verbalizer class. This class inherits from the :obj:`Verbalizer` class.
    It restricts the use of label words to one word per label. For a verbalizer with fewer constraints,
    please use the basic ManualVerbalizer.
Args:
tokenizer (:obj:`PreTrainedTokenizer`): The tokenizer of the current pre-trained model to point out the vocabulary.
classes (:obj:`classes`): The classes (or labels) of the current task.
num_classes (:obj:`int`): Optional. The number of classes of the verbalizer. Only one of `classes` and `num_classes` should be used.
label_words (:obj:`Union[Sequence[str], Mapping[str, str]]`, optional): The label words that are projected by the labels.
prefix (:obj:`str`, optional): The prefix string of the verbalizer. (used in PLMs like RoBERTa, which is sensitive to prefix space)
multi_token_handler (:obj:`str`, optional): The handling strategy for multiple tokens produced by the tokenizer.
"""
def __init__(self,
tokenizer: PreTrainedTokenizer,
num_classes: Optional[int] = None,
classes: Optional[List] = None,
label_words: Optional[Union[Sequence[str], Mapping[str, str]]] = None,
prefix: Optional[str] = " ",
multi_token_handler: Optional[str] = "first",
):
super().__init__(tokenizer=tokenizer, num_classes=num_classes, classes=classes)
self.prefix = prefix
self.multi_token_handler = multi_token_handler
self.label_words = label_words
def on_label_words_set(self):
super().on_label_words_set()
self.label_words = self.add_prefix(self.label_words, self.prefix)
self.generate_parameters()
@staticmethod
def add_prefix(label_words, prefix):
r"""Add prefix to label words. For example, if a label words is in the middle of a template,
the prefix should be ``' '``.
Args:
label_words (:obj:`Union[Sequence[str], Mapping[str, str]]`, optional): The label words that are projected by the labels.
prefix (:obj:`str`, optional): The prefix string of the verbalizer.
Returns:
:obj:`Sequence[str]`: New label words with prefix.
"""
new_label_words = []
if isinstance(label_words[0], list):
assert max([len(w) for w in label_words]) == 1, "Providing multiple label words, you should use other verbalizers instead."
label_words = [w[0] for w in label_words]
for word in label_words:
if word.startswith("<!>"):
new_label_words.append(word.split("<!>")[1])
else:
new_label_words.append(prefix + word)
return new_label_words
    def generate_parameters(self) -> None:
        r"""In the basic manual verbalizer, the parameters are generated from the label words directly.
        In this implementation, each label word should not be tokenized into more than one token.
"""
words_ids = []
for word in self.label_words:
word_ids = self.tokenizer.encode(word, add_special_tokens=False)
if len(word_ids) > 1:
logger.warning("Word {} is split into multiple tokens: {}. \
If this is not what you expect, try using another word for this verbalizer" \
.format(word, self.tokenizer.convert_ids_to_tokens(word_ids)))
words_ids.append(word_ids)
max_len = max([len(ids) for ids in words_ids])
words_ids_mask = [[1]*len(ids) + [0]*(max_len-len(ids)) for ids in words_ids]
words_ids = [ids+[0]*(max_len-len(ids)) for ids in words_ids]
words_ids_tensor = torch.tensor(words_ids)
words_ids_mask = torch.tensor(words_ids_mask)
self.label_words_ids = nn.Parameter(words_ids_tensor, requires_grad=False)
self.label_words_mask = nn.Parameter(words_ids_mask, requires_grad=False)
def project(self,
logits: torch.Tensor,
**kwargs,
) -> torch.Tensor:
r"""
        Project the logits over the vocabulary onto the logits of the label words.
Args:
            logits (:obj:`torch.Tensor`): The original logits over the vocabulary.
Returns:
            :obj:`torch.Tensor`: The logits of the label words.
"""
label_words_logits = logits[:, self.label_words_ids]
label_words_logits = self.handle_multi_token(label_words_logits, self.label_words_mask)
return label_words_logits
def process_logits(self, logits: torch.Tensor, **kwargs):
r"""A whole framework to process the original logits over the vocabulary, which contains four steps:
(1) Project the logits into logits of label words
(2) Normalize over all label words
        (3) Calibrate (optional)
        (4) Convert the normalized probabilities back to logits
Args:
            logits (:obj:`torch.Tensor`): The original logits.
Returns:
(:obj:`torch.Tensor`): The final processed logits over the label words set.
"""
# project
label_words_logits = self.project(logits, **kwargs) #Output: (batch_size, num_classes) or (batch_size, num_classes, num_label_words_per_label)
# normalize
label_words_probs = self.normalize(label_words_logits)
# calibrate
if hasattr(self, "_calibrate_logits") and self._calibrate_logits is not None:
label_words_probs = self.calibrate(label_words_probs=label_words_probs)
# convert to logits
label_words_logits = torch.log(label_words_probs+1e-15)
return label_words_logits
def normalize(self, logits: torch.Tensor) -> torch.Tensor:
"""
Given logits regarding the entire vocabulary, return the probs over the label words set.
Args:
logits (:obj:`Tensor`): The logits over the entire vocabulary.
Returns:
            :obj:`Tensor`: The probabilities over the label words set.
"""
batch_size = logits.shape[0]
return F.softmax(logits.reshape(batch_size, -1), dim=-1).reshape(*logits.shape)
def calibrate(self, label_words_probs: torch.Tensor, **kwargs) -> torch.Tensor:
r"""
Args:
label_words_probs (:obj:`torch.Tensor`): The probability distribution of the label words with the shape of [``batch_size``, ``num_classes``, ``num_label_words_per_class``]
Returns:
:obj:`torch.Tensor`: The calibrated probability of label words.
"""
shape = label_words_probs.shape
assert self._calibrate_logits.dim() == 1, "self._calibrate_logits are not 1-d tensor"
calibrate_label_words_probs = self.normalize(self.project(self._calibrate_logits.unsqueeze(0), **kwargs))
assert calibrate_label_words_probs.shape[1:] == label_words_probs.shape[1:] \
and calibrate_label_words_probs.shape[0]==1, "shape not match"
label_words_probs /= (calibrate_label_words_probs+1e-15)
# normalize # TODO Test the performance
norm = label_words_probs.reshape(shape[0], -1).sum(dim=-1,keepdim=True) # TODO Test the performance of detaching()
label_words_probs /= norm
return label_words_probs
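

# A minimal usage sketch (not part of the original file). The checkpoint name,
# classes and label words are illustrative assumptions, and it is assumed that
# the parent Verbalizer routes `label_words` through `on_label_words_set` and
# provides `handle_multi_token`.
if __name__ == "__main__":
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("bert-base-cased")
    verbalizer = One2oneVerbalizer(tokenizer=tok,
                                   classes=["negative", "positive"],
                                   label_words=["terrible", "great"])
    # Fake vocabulary logits for a batch of two examples at the mask position.
    vocab_logits = torch.randn(2, tok.vocab_size)
    label_logits = verbalizer.process_logits(vocab_logits)
    print(label_logits.shape)  # expected: torch.Size([2, 2])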
f71ab83062ace9e091517b08758d3a356d00ee8f | 643 | py | Python | CPSC362_Project1/migrations/versions/57642bbc5015_add_price.py | KonechyJ/CPSC-362_Project1 | c338f2e0e8e621e2fb1846277dcc0c1caaf1e41a | ["MIT"] | null | null | null | CPSC362_Project1/migrations/versions/57642bbc5015_add_price.py | KonechyJ/CPSC-362_Project1 | c338f2e0e8e621e2fb1846277dcc0c1caaf1e41a | ["MIT"] | null | null | null | CPSC362_Project1/migrations/versions/57642bbc5015_add_price.py | KonechyJ/CPSC-362_Project1 | c338f2e0e8e621e2fb1846277dcc0c1caaf1e41a | ["MIT"] | 2 | 2021-09-10T03:47:29.000Z | 2021-12-23T06:16:34.000Z |
"""Add price
Revision ID: 57642bbc5015
Revises: 6b66b7cc2f1f
Create Date: 2021-11-18 17:58:58.263480
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '57642bbc5015'
down_revision = '6b66b7cc2f1f'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('post', sa.Column('price', sa.Integer(), nullable=False))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('post', 'price')
# ### end Alembic commands ###
f71ab8ed89dcd84727dfd18c9a588273b4b1ffe5 | 476 | py | Python | tests/container/elements.py | nadirhamid/protean | d31bc634e05c9221e82136bf18c2ceaa0982c1c8 | ["BSD-3-Clause"] | null | null | null | tests/container/elements.py | nadirhamid/protean | d31bc634e05c9221e82136bf18c2ceaa0982c1c8 | ["BSD-3-Clause"] | null | null | null | tests/container/elements.py | nadirhamid/protean | d31bc634e05c9221e82136bf18c2ceaa0982c1c8 | ["BSD-3-Clause"] | null | null | null |
# Protean
from protean.core.field.basic import String
from protean.utils.container import BaseContainer
class CustomBaseContainer(BaseContainer):
def __new__(cls, *args, **kwargs):
if cls is CustomBaseContainer:
raise TypeError("CustomBaseContainer cannot be instantiated")
return super().__new__(cls)
class CustomContainer(CustomBaseContainer):
foo = String(max_length=50, required=True)
bar = String(max_length=50, required=True)
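

# A minimal usage sketch (an assumption for illustration: BaseContainer
# accepts field values as keyword arguments, as is usual for Protean
# containers):
#
#     CustomContainer(foo="a", bar="b")  # concrete subclass instantiates fine
#     CustomBaseContainer()              # raises TypeError by design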
f71ab9c19de52f584719fbedb002bf798830562d | 544 | py | Python | py_pdf_term/endtoend/_endtoend/mappers/caches/xml.py | kumachan-mis/py-pdf-term | 282505826ce8c626003e753068d15738d772ce46 | ["MIT"] | null | null | null | py_pdf_term/endtoend/_endtoend/mappers/caches/xml.py | kumachan-mis/py-pdf-term | 282505826ce8c626003e753068d15738d772ce46 | ["MIT"] | 1 | 2021-08-02T13:02:12.000Z | 2021-08-02T13:02:12.000Z | py_pdf_term/endtoend/_endtoend/mappers/caches/xml.py | kumachan-mis/py-pdf-term | 282505826ce8c626003e753068d15738d772ce46 | ["MIT"] | null | null | null |
from typing import Type
from ...caches import BaseXMLLayerCache, XMLLayerFileCache, XMLLayerNoCache
from ..base import BaseMapper
from ..consts import PACKAGE_NAME
class XMLLayerCacheMapper(BaseMapper[Type[BaseXMLLayerCache]]):
@classmethod
def default_mapper(cls) -> "XMLLayerCacheMapper":
default_mapper = cls()
cache_clses = [XMLLayerNoCache, XMLLayerFileCache]
for cache_cls in cache_clses:
default_mapper.add(f"{PACKAGE_NAME}.{cache_cls.__name__}", cache_cls)
return default_mapper
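

# A minimal usage sketch (`MyXMLLayerCache` is a hypothetical custom cache
# class, shown only to illustrate the mapper API defined above):
#
#     mapper = XMLLayerCacheMapper.default_mapper()
#     mapper.add("my_package.MyXMLLayerCache", MyXMLLayerCache)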
f71aba11c3ef384c490f493c022cda6fbf1433c8 | 3,220 | py | Python | rafter/blueprints.py | olivier-m/rafter | aafcf8fd019f24abcf519307c4484cc6b4697c04 | ["MIT"] | 1 | 2018-09-10T14:04:22.000Z | 2018-09-10T14:04:22.000Z | rafter/blueprints.py | olivier-m/rafter | aafcf8fd019f24abcf519307c4484cc6b4697c04 | ["MIT"] | null | null | null | rafter/blueprints.py | olivier-m/rafter | aafcf8fd019f24abcf519307c4484cc6b4697c04 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
.. autoclass:: Blueprint
"""
from sanic.blueprints import Blueprint as BaseBlueprint, FutureRoute
__all__ = ('Blueprint',)
class Blueprint(BaseBlueprint):
"""Create a new blueprint.
:param name: unique name of the blueprint
:param url_prefix: URL to be prefixed before all route URLs
:param strict_slashes: strict to trailing slash
.. automethod:: add_resource
.. automethod:: resource
"""
def __init__(self, *args, **kwargs):
super(Blueprint, self).__init__(*args, **kwargs)
self.resources = []
def register(self, app, options):
super(Blueprint, self).register(app, options)
url_prefix = options.get('url_prefix', self.url_prefix)
for future, kwargs in self.resources:
future.handler.__blueprintname__ = self.name
uri = url_prefix + future.uri if url_prefix else future.uri
version = future.version or self.version
app.resource(uri=uri[1:] if uri.startswith('//') else uri,
methods=future.methods,
host=future.host or self.host,
strict_slashes=future.strict_slashes,
stream=future.stream,
version=version,
name=future.name,
**kwargs)(future.handler)
def resource(self, uri, methods=frozenset({'GET'}), host=None,
strict_slashes=None, stream=False, version=None, name=None,
**kwargs):
"""
Create a blueprint resource route from a decorated function.
:param uri: endpoint at which the route will be accessible.
:param methods: list of acceptable HTTP methods.
:param host:
:param strict_slashes:
:param version:
:param name: user defined route name for url_for
:return: function or class instance
Accepts any keyword argument that will be passed to the app resource.
"""
if strict_slashes is None:
strict_slashes = self.strict_slashes
def decorator(handler):
self.resources.append((
FutureRoute(handler, uri, methods, host, strict_slashes,
stream, version, name),
kwargs))
return handler
return decorator
def add_resource(self, handler, uri, methods=frozenset({'GET'}),
host=None, strict_slashes=None, version=None, name=None,
**kwargs):
"""
Create a blueprint resource route from a function.
:param uri: endpoint at which the route will be accessible.
:param methods: list of acceptable HTTP methods.
:param host:
:param strict_slashes:
:param version:
:param name: user defined route name for url_for
:return: function or class instance
Accepts any keyword argument that will be passed to the app resource.
"""
self.resource(uri=uri, methods=methods, host=host,
strict_slashes=strict_slashes, version=version,
name=name, **kwargs)(handler)
| 34.623656 | 77 | 0.591925 |
from sanic.blueprints import Blueprint as BaseBlueprint, FutureRoute
__all__ = ('Blueprint',)
class Blueprint(BaseBlueprint):
def __init__(self, *args, **kwargs):
super(Blueprint, self).__init__(*args, **kwargs)
self.resources = []
def register(self, app, options):
super(Blueprint, self).register(app, options)
url_prefix = options.get('url_prefix', self.url_prefix)
for future, kwargs in self.resources:
future.handler.__blueprintname__ = self.name
uri = url_prefix + future.uri if url_prefix else future.uri
version = future.version or self.version
app.resource(uri=uri[1:] if uri.startswith('//') else uri,
methods=future.methods,
host=future.host or self.host,
strict_slashes=future.strict_slashes,
stream=future.stream,
version=version,
name=future.name,
**kwargs)(future.handler)
def resource(self, uri, methods=frozenset({'GET'}), host=None,
strict_slashes=None, stream=False, version=None, name=None,
**kwargs):
if strict_slashes is None:
strict_slashes = self.strict_slashes
def decorator(handler):
self.resources.append((
FutureRoute(handler, uri, methods, host, strict_slashes,
stream, version, name),
kwargs))
return handler
return decorator
def add_resource(self, handler, uri, methods=frozenset({'GET'}),
host=None, strict_slashes=None, version=None, name=None,
**kwargs):
self.resource(uri=uri, methods=methods, host=host,
strict_slashes=strict_slashes, version=version,
name=name, **kwargs)(handler)
| true | true |
f71aba25d68cbd1b1da66df6ca5eaabc6b86db83 | 1,244 | py | Python | setup.py | vtunr/VTun | f82b23945e95a3610e9bb7c54e62d0c51cac23a7 | ["MIT"] | 2 | 2020-04-14T19:14:07.000Z | 2022-02-14T14:49:44.000Z | setup.py | vtunr/VTun | f82b23945e95a3610e9bb7c54e62d0c51cac23a7 | ["MIT"] | 16 | 2020-01-20T10:19:17.000Z | 2022-01-15T18:05:55.000Z | setup.py | vtunr/VTunit | f82b23945e95a3610e9bb7c54e62d0c51cac23a7 | ["MIT"] | null | null | null |
import setuptools
import subprocess
with open("README.md", "r") as fh:
long_description = fh.read()
packages = [dep.rstrip('\n') for dep in open("requirements.txt", "r")]
def get_git_version():
return subprocess.check_output(['git', 'describe','--dirty', '--tags']).strip()
setuptools.setup(
name="VTunit", # Replace with your own username
version=get_git_version(),
author="Tony Martinet",
author_email="tonymartinet@gmail.com",
description="Unit test helper",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/vtunr/VTunit",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points = {
'console_scripts': ['vtunit=vtunit:main',
'vtunit_cmake_generator=generator.mock_generator:main',
'vtunit_test_runner_generator=generator.test_runner_generator:main',
'vtunit_output_generator=generator.output_generator:main']
},
python_requires='>=2.7',
install_requires=packages
)
f71abb077d128f03c4fd2fe2aa978ca83223d79e | 6,608 | py | Python | built-in/PyTorch/Official/cv/image_classification/Gluon_ResNet50_v1d_for_PyTorch/timm/optim/radam.py | Ascend/modelzoo | f018cfed33dbb1cc2110b9ea2e233333f71cc509 | ["Apache-2.0"] | 12 | 2020-12-13T08:34:24.000Z | 2022-03-20T15:17:17.000Z | built-in/PyTorch/Official/cv/image_classification/Gluon_ResNet50_v1d_for_PyTorch/timm/optim/radam.py | Ascend/modelzoo | f018cfed33dbb1cc2110b9ea2e233333f71cc509 | ["Apache-2.0"] | 1 | 2022-01-20T03:11:05.000Z | 2022-01-20T06:53:39.000Z | built-in/PyTorch/Official/cv/image_classification/Gluon_ResNet50_v1d_for_PyTorch/timm/optim/radam.py | Ascend/modelzoo | f018cfed33dbb1cc2110b9ea2e233333f71cc509 | ["Apache-2.0"] | 2 | 2021-07-10T12:40:46.000Z | 2021-12-17T07:55:15.000Z |
# Copyright [yyyy] [name of copyright owner]
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""RAdam Optimizer.
Implementation lifted from: https://github.com/LiyuanLucasLiu/RAdam
Paper: `On the Variance of the Adaptive Learning Rate and Beyond` - https://arxiv.org/abs/1908.03265
"""
import math
import torch
from torch.optim.optimizer import Optimizer, required
class RAdam(Optimizer):
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0):
defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
self.buffer = [[None, None, None] for ind in range(10)]
super(RAdam, self).__init__(params, defaults)
def __setstate__(self, state):
super(RAdam, self).__setstate__(state)
def step(self, closure=None):
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
for p in group['params']:
if p.grad is None:
continue
grad = p.grad.data.float()
if grad.is_sparse:
raise RuntimeError('RAdam does not support sparse gradients')
p_data_fp32 = p.data.float()
state = self.state[p]
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p_data_fp32)
state['exp_avg_sq'] = torch.zeros_like(p_data_fp32)
else:
state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32)
state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32)
exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
beta1, beta2 = group['betas']
exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
exp_avg.mul_(beta1).add_(1 - beta1, grad)
state['step'] += 1
buffered = self.buffer[int(state['step'] % 10)]
if state['step'] == buffered[0]:
N_sma, step_size = buffered[1], buffered[2]
else:
buffered[0] = state['step']
beta2_t = beta2 ** state['step']
N_sma_max = 2 / (1 - beta2) - 1
N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)
buffered[1] = N_sma
# more conservative since it's an approximated value
if N_sma >= 5:
step_size = group['lr'] * math.sqrt(
(1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / (
N_sma_max - 2)) / (1 - beta1 ** state['step'])
else:
step_size = group['lr'] / (1 - beta1 ** state['step'])
buffered[2] = step_size
if group['weight_decay'] != 0:
p_data_fp32.add_(-group['weight_decay'] * group['lr'], p_data_fp32)
# more conservative since it's an approximated value
if N_sma >= 5:
denom = exp_avg_sq.sqrt().add_(group['eps'])
p_data_fp32.addcdiv_(-step_size, exp_avg, denom)
else:
p_data_fp32.add_(-step_size, exp_avg)
p.data.copy_(p_data_fp32)
return loss
class PlainRAdam(Optimizer):
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0):
defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
super(PlainRAdam, self).__init__(params, defaults)
def __setstate__(self, state):
super(PlainRAdam, self).__setstate__(state)
def step(self, closure=None):
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
for p in group['params']:
if p.grad is None:
continue
grad = p.grad.data.float()
if grad.is_sparse:
raise RuntimeError('RAdam does not support sparse gradients')
p_data_fp32 = p.data.float()
state = self.state[p]
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p_data_fp32)
state['exp_avg_sq'] = torch.zeros_like(p_data_fp32)
else:
state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32)
state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32)
exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
beta1, beta2 = group['betas']
exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
exp_avg.mul_(beta1).add_(1 - beta1, grad)
state['step'] += 1
beta2_t = beta2 ** state['step']
N_sma_max = 2 / (1 - beta2) - 1
N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)
if group['weight_decay'] != 0:
p_data_fp32.add_(-group['weight_decay'] * group['lr'], p_data_fp32)
# more conservative since it's an approximated value
if N_sma >= 5:
step_size = group['lr'] * math.sqrt(
(1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / (
N_sma_max - 2)) / (1 - beta1 ** state['step'])
denom = exp_avg_sq.sqrt().add_(group['eps'])
p_data_fp32.addcdiv_(-step_size, exp_avg, denom)
else:
step_size = group['lr'] / (1 - beta1 ** state['step'])
p_data_fp32.add_(-step_size, exp_avg)
p.data.copy_(p_data_fp32)
return loss
| 39.100592 | 111 | 0.525272 |
import math
import torch
from torch.optim.optimizer import Optimizer, required
class RAdam(Optimizer):
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0):
defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
self.buffer = [[None, None, None] for ind in range(10)]
super(RAdam, self).__init__(params, defaults)
def __setstate__(self, state):
super(RAdam, self).__setstate__(state)
def step(self, closure=None):
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
for p in group['params']:
if p.grad is None:
continue
grad = p.grad.data.float()
if grad.is_sparse:
raise RuntimeError('RAdam does not support sparse gradients')
p_data_fp32 = p.data.float()
state = self.state[p]
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p_data_fp32)
state['exp_avg_sq'] = torch.zeros_like(p_data_fp32)
else:
state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32)
state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32)
exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
beta1, beta2 = group['betas']
exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
exp_avg.mul_(beta1).add_(1 - beta1, grad)
state['step'] += 1
buffered = self.buffer[int(state['step'] % 10)]
if state['step'] == buffered[0]:
N_sma, step_size = buffered[1], buffered[2]
else:
buffered[0] = state['step']
beta2_t = beta2 ** state['step']
N_sma_max = 2 / (1 - beta2) - 1
N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)
buffered[1] = N_sma
if N_sma >= 5:
step_size = group['lr'] * math.sqrt(
(1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / (
N_sma_max - 2)) / (1 - beta1 ** state['step'])
else:
step_size = group['lr'] / (1 - beta1 ** state['step'])
buffered[2] = step_size
if group['weight_decay'] != 0:
p_data_fp32.add_(-group['weight_decay'] * group['lr'], p_data_fp32)
# more conservative since it's an approximated value
if N_sma >= 5:
denom = exp_avg_sq.sqrt().add_(group['eps'])
p_data_fp32.addcdiv_(-step_size, exp_avg, denom)
else:
p_data_fp32.add_(-step_size, exp_avg)
p.data.copy_(p_data_fp32)
return loss
class PlainRAdam(Optimizer):
def __init__(self, params, lr=1e-3, betas=(0.9, 0.999), eps=1e-8, weight_decay=0):
defaults = dict(lr=lr, betas=betas, eps=eps, weight_decay=weight_decay)
super(PlainRAdam, self).__init__(params, defaults)
def __setstate__(self, state):
super(PlainRAdam, self).__setstate__(state)
def step(self, closure=None):
loss = None
if closure is not None:
loss = closure()
for group in self.param_groups:
for p in group['params']:
if p.grad is None:
continue
grad = p.grad.data.float()
if grad.is_sparse:
raise RuntimeError('RAdam does not support sparse gradients')
p_data_fp32 = p.data.float()
state = self.state[p]
if len(state) == 0:
state['step'] = 0
state['exp_avg'] = torch.zeros_like(p_data_fp32)
state['exp_avg_sq'] = torch.zeros_like(p_data_fp32)
else:
state['exp_avg'] = state['exp_avg'].type_as(p_data_fp32)
state['exp_avg_sq'] = state['exp_avg_sq'].type_as(p_data_fp32)
exp_avg, exp_avg_sq = state['exp_avg'], state['exp_avg_sq']
beta1, beta2 = group['betas']
exp_avg_sq.mul_(beta2).addcmul_(1 - beta2, grad, grad)
exp_avg.mul_(beta1).add_(1 - beta1, grad)
state['step'] += 1
beta2_t = beta2 ** state['step']
N_sma_max = 2 / (1 - beta2) - 1
N_sma = N_sma_max - 2 * state['step'] * beta2_t / (1 - beta2_t)
if group['weight_decay'] != 0:
p_data_fp32.add_(-group['weight_decay'] * group['lr'], p_data_fp32)
if N_sma >= 5:
step_size = group['lr'] * math.sqrt(
(1 - beta2_t) * (N_sma - 4) / (N_sma_max - 4) * (N_sma - 2) / N_sma * N_sma_max / (
N_sma_max - 2)) / (1 - beta1 ** state['step'])
denom = exp_avg_sq.sqrt().add_(group['eps'])
p_data_fp32.addcdiv_(-step_size, exp_avg, denom)
else:
step_size = group['lr'] / (1 - beta1 ** state['step'])
p_data_fp32.add_(-step_size, exp_avg)
p.data.copy_(p_data_fp32)
return loss
| true | true |
f71abbabdf4197e4dad1e27bc472d450790c4613 | 44,512 | py | Python | theano/gof/graph.py | MarcCote/Theano | f0d293161a624ccf10c60ee8405a92e7d321151a | ["BSD-3-Clause"] | null | null | null | theano/gof/graph.py | MarcCote/Theano | f0d293161a624ccf10c60ee8405a92e7d321151a | ["BSD-3-Clause"] | null | null | null | theano/gof/graph.py | MarcCote/Theano | f0d293161a624ccf10c60ee8405a92e7d321151a | ["BSD-3-Clause"] | 1 | 2019-09-09T18:31:41.000Z | 2019-09-09T18:31:41.000Z |
"""
Node classes (`Apply`, `Variable`) and expression graph algorithms.
"""
from __future__ import absolute_import, print_function, division
from collections import deque
from copy import copy
from itertools import count
import theano
from theano import config
from theano.gof import utils
from six import string_types, integer_types, iteritems
from theano.misc.ordered_set import OrderedSet
__docformat__ = "restructuredtext en"
# Lazy imports to avoid circular dependencies.
is_same_graph_with_merge = None
equal_computations = None
NoParams = object()
class Node(utils.object2):
"""
A Node in a theano graph.
Graphs contain two kinds of Nodes -- Variable and Apply.
Edges in the graph are not explicitly represented.
Instead each Node keeps track of its parents via
Variable.owner / Apply.inputs and its children
via Variable.clients / Apply.outputs.
"""
def get_parents(self):
"""
Return a list of the parents of this node.
Should return a copy--i.e., modifying the return
value should not modify the graph structure.
"""
raise NotImplementedError()
class Apply(Node):
"""
An :term:`Apply` instance is a node in an expression graph which represents
the application of an `Op` to some input `Variable` nodes, producing some
output `Variable` nodes.
This class is typically instantiated by an Op's make_node() function, which
is typically called by that Op's __call__() function.
An Apply instance serves as a simple structure with three important
attributes:
- :literal:`inputs` : a list of `Variable` nodes that represent the
arguments of the expression,
- :literal:`outputs` : a list of `Variable` nodes that represent the
variable of the expression, and
- :literal:`op` : an `Op` instance that determines the nature of the
expression being applied.
The driver `compile.function` uses Apply's inputs attribute together with
Variable's owner attribute to search the expression graph and determine
which inputs are necessary to compute the function's outputs.
A `Linker` uses the Apply instance's `op` field to compute the variables.
Comparing with the Python language, an `Apply` instance is theano's version
of a function call (or expression instance) whereas `Op` is theano's version
of a function definition.
Parameters
----------
op : `Op` instance
inputs : list of Variable instances
outputs : list of Variable instances
Notes
-----
The owner field of each output in the outputs list will be set to self.
If an output element has an owner that is neither None nor self, then a
ValueError exception will be raised.
"""
def __init__(self, op, inputs, outputs):
self.op = op
self.inputs = []
self.tag = utils.scratchpad()
if not isinstance(inputs, (list, tuple)):
raise TypeError("The inputs of an Apply must be a list or tuple")
if not isinstance(outputs, (list, tuple)):
raise TypeError("The output of an Apply must be a list or tuple")
# filter inputs to make sure each element is a Variable
for input in inputs:
if isinstance(input, Variable):
self.inputs.append(input)
else:
raise TypeError("The 'inputs' argument to Apply must contain Variable instances, not %s" % input)
self.outputs = []
# filter outputs to make sure each element is a Variable
for i, output in enumerate(outputs):
if isinstance(output, Variable):
if output.owner is None:
output.owner = self
output.index = i
elif output.owner is not self or output.index != i:
raise ValueError("All output variables passed to Apply must belong to it.")
self.outputs.append(output)
else:
raise TypeError("The 'outputs' argument to Apply must contain Variable instances with no owner, not %s" % output)
def run_params(self):
"""
Returns the params for the node, or NoParams if no params is set.
"""
if hasattr(self.op, 'get_params'):
return self.op.get_params(self)
return NoParams
def __getstate__(self):
d = self.__dict__
# ufunc don't pickle/unpickle well
if hasattr(self.tag, 'ufunc'):
d = copy(self.__dict__)
t = d["tag"]
del t.ufunc
d["tag"] = t
return d
def default_output(self):
"""
Returns the default output for this node.
Returns
-------
Variable instance
An element of self.outputs, typically self.outputs[0].
Notes
-----
        May raise AttributeError if self.op.default_output is out of range, or
        if there are multiple outputs and self.op.default_output does not exist.
"""
do = getattr(self.op, 'default_output', None)
if do is None:
if len(self.outputs) == 1:
return self.outputs[0]
else:
raise AttributeError(
"%s.default_output should be an output index." % self.op)
elif not isinstance(do, integer_types):
raise AttributeError("%s.default_output should be an int or long" %
self.op)
elif do < 0 or do >= len(self.outputs):
raise AttributeError("%s.default_output is out of range." %
self.op)
return self.outputs[do]
out = property(default_output,
doc="alias for self.default_output()")
"""
Alias for self.default_output().
"""
def __str__(self):
return op_as_string(self.inputs, self)
def __repr__(self):
return str(self)
def __asapply__(self):
return self
def clone(self):
"""
Duplicate this Apply instance with inputs = self.inputs.
Returns
-------
object
A new Apply instance (or subclass instance) with new outputs.
Notes
-----
Tags are copied from self to the returned instance.
"""
cp = self.__class__(self.op, self.inputs,
[output.clone() for output in self.outputs])
cp.tag = copy(self.tag)
return cp
def clone_with_new_inputs(self, inputs, strict=True):
"""
Duplicate this Apply instance in a new graph.
Parameters
----------
inputs
List of Variable instances to use as inputs.
strict : bool
If True, the type fields of all the inputs must be equal
to the current ones (or compatible, for instance Tensor /
CudaNdarray of the same dtype and broadcastable patterns,
in which case they will be converted into current Type), and
returned outputs are guaranteed to have the same types as
self.outputs. If False, then there's no guarantee that the
clone's outputs will have the same types as self.outputs,
and cloning may not even be possible (it depends on the Op).
Returns
-------
object
An Apply instance with the same op but different outputs.
"""
assert isinstance(inputs, (list, tuple))
remake_node = False
new_inputs = inputs[:]
for i, (curr, new) in enumerate(zip(self.inputs, new_inputs)):
if not curr.type == new.type:
if strict:
# If compatible, casts new into curr.type
new_inputs[i] = curr.type.filter_variable(new)
else:
remake_node = True
if remake_node:
new_node = self.op.make_node(*new_inputs)
new_node.tag = copy(self.tag).__update__(new_node.tag)
else:
new_node = self.clone()
new_node.inputs = new_inputs
return new_node
def get_parents(self):
return list(self.inputs)
# convenience properties
nin = property(lambda self: len(self.inputs), doc='same as len(self.inputs)')
"""
Property: Number of inputs.
"""
nout = property(lambda self: len(self.outputs), doc='same as len(self.outputs)')
"""
Property: Number of outputs.
"""
params_type = property(lambda self: self.op.params_type, doc='type to use for the params')
class Variable(Node):
"""
A :term:`Variable` is a node in an expression graph that represents a
variable.
The inputs and outputs of every `Apply` (theano.gof.Apply) are `Variable`
instances. The input and output arguments to create a `function` are also
`Variable` instances. A `Variable` is like a strongly-typed variable in
some other languages; each `Variable` contains a reference to a `Type`
instance that defines the kind of value the `Variable` can take in a
computation.
A `Variable` is a container for four important attributes:
- :literal:`type` a `Type` instance defining the kind of value this
`Variable` can have,
- :literal:`owner` either None (for graph roots) or the `Apply` instance
of which `self` is an output,
- :literal:`index` the integer such that :literal:`owner.outputs[index] is
this_variable` (ignored if `owner` is None),
- :literal:`name` a string to use in pretty-printing and debugging.
There are a few kinds of Variables to be aware of: A Variable which is the
output of a symbolic computation has a reference to the Apply instance to
which it belongs (property: owner) and the position of itself in the owner's
output list (property: index).
- `Variable` (this base type) is typically the output of a symbolic
computation.
- `Constant` (a subclass) which adds a default and un-replaceable
:literal:`value`, and requires that owner is None.
- `TensorVariable` subclass of Variable that represents a numpy.ndarray
object.
- `TensorSharedVariable` Shared version of TensorVariable.
- `SparseVariable` subclass of Variable that represents
a scipy.sparse.{csc,csr}_matrix object.
- `CudaNdarrayVariable` subclass of Variable that represents our object on
the GPU that is a subset of numpy.ndarray.
- `RandomVariable`.
A Variable which is the output of a symbolic computation will have an owner
not equal to None.
Using the Variables' owner field and the Apply nodes' inputs fields, one can
navigate a graph from an output all the way to the inputs. The opposite
direction is not possible until a FunctionGraph has annotated the Variables
with the clients field, ie, before the compilation process has begun a
Variable does not know which Apply nodes take it as input.
Parameters
----------
type : a Type instance
The type governs the kind of data that can be associated with this
variable.
owner : None or Apply instance
The Apply instance which computes the value for this variable.
index : None or int
The position of this Variable in owner.outputs.
name : None or str
A string for pretty-printing and debugging.
Examples
--------
.. code-block:: python
import theano
from theano import tensor
a = tensor.constant(1.5) # declare a symbolic constant
b = tensor.fscalar() # declare a symbolic floating-point scalar
c = a + b # create a simple expression
f = theano.function([b], [c]) # this works because a has a value associated with it already
assert 4.0 == f(2.5) # bind 2.5 to an internal copy of b and evaluate an internal c
theano.function([a], [c]) # compilation error because b (required by c) is undefined
theano.function([a,b], [c]) # compilation error because a is constant, it can't be an input
d = tensor.value(1.5) # create a value similar to the constant 'a'
e = d + b
theano.function([d,b], [e]) # this works. d's default value of 1.5 is ignored.
The python variables :literal:`a,b,c` all refer to instances of type
`Variable`. The `Variable` refered to by `a` is also an instance of
`Constant`.
`compile.function` uses each `Apply` instance's `inputs` attribute together
with each Variable's `owner` field to determine which inputs are necessary
to compute the function's outputs.
"""
# __slots__ = ['type', 'owner', 'index', 'name']
__count__ = count(0)
def __init__(self, type, owner=None, index=None, name=None):
super(Variable, self).__init__()
self.tag = utils.scratchpad()
self.type = type
if owner is not None and not isinstance(owner, Apply):
raise TypeError("owner must be an Apply instance", owner)
self.owner = owner
if index is not None and not isinstance(index, integer_types):
raise TypeError("index must be an int", index)
self.index = index
if name is not None and not isinstance(name, string_types):
raise TypeError("name must be a string", name)
self.name = name
self.auto_name = 'auto_' + str(next(self.__count__))
def __str__(self):
"""Return a str representation of the Variable.
"""
if self.name is not None:
return self.name
if self.owner is not None:
op = self.owner.op
if self.index == op.default_output:
return str(self.owner.op) + ".out"
else:
return str(self.owner.op) + "." + str(self.index)
else:
return "<%s>" % str(self.type)
def __repr_test_value__(self):
"""Return a repr of the test value.
Return a printable representation of the test value. It can be
overridden by classes with non printable test_value to provide a
suitable representation of the test_value.
"""
return repr(theano.gof.op.get_test_value(self))
def __repr__(self, firstPass=True):
"""Return a repr of the Variable.
Return a printable name or description of the Variable. If
config.print_test_value is True it will also print the test_value if
any.
"""
to_print = [str(self)]
if config.print_test_value and firstPass:
try:
to_print.append(self.__repr_test_value__())
except AttributeError:
pass
return '\n'.join(to_print)
def clone(self):
"""
Return a new Variable like self.
Returns
-------
Variable instance
A new Variable instance (or subclass instance) with no owner or
index.
Notes
-----
Tags are copied to the returned instance.
Name is copied to the returned instance.
"""
# return copy(self)
cp = self.__class__(self.type, None, None, self.name)
cp.tag = copy(self.tag)
return cp
def __lt__(self, other):
raise NotImplementedError('Subclasses of Variable must provide __lt__',
self.__class__.__name__)
def __le__(self, other):
raise NotImplementedError('Subclasses of Variable must provide __le__',
self.__class__.__name__)
def __gt__(self, other):
raise NotImplementedError('Subclasses of Variable must provide __gt__',
self.__class__.__name__)
def __ge__(self, other):
raise NotImplementedError('Subclasses of Variable must provide __ge__',
self.__class__.__name__)
def get_parents(self):
if self.owner is not None:
return [self.owner]
return []
def eval(self, inputs_to_values=None):
"""
Evaluates this variable.
Parameters
----------
inputs_to_values
A dictionary mapping theano Variables to values.
Examples
--------
>>> import numpy as np
>>> import theano.tensor as T
>>> x = T.dscalar('x')
>>> y = T.dscalar('y')
>>> z = x + y
>>> np.allclose(z.eval({x : 16.3, y : 12.1}), 28.4)
True
We passed :func:`eval` a dictionary mapping symbolic theano
variables to the values to substitute for them, and it returned
the numerical value of the expression.
Notes
-----
`eval` will be slow the first time you call it on a variable --
it needs to call :func:`function` to compile the expression behind
the scenes. Subsequent calls to :func:`eval` on that same variable
will be fast, because the variable caches the compiled function.
This way of computing has more overhead than a normal Theano
function, so don't use it too much in real scripts.
"""
if inputs_to_values is None:
inputs_to_values = {}
if not hasattr(self, '_fn_cache'):
self._fn_cache = dict()
inputs = tuple(sorted(inputs_to_values.keys(), key=id))
if inputs not in self._fn_cache:
self._fn_cache[inputs] = theano.function(inputs, self)
args = [inputs_to_values[param] for param in inputs]
rval = self._fn_cache[inputs](*args)
return rval
def __getstate__(self):
d = self.__dict__.copy()
d.pop("_fn_cache", None)
return d
class Constant(Variable):
"""
A :term:`Constant` is a `Variable` with a `value` field that cannot be
changed at runtime.
Constant nodes make eligible numerous optimizations: constant inlining in
C code, constant folding, etc.
Notes
-----
The data field is filtered by what is provided in the constructor for the
Constant's type field.
"""
# __slots__ = ['data']
def __init__(self, type, data, name=None):
Variable.__init__(self, type, None, None, name)
self.data = type.filter(data)
utils.add_tag_trace(self)
def equals(self, other):
# this does what __eq__ should do, but Variable and Apply should always be hashable by id
return isinstance(other, Constant) and self.signature() == other.signature()
def signature(self):
return (self.type, self.data)
def merge_signature(self):
return self.signature()
def __str__(self):
if self.name is not None:
return self.name
else:
name = str(self.data)
if len(name) > 20:
name = name[:10] + '...' + name[-10:]
return 'Constant{%s}' % name
def clone(self):
"""
We clone this object, but we don't clone the data to lower memory
requirement. We suppose that the data will never change.
"""
cp = self.__class__(self.type, self.data, self.name)
cp.tag = copy(self.tag)
return cp
def __set_owner(self, value):
"""
        Prevent the owner attribute of a Constant from being set.
Raises
------
ValueError
If `value` is not `None`.
"""
if value is not None:
raise ValueError("Constant instances cannot have an owner.")
owner = property(lambda self: None, __set_owner)
value = property(lambda self: self.data, doc='read-only data access method')
# index is not defined, because the `owner` attribute must necessarily be None
def stack_search(start, expand, mode='bfs', build_inv=False):
"""
Search through a graph, either breadth- or depth-first.
Parameters
----------
start : deque
Search from these nodes.
expand : callable
When we get to a node, add expand(node) to the list of nodes to visit.
        This function should return a list, or None.
    mode : string
        'bfs' or 'dfs' for breadth-first or depth-first traversal.
Returns
-------
    list of `Variable` or `Apply` instances (depends on `expand`)
The list of nodes in order of traversal.
Notes
-----
A node will appear at most once in the return value, even if it
appears multiple times in the start parameter.
:postcondition: every element of start is transferred to the returned list.
:postcondition: start is empty.
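    Examples
    --------
    A small sketch on a plain adjacency dict rather than a theano graph:
    >>> graph = {1: [2, 3], 2: [4], 3: [4], 4: []}
    >>> stack_search(deque([1]), lambda n: graph[n], 'bfs')
    [1, 2, 3, 4]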
"""
if mode not in ('bfs', 'dfs'):
raise ValueError('mode should be bfs or dfs', mode)
rval_set = set()
rval_list = list()
if mode == 'bfs':
start_pop = start.popleft
else:
start_pop = start.pop
expand_inv = {}
while start:
l = start_pop()
if id(l) not in rval_set:
rval_list.append(l)
rval_set.add(id(l))
expand_l = expand(l)
if expand_l:
if build_inv:
for r in expand_l:
expand_inv.setdefault(r, []).append(l)
start.extend(expand_l)
assert len(rval_list) == len(rval_set)
if build_inv:
return rval_list, expand_inv
return rval_list
def ancestors(variable_list, blockers=None):
"""
Return the variables that contribute to those in variable_list (inclusive).
Parameters
----------
variable_list : list of `Variable` instances
Output `Variable` instances from which to search backward through
owners.
Returns
-------
list of `Variable` instances
        All ancestor variables, in the order found by a left-recursive depth-first
search started at the nodes in `variable_list`.
"""
def expand(r):
if r.owner and (not blockers or r not in blockers):
return reversed(r.owner.inputs)
dfs_variables = stack_search(deque(variable_list), expand, 'dfs')
return dfs_variables
def inputs(variable_list, blockers=None):
"""
Return the inputs required to compute the given Variables.
Parameters
----------
variable_list : list of `Variable` instances
Output `Variable` instances from which to search backward through
owners.
Returns
-------
list of `Variable` instances
Input nodes with no owner, in the order found by a left-recursive
depth-first search started at the nodes in `variable_list`.
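    Examples
    --------
    A small sketch (`x` and `y` are illustrative scalar variables):
    >>> import theano.tensor as T
    >>> x, y = T.dscalar('x'), T.dscalar('y')
    >>> inputs([x + y])
    [x, y]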
"""
vlist = ancestors(variable_list, blockers)
rval = [r for r in vlist if r.owner is None]
return rval
def variables_and_orphans(i, o):
"""
    Extract the list of variables between i and o nodes via
    dfs traversal and choose the orphans among them.
Parameters
----------
i : list
Input variables.
o : list
Output variables.
"""
def expand(r):
if r.owner and r not in i:
l = list(r.owner.inputs) + list(r.owner.outputs)
l.reverse()
return l
variables = stack_search(deque(o), expand, 'dfs')
orphans = [r for r in variables if r.owner is None and r not in i]
return variables, orphans
def ops(i, o):
"""
Set of Ops contained within the subgraph between i and o
Parameters
----------
i : list
Input variables.
o : list
Output variables.
Returns
-------
object
The set of ops that are contained within the subgraph that lies
between i and o, including the owners of the variables in o and
intermediary ops between i and o, but not the owners of the variables
in i.
"""
ops = set()
variables, orphans = variables_and_orphans(i, o)
for r in variables:
if r not in i and r not in orphans:
if r.owner is not None:
ops.add(r.owner)
return ops
def variables(i, o):
"""
    Extracts the list of variables within the input and output nodes via dfs traversal.
Parameters
----------
i : list
Input variables.
o : list
Output variables.
Returns
-------
object
The set of Variables that are involved in the subgraph that lies
between i and o. This includes i, o, orphans(i, o) and all values of
all intermediary steps from i to o.
"""
return variables_and_orphans(i, o)[0]
def orphans(i, o):
"""
    Extracts the list of variables within the input and output nodes
    via dfs traversal and returns the orphans among them.
Parameters
----------
i : list
Input Variables.
o : list
Output Variables.
Returns
-------
object
The set of Variables which one or more Variables in o depend on but are
neither in i nor in the subgraph that lies between i and o.
Examples
--------
orphans([x], [(x+y).out]) => [y]
"""
return variables_and_orphans(i, o)[1]
def clone(i, o, copy_inputs=True):
"""
Copies the subgraph contained between i and o.
Parameters
----------
i : list
Input Variables.
o : list
Output Variables.
copy_inputs : bool
If True, the inputs will be copied (defaults to True).
Returns
-------
object
The inputs and outputs of that copy.
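    Examples
    --------
    A small sketch (`x` is an illustrative scalar variable):
    >>> import theano.tensor as T
    >>> x = T.dscalar('x')
    >>> inp, out = clone([x], [x + 1])
    >>> inp[0] is x
    False
    >>> out[0].owner.inputs[0] is inp[0]
    True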
"""
equiv = clone_get_equiv(i, o, copy_inputs)
return [equiv[input] for input in i], [equiv[output] for output in o]
def clone_get_equiv(inputs, outputs, copy_inputs_and_orphans=True, memo=None):
"""
Return a dictionary that maps from Variable and Apply nodes in the
original graph to a new node (a clone) in a new graph.
This function works by recursively cloning inputs... rebuilding a directed
graph from the inputs up to eventually building new outputs.
Parameters
----------
inputs : a list of Variables
outputs : a list of Variables
copy_inputs_and_orphans : bool
True means to create the cloned graph from new input and constant
nodes (the bottom of a feed-upward graph).
False means to clone a graph that is rooted at the original input
nodes.
memo : None or dict
Optionally start with a partly-filled dictionary for the return value.
If a dictionary is passed, this function will work in-place on that
dictionary and return it.
"""
if memo is None:
memo = {}
# clone the inputs if necessary
for input in inputs:
if copy_inputs_and_orphans:
cpy = input.clone()
cpy.owner = None
cpy.index = None
memo.setdefault(input, cpy)
else:
memo.setdefault(input, input)
# go through the inputs -> outputs graph cloning as we go
for apply in io_toposort(inputs, outputs):
for input in apply.inputs:
if input not in memo:
if copy_inputs_and_orphans:
cpy = input.clone()
memo[input] = cpy
else:
memo[input] = input
new_apply = apply.clone_with_new_inputs([memo[i] for i in apply.inputs])
memo.setdefault(apply, new_apply)
for output, new_output in zip(apply.outputs, new_apply.outputs):
memo.setdefault(output, new_output)
# finish up by cloning any remaining outputs (it can happen)
for output in outputs:
if output not in memo:
memo[output] = output.clone()
return memo
def general_toposort(r_out, deps, debug_print=False,
compute_deps_cache=None, deps_cache=None,
clients=None):
"""
    Perform a topological sort of all the nodes reachable from `r_out`,
    using `deps` to obtain the dependencies of each node.
Parameters
----------
deps
        A python function that takes a node as input and returns its dependencies.
compute_deps_cache : optional
If provided deps_cache should also be provided. This is a function like
deps, but that also cache its results in a dict passed as deps_cache.
deps_cache : dict
Must be used with compute_deps_cache.
clients : dict
If a dict is passed it will be filled with a mapping of node
-> clients for each node in the subgraph.
Notes
-----
deps(i) should behave like a pure function (no funny business with
internal state).
deps(i) will be cached by this function (to be fast).
The order of the return value list is determined by the order of nodes
returned by the deps() function.
    Either `deps` should be provided, or it can be None and the caller must
    provide `compute_deps_cache` and `deps_cache`. The second option removes a
    Python function call, and allows for more specialized code, so it can be
    faster.
"""
if compute_deps_cache is None:
deps_cache = {}
def compute_deps_cache(io):
if io not in deps_cache:
d = deps(io)
if d:
if not isinstance(d, (list, OrderedSet)):
raise TypeError(
"Non-deterministic collections here make"
" toposort non-deterministic.")
deps_cache[io] = list(d)
else:
deps_cache[io] = d
return d
else:
return deps_cache[io]
assert deps_cache is not None
assert isinstance(r_out, (tuple, list, deque))
reachable, _clients = stack_search(deque(r_out), compute_deps_cache,
'dfs', True)
if clients is not None:
clients.update(_clients)
sources = deque([r for r in reachable if not deps_cache.get(r, None)])
rset = set()
rlist = []
while sources:
node = sources.popleft()
if node not in rset:
rlist.append(node)
rset.add(node)
for client in _clients.get(node, []):
deps_cache[client] = [a for a in deps_cache[client]
if a is not node]
if not deps_cache[client]:
sources.append(client)
if len(rlist) != len(reachable):
if debug_print:
print('')
print(reachable)
print(rlist)
raise ValueError('graph contains cycles')
return rlist
def io_toposort(inputs, outputs, orderings=None, clients=None):
"""
    Perform a topological sort from input and output nodes.
Parameters
----------
inputs : list or tuple of Variable instances
    outputs : list or tuple of Variable instances
orderings : dict
Key: Apply instance. Value: list of Apply instance.
It is important that the value be a container with a deterministic
iteration order. No sets allowed!
clients : dict
If a dict is provided it will be filled with mappings of
node->clients for each node in the subgraph that is sorted
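    Examples
    --------
    A small sketch (`x` is an illustrative scalar; each arithmetic operation
    below creates one Apply node):
    >>> import theano.tensor as T
    >>> x = T.dscalar('x')
    >>> len(io_toposort([x], [(x * 2) + 1]))
    2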
"""
# the inputs are used only here in the function that decides what 'predecessors' to explore
iset = set(inputs)
# We build 2 functions as a speed up
deps_cache = {}
compute_deps = None
compute_deps_cache = None
if not orderings: # can be None or empty dict
# Specialized function that is faster when no ordering.
# Also include the cache in the function itself for speed up.
def compute_deps_cache(obj):
if obj in deps_cache:
return deps_cache[obj]
rval = []
if obj not in iset:
if isinstance(obj, Variable):
if obj.owner:
rval = [obj.owner]
elif isinstance(obj, Apply):
rval = list(obj.inputs)
if rval:
if not isinstance(rval, (list, OrderedSet)):
raise TypeError(
"Non-deterministic collections here make"
" toposort non-deterministic.")
deps_cache[obj] = list(rval)
else:
deps_cache[obj] = rval
else:
deps_cache[obj] = rval
return rval
else:
def compute_deps(obj):
rval = []
if obj not in iset:
if isinstance(obj, Variable):
if obj.owner:
rval = [obj.owner]
elif isinstance(obj, Apply):
rval = list(obj.inputs)
rval.extend(orderings.get(obj, []))
else:
assert not orderings.get(obj, [])
return rval
topo = general_toposort(outputs, deps=compute_deps,
compute_deps_cache=compute_deps_cache,
deps_cache=deps_cache, clients=clients)
return [o for o in topo if isinstance(o, Apply)]
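# Hedged example (added for illustration; not part of the original module).
# Requires a working theano installation; never executed at import time.
def _demo_io_toposort():
    import theano.tensor as tt
    x, y = tt.scalar('x'), tt.scalar('y')
    z = (x + y) * y
    # Apply nodes come back parents-first: the add precedes the mul.
    return io_toposort([x, y], [z])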
default_leaf_formatter = str
def default_node_formatter(op, argstrings):
return "%s(%s)" % (op.op, ", ".join(argstrings))
def io_connection_pattern(inputs, outputs):
"""
Returns the connection pattern of a subgraph defined by given
inputs and outputs.
"""
inner_nodes = io_toposort(inputs, outputs)
# Initialize 'connect_pattern_by_var' by establishing each input as
# connected only to itself
connect_pattern_by_var = {}
nb_inputs = len(inputs)
for i in range(nb_inputs):
input = inputs[i]
inp_connection_pattern = [i == j for j in range(nb_inputs)]
connect_pattern_by_var[input] = inp_connection_pattern
# Iterate through the nodes used to produce the outputs from the
# inputs and, for every node, infer their connection pattern to
# every input from the connection patterns of their parents.
for n in inner_nodes:
# Get the connection pattern of the inner node's op. If the op
# does not define a connection_pattern method, assume that
# every node output is connected to every node input
try:
op_connection_pattern = n.op.connection_pattern(n)
except AttributeError:
op_connection_pattern = ([[True] * len(n.outputs)] *
len(n.inputs))
# For every output of the inner node, figure out which inputs it
# is connected to by combining the connection pattern of the inner
# node and the connection patterns of the inner node's inputs.
for out_idx in range(len(n.outputs)):
out = n.outputs[out_idx]
out_connection_pattern = [False] * nb_inputs
for inp_idx in range(len(n.inputs)):
inp = n.inputs[inp_idx]
if inp in connect_pattern_by_var:
inp_connection_pattern = connect_pattern_by_var[inp]
# If the node output is connected to the node input, it
# means it is connected to every inner input that the
# node inputs is connected to
if op_connection_pattern[inp_idx][out_idx]:
out_connection_pattern = [out_connection_pattern[i] or
inp_connection_pattern[i]
for i in range(nb_inputs)]
# Store the connection pattern of the node output
connect_pattern_by_var[out] = out_connection_pattern
# Obtain the global connection pattern by combining the
    # connection patterns of the individual outputs
global_connection_pattern = [[] for o in range(len(inputs))]
for out in outputs:
out_connection_pattern = connect_pattern_by_var.get(out)
if out_connection_pattern is None:
# the output is completely isolated from inputs
out_connection_pattern = [False] * len(inputs)
for i in range(len(inputs)):
global_connection_pattern[i].append(out_connection_pattern[i])
return global_connection_pattern
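# Hedged example (added for illustration; not part of the original module).
def _demo_io_connection_pattern():
    import theano.tensor as tt
    x, y = tt.scalar('x'), tt.scalar('y')
    out = x + 1  # depends on x only
    # Indexed [input][output]: [[True], [False]] since y never reaches out.
    return io_connection_pattern([x, y], [out])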
def is_same_graph(var1, var2, givens=None, debug=False):
"""
Return True iff Variables `var1` and `var2` perform the same computation.
By 'performing the same computation', we mean that they must share the same
graph, so that for instance this function will return False when comparing
(x * (y * z)) with ((x * y) * z).
The current implementation is not efficient since, when possible, it
verifies equality by calling two different functions that are expected to
return the same output. The goal is to verify this assumption, to
eventually get rid of one of them in the future.
Parameters
----------
var1
The first Variable to compare.
var2
The second Variable to compare.
givens
Similar to the `givens` argument of `theano.function`, it can be used
to perform substitutions in the computational graph of `var1` and
`var2`. This argument is associated to neither `var1` nor `var2`:
substitutions may affect both graphs if the substituted variable
is present in both.
debug : bool
If True, then an exception is raised when we are in a situation where
the `equal_computations` implementation cannot be called.
This parameter is intended to be used in tests only, to make sure we
properly test both implementations.
Examples
--------
====== ====== ====== ======
var1 var2 givens output
====== ====== ====== ======
x + 1 x + 1 {} True
x + 1 y + 1 {} False
x + 1 y + 1 {x: y} True
====== ====== ====== ======
"""
# Lazy import.
if givens is None:
givens = {}
global equal_computations, is_same_graph_with_merge
if equal_computations is None:
from theano.gof.opt import is_same_graph_with_merge
from theano.scan_module.scan_utils import equal_computations
# Convert `givens` to dictionary.
if not isinstance(givens, dict):
givens = dict(givens)
# Get result from the merge-based function.
rval1 = is_same_graph_with_merge(var1=var1, var2=var2, givens=givens)
# Get result from the function `equal_computations` from scan_utils.
use_equal_computations = True
if givens:
# We need to build the `in_xs` and `in_ys` lists. To do this, we need
# to be able to tell whether a variable belongs to the computational
# graph of `var1` or `var2`.
# The typical case we want to handle is when `to_replace` belongs to
# one of these graphs, and `replace_by` belongs to the other one. In
# other situations, the current implementation of `equal_computations`
# is probably not appropriate, so we do not call it.
ok = True
in_xs = []
in_ys = []
# Compute the sets of all variables found in each computational graph.
inputs_var = list(map(inputs, ([var1], [var2])))
all_vars = [set(variables(v_i, v_o))
for v_i, v_o in ((inputs_var[0], [var1]),
(inputs_var[1], [var2]))]
def in_var(x, k):
# Return True iff `x` is in computation graph of variable `vark`.
return x in all_vars[k - 1]
for to_replace, replace_by in iteritems(givens):
# Map a substitution variable to the computational graphs it
# belongs to.
inside = dict((v, [in_var(v, k) for k in (1, 2)])
for v in (to_replace, replace_by))
if (inside[to_replace][0] and not inside[to_replace][1] and
inside[replace_by][1] and not inside[replace_by][0]):
# Substitute variable in `var1` by one from `var2`.
in_xs.append(to_replace)
in_ys.append(replace_by)
elif (inside[to_replace][1] and not inside[to_replace][0] and
inside[replace_by][0] and not inside[replace_by][1]):
# Substitute variable in `var2` by one from `var1`.
in_xs.append(replace_by)
in_ys.append(to_replace)
else:
ok = False
break
if not ok:
# We cannot directly use `equal_computations`.
if debug:
raise AssertionError(
'When `debug` is True we want to make sure we are also '
'using the `equal_computations` implementation')
use_equal_computations = False
else:
in_xs = None
in_ys = None
if use_equal_computations:
rval2 = equal_computations(xs=[var1], ys=[var2],
in_xs=in_xs, in_ys=in_ys)
assert rval2 == rval1
return rval1
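# Hedged example (added for illustration; not part of the original module),
# mirroring the table in the docstring above.
def _demo_is_same_graph():
    import theano.tensor as tt
    x, y = tt.scalar('x'), tt.scalar('y')
    assert is_same_graph(x + 1, x + 1)
    assert not is_same_graph(x + 1, y + 1)
    assert is_same_graph(x + 1, y + 1, givens={x: y})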
def op_as_string(i, op,
leaf_formatter=default_leaf_formatter,
node_formatter=default_node_formatter):
"""
    Return a string representation of the subgraph between i and o,
    rendered as the op applied to the descriptions of its inputs.
"""
strs = as_string(i, op.inputs, leaf_formatter, node_formatter)
return node_formatter(op, strs)
def as_string(i, o,
leaf_formatter=default_leaf_formatter,
node_formatter=default_node_formatter):
"""
Returns a string representation of the subgraph between i and o
Parameters
----------
i : list
Input `Variable` s.
o : list
Output `Variable` s.
leaf_formatter : callable
Takes a `Variable` and returns a string to describe it.
node_formatter : callable
Takes an `Op` and the list of strings corresponding to its arguments
and returns a string to describe it.
Returns
-------
str
Returns a string representation of the subgraph between i and o. If the
same op is used by several other ops, the first occurrence will be
marked as :literal:`*n -> description` and all subsequent occurrences
will be marked as :literal:`*n`, where n is an id number (ids are
attributed in an unspecified order and only exist for viewing
convenience).
"""
i = set(i)
orph = orphans(i, o)
multi = set()
seen = set()
for output in o:
op = output.owner
if op in seen:
multi.add(op)
else:
seen.add(op)
for op in ops(i, o):
for input in op.inputs:
op2 = input.owner
if input in i or input in orph or op2 is None:
continue
if op2 in seen:
multi.add(op2)
else:
seen.add(input.owner)
multi = [x for x in multi]
done = set()
def multi_index(x):
return multi.index(x) + 1
def describe(r):
if r.owner is not None and r not in i and r not in orph:
op = r.owner
idx = op.outputs.index(r)
if len(op.outputs) == 1:
idxs = ""
else:
idxs = "::%i" % idx
if op in done:
return "*%i%s" % (multi_index(op), idxs)
else:
done.add(op)
s = node_formatter(op, [describe(input) for input in op.inputs])
if op in multi:
return "*%i -> %s" % (multi_index(op), s)
else:
return s
else:
return leaf_formatter(r)
return [describe(output) for output in o]
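# Hedged example (added for illustration; not part of the original module).
def _demo_as_string():
    import theano.tensor as tt
    x, y = tt.scalar('x'), tt.scalar('y')
    # Yields a one-element list of nested op descriptions, e.g. something
    # like ['Elemwise{mul,no_inplace}(Elemwise{add,no_inplace}(x, y), y)'].
    return as_string([x, y], [(x + y) * y])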
def view_roots(r):
"""
Utility function that returns the leaves of a search through
consecutive view_map()s.
WRITEME
"""
owner = r.owner
if owner is not None:
try:
view_map = owner.op.view_map
view_map = dict((owner.outputs[o], i)
for o, i in iteritems(view_map))
except AttributeError:
return [r]
if r in view_map:
answer = []
for i in view_map[r]:
answer += view_roots(owner.inputs[i])
return answer
else:
return [r]
else:
return [r]
def list_of_nodes(inputs, outputs):
"""
Return the apply nodes of the graph between inputs and outputs.
"""
return stack_search(
deque([o.owner for o in outputs]),
lambda o: [inp.owner for inp in o.inputs
if inp.owner and
not any(i in inp.owner.outputs for i in inputs)])
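# Hedged example (added for illustration; not part of the original module).
def _demo_list_of_nodes():
    import theano.tensor as tt
    x, y = tt.scalar('x'), tt.scalar('y')
    # Returns both Apply nodes (mul and add) lying between inputs and output.
    return list_of_nodes([x, y], [(x + y) * y])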
# === tools/perf/page_sets/intl_ja_zh.py (kjthegod/chromium) ===
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class IntlJaZhPage(page_module.Page):
def __init__(self, url, page_set):
super(IntlJaZhPage, self).__init__(url=url, page_set=page_set)
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/intl_ja_zh.json'
def RunPageInteractions(self, action_runner):
interaction = action_runner.BeginGestureInteraction(
'ScrollAction', is_smooth=True)
action_runner.ScrollPage()
interaction.End()
class IntlJaZhPageSet(page_set_module.PageSet):
""" Popular pages in Japanese and Chinese. """
def __init__(self):
super(IntlJaZhPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/intl_ja_zh.json',
bucket=page_set_module.PARTNER_BUCKET)
urls_list = [
# Why: #5 Japanese site
'http://www.amazon.co.jp',
'http://mixi.jp/',
'http://dtiblog.com/',
'http://2ch.net/',
'http://jugem.jp/',
'http://hatena.ne.jp/',
'http://goo.ne.jp/',
# Why: #1 Japanese site
'http://www.yahoo.co.jp/',
# Why: #3 Japanese site
'http://fc2.com/ja/',
'http://kakaku.com/',
'http://zol.com.cn/',
'http://cn.yahoo.com/',
# Why: #1 Chinese site
'http://www.baidu.com/s?wd=%D0%C2%20%CE%C5',
# Why: #2 Chinese site
'http://www.qq.com/',
# Why: #3 Chinese site
'http://www.taobao.com/index_global.php',
# Why: #4 Chinese site
'http://www.sina.com.cn/',
# Why: #5 Chinese site
# pylint: disable=C0301
'http://www.google.com.hk/#q=%E9%82%84%E6%8F%90%E4%BE%9B&fp=c44d333e710cb480',
'http://udn.com/NEWS/mainpage.shtml',
'http://ruten.com.tw/'
]
for url in urls_list:
self.AddUserStory(IntlJaZhPage(url, self))
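# Hedged usage sketch (added for illustration; not part of the original
# file). Page sets are normally discovered and run by the telemetry
# benchmark harness; direct instantiation is shown only to illustrate the
# shape of the object:
#   page_set = IntlJaZhPageSet()
#   urls = [story.url for story in page_set.user_stories]
# `user_stories` is an assumption about the telemetry PageSet API, not
# verified against this chromium revision.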
# === src/util/losses.py (anglixjtu/MeshCNN_) ===
import torch
import torch.nn as nn
class ChamferLoss(nn.Module):
def __init__(self):
super(ChamferLoss, self).__init__()
self.use_cuda = torch.cuda.is_available()
def forward(self, preds, gts, reverse=True, bidirectional=True):
def compute_loss(preds, gts):
P = self.batch_pairwise_dist(gts, preds)
mins, _ = torch.min(P, 1)
loss_1 = torch.sum(mins)
mins, _ = torch.min(P, 2)
loss_2 = torch.sum(mins)
return loss_1 + loss_2
if bidirectional or reverse:
backward_loss = compute_loss(gts, preds)
if reverse:
return backward_loss
else:
forward_loss = compute_loss(preds, gts)
return forward_loss + backward_loss
else:
forward_loss = compute_loss(preds, gts)
return forward_loss
def batch_pairwise_dist(self, x, y):
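        # Computes the matrix of squared distances ||x_i - y_j||^2 per batch
        # via the identity ||a - b||^2 = ||a||^2 + ||b||^2 - 2*a.b (bmm-based).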
bs, num_points_x, points_dim = x.size()
_, num_points_y, _ = y.size()
xx = torch.bmm(x, x.transpose(2, 1))
yy = torch.bmm(y, y.transpose(2, 1))
zz = torch.bmm(x, y.transpose(2, 1))
if self.use_cuda:
dtype = torch.cuda.LongTensor
else:
dtype = torch.LongTensor
diag_ind_x = torch.arange(0, num_points_x).type(dtype)
diag_ind_y = torch.arange(0, num_points_y).type(dtype)
rx = xx[:, diag_ind_x, diag_ind_x].unsqueeze(1).expand_as(
zz.transpose(2, 1))
ry = yy[:, diag_ind_y, diag_ind_y].unsqueeze(1).expand_as(zz)
P = rx.transpose(2, 1) + ry - 2 * zz
        return P
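# Hedged usage sketch (added for illustration; not part of the original
# file). Random point clouds of shape (batch, n_points, 3); with the
# defaults (reverse=True) only one direction is returned, so the symmetric
# two-sided loss is requested explicitly.
def _demo_chamfer_loss():
    preds = torch.rand(2, 128, 3)
    gts = torch.rand(2, 128, 3)
    return ChamferLoss()(preds, gts, reverse=False, bidirectional=True)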
# === pytorch_toolkit/nncf/examples/object_detection/layers/modules/multibox_loss.py (morkovka1337/openvino_training_extensions) ===
"""
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
from ..box_utils import match, log_sum_exp
class MultiBoxLoss(nn.Module):
"""SSD Weighted Loss Function
Compute Targets:
1) Produce Confidence Target Indices by matching ground truth boxes
with (default) 'priorboxes' that have jaccard index > threshold parameter
(default threshold: 0.5).
2) Produce localization target by 'encoding' variance into offsets of ground
truth boxes and their matched 'priorboxes'.
3) Hard negative mining to filter the excessive number of negative examples
that comes with using a large number of default bounding boxes.
(default negative:positive ratio 3:1)
Objective Loss:
L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
        where Lconf is the cross-entropy loss and Lloc is the SmoothL1 loss
        weighted by α, which is set to 1 by cross-validation.
Args:
c: class confidences,
l: predicted boxes,
g: ground truth boxes
N: number of matched default boxes
See: https://arxiv.org/pdf/1512.02325.pdf for more details.
"""
def __init__(self, cfg, num_classes, overlap_thresh, prior_for_matching,
bkg_label, neg_mining, neg_pos, neg_overlap, encode_target, device=None):
super(MultiBoxLoss, self).__init__()
self.device = device
self.num_classes = num_classes
self.threshold = overlap_thresh
self.background_label = bkg_label
self.encode_target = encode_target
self.use_prior_for_matching = prior_for_matching
self.do_neg_mining = neg_mining
self.negpos_ratio = neg_pos
self.neg_overlap = neg_overlap
def forward(self, predictions, targets):
"""Multibox Loss
Args:
predictions (tuple): A tuple containing loc preds, conf preds,
and prior boxes from SSD net.
conf shape: torch.size(batch_size,num_priors,num_classes)
loc shape: torch.size(batch_size,num_priors,4)
priors shape: torch.size(num_priors,4)
ground_truth (tensor): Ground truth boxes and labels for a batch,
shape: [batch_size,num_objs,5] (last idx is the label).
"""
loc_data, conf_data, priors = predictions
batch = loc_data.size(0)
num_priors = loc_data.size(1)
# match priors (default boxes) and ground truth boxes
loc_t = torch.Tensor(batch, num_priors, 4).to(self.device)
conf_t = torch.LongTensor(batch, num_priors).to(self.device)
for idx in range(batch):
truths = targets[idx][:, :-1].data
labels = targets[idx][:, -1].data
defaults = priors.data
match(self.threshold, truths, defaults[0], labels, loc_t, conf_t, idx)
pos = conf_t > 0
num_pos = pos.sum(dim=1, keepdim=True)
# Localization Loss (Smooth L1)
# Shape: [batch,num_priors,4]
pos_idx = pos.unsqueeze(pos.dim()).expand_as(loc_data)
loc_p = loc_data[pos_idx].view(-1, 4)
loc_t = loc_t[pos_idx].view(-1, 4)
loss_l = F.smooth_l1_loss(loc_p, loc_t, reduction='sum')
# Compute max conf across batch for hard negative mining
batch_conf = conf_data.view(-1, self.num_classes)
loss_c = log_sum_exp(batch_conf) - batch_conf.gather(1, conf_t.view(-1, 1))
# Hard Negative Mining
loss_c = loss_c.view(batch, -1)
loss_c[pos] = 0 # filter out pos boxes for now
_, loss_idx = loss_c.sort(1, descending=True)
_, idx_rank = loss_idx.sort(1)
num_pos = pos.long().sum(1, keepdim=True)
num_neg = torch.clamp(self.negpos_ratio * num_pos, max=pos.size(1) - 1)
neg = idx_rank < num_neg.expand_as(idx_rank)
# Confidence Loss Including Positive and Negative Examples
pos_idx = pos.unsqueeze(2).expand_as(conf_data)
neg_idx = neg.unsqueeze(2).expand_as(conf_data)
conf_p = conf_data[(pos_idx + neg_idx).gt(0)].view(-1, self.num_classes)
targets_weighted = conf_t[(pos + neg).gt(0)]
loss_c = F.cross_entropy(conf_p, targets_weighted, reduction='sum')
# Sum of losses: L(x,c,l,g) = (Lconf(x, c) + αLloc(x,l,g)) / N
N = num_pos.data.sum().to(torch.float)
loss_l /= N
loss_c /= N
return loss_l, loss_c
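# Hedged usage sketch (added for illustration; not part of the original
# file). Shapes follow the docstrings above; `cfg` is accepted but unused
# by __init__:
#   criterion = MultiBoxLoss(cfg=None, num_classes=21, overlap_thresh=0.5,
#                            prior_for_matching=True, bkg_label=0,
#                            neg_mining=True, neg_pos=3, neg_overlap=0.5,
#                            encode_target=False, device='cpu')
#   loss_l, loss_c = criterion((loc_preds, conf_preds, priors), targets)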
# === src/gluonts/transform/dataset.py (lfywork/gluon-ts) ===
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
from typing import Iterator, List
from gluonts.dataset.common import DataEntry, Dataset
from gluonts.transform import Chain, Transformation
class TransformedDataset(Dataset):
"""
A dataset that corresponds to applying a list of transformations to each
element in the base_dataset.
This only supports SimpleTransformations, which do the same thing at
prediction and training time.
Parameters
----------
base_dataset
Dataset to transform
transformations
List of transformations to apply
"""
def __init__(
self, base_dataset: Dataset, transformations: List[Transformation]
) -> None:
self.base_dataset = base_dataset
self.transformations = Chain(transformations)
def __iter__(self) -> Iterator[DataEntry]:
yield from self.transformations(self.base_dataset, is_train=True)
def __len__(self):
return sum(1 for _ in self)
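# Hedged usage sketch (added for illustration; not part of the original
# file). `SomeTransformation` stands in for any concrete Transformation:
#   transformed = TransformedDataset(base_dataset, [SomeTransformation()])
#   for entry in transformed:   # entries are transformed lazily on iteration
#       ...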
# === db.py (tunir27/Attendr-Hardware-Scripts) ===
import sqlite3
import datetime
import time
#import Read1
#import sync
#from datetime import datetime
conn = sqlite3.connect('att.db')
c = conn.cursor()
def db(sid):
#conn = sqlite3.connect('att.db')
#c = conn.cursor()
start_time = time.time()
c.execute('''CREATE TABLE IF NOT EXISTS attendance(ID integer PRIMARY KEY,std_id varchar2,entry_date varchar2,entry_time varchar2,leave_time varchar2,duration varchar2,status varchar2)''')
#print("Enter the values to be inserted")
#print("Student ID")
std_id=sid
t = (std_id,)
c.execute('SELECT * FROM attendance where std_id=?',t)
d=c.fetchone()
#print(d)
if d:
#c.execute('SELECT entry_time FROM attendance where std_id=?',t)
datetime_object = datetime.datetime.strptime(d[3],'%H:%M:%S')
dtime=datetime_object.strftime("%H:%M:%S")
FMT = "%H:%M:%S"
now = datetime.datetime.now()
ntime=now.strftime("%H:%M:%S")
date = datetime.datetime.strptime(str(ntime), FMT) - datetime.datetime.strptime(str(dtime), FMT)
tdelta = datetime.datetime.strptime(str(date),"%H:%M:%S")
#h,m,s=tdelta.split(':')
rtime=int(tdelta.hour)*60+int(tdelta.minute)+(int(tdelta.second)/60)
#print(rtime)
#chtime=datetime.datetime.now()-datetime.timedelta(minutes=30)
if rtime>1:
exit_att(std_id,d[3])
#entry_att(std_id)
#print("Data Inserted")
else:
entry_att(std_id)
#print("Data Inserted")
#c.execute('''drop table attendance''')
#entry_att(std_id)
#printr()
#sync()
#conn.close()
#print(time.time()-start_time)
def entry_att(std_id):
now = datetime.datetime.now()
date=now.strftime("%y/%m/%d")
time=now.strftime("%H:%M:%S")
c.execute('''INSERT INTO attendance(std_id,entry_date,entry_time,status) values(?,?,?,?)''',(std_id,date,time,'0'))
conn.commit()
def exit_att(std_id,ptime):
now = datetime.datetime.now()
#date=now.strftime("%Y-%m-%d")
ltime=now.strftime("%H:%M:%S")
FMT = '%H:%M:%S'
duration = datetime.datetime.strptime(str(ltime), FMT) - datetime.datetime.strptime(str(ptime), FMT)
utime=datetime.datetime.strptime(str(duration),"%H:%M:%S")
dtime=utime.strftime("%H:%M:%S")
#print(duration,dtime)
#print(type(duration))
#print(type(dtime))
c.execute('''UPDATE attendance SET leave_time=?,duration=?,status=? where std_id=?''',(ltime,dtime,'0',std_id))
conn.commit()
def printr():
c.execute('''SELECT * FROM attendance''')
print(c.fetchall())
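# Hedged usage sketch (added for illustration; not part of the original
# script). A first scan inserts a row; a scan more than one minute later
# fills in leave_time/duration:
#   db("STD001")
#   # ... > 1 minute later ...
#   db("STD001")
#   printr()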
# === __init__.py (LevinJac/viseme-mqtt-skill-mycroft) ===
from mycroft import MycroftSkill
from mycroft.messagebus import Message
import json
from .lib import MqttService
class MessageListener(MycroftSkill):
# Initializing the skill
def initialize(self):
self.log.info("Initializing Skill MessageListener")
self.add_event('speak', self.handler_speak)
self.add_event('enclosure.mouth.viseme_list', self.handler_enclosure_mouth_viseme_list)
self.mqttservice = MqttService("VisemeSkill", "mosquitto", self.log.info)
self.prepare_for_webapp_message()
def prepare_for_webapp_message(self):
self.mqttservice.loopStart()
self.mqttservice.subscribe("faceme/webapp", self.message_recieved)
# acquiring speak data (the text mycroft will output):
def handler_speak(self, message):
self.text = message.data.get('utterance')
# acquiring mouth_viseme_list data:
def handler_enclosure_mouth_viseme_list(self, message):
self.startTime = message.data.get('start')
self.visemes = message.data.get('visemes')
# Call method send_visemelist(build_json()) to send our now complete dataset via mqtt in a json string format
self.send_visemelist(self.build_json())
# Function to convert the strings acquired from the messagebus into a json string and return it:
def build_json(self):
data_set = {"text": self.text, "start": self.startTime, "visemes": self.visemes}
json_dump = json.dumps(data_set)
return json_dump
def send_visemelist(self, payload):
self.mqttservice.subscribe("faceme/mycroft/visemes", self.message_recieved) # Printet on_message von MQTT_service
# Publish the payload we created in build_json() Wird richtig übertragen
self.mqttservice.publish("faceme/mycroft/visemes", payload)
def message_recieved(self, message):
self.log.info("Es ist eine Nachricht angekommen: " + str(message.payload) + " topic: " + message.topic)
if message.topic == "faceme/webapp":
self.webapp_message(message)
def webapp_message(self, message):
decoded_message = str(message.payload.decode("utf-8"))
msg = json.loads(decoded_message)
self.bus.emit(Message(msg["type"], msg["data"]))
def shutdown(self):
self.mqttservice.loopStop()
self.mqttservice.disconnect()
def create_skill():
return MessageListener()
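# Hedged illustration (added; not part of the original file): the MQTT
# payload produced by build_json() has this JSON shape (example values
# are made up):
#   {"text": "hello", "start": 1234.5,
#    "visemes": [["3", 0.08], ["2", 0.17]]}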
###### Unused Function #######
# Function adds the duration each viseme should be displayed to its array so the data would be: "visemes": [[CODE, END_TIME, DURATION], ...]
#def addDuration(self):
#self.visemes[0].append(self.visemes[0][1]) # Do we need this?
#for x in range(len(self.visemes)):
#if x < (len(self.visemes)-1):
#duration = self.visemes[x+1][1] - self.visemes[x][1]
            #self.visemes[x+1].append(duration)
# === src/example_d/trade/get_position.py (Han1018/Cryptocurrency-Automated-Trading) ===
from binance_d import RequestClient
from binance_d.constant.test import *
from binance_d.base.printobject import *
from binance_d.model.constant import *
request_client = RequestClient(api_key=g_api_key, secret_key=g_secret_key)
result = request_client.get_position()
PrintMix.print_data(result)
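# Hedged sketch (added; not part of the original script): the positions in
# `result` can be post-processed, e.g. keeping non-flat ones. The attribute
# name `positionAmt` is an assumption, not verified against the SDK:
#   open_positions = [p for p in result if float(p.positionAmt) != 0]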
# === spectra/__init__.py (jevandezande/spectra) ===
"""Top-level package for spectra."""
from .conv_spectrum import ConvSpectrum
from .sticks_spectrum import SticksSpectrum
__author__ = """Jonathon Vandezande"""
__email__ = "jevandezande@gmail.com"
__version__ = "0.4.0"
__all__ = ["ConvSpectrum", "SticksSpectrum"]
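# Hedged sketch (added; not part of the original __init__): downstream code
# can rely on the re-exports above:
#   from spectra import ConvSpectrum, SticksSpectrum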
# === aslam_offline_calibration/kalibr/python/kalibr_camera_calibration/CameraIntializers.py (CORAL-CMU/kalibr) ===
import sm
import aslam_backend as aopt
import aslam_cv as cv
import numpy as np
def addPoseDesignVariable(problem, T0=sm.Transformation()):
q_Dv = aopt.RotationQuaternionDv( T0.q() )
q_Dv.setActive( True )
problem.addDesignVariable(q_Dv)
t_Dv = aopt.EuclideanPointDv( T0.t() )
t_Dv.setActive( True )
problem.addDesignVariable(t_Dv)
return aopt.TransformationBasicDv( q_Dv.toExpression(), t_Dv.toExpression() )
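# Hedged usage sketch (added for illustration; not part of the original
# file): the helper above registers a 6-DoF pose (quaternion + translation)
# as active design variables and returns the combined transformation dv:
#   T_dv = addPoseDesignVariable(problem)                    # identity guess
#   T_dv = addPoseDesignVariable(problem, T0=some_initial_T) # warm start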
def stereoCalibrate(camL_geometry, camH_geometry, obslist, distortionActive=False, baseline=None):
#####################################################
## find initial guess as median of all pnp solutions
#####################################################
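## (the median, rather than the mean, of the per-view PnP estimates is taken
## below, which keeps occasional bad estimates from skewing the initial guess)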
if baseline is None:
r=[]; t=[]
for obsL, obsH in obslist:
#if we have observations for both cams
if obsL is not None and obsH is not None:
success, T_L = camL_geometry.geometry.estimateTransformation(obsL)
success, T_H = camH_geometry.geometry.estimateTransformation(obsH)
baseline = T_H.inverse()*T_L
t.append(baseline.t())
rv=sm.RotationVector()
r.append(rv.rotationMatrixToParameters( baseline.C() ))
r_median = np.median(np.asmatrix(r), axis=0).flatten().T
R_median = rv.parametersToRotationMatrix(r_median)
t_median = np.median(np.asmatrix(t), axis=0).flatten().T
baseline_HL = sm.Transformation( sm.rt2Transform(R_median, t_median) )
else:
baseline_HL = baseline
#verbose output
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
dL = camL_geometry.geometry.projection().distortion().getParameters().flatten()
pL = camL_geometry.geometry.projection().getParameters().flatten()
dH = camH_geometry.geometry.projection().distortion().getParameters().flatten()
pH = camH_geometry.geometry.projection().getParameters().flatten()
sm.logDebug("initial guess for stereo calib: {0}".format(baseline_HL.T()))
sm.logDebug("initial guess for intrinsics camL: {0}".format(pL))
sm.logDebug("initial guess for intrinsics camH: {0}".format(pH))
sm.logDebug("initial guess for distortion camL: {0}".format(dL))
sm.logDebug("initial guess for distortion camH: {0}".format(dH))
############################################
## solve the bundle adjustment
############################################
problem = aopt.OptimizationProblem()
#baseline design variable
baseline_dv = addPoseDesignVariable(problem, baseline_HL)
#target pose dv for all target views (=T_camL_w)
target_pose_dvs = list()
for obsL, obsH in obslist:
if obsL is not None: #use camL if we have an obs for this one
success, T_t_cL = camL_geometry.geometry.estimateTransformation(obsL)
else:
success, T_t_cH = camH_geometry.geometry.estimateTransformation(obsH)
T_t_cL = T_t_cH*baseline_HL #apply baseline for the second camera
target_pose_dv = addPoseDesignVariable(problem, T_t_cL)
target_pose_dvs.append(target_pose_dv)
#add camera dvs
camL_geometry.setDvActiveStatus(camL_geometry.projectionActive, distortionActive or camL_geometry.distortionActive, False)
camH_geometry.setDvActiveStatus(camH_geometry.projectionActive, distortionActive or camH_geometry.distortionActive, False)
problem.addDesignVariable(camL_geometry.dv.distortionDesignVariable())
problem.addDesignVariable(camL_geometry.dv.projectionDesignVariable())
problem.addDesignVariable(camL_geometry.dv.shutterDesignVariable())
problem.addDesignVariable(camH_geometry.dv.distortionDesignVariable())
problem.addDesignVariable(camH_geometry.dv.projectionDesignVariable())
problem.addDesignVariable(camH_geometry.dv.shutterDesignVariable())
############################################
## add error terms
############################################
#corner uncertainty
# \todo pass in the detector uncertainty somehow.
cornerUncertainty = 1.0
R = np.eye(2) * cornerUncertainty * cornerUncertainty
invR = np.linalg.inv(R)
#Add reprojection error terms for both cameras
reprojectionErrors0 = []; reprojectionErrors1 = []
for cidx, cam in enumerate([camL_geometry, camH_geometry]):
sm.logDebug("stereoCalibration: adding camera error terms for {0} calibration targets".format(len(obslist)))
#get the image and target points corresponding to the frame
target = cam.ctarget.detector.target()
#add error terms for all observations
for view_id, obstuple in enumerate(obslist):
#add error terms if we have an observation for this cam
obs=obstuple[cidx]
if obs is not None:
T_cam_w = target_pose_dvs[view_id].toExpression().inverse()
#add the baseline for the second camera
if cidx!=0:
T_cam_w = baseline_dv.toExpression() * T_cam_w
for i in range(0, target.size()):
p_target = aopt.HomogeneousExpression(sm.toHomogeneous(target.point(i)));
valid, y = obs.imagePoint(i)
if valid:
# Create an error term.
rerr = cam.model.reprojectionError(y, invR, T_cam_w * p_target, cam.dv)
rerr.idx = i
problem.addErrorTerm(rerr)
if cidx==0:
reprojectionErrors0.append(rerr)
else:
reprojectionErrors1.append(rerr)
sm.logDebug("stereoCalibrate: added {0} camera error terms".format( len(reprojectionErrors0)+len(reprojectionErrors1) ))
############################################
## solve
############################################
options = aopt.Optimizer2Options()
options.verbose = True if sm.getLoggingLevel()==sm.LoggingLevel.Debug else False
options.nThreads = 4
options.convergenceDeltaX = 1e-3
options.convergenceDeltaJ = 1
options.maxIterations = 200
options.trustRegionPolicy = aopt.LevenbergMarquardtTrustRegionPolicy(10)
optimizer = aopt.Optimizer2(options)
optimizer.setProblem(problem)
#verbose output
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
sm.logDebug("Before optimization:")
e2 = np.array([ e.evaluateError() for e in reprojectionErrors0 ])
sm.logDebug( " Reprojection error squared (camL): mean {0}, median {1}, std: {2}".format(np.mean(e2), np.median(e2), np.std(e2) ) )
e2 = np.array([ e.evaluateError() for e in reprojectionErrors1 ])
sm.logDebug( " Reprojection error squared (camH): mean {0}, median {1}, std: {2}".format(np.mean(e2), np.median(e2), np.std(e2) ) )
sm.logDebug("baseline={0}".format(baseline_dv.toTransformationMatrix()))
try:
retval = optimizer.optimize()
if retval.linearSolverFailure:
sm.logError("stereoCalibrate: Optimization failed!")
success = not retval.linearSolverFailure
except:
sm.logError("stereoCalibrate: Optimization failed!")
success = False
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
sm.logDebug("After optimization:")
e2 = np.array([ e.evaluateError() for e in reprojectionErrors0 ])
sm.logDebug( " Reprojection error squared (camL): mean {0}, median {1}, std: {2}".format(np.mean(e2), np.median(e2), np.std(e2) ) )
e2 = np.array([ e.evaluateError() for e in reprojectionErrors1 ])
sm.logDebug( " Reprojection error squared (camH): mean {0}, median {1}, std: {2}".format(np.mean(e2), np.median(e2), np.std(e2) ) )
#verbose output
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
dL = camL_geometry.geometry.projection().distortion().getParameters().flatten()
pL = camL_geometry.geometry.projection().getParameters().flatten()
dH = camH_geometry.geometry.projection().distortion().getParameters().flatten()
pH = camH_geometry.geometry.projection().getParameters().flatten()
sm.logDebug("guess for intrinsics camL: {0}".format(pL))
sm.logDebug("guess for intrinsics camH: {0}".format(pH))
sm.logDebug("guess for distortion camL: {0}".format(dL))
sm.logDebug("guess for distortion camH: {0}".format(dH))
if success:
baseline_HL = sm.Transformation(baseline_dv.toTransformationMatrix())
return success, baseline_HL
else:
#return the initial guess if we fail
return success, baseline_HL
def calibrateIntrinsics(cam_geometry, obslist, distortionActive=True, intrinsicsActive=True):
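# Refines a single camera's projection (and optionally distortion) parameters
# against all given target observations; the shutter design variable stays inactive.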
#verbose output
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
d = cam_geometry.geometry.projection().distortion().getParameters().flatten()
p = cam_geometry.geometry.projection().getParameters().flatten()
sm.logDebug("calibrateIntrinsics: intrinsics guess: {0}".format(p))
sm.logDebug("calibrateIntrinsics: distortion guess: {0}".format(d))
############################################
## solve the bundle adjustment
############################################
problem = aopt.OptimizationProblem()
#add camera dvs
cam_geometry.setDvActiveStatus(intrinsicsActive, distortionActive, False)
problem.addDesignVariable(cam_geometry.dv.distortionDesignVariable())
problem.addDesignVariable(cam_geometry.dv.projectionDesignVariable())
problem.addDesignVariable(cam_geometry.dv.shutterDesignVariable())
#corner uncertainty
cornerUncertainty = 1.0
R = np.eye(2) * cornerUncertainty * cornerUncertainty
invR = np.linalg.inv(R)
#get the image and target points corresponding to the frame
target = cam_geometry.ctarget.detector.target()
#target pose dv for all target views (=T_camL_w)
reprojectionErrors = [];
sm.logDebug("calibrateIntrinsics: adding camera error terms for {0} calibration targets".format(len(obslist)))
target_pose_dvs=list()
for obs in obslist:
success, T_t_c = cam_geometry.geometry.estimateTransformation(obs)
target_pose_dv = addPoseDesignVariable(problem, T_t_c)
target_pose_dvs.append(target_pose_dv)
T_cam_w = target_pose_dv.toExpression().inverse()
## add error terms
for i in range(0, target.size()):
p_target = aopt.HomogeneousExpression(sm.toHomogeneous(target.point(i)));
valid, y = obs.imagePoint(i)
if valid:
rerr = cam_geometry.model.reprojectionError(y, invR, T_cam_w * p_target, cam_geometry.dv)
problem.addErrorTerm(rerr)
reprojectionErrors.append(rerr)
sm.logDebug("calibrateIntrinsics: added {0} camera error terms".format(len(reprojectionErrors)))
############################################
## solve
############################################
options = aopt.Optimizer2Options()
options.verbose = True if sm.getLoggingLevel()==sm.LoggingLevel.Debug else False
options.nThreads = 4
options.convergenceDeltaX = 1e-3
options.convergenceDeltaJ = 1
options.maxIterations = 200
options.trustRegionPolicy = aopt.LevenbergMarquardtTrustRegionPolicy(10)
optimizer = aopt.Optimizer2(options)
optimizer.setProblem(problem)
#verbose output
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
sm.logDebug("Before optimization:")
e2 = np.array([ e.evaluateError() for e in reprojectionErrors ])
sm.logDebug( " Reprojection error squared (cam): mean {0}, median {1}, std: {2}".format(np.mean(e2), np.median(e2), np.std(e2) ) )
#run intrinsic calibration
try:
retval = optimizer.optimize()
if retval.linearSolverFailure:
sm.logError("calibrateIntrinsics: Optimization failed!")
success = not retval.linearSolverFailure
except:
sm.logError("calibrateIntrinsics: Optimization failed!")
success = False
#verbose output
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
d = cam_geometry.geometry.projection().distortion().getParameters().flatten()
p = cam_geometry.geometry.projection().getParameters().flatten()
sm.logDebug("calibrateIntrinsics: guess for intrinsics cam: {0}".format(p))
sm.logDebug("calibrateIntrinsics: guess for distortion cam: {0}".format(d))
return success
def solveFullBatch(cameras, baseline_guesses, graph):
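# Full-batch bundle adjustment over the whole camera chain: jointly refines the
# camera intrinsics/distortions, the inter-camera baselines and one target pose
# per observed view.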
############################################
## solve the bundle adjustment
############################################
problem = aopt.OptimizationProblem()
#add camera dvs
for cam in cameras:
cam.setDvActiveStatus(cam.projectionActive, cam.distortionActive, False)
problem.addDesignVariable(cam.dv.distortionDesignVariable())
problem.addDesignVariable(cam.dv.projectionDesignVariable())
problem.addDesignVariable(cam.dv.shutterDesignVariable())
baseline_dvs = list()
for baseline_idx in range(0, len(cameras)-1):
baseline_dv = aopt.TransformationDv(baseline_guesses[baseline_idx])
for i in range(0, baseline_dv.numDesignVariables()):
problem.addDesignVariable(baseline_dv.getDesignVariable(i))
baseline_dvs.append( baseline_dv )
#corner uncertainty
cornerUncertainty = 1.0
R = np.eye(2) * cornerUncertainty * cornerUncertainty
invR = np.linalg.inv(R)
#get the target
target = cameras[0].ctarget.detector.target()
#Add calibration target reprojection error terms for all camera in chain
target_pose_dvs = list()
#iterate over all views
reprojectionErrors = [];
timestamps = graph.obs_db.getAllViewTimestamps()
for view_id, timestamp in enumerate(timestamps):
#get all observations for all cams at this time
obs_tuple = graph.obs_db.getAllObsAtTimestamp(timestamp)
#create a target pose dv for all target views (= T_cam0_w)
T0 = graph.getTargetPoseGuess(timestamp, cameras, baseline_guesses)
target_pose_dv = addPoseDesignVariable(problem, T0)
target_pose_dvs.append(target_pose_dv)
for cidx, obs in obs_tuple:
cam = cameras[cidx]
#calibration target coords to camera N coords
T_cam0_calib = target_pose_dv.toExpression().inverse()
#build pose chain (target->cam0->baselines->camN)
T_camN_calib = T_cam0_calib
for idx in range(0, cidx):
T_camN_calib = baseline_dvs[idx].toExpression() * T_camN_calib
## add error terms
for i in range(0, target.size()):
p_target = aopt.HomogeneousExpression(sm.toHomogeneous(target.point(i)));
valid, y = obs.imagePoint(i)
if valid:
rerr = cameras[cidx].model.reprojectionError(y, invR, T_camN_calib * p_target, cameras[cidx].dv)
problem.addErrorTerm(rerr)
reprojectionErrors.append(rerr)
sm.logDebug("solveFullBatch: added {0} camera error terms".format(len(reprojectionErrors)))
############################################
## solve
############################################
options = aopt.Optimizer2Options()
options.verbose = True if sm.getLoggingLevel()==sm.LoggingLevel.Debug else False
options.nThreads = 4
options.convergenceDeltaX = 1e-3
options.convergenceDeltaJ = 1
options.maxIterations = 250
options.trustRegionPolicy = aopt.LevenbergMarquardtTrustRegionPolicy(10)
optimizer = aopt.Optimizer2(options)
optimizer.setProblem(problem)
#verbose output
if sm.getLoggingLevel()==sm.LoggingLevel.Debug:
sm.logDebug("Before optimization:")
e2 = np.array([ e.evaluateError() for e in reprojectionErrors ])
sm.logDebug( " Reprojection error squared (all cams): mean {0}, median {1}, std: {2}".format(np.mean(e2), np.median(e2), np.std(e2) ) )
#run intrinsic calibration
try:
retval = optimizer.optimize()
if retval.linearSolverFailure:
sm.logError("calibrateIntrinsics: Optimization failed!")
success = not retval.linearSolverFailure
except:
sm.logError("calibrateIntrinsics: Optimization failed!")
success = False
baselines=list()
for baseline_dv in baseline_dvs:
baselines.append( sm.Transformation(baseline_dv.T()) )
return success, baselines
 | 44.542857 | 141 | 0.623068 |
| true | true |
f71ac1809f6473acb6bd2afca69ff45e16538c2b | 12,263 | py | Python | tests/python/unittest/test_gluon_rnn.py | ymaxgit/mxnet | 01ae629c6593e0352fd30979bccd0196854ef882 | [
"Apache-2.0"
] | 1 | 2022-03-03T18:36:42.000Z | 2022-03-03T18:36:42.000Z | tests/python/unittest/test_gluon_rnn.py | ymaxgit/mxnet | 01ae629c6593e0352fd30979bccd0196854ef882 | [
"Apache-2.0"
] | 1 | 2022-02-28T21:23:12.000Z | 2022-03-03T18:33:42.000Z | tests/python/unittest/test_gluon_rnn.py | ymaxgit/mxnet | 01ae629c6593e0352fd30979bccd0196854ef882 | [
"Apache-2.0"
] | 1 | 2022-03-03T18:36:37.000Z | 2022-03-03T18:36:37.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
from mxnet import gluon
import numpy as np
from numpy.testing import assert_allclose
import unittest
from mxnet.test_utils import almost_equal
def test_rnn():
cell = gluon.rnn.RNNCell(100, prefix='rnn_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']
assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_lstm():
cell = gluon.rnn.LSTMCell(100, prefix='rnn_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']
assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_lstm_forget_bias():
forget_bias = 2.0
stack = gluon.rnn.SequentialRNNCell()
stack.add(gluon.rnn.LSTMCell(100, i2h_bias_initializer=mx.init.LSTMBias(forget_bias), prefix='l0_'))
stack.add(gluon.rnn.LSTMCell(100, i2h_bias_initializer=mx.init.LSTMBias(forget_bias), prefix='l1_'))
dshape = (32, 1, 200)
data = mx.sym.Variable('data')
sym, _ = stack.unroll(1, data, merge_outputs=True)
mod = mx.mod.Module(sym, label_names=None, context=mx.cpu(0))
mod.bind(data_shapes=[('data', dshape)], label_shapes=None)
mod.init_params()
bias_argument = next(x for x in sym.list_arguments() if x.endswith('i2h_bias'))
expected_bias = np.hstack([np.zeros((100,)),
forget_bias * np.ones(100, ), np.zeros((2 * 100,))])
assert_allclose(mod.get_params()[0][bias_argument].asnumpy(), expected_bias)
def test_gru():
cell = gluon.rnn.GRUCell(100, prefix='rnn_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == ['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']
assert outputs.list_outputs() == ['rnn_t0_out_output', 'rnn_t1_out_output', 'rnn_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_residual():
cell = gluon.rnn.ResidualCell(gluon.rnn.GRUCell(50, prefix='rnn_'))
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(2)]
outputs, _ = cell.unroll(2, inputs)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == \
['rnn_h2h_bias', 'rnn_h2h_weight', 'rnn_i2h_bias', 'rnn_i2h_weight']
# assert outputs.list_outputs() == \
# ['rnn_t0_out_plus_residual_output', 'rnn_t1_out_plus_residual_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10, 50), rnn_t1_data=(10, 50))
assert outs == [(10, 50), (10, 50)]
outputs = outputs.eval(rnn_t0_data=mx.nd.ones((10, 50)),
rnn_t1_data=mx.nd.ones((10, 50)),
rnn_i2h_weight=mx.nd.zeros((150, 50)),
rnn_i2h_bias=mx.nd.zeros((150,)),
rnn_h2h_weight=mx.nd.zeros((150, 50)),
rnn_h2h_bias=mx.nd.zeros((150,)))
expected_outputs = np.ones((10, 50))
assert np.array_equal(outputs[0].asnumpy(), expected_outputs)
assert np.array_equal(outputs[1].asnumpy(), expected_outputs)
def test_residual_bidirectional():
cell = gluon.rnn.ResidualCell(
gluon.rnn.BidirectionalCell(
gluon.rnn.GRUCell(25, prefix='rnn_l_'),
gluon.rnn.GRUCell(25, prefix='rnn_r_')))
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(2)]
outputs, _ = cell.unroll(2, inputs, merge_outputs=False)
outputs = mx.sym.Group(outputs)
assert sorted(cell.collect_params().keys()) == \
['rnn_l_h2h_bias', 'rnn_l_h2h_weight', 'rnn_l_i2h_bias', 'rnn_l_i2h_weight',
'rnn_r_h2h_bias', 'rnn_r_h2h_weight', 'rnn_r_i2h_bias', 'rnn_r_i2h_weight']
# assert outputs.list_outputs() == \
# ['bi_t0_plus_residual_output', 'bi_t1_plus_residual_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10, 50), rnn_t1_data=(10, 50))
assert outs == [(10, 50), (10, 50)]
outputs = outputs.eval(rnn_t0_data=mx.nd.ones((10, 50))+5,
rnn_t1_data=mx.nd.ones((10, 50))+5,
rnn_l_i2h_weight=mx.nd.zeros((75, 50)),
rnn_l_i2h_bias=mx.nd.zeros((75,)),
rnn_l_h2h_weight=mx.nd.zeros((75, 25)),
rnn_l_h2h_bias=mx.nd.zeros((75,)),
rnn_r_i2h_weight=mx.nd.zeros((75, 50)),
rnn_r_i2h_bias=mx.nd.zeros((75,)),
rnn_r_h2h_weight=mx.nd.zeros((75, 25)),
rnn_r_h2h_bias=mx.nd.zeros((75,)))
expected_outputs = np.ones((10, 50))+5
assert np.array_equal(outputs[0].asnumpy(), expected_outputs)
assert np.array_equal(outputs[1].asnumpy(), expected_outputs)
def test_stack():
cell = gluon.rnn.SequentialRNNCell()
for i in range(5):
if i == 1:
cell.add(gluon.rnn.ResidualCell(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_' % i)))
else:
cell.add(gluon.rnn.LSTMCell(100, prefix='rnn_stack%d_'%i))
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
keys = sorted(cell.collect_params().keys())
for i in range(5):
assert 'rnn_stack%d_h2h_weight'%i in keys
assert 'rnn_stack%d_h2h_bias'%i in keys
assert 'rnn_stack%d_i2h_weight'%i in keys
assert 'rnn_stack%d_i2h_bias'%i in keys
assert outputs.list_outputs() == ['rnn_stack4_t0_out_output', 'rnn_stack4_t1_out_output', 'rnn_stack4_t2_out_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def test_bidirectional():
cell = gluon.rnn.BidirectionalCell(
gluon.rnn.LSTMCell(100, prefix='rnn_l0_'),
gluon.rnn.LSTMCell(100, prefix='rnn_r0_'),
output_prefix='rnn_bi_')
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
assert outputs.list_outputs() == ['rnn_bi_t0_output', 'rnn_bi_t1_output', 'rnn_bi_t2_output']
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 200), (10, 200), (10, 200)]
def test_zoneout():
cell = gluon.rnn.ZoneoutCell(gluon.rnn.RNNCell(100, prefix='rnn_'), zoneout_outputs=0.5,
zoneout_states=0.5)
inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
outputs, _ = cell.unroll(3, inputs)
outputs = mx.sym.Group(outputs)
args, outs, auxs = outputs.infer_shape(rnn_t0_data=(10,50), rnn_t1_data=(10,50), rnn_t2_data=(10,50))
assert outs == [(10, 100), (10, 100), (10, 100)]
def check_rnn_forward(layer, inputs, deterministic=True):
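# Runs the cell both imperatively and hybridized and checks that outputs and
# input gradients agree (the comparison is skipped for stochastic cells such as
# Dropout/Zoneout via deterministic=False).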
inputs.attach_grad()
layer.collect_params().initialize()
with mx.autograd.record():
out = layer.unroll(3, inputs, merge_outputs=False)[0]
mx.autograd.backward(out)
out = layer.unroll(3, inputs, merge_outputs=True)[0]
out.backward()
np_out = out.asnumpy()
np_dx = inputs.grad.asnumpy()
layer.hybridize()
with mx.autograd.record():
out = layer.unroll(3, inputs, merge_outputs=False)[0]
mx.autograd.backward(out)
out = layer.unroll(3, inputs, merge_outputs=True)[0]
out.backward()
if deterministic:
mx.test_utils.assert_almost_equal(np_out, out.asnumpy(), rtol=1e-3, atol=1e-5)
mx.test_utils.assert_almost_equal(np_dx, inputs.grad.asnumpy(), rtol=1e-3, atol=1e-5)
def test_rnn_cells():
check_rnn_forward(gluon.rnn.LSTMCell(100, input_size=200), mx.nd.ones((8, 3, 200)))
check_rnn_forward(gluon.rnn.RNNCell(100, input_size=200), mx.nd.ones((8, 3, 200)))
check_rnn_forward(gluon.rnn.GRUCell(100, input_size=200), mx.nd.ones((8, 3, 200)))
bilayer = gluon.rnn.BidirectionalCell(gluon.rnn.LSTMCell(100, input_size=200),
gluon.rnn.LSTMCell(100, input_size=200))
check_rnn_forward(bilayer, mx.nd.ones((8, 3, 200)))
check_rnn_forward(gluon.rnn.DropoutCell(0.5), mx.nd.ones((8, 3, 200)), False)
check_rnn_forward(gluon.rnn.ZoneoutCell(gluon.rnn.LSTMCell(100, input_size=200),
0.5, 0.2),
mx.nd.ones((8, 3, 200)), False)
net = gluon.rnn.SequentialRNNCell()
net.add(gluon.rnn.LSTMCell(100, input_size=200))
net.add(gluon.rnn.RNNCell(100, input_size=100))
net.add(gluon.rnn.GRUCell(100, input_size=100))
check_rnn_forward(net, mx.nd.ones((8, 3, 200)))
def check_rnn_layer_forward(layer, inputs, states=None):
layer.collect_params().initialize()
inputs.attach_grad()
with mx.autograd.record():
out = layer(inputs, states)
if states is not None:
assert isinstance(out, tuple) and len(out) == 2
out = out[0]
else:
assert isinstance(out, mx.nd.NDArray)
out.backward()
np_out = out.asnumpy()
np_dx = inputs.grad.asnumpy()
layer.hybridize()
with mx.autograd.record():
out = layer(inputs, states)
if states is not None:
assert isinstance(out, tuple) and len(out) == 2
out = out[0]
else:
assert isinstance(out, mx.nd.NDArray)
out.backward()
mx.test_utils.assert_almost_equal(np_out, out.asnumpy(), rtol=1e-3, atol=1e-5)
mx.test_utils.assert_almost_equal(np_dx, inputs.grad.asnumpy(), rtol=1e-3, atol=1e-5)
def test_rnn_layers():
check_rnn_layer_forward(gluon.rnn.RNN(10, 2), mx.nd.ones((8, 3, 20)))
check_rnn_layer_forward(gluon.rnn.RNN(10, 2), mx.nd.ones((8, 3, 20)), mx.nd.ones((2, 3, 10)))
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2), mx.nd.ones((8, 3, 20)))
check_rnn_layer_forward(gluon.rnn.LSTM(10, 2), mx.nd.ones((8, 3, 20)), [mx.nd.ones((2, 3, 10)), mx.nd.ones((2, 3, 10))])
check_rnn_layer_forward(gluon.rnn.GRU(10, 2), mx.nd.ones((8, 3, 20)))
check_rnn_layer_forward(gluon.rnn.GRU(10, 2), mx.nd.ones((8, 3, 20)), mx.nd.ones((2, 3, 10)))
net = gluon.nn.Sequential()
net.add(gluon.rnn.LSTM(10, 2, bidirectional=True))
net.add(gluon.nn.BatchNorm(axis=2))
net.add(gluon.nn.Flatten())
net.add(gluon.nn.Dense(3, activation='relu'))
net.collect_params().initialize()
with mx.autograd.record():
net(mx.nd.ones((2, 3, 10))).backward()
if __name__ == '__main__':
import nose
nose.runmodule()
 | 43.640569 | 124 | 0.643562 |
| true | true |
f71ac220110425c4090ee4f6700cf2ea38162317 | 2,372 | py | Python | foxlink/me_zrl_bound_evolvers.py | lamsoa729/FoXlink | 3c061b02968cdab1def752d5c145a6df4615504b | [
"BSD-3-Clause"
] | null | null | null | foxlink/me_zrl_bound_evolvers.py | lamsoa729/FoXlink | 3c061b02968cdab1def752d5c145a6df4615504b | [
"BSD-3-Clause"
] | null | null | null | foxlink/me_zrl_bound_evolvers.py | lamsoa729/FoXlink | 3c061b02968cdab1def752d5c145a6df4615504b | [
"BSD-3-Clause"
] | 2 | 2019-06-18T16:48:03.000Z | 2019-06-20T23:50:02.000Z | #!/usr/bin/env python
"""@package docstring
File: me_zrl_bound_evolvers.py
Author: Adam Lamson
Email: adam.lamson@colorado.edu
Description:
"""
import numpy as np
# from scipy.integrate import dblquad
from .me_helpers import dr_dt, convert_sol_to_geom
from .me_zrl_odes import (rod_geom_derivs_zrl, calc_moment_derivs_zrl,
calc_moment_derivs_zrl_B_terms,
calc_boundary_derivs_zrl)
from .me_zrl_helpers import (avg_force_zrl,
prep_zrl_bound_evolver,
get_zrl_moments_and_boundary_terms)
from .rod_steric_forces import calc_wca_force_torque
from .me_zrl_evolvers import prep_zrl_evolver
def evolver_zrl_bound(sol, fric_coeff, params):
"""!Calculate all time derivatives necessary to solve the moment expansion
evolution of the Fokker-Planck equation of zero rest length (zrl) crosslinkers
bound to moving rods. d<var> is the time derivative of corresponding
variable
@param sol: Solution vector to solve_ivp
@param fric_coeff: friction coefficients of rod
@param params: Constant parameters of the simulation
@return: Time-derivatives of all time varying quantities in a flattened
array
"""
# Define useful parameters for functions
hL_i, hL_j = (.5 * params['L_i'], .5 * params['L_j'])
ks = params['ks']
r_i, r_j, u_i, u_j = convert_sol_to_geom(sol)
r_ij = r_j - r_i
(scalar_geom, q_arr, Q_arr) = prep_zrl_bound_evolver(sol, params)
(mu_kl, B_terms) = get_zrl_moments_and_boundary_terms(sol)
if mu_kl[0] < 0.:
mu_kl[0] = 0.
if mu_kl[4] < 0.:
mu_kl[4] = 0.
if mu_kl[5] < 0.:
mu_kl[5] = 0.
# Get average force of crosslinkers on rod2
f_ij = avg_force_zrl(r_ij, u_i, u_j, mu_kl[0], mu_kl[1], mu_kl[2], ks)
# Evolution of rod positions
dgeom = rod_geom_derivs_zrl(f_ij, r_ij, u_i, u_j, scalar_geom,
mu_kl, fric_coeff, ks)
# Evolution of moments
dmu_kl = calc_moment_derivs_zrl_B_terms(mu_kl, scalar_geom,
q_arr, B_terms, params)
# Evolution of boundary conditions
dB_terms = calc_boundary_derivs_zrl(B_terms, scalar_geom, Q_arr, params)
# np.concatenate takes a single sequence of arrays; passing them as separate
# positional arguments would be interpreted as (array, axis, out) and fail
dsol = np.concatenate((dgeom, dmu_kl, dB_terms))
return dsol
##########################################
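# Hypothetical usage sketch (variable names assumed, not part of this module):
# from scipy.integrate import solve_ivp
# rhs = lambda t, y: evolver_zrl_bound(y, fric_coeff, params)
# result = solve_ivp(rhs, (0., t_end), sol0)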
 | 35.939394 | 82 | 0.660624 |
| true | true |
f71ac5ae55c84dae849e3d0cc87c208a05d7bfcc | 264 | py | Python | antipetros_discordbot/engine/replacements/command_replacements/__init__.py | official-antistasi-community/Antipetros_Discord_Bot | 1b5c8b61c09e61cdff671e259f0478d343a50c8d | [
"MIT"
] | null | null | null | antipetros_discordbot/engine/replacements/command_replacements/__init__.py | official-antistasi-community/Antipetros_Discord_Bot | 1b5c8b61c09e61cdff671e259f0478d343a50c8d | [
"MIT"
] | null | null | null | antipetros_discordbot/engine/replacements/command_replacements/__init__.py | official-antistasi-community/Antipetros_Discord_Bot | 1b5c8b61c09e61cdff671e259f0478d343a50c8d | [
"MIT"
] | 1 | 2021-02-12T01:10:51.000Z | 2021-02-12T01:10:51.000Z | from .base_command import AntiPetrosBaseCommand
from .flag_command import AntiPetrosFlagCommand
from .creation_decorators import auto_meta_info_command, auto_meta_info_group
from .base_group import AntiPetrosBaseGroup
from .command_category import CommandCategory
| 44 | 77 | 0.897727 | from .base_command import AntiPetrosBaseCommand
from .flag_command import AntiPetrosFlagCommand
from .creation_decorators import auto_meta_info_command, auto_meta_info_group
from .base_group import AntiPetrosBaseGroup
from .command_category import CommandCategory
| true | true |
f71ac7200feac49fd738de102b33055f7d33fc8f | 1,793 | py | Python | setup.py | endreszabo/py-radix | 2efbefb87d278be5c33166ca108e3cdcd28637b9 | [
"BSD-4-Clause-UC"
] | null | null | null | setup.py | endreszabo/py-radix | 2efbefb87d278be5c33166ca108e3cdcd28637b9 | [
"BSD-4-Clause-UC"
] | null | null | null | setup.py | endreszabo/py-radix | 2efbefb87d278be5c33166ca108e3cdcd28637b9 | [
"BSD-4-Clause-UC"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2004 Damien Miller <djm@mindrot.org>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# $Id$
import platform
import sys
from distutils.core import setup, Extension
VERSION = "0.5"
if __name__ == '__main__':
libs = []
src = [ 'radix.c', 'radix_python.c' ]
if sys.platform == 'win32':
libs += [ 'ws2_32' ]
src += [ 'strlcpy.c' ]
if platform.version() < '6.0': # older than Vista (Windows NT 6.0)
src += [ 'inet_ntop.c' ]
radix = Extension('radix', libraries = libs, sources = src)
setup( name = "radix",
version = VERSION,
author = "Damien Miller",
author_email = "djm@mindrot.org",
url = "http://www.mindrot.org/py-radix.html",
description = "Radix tree implementation",
long_description = """\
py-radix is an implementation of a radix tree data structure for the storage
and retrieval of IPv4 and IPv6 network prefixes.
The radix tree is the data structure most commonly used for routing table
lookups. It efficiently stores network prefixes of varying lengths and
allows fast lookups of containing networks.
""",
license = "BSD",
ext_modules = [radix]
)
 | 35.156863 | 77 | 0.727273 |
| true | true |
f71ac8c34ec504c775b0e08c86a5e168fd54c6a6 | 842 | py | Python | code/preprocessing/download_wordvecs.py | theblind/squad_challenge | 3cc81be6ca73e7160abffcc47dde6e188cd02fbb | [
"Apache-2.0"
] | null | null | null | code/preprocessing/download_wordvecs.py | theblind/squad_challenge | 3cc81be6ca73e7160abffcc47dde6e188cd02fbb | [
"Apache-2.0"
] | null | null | null | code/preprocessing/download_wordvecs.py | theblind/squad_challenge | 3cc81be6ca73e7160abffcc47dde6e188cd02fbb | [
"Apache-2.0"
] | null | null | null | import zipfile
import argparse
import os
from squad_preprocess import maybe_download
def setup_args():
parser = argparse.ArgumentParser()
parser.add_argument("--download_dir", required=True) # where to put the downloaded glove files
return parser.parse_args()
def main():
args = setup_args()
glove_base_url = "http://nlp.stanford.edu/data/"
glove_filename = "glove.6B.zip"
print("\nDownloading wordvecs to {}".format(args.download_dir))
if not os.path.exists(args.download_dir):
os.makedirs(args.download_dir)
maybe_download(glove_base_url, glove_filename, args.download_dir, 862182613)
glove_zip_ref = zipfile.ZipFile(os.path.join(args.download_dir, glove_filename), 'r')
glove_zip_ref.extractall(args.download_dir)
glove_zip_ref.close()
if __name__ == '__main__':
main()
 | 27.16129 | 98 | 0.731591 |
main()
| true | true |
f71ac992ef0211e206b3d27bddfec1270d1c095f | 6,545 | py | Python | data_clean/preprocessing.py | shuishoudage/music_generator | 7c17ef5bb3a5d872bff5ac8e1664f57f5b4ea08f | [
"MIT"
] | null | null | null | data_clean/preprocessing.py | shuishoudage/music_generator | 7c17ef5bb3a5d872bff5ac8e1664f57f5b4ea08f | [
"MIT"
] | null | null | null | data_clean/preprocessing.py | shuishoudage/music_generator | 7c17ef5bb3a5d872bff5ac8e1664f57f5b4ea08f | [
"MIT"
] | 1 | 2019-10-14T11:48:23.000Z | 2019-10-14T11:48:23.000Z | from typing import List, Tuple, Dict, Any
from collections import Counter
import pretty_midi
import matplotlib.pyplot as plt
import librosa.display
import os
from os import listdir, walk
from os.path import isfile, isdir, join
from sys import argv
import traceback
import logging
import numpy as np
from shutil import copyfile
import shutil
# Ideas behind the preprocessing class
#
# 1. Only use MIDI files with a single tempo and a single key, since some MIDI
# music contains key and tempo changes, which can produce unpredictable results.
#
# 2. List the distribution of all keys contained in the corpus and keep only the
# most frequently occurring one (mixing keys may increase training difficulty).
#
# 3. Only select music of similar tempo, based on the mean and std of the tempos;
# a simple rule is left boundary = mean - std, right boundary = mean + std.
#
# 4. Take the mean of the highest and lowest pitches in the corpus and filter out
# files outside that range. MIDI pitch spans 0-127; there is no point covering
# both extremes.
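#
# A worked illustration of rule 3 (numbers hypothetical): with a corpus tempo
# mean of 120 BPM and std of 20 BPM, only files whose estimated tempo lies in
# (100, 140) BPM are kept.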
class FileReport(object):
"""
This class is mainly for generating meta information for our report
"""
def __init__(self,
tempos: List[float],
freq_key: Dict[int, int],
min_pitch: List[int],
max_pitch: List[int]):
self.tempos = tempos
self.freq_key = freq_key
self.min_pitch = min_pitch
self.max_pitch = max_pitch
def aggregation_report(self):
"""
the two important variables are min_pitch and max_pitch,
since they will be used to decode from pitches back to audio
"""
temp_mean = np.array(self.tempos).mean()
temp_std = np.array(self.tempos).std()
most_freq_key = self.getMostFreqValue(self.freq_key)
min_pitch = int(np.array(self.min_pitch).mean())
max_pitch = int(np.array(self.max_pitch).mean())
return temp_mean, temp_std, most_freq_key, min_pitch, max_pitch
def plots(self):
# implement later on
pass
def getMostFreqValue(self, keys: Dict[int, int], reversed=True) -> int:
return sorted(keys.items(), key=lambda kv: kv[1], reverse=reversed)[0][0]
class Preprocess(object):
def __init__(self, path: str):
self.path = path
self.fileFilter()
def generateMidiFileReport(self) -> FileReport:
"""
meta information like tempos, keys, pitches will be generated for
filtering the midi files
"""
tempos = []
keys = []
max_pitchs = []
min_pitchs = []
for pm in self.pms:
try:
tempos.append(pm.estimate_tempo())
key = pm.key_signature_changes[0].key_number
keys.append(key)
min_pitch, max_pitch = self.getMinMaxPitch(pm)
max_pitchs.append(max_pitch)
min_pitchs.append(min_pitch)
except:
pass
self.report = FileReport(tempos, dict(
Counter(keys)), min_pitchs, max_pitchs)
return self.report
def getMinMaxPitch(self, pm: pretty_midi.PrettyMIDI):
"""
find the min and max pitch inside a midi file
"""
notes = [
note.pitch for instrument in pm.instruments for note in instrument.notes
]
return min(notes), max(notes)
def SaveFilterMIDIfiles(self):
"""
use the generated metadata report to filter out files that are not in range
"""
report = self.generateMidiFileReport()
temp_mean, temp_std, key, left_boundary, right_boundary = report.aggregation_report()
piano_roll_paths = []
for pm, path in zip(self.pms, self.paths):
try:
tempo = pm.estimate_tempo()
min_pitch, max_pitch = self.getMinMaxPitch(pm)
if self.isTempoInRange(tempo, temp_mean, temp_std) \
and self.isPitchInRange(min_pitch, max_pitch, left_boundary, right_boundary) \
and self.isKeyMatch(pm.key_signature_changes[0].key_number, key):
savedPath = os.path.join(os.getcwd(), 'filterData')
if not os.path.exists(savedPath):
os.makedirs(savedPath, exist_ok=True)
shutil.move(
path, os.path.join(os.getcwd(), 'filterData', os.path.basename(path)))
except:
pass
def isTempoInRange(self, tempo: float, mean: float, std: float) -> bool:
"""
a helper function used to check whether a MIDI file's tempo is in range
"""
if tempo > (mean - std) and tempo < (mean + std):
return True
return False
def isKeyMatch(self, key: int, grand_truth_key: int) -> bool:
if key == grand_truth_key:
return True
return False
def isPitchInRange(self, low_pitch: int,
high_pitch: int,
left_boundary: int,
right_boundary: int) -> bool:
if low_pitch >= left_boundary and high_pitch <= right_boundary:
return True
return False
def fileFilter(self):
"""
first filtering pass: only keep MIDI files that contain exactly one tempo and one key
"""
self.pms: List[pretty_midi.PrettyMIDI] = []
self.paths: List[str] = []
for (dirPath, _, files) in walk(self.path): # type: ignore
for file in files:
# get the absolute path of the file
path = join(dirPath, file)
try:
pm = pretty_midi.PrettyMIDI(path)
# only handle files contain one key and one tempo
if len(pm.key_signature_changes) == 1 \
and len(pm.time_signature_changes) == 1:
self.pms.append(pm)
self.paths.append(path)
except: # skip all parsing exceptions
pass
def cliArgParser(argv) -> Any:
if len(argv) != 2:
raise ValueError(f"path of folder must be provided")
if isdir(argv[1]):
path = os.path.abspath(argv[1])
return path
else:
raise ValueError(f"provided path is not a folder")
if __name__ == "__main__":
try:
path = cliArgParser(argv)
p = Preprocess(path)
p.SaveFilterMIDIfiles()
except Exception as err:
print(traceback.format_exc())
exit(1)
 | 35.570652 | 98 | 0.59343 |
| true | true |
f71aca40733f04d9dbf52d3494976b80319f27ac | 1,059 | py | Python | installer/core/providers/aws/boto3/cloudwatch_event.py | dabest1/pacbot | 83189006905f7d43f48d416166490773edd89cb1 | [
"Apache-2.0"
] | null | null | null | installer/core/providers/aws/boto3/cloudwatch_event.py | dabest1/pacbot | 83189006905f7d43f48d416166490773edd89cb1 | [
"Apache-2.0"
] | null | null | null | installer/core/providers/aws/boto3/cloudwatch_event.py | dabest1/pacbot | 83189006905f7d43f48d416166490773edd89cb1 | [
"Apache-2.0"
] | 1 | 2019-06-11T11:14:05.000Z | 2019-06-11T11:14:05.000Z | import boto3
def get_event_client(access_key, secret_key, region):
"""
Returns the client object for AWS Events
Args:
access_key (str): AWS Access Key
secret_key (str): AWS Secret Key
region (str): AWS Region
Returns:
obj: AWS Cloudwatch Event Client Obj
"""
return boto3.client(
"events",
region_name=region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
def check_rule_exists(rule_name, access_key, secret_key, region):
"""
Check whether the given CloudWatch rule already exists in the AWS account
Args:
rule_name (str): Cloudwatch rule name
access_key (str): AWS Access Key
secret_key (str): AWS Secret Key
region (str): AWS Region
Returns:
Boolean: True if the rule exists else False
"""
client = get_event_client(access_key, secret_key, region)
try:
response = client.describe_rule(Name=rule_name)
return True if response else False
except:
return False
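# Hypothetical usage sketch (rule name and credentials assumed):
# if check_rule_exists("pacbot-scheduler-rule", access_key, secret_key, "us-east-1"):
#     print("Rule already exists; skipping creation")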
| 25.214286 | 72 | 0.648725 | import boto3
def get_event_client(access_key, secret_key, region):
return boto3.client(
"events",
region_name=region,
aws_access_key_id=access_key,
aws_secret_access_key=secret_key)
def check_rule_exists(rule_name, access_key, secret_key, region):
client = get_event_client(access_key, secret_key, region)
try:
response = client.describe_rule(Name=rule_name)
        return bool(response)
    except Exception:
        return False
| true | true |
f71aca5cb50d6e0d40cf7342ca3cded4cb68b824 | 1,870 | py | Python | finorch/sessions/cit/session.py | ADACS-Australia/SS2021B-DBrown | 67b93b316e6f9ab09e3bd5105edbbc71108e0723 | [
"MIT"
] | null | null | null | finorch/sessions/cit/session.py | ADACS-Australia/SS2021B-DBrown | 67b93b316e6f9ab09e3bd5105edbbc71108e0723 | [
"MIT"
] | null | null | null | finorch/sessions/cit/session.py | ADACS-Australia/SS2021B-DBrown | 67b93b316e6f9ab09e3bd5105edbbc71108e0723 | [
"MIT"
] | null | null | null | import logging
from finorch.config.config import api_config_manager
from finorch.sessions.cit.client import CITClient
from finorch.sessions.abstract_session import AbstractSession
from finorch.sessions.cit.wrapper import CITWrapper
from finorch.transport.ssh import SshTransport
class CITSession(AbstractSession):
callsign = "cit"
client_klass = CITClient
wrapper_klass = CITWrapper
transport_klass = SshTransport
def __init__(self, exec_path, username, python_path, env_file=None, *args, **kwargs):
"""
        Creates a new CIT session that can be used to run finesse jobs in parallel on CIT.
        :param exec_path: The path where jobs should be executed (and results stored); if not specified, the path
        will be a temporary directory that is cleaned up when the client is terminated.
        :param username: The username used to connect to CIT over SSH.
        :param python_path: The path to the python interpreter on the remote host.
        :param env_file: An optional environment file, passed through to the transport.
"""
super().__init__()
self._transport = CITSession.transport_klass(
self,
exec_path,
username=username,
python_path=python_path,
env_file=env_file,
host="ldas-grid.ligo.caltech.edu",
callsign=self.callsign,
*args,
**kwargs
)
cit_config = api_config_manager.get_section('cit')
remote_port = cit_config.get('remote_port', None) if cit_config else None
if remote_port:
logging.info("Attempting to reconnect remote client last seen on remote port " + str(remote_port))
else:
logging.info("Attempting to connect remote client")
remote_port = self._transport.connect(
remote_port=remote_port
)
logging.info("Remote client connected on port " + str(remote_port))
api_config_manager.set('cit', 'remote_port', str(remote_port))
@property
def transport(self):
return self._transport
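# A minimal usage sketch (the paths and username below are hypothetical):
#
#   session = CITSession(
#       exec_path="/home/<user>/finorch_jobs",
#       username="<ligo-username>",
#       python_path="/usr/bin/python3",
#   )
#   transport = session.transport  # SshTransport to ldas-grid.ligo.caltech.edu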
| 32.807018 | 115 | 0.667914 | import logging
from finorch.config.config import api_config_manager
from finorch.sessions.cit.client import CITClient
from finorch.sessions.abstract_session import AbstractSession
from finorch.sessions.cit.wrapper import CITWrapper
from finorch.transport.ssh import SshTransport
class CITSession(AbstractSession):
callsign = "cit"
client_klass = CITClient
wrapper_klass = CITWrapper
transport_klass = SshTransport
def __init__(self, exec_path, username, python_path, env_file=None, *args, **kwargs):
super().__init__()
self._transport = CITSession.transport_klass(
self,
exec_path,
username=username,
python_path=python_path,
env_file=env_file,
host="ldas-grid.ligo.caltech.edu",
callsign=self.callsign,
*args,
**kwargs
)
cit_config = api_config_manager.get_section('cit')
remote_port = cit_config.get('remote_port', None) if cit_config else None
if remote_port:
logging.info("Attempting to reconnect remote client last seen on remote port " + str(remote_port))
else:
logging.info("Attempting to connect remote client")
remote_port = self._transport.connect(
remote_port=remote_port
)
logging.info("Remote client connected on port " + str(remote_port))
api_config_manager.set('cit', 'remote_port', str(remote_port))
@property
def transport(self):
return self._transport
| true | true |
f71acbda8152b39dcd69a9518aee969805ce1605 | 4,092 | py | Python | plotly/validators/scattergeo/marker/_line.py | fcollonval/plotly.py | 5c7f100db1af8c82bb740a38ef684955a8ed6d0e | [
"MIT"
] | 2 | 2020-03-24T11:41:14.000Z | 2021-01-14T07:59:43.000Z | plotly/validators/scattergeo/marker/_line.py | fcollonval/plotly.py | 5c7f100db1af8c82bb740a38ef684955a8ed6d0e | [
"MIT"
] | null | null | null | plotly/validators/scattergeo/marker/_line.py | fcollonval/plotly.py | 5c7f100db1af8c82bb740a38ef684955a8ed6d0e | [
"MIT"
] | 4 | 2019-06-03T14:49:12.000Z | 2022-01-06T01:05:12.000Z | import _plotly_utils.basevalidators
class LineValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name='line', parent_name='scattergeo.marker', **kwargs
):
super(LineValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str='Line',
data_docs="""
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `marker.line.colorscale`. Has an
effect only if in `marker.line.color`is set to
a numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the
default palette will be chosen according to
whether numbers in the `color` array are all
positive, all negative or mixed.
cauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `marker.line.color`) or the bounds set in
`marker.line.cmin` and `marker.line.cmax` Has
an effect only if in `marker.line.color`is set
to a numerical array. Defaults to `false` when
`marker.line.cmin` and `marker.line.cmax` are
set by the user.
cmax
Sets the upper bound of the color domain. Has
an effect only if in `marker.line.color`is set
to a numerical array. Value should have the
same units as in `marker.line.color` and if
set, `marker.line.cmin` must be set as well.
cmin
Sets the lower bound of the color domain. Has
an effect only if in `marker.line.color`is set
to a numerical array. Value should have the
same units as in `marker.line.color` and if
set, `marker.line.cmax` must be set as well.
color
            Sets the marker.line color. It accepts either a
specific color or an array of numbers that are
mapped to the colorscale relative to the max
and min values of the array or relative to
`marker.line.cmin` and `marker.line.cmax` if
set.
colorscale
Sets the colorscale. Has an effect only if in
`marker.line.color`is set to a numerical array.
The colorscale must be an array containing
arrays mapping a normalized value to an rgb,
rgba, hex, hsl, hsv, or named color string. At
minimum, a mapping for the lowest (0) and
highest (1) values are required. For example,
            `[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To
control the bounds of the colorscale in color
            space, use `marker.line.cmin` and
`marker.line.cmax`. Alternatively, `colorscale`
may be a palette name string of the following
list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R
eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black
body,Earth,Electric,Viridis,Cividis.
colorsrc
Sets the source reference on plot.ly for color
.
reversescale
Reverses the color mapping if true. Has an
effect only if in `marker.line.color`is set to
a numerical array. If true, `marker.line.cmin`
will correspond to the last color in the array
and `marker.line.cmax` will correspond to the
first color.
width
Sets the width (in px) of the lines bounding
the marker points.
widthsrc
Sets the source reference on plot.ly for width
.
""",
**kwargs
)
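# A minimal usage sketch (illustrative only; plotly normally instantiates
# these validators internally when figure properties are assigned):
#
#   v = LineValidator()
#   line = v.validate_coerce({'width': 2, 'color': 'rgb(255,0,0)'})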
| 47.034483 | 75 | 0.554497 | import _plotly_utils.basevalidators
class LineValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(
self, plotly_name='line', parent_name='scattergeo.marker', **kwargs
):
super(LineValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str='Line',
data_docs="""
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `marker.line.colorscale`. Has an
effect only if in `marker.line.color`is set to
a numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the
default palette will be chosen according to
whether numbers in the `color` array are all
positive, all negative or mixed.
cauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `marker.line.color`) or the bounds set in
`marker.line.cmin` and `marker.line.cmax` Has
an effect only if in `marker.line.color`is set
to a numerical array. Defaults to `false` when
`marker.line.cmin` and `marker.line.cmax` are
set by the user.
cmax
Sets the upper bound of the color domain. Has
an effect only if in `marker.line.color`is set
to a numerical array. Value should have the
same units as in `marker.line.color` and if
set, `marker.line.cmin` must be set as well.
cmin
Sets the lower bound of the color domain. Has
an effect only if in `marker.line.color`is set
to a numerical array. Value should have the
same units as in `marker.line.color` and if
set, `marker.line.cmax` must be set as well.
color
            Sets the marker.line color. It accepts either a
specific color or an array of numbers that are
mapped to the colorscale relative to the max
and min values of the array or relative to
`marker.line.cmin` and `marker.line.cmax` if
set.
colorscale
Sets the colorscale. Has an effect only if in
`marker.line.color`is set to a numerical array.
The colorscale must be an array containing
arrays mapping a normalized value to an rgb,
rgba, hex, hsl, hsv, or named color string. At
minimum, a mapping for the lowest (0) and
highest (1) values are required. For example,
            `[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`. To
control the bounds of the colorscale in color
            space, use `marker.line.cmin` and
`marker.line.cmax`. Alternatively, `colorscale`
may be a palette name string of the following
list: Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,R
eds,Blues,Picnic,Rainbow,Portland,Jet,Hot,Black
body,Earth,Electric,Viridis,Cividis.
colorsrc
Sets the source reference on plot.ly for color
.
reversescale
Reverses the color mapping if true. Has an
effect only if in `marker.line.color`is set to
a numerical array. If true, `marker.line.cmin`
will correspond to the last color in the array
and `marker.line.cmax` will correspond to the
first color.
width
Sets the width (in px) of the lines bounding
the marker points.
widthsrc
Sets the source reference on plot.ly for width
.
""",
**kwargs
)
| true | true |
f71acbff3ef602966bb7796ad13e0aeba23cd1e4 | 203,023 | py | Python | gmusicapi/protocol/locker_pb2.py | siebert/Unofficial-Google-Music-API | 8222d566f5048c03f14beee031632fa80e3c0794 | [
"BSD-3-Clause"
] | 2 | 2016-09-06T07:32:06.000Z | 2019-11-20T22:22:39.000Z | gmusicapi/protocol/locker_pb2.py | siebert/Unofficial-Google-Music-API | 8222d566f5048c03f14beee031632fa80e3c0794 | [
"BSD-3-Clause"
] | null | null | null | gmusicapi/protocol/locker_pb2.py | siebert/Unofficial-Google-Music-API | 8222d566f5048c03f14beee031632fa80e3c0794 | [
"BSD-3-Clause"
] | 1 | 2019-11-20T22:22:41.000Z | 2019-11-20T22:22:41.000Z | # Generated by the protocol buffer compiler. DO NOT EDIT!
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
import uits_pb2
DESCRIPTOR = descriptor.FileDescriptor(
name='locker.proto',
package='',
serialized_pb='\n\x0clocker.proto\x1a\nuits.proto\"\xf8\x01\n\x08\x41udioRef\x12\x1e\n\x05store\x18\x01 \x02(\x0e\x32\x0f.AudioRef.Store\x12\x0b\n\x03ref\x18\x02 \x02(\x0c\x12\x0b\n\x03url\x18\x04 \x01(\t\x12\x10\n\x08\x62it_rate\x18\x05 \x01(\x05\x12\x13\n\x0bsample_rate\x18\x06 \x01(\x05\x12\x14\n\x0c\x64ownloadable\x18\x07 \x01(\x08\x12\x17\n\x0f\x64uration_millis\x18\x08 \x01(\x03\x12\x19\n\x11rematch_timestamp\x18\t \x01(\x03\x12\x1e\n\x16invalid_due_to_wipeout\x18\n \x01(\x08\"!\n\x05Store\x12\r\n\tBLOBSTORE\x10\x01\x12\t\n\x05SM_V2\x10\x02\"\xd1\x01\n\x08ImageRef\x12\x1e\n\x05store\x18\x01 \x01(\x0e\x32\x0f.ImageRef.Store\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x0b\n\x03url\x18\x06 \x01(\t\x12\x1e\n\x16invalid_due_to_wipeout\x18\x07 \x01(\x08\x12 \n\x06origin\x18\x08 \x01(\x0e\x32\x10.ImageRef.Origin\"\x14\n\x05Store\x12\x0b\n\x07SHOEBOX\x10\x03\"!\n\x06Origin\x12\x0c\n\x08PERSONAL\x10\x01\x12\t\n\x05STORE\x10\x02\"1\n\x12UploadedUitsId3Tag\x12\r\n\x05owner\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\x8c\x10\n\x05Track\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tclient_id\x18\x02 \x01(\t\x12\x1a\n\x12\x63reation_timestamp\x18\x03 \x01(\x03\x12\x1f\n\x17last_modified_timestamp\x18\x04 \x01(\x03\x12\x16\n\x07\x64\x65leted\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\r\n\x05title\x18\x06 \x01(\t\x12\x0e\n\x06\x61rtist\x18\x07 \x01(\t\x12\x13\n\x0b\x61rtist_hash\x18. \x01(\x03\x12\x10\n\x08\x63omposer\x18\x08 \x01(\t\x12\r\n\x05\x61lbum\x18\t \x01(\t\x12\x14\n\x0c\x61lbum_artist\x18\n \x01(\t\x12\x17\n\x0f\x63\x61nonical_album\x18\x38 \x01(\t\x12\x18\n\x10\x63\x61nonical_artist\x18\x39 \x01(\t\x12\x1d\n\x15\x63\x61nonical_genre_album\x18: \x01(\t\x12\x0c\n\x04year\x18\x0b \x01(\x05\x12\x0f\n\x07\x63omment\x18\x0c \x01(\t\x12\x14\n\x0ctrack_number\x18\r \x01(\x05\x12\r\n\x05genre\x18\x0e \x01(\t\x12\x17\n\x0f\x64uration_millis\x18\x0f \x01(\x03\x12\x18\n\x10\x62\x65\x61ts_per_minute\x18\x10 \x01(\x05\x12\x19\n\x11original_bit_rate\x18, \x01(\x05\x12\x1c\n\taudio_ref\x18\x11 \x03(\x0b\x32\t.AudioRef\x12 \n\ralbum_art_ref\x18\x12 \x03(\x0b\x32\t.ImageRef\x12\x36\n\x13\x61vailability_status\x18\x13 \x01(\x0e\x32\x19.Track.AvailabilityStatus\x12\x12\n\nplay_count\x18\x14 \x01(\x05\x12(\n\x0c\x63ontent_type\x18\x19 \x01(\x0e\x32\x12.Track.ContentType\x12\x19\n\x11total_track_count\x18\x1a \x01(\x05\x12\x13\n\x0b\x64isc_number\x18\x1b \x01(\x05\x12\x18\n\x10total_disc_count\x18\x1c \x01(\x05\x12!\n\x08\x63hannels\x18\x1d \x01(\x0e\x32\x0f.Track.Channels\x12$\n\ntrack_type\x18\x1e \x01(\x0e\x32\x10.Track.TrackType\x12\x1e\n\x16use_single_server_copy\x18; \x01(\x08\x12\x1d\n\x06rating\x18\x1f \x01(\x0e\x32\r.Track.Rating\x12\x16\n\x0e\x65stimated_size\x18 \x01(\x03\x12\x10\n\x08store_id\x18! 
\x01(\t\x12\x12\n\nmetajam_id\x18\" \x01(\t\x12 \n\x15metajam_id_confidence\x18+ \x01(\x01:\x01\x30\x12\x0c\n\x04uits\x18# \x01(\t\x12$\n\ruits_metadata\x18( \x01(\x0b\x32\r.UitsMetadata\x12\x13\n\x0b\x63ompilation\x18$ \x01(\x08\x12\x19\n\x11\x63lient_date_added\x18% \x01(\x03\x12\x18\n\x10recent_timestamp\x18& \x01(\x03\x12\x1d\n\x0e\x64o_not_rematch\x18\' \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x13\x66rom_album_purchase\x18) \x01(\x08\x12\x18\n\x10\x61lbum_metajam_id\x18* \x01(\t\x12\x16\n\x0etransaction_id\x18- \x01(\t\x12\x13\n\x0b\x64\x65\x62ug_track\x18/ \x01(\x08\x12\x18\n\x10normalized_title\x18\x30 \x01(\t\x12\x19\n\x11normalized_artist\x18\x31 \x01(\t\x12\x18\n\x10normalized_album\x18\x32 \x01(\t\x12\x1f\n\x17normalized_album_artist\x18\x33 \x01(\t\x12\"\n\x1anormalized_canonical_album\x18\x36 \x01(\t\x12#\n\x1bnormalized_canonical_artist\x18\x37 \x01(\t\x12\x13\n\x0buploader_id\x18\x34 \x01(\t\x12\x17\n\x0f\x63lient_album_id\x18\x35 \x01(\t\x12\x18\n\x10label_owner_code\x18< \x01(\t\x12\x31\n\x15original_content_type\x18= \x01(\x0e\x32\x12.Track.ContentType\x12*\n\ruploaded_uits\x18G \x03(\x0b\x32\x13.UploadedUitsId3Tag\"\x86\x01\n\x12\x41vailabilityStatus\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07MATCHED\x10\x02\x12\x14\n\x10UPLOAD_REQUESTED\x10\x03\x12\r\n\tAVAILABLE\x10\x04\x12\x12\n\x0e\x46ORCE_REUPLOAD\x10\x05\x12\x1d\n\x19UPLOAD_PERMANENTLY_FAILED\x10\x06\"W\n\x0b\x43ontentType\x12\x07\n\x03MP3\x10\x01\x12\x07\n\x03M4A\x10\x02\x12\x07\n\x03\x41\x41\x43\x10\x03\x12\x08\n\x04\x46LAC\x10\x04\x12\x07\n\x03OGG\x10\x05\x12\x07\n\x03WMA\x10\x06\x12\x07\n\x03M4P\x10\x07\x12\x08\n\x04\x41LAC\x10\x08\" \n\x08\x43hannels\x12\x08\n\x04MONO\x10\x01\x12\n\n\x06STEREO\x10\x02\"\x8b\x01\n\tTrackType\x12\x11\n\rMATCHED_TRACK\x10\x01\x12\x13\n\x0fUNMATCHED_TRACK\x10\x02\x12\x0f\n\x0bLOCAL_TRACK\x10\x03\x12\x13\n\x0fPURCHASED_TRACK\x10\x04\x12\x1f\n\x1bMETADATA_ONLY_MATCHED_TRACK\x10\x05\x12\x0f\n\x0bPROMO_TRACK\x10\x06\"e\n\x06Rating\x12\r\n\tNOT_RATED\x10\x01\x12\x0c\n\x08ONE_STAR\x10\x02\x12\r\n\tTWO_STARS\x10\x03\x12\x0f\n\x0bTHREE_STARS\x10\x04\x12\x0e\n\nFOUR_STARS\x10\x05\x12\x0e\n\nFIVE_STARS\x10\x06\"\x1f\n\x06Tracks\x12\x15\n\x05track\x18\x01 \x03(\x0b\x32\x06.Track\"\xb4\x02\n\x08Playlist\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tclient_id\x18\x02 \x01(\t\x12\x1a\n\x12\x63reation_timestamp\x18\x03 \x01(\x03\x12\x1f\n\x17last_modified_timestamp\x18\x04 \x01(\x03\x12\x16\n\x07\x64\x65leted\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x0c\n\x04name\x18\x06 \x01(\t\x12-\n\rplaylist_type\x18\x07 \x01(\x0e\x32\x16.Playlist.PlaylistType\x12#\n\x10playlist_art_ref\x18\x08 \x01(\x0b\x32\t.ImageRef\x12\x18\n\x10recent_timestamp\x18\t \x01(\x03\"8\n\x0cPlaylistType\x12\x12\n\x0eUSER_GENERATED\x10\x01\x12\t\n\x05MAGIC\x10\x02\x12\t\n\x05PROMO\x10\x03\"\xae\x03\n\rPlaylistEntry\x12\x13\n\x0bplaylist_id\x18\x01 \x01(\t\x12\x19\n\x11\x61\x62solute_position\x18\x02 \x01(\x03\x12\x1c\n\x14place_after_entry_id\x18\x03 \x01(\t\x12\x10\n\x08track_id\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x11\n\tclient_id\x18\x06 \x01(\t\x12\x1a\n\x12\x63reation_timestamp\x18\x07 \x01(\x03\x12\x1f\n\x17last_modified_timestamp\x18\x08 \x01(\x03\x12\x16\n\x07\x64\x65leted\x18\t \x01(\x08:\x05\x66\x61lse\x12H\n\x19relative_position_id_type\x18\n \x01(\x0e\x32%.PlaylistEntry.RelativePositionIdType\x12\x15\n\x05track\x18\x0f \x01(\x0b\x32\x06.Track\x12\x1d\n\x15place_before_entry_id\x18\x10 \x01(\t\x12\x17\n\x0fstring_position\x18\x11 
\x01(\t\"0\n\x16RelativePositionIdType\x12\n\n\x06SERVER\x10\x01\x12\n\n\x06\x43LIENT\x10\x02\"\xd0\x03\n\x16TrackSearchRestriction\x12\x39\n\tattribute\x18\x01 \x02(\x0e\x32&.TrackSearchRestriction.TrackAttribute\x12\r\n\x05value\x18\x02 \x02(\t\x12?\n\x0f\x63omparison_type\x18\x03 \x02(\x0e\x32&.TrackSearchRestriction.ComparisonType\"\xa6\x01\n\x0eTrackAttribute\x12\t\n\x05TITLE\x10\x01\x12\n\n\x06\x41RTIST\x10\x02\x12\t\n\x05\x41LBUM\x10\x03\x12\x10\n\x0c\x41LBUM_ARTIST\x10\x04\x12\t\n\x05GENRE\x10\x05\x12\x17\n\x13\x41VAILABILITY_STATUS\x10\x06\x12\x0e\n\nTRACK_TYPE\x10\x07\x12\x08\n\x04YEAR\x10\x08\x12\x0c\n\x08STORE_ID\x10\t\x12\x14\n\x10\x41LBUM_METAJAM_ID\x10\n\"\x81\x01\n\x0e\x43omparisonType\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x10\n\x0cGREATER_THAN\x10\x02\x12\x11\n\rGREATER_EQUAL\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x0e\n\nLESS_EQUAL\x10\x05\x12\x11\n\rPARTIAL_MATCH\x10\x06\"\xda\x01\n\x19TrackSearchRestrictionSet\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.TrackSearchRestrictionSet.RestrictionSetType\x12,\n\x0brestriction\x18\x02 \x03(\x0b\x32\x17.TrackSearchRestriction\x12+\n\x07sub_set\x18\x03 \x03(\x0b\x32\x1a.TrackSearchRestrictionSet\"%\n\x12RestrictionSetType\x12\x07\n\x03\x41ND\x10\x00\x12\x06\n\x02OR\x10\x01\"\x80\x02\n\x0eTrackSortOrder\x12\x31\n\tattribute\x18\x01 \x01(\x0e\x32\x1e.TrackSortOrder.TrackAttribute\x12\x18\n\ndescending\x18\x02 \x01(\x08:\x04true\"\xa0\x01\n\x0eTrackAttribute\x12\x16\n\x12LAST_MODIFIED_TIME\x10\x01\x12\n\n\x06\x41RTIST\x10\x02\x12\t\n\x05\x41LBUM\x10\x03\x12\t\n\x05TITLE\x10\x04\x12\x10\n\x0cTRACK_NUMBER\x10\x06\x12\x0e\n\nPLAY_COUNT\x10\t\x12\x13\n\x0f\x44URATION_MILLIS\x10\n\x12\n\n\x06RATING\x10\x0b\x12\x11\n\rCREATION_TIME\x10\x0c\"\xfe\x02\n\x10GetTracksRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\x13\n\x0bupdated_min\x18\x02 \x01(\x03\x12\x17\n\x0finclude_deleted\x18\x03 \x01(\x08\x12\x13\n\x0bmax_results\x18\x04 \x01(\x05\x12\x1a\n\x12\x63ontinuation_token\x18\x05 \x01(\t\x12\x33\n\x12search_restriction\x18\x06 \x03(\x0b\x32\x17.TrackSearchRestriction\x12#\n\nsort_order\x18\x07 \x03(\x0b\x32\x0f.TrackSortOrder\x12\x33\n\x0frestriction_set\x18\x08 \x01(\x0b\x32\x1a.TrackSearchRestrictionSet\x12;\n\x10track_projection\x18\t \x01(\x0e\x32!.GetTracksRequest.TrackProjection\".\n\x0fTrackProjection\x12\x08\n\x04\x46ULL\x10\x01\x12\x11\n\rFRONTEND_VIEW\x10\x02\"\xd3\x01\n\x11GetTracksResponse\x12\x36\n\rresponse_code\x18\x01 \x02(\x0e\x32\x1f.GetTracksResponse.ResponseCode\x12\x15\n\x05track\x18\x02 \x03(\x0b\x32\x06.Track\x12\x1f\n\x17\x65stimated_total_results\x18\x03 \x01(\x03\x12\x1a\n\x12\x63ontinuation_token\x18\x04 \x01(\t\"2\n\x0cResponseCode\x12\x06\n\x02OK\x10\x01\x12\x10\n\x0cNOT_MODIFIED\x10\x02\x12\x08\n\x04GONE\x10\x03\"\xfc\x01\n\x19GetPlaylistEntriesRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\x13\n\x0bupdated_min\x18\x02 \x01(\x03\x12\x17\n\x0finclude_deleted\x18\x03 \x01(\x08\x12\x13\n\x0bmax_results\x18\x04 \x01(\x05\x12\x1a\n\x12\x63ontinuation_token\x18\x05 \x01(\t\x12\x1a\n\x12playlist_id_filter\x18\x06 \x01(\t\x12)\n\x1ainclude_all_track_metadata\x18\x07 \x01(\x08:\x05\x66\x61lse\x12(\n\x1aonly_show_available_tracks\x18\x08 \x01(\x08:\x04true\"\xf6\x01\n\x1aGetPlaylistEntriesResponse\x12?\n\rresponse_code\x18\x01 \x02(\x0e\x32(.GetPlaylistEntriesResponse.ResponseCode\x12&\n\x0eplaylist_entry\x18\x02 \x03(\x0b\x32\x0e.PlaylistEntry\x12\x1f\n\x17\x65stimated_total_results\x18\x03 \x01(\x03\x12\x1a\n\x12\x63ontinuation_token\x18\x04 
\x01(\t\"2\n\x0cResponseCode\x12\x06\n\x02OK\x10\x01\x12\x10\n\x0cNOT_MODIFIED\x10\x02\x12\x08\n\x04GONE\x10\x03\"\xc8\x01\n\x11PlaylistSortOrder\x12\x37\n\tattribute\x18\x01 \x01(\x0e\x32$.PlaylistSortOrder.PlaylistAttribute\x12\x19\n\ndescending\x18\x02 \x01(\x08:\x05\x66\x61lse\"_\n\x11PlaylistAttribute\x12\x16\n\x12LAST_MODIFIED_TIME\x10\x01\x12\t\n\x05TITLE\x10\x02\x12\x11\n\rCREATION_TIME\x10\x03\x12\x14\n\x10RECENT_TIMESTAMP\x10\x04\"\xad\x01\n\x13GetPlaylistsRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\x13\n\x0bupdated_min\x18\x02 \x01(\x03\x12\x17\n\x0finclude_deleted\x18\x03 \x01(\x08\x12\x13\n\x0bmax_results\x18\x04 \x01(\x05\x12\x1a\n\x12\x63ontinuation_token\x18\x05 \x01(\t\x12&\n\nsort_order\x18\x06 \x01(\x0b\x32\x12.PlaylistSortOrder\"\xdf\x01\n\x14GetPlaylistsResponse\x12\x39\n\rresponse_code\x18\x01 \x02(\x0e\x32\".GetPlaylistsResponse.ResponseCode\x12\x1b\n\x08playlist\x18\x02 \x03(\x0b\x32\t.Playlist\x12\x1f\n\x17\x65stimated_total_results\x18\x03 \x01(\x03\x12\x1a\n\x12\x63ontinuation_token\x18\x04 \x01(\t\"2\n\x0cResponseCode\x12\x06\n\x02OK\x10\x01\x12\x10\n\x0cNOT_MODIFIED\x10\x02\x12\x08\n\x04GONE\x10\x03\"3\n\x12LookupTrackRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tclient_id\x18\x02 \x01(\t\";\n\x1aLookupPlaylistEntryRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tclient_id\x18\x02 \x01(\t\"6\n\x15LookupPlaylistRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tclient_id\x18\x02 \x01(\t\"\xbe\x02\n\x12\x42\x61tchLookupRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\"\n\x05track\x18\x02 \x03(\x0b\x32\x13.LookupTrackRequest\x12(\n\x08playlist\x18\x03 \x03(\x0b\x32\x16.LookupPlaylistRequest\x12\x37\n\rmetadata_type\x18\x04 \x01(\x0e\x32 .BatchLookupRequest.MetadataType\x12\x33\n\x0eplaylist_entry\x18\x05 \x03(\x0b\x32\x1b.LookupPlaylistEntryRequest\x12\x1e\n\x0finclude_deleted\x18\x06 \x01(\x08:\x05\x66\x61lse\";\n\x0cMetadataType\x12\t\n\x05TRACK\x10\x01\x12\x0c\n\x08PLAYLIST\x10\x02\x12\x12\n\x0ePLAYLIST_ENTRY\x10\x03\"q\n\x13\x42\x61tchLookupResponse\x12\x15\n\x05track\x18\x01 \x03(\x0b\x32\x06.Track\x12\x1b\n\x08playlist\x18\x02 \x03(\x0b\x32\t.Playlist\x12&\n\x0eplaylist_entry\x18\x03 \x03(\x0b\x32\x0e.PlaylistEntry\"\xba\x01\n\x12MutateTrackRequest\x12\x1c\n\x0c\x63reate_track\x18\x01 \x01(\x0b\x32\x06.Track\x12\x1c\n\x0cupdate_track\x18\x02 \x01(\x0b\x32\x06.Track\x12\x14\n\x0c\x64\x65lete_track\x18\x03 \x01(\t\x12\x16\n\x0epartial_update\x18\x04 \x01(\x08\x12\"\n\x14update_last_modified\x18\x05 \x01(\x08:\x04true\x12\x16\n\x0eundelete_track\x18\x06 \x01(\t\"\xb6\x03\n\x0eMutateResponse\x12\x39\n\rresponse_code\x18\x01 \x01(\x0e\x32\".MutateResponse.MutateResponseCode\x12\n\n\x02id\x18\x02 \x01(\t\x12\x10\n\x08\x63hild_id\x18\x03 \x03(\t\x12\x11\n\tclient_id\x18\x04 \x01(\t\x12?\n\x13\x61vailability_status\x18\x05 \x01(\x0e\x32\".MutateResponse.AvailabilityStatus\x12\x15\n\rerror_message\x18\x06 \x01(\t\"W\n\x12MutateResponseCode\x12\x06\n\x02OK\x10\x01\x12\x0c\n\x08\x43ONFLICT\x10\x02\x12\x13\n\x0fINVALID_REQUEST\x10\x03\x12\x16\n\x12METADATA_TOO_LARGE\x10\x04\"\x86\x01\n\x12\x41vailabilityStatus\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07MATCHED\x10\x02\x12\x14\n\x10UPLOAD_REQUESTED\x10\x03\x12\r\n\tAVAILABLE\x10\x04\x12\x12\n\x0e\x46ORCE_REUPLOAD\x10\x05\x12\x1d\n\x19UPLOAD_PERMANENTLY_FAILED\x10\x06\"\xcd\x01\n\x18\x42\x61tchMutateTracksRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12+\n\x0etrack_mutation\x18\x02 \x03(\x0b\x32\x13.MutateTrackRequest\x12\x1f\n\x11send_notification\x18\x03 
\x01(\x08:\x04true\x12\'\n\x19\x64\x65tect_timestamp_conflict\x18\x04 \x01(\x08:\x04true\x12)\n\x1bnotify_fine_grained_updates\x18\x05 \x01(\x08:\x04true\"\xcd\x01\n\x19\x42\x61tchMutateTracksResponse\x12O\n\rresponse_code\x18\x01 \x03(\x0e\x32\x38.BatchMutateTracksResponse.BatchMutateTracksResponseCode\x12(\n\x0fmutate_response\x18\x02 \x03(\x0b\x32\x0f.MutateResponse\"5\n\x1d\x42\x61tchMutateTracksResponseCode\x12\x06\n\x02OK\x10\x01\x12\x0c\n\x08\x43ONFLICT\x10\x02\"\xf7\x01\n\x15MutatePlaylistRequest\x12\"\n\x0f\x63reate_playlist\x18\x01 \x01(\x0b\x32\t.Playlist\x12\"\n\x0fupdate_playlist\x18\x02 \x01(\x0b\x32\t.Playlist\x12\x17\n\x0f\x64\x65lete_playlist\x18\x03 \x01(\t\x12\x16\n\x0epartial_update\x18\x04 \x01(\x08\x12&\n\x0eplaylist_entry\x18\x05 \x03(\x0b\x32\x0e.PlaylistEntry\x12\"\n\x14update_last_modified\x18\x06 \x01(\x08:\x04true\x12\x19\n\x11undelete_playlist\x18\x07 \x01(\t\"\xd7\x01\n\x1b\x42\x61tchMutatePlaylistsRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\x31\n\x11playlist_mutation\x18\x02 \x03(\x0b\x32\x16.MutatePlaylistRequest\x12\x1f\n\x11send_notification\x18\x03 \x01(\x08:\x04true\x12\'\n\x19\x64\x65tect_timestamp_conflict\x18\x04 \x01(\x08:\x04true\x12*\n\x1bnotify_fine_grained_updates\x18\x05 \x01(\x08:\x05\x66\x61lse\"\xd9\x01\n\x1c\x42\x61tchMutatePlaylistsResponse\x12U\n\rresponse_code\x18\x01 \x03(\x0e\x32>.BatchMutatePlaylistsResponse.BatchMutatePlaylistsResponseCode\x12(\n\x0fmutate_response\x18\x02 \x03(\x0b\x32\x0f.MutateResponse\"8\n BatchMutatePlaylistsResponseCode\x12\x06\n\x02OK\x10\x01\x12\x0c\n\x08\x43ONFLICT\x10\x02\"\xee\x01\n\x1aMutatePlaylistEntryRequest\x12-\n\x15\x63reate_playlist_entry\x18\x01 \x01(\x0b\x32\x0e.PlaylistEntry\x12-\n\x15update_playlist_entry\x18\x02 \x01(\x0b\x32\x0e.PlaylistEntry\x12-\n\x15\x64\x65lete_playlist_entry\x18\x03 \x01(\x0b\x32\x0e.PlaylistEntry\x12\"\n\x14update_last_modified\x18\x04 \x01(\x08:\x04true\x12\x1f\n\x17undelete_playlist_entry\x18\x05 \x01(\t\"\xe8\x01\n!BatchMutatePlaylistEntriesRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12<\n\x17playlist_entry_mutation\x18\x02 \x03(\x0b\x32\x1b.MutatePlaylistEntryRequest\x12\x1f\n\x11send_notification\x18\x03 \x01(\x08:\x04true\x12\'\n\x19\x64\x65tect_timestamp_conflict\x18\x04 \x01(\x08:\x04true\x12*\n\x1bnotify_fine_grained_updates\x18\x05 \x01(\x08:\x05\x66\x61lse\"\xf1\x01\n\"BatchMutatePlaylistEntriesResponse\x12\x61\n\rresponse_code\x18\x01 \x03(\x0e\x32J.BatchMutatePlaylistEntriesResponse.BatchMutatePlaylistEntriesResponseCode\x12(\n\x0fmutate_response\x18\x02 \x03(\x0b\x32\x0f.MutateResponse\">\n&BatchMutatePlaylistEntriesResponseCode\x12\x06\n\x02OK\x10\x01\x12\x0c\n\x08\x43ONFLICT\x10\x02\"\x90\x01\n\x11MagicPlaylistSeed\x12.\n\tseed_type\x18\x01 \x02(\x0e\x32\x1b.MagicPlaylistSeed.SeedType\x12\x0c\n\x04seed\x18\x02 \x02(\t\"=\n\x08SeedType\x12\t\n\x05TRACK\x10\x00\x12\n\n\x06\x41RTIST\x10\x01\x12\t\n\x05\x41LBUM\x10\x02\x12\x0f\n\x0bOPAQUE_SEED\x10\x03\"\xd1\x01\n\x14MagicPlaylistRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\x15\n\rplaylist_name\x18\x02 \x01(\t\x12\x13\n\x0bplaylist_id\x18\x03 \x01(\t\x12 \n\x04seed\x18\x04 \x03(\x0b\x32\x12.MagicPlaylistSeed\x12\x1b\n\x13num_recommendations\x18\x05 \x01(\x05\x12)\n\x1ainclude_all_track_metadata\x18\x06 \x01(\x08:\x05\x66\x61lse\x12\x12\n\nmodel_name\x18\x07 \x01(\t\"\\\n\x15MagicPlaylistResponse\x12\x1b\n\x08playlist\x18\x01 \x01(\x0b\x32\t.Playlist\x12&\n\x0eplaylist_entry\x18\x02 \x03(\x0b\x32\x0e.PlaylistEntry\"\xf8\x01\n\x12\x46lushLockerRequest\x12\x0f\n\x07gaia_id\x18\x01 
\x01(\x03\x12\x13\n\x0bgaia_cookie\x18\x02 \x01(\t\x12#\n\x15remove_audio_binaries\x18\x03 \x01(\x08:\x04true\x12#\n\x15remove_image_binaries\x18\x04 \x01(\x08:\x04true\x12\x1f\n\x11send_notification\x18\x05 \x01(\x08:\x04true\x12&\n\x17reset_subscription_type\x18\x06 \x01(\x08:\x05\x66\x61lse\x12)\n\x1bnotify_fine_grained_updates\x18\x08 \x01(\x08:\x04true\"\x8a\x01\n\x13\x46lushLockerResponse\x12\x16\n\x0etracks_removed\x18\x01 \x01(\x05\x12\x17\n\x0f\x65ntries_removed\x18\x02 \x01(\x05\x12\x19\n\x11playlists_removed\x18\x03 \x01(\x05\x12\'\n\x1fsuccess_reset_subscription_type\x18\x04 \x01(\x08\"6\n\x12LockerNotification\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\x0f\n\x07payload\x18\x02 \x01(\x0c\"\xd6\x01\n\x05\x41lbum\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x61lbum_artist\x18\x02 \x01(\t\x12\x1c\n\talbum_art\x18\x03 \x01(\x0b\x32\t.ImageRef\x12\x13\n\x0btrack_count\x18\x04 \x01(\x05\x12\x18\n\x10last_time_played\x18\x05 \x01(\x03\x12\x16\n\x0eis_compilation\x18\x06 \x01(\x08\x12\x18\n\x10\x61lbum_metajam_id\x18\x07 \x01(\t\x12\x1a\n\x12\x63reation_timestamp\x18\x08 \x01(\x03\x12\x0e\n\x06\x61rtist\x18\t \x01(\t\"\xa3\x01\n\x0e\x41lbumSortOrder\x12\x31\n\tattribute\x18\x01 \x01(\x0e\x32\x1e.AlbumSortOrder.AlbumAttribute\x12\x19\n\ndescending\x18\x02 \x01(\x08:\x05\x66\x61lse\"C\n\x0e\x41lbumAttribute\x12\x14\n\x10LAST_PLAYED_TIME\x10\x01\x12\x08\n\x04NAME\x10\x02\x12\x11\n\rCREATION_TIME\x10\x03\"]\n\x10GetAlbumsRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12#\n\nsort_order\x18\x02 \x01(\x0b\x32\x0f.AlbumSortOrder\x12\x13\n\x0bmax_results\x18\x03 \x01(\x05\"*\n\x11GetAlbumsResponse\x12\x15\n\x05\x61lbum\x18\x01 \x03(\x0b\x32\x06.Album\"H\n\x06\x41rtist\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11total_track_count\x18\x02 \x01(\x05\x12\x15\n\x05\x61lbum\x18\x03 \x03(\x0b\x32\x06.Album\",\n\x0f\x41rtistSortOrder\x12\x19\n\ndescending\x18\x02 \x01(\x08:\x05\x66\x61lse\"_\n\x11GetArtistsRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12$\n\nsort_order\x18\x02 \x01(\x0b\x32\x10.ArtistSortOrder\x12\x13\n\x0bmax_results\x18\x03 \x01(\x05\"-\n\x12GetArtistsResponse\x12\x17\n\x06\x61rtist\x18\x01 \x03(\x0b\x32\x07.Artist\"L\n\nMusicGenre\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11total_track_count\x18\x02 \x01(\x05\x12\x15\n\x05\x61lbum\x18\x03 \x03(\x0b\x32\x06.Album\"+\n\x0eGenreSortOrder\x12\x19\n\ndescending\x18\x02 \x01(\x08:\x05\x66\x61lse\"]\n\x10GetGenresRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12#\n\nsort_order\x18\x02 \x01(\x0b\x32\x0f.GenreSortOrder\x12\x13\n\x0bmax_results\x18\x03 \x01(\x05\"/\n\x11GetGenresResponse\x12\x1a\n\x05genre\x18\x01 \x03(\x0b\x32\x0b.MusicGenre\"\xe6\x02\n GetDynamicPlaylistEntriesRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12[\n\x15playlist_entries_type\x18\x04 \x02(\x0e\x32<.GetDynamicPlaylistEntriesRequest.DynamicPlaylistEntriesType\x12\x13\n\x0bmax_results\x18\x02 \x01(\x05\x12\x1a\n\x12\x63ontinuation_token\x18\x03 \x01(\t\x12)\n\x1ainclude_all_track_metadata\x18\x05 \x01(\x08:\x05\x66\x61lse\"x\n\x1a\x44ynamicPlaylistEntriesType\x12\r\n\tPURCHASED\x10\x01\x12\r\n\tTHUMBS_UP\x10\x02\x12\x12\n\x0eRECENTLY_ADDED\x10\x03\x12\x0c\n\x08PROMOTED\x10\x04\x12\x1a\n\x16PROMOTED_AND_PURCHASED\x10\x05\"\xda\x03\n!GetDynamicPlaylistEntriesResponse\x12\x46\n\rresponse_code\x18\x01 \x02(\x0e\x32/.GetDynamicPlaylistEntriesResponse.ResponseCode\x12&\n\x0eplaylist_entry\x18\x02 \x03(\x0b\x32\x0e.PlaylistEntry\x12\x1f\n\x17\x65stimated_total_results\x18\x03 \x01(\x03\x12\x1a\n\x12\x63ontinuation_token\x18\x04 
\x01(\t\x12\\\n\x15playlist_entries_type\x18\x05 \x01(\x0e\x32=.GetDynamicPlaylistEntriesResponse.DynamicPlaylistEntriesType\"\x85\x01\n\x1a\x44ynamicPlaylistEntriesType\x12\r\n\tPURCHASED\x10\x01\x12\r\n\tTHUMBS_UP\x10\x02\x12\x12\n\x0eRECENTLY_ADDED\x10\x03\x12\x0c\n\x08PROMOTED\x10\x04\x12\x0b\n\x07UNKNOWN\x10\x05\x12\x1a\n\x16PROMOTED_AND_PURCHASED\x10\x06\"\"\n\x0cResponseCode\x12\x06\n\x02OK\x10\x01\x12\n\n\x06NOT_OK\x10\x02\"4\n!GetAggregationsByTrackTypeRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\"\xea\x01\n\x12TrackTypeAggregate\x12\x37\n\x10track_type_value\x18\x01 \x01(\x0e\x32\x1d.TrackTypeAggregate.TrackType\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"\x8b\x01\n\tTrackType\x12\x11\n\rMATCHED_TRACK\x10\x01\x12\x13\n\x0fUNMATCHED_TRACK\x10\x02\x12\x0f\n\x0bLOCAL_TRACK\x10\x03\x12\x13\n\x0fPURCHASED_TRACK\x10\x04\x12\x1f\n\x1bMETADATA_ONLY_MATCHED_TRACK\x10\x05\x12\x0f\n\x0bPROMO_TRACK\x10\x06\"W\n\"GetAggregationsByTrackTypeResponse\x12\x31\n\x14track_type_aggregate\x18\x01 \x03(\x0b\x32\x13.TrackTypeAggregate\"=\n*GetAggregationsByAvailabilityStatusRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\"\x83\x02\n\x1b\x41vailabilityStatusAggregate\x12L\n\x13\x61vailability_status\x18\x01 \x01(\x0e\x32/.AvailabilityStatusAggregate.AvailabilityStatus\x12\r\n\x05\x63ount\x18\x02 \x01(\x05\"\x86\x01\n\x12\x41vailabilityStatus\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07MATCHED\x10\x02\x12\x14\n\x10UPLOAD_REQUESTED\x10\x03\x12\r\n\tAVAILABLE\x10\x04\x12\x12\n\x0e\x46ORCE_REUPLOAD\x10\x05\x12\x1d\n\x19UPLOAD_PERMANENTLY_FAILED\x10\x06\"r\n+GetAggregationsByAvailabilityStatusResponse\x12\x43\n\x1d\x61vailability_status_aggregate\x18\x01 \x03(\x0b\x32\x1c.AvailabilityStatusAggregate\"7\n\x15\x41\x64\x64PromoTracksRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\r\n\x05genre\x18\x02 \x03(\t\"/\n\x16\x41\x64\x64PromoTracksResponse\x12\x15\n\x05track\x18\x01 \x03(\x0b\x32\x06.Track\"J\n\x1eGetPlaylistAggregationsRequest\x12\x0f\n\x07gaia_id\x18\x01 \x02(\x03\x12\x17\n\x0bmax_results\x18\x02 \x01(\x05:\x02\x31\x34\"\x83\x01\n\x11PlaylistAggregate\x12\x13\n\x0bplaylist_id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x1c\n\talbum_art\x18\x03 \x01(\x0b\x32\t.ImageRef\x12\x13\n\x0btrack_count\x18\x04 \x01(\x03\x12\x18\n\x10last_time_played\x18\x05 \x01(\x03\"Q\n\x1fGetPlaylistAggregationsResponse\x12.\n\x12playlist_aggregate\x18\x01 \x03(\x0b\x32\x12.PlaylistAggregate\"?\n\x1bRemoteControlCommandRequest\x12\x0f\n\x07gaia_id\x18\x01 \x01(\x03\x12\x0f\n\x07\x63ommand\x18\x02 \x01(\t\"\xb3\x01\n\x1cRemoteControlCommandResponse\x12\x41\n\rresponse_code\x18\x01 \x01(\x0e\x32*.RemoteControlCommandResponse.ResponseCode\"P\n\x0cResponseCode\x12\x06\n\x02OK\x10\x01\x12\x10\n\x0cNO_PUBLISHER\x10\x02\x12\x13\n\x0fINVALID_REQUEST\x10\x03\x12\x11\n\rPUBLISH_ERROR\x10\x04')
_AUDIOREF_STORE = descriptor.EnumDescriptor(
name='Store',
full_name='AudioRef.Store',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='BLOBSTORE', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='SM_V2', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=244,
serialized_end=277,
)
_IMAGEREF_STORE = descriptor.EnumDescriptor(
name='Store',
full_name='ImageRef.Store',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='SHOEBOX', index=0, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=434,
serialized_end=454,
)
_IMAGEREF_ORIGIN = descriptor.EnumDescriptor(
name='Origin',
full_name='ImageRef.Origin',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PERSONAL', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='STORE', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=456,
serialized_end=489,
)
_TRACK_AVAILABILITYSTATUS = descriptor.EnumDescriptor(
name='AvailabilityStatus',
full_name='Track.AvailabilityStatus',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PENDING', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='MATCHED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_REQUESTED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='AVAILABLE', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FORCE_REUPLOAD', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_PERMANENTLY_FAILED', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2101,
serialized_end=2235,
)
_TRACK_CONTENTTYPE = descriptor.EnumDescriptor(
name='ContentType',
full_name='Track.ContentType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='MP3', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='M4A', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='AAC', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FLAC', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='OGG', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='WMA', index=5, number=6,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='M4P', index=6, number=7,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALAC', index=7, number=8,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2237,
serialized_end=2324,
)
_TRACK_CHANNELS = descriptor.EnumDescriptor(
name='Channels',
full_name='Track.Channels',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='MONO', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='STEREO', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2326,
serialized_end=2358,
)
_TRACK_TRACKTYPE = descriptor.EnumDescriptor(
name='TrackType',
full_name='Track.TrackType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='MATCHED_TRACK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UNMATCHED_TRACK', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='LOCAL_TRACK', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PURCHASED_TRACK', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='METADATA_ONLY_MATCHED_TRACK', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMO_TRACK', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2361,
serialized_end=2500,
)
_TRACK_RATING = descriptor.EnumDescriptor(
name='Rating',
full_name='Track.Rating',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='NOT_RATED', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ONE_STAR', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TWO_STARS', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='THREE_STARS', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FOUR_STARS', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FIVE_STARS', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2502,
serialized_end=2603,
)
_PLAYLIST_PLAYLISTTYPE = descriptor.EnumDescriptor(
name='PlaylistType',
full_name='Playlist.PlaylistType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='USER_GENERATED', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='MAGIC', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMO', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2891,
serialized_end=2947,
)
_PLAYLISTENTRY_RELATIVEPOSITIONIDTYPE = descriptor.EnumDescriptor(
name='RelativePositionIdType',
full_name='PlaylistEntry.RelativePositionIdType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='SERVER', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CLIENT', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3332,
serialized_end=3380,
)
_TRACKSEARCHRESTRICTION_TRACKATTRIBUTE = descriptor.EnumDescriptor(
name='TrackAttribute',
full_name='TrackSearchRestriction.TrackAttribute',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='TITLE', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ARTIST', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALBUM', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALBUM_ARTIST', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GENRE', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='AVAILABILITY_STATUS', index=5, number=6,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TRACK_TYPE', index=6, number=7,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='YEAR', index=7, number=8,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='STORE_ID', index=8, number=9,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALBUM_METAJAM_ID', index=9, number=10,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3549,
serialized_end=3715,
)
_TRACKSEARCHRESTRICTION_COMPARISONTYPE = descriptor.EnumDescriptor(
name='ComparisonType',
full_name='TrackSearchRestriction.ComparisonType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='EQUAL', index=0, number=0,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_EQUAL', index=1, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GREATER_THAN', index=2, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GREATER_EQUAL', index=3, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='LESS_THAN', index=4, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='LESS_EQUAL', index=5, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PARTIAL_MATCH', index=6, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3718,
serialized_end=3847,
)
_TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE = descriptor.EnumDescriptor(
name='RestrictionSetType',
full_name='TrackSearchRestrictionSet.RestrictionSetType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='AND', index=0, number=0,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='OR', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4031,
serialized_end=4068,
)
_TRACKSORTORDER_TRACKATTRIBUTE = descriptor.EnumDescriptor(
name='TrackAttribute',
full_name='TrackSortOrder.TrackAttribute',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='LAST_MODIFIED_TIME', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ARTIST', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALBUM', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TITLE', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TRACK_NUMBER', index=4, number=6,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PLAY_COUNT', index=5, number=9,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='DURATION_MILLIS', index=6, number=10,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RATING', index=7, number=11,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CREATION_TIME', index=8, number=12,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4167,
serialized_end=4327,
)
_GETTRACKSREQUEST_TRACKPROJECTION = descriptor.EnumDescriptor(
name='TrackProjection',
full_name='GetTracksRequest.TrackProjection',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='FULL', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FRONTEND_VIEW', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4666,
serialized_end=4712,
)
_GETTRACKSRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetTracksResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_MODIFIED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GONE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4876,
serialized_end=4926,
)
_GETPLAYLISTENTRIESRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetPlaylistEntriesResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_MODIFIED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GONE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4876,
serialized_end=4926,
)
_PLAYLISTSORTORDER_PLAYLISTATTRIBUTE = descriptor.EnumDescriptor(
name='PlaylistAttribute',
full_name='PlaylistSortOrder.PlaylistAttribute',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='LAST_MODIFIED_TIME', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TITLE', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CREATION_TIME', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RECENT_TIMESTAMP', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=5538,
serialized_end=5633,
)
_GETPLAYLISTSRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetPlaylistsResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_MODIFIED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GONE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4876,
serialized_end=4926,
)
_BATCHLOOKUPREQUEST_METADATATYPE = descriptor.EnumDescriptor(
name='MetadataType',
full_name='BatchLookupRequest.MetadataType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='TRACK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PLAYLIST', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PLAYLIST_ENTRY', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6467,
serialized_end=6526,
)
_MUTATERESPONSE_MUTATERESPONSECODE = descriptor.EnumDescriptor(
name='MutateResponseCode',
full_name='MutateResponse.MutateResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='INVALID_REQUEST', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='METADATA_TOO_LARGE', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7047,
serialized_end=7134,
)
_MUTATERESPONSE_AVAILABILITYSTATUS = descriptor.EnumDescriptor(
name='AvailabilityStatus',
full_name='MutateResponse.AvailabilityStatus',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PENDING', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='MATCHED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_REQUESTED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='AVAILABLE', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FORCE_REUPLOAD', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_PERMANENTLY_FAILED', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2101,
serialized_end=2235,
)
_BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE = descriptor.EnumDescriptor(
name='BatchMutateTracksResponseCode',
full_name='BatchMutateTracksResponse.BatchMutateTracksResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7634,
serialized_end=7687,
)
_BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE = descriptor.EnumDescriptor(
name='BatchMutatePlaylistsResponseCode',
full_name='BatchMutatePlaylistsResponse.BatchMutatePlaylistsResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=8319,
serialized_end=8375,
)
_BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE = descriptor.EnumDescriptor(
name='BatchMutatePlaylistEntriesResponseCode',
full_name='BatchMutatePlaylistEntriesResponse.BatchMutatePlaylistEntriesResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9033,
serialized_end=9095,
)
_MAGICPLAYLISTSEED_SEEDTYPE = descriptor.EnumDescriptor(
name='SeedType',
full_name='MagicPlaylistSeed.SeedType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='TRACK', index=0, number=0,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ARTIST', index=1, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALBUM', index=2, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='OPAQUE_SEED', index=3, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9181,
serialized_end=9242,
)
_ALBUMSORTORDER_ALBUMATTRIBUTE = descriptor.EnumDescriptor(
name='AlbumAttribute',
full_name='AlbumSortOrder.AlbumAttribute',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='LAST_PLAYED_TIME', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NAME', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CREATION_TIME', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=10312,
serialized_end=10379,
)
_GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE = descriptor.EnumDescriptor(
name='DynamicPlaylistEntriesType',
full_name='GetDynamicPlaylistEntriesRequest.DynamicPlaylistEntriesType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PURCHASED', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='THUMBS_UP', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RECENTLY_ADDED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED_AND_PURCHASED', index=4, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11290,
serialized_end=11410,
)
_GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE = descriptor.EnumDescriptor(
name='DynamicPlaylistEntriesType',
full_name='GetDynamicPlaylistEntriesResponse.DynamicPlaylistEntriesType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PURCHASED', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='THUMBS_UP', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RECENTLY_ADDED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UNKNOWN', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED_AND_PURCHASED', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11718,
serialized_end=11851,
)
_GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetDynamicPlaylistEntriesResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_OK', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11853,
serialized_end=11887,
)
_TRACKTYPEAGGREGATE_TRACKTYPE = descriptor.EnumDescriptor(
name='TrackType',
full_name='TrackTypeAggregate.TrackType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='MATCHED_TRACK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UNMATCHED_TRACK', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='LOCAL_TRACK', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PURCHASED_TRACK', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='METADATA_ONLY_MATCHED_TRACK', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMO_TRACK', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2361,
serialized_end=2500,
)
_AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS = descriptor.EnumDescriptor(
name='AvailabilityStatus',
full_name='AvailabilityStatusAggregate.AvailabilityStatus',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PENDING', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='MATCHED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_REQUESTED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='AVAILABLE', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FORCE_REUPLOAD', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_PERMANENTLY_FAILED', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2101,
serialized_end=2235,
)
_REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='RemoteControlCommandResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NO_PUBLISHER', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='INVALID_REQUEST', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PUBLISH_ERROR', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=13274,
serialized_end=13354,
)
_AUDIOREF = descriptor.Descriptor(
name='AudioRef',
full_name='AudioRef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='store', full_name='AudioRef.store', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='ref', full_name='AudioRef.ref', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value="",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='url', full_name='AudioRef.url', index=2,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='bit_rate', full_name='AudioRef.bit_rate', index=3,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sample_rate', full_name='AudioRef.sample_rate', index=4,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='downloadable', full_name='AudioRef.downloadable', index=5,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='duration_millis', full_name='AudioRef.duration_millis', index=6,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='rematch_timestamp', full_name='AudioRef.rematch_timestamp', index=7,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='invalid_due_to_wipeout', full_name='AudioRef.invalid_due_to_wipeout', index=8,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_AUDIOREF_STORE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=29,
serialized_end=277,
)
_IMAGEREF = descriptor.Descriptor(
name='ImageRef',
full_name='ImageRef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='store', full_name='ImageRef.store', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=3,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='width', full_name='ImageRef.width', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='height', full_name='ImageRef.height', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='url', full_name='ImageRef.url', index=3,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='invalid_due_to_wipeout', full_name='ImageRef.invalid_due_to_wipeout', index=4,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='origin', full_name='ImageRef.origin', index=5,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_IMAGEREF_STORE,
_IMAGEREF_ORIGIN,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=280,
serialized_end=489,
)
_UPLOADEDUITSID3TAG = descriptor.Descriptor(
name='UploadedUitsId3Tag',
full_name='UploadedUitsId3Tag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='owner', full_name='UploadedUitsId3Tag.owner', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='data', full_name='UploadedUitsId3Tag.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value="",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=491,
serialized_end=540,
)
_TRACK = descriptor.Descriptor(
name='Track',
full_name='Track',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='Track.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='Track.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='Track.creation_timestamp', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_modified_timestamp', full_name='Track.last_modified_timestamp', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='deleted', full_name='Track.deleted', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='title', full_name='Track.title', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='artist', full_name='Track.artist', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='artist_hash', full_name='Track.artist_hash', index=7,
number=46, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='composer', full_name='Track.composer', index=8,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album', full_name='Track.album', index=9,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_artist', full_name='Track.album_artist', index=10,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='canonical_album', full_name='Track.canonical_album', index=11,
number=56, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='canonical_artist', full_name='Track.canonical_artist', index=12,
number=57, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='canonical_genre_album', full_name='Track.canonical_genre_album', index=13,
number=58, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='year', full_name='Track.year', index=14,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='comment', full_name='Track.comment', index=15,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_number', full_name='Track.track_number', index=16,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='genre', full_name='Track.genre', index=17,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='duration_millis', full_name='Track.duration_millis', index=18,
number=15, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='beats_per_minute', full_name='Track.beats_per_minute', index=19,
number=16, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='original_bit_rate', full_name='Track.original_bit_rate', index=20,
number=44, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='audio_ref', full_name='Track.audio_ref', index=21,
number=17, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_art_ref', full_name='Track.album_art_ref', index=22,
number=18, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='availability_status', full_name='Track.availability_status', index=23,
number=19, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='play_count', full_name='Track.play_count', index=24,
number=20, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='content_type', full_name='Track.content_type', index=25,
number=25, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_track_count', full_name='Track.total_track_count', index=26,
number=26, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='disc_number', full_name='Track.disc_number', index=27,
number=27, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_disc_count', full_name='Track.total_disc_count', index=28,
number=28, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='channels', full_name='Track.channels', index=29,
number=29, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_type', full_name='Track.track_type', index=30,
number=30, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='use_single_server_copy', full_name='Track.use_single_server_copy', index=31,
number=59, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='rating', full_name='Track.rating', index=32,
number=31, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_size', full_name='Track.estimated_size', index=33,
number=32, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='store_id', full_name='Track.store_id', index=34,
number=33, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='metajam_id', full_name='Track.metajam_id', index=35,
number=34, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='metajam_id_confidence', full_name='Track.metajam_id_confidence', index=36,
number=43, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uits', full_name='Track.uits', index=37,
number=35, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uits_metadata', full_name='Track.uits_metadata', index=38,
number=40, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='compilation', full_name='Track.compilation', index=39,
number=36, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_date_added', full_name='Track.client_date_added', index=40,
number=37, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='recent_timestamp', full_name='Track.recent_timestamp', index=41,
number=38, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='do_not_rematch', full_name='Track.do_not_rematch', index=42,
number=39, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='from_album_purchase', full_name='Track.from_album_purchase', index=43,
number=41, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_metajam_id', full_name='Track.album_metajam_id', index=44,
number=42, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='transaction_id', full_name='Track.transaction_id', index=45,
number=45, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='debug_track', full_name='Track.debug_track', index=46,
number=47, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_title', full_name='Track.normalized_title', index=47,
number=48, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_artist', full_name='Track.normalized_artist', index=48,
number=49, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_album', full_name='Track.normalized_album', index=49,
number=50, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_album_artist', full_name='Track.normalized_album_artist', index=50,
number=51, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_canonical_album', full_name='Track.normalized_canonical_album', index=51,
number=54, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_canonical_artist', full_name='Track.normalized_canonical_artist', index=52,
number=55, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uploader_id', full_name='Track.uploader_id', index=53,
number=52, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_album_id', full_name='Track.client_album_id', index=54,
number=53, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='label_owner_code', full_name='Track.label_owner_code', index=55,
number=60, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='original_content_type', full_name='Track.original_content_type', index=56,
number=61, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uploaded_uits', full_name='Track.uploaded_uits', index=57,
number=71, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACK_AVAILABILITYSTATUS,
_TRACK_CONTENTTYPE,
_TRACK_CHANNELS,
_TRACK_TRACKTYPE,
_TRACK_RATING,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=543,
serialized_end=2603,
)
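
# Editor's note: `_TRACK` above is only the descriptor; the concrete Track
# class is generated further down in this module (via the usual
# reflection.GeneratedProtocolMessageType pattern). The sketch below is
# therefore deferred into a function and assumes a module-level `Track`
# class exists by the time it is called; field names match the
# FieldDescriptors above.
def _example_build_track():
  track = Track()
  track.title = u'Some Title'
  track.artist = u'Some Artist'
  track.album = u'Some Album'
  track.duration_millis = 215000
  # audio_ref is a repeated message field (label=3, type=11): use add().
  ref = track.audio_ref.add()
  ref.store = 1                    # required enum on AudioRef; 1 is its default number
  ref.ref = b'opaque-storage-key'  # required bytes field on AudioRef
  return track.SerializeToString()
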
_TRACKS = descriptor.Descriptor(
name='Tracks',
full_name='Tracks',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track', full_name='Tracks.track', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=2605,
serialized_end=2636,
)
_PLAYLIST = descriptor.Descriptor(
name='Playlist',
full_name='Playlist',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='Playlist.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='Playlist.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='Playlist.creation_timestamp', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_modified_timestamp', full_name='Playlist.last_modified_timestamp', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='deleted', full_name='Playlist.deleted', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='name', full_name='Playlist.name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_type', full_name='Playlist.playlist_type', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_art_ref', full_name='Playlist.playlist_art_ref', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='recent_timestamp', full_name='Playlist.recent_timestamp', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PLAYLIST_PLAYLISTTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=2639,
serialized_end=2947,
)
_PLAYLISTENTRY = descriptor.Descriptor(
name='PlaylistEntry',
full_name='PlaylistEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist_id', full_name='PlaylistEntry.playlist_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='absolute_position', full_name='PlaylistEntry.absolute_position', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='place_after_entry_id', full_name='PlaylistEntry.place_after_entry_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_id', full_name='PlaylistEntry.track_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='id', full_name='PlaylistEntry.id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='PlaylistEntry.client_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='PlaylistEntry.creation_timestamp', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_modified_timestamp', full_name='PlaylistEntry.last_modified_timestamp', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='deleted', full_name='PlaylistEntry.deleted', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='relative_position_id_type', full_name='PlaylistEntry.relative_position_id_type', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track', full_name='PlaylistEntry.track', index=10,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='place_before_entry_id', full_name='PlaylistEntry.place_before_entry_id', index=11,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='string_position', full_name='PlaylistEntry.string_position', index=12,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PLAYLISTENTRY_RELATIVEPOSITIONIDTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=2950,
serialized_end=3380,
)
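
# Editor's note: per the fields above, a PlaylistEntry can be positioned three
# ways: an absolute_position (int64), a relative anchor (place_after_entry_id
# or place_before_entry_id, qualified by relative_position_id_type), or an
# opaque string_position. A minimal sketch of the relative form, assuming the
# generated PlaylistEntry class defined later in this module:
def _example_playlist_entry():
  entry = PlaylistEntry()
  entry.playlist_id = u'some-playlist-id'
  entry.track_id = u'some-track-id'
  # Anchor after an existing entry instead of giving an absolute slot.
  entry.place_after_entry_id = u'some-existing-entry-id'
  return entry
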
_TRACKSEARCHRESTRICTION = descriptor.Descriptor(
name='TrackSearchRestriction',
full_name='TrackSearchRestriction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='TrackSearchRestriction.attribute', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='value', full_name='TrackSearchRestriction.value', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='comparison_type', full_name='TrackSearchRestriction.comparison_type', index=2,
number=3, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKSEARCHRESTRICTION_TRACKATTRIBUTE,
_TRACKSEARCHRESTRICTION_COMPARISONTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=3383,
serialized_end=3847,
)
_TRACKSEARCHRESTRICTIONSET = descriptor.Descriptor(
name='TrackSearchRestrictionSet',
full_name='TrackSearchRestrictionSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='type', full_name='TrackSearchRestrictionSet.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='restriction', full_name='TrackSearchRestrictionSet.restriction', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sub_set', full_name='TrackSearchRestrictionSet.sub_set', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=3850,
serialized_end=4068,
)
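
# Editor's note: TrackSearchRestrictionSet forms a boolean expression tree:
# `restriction` holds leaf predicates (attribute, comparison_type and value,
# all required fields), while `sub_set` recursively nests further sets
# combined according to `type`. The enum value names are not visible at this
# point in the file, so this sketch pulls numbers from the descriptors rather
# than guessing names:
def _example_restriction_tree():
  root = TrackSearchRestrictionSet()
  root.type = _TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE.values[0].number
  leaf = root.restriction.add()
  leaf.attribute = _TRACKSEARCHRESTRICTION_TRACKATTRIBUTE.values[0].number
  leaf.comparison_type = _TRACKSEARCHRESTRICTION_COMPARISONTYPE.values[0].number
  leaf.value = u'rock'
  return root
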
_TRACKSORTORDER = descriptor.Descriptor(
name='TrackSortOrder',
full_name='TrackSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='TrackSortOrder.attribute', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='descending', full_name='TrackSortOrder.descending', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKSORTORDER_TRACKATTRIBUTE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4071,
serialized_end=4327,
)
_GETTRACKSREQUEST = descriptor.Descriptor(
name='GetTracksRequest',
full_name='GetTracksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetTracksRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='updated_min', full_name='GetTracksRequest.updated_min', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='GetTracksRequest.include_deleted', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetTracksRequest.max_results', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetTracksRequest.continuation_token', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='search_restriction', full_name='GetTracksRequest.search_restriction', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetTracksRequest.sort_order', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='restriction_set', full_name='GetTracksRequest.restriction_set', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_projection', full_name='GetTracksRequest.track_projection', index=8,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETTRACKSREQUEST_TRACKPROJECTION,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4330,
serialized_end=4712,
)
_GETTRACKSRESPONSE = descriptor.Descriptor(
name='GetTracksResponse',
full_name='GetTracksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetTracksResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track', full_name='GetTracksResponse.track', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetTracksResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetTracksResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETTRACKSRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4715,
serialized_end=4926,
)
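
# Editor's note: GetTracksRequest/GetTracksResponse implement token-based
# paging: the client echoes each response's continuation_token into the next
# request until the server stops returning one. A minimal sketch, assuming the
# generated classes defined later in this module; `send` is a hypothetical
# transport callable supplied by the caller that takes serialized request
# bytes and returns serialized response bytes:
def _example_paged_track_fetch(gaia_id, send):
  tracks = []
  token = u''
  while True:
    request = GetTracksRequest()
    request.gaia_id = gaia_id  # required int64
    request.max_results = 250
    if token:
      request.continuation_token = token
    response = GetTracksResponse()
    response.ParseFromString(send(request.SerializeToString()))
    tracks.extend(response.track)
    token = response.continuation_token
    if not token:
      return tracks
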
_GETPLAYLISTENTRIESREQUEST = descriptor.Descriptor(
name='GetPlaylistEntriesRequest',
full_name='GetPlaylistEntriesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetPlaylistEntriesRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='updated_min', full_name='GetPlaylistEntriesRequest.updated_min', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='GetPlaylistEntriesRequest.include_deleted', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetPlaylistEntriesRequest.max_results', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistEntriesRequest.continuation_token', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_id_filter', full_name='GetPlaylistEntriesRequest.playlist_id_filter', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_all_track_metadata', full_name='GetPlaylistEntriesRequest.include_all_track_metadata', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='only_show_available_tracks', full_name='GetPlaylistEntriesRequest.only_show_available_tracks', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4929,
serialized_end=5181,
)
_GETPLAYLISTENTRIESRESPONSE = descriptor.Descriptor(
name='GetPlaylistEntriesResponse',
full_name='GetPlaylistEntriesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetPlaylistEntriesResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='GetPlaylistEntriesResponse.playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetPlaylistEntriesResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistEntriesResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETPLAYLISTENTRIESRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5184,
serialized_end=5430,
)
_PLAYLISTSORTORDER = descriptor.Descriptor(
name='PlaylistSortOrder',
full_name='PlaylistSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='PlaylistSortOrder.attribute', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='descending', full_name='PlaylistSortOrder.descending', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PLAYLISTSORTORDER_PLAYLISTATTRIBUTE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5433,
serialized_end=5633,
)
_GETPLAYLISTSREQUEST = descriptor.Descriptor(
name='GetPlaylistsRequest',
full_name='GetPlaylistsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetPlaylistsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='updated_min', full_name='GetPlaylistsRequest.updated_min', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='GetPlaylistsRequest.include_deleted', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetPlaylistsRequest.max_results', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistsRequest.continuation_token', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetPlaylistsRequest.sort_order', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5636,
serialized_end=5809,
)
_GETPLAYLISTSRESPONSE = descriptor.Descriptor(
name='GetPlaylistsResponse',
full_name='GetPlaylistsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetPlaylistsResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist', full_name='GetPlaylistsResponse.playlist', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetPlaylistsResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistsResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETPLAYLISTSRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5812,
serialized_end=6035,
)
_LOOKUPTRACKREQUEST = descriptor.Descriptor(
name='LookupTrackRequest',
full_name='LookupTrackRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='LookupTrackRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='LookupTrackRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6037,
serialized_end=6088,
)
_LOOKUPPLAYLISTENTRYREQUEST = descriptor.Descriptor(
name='LookupPlaylistEntryRequest',
full_name='LookupPlaylistEntryRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='LookupPlaylistEntryRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='LookupPlaylistEntryRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6090,
serialized_end=6149,
)
_LOOKUPPLAYLISTREQUEST = descriptor.Descriptor(
name='LookupPlaylistRequest',
full_name='LookupPlaylistRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='LookupPlaylistRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='LookupPlaylistRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6151,
serialized_end=6205,
)
_BATCHLOOKUPREQUEST = descriptor.Descriptor(
name='BatchLookupRequest',
full_name='BatchLookupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchLookupRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track', full_name='BatchLookupRequest.track', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist', full_name='BatchLookupRequest.playlist', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='metadata_type', full_name='BatchLookupRequest.metadata_type', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='BatchLookupRequest.playlist_entry', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='BatchLookupRequest.include_deleted', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHLOOKUPREQUEST_METADATATYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6208,
serialized_end=6526,
)
_BATCHLOOKUPRESPONSE = descriptor.Descriptor(
name='BatchLookupResponse',
full_name='BatchLookupResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track', full_name='BatchLookupResponse.track', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist', full_name='BatchLookupResponse.playlist', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='BatchLookupResponse.playlist_entry', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6528,
serialized_end=6641,
)
_MUTATETRACKREQUEST = descriptor.Descriptor(
name='MutateTrackRequest',
full_name='MutateTrackRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='create_track', full_name='MutateTrackRequest.create_track', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_track', full_name='MutateTrackRequest.update_track', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='delete_track', full_name='MutateTrackRequest.delete_track', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='partial_update', full_name='MutateTrackRequest.partial_update', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_last_modified', full_name='MutateTrackRequest.update_last_modified', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='undelete_track', full_name='MutateTrackRequest.undelete_track', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6644,
serialized_end=6830,
)
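
# Editor's note: MutateTrackRequest carries one of several alternative
# mutations (create_track, update_track, delete_track, undelete_track) plus
# modifiers (partial_update, update_last_modified); which combinations are
# legal is server-defined, so this sketch only shows a plain delete inside a
# batch, assuming the generated classes defined later in this module:
def _example_delete_mutation(gaia_id, track_id):
  batch = BatchMutateTracksRequest()
  batch.gaia_id = gaia_id                # required int64
  mutation = batch.track_mutation.add()  # repeated MutateTrackRequest
  mutation.delete_track = track_id
  return batch
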
_MUTATERESPONSE = descriptor.Descriptor(
name='MutateResponse',
full_name='MutateResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='MutateResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='id', full_name='MutateResponse.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='child_id', full_name='MutateResponse.child_id', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='MutateResponse.client_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='availability_status', full_name='MutateResponse.availability_status', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='error_message', full_name='MutateResponse.error_message', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MUTATERESPONSE_MUTATERESPONSECODE,
_MUTATERESPONSE_AVAILABILITYSTATUS,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6833,
serialized_end=7271,
)
_BATCHMUTATETRACKSREQUEST = descriptor.Descriptor(
name='BatchMutateTracksRequest',
full_name='BatchMutateTracksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchMutateTracksRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_mutation', full_name='BatchMutateTracksRequest.track_mutation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='BatchMutateTracksRequest.send_notification', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='detect_timestamp_conflict', full_name='BatchMutateTracksRequest.detect_timestamp_conflict', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='BatchMutateTracksRequest.notify_fine_grained_updates', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7274,
serialized_end=7479,
)
_BATCHMUTATETRACKSRESPONSE = descriptor.Descriptor(
name='BatchMutateTracksResponse',
full_name='BatchMutateTracksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='BatchMutateTracksResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='mutate_response', full_name='BatchMutateTracksResponse.mutate_response', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7482,
serialized_end=7687,
)
_MUTATEPLAYLISTREQUEST = descriptor.Descriptor(
name='MutatePlaylistRequest',
full_name='MutatePlaylistRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='create_playlist', full_name='MutatePlaylistRequest.create_playlist', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_playlist', full_name='MutatePlaylistRequest.update_playlist', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='delete_playlist', full_name='MutatePlaylistRequest.delete_playlist', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='partial_update', full_name='MutatePlaylistRequest.partial_update', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='MutatePlaylistRequest.playlist_entry', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_last_modified', full_name='MutatePlaylistRequest.update_last_modified', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='undelete_playlist', full_name='MutatePlaylistRequest.undelete_playlist', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7690,
serialized_end=7937,
)
_BATCHMUTATEPLAYLISTSREQUEST = descriptor.Descriptor(
name='BatchMutatePlaylistsRequest',
full_name='BatchMutatePlaylistsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchMutatePlaylistsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_mutation', full_name='BatchMutatePlaylistsRequest.playlist_mutation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='BatchMutatePlaylistsRequest.send_notification', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='detect_timestamp_conflict', full_name='BatchMutatePlaylistsRequest.detect_timestamp_conflict', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='BatchMutatePlaylistsRequest.notify_fine_grained_updates', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7940,
serialized_end=8155,
)
_BATCHMUTATEPLAYLISTSRESPONSE = descriptor.Descriptor(
name='BatchMutatePlaylistsResponse',
full_name='BatchMutatePlaylistsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='BatchMutatePlaylistsResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='mutate_response', full_name='BatchMutatePlaylistsResponse.mutate_response', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8158,
serialized_end=8375,
)
_MUTATEPLAYLISTENTRYREQUEST = descriptor.Descriptor(
name='MutatePlaylistEntryRequest',
full_name='MutatePlaylistEntryRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='create_playlist_entry', full_name='MutatePlaylistEntryRequest.create_playlist_entry', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_playlist_entry', full_name='MutatePlaylistEntryRequest.update_playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='delete_playlist_entry', full_name='MutatePlaylistEntryRequest.delete_playlist_entry', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_last_modified', full_name='MutatePlaylistEntryRequest.update_last_modified', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='undelete_playlist_entry', full_name='MutatePlaylistEntryRequest.undelete_playlist_entry', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8378,
serialized_end=8616,
)
_BATCHMUTATEPLAYLISTENTRIESREQUEST = descriptor.Descriptor(
name='BatchMutatePlaylistEntriesRequest',
full_name='BatchMutatePlaylistEntriesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchMutatePlaylistEntriesRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry_mutation', full_name='BatchMutatePlaylistEntriesRequest.playlist_entry_mutation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='BatchMutatePlaylistEntriesRequest.send_notification', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='detect_timestamp_conflict', full_name='BatchMutatePlaylistEntriesRequest.detect_timestamp_conflict', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='BatchMutatePlaylistEntriesRequest.notify_fine_grained_updates', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8619,
serialized_end=8851,
)
_BATCHMUTATEPLAYLISTENTRIESRESPONSE = descriptor.Descriptor(
name='BatchMutatePlaylistEntriesResponse',
full_name='BatchMutatePlaylistEntriesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='BatchMutatePlaylistEntriesResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='mutate_response', full_name='BatchMutatePlaylistEntriesResponse.mutate_response', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8854,
serialized_end=9095,
)
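# --- Magic ("instant mix") playlist descriptors ---
# A MagicPlaylistRequest names a target playlist and carries repeated
# typed seeds (MagicPlaylistSeed) plus a num_recommendations cap; the
# response returns the playlist together with its entries.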
_MAGICPLAYLISTSEED = descriptor.Descriptor(
name='MagicPlaylistSeed',
full_name='MagicPlaylistSeed',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='seed_type', full_name='MagicPlaylistSeed.seed_type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='seed', full_name='MagicPlaylistSeed.seed', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MAGICPLAYLISTSEED_SEEDTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9098,
serialized_end=9242,
)
_MAGICPLAYLISTREQUEST = descriptor.Descriptor(
name='MagicPlaylistRequest',
full_name='MagicPlaylistRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='MagicPlaylistRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_name', full_name='MagicPlaylistRequest.playlist_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_id', full_name='MagicPlaylistRequest.playlist_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='seed', full_name='MagicPlaylistRequest.seed', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='num_recommendations', full_name='MagicPlaylistRequest.num_recommendations', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_all_track_metadata', full_name='MagicPlaylistRequest.include_all_track_metadata', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='model_name', full_name='MagicPlaylistRequest.model_name', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9245,
serialized_end=9454,
)
_MAGICPLAYLISTRESPONSE = descriptor.Descriptor(
name='MagicPlaylistResponse',
full_name='MagicPlaylistResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist', full_name='MagicPlaylistResponse.playlist', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='MagicPlaylistResponse.playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9456,
serialized_end=9548,
)
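# --- Locker maintenance descriptors ---
# FlushLockerRequest/Response describe a bulk wipe of a user's locker
# (tracks, playlist entries, playlists, optionally audio/image binaries);
# LockerNotification pairs a gaia_id with an opaque bytes payload.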
_FLUSHLOCKERREQUEST = descriptor.Descriptor(
name='FlushLockerRequest',
full_name='FlushLockerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='FlushLockerRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='gaia_cookie', full_name='FlushLockerRequest.gaia_cookie', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='remove_audio_binaries', full_name='FlushLockerRequest.remove_audio_binaries', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='remove_image_binaries', full_name='FlushLockerRequest.remove_image_binaries', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='FlushLockerRequest.send_notification', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='reset_subscription_type', full_name='FlushLockerRequest.reset_subscription_type', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='FlushLockerRequest.notify_fine_grained_updates', index=6,
number=8, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9551,
serialized_end=9799,
)
_FLUSHLOCKERRESPONSE = descriptor.Descriptor(
name='FlushLockerResponse',
full_name='FlushLockerResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='tracks_removed', full_name='FlushLockerResponse.tracks_removed', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='entries_removed', full_name='FlushLockerResponse.entries_removed', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlists_removed', full_name='FlushLockerResponse.playlists_removed', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='success_reset_subscription_type', full_name='FlushLockerResponse.success_reset_subscription_type', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9802,
serialized_end=9940,
)
_LOCKERNOTIFICATION = descriptor.Descriptor(
name='LockerNotification',
full_name='LockerNotification',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='LockerNotification.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='payload', full_name='LockerNotification.payload', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value="",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9942,
serialized_end=9996,
)
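# --- Library browse descriptors ---
# Album, Artist, and MusicGenre aggregate library contents for browsing;
# each has a matching sort-order message and Get*Request/Response pair
# keyed by gaia_id with an optional sort_order and max_results.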
_ALBUM = descriptor.Descriptor(
name='Album',
full_name='Album',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='name', full_name='Album.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_artist', full_name='Album.album_artist', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_art', full_name='Album.album_art', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_count', full_name='Album.track_count', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_time_played', full_name='Album.last_time_played', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='is_compilation', full_name='Album.is_compilation', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_metajam_id', full_name='Album.album_metajam_id', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='Album.creation_timestamp', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='artist', full_name='Album.artist', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9999,
serialized_end=10213,
)
_ALBUMSORTORDER = descriptor.Descriptor(
name='AlbumSortOrder',
full_name='AlbumSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='AlbumSortOrder.attribute', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='descending', full_name='AlbumSortOrder.descending', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_ALBUMSORTORDER_ALBUMATTRIBUTE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10216,
serialized_end=10379,
)
_GETALBUMSREQUEST = descriptor.Descriptor(
name='GetAlbumsRequest',
full_name='GetAlbumsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetAlbumsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetAlbumsRequest.sort_order', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetAlbumsRequest.max_results', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10381,
serialized_end=10474,
)
_GETALBUMSRESPONSE = descriptor.Descriptor(
name='GetAlbumsResponse',
full_name='GetAlbumsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='album', full_name='GetAlbumsResponse.album', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10476,
serialized_end=10518,
)
_ARTIST = descriptor.Descriptor(
name='Artist',
full_name='Artist',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='name', full_name='Artist.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_track_count', full_name='Artist.total_track_count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album', full_name='Artist.album', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10520,
serialized_end=10592,
)
_ARTISTSORTORDER = descriptor.Descriptor(
name='ArtistSortOrder',
full_name='ArtistSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='descending', full_name='ArtistSortOrder.descending', index=0,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10594,
serialized_end=10638,
)
_GETARTISTSREQUEST = descriptor.Descriptor(
name='GetArtistsRequest',
full_name='GetArtistsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetArtistsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetArtistsRequest.sort_order', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetArtistsRequest.max_results', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10640,
serialized_end=10735,
)
_GETARTISTSRESPONSE = descriptor.Descriptor(
name='GetArtistsResponse',
full_name='GetArtistsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='artist', full_name='GetArtistsResponse.artist', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10737,
serialized_end=10782,
)
_MUSICGENRE = descriptor.Descriptor(
name='MusicGenre',
full_name='MusicGenre',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='name', full_name='MusicGenre.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_track_count', full_name='MusicGenre.total_track_count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album', full_name='MusicGenre.album', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10784,
serialized_end=10860,
)
_GENRESORTORDER = descriptor.Descriptor(
name='GenreSortOrder',
full_name='GenreSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='descending', full_name='GenreSortOrder.descending', index=0,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10862,
serialized_end=10905,
)
_GETGENRESREQUEST = descriptor.Descriptor(
name='GetGenresRequest',
full_name='GetGenresRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetGenresRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetGenresRequest.sort_order', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetGenresRequest.max_results', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10907,
serialized_end=11000,
)
_GETGENRESRESPONSE = descriptor.Descriptor(
name='GetGenresResponse',
full_name='GetGenresResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='genre', full_name='GetGenresResponse.genre', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11002,
serialized_end=11049,
)
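# --- Dynamic playlist descriptors ---
# Server-derived playlists are selected by a playlist_entries_type enum
# and paged with max_results plus an opaque continuation_token.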
_GETDYNAMICPLAYLISTENTRIESREQUEST = descriptor.Descriptor(
name='GetDynamicPlaylistEntriesRequest',
full_name='GetDynamicPlaylistEntriesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetDynamicPlaylistEntriesRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entries_type', full_name='GetDynamicPlaylistEntriesRequest.playlist_entries_type', index=1,
number=4, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetDynamicPlaylistEntriesRequest.max_results', index=2,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetDynamicPlaylistEntriesRequest.continuation_token', index=3,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_all_track_metadata', full_name='GetDynamicPlaylistEntriesRequest.include_all_track_metadata', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11052,
serialized_end=11410,
)
_GETDYNAMICPLAYLISTENTRIESRESPONSE = descriptor.Descriptor(
name='GetDynamicPlaylistEntriesResponse',
full_name='GetDynamicPlaylistEntriesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetDynamicPlaylistEntriesResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='GetDynamicPlaylistEntriesResponse.playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetDynamicPlaylistEntriesResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetDynamicPlaylistEntriesResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entries_type', full_name='GetDynamicPlaylistEntriesResponse.playlist_entries_type', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE,
_GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11413,
serialized_end=11887,
)
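# --- Aggregation and promo descriptors ---
# Per-user counts grouped by track type, availability status, and
# playlist, plus AddPromoTracks* for seeding promotional tracks by genre.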
_GETAGGREGATIONSBYTRACKTYPEREQUEST = descriptor.Descriptor(
name='GetAggregationsByTrackTypeRequest',
full_name='GetAggregationsByTrackTypeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetAggregationsByTrackTypeRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11889,
serialized_end=11941,
)
_TRACKTYPEAGGREGATE = descriptor.Descriptor(
name='TrackTypeAggregate',
full_name='TrackTypeAggregate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track_type_value', full_name='TrackTypeAggregate.track_type_value', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='count', full_name='TrackTypeAggregate.count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKTYPEAGGREGATE_TRACKTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11944,
serialized_end=12178,
)
_GETAGGREGATIONSBYTRACKTYPERESPONSE = descriptor.Descriptor(
name='GetAggregationsByTrackTypeResponse',
full_name='GetAggregationsByTrackTypeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track_type_aggregate', full_name='GetAggregationsByTrackTypeResponse.track_type_aggregate', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12180,
serialized_end=12267,
)
_GETAGGREGATIONSBYAVAILABILITYSTATUSREQUEST = descriptor.Descriptor(
name='GetAggregationsByAvailabilityStatusRequest',
full_name='GetAggregationsByAvailabilityStatusRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetAggregationsByAvailabilityStatusRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12269,
serialized_end=12330,
)
_AVAILABILITYSTATUSAGGREGATE = descriptor.Descriptor(
name='AvailabilityStatusAggregate',
full_name='AvailabilityStatusAggregate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='availability_status', full_name='AvailabilityStatusAggregate.availability_status', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='count', full_name='AvailabilityStatusAggregate.count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12333,
serialized_end=12592,
)
_GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE = descriptor.Descriptor(
name='GetAggregationsByAvailabilityStatusResponse',
full_name='GetAggregationsByAvailabilityStatusResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='availability_status_aggregate', full_name='GetAggregationsByAvailabilityStatusResponse.availability_status_aggregate', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12594,
serialized_end=12708,
)
_ADDPROMOTRACKSREQUEST = descriptor.Descriptor(
name='AddPromoTracksRequest',
full_name='AddPromoTracksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='AddPromoTracksRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='genre', full_name='AddPromoTracksRequest.genre', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12710,
serialized_end=12765,
)
_ADDPROMOTRACKSRESPONSE = descriptor.Descriptor(
name='AddPromoTracksResponse',
full_name='AddPromoTracksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track', full_name='AddPromoTracksResponse.track', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12767,
serialized_end=12814,
)
_GETPLAYLISTAGGREGATIONSREQUEST = descriptor.Descriptor(
name='GetPlaylistAggregationsRequest',
full_name='GetPlaylistAggregationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetPlaylistAggregationsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetPlaylistAggregationsRequest.max_results', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=14,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12816,
serialized_end=12890,
)
_PLAYLISTAGGREGATE = descriptor.Descriptor(
name='PlaylistAggregate',
full_name='PlaylistAggregate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist_id', full_name='PlaylistAggregate.playlist_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='name', full_name='PlaylistAggregate.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_art', full_name='PlaylistAggregate.album_art', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_count', full_name='PlaylistAggregate.track_count', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_time_played', full_name='PlaylistAggregate.last_time_played', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12893,
serialized_end=13024,
)
_GETPLAYLISTAGGREGATIONSRESPONSE = descriptor.Descriptor(
name='GetPlaylistAggregationsResponse',
full_name='GetPlaylistAggregationsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist_aggregate', full_name='GetPlaylistAggregationsResponse.playlist_aggregate', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=13026,
serialized_end=13107,
)
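# --- Remote control descriptors ---
# A free-form command string addressed to a gaia_id, acknowledged with a
# response-code enum.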
_REMOTECONTROLCOMMANDREQUEST = descriptor.Descriptor(
name='RemoteControlCommandRequest',
full_name='RemoteControlCommandRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='RemoteControlCommandRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='command', full_name='RemoteControlCommandRequest.command', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=13109,
serialized_end=13172,
)
_REMOTECONTROLCOMMANDRESPONSE = descriptor.Descriptor(
name='RemoteControlCommandResponse',
full_name='RemoteControlCommandResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='RemoteControlCommandResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=13175,
serialized_end=13354,
)
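# Cross-link the descriptors declared above. The generated code sets a
# field's message_type/enum_type and a nested enum's containing_type only
# after every Descriptor object exists, since these references may point
# at descriptors defined later in the file.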
_AUDIOREF.fields_by_name['store'].enum_type = _AUDIOREF_STORE
_AUDIOREF_STORE.containing_type = _AUDIOREF
_IMAGEREF.fields_by_name['store'].enum_type = _IMAGEREF_STORE
_IMAGEREF.fields_by_name['origin'].enum_type = _IMAGEREF_ORIGIN
_IMAGEREF_STORE.containing_type = _IMAGEREF
_IMAGEREF_ORIGIN.containing_type = _IMAGEREF
_TRACK.fields_by_name['audio_ref'].message_type = _AUDIOREF
_TRACK.fields_by_name['album_art_ref'].message_type = _IMAGEREF
_TRACK.fields_by_name['availability_status'].enum_type = _TRACK_AVAILABILITYSTATUS
_TRACK.fields_by_name['content_type'].enum_type = _TRACK_CONTENTTYPE
_TRACK.fields_by_name['channels'].enum_type = _TRACK_CHANNELS
_TRACK.fields_by_name['track_type'].enum_type = _TRACK_TRACKTYPE
_TRACK.fields_by_name['rating'].enum_type = _TRACK_RATING
_TRACK.fields_by_name['uits_metadata'].message_type = uits_pb2._UITSMETADATA
_TRACK.fields_by_name['original_content_type'].enum_type = _TRACK_CONTENTTYPE
_TRACK.fields_by_name['uploaded_uits'].message_type = _UPLOADEDUITSID3TAG
_TRACK_AVAILABILITYSTATUS.containing_type = _TRACK
_TRACK_CONTENTTYPE.containing_type = _TRACK
_TRACK_CHANNELS.containing_type = _TRACK
_TRACK_TRACKTYPE.containing_type = _TRACK
_TRACK_RATING.containing_type = _TRACK
_TRACKS.fields_by_name['track'].message_type = _TRACK
_PLAYLIST.fields_by_name['playlist_type'].enum_type = _PLAYLIST_PLAYLISTTYPE
_PLAYLIST.fields_by_name['playlist_art_ref'].message_type = _IMAGEREF
_PLAYLIST_PLAYLISTTYPE.containing_type = _PLAYLIST
_PLAYLISTENTRY.fields_by_name['relative_position_id_type'].enum_type = _PLAYLISTENTRY_RELATIVEPOSITIONIDTYPE
_PLAYLISTENTRY.fields_by_name['track'].message_type = _TRACK
_PLAYLISTENTRY_RELATIVEPOSITIONIDTYPE.containing_type = _PLAYLISTENTRY
_TRACKSEARCHRESTRICTION.fields_by_name['attribute'].enum_type = _TRACKSEARCHRESTRICTION_TRACKATTRIBUTE
_TRACKSEARCHRESTRICTION.fields_by_name['comparison_type'].enum_type = _TRACKSEARCHRESTRICTION_COMPARISONTYPE
_TRACKSEARCHRESTRICTION_TRACKATTRIBUTE.containing_type = _TRACKSEARCHRESTRICTION
_TRACKSEARCHRESTRICTION_COMPARISONTYPE.containing_type = _TRACKSEARCHRESTRICTION
_TRACKSEARCHRESTRICTIONSET.fields_by_name['type'].enum_type = _TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE
_TRACKSEARCHRESTRICTIONSET.fields_by_name['restriction'].message_type = _TRACKSEARCHRESTRICTION
_TRACKSEARCHRESTRICTIONSET.fields_by_name['sub_set'].message_type = _TRACKSEARCHRESTRICTIONSET
_TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE.containing_type = _TRACKSEARCHRESTRICTIONSET
_TRACKSORTORDER.fields_by_name['attribute'].enum_type = _TRACKSORTORDER_TRACKATTRIBUTE
_TRACKSORTORDER_TRACKATTRIBUTE.containing_type = _TRACKSORTORDER
_GETTRACKSREQUEST.fields_by_name['search_restriction'].message_type = _TRACKSEARCHRESTRICTION
_GETTRACKSREQUEST.fields_by_name['sort_order'].message_type = _TRACKSORTORDER
_GETTRACKSREQUEST.fields_by_name['restriction_set'].message_type = _TRACKSEARCHRESTRICTIONSET
_GETTRACKSREQUEST.fields_by_name['track_projection'].enum_type = _GETTRACKSREQUEST_TRACKPROJECTION
_GETTRACKSREQUEST_TRACKPROJECTION.containing_type = _GETTRACKSREQUEST
_GETTRACKSRESPONSE.fields_by_name['response_code'].enum_type = _GETTRACKSRESPONSE_RESPONSECODE
_GETTRACKSRESPONSE.fields_by_name['track'].message_type = _TRACK
_GETTRACKSRESPONSE_RESPONSECODE.containing_type = _GETTRACKSRESPONSE
_GETPLAYLISTENTRIESRESPONSE.fields_by_name['response_code'].enum_type = _GETPLAYLISTENTRIESRESPONSE_RESPONSECODE
_GETPLAYLISTENTRIESRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_GETPLAYLISTENTRIESRESPONSE_RESPONSECODE.containing_type = _GETPLAYLISTENTRIESRESPONSE
_PLAYLISTSORTORDER.fields_by_name['attribute'].enum_type = _PLAYLISTSORTORDER_PLAYLISTATTRIBUTE
_PLAYLISTSORTORDER_PLAYLISTATTRIBUTE.containing_type = _PLAYLISTSORTORDER
_GETPLAYLISTSREQUEST.fields_by_name['sort_order'].message_type = _PLAYLISTSORTORDER
_GETPLAYLISTSRESPONSE.fields_by_name['response_code'].enum_type = _GETPLAYLISTSRESPONSE_RESPONSECODE
_GETPLAYLISTSRESPONSE.fields_by_name['playlist'].message_type = _PLAYLIST
_GETPLAYLISTSRESPONSE_RESPONSECODE.containing_type = _GETPLAYLISTSRESPONSE
_BATCHLOOKUPREQUEST.fields_by_name['track'].message_type = _LOOKUPTRACKREQUEST
_BATCHLOOKUPREQUEST.fields_by_name['playlist'].message_type = _LOOKUPPLAYLISTREQUEST
_BATCHLOOKUPREQUEST.fields_by_name['metadata_type'].enum_type = _BATCHLOOKUPREQUEST_METADATATYPE
_BATCHLOOKUPREQUEST.fields_by_name['playlist_entry'].message_type = _LOOKUPPLAYLISTENTRYREQUEST
_BATCHLOOKUPREQUEST_METADATATYPE.containing_type = _BATCHLOOKUPREQUEST
_BATCHLOOKUPRESPONSE.fields_by_name['track'].message_type = _TRACK
_BATCHLOOKUPRESPONSE.fields_by_name['playlist'].message_type = _PLAYLIST
_BATCHLOOKUPRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_MUTATETRACKREQUEST.fields_by_name['create_track'].message_type = _TRACK
_MUTATETRACKREQUEST.fields_by_name['update_track'].message_type = _TRACK
_MUTATERESPONSE.fields_by_name['response_code'].enum_type = _MUTATERESPONSE_MUTATERESPONSECODE
_MUTATERESPONSE.fields_by_name['availability_status'].enum_type = _MUTATERESPONSE_AVAILABILITYSTATUS
_MUTATERESPONSE_MUTATERESPONSECODE.containing_type = _MUTATERESPONSE
_MUTATERESPONSE_AVAILABILITYSTATUS.containing_type = _MUTATERESPONSE
_BATCHMUTATETRACKSREQUEST.fields_by_name['track_mutation'].message_type = _MUTATETRACKREQUEST
_BATCHMUTATETRACKSRESPONSE.fields_by_name['response_code'].enum_type = _BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE
_BATCHMUTATETRACKSRESPONSE.fields_by_name['mutate_response'].message_type = _MUTATERESPONSE
_BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE.containing_type = _BATCHMUTATETRACKSRESPONSE
_MUTATEPLAYLISTREQUEST.fields_by_name['create_playlist'].message_type = _PLAYLIST
_MUTATEPLAYLISTREQUEST.fields_by_name['update_playlist'].message_type = _PLAYLIST
_MUTATEPLAYLISTREQUEST.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_BATCHMUTATEPLAYLISTSREQUEST.fields_by_name['playlist_mutation'].message_type = _MUTATEPLAYLISTREQUEST
_BATCHMUTATEPLAYLISTSRESPONSE.fields_by_name['response_code'].enum_type = _BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE
_BATCHMUTATEPLAYLISTSRESPONSE.fields_by_name['mutate_response'].message_type = _MUTATERESPONSE
_BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE.containing_type = _BATCHMUTATEPLAYLISTSRESPONSE
_MUTATEPLAYLISTENTRYREQUEST.fields_by_name['create_playlist_entry'].message_type = _PLAYLISTENTRY
_MUTATEPLAYLISTENTRYREQUEST.fields_by_name['update_playlist_entry'].message_type = _PLAYLISTENTRY
_MUTATEPLAYLISTENTRYREQUEST.fields_by_name['delete_playlist_entry'].message_type = _PLAYLISTENTRY
_BATCHMUTATEPLAYLISTENTRIESREQUEST.fields_by_name['playlist_entry_mutation'].message_type = _MUTATEPLAYLISTENTRYREQUEST
_BATCHMUTATEPLAYLISTENTRIESRESPONSE.fields_by_name['response_code'].enum_type = _BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE
_BATCHMUTATEPLAYLISTENTRIESRESPONSE.fields_by_name['mutate_response'].message_type = _MUTATERESPONSE
_BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE.containing_type = _BATCHMUTATEPLAYLISTENTRIESRESPONSE
_MAGICPLAYLISTSEED.fields_by_name['seed_type'].enum_type = _MAGICPLAYLISTSEED_SEEDTYPE
_MAGICPLAYLISTSEED_SEEDTYPE.containing_type = _MAGICPLAYLISTSEED
_MAGICPLAYLISTREQUEST.fields_by_name['seed'].message_type = _MAGICPLAYLISTSEED
_MAGICPLAYLISTRESPONSE.fields_by_name['playlist'].message_type = _PLAYLIST
_MAGICPLAYLISTRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_ALBUM.fields_by_name['album_art'].message_type = _IMAGEREF
_ALBUMSORTORDER.fields_by_name['attribute'].enum_type = _ALBUMSORTORDER_ALBUMATTRIBUTE
_ALBUMSORTORDER_ALBUMATTRIBUTE.containing_type = _ALBUMSORTORDER
_GETALBUMSREQUEST.fields_by_name['sort_order'].message_type = _ALBUMSORTORDER
_GETALBUMSRESPONSE.fields_by_name['album'].message_type = _ALBUM
_ARTIST.fields_by_name['album'].message_type = _ALBUM
_GETARTISTSREQUEST.fields_by_name['sort_order'].message_type = _ARTISTSORTORDER
_GETARTISTSRESPONSE.fields_by_name['artist'].message_type = _ARTIST
_MUSICGENRE.fields_by_name['album'].message_type = _ALBUM
_GETGENRESREQUEST.fields_by_name['sort_order'].message_type = _GENRESORTORDER
_GETGENRESRESPONSE.fields_by_name['genre'].message_type = _MUSICGENRE
_GETDYNAMICPLAYLISTENTRIESREQUEST.fields_by_name['playlist_entries_type'].enum_type = _GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE
_GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE.containing_type = _GETDYNAMICPLAYLISTENTRIESREQUEST
_GETDYNAMICPLAYLISTENTRIESRESPONSE.fields_by_name['response_code'].enum_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE
_GETDYNAMICPLAYLISTENTRIESRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_GETDYNAMICPLAYLISTENTRIESRESPONSE.fields_by_name['playlist_entries_type'].enum_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE
_GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE.containing_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE
_GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE.containing_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE
_TRACKTYPEAGGREGATE.fields_by_name['track_type_value'].enum_type = _TRACKTYPEAGGREGATE_TRACKTYPE
_TRACKTYPEAGGREGATE_TRACKTYPE.containing_type = _TRACKTYPEAGGREGATE
_GETAGGREGATIONSBYTRACKTYPERESPONSE.fields_by_name['track_type_aggregate'].message_type = _TRACKTYPEAGGREGATE
_AVAILABILITYSTATUSAGGREGATE.fields_by_name['availability_status'].enum_type = _AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS
_AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS.containing_type = _AVAILABILITYSTATUSAGGREGATE
_GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE.fields_by_name['availability_status_aggregate'].message_type = _AVAILABILITYSTATUSAGGREGATE
_ADDPROMOTRACKSRESPONSE.fields_by_name['track'].message_type = _TRACK
_PLAYLISTAGGREGATE.fields_by_name['album_art'].message_type = _IMAGEREF
_GETPLAYLISTAGGREGATIONSRESPONSE.fields_by_name['playlist_aggregate'].message_type = _PLAYLISTAGGREGATE
_REMOTECONTROLCOMMANDRESPONSE.fields_by_name['response_code'].enum_type = _REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE
_REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE.containing_type = _REMOTECONTROLCOMMANDRESPONSE
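# Register each top-level message descriptor on the module's FileDescriptor by
# name, so callers can look descriptors up via
# DESCRIPTOR.message_types_by_name['Track'] and friends.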
DESCRIPTOR.message_types_by_name['AudioRef'] = _AUDIOREF
DESCRIPTOR.message_types_by_name['ImageRef'] = _IMAGEREF
DESCRIPTOR.message_types_by_name['UploadedUitsId3Tag'] = _UPLOADEDUITSID3TAG
DESCRIPTOR.message_types_by_name['Track'] = _TRACK
DESCRIPTOR.message_types_by_name['Tracks'] = _TRACKS
DESCRIPTOR.message_types_by_name['Playlist'] = _PLAYLIST
DESCRIPTOR.message_types_by_name['PlaylistEntry'] = _PLAYLISTENTRY
DESCRIPTOR.message_types_by_name['TrackSearchRestriction'] = _TRACKSEARCHRESTRICTION
DESCRIPTOR.message_types_by_name['TrackSearchRestrictionSet'] = _TRACKSEARCHRESTRICTIONSET
DESCRIPTOR.message_types_by_name['TrackSortOrder'] = _TRACKSORTORDER
DESCRIPTOR.message_types_by_name['GetTracksRequest'] = _GETTRACKSREQUEST
DESCRIPTOR.message_types_by_name['GetTracksResponse'] = _GETTRACKSRESPONSE
DESCRIPTOR.message_types_by_name['GetPlaylistEntriesRequest'] = _GETPLAYLISTENTRIESREQUEST
DESCRIPTOR.message_types_by_name['GetPlaylistEntriesResponse'] = _GETPLAYLISTENTRIESRESPONSE
DESCRIPTOR.message_types_by_name['PlaylistSortOrder'] = _PLAYLISTSORTORDER
DESCRIPTOR.message_types_by_name['GetPlaylistsRequest'] = _GETPLAYLISTSREQUEST
DESCRIPTOR.message_types_by_name['GetPlaylistsResponse'] = _GETPLAYLISTSRESPONSE
DESCRIPTOR.message_types_by_name['LookupTrackRequest'] = _LOOKUPTRACKREQUEST
DESCRIPTOR.message_types_by_name['LookupPlaylistEntryRequest'] = _LOOKUPPLAYLISTENTRYREQUEST
DESCRIPTOR.message_types_by_name['LookupPlaylistRequest'] = _LOOKUPPLAYLISTREQUEST
DESCRIPTOR.message_types_by_name['BatchLookupRequest'] = _BATCHLOOKUPREQUEST
DESCRIPTOR.message_types_by_name['BatchLookupResponse'] = _BATCHLOOKUPRESPONSE
DESCRIPTOR.message_types_by_name['MutateTrackRequest'] = _MUTATETRACKREQUEST
DESCRIPTOR.message_types_by_name['MutateResponse'] = _MUTATERESPONSE
DESCRIPTOR.message_types_by_name['BatchMutateTracksRequest'] = _BATCHMUTATETRACKSREQUEST
DESCRIPTOR.message_types_by_name['BatchMutateTracksResponse'] = _BATCHMUTATETRACKSRESPONSE
DESCRIPTOR.message_types_by_name['MutatePlaylistRequest'] = _MUTATEPLAYLISTREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistsRequest'] = _BATCHMUTATEPLAYLISTSREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistsResponse'] = _BATCHMUTATEPLAYLISTSRESPONSE
DESCRIPTOR.message_types_by_name['MutatePlaylistEntryRequest'] = _MUTATEPLAYLISTENTRYREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistEntriesRequest'] = _BATCHMUTATEPLAYLISTENTRIESREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistEntriesResponse'] = _BATCHMUTATEPLAYLISTENTRIESRESPONSE
DESCRIPTOR.message_types_by_name['MagicPlaylistSeed'] = _MAGICPLAYLISTSEED
DESCRIPTOR.message_types_by_name['MagicPlaylistRequest'] = _MAGICPLAYLISTREQUEST
DESCRIPTOR.message_types_by_name['MagicPlaylistResponse'] = _MAGICPLAYLISTRESPONSE
DESCRIPTOR.message_types_by_name['FlushLockerRequest'] = _FLUSHLOCKERREQUEST
DESCRIPTOR.message_types_by_name['FlushLockerResponse'] = _FLUSHLOCKERRESPONSE
DESCRIPTOR.message_types_by_name['LockerNotification'] = _LOCKERNOTIFICATION
DESCRIPTOR.message_types_by_name['Album'] = _ALBUM
DESCRIPTOR.message_types_by_name['AlbumSortOrder'] = _ALBUMSORTORDER
DESCRIPTOR.message_types_by_name['GetAlbumsRequest'] = _GETALBUMSREQUEST
DESCRIPTOR.message_types_by_name['GetAlbumsResponse'] = _GETALBUMSRESPONSE
DESCRIPTOR.message_types_by_name['Artist'] = _ARTIST
DESCRIPTOR.message_types_by_name['ArtistSortOrder'] = _ARTISTSORTORDER
DESCRIPTOR.message_types_by_name['GetArtistsRequest'] = _GETARTISTSREQUEST
DESCRIPTOR.message_types_by_name['GetArtistsResponse'] = _GETARTISTSRESPONSE
DESCRIPTOR.message_types_by_name['MusicGenre'] = _MUSICGENRE
DESCRIPTOR.message_types_by_name['GenreSortOrder'] = _GENRESORTORDER
DESCRIPTOR.message_types_by_name['GetGenresRequest'] = _GETGENRESREQUEST
DESCRIPTOR.message_types_by_name['GetGenresResponse'] = _GETGENRESRESPONSE
DESCRIPTOR.message_types_by_name['GetDynamicPlaylistEntriesRequest'] = _GETDYNAMICPLAYLISTENTRIESREQUEST
DESCRIPTOR.message_types_by_name['GetDynamicPlaylistEntriesResponse'] = _GETDYNAMICPLAYLISTENTRIESRESPONSE
DESCRIPTOR.message_types_by_name['GetAggregationsByTrackTypeRequest'] = _GETAGGREGATIONSBYTRACKTYPEREQUEST
DESCRIPTOR.message_types_by_name['TrackTypeAggregate'] = _TRACKTYPEAGGREGATE
DESCRIPTOR.message_types_by_name['GetAggregationsByTrackTypeResponse'] = _GETAGGREGATIONSBYTRACKTYPERESPONSE
DESCRIPTOR.message_types_by_name['GetAggregationsByAvailabilityStatusRequest'] = _GETAGGREGATIONSBYAVAILABILITYSTATUSREQUEST
DESCRIPTOR.message_types_by_name['AvailabilityStatusAggregate'] = _AVAILABILITYSTATUSAGGREGATE
DESCRIPTOR.message_types_by_name['GetAggregationsByAvailabilityStatusResponse'] = _GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE
DESCRIPTOR.message_types_by_name['AddPromoTracksRequest'] = _ADDPROMOTRACKSREQUEST
DESCRIPTOR.message_types_by_name['AddPromoTracksResponse'] = _ADDPROMOTRACKSRESPONSE
DESCRIPTOR.message_types_by_name['GetPlaylistAggregationsRequest'] = _GETPLAYLISTAGGREGATIONSREQUEST
DESCRIPTOR.message_types_by_name['PlaylistAggregate'] = _PLAYLISTAGGREGATE
DESCRIPTOR.message_types_by_name['GetPlaylistAggregationsResponse'] = _GETPLAYLISTAGGREGATIONSRESPONSE
DESCRIPTOR.message_types_by_name['RemoteControlCommandRequest'] = _REMOTECONTROLCOMMANDREQUEST
DESCRIPTOR.message_types_by_name['RemoteControlCommandResponse'] = _REMOTECONTROLCOMMANDRESPONSE
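# The classes below carry no hand-written logic: under the old proto2 Python
# API, the GeneratedProtocolMessageType metaclass reads each class's DESCRIPTOR
# at class-creation time and injects the field properties, nested enum
# constants, and the serialization machinery.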
class AudioRef(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _AUDIOREF
# @@protoc_insertion_point(class_scope:AudioRef)
class ImageRef(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _IMAGEREF
# @@protoc_insertion_point(class_scope:ImageRef)
class UploadedUitsId3Tag(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _UPLOADEDUITSID3TAG
# @@protoc_insertion_point(class_scope:UploadedUitsId3Tag)
class Track(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACK
# @@protoc_insertion_point(class_scope:Track)
class Tracks(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKS
# @@protoc_insertion_point(class_scope:Tracks)
class Playlist(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLIST
# @@protoc_insertion_point(class_scope:Playlist)
class PlaylistEntry(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLISTENTRY
# @@protoc_insertion_point(class_scope:PlaylistEntry)
class TrackSearchRestriction(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKSEARCHRESTRICTION
# @@protoc_insertion_point(class_scope:TrackSearchRestriction)
class TrackSearchRestrictionSet(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKSEARCHRESTRICTIONSET
# @@protoc_insertion_point(class_scope:TrackSearchRestrictionSet)
class TrackSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKSORTORDER
# @@protoc_insertion_point(class_scope:TrackSortOrder)
class GetTracksRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETTRACKSREQUEST
# @@protoc_insertion_point(class_scope:GetTracksRequest)
class GetTracksResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETTRACKSRESPONSE
# @@protoc_insertion_point(class_scope:GetTracksResponse)
class GetPlaylistEntriesRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTENTRIESREQUEST
# @@protoc_insertion_point(class_scope:GetPlaylistEntriesRequest)
class GetPlaylistEntriesResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTENTRIESRESPONSE
# @@protoc_insertion_point(class_scope:GetPlaylistEntriesResponse)
class PlaylistSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLISTSORTORDER
# @@protoc_insertion_point(class_scope:PlaylistSortOrder)
class GetPlaylistsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTSREQUEST
# @@protoc_insertion_point(class_scope:GetPlaylistsRequest)
class GetPlaylistsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTSRESPONSE
# @@protoc_insertion_point(class_scope:GetPlaylistsResponse)
class LookupTrackRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOOKUPTRACKREQUEST
# @@protoc_insertion_point(class_scope:LookupTrackRequest)
class LookupPlaylistEntryRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOOKUPPLAYLISTENTRYREQUEST
# @@protoc_insertion_point(class_scope:LookupPlaylistEntryRequest)
class LookupPlaylistRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOOKUPPLAYLISTREQUEST
# @@protoc_insertion_point(class_scope:LookupPlaylistRequest)
class BatchLookupRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHLOOKUPREQUEST
# @@protoc_insertion_point(class_scope:BatchLookupRequest)
class BatchLookupResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHLOOKUPRESPONSE
# @@protoc_insertion_point(class_scope:BatchLookupResponse)
class MutateTrackRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATETRACKREQUEST
# @@protoc_insertion_point(class_scope:MutateTrackRequest)
class MutateResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATERESPONSE
# @@protoc_insertion_point(class_scope:MutateResponse)
class BatchMutateTracksRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATETRACKSREQUEST
# @@protoc_insertion_point(class_scope:BatchMutateTracksRequest)
class BatchMutateTracksResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATETRACKSRESPONSE
# @@protoc_insertion_point(class_scope:BatchMutateTracksResponse)
class MutatePlaylistRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATEPLAYLISTREQUEST
# @@protoc_insertion_point(class_scope:MutatePlaylistRequest)
class BatchMutatePlaylistsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTSREQUEST
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistsRequest)
class BatchMutatePlaylistsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTSRESPONSE
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistsResponse)
class MutatePlaylistEntryRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATEPLAYLISTENTRYREQUEST
# @@protoc_insertion_point(class_scope:MutatePlaylistEntryRequest)
class BatchMutatePlaylistEntriesRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTENTRIESREQUEST
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistEntriesRequest)
class BatchMutatePlaylistEntriesResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTENTRIESRESPONSE
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistEntriesResponse)
class MagicPlaylistSeed(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MAGICPLAYLISTSEED
# @@protoc_insertion_point(class_scope:MagicPlaylistSeed)
class MagicPlaylistRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MAGICPLAYLISTREQUEST
# @@protoc_insertion_point(class_scope:MagicPlaylistRequest)
class MagicPlaylistResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MAGICPLAYLISTRESPONSE
# @@protoc_insertion_point(class_scope:MagicPlaylistResponse)
class FlushLockerRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _FLUSHLOCKERREQUEST
# @@protoc_insertion_point(class_scope:FlushLockerRequest)
class FlushLockerResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _FLUSHLOCKERRESPONSE
# @@protoc_insertion_point(class_scope:FlushLockerResponse)
class LockerNotification(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOCKERNOTIFICATION
# @@protoc_insertion_point(class_scope:LockerNotification)
class Album(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ALBUM
# @@protoc_insertion_point(class_scope:Album)
class AlbumSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ALBUMSORTORDER
# @@protoc_insertion_point(class_scope:AlbumSortOrder)
class GetAlbumsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETALBUMSREQUEST
# @@protoc_insertion_point(class_scope:GetAlbumsRequest)
class GetAlbumsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETALBUMSRESPONSE
# @@protoc_insertion_point(class_scope:GetAlbumsResponse)
class Artist(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ARTIST
# @@protoc_insertion_point(class_scope:Artist)
class ArtistSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ARTISTSORTORDER
# @@protoc_insertion_point(class_scope:ArtistSortOrder)
class GetArtistsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETARTISTSREQUEST
# @@protoc_insertion_point(class_scope:GetArtistsRequest)
class GetArtistsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETARTISTSRESPONSE
# @@protoc_insertion_point(class_scope:GetArtistsResponse)
class MusicGenre(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUSICGENRE
# @@protoc_insertion_point(class_scope:MusicGenre)
class GenreSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GENRESORTORDER
# @@protoc_insertion_point(class_scope:GenreSortOrder)
class GetGenresRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETGENRESREQUEST
# @@protoc_insertion_point(class_scope:GetGenresRequest)
class GetGenresResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETGENRESRESPONSE
# @@protoc_insertion_point(class_scope:GetGenresResponse)
class GetDynamicPlaylistEntriesRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETDYNAMICPLAYLISTENTRIESREQUEST
# @@protoc_insertion_point(class_scope:GetDynamicPlaylistEntriesRequest)
class GetDynamicPlaylistEntriesResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETDYNAMICPLAYLISTENTRIESRESPONSE
# @@protoc_insertion_point(class_scope:GetDynamicPlaylistEntriesResponse)
class GetAggregationsByTrackTypeRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYTRACKTYPEREQUEST
# @@protoc_insertion_point(class_scope:GetAggregationsByTrackTypeRequest)
class TrackTypeAggregate(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKTYPEAGGREGATE
# @@protoc_insertion_point(class_scope:TrackTypeAggregate)
class GetAggregationsByTrackTypeResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYTRACKTYPERESPONSE
# @@protoc_insertion_point(class_scope:GetAggregationsByTrackTypeResponse)
class GetAggregationsByAvailabilityStatusRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYAVAILABILITYSTATUSREQUEST
# @@protoc_insertion_point(class_scope:GetAggregationsByAvailabilityStatusRequest)
class AvailabilityStatusAggregate(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _AVAILABILITYSTATUSAGGREGATE
# @@protoc_insertion_point(class_scope:AvailabilityStatusAggregate)
class GetAggregationsByAvailabilityStatusResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE
# @@protoc_insertion_point(class_scope:GetAggregationsByAvailabilityStatusResponse)
class AddPromoTracksRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ADDPROMOTRACKSREQUEST
# @@protoc_insertion_point(class_scope:AddPromoTracksRequest)
class AddPromoTracksResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ADDPROMOTRACKSRESPONSE
# @@protoc_insertion_point(class_scope:AddPromoTracksResponse)
class GetPlaylistAggregationsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTAGGREGATIONSREQUEST
# @@protoc_insertion_point(class_scope:GetPlaylistAggregationsRequest)
class PlaylistAggregate(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLISTAGGREGATE
# @@protoc_insertion_point(class_scope:PlaylistAggregate)
class GetPlaylistAggregationsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTAGGREGATIONSRESPONSE
# @@protoc_insertion_point(class_scope:GetPlaylistAggregationsResponse)
class RemoteControlCommandRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _REMOTECONTROLCOMMANDREQUEST
# @@protoc_insertion_point(class_scope:RemoteControlCommandRequest)
class RemoteControlCommandResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _REMOTECONTROLCOMMANDRESPONSE
# @@protoc_insertion_point(class_scope:RemoteControlCommandResponse)
# @@protoc_insertion_point(module_scope)
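# A minimal usage sketch (hand-written; not part of the protoc output): the
# generated classes behave like any proto2 message, so a Track can be built,
# encoded to the wire format, and parsed back:
#
#   track = Track()
#   track.title = u'Example'
#   track.rating = Track.FIVE_STARS    # nested enum values become class attrs
#   data = track.SerializeToString()   # encode to the binary wire format
#
#   parsed = Track()
#   parsed.ParseFromString(data)       # decode; parsed.title == u'Example'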
| 40.620848 | 23,106 | 0.745915 |
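# What follows appears to be a second copy of the same generated module with
# its comments stripped; it restarts from the imports and the FileDescriptor.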
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf import descriptor_pb2
import uits_pb2
DESCRIPTOR = descriptor.FileDescriptor(
name='locker.proto',
package='',
serialized_pb='\n\x0clocker.proto\x1a\nuits.proto\"\xf8\x01\n\x08\x41udioRef\x12\x1e\n\x05store\x18\x01 \x02(\x0e\x32\x0f.AudioRef.Store\x12\x0b\n\x03ref\x18\x02 \x02(\x0c\x12\x0b\n\x03url\x18\x04 \x01(\t\x12\x10\n\x08\x62it_rate\x18\x05 \x01(\x05\x12\x13\n\x0bsample_rate\x18\x06 \x01(\x05\x12\x14\n\x0c\x64ownloadable\x18\x07 \x01(\x08\x12\x17\n\x0f\x64uration_millis\x18\x08 \x01(\x03\x12\x19\n\x11rematch_timestamp\x18\t \x01(\x03\x12\x1e\n\x16invalid_due_to_wipeout\x18\n \x01(\x08\"!\n\x05Store\x12\r\n\tBLOBSTORE\x10\x01\x12\t\n\x05SM_V2\x10\x02\"\xd1\x01\n\x08ImageRef\x12\x1e\n\x05store\x18\x01 \x01(\x0e\x32\x0f.ImageRef.Store\x12\r\n\x05width\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x0b\n\x03url\x18\x06 \x01(\t\x12\x1e\n\x16invalid_due_to_wipeout\x18\x07 \x01(\x08\x12 \n\x06origin\x18\x08 \x01(\x0e\x32\x10.ImageRef.Origin\"\x14\n\x05Store\x12\x0b\n\x07SHOEBOX\x10\x03\"!\n\x06Origin\x12\x0c\n\x08PERSONAL\x10\x01\x12\t\n\x05STORE\x10\x02\"1\n\x12UploadedUitsId3Tag\x12\r\n\x05owner\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\"\x8c\x10\n\x05Track\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tclient_id\x18\x02 \x01(\t\x12\x1a\n\x12\x63reation_timestamp\x18\x03 \x01(\x03\x12\x1f\n\x17last_modified_timestamp\x18\x04 \x01(\x03\x12\x16\n\x07\x64\x65leted\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\r\n\x05title\x18\x06 \x01(\t\x12\x0e\n\x06\x61rtist\x18\x07 \x01(\t\x12\x13\n\x0b\x61rtist_hash\x18. \x01(\x03\x12\x10\n\x08\x63omposer\x18\x08 \x01(\t\x12\r\n\x05\x61lbum\x18\t \x01(\t\x12\x14\n\x0c\x61lbum_artist\x18\n \x01(\t\x12\x17\n\x0f\x63\x61nonical_album\x18\x38 \x01(\t\x12\x18\n\x10\x63\x61nonical_artist\x18\x39 \x01(\t\x12\x1d\n\x15\x63\x61nonical_genre_album\x18: \x01(\t\x12\x0c\n\x04year\x18\x0b \x01(\x05\x12\x0f\n\x07\x63omment\x18\x0c \x01(\t\x12\x14\n\x0ctrack_number\x18\r \x01(\x05\x12\r\n\x05genre\x18\x0e \x01(\t\x12\x17\n\x0f\x64uration_millis\x18\x0f \x01(\x03\x12\x18\n\x10\x62\x65\x61ts_per_minute\x18\x10 \x01(\x05\x12\x19\n\x11original_bit_rate\x18, \x01(\x05\x12\x1c\n\taudio_ref\x18\x11 \x03(\x0b\x32\t.AudioRef\x12 \n\ralbum_art_ref\x18\x12 \x03(\x0b\x32\t.ImageRef\x12\x36\n\x13\x61vailability_status\x18\x13 \x01(\x0e\x32\x19.Track.AvailabilityStatus\x12\x12\n\nplay_count\x18\x14 \x01(\x05\x12(\n\x0c\x63ontent_type\x18\x19 \x01(\x0e\x32\x12.Track.ContentType\x12\x19\n\x11total_track_count\x18\x1a \x01(\x05\x12\x13\n\x0b\x64isc_number\x18\x1b \x01(\x05\x12\x18\n\x10total_disc_count\x18\x1c \x01(\x05\x12!\n\x08\x63hannels\x18\x1d \x01(\x0e\x32\x0f.Track.Channels\x12$\n\ntrack_type\x18\x1e \x01(\x0e\x32\x10.Track.TrackType\x12\x1e\n\x16use_single_server_copy\x18; \x01(\x08\x12\x1d\n\x06rating\x18\x1f \x01(\x0e\x32\r.Track.Rating\x12\x16\n\x0e\x65stimated_size\x18 \x01(\x03\x12\x10\n\x08store_id\x18! 
\x01(\t\x12\x12\n\nmetajam_id\x18\" \x01(\t\x12 \n\x15metajam_id_confidence\x18+ \x01(\x01:\x01\x30\x12\x0c\n\x04uits\x18# \x01(\t\x12$\n\ruits_metadata\x18( \x01(\x0b\x32\r.UitsMetadata\x12\x13\n\x0b\x63ompilation\x18$ \x01(\x08\x12\x19\n\x11\x63lient_date_added\x18% \x01(\x03\x12\x18\n\x10recent_timestamp\x18& \x01(\x03\x12\x1d\n\x0e\x64o_not_rematch\x18\' \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x13\x66rom_album_purchase\x18) \x01(\x08\x12\x18\n\x10\x61lbum_metajam_id\x18* \x01(\t\x12\x16\n\x0etransaction_id\x18- \x01(\t\x12\x13\n\x0b\x64\x65\x62ug_track\x18/ \x01(\x08\x12\x18\n\x10normalized_title\x18\x30 \x01(\t\x12\x19\n\x11normalized_artist\x18\x31 \x01(\t\x12\x18\n\x10normalized_album\x18\x32 \x01(\t\x12\x1f\n\x17normalized_album_artist\x18\x33 \x01(\t\x12\"\n\x1anormalized_canonical_album\x18\x36 \x01(\t\x12#\n\x1bnormalized_canonical_artist\x18\x37 \x01(\t\x12\x13\n\x0buploader_id\x18\x34 \x01(\t\x12\x17\n\x0f\x63lient_album_id\x18\x35 \x01(\t\x12\x18\n\x10label_owner_code\x18< \x01(\t\x12\x31\n\x15original_content_type\x18= \x01(\x0e\x32\x12.Track.ContentType\x12*\n\ruploaded_uits\x18G \x03(\x0b\x32\x13.UploadedUitsId3Tag\"\x86\x01\n\x12\x41vailabilityStatus\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07MATCHED\x10\x02\x12\x14\n\x10UPLOAD_REQUESTED\x10\x03\x12\r\n\tAVAILABLE\x10\x04\x12\x12\n\x0e\x46ORCE_REUPLOAD\x10\x05\x12\x1d\n\x19UPLOAD_PERMANENTLY_FAILED\x10\x06\"W\n\x0b\x43ontentType\x12\x07\n\x03MP3\x10\x01\x12\x07\n\x03M4A\x10\x02\x12\x07\n\x03\x41\x41\x43\x10\x03\x12\x08\n\x04\x46LAC\x10\x04\x12\x07\n\x03OGG\x10\x05\x12\x07\n\x03WMA\x10\x06\x12\x07\n\x03M4P\x10\x07\x12\x08\n\x04\x41LAC\x10\x08\" \n\x08\x43hannels\x12\x08\n\x04MONO\x10\x01\x12\n\n\x06STEREO\x10\x02\"\x8b\x01\n\tTrackType\x12\x11\n\rMATCHED_TRACK\x10\x01\x12\x13\n\x0fUNMATCHED_TRACK\x10\x02\x12\x0f\n\x0bLOCAL_TRACK\x10\x03\x12\x13\n\x0fPURCHASED_TRACK\x10\x04\x12\x1f\n\x1bMETADATA_ONLY_MATCHED_TRACK\x10\x05\x12\x0f\n\x0bPROMO_TRACK\x10\x06\"e\n\x06Rating\x12\r\n\tNOT_RATED\x10\x01\x12\x0c\n\x08ONE_STAR\x10\x02\x12\r\n\tTWO_STARS\x10\x03\x12\x0f\n\x0bTHREE_STARS\x10\x04\x12\x0e\n\nFOUR_STARS\x10\x05\x12\x0e\n\nFIVE_STARS\x10\x06\"\x1f\n\x06Tracks\x12\x15\n\x05track\x18\x01 \x03(\x0b\x32\x06.Track\"\xb4\x02\n\x08Playlist\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tclient_id\x18\x02 \x01(\t\x12\x1a\n\x12\x63reation_timestamp\x18\x03 \x01(\x03\x12\x1f\n\x17last_modified_timestamp\x18\x04 \x01(\x03\x12\x16\n\x07\x64\x65leted\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x0c\n\x04name\x18\x06 \x01(\t\x12-\n\rplaylist_type\x18\x07 \x01(\x0e\x32\x16.Playlist.PlaylistType\x12r.EnumValueDescriptor(
# NOTE: the serialized_pb literal above is wrapped and truncated in this copy,
# and everything between it and this point (the rest of the blob, the nested
# enum descriptors for AudioRef/ImageRef/Track/Playlist/PlaylistEntry, and the
# header of this enum) is missing. The ComparisonType header and its leading
# 'EQUAL' value are reconstructed below from the cross-references earlier in
# the file; the 'EQUAL' numbering follows the pattern of the surviving values.
_TRACKSEARCHRESTRICTION_COMPARISONTYPE = descriptor.EnumDescriptor(
  name='ComparisonType',
  full_name='TrackSearchRestriction.ComparisonType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    descriptor.EnumValueDescriptor(
      name='EQUAL', index=0, number=0,
      options=None,
      type=None),
    descriptor.EnumValueDescriptor(
      name='NOT_EQUAL', index=1, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GREATER_THAN', index=2, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GREATER_EQUAL', index=3, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='LESS_THAN', index=4, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='LESS_EQUAL', index=5, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PARTIAL_MATCH', index=6, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=3718,
serialized_end=3847,
)
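# In each EnumDescriptor, 'index' is a value's position within the descriptor
# while 'number' is the integer actually written on the wire;
# serialized_start/serialized_end are byte offsets of the definition inside
# the serialized_pb blob.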
_TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE = descriptor.EnumDescriptor(
name='RestrictionSetType',
full_name='TrackSearchRestrictionSet.RestrictionSetType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='AND', index=0, number=0,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='OR', index=1, number=1,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4031,
serialized_end=4068,
)
_TRACKSORTORDER_TRACKATTRIBUTE = descriptor.EnumDescriptor(
name='TrackAttribute',
full_name='TrackSortOrder.TrackAttribute',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='LAST_MODIFIED_TIME', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ARTIST', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALBUM', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TITLE', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TRACK_NUMBER', index=4, number=6,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PLAY_COUNT', index=5, number=9,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='DURATION_MILLIS', index=6, number=10,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RATING', index=7, number=11,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CREATION_TIME', index=8, number=12,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4167,
serialized_end=4327,
)
_GETTRACKSREQUEST_TRACKPROJECTION = descriptor.EnumDescriptor(
name='TrackProjection',
full_name='GetTracksRequest.TrackProjection',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='FULL', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FRONTEND_VIEW', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4666,
serialized_end=4712,
)
_GETTRACKSRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetTracksResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_MODIFIED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GONE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4876,
serialized_end=4926,
)
_GETPLAYLISTENTRIESRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetPlaylistEntriesResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_MODIFIED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GONE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4876,
serialized_end=4926,
)
_PLAYLISTSORTORDER_PLAYLISTATTRIBUTE = descriptor.EnumDescriptor(
name='PlaylistAttribute',
full_name='PlaylistSortOrder.PlaylistAttribute',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='LAST_MODIFIED_TIME', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='TITLE', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CREATION_TIME', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RECENT_TIMESTAMP', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=5538,
serialized_end=5633,
)
_GETPLAYLISTSRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetPlaylistsResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_MODIFIED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='GONE', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=4876,
serialized_end=4926,
)
_BATCHLOOKUPREQUEST_METADATATYPE = descriptor.EnumDescriptor(
name='MetadataType',
full_name='BatchLookupRequest.MetadataType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='TRACK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PLAYLIST', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PLAYLIST_ENTRY', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=6467,
serialized_end=6526,
)
_MUTATERESPONSE_MUTATERESPONSECODE = descriptor.EnumDescriptor(
name='MutateResponseCode',
full_name='MutateResponse.MutateResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='INVALID_REQUEST', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='METADATA_TOO_LARGE', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7047,
serialized_end=7134,
)
_MUTATERESPONSE_AVAILABILITYSTATUS = descriptor.EnumDescriptor(
name='AvailabilityStatus',
full_name='MutateResponse.AvailabilityStatus',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PENDING', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='MATCHED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_REQUESTED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='AVAILABLE', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FORCE_REUPLOAD', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_PERMANENTLY_FAILED', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2101,
serialized_end=2235,
)
_BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE = descriptor.EnumDescriptor(
name='BatchMutateTracksResponseCode',
full_name='BatchMutateTracksResponse.BatchMutateTracksResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7634,
serialized_end=7687,
)
_BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE = descriptor.EnumDescriptor(
name='BatchMutatePlaylistsResponseCode',
full_name='BatchMutatePlaylistsResponse.BatchMutatePlaylistsResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=8319,
serialized_end=8375,
)
_BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE = descriptor.EnumDescriptor(
name='BatchMutatePlaylistEntriesResponseCode',
full_name='BatchMutatePlaylistEntriesResponse.BatchMutatePlaylistEntriesResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CONFLICT', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9033,
serialized_end=9095,
)
_MAGICPLAYLISTSEED_SEEDTYPE = descriptor.EnumDescriptor(
name='SeedType',
full_name='MagicPlaylistSeed.SeedType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='TRACK', index=0, number=0,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ARTIST', index=1, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='ALBUM', index=2, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='OPAQUE_SEED', index=3, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9181,
serialized_end=9242,
)
_ALBUMSORTORDER_ALBUMATTRIBUTE = descriptor.EnumDescriptor(
name='AlbumAttribute',
full_name='AlbumSortOrder.AlbumAttribute',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='LAST_PLAYED_TIME', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NAME', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='CREATION_TIME', index=2, number=3,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=10312,
serialized_end=10379,
)
_GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE = descriptor.EnumDescriptor(
name='DynamicPlaylistEntriesType',
full_name='GetDynamicPlaylistEntriesRequest.DynamicPlaylistEntriesType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PURCHASED', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='THUMBS_UP', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RECENTLY_ADDED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED_AND_PURCHASED', index=4, number=5,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11290,
serialized_end=11410,
)
_GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE = descriptor.EnumDescriptor(
name='DynamicPlaylistEntriesType',
full_name='GetDynamicPlaylistEntriesResponse.DynamicPlaylistEntriesType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PURCHASED', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='THUMBS_UP', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='RECENTLY_ADDED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UNKNOWN', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMOTED_AND_PURCHASED', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11718,
serialized_end=11851,
)
_GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='GetDynamicPlaylistEntriesResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NOT_OK', index=1, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=11853,
serialized_end=11887,
)
_TRACKTYPEAGGREGATE_TRACKTYPE = descriptor.EnumDescriptor(
name='TrackType',
full_name='TrackTypeAggregate.TrackType',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='MATCHED_TRACK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UNMATCHED_TRACK', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='LOCAL_TRACK', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PURCHASED_TRACK', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='METADATA_ONLY_MATCHED_TRACK', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PROMO_TRACK', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2361,
serialized_end=2500,
)
_AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS = descriptor.EnumDescriptor(
name='AvailabilityStatus',
full_name='AvailabilityStatusAggregate.AvailabilityStatus',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='PENDING', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='MATCHED', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_REQUESTED', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='AVAILABLE', index=3, number=4,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='FORCE_REUPLOAD', index=4, number=5,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='UPLOAD_PERMANENTLY_FAILED', index=5, number=6,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=2101,
serialized_end=2235,
)
_REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE = descriptor.EnumDescriptor(
name='ResponseCode',
full_name='RemoteControlCommandResponse.ResponseCode',
filename=None,
file=DESCRIPTOR,
values=[
descriptor.EnumValueDescriptor(
name='OK', index=0, number=1,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='NO_PUBLISHER', index=1, number=2,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='INVALID_REQUEST', index=2, number=3,
options=None,
type=None),
descriptor.EnumValueDescriptor(
name='PUBLISH_ERROR', index=3, number=4,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=13274,
serialized_end=13354,
)
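# Message descriptors follow. The numeric codes in each FieldDescriptor map to
# the protobuf descriptor constants: 'type' is FieldDescriptor.TYPE_* (e.g.
# 9=string, 12=bytes, 14=enum, 11=message, 5=int32, 3=int64, 8=bool,
# 13=uint32) and 'label' is LABEL_* (1=optional, 2=required, 3=repeated).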
_AUDIOREF = descriptor.Descriptor(
name='AudioRef',
full_name='AudioRef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='store', full_name='AudioRef.store', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='ref', full_name='AudioRef.ref', index=1,
number=2, type=12, cpp_type=9, label=2,
has_default_value=False, default_value="",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='url', full_name='AudioRef.url', index=2,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='bit_rate', full_name='AudioRef.bit_rate', index=3,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sample_rate', full_name='AudioRef.sample_rate', index=4,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='downloadable', full_name='AudioRef.downloadable', index=5,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='duration_millis', full_name='AudioRef.duration_millis', index=6,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='rematch_timestamp', full_name='AudioRef.rematch_timestamp', index=7,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='invalid_due_to_wipeout', full_name='AudioRef.invalid_due_to_wipeout', index=8,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_AUDIOREF_STORE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=29,
serialized_end=277,
)
_IMAGEREF = descriptor.Descriptor(
name='ImageRef',
full_name='ImageRef',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='store', full_name='ImageRef.store', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=3,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='width', full_name='ImageRef.width', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='height', full_name='ImageRef.height', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='url', full_name='ImageRef.url', index=3,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='invalid_due_to_wipeout', full_name='ImageRef.invalid_due_to_wipeout', index=4,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='origin', full_name='ImageRef.origin', index=5,
number=8, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_IMAGEREF_STORE,
_IMAGEREF_ORIGIN,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=280,
serialized_end=489,
)
_UPLOADEDUITSID3TAG = descriptor.Descriptor(
name='UploadedUitsId3Tag',
full_name='UploadedUitsId3Tag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='owner', full_name='UploadedUitsId3Tag.owner', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='data', full_name='UploadedUitsId3Tag.data', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value="",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=491,
serialized_end=540,
)
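# _TRACK is the largest message in the file. Only fields declared with an
# explicit default in locker.proto carry has_default_value=True (e.g.
# 'deleted', a bool defaulting to false); everything else shows the plain
# proto2 type default.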
_TRACK = descriptor.Descriptor(
name='Track',
full_name='Track',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='Track.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='Track.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='Track.creation_timestamp', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_modified_timestamp', full_name='Track.last_modified_timestamp', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='deleted', full_name='Track.deleted', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='title', full_name='Track.title', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='artist', full_name='Track.artist', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='artist_hash', full_name='Track.artist_hash', index=7,
number=46, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='composer', full_name='Track.composer', index=8,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album', full_name='Track.album', index=9,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_artist', full_name='Track.album_artist', index=10,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='canonical_album', full_name='Track.canonical_album', index=11,
number=56, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='canonical_artist', full_name='Track.canonical_artist', index=12,
number=57, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='canonical_genre_album', full_name='Track.canonical_genre_album', index=13,
number=58, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='year', full_name='Track.year', index=14,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='comment', full_name='Track.comment', index=15,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_number', full_name='Track.track_number', index=16,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='genre', full_name='Track.genre', index=17,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='duration_millis', full_name='Track.duration_millis', index=18,
number=15, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='beats_per_minute', full_name='Track.beats_per_minute', index=19,
number=16, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='original_bit_rate', full_name='Track.original_bit_rate', index=20,
number=44, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='audio_ref', full_name='Track.audio_ref', index=21,
number=17, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_art_ref', full_name='Track.album_art_ref', index=22,
number=18, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='availability_status', full_name='Track.availability_status', index=23,
number=19, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='play_count', full_name='Track.play_count', index=24,
number=20, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='content_type', full_name='Track.content_type', index=25,
number=25, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_track_count', full_name='Track.total_track_count', index=26,
number=26, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='disc_number', full_name='Track.disc_number', index=27,
number=27, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_disc_count', full_name='Track.total_disc_count', index=28,
number=28, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='channels', full_name='Track.channels', index=29,
number=29, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_type', full_name='Track.track_type', index=30,
number=30, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='use_single_server_copy', full_name='Track.use_single_server_copy', index=31,
number=59, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='rating', full_name='Track.rating', index=32,
number=31, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_size', full_name='Track.estimated_size', index=33,
number=32, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='store_id', full_name='Track.store_id', index=34,
number=33, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='metajam_id', full_name='Track.metajam_id', index=35,
number=34, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='metajam_id_confidence', full_name='Track.metajam_id_confidence', index=36,
number=43, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uits', full_name='Track.uits', index=37,
number=35, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uits_metadata', full_name='Track.uits_metadata', index=38,
number=40, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='compilation', full_name='Track.compilation', index=39,
number=36, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_date_added', full_name='Track.client_date_added', index=40,
number=37, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='recent_timestamp', full_name='Track.recent_timestamp', index=41,
number=38, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='do_not_rematch', full_name='Track.do_not_rematch', index=42,
number=39, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='from_album_purchase', full_name='Track.from_album_purchase', index=43,
number=41, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_metajam_id', full_name='Track.album_metajam_id', index=44,
number=42, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='transaction_id', full_name='Track.transaction_id', index=45,
number=45, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='debug_track', full_name='Track.debug_track', index=46,
number=47, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_title', full_name='Track.normalized_title', index=47,
number=48, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_artist', full_name='Track.normalized_artist', index=48,
number=49, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_album', full_name='Track.normalized_album', index=49,
number=50, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_album_artist', full_name='Track.normalized_album_artist', index=50,
number=51, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_canonical_album', full_name='Track.normalized_canonical_album', index=51,
number=54, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='normalized_canonical_artist', full_name='Track.normalized_canonical_artist', index=52,
number=55, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uploader_id', full_name='Track.uploader_id', index=53,
number=52, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_album_id', full_name='Track.client_album_id', index=54,
number=53, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='label_owner_code', full_name='Track.label_owner_code', index=55,
number=60, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='original_content_type', full_name='Track.original_content_type', index=56,
number=61, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='uploaded_uits', full_name='Track.uploaded_uits', index=57,
number=71, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACK_AVAILABILITYSTATUS,
_TRACK_CONTENTTYPE,
_TRACK_CHANNELS,
_TRACK_TRACKTYPE,
_TRACK_RATING,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=543,
serialized_end=2603,
)
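# (Annotation: field codes above follow the protobuf descriptor convention:
# label 1=optional, 2=required, 3=repeated; type 9=string, 11=message,
# 14=enum, 8=bool, 5=int32, 3=int64, 1=double.)
# Tracks: container message holding a repeated list of Track messages.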
_TRACKS = descriptor.Descriptor(
name='Tracks',
full_name='Tracks',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track', full_name='Tracks.track', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=2605,
serialized_end=2636,
)
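# Playlist: playlist metadata (server/client ids, timestamps, deleted flag, name, type, art ref).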
_PLAYLIST = descriptor.Descriptor(
name='Playlist',
full_name='Playlist',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='Playlist.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='Playlist.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='Playlist.creation_timestamp', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_modified_timestamp', full_name='Playlist.last_modified_timestamp', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='deleted', full_name='Playlist.deleted', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='name', full_name='Playlist.name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_type', full_name='Playlist.playlist_type', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_art_ref', full_name='Playlist.playlist_art_ref', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='recent_timestamp', full_name='Playlist.recent_timestamp', index=8,
number=9, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PLAYLIST_PLAYLISTTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=2639,
serialized_end=2947,
)
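# PlaylistEntry: one track's membership in a playlist, positioned absolutely,
# relative to another entry, or by string position.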
_PLAYLISTENTRY = descriptor.Descriptor(
name='PlaylistEntry',
full_name='PlaylistEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist_id', full_name='PlaylistEntry.playlist_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='absolute_position', full_name='PlaylistEntry.absolute_position', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='place_after_entry_id', full_name='PlaylistEntry.place_after_entry_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_id', full_name='PlaylistEntry.track_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='id', full_name='PlaylistEntry.id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='PlaylistEntry.client_id', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='PlaylistEntry.creation_timestamp', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_modified_timestamp', full_name='PlaylistEntry.last_modified_timestamp', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='deleted', full_name='PlaylistEntry.deleted', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='relative_position_id_type', full_name='PlaylistEntry.relative_position_id_type', index=9,
number=10, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track', full_name='PlaylistEntry.track', index=10,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='place_before_entry_id', full_name='PlaylistEntry.place_before_entry_id', index=11,
number=16, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='string_position', full_name='PlaylistEntry.string_position', index=12,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PLAYLISTENTRY_RELATIVEPOSITIONIDTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=2950,
serialized_end=3380,
)
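# TrackSearchRestriction: a single attribute/value/comparison predicate over tracks.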
_TRACKSEARCHRESTRICTION = descriptor.Descriptor(
name='TrackSearchRestriction',
full_name='TrackSearchRestriction',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='TrackSearchRestriction.attribute', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='value', full_name='TrackSearchRestriction.value', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='comparison_type', full_name='TrackSearchRestriction.comparison_type', index=2,
number=3, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKSEARCHRESTRICTION_TRACKATTRIBUTE,
_TRACKSEARCHRESTRICTION_COMPARISONTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=3383,
serialized_end=3847,
)
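# TrackSearchRestrictionSet: combines restrictions and nested sub-sets under a set type.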
_TRACKSEARCHRESTRICTIONSET = descriptor.Descriptor(
name='TrackSearchRestrictionSet',
full_name='TrackSearchRestrictionSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='type', full_name='TrackSearchRestrictionSet.type', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='restriction', full_name='TrackSearchRestrictionSet.restriction', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sub_set', full_name='TrackSearchRestrictionSet.sub_set', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=3850,
serialized_end=4068,
)
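# TrackSortOrder: sort attribute plus direction (descending defaults to True).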
_TRACKSORTORDER = descriptor.Descriptor(
name='TrackSortOrder',
full_name='TrackSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='TrackSortOrder.attribute', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='descending', full_name='TrackSortOrder.descending', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKSORTORDER_TRACKATTRIBUTE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4071,
serialized_end=4327,
)
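# GetTracksRequest: paged track query with optional filters, sort orders, and projection.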
_GETTRACKSREQUEST = descriptor.Descriptor(
name='GetTracksRequest',
full_name='GetTracksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetTracksRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='updated_min', full_name='GetTracksRequest.updated_min', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='GetTracksRequest.include_deleted', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetTracksRequest.max_results', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetTracksRequest.continuation_token', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='search_restriction', full_name='GetTracksRequest.search_restriction', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetTracksRequest.sort_order', index=6,
number=7, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='restriction_set', full_name='GetTracksRequest.restriction_set', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_projection', full_name='GetTracksRequest.track_projection', index=8,
number=9, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETTRACKSREQUEST_TRACKPROJECTION,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4330,
serialized_end=4712,
)
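# GetTracksResponse: response code, matching tracks, estimated total, continuation token.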
_GETTRACKSRESPONSE = descriptor.Descriptor(
name='GetTracksResponse',
full_name='GetTracksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetTracksResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track', full_name='GetTracksResponse.track', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetTracksResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetTracksResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETTRACKSRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4715,
serialized_end=4926,
)
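# GetPlaylistEntriesRequest: paged entry query, optionally filtered to a single playlist id.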
_GETPLAYLISTENTRIESREQUEST = descriptor.Descriptor(
name='GetPlaylistEntriesRequest',
full_name='GetPlaylistEntriesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetPlaylistEntriesRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='updated_min', full_name='GetPlaylistEntriesRequest.updated_min', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='GetPlaylistEntriesRequest.include_deleted', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetPlaylistEntriesRequest.max_results', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistEntriesRequest.continuation_token', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_id_filter', full_name='GetPlaylistEntriesRequest.playlist_id_filter', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_all_track_metadata', full_name='GetPlaylistEntriesRequest.include_all_track_metadata', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='only_show_available_tracks', full_name='GetPlaylistEntriesRequest.only_show_available_tracks', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4929,
serialized_end=5181,
)
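# GetPlaylistEntriesResponse: response code, entries, estimated total, continuation token.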
_GETPLAYLISTENTRIESRESPONSE = descriptor.Descriptor(
name='GetPlaylistEntriesResponse',
full_name='GetPlaylistEntriesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetPlaylistEntriesResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='GetPlaylistEntriesResponse.playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetPlaylistEntriesResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistEntriesResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETPLAYLISTENTRIESRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5184,
serialized_end=5430,
)
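# PlaylistSortOrder: sort attribute plus direction (descending defaults to False).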
_PLAYLISTSORTORDER = descriptor.Descriptor(
name='PlaylistSortOrder',
full_name='PlaylistSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='PlaylistSortOrder.attribute', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='descending', full_name='PlaylistSortOrder.descending', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_PLAYLISTSORTORDER_PLAYLISTATTRIBUTE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5433,
serialized_end=5633,
)
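# GetPlaylistsRequest: paged playlist query with optional sort order.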
_GETPLAYLISTSREQUEST = descriptor.Descriptor(
name='GetPlaylistsRequest',
full_name='GetPlaylistsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetPlaylistsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='updated_min', full_name='GetPlaylistsRequest.updated_min', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='GetPlaylistsRequest.include_deleted', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetPlaylistsRequest.max_results', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistsRequest.continuation_token', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetPlaylistsRequest.sort_order', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5636,
serialized_end=5809,
)
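# GetPlaylistsResponse: response code, playlists, estimated total, continuation token.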
_GETPLAYLISTSRESPONSE = descriptor.Descriptor(
name='GetPlaylistsResponse',
full_name='GetPlaylistsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetPlaylistsResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist', full_name='GetPlaylistsResponse.playlist', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetPlaylistsResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetPlaylistsResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETPLAYLISTSRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=5812,
serialized_end=6035,
)
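# LookupTrackRequest: identifies a track by server id or client id.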
_LOOKUPTRACKREQUEST = descriptor.Descriptor(
name='LookupTrackRequest',
full_name='LookupTrackRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='LookupTrackRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='LookupTrackRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6037,
serialized_end=6088,
)
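# LookupPlaylistEntryRequest: identifies a playlist entry by server id or client id.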
_LOOKUPPLAYLISTENTRYREQUEST = descriptor.Descriptor(
name='LookupPlaylistEntryRequest',
full_name='LookupPlaylistEntryRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='LookupPlaylistEntryRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='LookupPlaylistEntryRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6090,
serialized_end=6149,
)
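# LookupPlaylistRequest: identifies a playlist by server id or client id.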
_LOOKUPPLAYLISTREQUEST = descriptor.Descriptor(
name='LookupPlaylistRequest',
full_name='LookupPlaylistRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='id', full_name='LookupPlaylistRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='LookupPlaylistRequest.client_id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6151,
serialized_end=6205,
)
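# BatchLookupRequest: resolves many tracks/playlists/entries in one call,
# scoped by metadata type and an include_deleted flag.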
_BATCHLOOKUPREQUEST = descriptor.Descriptor(
name='BatchLookupRequest',
full_name='BatchLookupRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchLookupRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track', full_name='BatchLookupRequest.track', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist', full_name='BatchLookupRequest.playlist', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='metadata_type', full_name='BatchLookupRequest.metadata_type', index=3,
number=4, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='BatchLookupRequest.playlist_entry', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_deleted', full_name='BatchLookupRequest.include_deleted', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHLOOKUPREQUEST_METADATATYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6208,
serialized_end=6526,
)
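# BatchLookupResponse: the resolved tracks, playlists, and playlist entries.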
_BATCHLOOKUPRESPONSE = descriptor.Descriptor(
name='BatchLookupResponse',
full_name='BatchLookupResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track', full_name='BatchLookupResponse.track', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist', full_name='BatchLookupResponse.playlist', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='BatchLookupResponse.playlist_entry', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6528,
serialized_end=6641,
)
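# MutateTrackRequest: carries a create/update/delete/undelete operation on a track.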
_MUTATETRACKREQUEST = descriptor.Descriptor(
name='MutateTrackRequest',
full_name='MutateTrackRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='create_track', full_name='MutateTrackRequest.create_track', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_track', full_name='MutateTrackRequest.update_track', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='delete_track', full_name='MutateTrackRequest.delete_track', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='partial_update', full_name='MutateTrackRequest.partial_update', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_last_modified', full_name='MutateTrackRequest.update_last_modified', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='undelete_track', full_name='MutateTrackRequest.undelete_track', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6644,
serialized_end=6830,
)
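# MutateResponse: per-item mutation result (code, ids, availability status, error message).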
_MUTATERESPONSE = descriptor.Descriptor(
name='MutateResponse',
full_name='MutateResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='MutateResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='id', full_name='MutateResponse.id', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='child_id', full_name='MutateResponse.child_id', index=2,
number=3, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='client_id', full_name='MutateResponse.client_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='availability_status', full_name='MutateResponse.availability_status', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='error_message', full_name='MutateResponse.error_message', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MUTATERESPONSE_MUTATERESPONSECODE,
_MUTATERESPONSE_AVAILABILITYSTATUS,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=6833,
serialized_end=7271,
)
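# BatchMutateTracksRequest: a list of track mutations for one user (gaia_id),
# with notification and conflict-detection flags.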
_BATCHMUTATETRACKSREQUEST = descriptor.Descriptor(
name='BatchMutateTracksRequest',
full_name='BatchMutateTracksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchMutateTracksRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_mutation', full_name='BatchMutateTracksRequest.track_mutation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='BatchMutateTracksRequest.send_notification', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='detect_timestamp_conflict', full_name='BatchMutateTracksRequest.detect_timestamp_conflict', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='BatchMutateTracksRequest.notify_fine_grained_updates', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7274,
serialized_end=7479,
)
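# BatchMutateTracksResponse: batch-level response codes plus one MutateResponse per mutation.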
_BATCHMUTATETRACKSRESPONSE = descriptor.Descriptor(
name='BatchMutateTracksResponse',
full_name='BatchMutateTracksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='BatchMutateTracksResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='mutate_response', full_name='BatchMutateTracksResponse.mutate_response', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7482,
serialized_end=7687,
)
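# MutatePlaylistRequest: carries a create/update/delete/undelete operation on a
# playlist; may also include playlist entries.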
_MUTATEPLAYLISTREQUEST = descriptor.Descriptor(
name='MutatePlaylistRequest',
full_name='MutatePlaylistRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='create_playlist', full_name='MutatePlaylistRequest.create_playlist', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_playlist', full_name='MutatePlaylistRequest.update_playlist', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='delete_playlist', full_name='MutatePlaylistRequest.delete_playlist', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='partial_update', full_name='MutatePlaylistRequest.partial_update', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='MutatePlaylistRequest.playlist_entry', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_last_modified', full_name='MutatePlaylistRequest.update_last_modified', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='undelete_playlist', full_name='MutatePlaylistRequest.undelete_playlist', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7690,
serialized_end=7937,
)
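# BatchMutatePlaylistsRequest: a list of playlist mutations for one user (gaia_id).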
_BATCHMUTATEPLAYLISTSREQUEST = descriptor.Descriptor(
name='BatchMutatePlaylistsRequest',
full_name='BatchMutatePlaylistsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchMutatePlaylistsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_mutation', full_name='BatchMutatePlaylistsRequest.playlist_mutation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='BatchMutatePlaylistsRequest.send_notification', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='detect_timestamp_conflict', full_name='BatchMutatePlaylistsRequest.detect_timestamp_conflict', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='BatchMutatePlaylistsRequest.notify_fine_grained_updates', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=7940,
serialized_end=8155,
)
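# BatchMutatePlaylistsResponse: batch-level response codes plus one MutateResponse per mutation.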
_BATCHMUTATEPLAYLISTSRESPONSE = descriptor.Descriptor(
name='BatchMutatePlaylistsResponse',
full_name='BatchMutatePlaylistsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='BatchMutatePlaylistsResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='mutate_response', full_name='BatchMutatePlaylistsResponse.mutate_response', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8158,
serialized_end=8375,
)
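# MutatePlaylistEntryRequest: carries a create/update/delete/undelete operation on a playlist entry.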
_MUTATEPLAYLISTENTRYREQUEST = descriptor.Descriptor(
name='MutatePlaylistEntryRequest',
full_name='MutatePlaylistEntryRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='create_playlist_entry', full_name='MutatePlaylistEntryRequest.create_playlist_entry', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_playlist_entry', full_name='MutatePlaylistEntryRequest.update_playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='delete_playlist_entry', full_name='MutatePlaylistEntryRequest.delete_playlist_entry', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='update_last_modified', full_name='MutatePlaylistEntryRequest.update_last_modified', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='undelete_playlist_entry', full_name='MutatePlaylistEntryRequest.undelete_playlist_entry', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8378,
serialized_end=8616,
)
_BATCHMUTATEPLAYLISTENTRIESREQUEST = descriptor.Descriptor(
name='BatchMutatePlaylistEntriesRequest',
full_name='BatchMutatePlaylistEntriesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='BatchMutatePlaylistEntriesRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry_mutation', full_name='BatchMutatePlaylistEntriesRequest.playlist_entry_mutation', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='BatchMutatePlaylistEntriesRequest.send_notification', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='detect_timestamp_conflict', full_name='BatchMutatePlaylistEntriesRequest.detect_timestamp_conflict', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='BatchMutatePlaylistEntriesRequest.notify_fine_grained_updates', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8619,
serialized_end=8851,
)
_BATCHMUTATEPLAYLISTENTRIESRESPONSE = descriptor.Descriptor(
name='BatchMutatePlaylistEntriesResponse',
full_name='BatchMutatePlaylistEntriesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='BatchMutatePlaylistEntriesResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='mutate_response', full_name='BatchMutatePlaylistEntriesResponse.mutate_response', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=8854,
serialized_end=9095,
)
_MAGICPLAYLISTSEED = descriptor.Descriptor(
name='MagicPlaylistSeed',
full_name='MagicPlaylistSeed',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='seed_type', full_name='MagicPlaylistSeed.seed_type', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='seed', full_name='MagicPlaylistSeed.seed', index=1,
number=2, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_MAGICPLAYLISTSEED_SEEDTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9098,
serialized_end=9242,
)
_MAGICPLAYLISTREQUEST = descriptor.Descriptor(
name='MagicPlaylistRequest',
full_name='MagicPlaylistRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='MagicPlaylistRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_name', full_name='MagicPlaylistRequest.playlist_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_id', full_name='MagicPlaylistRequest.playlist_id', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='seed', full_name='MagicPlaylistRequest.seed', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='num_recommendations', full_name='MagicPlaylistRequest.num_recommendations', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_all_track_metadata', full_name='MagicPlaylistRequest.include_all_track_metadata', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='model_name', full_name='MagicPlaylistRequest.model_name', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9245,
serialized_end=9454,
)
_MAGICPLAYLISTRESPONSE = descriptor.Descriptor(
name='MagicPlaylistResponse',
full_name='MagicPlaylistResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist', full_name='MagicPlaylistResponse.playlist', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='MagicPlaylistResponse.playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9456,
serialized_end=9548,
)
_FLUSHLOCKERREQUEST = descriptor.Descriptor(
name='FlushLockerRequest',
full_name='FlushLockerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='FlushLockerRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='gaia_cookie', full_name='FlushLockerRequest.gaia_cookie', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='remove_audio_binaries', full_name='FlushLockerRequest.remove_audio_binaries', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='remove_image_binaries', full_name='FlushLockerRequest.remove_image_binaries', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='send_notification', full_name='FlushLockerRequest.send_notification', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='reset_subscription_type', full_name='FlushLockerRequest.reset_subscription_type', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='notify_fine_grained_updates', full_name='FlushLockerRequest.notify_fine_grained_updates', index=6,
number=8, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=True,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9551,
serialized_end=9799,
)
_FLUSHLOCKERRESPONSE = descriptor.Descriptor(
name='FlushLockerResponse',
full_name='FlushLockerResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='tracks_removed', full_name='FlushLockerResponse.tracks_removed', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='entries_removed', full_name='FlushLockerResponse.entries_removed', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlists_removed', full_name='FlushLockerResponse.playlists_removed', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='success_reset_subscription_type', full_name='FlushLockerResponse.success_reset_subscription_type', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9802,
serialized_end=9940,
)
_LOCKERNOTIFICATION = descriptor.Descriptor(
name='LockerNotification',
full_name='LockerNotification',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='LockerNotification.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='payload', full_name='LockerNotification.payload', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value="",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9942,
serialized_end=9996,
)
_ALBUM = descriptor.Descriptor(
name='Album',
full_name='Album',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='name', full_name='Album.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_artist', full_name='Album.album_artist', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_art', full_name='Album.album_art', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_count', full_name='Album.track_count', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_time_played', full_name='Album.last_time_played', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='is_compilation', full_name='Album.is_compilation', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_metajam_id', full_name='Album.album_metajam_id', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='creation_timestamp', full_name='Album.creation_timestamp', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='artist', full_name='Album.artist', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=9999,
serialized_end=10213,
)
_ALBUMSORTORDER = descriptor.Descriptor(
name='AlbumSortOrder',
full_name='AlbumSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='attribute', full_name='AlbumSortOrder.attribute', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='descending', full_name='AlbumSortOrder.descending', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_ALBUMSORTORDER_ALBUMATTRIBUTE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10216,
serialized_end=10379,
)
_GETALBUMSREQUEST = descriptor.Descriptor(
name='GetAlbumsRequest',
full_name='GetAlbumsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetAlbumsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetAlbumsRequest.sort_order', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetAlbumsRequest.max_results', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10381,
serialized_end=10474,
)
_GETALBUMSRESPONSE = descriptor.Descriptor(
name='GetAlbumsResponse',
full_name='GetAlbumsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='album', full_name='GetAlbumsResponse.album', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10476,
serialized_end=10518,
)
_ARTIST = descriptor.Descriptor(
name='Artist',
full_name='Artist',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='name', full_name='Artist.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_track_count', full_name='Artist.total_track_count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album', full_name='Artist.album', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10520,
serialized_end=10592,
)
_ARTISTSORTORDER = descriptor.Descriptor(
name='ArtistSortOrder',
full_name='ArtistSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='descending', full_name='ArtistSortOrder.descending', index=0,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10594,
serialized_end=10638,
)
_GETARTISTSREQUEST = descriptor.Descriptor(
name='GetArtistsRequest',
full_name='GetArtistsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetArtistsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetArtistsRequest.sort_order', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetArtistsRequest.max_results', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10640,
serialized_end=10735,
)
_GETARTISTSRESPONSE = descriptor.Descriptor(
name='GetArtistsResponse',
full_name='GetArtistsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='artist', full_name='GetArtistsResponse.artist', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10737,
serialized_end=10782,
)
_MUSICGENRE = descriptor.Descriptor(
name='MusicGenre',
full_name='MusicGenre',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='name', full_name='MusicGenre.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='total_track_count', full_name='MusicGenre.total_track_count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album', full_name='MusicGenre.album', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10784,
serialized_end=10860,
)
_GENRESORTORDER = descriptor.Descriptor(
name='GenreSortOrder',
full_name='GenreSortOrder',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='descending', full_name='GenreSortOrder.descending', index=0,
number=2, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10862,
serialized_end=10905,
)
_GETGENRESREQUEST = descriptor.Descriptor(
name='GetGenresRequest',
full_name='GetGenresRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetGenresRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='sort_order', full_name='GetGenresRequest.sort_order', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetGenresRequest.max_results', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=10907,
serialized_end=11000,
)
_GETGENRESRESPONSE = descriptor.Descriptor(
name='GetGenresResponse',
full_name='GetGenresResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='genre', full_name='GetGenresResponse.genre', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11002,
serialized_end=11049,
)
_GETDYNAMICPLAYLISTENTRIESREQUEST = descriptor.Descriptor(
name='GetDynamicPlaylistEntriesRequest',
full_name='GetDynamicPlaylistEntriesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetDynamicPlaylistEntriesRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entries_type', full_name='GetDynamicPlaylistEntriesRequest.playlist_entries_type', index=1,
number=4, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetDynamicPlaylistEntriesRequest.max_results', index=2,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetDynamicPlaylistEntriesRequest.continuation_token', index=3,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='include_all_track_metadata', full_name='GetDynamicPlaylistEntriesRequest.include_all_track_metadata', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11052,
serialized_end=11410,
)
_GETDYNAMICPLAYLISTENTRIESRESPONSE = descriptor.Descriptor(
name='GetDynamicPlaylistEntriesResponse',
full_name='GetDynamicPlaylistEntriesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='GetDynamicPlaylistEntriesResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entry', full_name='GetDynamicPlaylistEntriesResponse.playlist_entry', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='estimated_total_results', full_name='GetDynamicPlaylistEntriesResponse.estimated_total_results', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='continuation_token', full_name='GetDynamicPlaylistEntriesResponse.continuation_token', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='playlist_entries_type', full_name='GetDynamicPlaylistEntriesResponse.playlist_entries_type', index=4,
number=5, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE,
_GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11413,
serialized_end=11887,
)
_GETAGGREGATIONSBYTRACKTYPEREQUEST = descriptor.Descriptor(
name='GetAggregationsByTrackTypeRequest',
full_name='GetAggregationsByTrackTypeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetAggregationsByTrackTypeRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11889,
serialized_end=11941,
)
_TRACKTYPEAGGREGATE = descriptor.Descriptor(
name='TrackTypeAggregate',
full_name='TrackTypeAggregate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track_type_value', full_name='TrackTypeAggregate.track_type_value', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='count', full_name='TrackTypeAggregate.count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_TRACKTYPEAGGREGATE_TRACKTYPE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=11944,
serialized_end=12178,
)
_GETAGGREGATIONSBYTRACKTYPERESPONSE = descriptor.Descriptor(
name='GetAggregationsByTrackTypeResponse',
full_name='GetAggregationsByTrackTypeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track_type_aggregate', full_name='GetAggregationsByTrackTypeResponse.track_type_aggregate', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12180,
serialized_end=12267,
)
_GETAGGREGATIONSBYAVAILABILITYSTATUSREQUEST = descriptor.Descriptor(
name='GetAggregationsByAvailabilityStatusRequest',
full_name='GetAggregationsByAvailabilityStatusRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetAggregationsByAvailabilityStatusRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12269,
serialized_end=12330,
)
_AVAILABILITYSTATUSAGGREGATE = descriptor.Descriptor(
name='AvailabilityStatusAggregate',
full_name='AvailabilityStatusAggregate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='availability_status', full_name='AvailabilityStatusAggregate.availability_status', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='count', full_name='AvailabilityStatusAggregate.count', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12333,
serialized_end=12592,
)
_GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE = descriptor.Descriptor(
name='GetAggregationsByAvailabilityStatusResponse',
full_name='GetAggregationsByAvailabilityStatusResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='availability_status_aggregate', full_name='GetAggregationsByAvailabilityStatusResponse.availability_status_aggregate', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12594,
serialized_end=12708,
)
_ADDPROMOTRACKSREQUEST = descriptor.Descriptor(
name='AddPromoTracksRequest',
full_name='AddPromoTracksRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='AddPromoTracksRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='genre', full_name='AddPromoTracksRequest.genre', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12710,
serialized_end=12765,
)
_ADDPROMOTRACKSRESPONSE = descriptor.Descriptor(
name='AddPromoTracksResponse',
full_name='AddPromoTracksResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='track', full_name='AddPromoTracksResponse.track', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12767,
serialized_end=12814,
)
_GETPLAYLISTAGGREGATIONSREQUEST = descriptor.Descriptor(
name='GetPlaylistAggregationsRequest',
full_name='GetPlaylistAggregationsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='GetPlaylistAggregationsRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='max_results', full_name='GetPlaylistAggregationsRequest.max_results', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=14,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12816,
serialized_end=12890,
)
_PLAYLISTAGGREGATE = descriptor.Descriptor(
name='PlaylistAggregate',
full_name='PlaylistAggregate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist_id', full_name='PlaylistAggregate.playlist_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='name', full_name='PlaylistAggregate.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='album_art', full_name='PlaylistAggregate.album_art', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='track_count', full_name='PlaylistAggregate.track_count', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='last_time_played', full_name='PlaylistAggregate.last_time_played', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=12893,
serialized_end=13024,
)
_GETPLAYLISTAGGREGATIONSRESPONSE = descriptor.Descriptor(
name='GetPlaylistAggregationsResponse',
full_name='GetPlaylistAggregationsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='playlist_aggregate', full_name='GetPlaylistAggregationsResponse.playlist_aggregate', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=13026,
serialized_end=13107,
)
_REMOTECONTROLCOMMANDREQUEST = descriptor.Descriptor(
name='RemoteControlCommandRequest',
full_name='RemoteControlCommandRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='gaia_id', full_name='RemoteControlCommandRequest.gaia_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
descriptor.FieldDescriptor(
name='command', full_name='RemoteControlCommandRequest.command', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=13109,
serialized_end=13172,
)
_REMOTECONTROLCOMMANDRESPONSE = descriptor.Descriptor(
name='RemoteControlCommandResponse',
full_name='RemoteControlCommandResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
descriptor.FieldDescriptor(
name='response_code', full_name='RemoteControlCommandResponse.response_code', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE,
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=13175,
serialized_end=13354,
)
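# Wire up the cross-references that cannot be resolved while the Descriptor
# objects above are being constructed: message- and enum-typed fields get
# their message_type/enum_type pointers here, and each nested enum records
# its enclosing message via containing_type.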
_AUDIOREF.fields_by_name['store'].enum_type = _AUDIOREF_STORE
_AUDIOREF_STORE.containing_type = _AUDIOREF
_IMAGEREF.fields_by_name['store'].enum_type = _IMAGEREF_STORE
_IMAGEREF.fields_by_name['origin'].enum_type = _IMAGEREF_ORIGIN
_IMAGEREF_STORE.containing_type = _IMAGEREF
_IMAGEREF_ORIGIN.containing_type = _IMAGEREF
_TRACK.fields_by_name['audio_ref'].message_type = _AUDIOREF
_TRACK.fields_by_name['album_art_ref'].message_type = _IMAGEREF
_TRACK.fields_by_name['availability_status'].enum_type = _TRACK_AVAILABILITYSTATUS
_TRACK.fields_by_name['content_type'].enum_type = _TRACK_CONTENTTYPE
_TRACK.fields_by_name['channels'].enum_type = _TRACK_CHANNELS
_TRACK.fields_by_name['track_type'].enum_type = _TRACK_TRACKTYPE
_TRACK.fields_by_name['rating'].enum_type = _TRACK_RATING
_TRACK.fields_by_name['uits_metadata'].message_type = uits_pb2._UITSMETADATA
_TRACK.fields_by_name['original_content_type'].enum_type = _TRACK_CONTENTTYPE
_TRACK.fields_by_name['uploaded_uits'].message_type = _UPLOADEDUITSID3TAG
_TRACK_AVAILABILITYSTATUS.containing_type = _TRACK
_TRACK_CONTENTTYPE.containing_type = _TRACK
_TRACK_CHANNELS.containing_type = _TRACK
_TRACK_TRACKTYPE.containing_type = _TRACK
_TRACK_RATING.containing_type = _TRACK
_TRACKS.fields_by_name['track'].message_type = _TRACK
_PLAYLIST.fields_by_name['playlist_type'].enum_type = _PLAYLIST_PLAYLISTTYPE
_PLAYLIST.fields_by_name['playlist_art_ref'].message_type = _IMAGEREF
_PLAYLIST_PLAYLISTTYPE.containing_type = _PLAYLIST
_PLAYLISTENTRY.fields_by_name['relative_position_id_type'].enum_type = _PLAYLISTENTRY_RELATIVEPOSITIONIDTYPE
_PLAYLISTENTRY.fields_by_name['track'].message_type = _TRACK
_PLAYLISTENTRY_RELATIVEPOSITIONIDTYPE.containing_type = _PLAYLISTENTRY
_TRACKSEARCHRESTRICTION.fields_by_name['attribute'].enum_type = _TRACKSEARCHRESTRICTION_TRACKATTRIBUTE
_TRACKSEARCHRESTRICTION.fields_by_name['comparison_type'].enum_type = _TRACKSEARCHRESTRICTION_COMPARISONTYPE
_TRACKSEARCHRESTRICTION_TRACKATTRIBUTE.containing_type = _TRACKSEARCHRESTRICTION
_TRACKSEARCHRESTRICTION_COMPARISONTYPE.containing_type = _TRACKSEARCHRESTRICTION
_TRACKSEARCHRESTRICTIONSET.fields_by_name['type'].enum_type = _TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE
_TRACKSEARCHRESTRICTIONSET.fields_by_name['restriction'].message_type = _TRACKSEARCHRESTRICTION
_TRACKSEARCHRESTRICTIONSET.fields_by_name['sub_set'].message_type = _TRACKSEARCHRESTRICTIONSET
_TRACKSEARCHRESTRICTIONSET_RESTRICTIONSETTYPE.containing_type = _TRACKSEARCHRESTRICTIONSET
_TRACKSORTORDER.fields_by_name['attribute'].enum_type = _TRACKSORTORDER_TRACKATTRIBUTE
_TRACKSORTORDER_TRACKATTRIBUTE.containing_type = _TRACKSORTORDER
_GETTRACKSREQUEST.fields_by_name['search_restriction'].message_type = _TRACKSEARCHRESTRICTION
_GETTRACKSREQUEST.fields_by_name['sort_order'].message_type = _TRACKSORTORDER
_GETTRACKSREQUEST.fields_by_name['restriction_set'].message_type = _TRACKSEARCHRESTRICTIONSET
_GETTRACKSREQUEST.fields_by_name['track_projection'].enum_type = _GETTRACKSREQUEST_TRACKPROJECTION
_GETTRACKSREQUEST_TRACKPROJECTION.containing_type = _GETTRACKSREQUEST
_GETTRACKSRESPONSE.fields_by_name['response_code'].enum_type = _GETTRACKSRESPONSE_RESPONSECODE
_GETTRACKSRESPONSE.fields_by_name['track'].message_type = _TRACK
_GETTRACKSRESPONSE_RESPONSECODE.containing_type = _GETTRACKSRESPONSE
_GETPLAYLISTENTRIESRESPONSE.fields_by_name['response_code'].enum_type = _GETPLAYLISTENTRIESRESPONSE_RESPONSECODE
_GETPLAYLISTENTRIESRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_GETPLAYLISTENTRIESRESPONSE_RESPONSECODE.containing_type = _GETPLAYLISTENTRIESRESPONSE
_PLAYLISTSORTORDER.fields_by_name['attribute'].enum_type = _PLAYLISTSORTORDER_PLAYLISTATTRIBUTE
_PLAYLISTSORTORDER_PLAYLISTATTRIBUTE.containing_type = _PLAYLISTSORTORDER
_GETPLAYLISTSREQUEST.fields_by_name['sort_order'].message_type = _PLAYLISTSORTORDER
_GETPLAYLISTSRESPONSE.fields_by_name['response_code'].enum_type = _GETPLAYLISTSRESPONSE_RESPONSECODE
_GETPLAYLISTSRESPONSE.fields_by_name['playlist'].message_type = _PLAYLIST
_GETPLAYLISTSRESPONSE_RESPONSECODE.containing_type = _GETPLAYLISTSRESPONSE
_BATCHLOOKUPREQUEST.fields_by_name['track'].message_type = _LOOKUPTRACKREQUEST
_BATCHLOOKUPREQUEST.fields_by_name['playlist'].message_type = _LOOKUPPLAYLISTREQUEST
_BATCHLOOKUPREQUEST.fields_by_name['metadata_type'].enum_type = _BATCHLOOKUPREQUEST_METADATATYPE
_BATCHLOOKUPREQUEST.fields_by_name['playlist_entry'].message_type = _LOOKUPPLAYLISTENTRYREQUEST
_BATCHLOOKUPREQUEST_METADATATYPE.containing_type = _BATCHLOOKUPREQUEST
_BATCHLOOKUPRESPONSE.fields_by_name['track'].message_type = _TRACK
_BATCHLOOKUPRESPONSE.fields_by_name['playlist'].message_type = _PLAYLIST
_BATCHLOOKUPRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_MUTATETRACKREQUEST.fields_by_name['create_track'].message_type = _TRACK
_MUTATETRACKREQUEST.fields_by_name['update_track'].message_type = _TRACK
_MUTATERESPONSE.fields_by_name['response_code'].enum_type = _MUTATERESPONSE_MUTATERESPONSECODE
_MUTATERESPONSE.fields_by_name['availability_status'].enum_type = _MUTATERESPONSE_AVAILABILITYSTATUS
_MUTATERESPONSE_MUTATERESPONSECODE.containing_type = _MUTATERESPONSE
_MUTATERESPONSE_AVAILABILITYSTATUS.containing_type = _MUTATERESPONSE
_BATCHMUTATETRACKSREQUEST.fields_by_name['track_mutation'].message_type = _MUTATETRACKREQUEST
_BATCHMUTATETRACKSRESPONSE.fields_by_name['response_code'].enum_type = _BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE
_BATCHMUTATETRACKSRESPONSE.fields_by_name['mutate_response'].message_type = _MUTATERESPONSE
_BATCHMUTATETRACKSRESPONSE_BATCHMUTATETRACKSRESPONSECODE.containing_type = _BATCHMUTATETRACKSRESPONSE
_MUTATEPLAYLISTREQUEST.fields_by_name['create_playlist'].message_type = _PLAYLIST
_MUTATEPLAYLISTREQUEST.fields_by_name['update_playlist'].message_type = _PLAYLIST
_MUTATEPLAYLISTREQUEST.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_BATCHMUTATEPLAYLISTSREQUEST.fields_by_name['playlist_mutation'].message_type = _MUTATEPLAYLISTREQUEST
_BATCHMUTATEPLAYLISTSRESPONSE.fields_by_name['response_code'].enum_type = _BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE
_BATCHMUTATEPLAYLISTSRESPONSE.fields_by_name['mutate_response'].message_type = _MUTATERESPONSE
_BATCHMUTATEPLAYLISTSRESPONSE_BATCHMUTATEPLAYLISTSRESPONSECODE.containing_type = _BATCHMUTATEPLAYLISTSRESPONSE
_MUTATEPLAYLISTENTRYREQUEST.fields_by_name['create_playlist_entry'].message_type = _PLAYLISTENTRY
_MUTATEPLAYLISTENTRYREQUEST.fields_by_name['update_playlist_entry'].message_type = _PLAYLISTENTRY
_MUTATEPLAYLISTENTRYREQUEST.fields_by_name['delete_playlist_entry'].message_type = _PLAYLISTENTRY
_BATCHMUTATEPLAYLISTENTRIESREQUEST.fields_by_name['playlist_entry_mutation'].message_type = _MUTATEPLAYLISTENTRYREQUEST
_BATCHMUTATEPLAYLISTENTRIESRESPONSE.fields_by_name['response_code'].enum_type = _BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE
_BATCHMUTATEPLAYLISTENTRIESRESPONSE.fields_by_name['mutate_response'].message_type = _MUTATERESPONSE
_BATCHMUTATEPLAYLISTENTRIESRESPONSE_BATCHMUTATEPLAYLISTENTRIESRESPONSECODE.containing_type = _BATCHMUTATEPLAYLISTENTRIESRESPONSE
_MAGICPLAYLISTSEED.fields_by_name['seed_type'].enum_type = _MAGICPLAYLISTSEED_SEEDTYPE
_MAGICPLAYLISTSEED_SEEDTYPE.containing_type = _MAGICPLAYLISTSEED
_MAGICPLAYLISTREQUEST.fields_by_name['seed'].message_type = _MAGICPLAYLISTSEED
_MAGICPLAYLISTRESPONSE.fields_by_name['playlist'].message_type = _PLAYLIST
_MAGICPLAYLISTRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_ALBUM.fields_by_name['album_art'].message_type = _IMAGEREF
_ALBUMSORTORDER.fields_by_name['attribute'].enum_type = _ALBUMSORTORDER_ALBUMATTRIBUTE
_ALBUMSORTORDER_ALBUMATTRIBUTE.containing_type = _ALBUMSORTORDER
_GETALBUMSREQUEST.fields_by_name['sort_order'].message_type = _ALBUMSORTORDER
_GETALBUMSRESPONSE.fields_by_name['album'].message_type = _ALBUM
_ARTIST.fields_by_name['album'].message_type = _ALBUM
_GETARTISTSREQUEST.fields_by_name['sort_order'].message_type = _ARTISTSORTORDER
_GETARTISTSRESPONSE.fields_by_name['artist'].message_type = _ARTIST
_MUSICGENRE.fields_by_name['album'].message_type = _ALBUM
_GETGENRESREQUEST.fields_by_name['sort_order'].message_type = _GENRESORTORDER
_GETGENRESRESPONSE.fields_by_name['genre'].message_type = _MUSICGENRE
_GETDYNAMICPLAYLISTENTRIESREQUEST.fields_by_name['playlist_entries_type'].enum_type = _GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE
_GETDYNAMICPLAYLISTENTRIESREQUEST_DYNAMICPLAYLISTENTRIESTYPE.containing_type = _GETDYNAMICPLAYLISTENTRIESREQUEST
_GETDYNAMICPLAYLISTENTRIESRESPONSE.fields_by_name['response_code'].enum_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE
_GETDYNAMICPLAYLISTENTRIESRESPONSE.fields_by_name['playlist_entry'].message_type = _PLAYLISTENTRY
_GETDYNAMICPLAYLISTENTRIESRESPONSE.fields_by_name['playlist_entries_type'].enum_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE
_GETDYNAMICPLAYLISTENTRIESRESPONSE_DYNAMICPLAYLISTENTRIESTYPE.containing_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE
_GETDYNAMICPLAYLISTENTRIESRESPONSE_RESPONSECODE.containing_type = _GETDYNAMICPLAYLISTENTRIESRESPONSE
_TRACKTYPEAGGREGATE.fields_by_name['track_type_value'].enum_type = _TRACKTYPEAGGREGATE_TRACKTYPE
_TRACKTYPEAGGREGATE_TRACKTYPE.containing_type = _TRACKTYPEAGGREGATE
_GETAGGREGATIONSBYTRACKTYPERESPONSE.fields_by_name['track_type_aggregate'].message_type = _TRACKTYPEAGGREGATE
_AVAILABILITYSTATUSAGGREGATE.fields_by_name['availability_status'].enum_type = _AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS
_AVAILABILITYSTATUSAGGREGATE_AVAILABILITYSTATUS.containing_type = _AVAILABILITYSTATUSAGGREGATE
_GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE.fields_by_name['availability_status_aggregate'].message_type = _AVAILABILITYSTATUSAGGREGATE
_ADDPROMOTRACKSRESPONSE.fields_by_name['track'].message_type = _TRACK
_PLAYLISTAGGREGATE.fields_by_name['album_art'].message_type = _IMAGEREF
_GETPLAYLISTAGGREGATIONSRESPONSE.fields_by_name['playlist_aggregate'].message_type = _PLAYLISTAGGREGATE
_REMOTECONTROLCOMMANDRESPONSE.fields_by_name['response_code'].enum_type = _REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE
_REMOTECONTROLCOMMANDRESPONSE_RESPONSECODE.containing_type = _REMOTECONTROLCOMMANDRESPONSE
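# Register every top-level message type with the file-level DESCRIPTOR so
# the protobuf runtime can resolve each message by its fully qualified name.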
DESCRIPTOR.message_types_by_name['AudioRef'] = _AUDIOREF
DESCRIPTOR.message_types_by_name['ImageRef'] = _IMAGEREF
DESCRIPTOR.message_types_by_name['UploadedUitsId3Tag'] = _UPLOADEDUITSID3TAG
DESCRIPTOR.message_types_by_name['Track'] = _TRACK
DESCRIPTOR.message_types_by_name['Tracks'] = _TRACKS
DESCRIPTOR.message_types_by_name['Playlist'] = _PLAYLIST
DESCRIPTOR.message_types_by_name['PlaylistEntry'] = _PLAYLISTENTRY
DESCRIPTOR.message_types_by_name['TrackSearchRestriction'] = _TRACKSEARCHRESTRICTION
DESCRIPTOR.message_types_by_name['TrackSearchRestrictionSet'] = _TRACKSEARCHRESTRICTIONSET
DESCRIPTOR.message_types_by_name['TrackSortOrder'] = _TRACKSORTORDER
DESCRIPTOR.message_types_by_name['GetTracksRequest'] = _GETTRACKSREQUEST
DESCRIPTOR.message_types_by_name['GetTracksResponse'] = _GETTRACKSRESPONSE
DESCRIPTOR.message_types_by_name['GetPlaylistEntriesRequest'] = _GETPLAYLISTENTRIESREQUEST
DESCRIPTOR.message_types_by_name['GetPlaylistEntriesResponse'] = _GETPLAYLISTENTRIESRESPONSE
DESCRIPTOR.message_types_by_name['PlaylistSortOrder'] = _PLAYLISTSORTORDER
DESCRIPTOR.message_types_by_name['GetPlaylistsRequest'] = _GETPLAYLISTSREQUEST
DESCRIPTOR.message_types_by_name['GetPlaylistsResponse'] = _GETPLAYLISTSRESPONSE
DESCRIPTOR.message_types_by_name['LookupTrackRequest'] = _LOOKUPTRACKREQUEST
DESCRIPTOR.message_types_by_name['LookupPlaylistEntryRequest'] = _LOOKUPPLAYLISTENTRYREQUEST
DESCRIPTOR.message_types_by_name['LookupPlaylistRequest'] = _LOOKUPPLAYLISTREQUEST
DESCRIPTOR.message_types_by_name['BatchLookupRequest'] = _BATCHLOOKUPREQUEST
DESCRIPTOR.message_types_by_name['BatchLookupResponse'] = _BATCHLOOKUPRESPONSE
DESCRIPTOR.message_types_by_name['MutateTrackRequest'] = _MUTATETRACKREQUEST
DESCRIPTOR.message_types_by_name['MutateResponse'] = _MUTATERESPONSE
DESCRIPTOR.message_types_by_name['BatchMutateTracksRequest'] = _BATCHMUTATETRACKSREQUEST
DESCRIPTOR.message_types_by_name['BatchMutateTracksResponse'] = _BATCHMUTATETRACKSRESPONSE
DESCRIPTOR.message_types_by_name['MutatePlaylistRequest'] = _MUTATEPLAYLISTREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistsRequest'] = _BATCHMUTATEPLAYLISTSREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistsResponse'] = _BATCHMUTATEPLAYLISTSRESPONSE
DESCRIPTOR.message_types_by_name['MutatePlaylistEntryRequest'] = _MUTATEPLAYLISTENTRYREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistEntriesRequest'] = _BATCHMUTATEPLAYLISTENTRIESREQUEST
DESCRIPTOR.message_types_by_name['BatchMutatePlaylistEntriesResponse'] = _BATCHMUTATEPLAYLISTENTRIESRESPONSE
DESCRIPTOR.message_types_by_name['MagicPlaylistSeed'] = _MAGICPLAYLISTSEED
DESCRIPTOR.message_types_by_name['MagicPlaylistRequest'] = _MAGICPLAYLISTREQUEST
DESCRIPTOR.message_types_by_name['MagicPlaylistResponse'] = _MAGICPLAYLISTRESPONSE
DESCRIPTOR.message_types_by_name['FlushLockerRequest'] = _FLUSHLOCKERREQUEST
DESCRIPTOR.message_types_by_name['FlushLockerResponse'] = _FLUSHLOCKERRESPONSE
DESCRIPTOR.message_types_by_name['LockerNotification'] = _LOCKERNOTIFICATION
DESCRIPTOR.message_types_by_name['Album'] = _ALBUM
DESCRIPTOR.message_types_by_name['AlbumSortOrder'] = _ALBUMSORTORDER
DESCRIPTOR.message_types_by_name['GetAlbumsRequest'] = _GETALBUMSREQUEST
DESCRIPTOR.message_types_by_name['GetAlbumsResponse'] = _GETALBUMSRESPONSE
DESCRIPTOR.message_types_by_name['Artist'] = _ARTIST
DESCRIPTOR.message_types_by_name['ArtistSortOrder'] = _ARTISTSORTORDER
DESCRIPTOR.message_types_by_name['GetArtistsRequest'] = _GETARTISTSREQUEST
DESCRIPTOR.message_types_by_name['GetArtistsResponse'] = _GETARTISTSRESPONSE
DESCRIPTOR.message_types_by_name['MusicGenre'] = _MUSICGENRE
DESCRIPTOR.message_types_by_name['GenreSortOrder'] = _GENRESORTORDER
DESCRIPTOR.message_types_by_name['GetGenresRequest'] = _GETGENRESREQUEST
DESCRIPTOR.message_types_by_name['GetGenresResponse'] = _GETGENRESRESPONSE
DESCRIPTOR.message_types_by_name['GetDynamicPlaylistEntriesRequest'] = _GETDYNAMICPLAYLISTENTRIESREQUEST
DESCRIPTOR.message_types_by_name['GetDynamicPlaylistEntriesResponse'] = _GETDYNAMICPLAYLISTENTRIESRESPONSE
DESCRIPTOR.message_types_by_name['GetAggregationsByTrackTypeRequest'] = _GETAGGREGATIONSBYTRACKTYPEREQUEST
DESCRIPTOR.message_types_by_name['TrackTypeAggregate'] = _TRACKTYPEAGGREGATE
DESCRIPTOR.message_types_by_name['GetAggregationsByTrackTypeResponse'] = _GETAGGREGATIONSBYTRACKTYPERESPONSE
DESCRIPTOR.message_types_by_name['GetAggregationsByAvailabilityStatusRequest'] = _GETAGGREGATIONSBYAVAILABILITYSTATUSREQUEST
DESCRIPTOR.message_types_by_name['AvailabilityStatusAggregate'] = _AVAILABILITYSTATUSAGGREGATE
DESCRIPTOR.message_types_by_name['GetAggregationsByAvailabilityStatusResponse'] = _GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE
DESCRIPTOR.message_types_by_name['AddPromoTracksRequest'] = _ADDPROMOTRACKSREQUEST
DESCRIPTOR.message_types_by_name['AddPromoTracksResponse'] = _ADDPROMOTRACKSRESPONSE
DESCRIPTOR.message_types_by_name['GetPlaylistAggregationsRequest'] = _GETPLAYLISTAGGREGATIONSREQUEST
DESCRIPTOR.message_types_by_name['PlaylistAggregate'] = _PLAYLISTAGGREGATE
DESCRIPTOR.message_types_by_name['GetPlaylistAggregationsResponse'] = _GETPLAYLISTAGGREGATIONSRESPONSE
DESCRIPTOR.message_types_by_name['RemoteControlCommandRequest'] = _REMOTECONTROLCOMMANDREQUEST
DESCRIPTOR.message_types_by_name['RemoteControlCommandResponse'] = _REMOTECONTROLCOMMANDRESPONSE
class AudioRef(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _AUDIOREF
# @@protoc_insertion_point(class_scope:AudioRef)
class ImageRef(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _IMAGEREF
# @@protoc_insertion_point(class_scope:ImageRef)
class UploadedUitsId3Tag(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _UPLOADEDUITSID3TAG
# @@protoc_insertion_point(class_scope:UploadedUitsId3Tag)
class Track(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACK
# @@protoc_insertion_point(class_scope:Track)
class Tracks(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKS
# @@protoc_insertion_point(class_scope:Tracks)
class Playlist(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLIST
# @@protoc_insertion_point(class_scope:Playlist)
class PlaylistEntry(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLISTENTRY
# @@protoc_insertion_point(class_scope:PlaylistEntry)
class TrackSearchRestriction(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKSEARCHRESTRICTION
# @@protoc_insertion_point(class_scope:TrackSearchRestriction)
class TrackSearchRestrictionSet(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKSEARCHRESTRICTIONSET
# @@protoc_insertion_point(class_scope:TrackSearchRestrictionSet)
class TrackSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKSORTORDER
# @@protoc_insertion_point(class_scope:TrackSortOrder)
class GetTracksRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETTRACKSREQUEST
# @@protoc_insertion_point(class_scope:GetTracksRequest)
class GetTracksResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETTRACKSRESPONSE
# @@protoc_insertion_point(class_scope:GetTracksResponse)
class GetPlaylistEntriesRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTENTRIESREQUEST
# @@protoc_insertion_point(class_scope:GetPlaylistEntriesRequest)
class GetPlaylistEntriesResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTENTRIESRESPONSE
# @@protoc_insertion_point(class_scope:GetPlaylistEntriesResponse)
class PlaylistSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLISTSORTORDER
# @@protoc_insertion_point(class_scope:PlaylistSortOrder)
class GetPlaylistsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTSREQUEST
# @@protoc_insertion_point(class_scope:GetPlaylistsRequest)
class GetPlaylistsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTSRESPONSE
# @@protoc_insertion_point(class_scope:GetPlaylistsResponse)
class LookupTrackRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOOKUPTRACKREQUEST
# @@protoc_insertion_point(class_scope:LookupTrackRequest)
class LookupPlaylistEntryRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOOKUPPLAYLISTENTRYREQUEST
# @@protoc_insertion_point(class_scope:LookupPlaylistEntryRequest)
class LookupPlaylistRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOOKUPPLAYLISTREQUEST
# @@protoc_insertion_point(class_scope:LookupPlaylistRequest)
class BatchLookupRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHLOOKUPREQUEST
# @@protoc_insertion_point(class_scope:BatchLookupRequest)
class BatchLookupResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHLOOKUPRESPONSE
# @@protoc_insertion_point(class_scope:BatchLookupResponse)
class MutateTrackRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATETRACKREQUEST
# @@protoc_insertion_point(class_scope:MutateTrackRequest)
class MutateResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATERESPONSE
# @@protoc_insertion_point(class_scope:MutateResponse)
class BatchMutateTracksRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATETRACKSREQUEST
# @@protoc_insertion_point(class_scope:BatchMutateTracksRequest)
class BatchMutateTracksResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATETRACKSRESPONSE
# @@protoc_insertion_point(class_scope:BatchMutateTracksResponse)
class MutatePlaylistRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATEPLAYLISTREQUEST
# @@protoc_insertion_point(class_scope:MutatePlaylistRequest)
class BatchMutatePlaylistsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTSREQUEST
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistsRequest)
class BatchMutatePlaylistsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTSRESPONSE
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistsResponse)
class MutatePlaylistEntryRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUTATEPLAYLISTENTRYREQUEST
# @@protoc_insertion_point(class_scope:MutatePlaylistEntryRequest)
class BatchMutatePlaylistEntriesRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTENTRIESREQUEST
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistEntriesRequest)
class BatchMutatePlaylistEntriesResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _BATCHMUTATEPLAYLISTENTRIESRESPONSE
# @@protoc_insertion_point(class_scope:BatchMutatePlaylistEntriesResponse)
class MagicPlaylistSeed(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MAGICPLAYLISTSEED
# @@protoc_insertion_point(class_scope:MagicPlaylistSeed)
class MagicPlaylistRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MAGICPLAYLISTREQUEST
# @@protoc_insertion_point(class_scope:MagicPlaylistRequest)
class MagicPlaylistResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MAGICPLAYLISTRESPONSE
# @@protoc_insertion_point(class_scope:MagicPlaylistResponse)
class FlushLockerRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _FLUSHLOCKERREQUEST
# @@protoc_insertion_point(class_scope:FlushLockerRequest)
class FlushLockerResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _FLUSHLOCKERRESPONSE
# @@protoc_insertion_point(class_scope:FlushLockerResponse)
class LockerNotification(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _LOCKERNOTIFICATION
# @@protoc_insertion_point(class_scope:LockerNotification)
class Album(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ALBUM
# @@protoc_insertion_point(class_scope:Album)
class AlbumSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ALBUMSORTORDER
# @@protoc_insertion_point(class_scope:AlbumSortOrder)
class GetAlbumsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETALBUMSREQUEST
# @@protoc_insertion_point(class_scope:GetAlbumsRequest)
class GetAlbumsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETALBUMSRESPONSE
# @@protoc_insertion_point(class_scope:GetAlbumsResponse)
class Artist(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ARTIST
# @@protoc_insertion_point(class_scope:Artist)
class ArtistSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ARTISTSORTORDER
# @@protoc_insertion_point(class_scope:ArtistSortOrder)
class GetArtistsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETARTISTSREQUEST
# @@protoc_insertion_point(class_scope:GetArtistsRequest)
class GetArtistsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETARTISTSRESPONSE
# @@protoc_insertion_point(class_scope:GetArtistsResponse)
class MusicGenre(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _MUSICGENRE
# @@protoc_insertion_point(class_scope:MusicGenre)
class GenreSortOrder(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GENRESORTORDER
# @@protoc_insertion_point(class_scope:GenreSortOrder)
class GetGenresRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETGENRESREQUEST
# @@protoc_insertion_point(class_scope:GetGenresRequest)
class GetGenresResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETGENRESRESPONSE
# @@protoc_insertion_point(class_scope:GetGenresResponse)
class GetDynamicPlaylistEntriesRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETDYNAMICPLAYLISTENTRIESREQUEST
# @@protoc_insertion_point(class_scope:GetDynamicPlaylistEntriesRequest)
class GetDynamicPlaylistEntriesResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETDYNAMICPLAYLISTENTRIESRESPONSE
# @@protoc_insertion_point(class_scope:GetDynamicPlaylistEntriesResponse)
class GetAggregationsByTrackTypeRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYTRACKTYPEREQUEST
# @@protoc_insertion_point(class_scope:GetAggregationsByTrackTypeRequest)
class TrackTypeAggregate(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _TRACKTYPEAGGREGATE
# @@protoc_insertion_point(class_scope:TrackTypeAggregate)
class GetAggregationsByTrackTypeResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYTRACKTYPERESPONSE
# @@protoc_insertion_point(class_scope:GetAggregationsByTrackTypeResponse)
class GetAggregationsByAvailabilityStatusRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYAVAILABILITYSTATUSREQUEST
# @@protoc_insertion_point(class_scope:GetAggregationsByAvailabilityStatusRequest)
class AvailabilityStatusAggregate(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _AVAILABILITYSTATUSAGGREGATE
# @@protoc_insertion_point(class_scope:AvailabilityStatusAggregate)
class GetAggregationsByAvailabilityStatusResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETAGGREGATIONSBYAVAILABILITYSTATUSRESPONSE
# @@protoc_insertion_point(class_scope:GetAggregationsByAvailabilityStatusResponse)
class AddPromoTracksRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ADDPROMOTRACKSREQUEST
# @@protoc_insertion_point(class_scope:AddPromoTracksRequest)
class AddPromoTracksResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _ADDPROMOTRACKSRESPONSE
# @@protoc_insertion_point(class_scope:AddPromoTracksResponse)
class GetPlaylistAggregationsRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTAGGREGATIONSREQUEST
# @@protoc_insertion_point(class_scope:GetPlaylistAggregationsRequest)
class PlaylistAggregate(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _PLAYLISTAGGREGATE
# @@protoc_insertion_point(class_scope:PlaylistAggregate)
class GetPlaylistAggregationsResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _GETPLAYLISTAGGREGATIONSRESPONSE
# @@protoc_insertion_point(class_scope:GetPlaylistAggregationsResponse)
class RemoteControlCommandRequest(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _REMOTECONTROLCOMMANDREQUEST
# @@protoc_insertion_point(class_scope:RemoteControlCommandRequest)
class RemoteControlCommandResponse(message.Message):
__metaclass__ = reflection.GeneratedProtocolMessageType
DESCRIPTOR = _REMOTECONTROLCOMMANDRESPONSE
# @@protoc_insertion_point(class_scope:RemoteControlCommandResponse)
# @@protoc_insertion_point(module_scope)
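# --- Editor's illustrative sketch (not part of the generated file) ---
# The GeneratedProtocolMessageType metaclass above builds each class's full API
# from its DESCRIPTOR, so the generated classes behave like ordinary protobuf
# messages. A minimal, hypothetical usage sketch (assumes the protobuf runtime
# this file was generated for):
def _example_generated_message_usage():
    track = Track()                    # empty message built from _TRACK
    payload = track.SerializeToString()
    parsed = Track()
    parsed.ParseFromString(payload)    # round-trips the serialized bytes
    return parsed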
| true | true |
f71acc6d33db887796f3bf9b80851bc5ea533057 | 1,180 | py | Python | deeplmodel/source/wer.py | Haftom2323/AMH-STT | b0292a6c704b3b94eff7a536a4da04f905cb42fb | [
"MIT"
] | 1 | 2022-03-13T19:49:39.000Z | 2022-03-13T19:49:39.000Z | deeplmodel/source/wer.py | eyerus21/AMH-STT | b0292a6c704b3b94eff7a536a4da04f905cb42fb | [
"MIT"
] | null | null | null | deeplmodel/source/wer.py | eyerus21/AMH-STT | b0292a6c704b3b94eff7a536a4da04f905cb42fb | [
"MIT"
] | 11 | 2021-08-02T19:29:47.000Z | 2022-03-13T17:25:17.000Z | def wer(r, h):
"""
Calculation of WER with Levenshtein distance.
Works only for iterables up to 254 elements (uint8).
    O(nm) time and space complexity.
Parameters
----------
r : list
h : list
Returns
-------
int
Examples
--------
>>> wer("who is there".split(), "is there".split())
1
>>> wer("who is there".split(), "".split())
3
>>> wer("".split(), "who is there".split())
3
"""
# initialisation
import numpy
d = numpy.zeros((len(r)+1)*(len(h)+1), dtype=numpy.uint8)
d = d.reshape((len(r)+1, len(h)+1))
for i in range(len(r)+1):
for j in range(len(h)+1):
if i == 0:
d[0][j] = j
elif j == 0:
d[i][0] = i
# computation
for i in range(1, len(r)+1):
for j in range(1, len(h)+1):
if r[i-1] == h[j-1]:
d[i][j] = d[i-1][j-1]
else:
substitution = d[i-1][j-1] + 1
insertion = d[i][j-1] + 1
deletion = d[i-1][j] + 1
d[i][j] = min(substitution, insertion, deletion)
return d[len(r)][len(h)] | 24.583333 | 64 | 0.440678 | def wer(r, h):
import numpy
d = numpy.zeros((len(r)+1)*(len(h)+1), dtype=numpy.uint8)
d = d.reshape((len(r)+1, len(h)+1))
for i in range(len(r)+1):
for j in range(len(h)+1):
if i == 0:
d[0][j] = j
elif j == 0:
d[i][0] = i
for i in range(1, len(r)+1):
for j in range(1, len(h)+1):
if r[i-1] == h[j-1]:
d[i][j] = d[i-1][j-1]
else:
substitution = d[i-1][j-1] + 1
insertion = d[i][j-1] + 1
deletion = d[i-1][j] + 1
d[i][j] = min(substitution, insertion, deletion)
return d[len(r)][len(h)] | true | true |
f71acdd0b906e1300a3decc62a833ed0cf01a8fa | 7,182 | py | Python | club_crm/club_crm/report/fitness_commission_summary/fitness_commission_summary.py | VivekChamp/clubcrm | 82036360d867d3dc5406bc71445a98841b5bffbf | [
"MIT"
] | null | null | null | club_crm/club_crm/report/fitness_commission_summary/fitness_commission_summary.py | VivekChamp/clubcrm | 82036360d867d3dc5406bc71445a98841b5bffbf | [
"MIT"
] | null | null | null | club_crm/club_crm/report/fitness_commission_summary/fitness_commission_summary.py | VivekChamp/clubcrm | 82036360d867d3dc5406bc71445a98841b5bffbf | [
"MIT"
] | null | null | null | # Copyright (c) 2013, Blue Lynx and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
import math
from frappe.utils import getdate, get_time, flt
from datetime import datetime, timedelta, date, time
import calendar
def execute(filters=None):
columns, data = [], []
if filters:
columns = get_column()
data = get_data(filters)
return columns, data
def get_column():
columns = [
{
"label": "Staff Name",
"fieldname": "staff_name",
"fieldtype": "Data",
"width": 120
},
{
"label": "PT Count (Hours)",
"fieldname": "pt_count",
"fieldtype": "Data",
"width": 120
},
{
"label": "GX Count (Hours)",
"fieldname": "gx_count",
"fieldtype": "Data",
"width": 120
},
{
"label": "Others (Hours)",
"fieldname": "ot_count",
"fieldtype": "Data",
"width": 120
},
{
"label": "PT Commissions",
"fieldname": "pt_commission",
"fieldtype": "Currency",
"width": 150
},
{
"label": "GX Commissions",
"fieldname": "gc_commission",
"fieldtype": "Currency",
"width": 150
},
{
"label": "Other Commissions",
"fieldname": "other_commission",
"fieldtype": "Currency",
"width": 150,
"default": 0.0
},
{
"label": "Total Commission",
"fieldname": "total_commission",
"fieldtype": "Currency",
"width": 150
}
]
return columns
def get_data(filters):
data = []
final_data = []
year = int(filters['year'])
if 'date_range' in filters:
if filters['date_range'] == "Month":
month = filters['month']
month_number = int(datetime.strptime(month, '%B').month)
last_day = calendar.monthrange(year, month_number)[1]
start_date = datetime(year, month_number, 1)
start = start_date.date()
end_date = datetime(year, month_number, last_day)
end = end_date.date()
elif filters['date_range'] == "Custom Range":
start = getdate(filters['from_date'])
			end = getdate(filters['to_date'])
if 'service_staff' in filters:
staff_list = frappe.get_all('Service Staff', filters={'name': filters['service_staff']})
else:
staff_list = frappe.db.get_list('Service Staff', filters=[['fitness_check', '=', 1]], fields=['name'])
settings = frappe.get_doc('Fitness Training Settings')
if staff_list:
for staff in staff_list:
pt_count = 0.0
ot_count = 0.0
other_commission = 0.0
service_staff = frappe.get_doc('Service Staff', staff.name)
if service_staff.fitness_service_assignment:
for services in service_staff.fitness_service_assignment:
if services.commission_applicable:
appointments_list = frappe.db.get_list('Fitness Training Appointment', filters=[['fitness_service', '=', services.fitness_package], ['appointment_date', 'between', [start, end]], ['payment_status', '=', 'Paid'], ['service_staff', '=', staff.name], ['appointment_status', 'in', {'Completed', 'No Show'}]], fields=['name', 'fitness_service'])
if services.commission_type == "Standard":
if appointments_list:
for appointments in appointments_list:
pt_service = frappe.get_doc('Fitness Services', appointments.fitness_service)
if pt_service.session_for == "Single":
pt_count += settings.single_session
elif pt_service.session_for == "Couple":
pt_count += settings.couple_session
elif services.commission_type == "Custom":
if appointments_list:
for appointments in appointments_list:
other_commission += services.commission_amount
ot_count += 1
			staff['staff_name'] = staff.name
staff['pt_count'] = pt_count
staff['ot_count'] = ot_count
staff['other_commission'] = other_commission
gc = []
			gc_list = frappe.db.get_list('Group Class', filters=[['class_date', 'between', [start, end]], ['trainer_name', '=', staff.name], ['class_status', '=', 'Completed']], fields=['name'])  # fetch each class individually (not an aggregated count) so its attendees can be checked below
if gc_list:
for group_class in gc_list:
group_class_attendee = frappe.get_all('Group Class Attendees', filters={'group_class': group_class.name, 'attendee_status': 'Complete' })
if group_class_attendee:
if len(group_class_attendee) >= 3:
gc.append(group_class)
staff['gx_count'] = len(gc)
data.append(staff)
for row in data:
row['gc_commission'] = float(row['gx_count']) * float(settings.group_class_rate)
pt = calculate_pt(row['pt_count'], row['gx_count'])
row['pt_commission'] = pt
row['total_commission'] = row['gc_commission'] + row['pt_commission'] + row['other_commission']
final_data.append(row)
return final_data
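# Editor's illustrative sketch (not part of the original report): how the tiered
# scale inside calculate_pt() below works out for a trainer with 35 PT hours and
# 5 GX hours. The bracket rates (40/60/80/... per hour) are the ones in the
# scale dict defined in calculate_pt().
def _example_pt_commission():
	# 40 total hours -> 30 hours at 40 + 10 hours at 60 = 1800 for all sessions,
	# minus the GX-only portion (5 hours at 40 = 200) -> 1600 attributed to PT.
	return calculate_pt(pt_count=35.0, gx_count=5.0)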
def month():
year = 2021
months = 'July'
month_number = datetime.strptime(months, '%B').month
last_day = calendar.monthrange(year, month_number)[1]
start_date = datetime(year, month_number, 1)
start = start_date.date()
end_date = datetime(year, month_number, last_day)
end = end_date.date()
staff_list = frappe.db.get_list('Service Staff', filters=[['fitness_check', '=', 1]], fields=['name'])
for staff in staff_list:
gc_list = frappe.db.get_list('Group Class', filters=[['class_date', 'between', [start, end]], ['trainer_name', '=', 'Jatinder'], ['class_status', '=', 'Completed']], fields=['count(name) as gc_count'], group_by='trainer_name')
for gc in gc_list:
return type(gc.gc_count)
@frappe.whitelist()
def calculate_pt(pt_count, gx_count):
total_count = pt_count + gx_count
scale = {(0, 30): 40, (30, 60): 60, (60, 90): 80, (90, 120): 100, (120, 150): 120, (150, math.inf): 140}
hours_worked = total_count
decimal_rate = next(rate for (lower, upper), rate in scale.items() if lower <= hours_worked and upper >= hours_worked)
decimal_end = hours_worked - int(hours_worked)
end_pay = decimal_end * decimal_rate
# Use an integer for ease of calculation
hours_worked = int(hours_worked)
hours_paid_for = 0
# Beginning total pay is just the decimal "ending"
total_pay = end_pay
while hours_paid_for < hours_worked:
# Find the rate for the current bucket of hours
rate_filter = (rate for (lower, upper), rate in scale.items() if lower <= hours_paid_for and hours_paid_for < upper)
current_level = next(rate_filter)
total_pay += current_level
hours_paid_for += 1
total_session = total_pay
scale_1 = {(0, 30): 40, (30, 60): 60, (60, 90): 80, (90, 120): 100, (120, 150): 120, (150, math.inf): 140}
hours_worked_1 = gx_count
decimal_rate_1 = next(rate for (lower, upper), rate in scale_1.items() if lower <= hours_worked_1 and upper >= hours_worked_1)
decimal_end_1 = hours_worked_1 - int(hours_worked_1)
end_pay_1 = decimal_end_1 * decimal_rate_1
# Use an integer for ease of calculation
hours_worked_1 = int(hours_worked_1)
hours_paid_for_1 = 0
# Beginning total pay is just the decimal "ending"
total_pay_1 = end_pay_1
while hours_paid_for_1 < hours_worked_1:
# Find the rate for the current bucket of hours
rate_filter = (rate for (lower, upper), rate in scale_1.items() if lower <= hours_paid_for_1 and hours_paid_for_1 < upper)
current_level = next(rate_filter)
total_pay_1 += current_level
hours_paid_for_1 += 1
total_gc = total_pay_1
commission_from_pt = total_session - total_gc
return commission_from_pt | 31.778761 | 346 | 0.690058 |
from __future__ import unicode_literals
import frappe
from frappe import _
import math
from frappe.utils import getdate, get_time, flt
from datetime import datetime, timedelta, date, time
import calendar
def execute(filters=None):
columns, data = [], []
if filters:
columns = get_column()
data = get_data(filters)
return columns, data
def get_column():
columns = [
{
"label": "Staff Name",
"fieldname": "staff_name",
"fieldtype": "Data",
"width": 120
},
{
"label": "PT Count (Hours)",
"fieldname": "pt_count",
"fieldtype": "Data",
"width": 120
},
{
"label": "GX Count (Hours)",
"fieldname": "gx_count",
"fieldtype": "Data",
"width": 120
},
{
"label": "Others (Hours)",
"fieldname": "ot_count",
"fieldtype": "Data",
"width": 120
},
{
"label": "PT Commissions",
"fieldname": "pt_commission",
"fieldtype": "Currency",
"width": 150
},
{
"label": "GX Commissions",
"fieldname": "gc_commission",
"fieldtype": "Currency",
"width": 150
},
{
"label": "Other Commissions",
"fieldname": "other_commission",
"fieldtype": "Currency",
"width": 150,
"default": 0.0
},
{
"label": "Total Commission",
"fieldname": "total_commission",
"fieldtype": "Currency",
"width": 150
}
]
return columns
def get_data(filters):
data = []
final_data = []
year = int(filters['year'])
if 'date_range' in filters:
if filters['date_range'] == "Month":
month = filters['month']
month_number = int(datetime.strptime(month, '%B').month)
last_day = calendar.monthrange(year, month_number)[1]
start_date = datetime(year, month_number, 1)
start = start_date.date()
end_date = datetime(year, month_number, last_day)
end = end_date.date()
elif filters['date_range'] == "Custom Range":
start = getdate(filters['from_date'])
			end = getdate(filters['to_date'])
if 'service_staff' in filters:
staff_list = frappe.get_all('Service Staff', filters={'name': filters['service_staff']})
else:
staff_list = frappe.db.get_list('Service Staff', filters=[['fitness_check', '=', 1]], fields=['name'])
settings = frappe.get_doc('Fitness Training Settings')
if staff_list:
for staff in staff_list:
pt_count = 0.0
ot_count = 0.0
other_commission = 0.0
service_staff = frappe.get_doc('Service Staff', staff.name)
if service_staff.fitness_service_assignment:
for services in service_staff.fitness_service_assignment:
if services.commission_applicable:
appointments_list = frappe.db.get_list('Fitness Training Appointment', filters=[['fitness_service', '=', services.fitness_package], ['appointment_date', 'between', [start, end]], ['payment_status', '=', 'Paid'], ['service_staff', '=', staff.name], ['appointment_status', 'in', {'Completed', 'No Show'}]], fields=['name', 'fitness_service'])
if services.commission_type == "Standard":
if appointments_list:
for appointments in appointments_list:
pt_service = frappe.get_doc('Fitness Services', appointments.fitness_service)
if pt_service.session_for == "Single":
pt_count += settings.single_session
elif pt_service.session_for == "Couple":
pt_count += settings.couple_session
elif services.commission_type == "Custom":
if appointments_list:
for appointments in appointments_list:
other_commission += services.commission_amount
ot_count += 1
			staff['staff_name'] = staff.name
staff['pt_count'] = pt_count
staff['ot_count'] = ot_count
staff['other_commission'] = other_commission
gc = []
			gc_list = frappe.db.get_list('Group Class', filters=[['class_date', 'between', [start, end]], ['trainer_name', '=', staff.name], ['class_status', '=', 'Completed']], fields=['name'])
if gc_list:
for group_class in gc_list:
group_class_attendee = frappe.get_all('Group Class Attendees', filters={'group_class': group_class.name, 'attendee_status': 'Complete' })
if group_class_attendee:
if len(group_class_attendee) >= 3:
gc.append(group_class)
staff['gx_count'] = len(gc)
data.append(staff)
for row in data:
row['gc_commission'] = float(row['gx_count']) * float(settings.group_class_rate)
pt = calculate_pt(row['pt_count'], row['gx_count'])
row['pt_commission'] = pt
row['total_commission'] = row['gc_commission'] + row['pt_commission'] + row['other_commission']
final_data.append(row)
return final_data
def month():
year = 2021
months = 'July'
month_number = datetime.strptime(months, '%B').month
last_day = calendar.monthrange(year, month_number)[1]
start_date = datetime(year, month_number, 1)
start = start_date.date()
end_date = datetime(year, month_number, last_day)
end = end_date.date()
staff_list = frappe.db.get_list('Service Staff', filters=[['fitness_check', '=', 1]], fields=['name'])
for staff in staff_list:
gc_list = frappe.db.get_list('Group Class', filters=[['class_date', 'between', [start, end]], ['trainer_name', '=', 'Jatinder'], ['class_status', '=', 'Completed']], fields=['count(name) as gc_count'], group_by='trainer_name')
for gc in gc_list:
return type(gc.gc_count)
@frappe.whitelist()
def calculate_pt(pt_count, gx_count):
total_count = pt_count + gx_count
scale = {(0, 30): 40, (30, 60): 60, (60, 90): 80, (90, 120): 100, (120, 150): 120, (150, math.inf): 140}
hours_worked = total_count
decimal_rate = next(rate for (lower, upper), rate in scale.items() if lower <= hours_worked and upper >= hours_worked)
decimal_end = hours_worked - int(hours_worked)
end_pay = decimal_end * decimal_rate
hours_worked = int(hours_worked)
hours_paid_for = 0
total_pay = end_pay
while hours_paid_for < hours_worked:
rate_filter = (rate for (lower, upper), rate in scale.items() if lower <= hours_paid_for and hours_paid_for < upper)
current_level = next(rate_filter)
total_pay += current_level
hours_paid_for += 1
total_session = total_pay
scale_1 = {(0, 30): 40, (30, 60): 60, (60, 90): 80, (90, 120): 100, (120, 150): 120, (150, math.inf): 140}
hours_worked_1 = gx_count
decimal_rate_1 = next(rate for (lower, upper), rate in scale_1.items() if lower <= hours_worked_1 and upper >= hours_worked_1)
decimal_end_1 = hours_worked_1 - int(hours_worked_1)
end_pay_1 = decimal_end_1 * decimal_rate_1
hours_worked_1 = int(hours_worked_1)
hours_paid_for_1 = 0
total_pay_1 = end_pay_1
while hours_paid_for_1 < hours_worked_1:
rate_filter = (rate for (lower, upper), rate in scale_1.items() if lower <= hours_paid_for_1 and hours_paid_for_1 < upper)
current_level = next(rate_filter)
total_pay_1 += current_level
hours_paid_for_1 += 1
total_gc = total_pay_1
commission_from_pt = total_session - total_gc
return commission_from_pt | true | true |
f71ace2a9df90effa5053c4c417c48be91c319fc | 1,241 | py | Python | setup.py | larsrollik/serial_weighing_scale | 312218cbbb6b84b011d83980b3df6e0e99b36e50 | [
"BSD-3-Clause"
] | null | null | null | setup.py | larsrollik/serial_weighing_scale | 312218cbbb6b84b011d83980b3df6e0e99b36e50 | [
"BSD-3-Clause"
] | null | null | null | setup.py | larsrollik/serial_weighing_scale | 312218cbbb6b84b011d83980b3df6e0e99b36e50 | [
"BSD-3-Clause"
] | null | null | null | from os import path
from setuptools import find_packages
from setuptools import setup
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, "README.md")) as f:
long_description = f.read()
with open(path.join(this_directory, "LICENSE")) as f:
license_text = f.read()
setup(
name="serial_weighing_scale",
version="0.0.6",
description="serial_weighing_scale",
long_description=long_description,
long_description_content_type="text/markdown",
python_requires=">=3.6",
packages=find_packages(),
url="https://github.com/larsrollik/SerialWeighingScale",
author="Lars B. Rollik",
author_email="L.B.Rollik@protonmail.com",
license=license_text,
install_requires=[
"pyserial",
],
extras_require={
"dev": [
"black",
"pytest-cov",
"pytest",
"gitpython",
"coverage>=5.0.3",
"bump2version",
"pre-commit",
"flake8",
],
},
zip_safe=False,
include_package_data=True,
# entry_points={
# "console_scripts": [
# "console_script_name = module.path.to.function:function_name",
# ],
# },
)
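# Editor's note (illustrative, not part of the original file): with the "dev"
# extra declared above, a local editable install would look like:
#   pip install -e ".[dev]"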
| 24.82 | 76 | 0.611604 | from os import path
from setuptools import find_packages
from setuptools import setup
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, "README.md")) as f:
long_description = f.read()
with open(path.join(this_directory, "LICENSE")) as f:
license_text = f.read()
setup(
name="serial_weighing_scale",
version="0.0.6",
description="serial_weighing_scale",
long_description=long_description,
long_description_content_type="text/markdown",
python_requires=">=3.6",
packages=find_packages(),
url="https://github.com/larsrollik/SerialWeighingScale",
author="Lars B. Rollik",
author_email="L.B.Rollik@protonmail.com",
license=license_text,
install_requires=[
"pyserial",
],
extras_require={
"dev": [
"black",
"pytest-cov",
"pytest",
"gitpython",
"coverage>=5.0.3",
"bump2version",
"pre-commit",
"flake8",
],
},
zip_safe=False,
include_package_data=True,
)
| true | true |
f71ace2c76abb44e4261efab937f353dece55020 | 418 | py | Python | mrp_system/migrations/0037_billofmaterials_amount.py | mgeorge8/django_time | f75a442941b0ebbb6cc46a6d18e42b91695b7e57 | [
"MIT"
] | 1 | 2018-11-09T02:09:14.000Z | 2018-11-09T02:09:14.000Z | mrp_system/migrations/0037_billofmaterials_amount.py | mgeorge8/django_time | f75a442941b0ebbb6cc46a6d18e42b91695b7e57 | [
"MIT"
] | null | null | null | mrp_system/migrations/0037_billofmaterials_amount.py | mgeorge8/django_time | f75a442941b0ebbb6cc46a6d18e42b91695b7e57 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.2 on 2019-01-11 14:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mrp_system', '0036_auto_20190111_1357'),
]
operations = [
migrations.AddField(
model_name='billofmaterials',
name='amount',
field=models.IntegerField(blank=True, default=1, null=True),
),
]
| 22 | 72 | 0.614833 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('mrp_system', '0036_auto_20190111_1357'),
]
operations = [
migrations.AddField(
model_name='billofmaterials',
name='amount',
field=models.IntegerField(blank=True, default=1, null=True),
),
]
| true | true |
f71acf1492f4b14baf2359d08fc5b2e0b4e5994f | 56,230 | py | Python | src/transformers/modeling_t5.py | kushalj001/transformers | 0538820737bd8fb9ba1eb3a772412c6bbe2433ab | [
"Apache-2.0"
] | 1 | 2020-10-30T09:05:17.000Z | 2020-10-30T09:05:17.000Z | src/transformers/modeling_t5.py | kushalj001/transformers | 0538820737bd8fb9ba1eb3a772412c6bbe2433ab | [
"Apache-2.0"
] | null | null | null | src/transformers/modeling_t5.py | kushalj001/transformers | 0538820737bd8fb9ba1eb3a772412c6bbe2433ab | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 Mesh TensorFlow authors, T5 Authors and HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PyTorch T5 model. """
import copy
import math
import os
import warnings
import torch
import torch.nn.functional as F
from torch import nn
from torch.nn import CrossEntropyLoss
from .configuration_t5 import T5Config
from .file_utils import (
DUMMY_INPUTS,
DUMMY_MASK,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from .modeling_outputs import BaseModelOutput, BaseModelOutputWithPast, Seq2SeqLMOutput, Seq2SeqModelOutput
from .modeling_utils import PreTrainedModel, find_pruneable_heads_and_indices, prune_linear_layer
from .utils import logging
logger = logging.get_logger(__name__)
_CONFIG_FOR_DOC = "T5Config"
_TOKENIZER_FOR_DOC = "T5Tokenizer"
####################################################
# This dict contains shortcut names and associated url
# for the pretrained weights provided with the models
####################################################
T5_PRETRAINED_MODEL_ARCHIVE_LIST = [
"t5-small",
"t5-base",
"t5-large",
"t5-3b",
"t5-11b",
# See all T5 models at https://huggingface.co/models?filter=t5
]
####################################################
# This is a conversion method from TF 1.0 to PyTorch
# More details: https://medium.com/huggingface/from-tensorflow-to-pytorch-265f40ef2a28
####################################################
def load_tf_weights_in_t5(model, config, tf_checkpoint_path):
"""Load tf checkpoints in a pytorch model."""
try:
import re
import numpy as np
import tensorflow as tf
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
"https://www.tensorflow.org/install/ for installation instructions."
)
raise
tf_path = os.path.abspath(tf_checkpoint_path)
logger.info("Converting TensorFlow checkpoint from {}".format(tf_path))
# Load weights from TF model
init_vars = tf.train.list_variables(tf_path)
names = []
tf_weights = {}
for name, shape in init_vars:
logger.info("Loading TF weight {} with shape {}".format(name, shape))
array = tf.train.load_variable(tf_path, name)
names.append(name)
tf_weights[name] = array
for txt_name in names:
name = txt_name.split("/")
        # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculate m and v
# which are not required for using pretrained model
if any(
n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"]
for n in name
):
logger.info("Skipping {}".format("/".join(name)))
tf_weights.pop(txt_name, None)
continue
if "_slot_" in name[-1]:
logger.info("Skipping {}".format("/".join(name)))
tf_weights.pop(txt_name, None)
continue
pointer = model
array = tf_weights[txt_name]
for m_name in name:
if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
scope_names = re.split(r"_(\d+)", m_name)
else:
scope_names = [m_name]
if scope_names[0] in ["kernel", "scale", "embedding"]:
pointer = getattr(pointer, "weight")
# elif scope_names[0] == 'scale':
# pointer = getattr(pointer, 'weight')
# elif scope_names[0] == 'output_bias' or scope_names[0] == 'beta':
# pointer = getattr(pointer, 'bias')
# elif scope_names[0] == 'squad':
# pointer = getattr(pointer, 'classifier')
else:
try:
pointer = getattr(pointer, scope_names[0])
except AttributeError:
logger.info("Skipping {}".format("/".join(name)))
continue
if len(scope_names) >= 2:
num = int(scope_names[1])
pointer = pointer[num]
if scope_names[0] not in ["kernel", "scale", "embedding"]:
pointer = getattr(pointer, "weight")
if scope_names[0] != "embedding":
logger.info("Transposing numpy weight of shape {} for {}".format(array.shape, name))
array = np.transpose(array)
try:
assert (
pointer.shape == array.shape
), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched"
except AssertionError as e:
e.args += (pointer.shape, array.shape)
raise
logger.info("Initialize PyTorch weight {}".format(name))
pointer.data = torch.from_numpy(array.astype(np.float32))
tf_weights.pop(txt_name, None)
logger.info("Weights not copied to PyTorch model: {}".format(", ".join(tf_weights.keys())))
return model
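# --- Editor's illustrative sketch (not part of the original module) ---
# Typical use of the converter above; the checkpoint path is a placeholder, not
# a real file.
def _example_load_tf_checkpoint():
    config = T5Config.from_pretrained("t5-small")
    model = T5ForConditionalGeneration(config)  # defined later in this module
    # Copies the TF variables into the freshly initialized PyTorch parameters.
    return load_tf_weights_in_t5(model, config, "/path/to/tf_checkpoint")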
####################################################
# PyTorch Models are constructed by sub-classing
# - torch.nn.Module for the layers and
# - PreTrainedModel for the models (it-self a sub-class of torch.nn.Module)
####################################################
class T5LayerNorm(nn.Module):
def __init__(self, hidden_size, eps=1e-6):
"""
        Construct a layernorm module in the T5 style. No bias and no subtraction of mean.
"""
super().__init__()
self.weight = nn.Parameter(torch.ones(hidden_size))
self.variance_epsilon = eps
def forward(self, x):
# layer norm should always be calculated in float32
variance = x.to(torch.float32).pow(2).mean(-1, keepdim=True)
x = x / torch.sqrt(variance + self.variance_epsilon)
if self.weight.dtype == torch.float16:
x = x.to(torch.float16)
return self.weight * x
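# Editor's sketch (illustrative): T5LayerNorm is an RMS-style norm, with no mean
# subtraction and no bias. The tiny shapes below are arbitrary.
def _example_t5_layer_norm():
    ln = T5LayerNorm(hidden_size=4)
    x = torch.randn(2, 3, 4)
    # Each feature vector is scaled by 1/sqrt(mean(x**2) + eps), then by `weight`.
    return ln(x).shape  # torch.Size([2, 3, 4])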
class T5DenseReluDense(nn.Module):
def __init__(self, config):
super().__init__()
self.wi = nn.Linear(config.d_model, config.d_ff, bias=False)
self.wo = nn.Linear(config.d_ff, config.d_model, bias=False)
self.dropout = nn.Dropout(config.dropout_rate)
def forward(self, hidden_states):
h = self.wi(hidden_states)
h = F.relu(h)
h = self.dropout(h)
h = self.wo(h)
return h
class T5LayerFF(nn.Module):
def __init__(self, config):
super().__init__()
self.DenseReluDense = T5DenseReluDense(config)
self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
self.dropout = nn.Dropout(config.dropout_rate)
def forward(self, hidden_states):
norm_x = self.layer_norm(hidden_states)
y = self.DenseReluDense(norm_x)
layer_output = hidden_states + self.dropout(y)
return layer_output
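# Editor's sketch (illustrative): every T5 sub-layer uses this pre-norm residual
# pattern, y = x + dropout(sublayer(layer_norm(x))). Config values are arbitrary.
def _example_pre_norm_residual():
    config = T5Config(d_model=16, d_ff=32, dropout_rate=0.0)
    ff = T5LayerFF(config)
    x = torch.randn(2, 5, 16)
    return ff(x).shape  # torch.Size([2, 5, 16])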
class T5Attention(nn.Module):
def __init__(self, config: T5Config, has_relative_attention_bias=False, is_bidirectional=False):
super().__init__()
self.is_bidirectional = is_bidirectional
self.is_decoder = config.is_decoder
self.has_relative_attention_bias = has_relative_attention_bias
self.relative_attention_num_buckets = config.relative_attention_num_buckets
self.d_model = config.d_model
self.d_kv = config.d_kv
self.n_heads = config.num_heads
self.dropout = config.dropout_rate
self.inner_dim = self.n_heads * self.d_kv
# Mesh TensorFlow initialization to avoid scaling before softmax
self.q = nn.Linear(self.d_model, self.inner_dim, bias=False)
self.k = nn.Linear(self.d_model, self.inner_dim, bias=False)
self.v = nn.Linear(self.d_model, self.inner_dim, bias=False)
self.o = nn.Linear(self.inner_dim, self.d_model, bias=False)
if self.has_relative_attention_bias:
self.relative_attention_bias = nn.Embedding(self.relative_attention_num_buckets, self.n_heads)
self.pruned_heads = set()
def prune_heads(self, heads):
if len(heads) == 0:
return
heads, index = find_pruneable_heads_and_indices(heads, self.n_heads, self.d_kv, self.pruned_heads)
# Prune linear layers
self.q = prune_linear_layer(self.q, index)
self.k = prune_linear_layer(self.k, index)
self.v = prune_linear_layer(self.v, index)
self.o = prune_linear_layer(self.o, index, dim=1)
# Update hyper params
self.n_heads = self.n_heads - len(heads)
self.inner_dim = self.d_kv * self.n_heads
self.pruned_heads = self.pruned_heads.union(heads)
@staticmethod
def _relative_position_bucket(relative_position, bidirectional=True, num_buckets=32, max_distance=128):
"""
Adapted from Mesh Tensorflow:
https://github.com/tensorflow/mesh/blob/0cb87fe07da627bf0b7e60475d59f95ed6b5be3d/mesh_tensorflow/transformer/transformer_layers.py#L593
Translate relative position to a bucket number for relative attention. The relative position is defined as
memory_position - query_position, i.e. the distance in tokens from the attending position to the attended-to
position. If bidirectional=False, then positive relative positions are invalid. We use smaller buckets for
small absolute relative_position and larger buckets for larger absolute relative_positions. All relative
positions >=max_distance map to the same bucket. All relative positions <=-max_distance map to the same bucket.
        This should allow for more graceful generalization to longer sequences than the model has been trained on.
Args:
relative_position: an int32 Tensor
bidirectional: a boolean - whether the attention is bidirectional
num_buckets: an integer
max_distance: an integer
Returns:
a Tensor with the same shape as relative_position, containing int32 values in the range [0, num_buckets)
"""
ret = 0
n = -relative_position
if bidirectional:
num_buckets //= 2
ret += (n < 0).to(torch.long) * num_buckets # mtf.to_int32(mtf.less(n, 0)) * num_buckets
n = torch.abs(n)
else:
n = torch.max(n, torch.zeros_like(n))
# now n is in the range [0, inf)
# half of the buckets are for exact increments in positions
max_exact = num_buckets // 2
is_small = n < max_exact
# The other half of the buckets are for logarithmically bigger bins in positions up to max_distance
val_if_large = max_exact + (
torch.log(n.float() / max_exact) / math.log(max_distance / max_exact) * (num_buckets - max_exact)
).to(torch.long)
val_if_large = torch.min(val_if_large, torch.full_like(val_if_large, num_buckets - 1))
ret += torch.where(is_small, n, val_if_large)
return ret
def compute_bias(self, qlen, klen):
""" Compute binned relative position bias """
context_position = torch.arange(qlen, dtype=torch.long)[:, None]
memory_position = torch.arange(klen, dtype=torch.long)[None, :]
relative_position = memory_position - context_position # shape (qlen, klen)
rp_bucket = self._relative_position_bucket(
relative_position, # shape (qlen, klen)
bidirectional=self.is_bidirectional,
num_buckets=self.relative_attention_num_buckets,
)
rp_bucket = rp_bucket.to(self.relative_attention_bias.weight.device)
values = self.relative_attention_bias(rp_bucket) # shape (qlen, klen, num_heads)
values = values.permute([2, 0, 1]).unsqueeze(0) # shape (1, num_heads, qlen, klen)
return values
def forward(
self,
input,
mask=None,
kv=None,
position_bias=None,
past_key_value=None,
head_mask=None,
query_length=None,
use_cache=False,
output_attentions=False,
):
"""
Self-attention (if kv is None) or attention over source sentence (provided by kv).
"""
# Input is (bs, qlen, dim)
# Mask is (bs, klen) (non-causal) or (bs, klen, klen)
# past_key_value[0] is (bs, n_heads, q_len - 1, dim_per_head)
bs, qlen, dim = input.size()
if past_key_value is not None:
assert self.is_decoder is True, "Encoder cannot cache past key value states"
assert (
len(past_key_value) == 2
), "past_key_value should have 2 past states: keys and values. Got {} past states".format(
len(past_key_value)
)
real_qlen = qlen + past_key_value[0].shape[2] if query_length is None else query_length
else:
real_qlen = qlen
if kv is None:
klen = real_qlen
else:
klen = kv.size(1)
def shape(x):
""" projection """
return x.view(bs, -1, self.n_heads, self.d_kv).transpose(1, 2)
def unshape(x):
""" compute context """
return x.transpose(1, 2).contiguous().view(bs, -1, self.inner_dim)
q = shape(self.q(input)) # (bs, n_heads, qlen, dim_per_head)
if kv is None:
k = shape(self.k(input)) # (bs, n_heads, qlen, dim_per_head)
v = shape(self.v(input)) # (bs, n_heads, qlen, dim_per_head)
elif past_key_value is None:
k = v = kv
k = shape(self.k(k)) # (bs, n_heads, qlen, dim_per_head)
v = shape(self.v(v)) # (bs, n_heads, qlen, dim_per_head)
if past_key_value is not None:
if kv is None:
k_, v_ = past_key_value
k = torch.cat([k_, k], dim=2) # (bs, n_heads, klen, dim_per_head)
v = torch.cat([v_, v], dim=2) # (bs, n_heads, klen, dim_per_head)
else:
k, v = past_key_value
if self.is_decoder and use_cache is True:
present_key_value_state = ((k, v),)
else:
present_key_value_state = (None,)
# (bs, n_heads, qlen, klen)
scores = torch.matmul(
q, k.transpose(3, 2)
) # equivalent of torch.einsum("bnqd,bnkd->bnqk", q, k), compatible with onnx op>9
if position_bias is None:
if not self.has_relative_attention_bias:
raise ValueError("No position_bias provided and no weights to compute position_bias")
position_bias = self.compute_bias(real_qlen, klen)
# if key and values are already calculated
# we want only the last query position bias
if past_key_value is not None:
position_bias = position_bias[:, :, -qlen:, :]
if mask is not None:
position_bias = position_bias + mask # (bs, n_heads, qlen, klen)
scores += position_bias
weights = F.softmax(scores.float(), dim=-1).type_as(scores) # (bs, n_heads, qlen, klen)
weights = F.dropout(weights, p=self.dropout, training=self.training) # (bs, n_heads, qlen, klen)
# Mask heads if we want to
if head_mask is not None:
weights = weights * head_mask
context = torch.matmul(weights, v) # (bs, n_heads, qlen, dim_per_head)
context = unshape(context) # (bs, qlen, dim)
context = self.o(context)
outputs = (context,) + present_key_value_state
if output_attentions:
outputs = outputs + (weights,)
if self.has_relative_attention_bias:
outputs = outputs + (position_bias,)
return outputs
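# Editor's sketch (illustrative): how _relative_position_bucket maps offsets to
# buckets. With bidirectional=True and num_buckets=32, 16 buckets serve each
# direction: offsets with |distance| < 8 get an exact bucket, and larger offsets
# share logarithmically spaced buckets up to max_distance.
def _example_relative_position_buckets():
    relative_position = torch.arange(-4, 5).unsqueeze(0)  # memory_pos - query_pos
    return T5Attention._relative_position_bucket(
        relative_position, bidirectional=True, num_buckets=32, max_distance=128
    )  # tensor([[ 4,  3,  2,  1,  0, 17, 18, 19, 20]])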
class T5LayerSelfAttention(nn.Module):
def __init__(self, config, has_relative_attention_bias=False):
super().__init__()
self.SelfAttention = T5Attention(
config, has_relative_attention_bias=has_relative_attention_bias, is_bidirectional=not config.is_decoder
)
self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
self.dropout = nn.Dropout(config.dropout_rate)
def forward(
self,
hidden_states,
attention_mask=None,
position_bias=None,
head_mask=None,
past_key_value=None,
use_cache=False,
output_attentions=False,
):
norm_x = self.layer_norm(hidden_states)
attention_output = self.SelfAttention(
norm_x,
mask=attention_mask,
position_bias=position_bias,
head_mask=head_mask,
past_key_value=past_key_value,
use_cache=use_cache,
output_attentions=output_attentions,
)
y = attention_output[0]
layer_output = hidden_states + self.dropout(y)
outputs = (layer_output,) + attention_output[1:] # add attentions if we output them
return outputs
class T5LayerCrossAttention(nn.Module):
def __init__(self, config, has_relative_attention_bias=False):
super().__init__()
self.EncDecAttention = T5Attention(
config, has_relative_attention_bias=has_relative_attention_bias, is_bidirectional=True
)
self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
self.dropout = nn.Dropout(config.dropout_rate)
def forward(
self,
hidden_states,
kv,
attention_mask=None,
position_bias=None,
head_mask=None,
past_key_value=None,
use_cache=False,
query_length=None,
output_attentions=False,
):
norm_x = self.layer_norm(hidden_states)
attention_output = self.EncDecAttention(
norm_x,
mask=attention_mask,
kv=kv,
position_bias=position_bias,
head_mask=head_mask,
past_key_value=past_key_value,
use_cache=use_cache,
query_length=query_length,
output_attentions=output_attentions,
)
y = attention_output[0]
layer_output = hidden_states + self.dropout(y)
outputs = (layer_output,) + attention_output[1:] # add attentions if we output them
return outputs
class T5Block(nn.Module):
def __init__(self, config, has_relative_attention_bias=False):
super().__init__()
self.is_decoder = config.is_decoder
self.layer = nn.ModuleList()
self.layer.append(T5LayerSelfAttention(config, has_relative_attention_bias=has_relative_attention_bias))
if self.is_decoder:
self.layer.append(T5LayerCrossAttention(config, has_relative_attention_bias=has_relative_attention_bias))
self.layer.append(T5LayerFF(config))
def forward(
self,
hidden_states,
attention_mask=None,
position_bias=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
encoder_decoder_position_bias=None,
head_mask=None,
past_key_value=None,
use_cache=False,
output_attentions=False,
):
if past_key_value is not None:
assert self.is_decoder, "Only decoder can use `past_key_values`"
expected_num_past_key_values = 2 if encoder_hidden_states is None else 4
error_message = "There should be {} past states. 2 (past / key) for self attention.{} Got {} past key / value states".format(
expected_num_past_key_values,
"2 (past / key) for cross attention" if expected_num_past_key_values == 4 else "",
len(past_key_value),
)
assert len(past_key_value) == expected_num_past_key_values, error_message
self_attn_past_key_value = past_key_value[:2]
cross_attn_past_key_value = past_key_value[2:]
else:
self_attn_past_key_value, cross_attn_past_key_value = None, None
self_attention_outputs = self.layer[0](
hidden_states,
attention_mask=attention_mask,
position_bias=position_bias,
head_mask=head_mask,
past_key_value=self_attn_past_key_value,
use_cache=use_cache,
output_attentions=output_attentions,
)
hidden_states, present_key_value_state = self_attention_outputs[:2]
attention_outputs = self_attention_outputs[2:] # Keep self-attention outputs and relative position weights
if self.is_decoder and encoder_hidden_states is not None:
# the actual query length is unknown for cross attention
# if using past key value states. Need to inject it here
if present_key_value_state is not None:
query_length = present_key_value_state[0].shape[2]
else:
query_length = None
cross_attention_outputs = self.layer[1](
hidden_states,
kv=encoder_hidden_states,
attention_mask=encoder_attention_mask,
position_bias=encoder_decoder_position_bias,
head_mask=head_mask,
past_key_value=cross_attn_past_key_value,
query_length=query_length,
use_cache=use_cache,
output_attentions=output_attentions,
)
hidden_states = cross_attention_outputs[0]
# Combine self attn and cross attn key value states
if present_key_value_state is not None:
present_key_value_state = present_key_value_state + cross_attention_outputs[1]
# Keep cross-attention outputs and relative position weights
attention_outputs = attention_outputs + cross_attention_outputs[2:]
# Apply Feed Forward layer
hidden_states = self.layer[-1](hidden_states)
outputs = (hidden_states,)
# Add attentions if we output them
outputs = outputs + (present_key_value_state,) + attention_outputs
return outputs # hidden-states, present_key_value_states, (self-attention weights), (self-attention position bias), (cross-attention weights), (cross-attention position bias)
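# Editor's sketch (illustrative): one decoder block with cross-attention on tiny,
# arbitrary shapes. has_relative_attention_bias=True lets the block compute its
# own position biases, as the first layer of a stack does.
def _example_decoder_block():
    config = T5Config(d_model=16, d_kv=8, d_ff=32, num_heads=2, num_layers=1, is_decoder=True)
    block = T5Block(config, has_relative_attention_bias=True)
    hidden = torch.randn(1, 3, 16)   # decoder states: (batch, target_len, d_model)
    enc = torch.randn(1, 4, 16)      # encoder states: (batch, source_len, d_model)
    outputs = block(hidden, encoder_hidden_states=enc, use_cache=True)
    hidden_states, present_key_value = outputs[:2]
    # present_key_value holds 4 tensors: self-attention k/v plus cross-attention k/v.
    return hidden_states.shape, len(present_key_value)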
class T5PreTrainedModel(PreTrainedModel):
"""
An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
models.
"""
config_class = T5Config
load_tf_weights = load_tf_weights_in_t5
base_model_prefix = "transformer"
@property
def dummy_inputs(self):
input_ids = torch.tensor(DUMMY_INPUTS)
input_mask = torch.tensor(DUMMY_MASK)
dummy_inputs = {
"decoder_input_ids": input_ids,
"input_ids": input_ids,
"decoder_attention_mask": input_mask,
}
return dummy_inputs
def _init_weights(self, module):
""" Initialize the weights """
factor = self.config.initializer_factor # Used for testing weights initialization
if isinstance(module, T5LayerNorm):
module.weight.data.fill_(factor * 1.0)
elif isinstance(module, (T5Model, T5ForConditionalGeneration)):
# Mesh TensorFlow embeddings initialization
# See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L1624
module.shared.weight.data.normal_(mean=0.0, std=factor * 1.0)
elif isinstance(module, T5DenseReluDense):
# Mesh TensorFlow FF initialization
# See https://github.com/tensorflow/mesh/blob/master/mesh_tensorflow/transformer/transformer_layers.py#L56
# and https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L89
module.wi.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_model) ** -0.5))
if hasattr(module.wi, "bias") and module.wi.bias is not None:
module.wi.bias.data.zero_()
module.wo.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_ff) ** -0.5))
if hasattr(module.wo, "bias") and module.wo.bias is not None:
module.wo.bias.data.zero_()
elif isinstance(module, T5Attention):
# Mesh TensorFlow attention initialization to avoid scaling before softmax
# See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/transformer/attention.py#L136
d_model = self.config.d_model
d_kv = self.config.d_kv
n_heads = self.config.num_heads
module.q.weight.data.normal_(mean=0.0, std=factor * ((d_model * d_kv) ** -0.5))
module.k.weight.data.normal_(mean=0.0, std=factor * (d_model ** -0.5))
module.v.weight.data.normal_(mean=0.0, std=factor * (d_model ** -0.5))
module.o.weight.data.normal_(mean=0.0, std=factor * ((n_heads * d_kv) ** -0.5))
if module.has_relative_attention_bias:
module.relative_attention_bias.weight.data.normal_(mean=0.0, std=factor * ((d_model) ** -0.5))
def _shift_right(self, input_ids):
decoder_start_token_id = self.config.decoder_start_token_id
pad_token_id = self.config.pad_token_id
assert (
decoder_start_token_id is not None
), "self.model.config.decoder_start_token_id has to be defined. In T5 it is usually set to the pad_token_id. See T5 docs for more information"
# shift inputs to the right
shifted_input_ids = input_ids.new_zeros(input_ids.shape)
shifted_input_ids[..., 1:] = input_ids[..., :-1].clone()
shifted_input_ids[..., 0] = decoder_start_token_id
assert pad_token_id is not None, "self.model.config.pad_token_id has to be defined."
# replace possible -100 values in labels by `pad_token_id`
shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)
assert torch.all(shifted_input_ids >= 0).item(), "Verify that `shifted_input_ids` has only positive values"
return shifted_input_ids
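# Editor's sketch (illustrative): what _shift_right does when turning labels into
# decoder inputs. A deliberately tiny config is used here; the released T5
# checkpoints set both decoder_start_token_id and pad_token_id to 0.
def _example_shift_right():
    config = T5Config(
        vocab_size=10, d_model=16, d_kv=8, d_ff=32, num_layers=1, num_heads=2,
        decoder_start_token_id=0, pad_token_id=0,
    )
    model = T5Model(config)  # defined later in this module
    labels = torch.tensor([[6, 7, 8, -100]])  # -100 marks positions ignored by the loss
    # -> tensor([[0, 6, 7, 8]]): start token prepended, last label dropped,
    #    and any remaining -100 values replaced by the pad id.
    return model._shift_right(labels)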
class T5Stack(T5PreTrainedModel):
def __init__(self, config, embed_tokens=None):
super().__init__(config)
self.embed_tokens = embed_tokens
self.is_decoder = config.is_decoder
self.block = nn.ModuleList(
[T5Block(config, has_relative_attention_bias=bool(i == 0)) for i in range(config.num_layers)]
)
self.final_layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
self.dropout = nn.Dropout(config.dropout_rate)
self.init_weights()
def get_input_embeddings(self):
return self.embed_tokens
def get_output_embeddings(self):
return self.embed_tokens
def set_input_embeddings(self, new_embeddings):
self.embed_tokens = new_embeddings
def forward(
self,
input_ids=None,
attention_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
inputs_embeds=None,
head_mask=None,
past_key_values=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
use_cache = use_cache if use_cache is not None else self.config.use_cache
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if input_ids is not None and inputs_embeds is not None:
err_msg_prefix = "decoder_" if self.is_decoder else ""
raise ValueError(
f"You cannot specify both {err_msg_prefix}inputs and {err_msg_prefix}inputs_embeds at the same time"
)
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
else:
err_msg_prefix = "decoder_" if self.is_decoder else ""
raise ValueError(f"You have to specify either {err_msg_prefix}inputs or {err_msg_prefix}inputs_embeds")
if inputs_embeds is None:
assert self.embed_tokens is not None, "You have to initialize the model with valid token embeddings"
inputs_embeds = self.embed_tokens(input_ids)
batch_size, seq_length = input_shape
# required mask seq length can be calculated via length of past
mask_seq_length = past_key_values[0][0].shape[2] + seq_length if past_key_values is not None else seq_length
if use_cache is True:
assert self.is_decoder, ":obj:`use_cache` can only be set to `True` if {} is used as a decoder".format(
self
)
if attention_mask is None:
attention_mask = torch.ones(batch_size, mask_seq_length).to(inputs_embeds.device)
if self.is_decoder and encoder_attention_mask is None and encoder_hidden_states is not None:
encoder_seq_length = encoder_hidden_states.shape[1]
encoder_attention_mask = torch.ones(
batch_size, encoder_seq_length, device=inputs_embeds.device, dtype=torch.long
)
# initialize past_key_values with `None` if past does not exist
if past_key_values is None:
past_key_values = [None] * len(self.block)
# We can provide a self-attention mask of dimensions
# [batch_size, from_seq_length, to_seq_length]
# ourselves in which case we just need to make it broadcastable to all heads.
extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, inputs_embeds.device)
if self.is_decoder and encoder_attention_mask is not None:
encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
else:
encoder_extended_attention_mask = None
# Prepare head mask if needed
head_mask = self.get_head_mask(head_mask, self.config.num_layers)
present_key_value_states = () if use_cache else None
all_hidden_states = () if output_hidden_states else None
all_attentions = () if output_attentions else None
position_bias = None
encoder_decoder_position_bias = None
hidden_states = self.dropout(inputs_embeds)
for i, (layer_module, past_key_value) in enumerate(zip(self.block, past_key_values)):
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
layer_outputs = layer_module(
hidden_states,
attention_mask=extended_attention_mask,
position_bias=position_bias,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_extended_attention_mask,
encoder_decoder_position_bias=encoder_decoder_position_bias,
head_mask=head_mask[i],
past_key_value=past_key_value,
use_cache=use_cache,
output_attentions=output_attentions,
)
# layer_outputs is a tuple with:
# hidden-states, key-value-states, (self-attention weights), (self-attention position bias), (cross-attention weights), (cross-attention position bias)
hidden_states, present_key_value_state = layer_outputs[:2]
if i == 0:
# We share the position biases between the layers - the first layer stores them
# layer_outputs = hidden-states, key-value-states (self-attention weights), (self-attention position bias), (cross-attention weights), (cross-attention position bias)
position_bias = layer_outputs[3 if output_attentions else 2]
if self.is_decoder and encoder_hidden_states is not None:
encoder_decoder_position_bias = layer_outputs[5 if output_attentions else 3]
# append next layer key value states
if use_cache:
present_key_value_states = present_key_value_states + (present_key_value_state,)
if output_attentions:
all_attentions = all_attentions + (layer_outputs[2],) # We keep only self-attention weights for now
hidden_states = self.final_layer_norm(hidden_states)
hidden_states = self.dropout(hidden_states)
# Add last layer
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if not return_dict:
return tuple(
v
for v in [hidden_states, present_key_value_states, all_hidden_states, all_attentions]
if v is not None
)
return BaseModelOutputWithPast(
last_hidden_state=hidden_states,
past_key_values=present_key_value_states,
hidden_states=all_hidden_states,
attentions=all_attentions,
)
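# Sketch of driving T5Stack directly as a standalone encoder (config values
# are illustrative; use_cache must be disabled outside a decoder, per the
# assert in forward above):
#   config = T5Config(num_layers=2, d_model=64, d_kv=8, num_heads=8, d_ff=128, use_cache=False)
#   stack = T5Stack(config, nn.Embedding(config.vocab_size, config.d_model))
#   out = stack(input_ids=torch.tensor([[13, 7, 1]]), return_dict=True)
#   out.last_hidden_state.shape  # torch.Size([1, 3, 64])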
T5_START_DOCSTRING = r"""
The T5 model was proposed in `Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer
<https://arxiv.org/abs/1910.10683>`__ by Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang,
Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu. It's an encoder-decoder transformer pre-trained in a text-to-text
denoising generative setting.
This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic
methods the library implements for all its models (such as downloading or saving, resizing the input embeddings,
pruning heads etc.)
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matters related to
general usage and behavior.
Parameters:
config (:class:`~transformers.T5Config`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
T5_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. T5 is a model with relative position embeddings so you
should be able to pad the inputs on both the right and the left.
Indices can be obtained using :class:`~transformers.T5Tokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
To know more on how to prepare :obj:`input_ids` for pretraining take a look at `T5 Training
<./t5.html#training>`__.
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
decoder_input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
Provide for sequence to sequence training. T5 uses the :obj:`pad_token_id` as the starting token for
:obj:`decoder_input_ids` generation. If :obj:`past_key_values` is used, optionally only the last
:obj:`decoder_input_ids` have to be input (see :obj:`past_key_values`).
To know more on how to prepare :obj:`decoder_input_ids` for pretraining take a look at `T5 Training
<./t5.html#training>`__. If :obj:`decoder_input_ids` and :obj:`decoder_inputs_embeds` are both unset,
:obj:`decoder_input_ids` takes the value of :obj:`input_ids`.
decoder_attention_mask (:obj:`torch.BoolTensor` of shape :obj:`(batch_size, tgt_seq_len)`, `optional`):
Default behavior: generate a tensor that ignores pad tokens in :obj:`decoder_input_ids`. Causal mask will
also be used by default.
encoder_outputs (:obj:`tuple(tuple(torch.FloatTensor))`, `optional`):
Tuple consists of (:obj:`last_hidden_state`, :obj:`optional`: `hidden_states`, :obj:`optional`:
`attentions`) :obj:`last_hidden_state` of shape :obj:`(batch_size, sequence_length, hidden_size)` is a
sequence of hidden states at the output of the last layer of the encoder. Used in the cross-attention of
the decoder.
past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
(those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
head_mask (:obj:`torch.FloatTensor` of shape :obj:`(num_heads,)` or :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the self-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
decoder_inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, target_sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`decoder_input_ids` you can choose to directly pass an embedded
representation. If :obj:`past_key_values` is used, optionally only the last :obj:`decoder_inputs_embeds`
have to be input (see :obj:`past_key_values`). This is useful if you want more control over how to convert
:obj:`decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.
If :obj:`decoder_input_ids` and :obj:`decoder_inputs_embeds` are both unset, :obj:`decoder_inputs_embeds`
takes the value of :obj:`inputs_embeds`.
use_cache (:obj:`bool`, `optional`):
If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
decoding (see :obj:`past_key_values`).
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
"The bare T5 Model transformer outputting raw hidden-states" "without any specific head on top.",
T5_START_DOCSTRING,
)
class T5Model(T5PreTrainedModel):
def __init__(self, config: T5Config):
super().__init__(config)
self.shared = nn.Embedding(config.vocab_size, config.d_model)
encoder_config = copy.deepcopy(config)
encoder_config.use_cache = False
encoder_config.is_encoder_decoder = False
self.encoder = T5Stack(encoder_config, self.shared)
decoder_config = copy.deepcopy(config)
decoder_config.is_decoder = True
decoder_config.is_encoder_decoder = False
decoder_config.num_layers = config.num_decoder_layers
self.decoder = T5Stack(decoder_config, self.shared)
self.init_weights()
def get_input_embeddings(self):
return self.shared
def set_input_embeddings(self, new_embeddings):
self.shared = new_embeddings
self.encoder.set_input_embeddings(new_embeddings)
self.decoder.set_input_embeddings(new_embeddings)
def get_encoder(self):
return self.encoder
def get_decoder(self):
return self.decoder
def _prune_heads(self, heads_to_prune):
"""
Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
class PreTrainedModel
"""
for layer, heads in heads_to_prune.items():
self.encoder.block[layer].layer[0].SelfAttention.prune_heads(heads)
@add_start_docstrings_to_model_forward(T5_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=Seq2SeqModelOutput, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
encoder_outputs=None,
past_key_values=None,
head_mask=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
**kwargs,
):
r"""
Returns:
Example::
>>> from transformers import T5Tokenizer, T5Model
>>> tokenizer = T5Tokenizer.from_pretrained('t5-small')
>>> model = T5Model.from_pretrained('t5-small')
>>> input_ids = tokenizer("Studies have been shown that owning a dog is good for you", return_tensors="pt").input_ids # Batch size 1
>>> decoder_input_ids = tokenizer("Studies show that", return_tensors="pt").input_ids # Batch size 1
>>> outputs = model(input_ids=input_ids, decoder_input_ids=decoder_input_ids, return_dict=True)
>>> last_hidden_states = outputs.last_hidden_state
"""
if "decoder_past_key_value_states" in kwargs:
warnings.warn(
"The `decoder_past_key_value_states` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_value_states")
if "decoder_past_key_values" in kwargs:
warnings.warn(
"The `decoder_past_key_values` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_values")
assert kwargs == {}, f"Unexpected keyword arguments: {list(kwargs.keys())}."
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# Encode if needed (training, first prediction pass)
if encoder_outputs is None:
encoder_outputs = self.encoder(
input_ids=input_ids,
attention_mask=attention_mask,
inputs_embeds=inputs_embeds,
head_mask=head_mask,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
encoder_outputs = BaseModelOutput(
last_hidden_state=encoder_outputs[0],
hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
)
hidden_states = encoder_outputs[0]
# Decode
decoder_outputs = self.decoder(
input_ids=decoder_input_ids,
attention_mask=decoder_attention_mask,
inputs_embeds=decoder_inputs_embeds,
past_key_values=past_key_values,
encoder_hidden_states=hidden_states,
encoder_attention_mask=attention_mask,
head_mask=head_mask,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
if not return_dict:
return decoder_outputs + encoder_outputs
return Seq2SeqModelOutput(
last_hidden_state=decoder_outputs.last_hidden_state,
past_key_values=decoder_outputs.past_key_values,
decoder_hidden_states=decoder_outputs.hidden_states,
decoder_attentions=decoder_outputs.attentions,
encoder_last_hidden_state=encoder_outputs.last_hidden_state,
encoder_hidden_states=encoder_outputs.hidden_states,
encoder_attentions=encoder_outputs.attentions,
)
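# Sketch (assumed usage): because forward() accepts a precomputed
# `encoder_outputs`, the encoder pass can be run once and reused across
# several decoder calls, which is what generation loops rely on:
#   enc_out = model.encoder(input_ids=input_ids, return_dict=True)
#   out_a = model(encoder_outputs=enc_out, decoder_input_ids=dec_ids_a)
#   out_b = model(encoder_outputs=enc_out, decoder_input_ids=dec_ids_b)
# (dec_ids_a / dec_ids_b are hypothetical decoder input tensors.)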
@add_start_docstrings("""T5 Model with a `language modeling` head on top. """, T5_START_DOCSTRING)
class T5ForConditionalGeneration(T5PreTrainedModel):
authorized_missing_keys = [r"encoder\.embed_tokens\.weight", r"decoder\.embed_tokens\.weight", r"lm_head\.weight"]
def __init__(self, config):
super().__init__(config)
self.model_dim = config.d_model
self.shared = nn.Embedding(config.vocab_size, config.d_model)
encoder_config = copy.deepcopy(config)
encoder_config.use_cache = False
encoder_config.is_encoder_decoder = False
self.encoder = T5Stack(encoder_config, self.shared)
decoder_config = copy.deepcopy(config)
decoder_config.is_decoder = True
decoder_config.is_encoder_decoder = False
decoder_config.num_layers = config.num_decoder_layers
self.decoder = T5Stack(decoder_config, self.shared)
self.lm_head = nn.Linear(config.d_model, config.vocab_size, bias=False)
self.init_weights()
def get_input_embeddings(self):
return self.shared
def set_input_embeddings(self, new_embeddings):
self.shared = new_embeddings
self.encoder.set_input_embeddings(new_embeddings)
self.decoder.set_input_embeddings(new_embeddings)
def get_output_embeddings(self):
return self.lm_head
def get_encoder(self):
return self.encoder
def get_decoder(self):
return self.decoder
@add_start_docstrings_to_model_forward(T5_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
encoder_outputs=None,
past_key_values=None,
head_mask=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
labels=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
**kwargs,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Labels for computing the sequence-to-sequence language modeling loss. Indices should be in :obj:`[-100, 0, ...,
config.vocab_size - 1]`. All labels set to ``-100`` are ignored (masked), the loss is only computed for
labels in ``[0, ..., config.vocab_size - 1]``
kwargs (:obj:`Dict[str, any]`, optional, defaults to `{}`):
Used to hide legacy arguments that have been deprecated.
Returns:
Examples::
>>> from transformers import T5Tokenizer, T5ForConditionalGeneration
>>> tokenizer = T5Tokenizer.from_pretrained('t5-small')
>>> model = T5ForConditionalGeneration.from_pretrained('t5-small', return_dict=True)
>>> input_ids = tokenizer('The <extra_id_0> walks in <extra_id_1> park', return_tensors='pt').input_ids
>>> labels = tokenizer('<extra_id_0> cute dog <extra_id_1> the <extra_id_2> </s>', return_tensors='pt').input_ids
>>> outputs = model(input_ids=input_ids, labels=labels)
>>> loss = outputs.loss
>>> logits = outputs.logits
>>> input_ids = tokenizer("summarize: studies have shown that owning a dog is good for you ", return_tensors="pt").input_ids # Batch size 1
>>> outputs = model.generate(input_ids)
"""
if "lm_labels" in kwargs:
warnings.warn(
"The `lm_labels` argument is deprecated and will be removed in a future version, use `labels` instead.",
FutureWarning,
)
labels = kwargs.pop("lm_labels")
if "decoder_past_key_value_states" in kwargs:
warnings.warn(
"The `decoder_past_key_value_states` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_value_states")
if "decoder_past_key_values" in kwargs:
warnings.warn(
"The `decoder_past_key_values` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_values")
assert kwargs == {}, f"Unexpected keyword arguments: {list(kwargs.keys())}."
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
# Encode if needed (training, first prediction pass)
if encoder_outputs is None:
# Convert encoder inputs in embeddings if needed
encoder_outputs = self.encoder(
input_ids=input_ids,
attention_mask=attention_mask,
inputs_embeds=inputs_embeds,
head_mask=head_mask,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
encoder_outputs = BaseModelOutput(
last_hidden_state=encoder_outputs[0],
hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
)
hidden_states = encoder_outputs[0]
if labels is not None and decoder_input_ids is None and decoder_inputs_embeds is None:
# get decoder inputs from shifting lm labels to the right
decoder_input_ids = self._shift_right(labels)
# If decoding with past key value states, only the last tokens
# should be given as an input
if past_key_values is not None:
assert labels is None, "Decoder should not use cached key value states when training."
if decoder_input_ids is not None:
decoder_input_ids = decoder_input_ids[:, -1:]
if decoder_inputs_embeds is not None:
decoder_inputs_embeds = decoder_inputs_embeds[:, -1:]
# Decode
decoder_outputs = self.decoder(
input_ids=decoder_input_ids,
attention_mask=decoder_attention_mask,
inputs_embeds=decoder_inputs_embeds,
past_key_values=past_key_values,
encoder_hidden_states=hidden_states,
encoder_attention_mask=attention_mask,
head_mask=head_mask,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = decoder_outputs[0]
# Rescale output before projecting on vocab
# See https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/transformer/transformer.py#L586
sequence_output = sequence_output * (self.model_dim ** -0.5)
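# With tied input/output embeddings (the default in T5 configs), this matches
# the Mesh-TF convention: logits are computed as (h * d_model ** -0.5) @ E.T
# instead of rescaling the shared embedding matrix E itself.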
lm_logits = self.lm_head(sequence_output)
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss(ignore_index=-100)
loss = loss_fct(lm_logits.view(-1, lm_logits.size(-1)), labels.view(-1))
# TODO(thom): Add z_loss https://github.com/tensorflow/mesh/blob/fa19d69eafc9a482aff0b59ddd96b025c0cb207d/mesh_tensorflow/layers.py#L666
if not return_dict:
output = (lm_logits,) + decoder_outputs[1:] + encoder_outputs
return ((loss,) + output) if loss is not None else output
return Seq2SeqLMOutput(
loss=loss,
logits=lm_logits,
past_key_values=decoder_outputs.past_key_values,
decoder_hidden_states=decoder_outputs.hidden_states,
decoder_attentions=decoder_outputs.attentions,
encoder_last_hidden_state=encoder_outputs.last_hidden_state,
encoder_hidden_states=encoder_outputs.hidden_states,
encoder_attentions=encoder_outputs.attentions,
)
def prepare_inputs_for_generation(self, input_ids, past, attention_mask, use_cache, encoder_outputs, **kwargs):
# cut decoder_input_ids if past is used
if past is not None:
input_ids = input_ids[:, -1:]
return {
"decoder_input_ids": input_ids,
"past_key_values": past,
"encoder_outputs": encoder_outputs,
"attention_mask": attention_mask,
"use_cache": use_cache,
}
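# Illustration of the incremental decoding contract implemented here (shapes
# are hypothetical): on the first generate() step `past` is None and the full
# decoder prefix of shape (batch, 1), the start token, is passed; on every
# later step only the newest token (batch, 1) is fed, while the cached
# key / value states in `past` stand in for all earlier positions.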
def _reorder_cache(self, past, beam_idx):
# if decoder past is not included in output
# speedy decoding is disabled and no need to reorder
if past is None:
logger.warning("You might want to consider setting `use_cache=True` to speed up decoding")
return past
reordered_decoder_past = ()
for layer_past_states in past:
# get the correct batch idx from layer past batch dim
# batch dim of `past` is at position 0 (cached tensors are (batch, n_heads, seq, d_kv))
reordered_layer_past_states = ()
for layer_past_state in layer_past_states:
# need to set correct `past` for each of the four key / value states
reordered_layer_past_states = reordered_layer_past_states + (
layer_past_state.index_select(0, beam_idx),
)
assert reordered_layer_past_states[0].shape == layer_past_states[0].shape
assert len(reordered_layer_past_states) == len(layer_past_states)
reordered_decoder_past = reordered_decoder_past + (reordered_layer_past_states,)
return reordered_decoder_past
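# Worked shape example (values assumed): with num_beams=3 on a batch of one,
# each cached tensor is (3, num_heads, seq_len, d_kv); a beam_idx of
# tensor([0, 0, 2]) duplicates beam 0 and keeps beam 2, so index_select(0, ...)
# realigns every layer's key / value cache with the surviving hypotheses.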
| 44.205975 | 213 | 0.650542 |
import copy
import math
import os
import warnings
import torch
import torch.nn.functional as F
from torch import nn
from torch.nn import CrossEntropyLoss
from .configuration_t5 import T5Config
from .file_utils import (
DUMMY_INPUTS,
DUMMY_MASK,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from .modeling_outputs import BaseModelOutput, BaseModelOutputWithPast, Seq2SeqLMOutput, Seq2SeqModelOutput
from .modeling_utils import PreTrainedModel, find_pruneable_heads_and_indices, prune_linear_layer
from .utils import logging
logger = logging.get_logger(__name__)
_CONFIG_FOR_DOC = "T5Config"
_TOKENIZER_FOR_DOC = "T5Tokenizer"
use_cache=False,
output_attentions=False,
):
bs, qlen, dim = input.size()
if past_key_value is not None:
assert self.is_decoder is True, "Encoder cannot cache past key value states"
assert (
len(past_key_value) == 2
), "past_key_value should have 2 past states: keys and values. Got {} past states".format(
len(past_key_value)
)
real_qlen = qlen + past_key_value[0].shape[2] if query_length is None else query_length
else:
real_qlen = qlen
if kv is None:
klen = real_qlen
else:
klen = kv.size(1)
def shape(x):
return x.view(bs, -1, self.n_heads, self.d_kv).transpose(1, 2)
def unshape(x):
return x.transpose(1, 2).contiguous().view(bs, -1, self.inner_dim)
q = shape(self.q(input))
if kv is None:
k = shape(self.k(input))
v = shape(self.v(input))
elif past_key_value is None:
k = v = kv
k = shape(self.k(k))
v = shape(self.v(v))
if past_key_value is not None:
if kv is None:
k_, v_ = past_key_value
k = torch.cat([k_, k], dim=2)
v = torch.cat([v_, v], dim=2)
else:
k, v = past_key_value
if self.is_decoder and use_cache is True:
present_key_value_state = ((k, v),)
else:
present_key_value_state = (None,)
scores = torch.matmul(
q, k.transpose(3, 2)
)
if position_bias is None:
if not self.has_relative_attention_bias:
raise ValueError("No position_bias provided and no weights to compute position_bias")
position_bias = self.compute_bias(real_qlen, klen)
if past_key_value is not None:
position_bias = position_bias[:, :, -qlen:, :]
if mask is not None:
position_bias = position_bias + mask
scores += position_bias
weights = F.softmax(scores.float(), dim=-1).type_as(scores)
weights = F.dropout(weights, p=self.dropout, training=self.training)
if head_mask is not None:
weights = weights * head_mask
context = torch.matmul(weights, v)
context = unshape(context)
context = self.o(context)
outputs = (context,) + present_key_value_state
if output_attentions:
outputs = outputs + (weights,)
if self.has_relative_attention_bias:
outputs = outputs + (position_bias,)
return outputs
class T5LayerSelfAttention(nn.Module):
def __init__(self, config, has_relative_attention_bias=False):
super().__init__()
self.SelfAttention = T5Attention(
config, has_relative_attention_bias=has_relative_attention_bias, is_bidirectional=not config.is_decoder
)
self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
self.dropout = nn.Dropout(config.dropout_rate)
def forward(
self,
hidden_states,
attention_mask=None,
position_bias=None,
head_mask=None,
past_key_value=None,
use_cache=False,
output_attentions=False,
):
norm_x = self.layer_norm(hidden_states)
attention_output = self.SelfAttention(
norm_x,
mask=attention_mask,
position_bias=position_bias,
head_mask=head_mask,
past_key_value=past_key_value,
use_cache=use_cache,
output_attentions=output_attentions,
)
y = attention_output[0]
layer_output = hidden_states + self.dropout(y)
outputs = (layer_output,) + attention_output[1:]
return outputs
class T5LayerCrossAttention(nn.Module):
def __init__(self, config, has_relative_attention_bias=False):
super().__init__()
self.EncDecAttention = T5Attention(
config, has_relative_attention_bias=has_relative_attention_bias, is_bidirectional=True
)
self.layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
self.dropout = nn.Dropout(config.dropout_rate)
def forward(
self,
hidden_states,
kv,
attention_mask=None,
position_bias=None,
head_mask=None,
past_key_value=None,
use_cache=False,
query_length=None,
output_attentions=False,
):
norm_x = self.layer_norm(hidden_states)
attention_output = self.EncDecAttention(
norm_x,
mask=attention_mask,
kv=kv,
position_bias=position_bias,
head_mask=head_mask,
past_key_value=past_key_value,
use_cache=use_cache,
query_length=query_length,
output_attentions=output_attentions,
)
y = attention_output[0]
layer_output = hidden_states + self.dropout(y)
outputs = (layer_output,) + attention_output[1:]
return outputs
class T5Block(nn.Module):
def __init__(self, config, has_relative_attention_bias=False):
super().__init__()
self.is_decoder = config.is_decoder
self.layer = nn.ModuleList()
self.layer.append(T5LayerSelfAttention(config, has_relative_attention_bias=has_relative_attention_bias))
if self.is_decoder:
self.layer.append(T5LayerCrossAttention(config, has_relative_attention_bias=has_relative_attention_bias))
self.layer.append(T5LayerFF(config))
def forward(
self,
hidden_states,
attention_mask=None,
position_bias=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
encoder_decoder_position_bias=None,
head_mask=None,
past_key_value=None,
use_cache=False,
output_attentions=False,
):
if past_key_value is not None:
assert self.is_decoder, "Only decoder can use `past_key_values`"
expected_num_past_key_values = 2 if encoder_hidden_states is None else 4
error_message = "There should be {} past states. 2 (past / key) for self attention.{} Got {} past key / value states".format(
expected_num_past_key_values,
"2 (past / key) for cross attention" if expected_num_past_key_values == 4 else "",
len(past_key_value),
)
assert len(past_key_value) == expected_num_past_key_values, error_message
self_attn_past_key_value = past_key_value[:2]
cross_attn_past_key_value = past_key_value[2:]
else:
self_attn_past_key_value, cross_attn_past_key_value = None, None
self_attention_outputs = self.layer[0](
hidden_states,
attention_mask=attention_mask,
position_bias=position_bias,
head_mask=head_mask,
past_key_value=self_attn_past_key_value,
use_cache=use_cache,
output_attentions=output_attentions,
)
hidden_states, present_key_value_state = self_attention_outputs[:2]
attention_outputs = self_attention_outputs[2:]
if self.is_decoder and encoder_hidden_states is not None:
if present_key_value_state is not None:
query_length = present_key_value_state[0].shape[2]
else:
query_length = None
cross_attention_outputs = self.layer[1](
hidden_states,
kv=encoder_hidden_states,
attention_mask=encoder_attention_mask,
position_bias=encoder_decoder_position_bias,
head_mask=head_mask,
past_key_value=cross_attn_past_key_value,
query_length=query_length,
use_cache=use_cache,
output_attentions=output_attentions,
)
hidden_states = cross_attention_outputs[0]
if present_key_value_state is not None:
present_key_value_state = present_key_value_state + cross_attention_outputs[1]
attention_outputs = attention_outputs + cross_attention_outputs[2:]
hidden_states = self.layer[-1](hidden_states)
outputs = (hidden_states,)
outputs = outputs + (present_key_value_state,) + attention_outputs
return outputs
class T5PreTrainedModel(PreTrainedModel):
config_class = T5Config
load_tf_weights = load_tf_weights_in_t5
base_model_prefix = "transformer"
@property
def dummy_inputs(self):
input_ids = torch.tensor(DUMMY_INPUTS)
input_mask = torch.tensor(DUMMY_MASK)
dummy_inputs = {
"decoder_input_ids": input_ids,
"input_ids": input_ids,
"decoder_attention_mask": input_mask,
}
return dummy_inputs
def _init_weights(self, module):
factor = self.config.initializer_factor
if isinstance(module, T5LayerNorm):
module.weight.data.fill_(factor * 1.0)
elif isinstance(module, (T5Model, T5ForConditionalGeneration)):
module.shared.weight.data.normal_(mean=0.0, std=factor * 1.0)
elif isinstance(module, T5DenseReluDense):
module.wi.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_model) ** -0.5))
if hasattr(module.wi, "bias") and module.wi.bias is not None:
module.wi.bias.data.zero_()
module.wo.weight.data.normal_(mean=0.0, std=factor * ((self.config.d_ff) ** -0.5))
if hasattr(module.wo, "bias") and module.wo.bias is not None:
module.wo.bias.data.zero_()
elif isinstance(module, T5Attention):
d_model = self.config.d_model
d_kv = self.config.d_kv
n_heads = self.config.num_heads
module.q.weight.data.normal_(mean=0.0, std=factor * ((d_model * d_kv) ** -0.5))
module.k.weight.data.normal_(mean=0.0, std=factor * (d_model ** -0.5))
module.v.weight.data.normal_(mean=0.0, std=factor * (d_model ** -0.5))
module.o.weight.data.normal_(mean=0.0, std=factor * ((n_heads * d_kv) ** -0.5))
if module.has_relative_attention_bias:
module.relative_attention_bias.weight.data.normal_(mean=0.0, std=factor * ((d_model) ** -0.5))
def _shift_right(self, input_ids):
decoder_start_token_id = self.config.decoder_start_token_id
pad_token_id = self.config.pad_token_id
assert (
decoder_start_token_id is not None
), "self.model.config.decoder_start_token_id has to be defined. In T5 it is usually set to the pad_token_id. See T5 docs for more information"
shifted_input_ids = input_ids.new_zeros(input_ids.shape)
shifted_input_ids[..., 1:] = input_ids[..., :-1].clone()
shifted_input_ids[..., 0] = decoder_start_token_id
assert pad_token_id is not None, "self.model.config.pad_token_id has to be defined."
shifted_input_ids.masked_fill_(shifted_input_ids == -100, pad_token_id)
assert torch.all(shifted_input_ids >= 0).item(), "Verify that `shifted_input_ids` has only positive values"
return shifted_input_ids
class T5Stack(T5PreTrainedModel):
def __init__(self, config, embed_tokens=None):
super().__init__(config)
self.embed_tokens = embed_tokens
self.is_decoder = config.is_decoder
self.block = nn.ModuleList(
[T5Block(config, has_relative_attention_bias=bool(i == 0)) for i in range(config.num_layers)]
)
self.final_layer_norm = T5LayerNorm(config.d_model, eps=config.layer_norm_epsilon)
self.dropout = nn.Dropout(config.dropout_rate)
self.init_weights()
def get_input_embeddings(self):
return self.embed_tokens
def get_output_embeddings(self):
return self.embed_tokens
def set_input_embeddings(self, new_embeddings):
self.embed_tokens = new_embeddings
def forward(
self,
input_ids=None,
attention_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
inputs_embeds=None,
head_mask=None,
past_key_values=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
use_cache = use_cache if use_cache is not None else self.config.use_cache
output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
output_hidden_states = (
output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
)
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if input_ids is not None and inputs_embeds is not None:
err_msg_prefix = "decoder_" if self.is_decoder else ""
raise ValueError(
f"You cannot specify both {err_msg_prefix}inputs and {err_msg_prefix}inputs_embeds at the same time"
)
elif input_ids is not None:
input_shape = input_ids.size()
input_ids = input_ids.view(-1, input_shape[-1])
elif inputs_embeds is not None:
input_shape = inputs_embeds.size()[:-1]
else:
err_msg_prefix = "decoder_" if self.is_decoder else ""
raise ValueError(f"You have to specify either {err_msg_prefix}inputs or {err_msg_prefix}inputs_embeds")
if inputs_embeds is None:
assert self.embed_tokens is not None, "You have to initialize the model with valid token embeddings"
inputs_embeds = self.embed_tokens(input_ids)
batch_size, seq_length = input_shape
mask_seq_length = past_key_values[0][0].shape[2] + seq_length if past_key_values is not None else seq_length
if use_cache is True:
assert self.is_decoder, ":obj:`use_cache` can only be set to `True` if {} is used as a decoder".format(
self
)
if attention_mask is None:
attention_mask = torch.ones(batch_size, mask_seq_length).to(inputs_embeds.device)
if self.is_decoder and encoder_attention_mask is None and encoder_hidden_states is not None:
encoder_seq_length = encoder_hidden_states.shape[1]
encoder_attention_mask = torch.ones(
batch_size, encoder_seq_length, device=inputs_embeds.device, dtype=torch.long
)
if past_key_values is None:
past_key_values = [None] * len(self.block)
extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, inputs_embeds.device)
if self.is_decoder and encoder_attention_mask is not None:
encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
else:
encoder_extended_attention_mask = None
head_mask = self.get_head_mask(head_mask, self.config.num_layers)
present_key_value_states = () if use_cache else None
all_hidden_states = () if output_hidden_states else None
all_attentions = () if output_attentions else None
position_bias = None
encoder_decoder_position_bias = None
hidden_states = self.dropout(inputs_embeds)
for i, (layer_module, past_key_value) in enumerate(zip(self.block, past_key_values)):
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
layer_outputs = layer_module(
hidden_states,
attention_mask=extended_attention_mask,
position_bias=position_bias,
encoder_hidden_states=encoder_hidden_states,
encoder_attention_mask=encoder_extended_attention_mask,
encoder_decoder_position_bias=encoder_decoder_position_bias,
head_mask=head_mask[i],
past_key_value=past_key_value,
use_cache=use_cache,
output_attentions=output_attentions,
)
hidden_states, present_key_value_state = layer_outputs[:2]
if i == 0:
position_bias = layer_outputs[3 if output_attentions else 2]
if self.is_decoder and encoder_hidden_states is not None:
encoder_decoder_position_bias = layer_outputs[5 if output_attentions else 3]
if use_cache:
present_key_value_states = present_key_value_states + (present_key_value_state,)
if output_attentions:
all_attentions = all_attentions + (layer_outputs[2],)
hidden_states = self.final_layer_norm(hidden_states)
hidden_states = self.dropout(hidden_states)
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if not return_dict:
return tuple(
v
for v in [hidden_states, present_key_value_states, all_hidden_states, all_attentions]
if v is not None
)
return BaseModelOutputWithPast(
last_hidden_state=hidden_states,
past_key_values=present_key_value_states,
hidden_states=all_hidden_states,
attentions=all_attentions,
)
T5_START_DOCSTRING = r"""
The T5 model was proposed in `Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer
<https://arxiv.org/abs/1910.10683>`__ by Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang,
Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu. It's an encoder-decoder transformer pre-trained in a text-to-text
denoising generative setting.
This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic
methods the library implements for all its models (such as downloading or saving, resizing the input embeddings,
pruning heads etc.)
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matters related to
general usage and behavior.
Parameters:
config (:class:`~transformers.T5Config`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
T5_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, sequence_length)`):
Indices of input sequence tokens in the vocabulary. T5 is a model with relative position embeddings so you
should be able to pad the inputs on both the right and the left.
Indices can be obtained using :class:`~transformers.T5Tokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
To know more on how to prepare :obj:`input_ids` for pretraining take a look at `T5 Training
<./t5.html#training>`__.
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
decoder_input_ids (:obj:`torch.LongTensor` of shape :obj:`(batch_size, target_sequence_length)`, `optional`):
Provide for sequence to sequence training. T5 uses the :obj:`pad_token_id` as the starting token for
:obj:`decoder_input_ids` generation. If :obj:`past_key_values` is used, optionally only the last
:obj:`decoder_input_ids` have to be input (see :obj:`past_key_values`).
To know more on how to prepare :obj:`decoder_input_ids` for pretraining take a look at `T5 Training
<./t5.html#training>`__. If :obj:`decoder_input_ids` and :obj:`decoder_inputs_embeds` are both unset,
:obj:`decoder_input_ids` takes the value of :obj:`input_ids`.
decoder_attention_mask (:obj:`torch.BoolTensor` of shape :obj:`(batch_size, tgt_seq_len)`, `optional`):
Default behavior: generate a tensor that ignores pad tokens in :obj:`decoder_input_ids`. Causal mask will
also be used by default.
encoder_outputs (:obj:`tuple(tuple(torch.FloatTensor))`, `optional`):
Tuple consists of (:obj:`last_hidden_state`, :obj:`optional`: `hidden_states`, :obj:`optional`:
`attentions`) :obj:`last_hidden_state` of shape :obj:`(batch_size, sequence_length, hidden_size)` is a
sequence of hidden states at the output of the last layer of the encoder. Used in the cross-attention of
the decoder.
past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
(those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
head_mask (:obj:`torch.FloatTensor` of shape :obj:`(num_heads,)` or :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the self-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
decoder_inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, target_sequence_length, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`decoder_input_ids` you can choose to directly pass an embedded
representation. If :obj:`past_key_values` is used, optionally only the last :obj:`decoder_inputs_embeds`
have to be input (see :obj:`past_key_values`). This is useful if you want more control over how to convert
:obj:`decoder_input_ids` indices into associated vectors than the model's internal embedding lookup matrix.
If :obj:`decoder_input_ids` and :obj:`decoder_inputs_embeds` are both unset, :obj:`decoder_inputs_embeds`
takes the value of :obj:`inputs_embeds`.
use_cache (:obj:`bool`, `optional`):
If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
decoding (see :obj:`past_key_values`).
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
"The bare T5 Model transformer outputting raw hidden-states" "without any specific head on top.",
T5_START_DOCSTRING,
)
class T5Model(T5PreTrainedModel):
def __init__(self, config: T5Config):
super().__init__(config)
self.shared = nn.Embedding(config.vocab_size, config.d_model)
encoder_config = copy.deepcopy(config)
encoder_config.use_cache = False
encoder_config.is_encoder_decoder = False
self.encoder = T5Stack(encoder_config, self.shared)
decoder_config = copy.deepcopy(config)
decoder_config.is_decoder = True
decoder_config.is_encoder_decoder = False
decoder_config.num_layers = config.num_decoder_layers
self.decoder = T5Stack(decoder_config, self.shared)
self.init_weights()
def get_input_embeddings(self):
return self.shared
def set_input_embeddings(self, new_embeddings):
self.shared = new_embeddings
self.encoder.set_input_embeddings(new_embeddings)
self.decoder.set_input_embeddings(new_embeddings)
def get_encoder(self):
return self.encoder
def get_decoder(self):
return self.decoder
def _prune_heads(self, heads_to_prune):
for layer, heads in heads_to_prune.items():
self.encoder.block[layer].layer[0].SelfAttention.prune_heads(heads)
@add_start_docstrings_to_model_forward(T5_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=Seq2SeqModelOutput, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
encoder_outputs=None,
past_key_values=None,
head_mask=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
**kwargs,
):
if "decoder_past_key_value_states" in kwargs:
warnings.warn(
"The `decoder_past_key_value_states` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_value_states")
if "decoder_past_key_values" in kwargs:
warnings.warn(
"The `decoder_past_key_values` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_values")
assert kwargs == {}, f"Unexpected keyword arguments: {list(kwargs.keys())}."
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if encoder_outputs is None:
encoder_outputs = self.encoder(
input_ids=input_ids,
attention_mask=attention_mask,
inputs_embeds=inputs_embeds,
head_mask=head_mask,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
encoder_outputs = BaseModelOutput(
last_hidden_state=encoder_outputs[0],
hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
)
hidden_states = encoder_outputs[0]
decoder_outputs = self.decoder(
input_ids=decoder_input_ids,
attention_mask=decoder_attention_mask,
inputs_embeds=decoder_inputs_embeds,
past_key_values=past_key_values,
encoder_hidden_states=hidden_states,
encoder_attention_mask=attention_mask,
head_mask=head_mask,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
if not return_dict:
return decoder_outputs + encoder_outputs
return Seq2SeqModelOutput(
last_hidden_state=decoder_outputs.last_hidden_state,
past_key_values=decoder_outputs.past_key_values,
decoder_hidden_states=decoder_outputs.hidden_states,
decoder_attentions=decoder_outputs.attentions,
encoder_last_hidden_state=encoder_outputs.last_hidden_state,
encoder_hidden_states=encoder_outputs.hidden_states,
encoder_attentions=encoder_outputs.attentions,
)
@add_start_docstrings("""T5 Model with a `language modeling` head on top. """, T5_START_DOCSTRING)
class T5ForConditionalGeneration(T5PreTrainedModel):
authorized_missing_keys = [r"encoder\.embed_tokens\.weight", r"decoder\.embed_tokens\.weight", r"lm_head\.weight"]
def __init__(self, config):
super().__init__(config)
self.model_dim = config.d_model
self.shared = nn.Embedding(config.vocab_size, config.d_model)
encoder_config = copy.deepcopy(config)
encoder_config.use_cache = False
encoder_config.is_encoder_decoder = False
self.encoder = T5Stack(encoder_config, self.shared)
decoder_config = copy.deepcopy(config)
decoder_config.is_decoder = True
decoder_config.is_encoder_decoder = False
decoder_config.num_layers = config.num_decoder_layers
self.decoder = T5Stack(decoder_config, self.shared)
self.lm_head = nn.Linear(config.d_model, config.vocab_size, bias=False)
self.init_weights()
def get_input_embeddings(self):
return self.shared
def set_input_embeddings(self, new_embeddings):
self.shared = new_embeddings
self.encoder.set_input_embeddings(new_embeddings)
self.decoder.set_input_embeddings(new_embeddings)
def get_output_embeddings(self):
return self.lm_head
def get_encoder(self):
return self.encoder
def get_decoder(self):
return self.decoder
@add_start_docstrings_to_model_forward(T5_INPUTS_DOCSTRING)
@replace_return_docstrings(output_type=Seq2SeqLMOutput, config_class=_CONFIG_FOR_DOC)
def forward(
self,
input_ids=None,
attention_mask=None,
decoder_input_ids=None,
decoder_attention_mask=None,
encoder_outputs=None,
past_key_values=None,
head_mask=None,
inputs_embeds=None,
decoder_inputs_embeds=None,
labels=None,
use_cache=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
**kwargs,
):
if "lm_labels" in kwargs:
warnings.warn(
"The `lm_labels` argument is deprecated and will be removed in a future version, use `labels` instead.",
FutureWarning,
)
labels = kwargs.pop("lm_labels")
if "decoder_past_key_value_states" in kwargs:
warnings.warn(
"The `decoder_past_key_value_states` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_value_states")
if "decoder_past_key_values" in kwargs:
warnings.warn(
"The `decoder_past_key_values` argument is deprecated and will be removed in a future version, use `past_key_values` instead.",
FutureWarning,
)
past_key_values = kwargs.pop("decoder_past_key_values")
assert kwargs == {}, f"Unexpected keyword arguments: {list(kwargs.keys())}."
use_cache = use_cache if use_cache is not None else self.config.use_cache
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if encoder_outputs is None:
encoder_outputs = self.encoder(
input_ids=input_ids,
attention_mask=attention_mask,
inputs_embeds=inputs_embeds,
head_mask=head_mask,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
elif return_dict and not isinstance(encoder_outputs, BaseModelOutput):
encoder_outputs = BaseModelOutput(
last_hidden_state=encoder_outputs[0],
hidden_states=encoder_outputs[1] if len(encoder_outputs) > 1 else None,
attentions=encoder_outputs[2] if len(encoder_outputs) > 2 else None,
)
hidden_states = encoder_outputs[0]
if labels is not None and decoder_input_ids is None and decoder_inputs_embeds is None:
decoder_input_ids = self._shift_right(labels)
if past_key_values is not None:
assert labels is None, "Decoder should not use cached key value states when training."
if decoder_input_ids is not None:
decoder_input_ids = decoder_input_ids[:, -1:]
if decoder_inputs_embeds is not None:
decoder_inputs_embeds = decoder_inputs_embeds[:, -1:]
decoder_outputs = self.decoder(
input_ids=decoder_input_ids,
attention_mask=decoder_attention_mask,
inputs_embeds=decoder_inputs_embeds,
past_key_values=past_key_values,
encoder_hidden_states=hidden_states,
encoder_attention_mask=attention_mask,
head_mask=head_mask,
use_cache=use_cache,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
sequence_output = decoder_outputs[0]
sequence_output = sequence_output * (self.model_dim ** -0.5)
lm_logits = self.lm_head(sequence_output)
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss(ignore_index=-100)
loss = loss_fct(lm_logits.view(-1, lm_logits.size(-1)), labels.view(-1))
if not return_dict:
output = (lm_logits,) + decoder_outputs[1:] + encoder_outputs
return ((loss,) + output) if loss is not None else output
return Seq2SeqLMOutput(
loss=loss,
logits=lm_logits,
past_key_values=decoder_outputs.past_key_values,
decoder_hidden_states=decoder_outputs.hidden_states,
decoder_attentions=decoder_outputs.attentions,
encoder_last_hidden_state=encoder_outputs.last_hidden_state,
encoder_hidden_states=encoder_outputs.hidden_states,
encoder_attentions=encoder_outputs.attentions,
)
def prepare_inputs_for_generation(self, input_ids, past, attention_mask, use_cache, encoder_outputs, **kwargs):
if past is not None:
input_ids = input_ids[:, -1:]
return {
"decoder_input_ids": input_ids,
"past_key_values": past,
"encoder_outputs": encoder_outputs,
"attention_mask": attention_mask,
"use_cache": use_cache,
}
def _reorder_cache(self, past, beam_idx):
if past is None:
logger.warning("You might want to consider setting `use_cache=True` to speed up decoding")
return past
reordered_decoder_past = ()
for layer_past_states in past:
reordered_layer_past_states = ()
for layer_past_state in layer_past_states:
reordered_layer_past_states = reordered_layer_past_states + (
layer_past_state.index_select(0, beam_idx),
)
assert reordered_layer_past_states[0].shape == layer_past_states[0].shape
assert len(reordered_layer_past_states) == len(layer_past_states)
reordered_decoder_past = reordered_decoder_past + (reordered_layer_past_states,)
return reordered_decoder_past
| true | true |
f71acf41bdacbcba980d2fbc41eeab24cc7554c3 | 1,140 | py | Python | pytanga/components/config.py | renatoalmeidaoliveira/Pytanga | aa02f1c0f2573da1330d1d246ab780fa3be336a5 | ["MIT"] | null | null | null | pytanga/components/config.py | renatoalmeidaoliveira/Pytanga | aa02f1c0f2573da1330d1d246ab780fa3be336a5 | ["MIT"] | null | null | null | pytanga/components/config.py | renatoalmeidaoliveira/Pytanga | aa02f1c0f2573da1330d1d246ab780fa3be336a5 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""Config component.
This module defines the config Component.
<config>
</config>
"""
from typing import List

from . import AbstractComponent
class configComponent(AbstractComponent):
def __init__(self):
self._xmlns = {}
self.attributes = {}
self.parent_xmlns = {}
self._children: List[AbstractComponent] = []
self.childrenData = []
self.tag = 'config'
@property
def xmlns(self):
return self._xmlns
@xmlns.setter
def xmlns(self, xmlns):
self._xmlns = xmlns
def add(self, component) -> None:
self._children.append(component)
def remove(self, component) -> None:
self._children.remove(component)
def is_composite(self) -> bool:
return False
def getXMLNS(self):
for child in self._children:
child.getXMLNS()
return self._xmlns
def parse(self, serializer):
self.childrenData = []
self.getXMLNS()
for child in self._children:
self.childrenData.append(child.parse(serializer))
return serializer.parse(self)
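# Hypothetical usage sketch (the serializer and child component below are
# assumed, not defined in this module): build the component tree, then hand
# it to a serializer, which receives each component via parse():
#   conf = configComponent()
#   conf.add(child)                   # any AbstractComponent subclass
#   payload = conf.parse(serializer)  # e.g. an XML serializer from pytanga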
| 21.923077 | 61 | 0.60614 |
from typing import List

from . import AbstractComponent
class configComponent(AbstractComponent):
def __init__(self):
self._xmlns = {}
self.attributes = {}
self.parent_xmlns = {}
self._children: List[AbstractComponent] = []
self.childrenData = []
self.tag = 'config'
@property
def xmlns(self):
return self._xmlns
@xmlns.setter
def xmlns(self, xmlns):
self._xmlns = xmlns
def add(self, component) -> None:
self._children.append(component)
def remove(self, component) -> None:
self._children.remove(component)
def is_composite(self) -> bool:
return False
def getXMLNS(self):
for child in self._children:
child.getXMLNS()
return self._xmlns
def parse(self, serializer):
self.childrenData = []
self.getXMLNS()
for child in self._children:
self.childrenData.append(child.parse(serializer))
return serializer.parse(self)
| true | true |
f71acfeb35f54faa88ad90bc14c98d37cd3bbfd8 | 97 | py | Python | InvoiceBook_website/backend/InvoiceBook/apps.py | HumbertMeyers/InvoiceBook | 99af326a529566bdcff5c9c4015f2d89d5df2752 | ["MIT"] | null | null | null | InvoiceBook_website/backend/InvoiceBook/apps.py | HumbertMeyers/InvoiceBook | 99af326a529566bdcff5c9c4015f2d89d5df2752 | ["MIT"] | null | null | null | InvoiceBook_website/backend/InvoiceBook/apps.py | HumbertMeyers/InvoiceBook | 99af326a529566bdcff5c9c4015f2d89d5df2752 | ["MIT"] | null | null | null |
from django.apps import AppConfig
class InvoicebookConfig(AppConfig):
name = 'InvoiceBook'
| 16.166667 | 35 | 0.773196 |
from django.apps import AppConfig
class InvoicebookConfig(AppConfig):
name = 'InvoiceBook'
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.