after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
mean_function: Optional[MeanFunction] = None,
noise_variance: float = 1.0,
):
likelihood = gpflow.likelihoods.Gaussian(noise_variance)
_, Y_data = data
super().__init__(kernel, likelihood, mean_function, num_latent_gps=Y_data.shape[-1])
self.data = data_input_to_tensor(data)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
mean_function: Optional[MeanFunction] = None,
noise_variance: float = 1.0,
):
likelihood = gpflow.likelihoods.Gaussian(noise_variance)
_, Y_data = data
super().__init__(kernel, likelihood, mean_function, num_latent_gps=Y_data.shape[-1])
self.data = data
|
https://github.com/GPflow/GPflow/issues/1439
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
inducing_variable: Optional[InducingPoints] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
Z is a data matrix, of inducing inputs, size [M, D]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data_input_to_tensor(data)
self.num_data = data[0].shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
self.V = Parameter(np.zeros((len(self.inducing_variable), self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
inducing_variable: Optional[InducingPoints] = None,
):
"""
data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
Z is a data matrix, of inducing inputs, size [M, D]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data
self.num_data = data[0].shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
self.V = Parameter(np.zeros((len(self.inducing_variable), self.num_latent_gps)))
self.V.prior = tfp.distributions.Normal(
loc=to_default_float(0.0), scale=to_default_float(1.0)
)
|
https://github.com/GPflow/GPflow/issues/1439
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
inducing_variable: InducingPoints,
*,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
noise_variance: float = 1.0,
):
"""
`data`: a tuple of (X, Y), where the inputs X has shape [N, D]
and the outputs Y has shape [N, R].
`inducing_variable`: an InducingPoints instance or a matrix of
the pseudo inputs Z, of shape [M, D].
`kernel`, `mean_function` are appropriate GPflow objects
This method only works with a Gaussian likelihood, its variance is
initialized to `noise_variance`.
"""
likelihood = likelihoods.Gaussian(noise_variance)
X_data, Y_data = data_input_to_tensor(data)
num_latent_gps = Y_data.shape[-1] if num_latent_gps is None else num_latent_gps
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = X_data, Y_data
self.num_data = X_data.shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
inducing_variable: InducingPoints,
*,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
noise_variance: float = 1.0,
):
"""
`data`: a tuple of (X, Y), where the inputs X has shape [N, D]
and the outputs Y has shape [N, R].
`inducing_variable`: an InducingPoints instance or a matrix of
the pseudo inputs Z, of shape [M, D].
`kernel`, `mean_function` are appropriate GPflow objects
This method only works with a Gaussian likelihood, its variance is
initialized to `noise_variance`.
"""
likelihood = likelihoods.Gaussian(noise_variance)
X_data, Y_data = data
num_latent_gps = Y_data.shape[-1] if num_latent_gps is None else num_latent_gps
super().__init__(kernel, likelihood, mean_function, num_latent_gps=num_latent_gps)
self.data = data
self.num_data = X_data.shape[0]
self.inducing_variable = inducingpoint_wrapper(inducing_variable)
|
https://github.com/GPflow/GPflow/issues/1439
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.data = data_input_to_tensor(data)
X_data, Y_data = self.data
num_data = X_data.shape[0]
self.num_data = num_data
self.q_mu = Parameter(np.zeros((num_data, self.num_latent_gps)))
q_sqrt = np.array([np.eye(num_data) for _ in range(self.num_latent_gps)])
self.q_sqrt = Parameter(q_sqrt, transform=triangular())
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
X_data, Y_data = data
num_data = X_data.shape[0]
self.num_data = num_data
self.data = data
self.q_mu = Parameter(np.zeros((num_data, self.num_latent_gps)))
q_sqrt = np.array([np.eye(num_data) for _ in range(self.num_latent_gps)])
self.q_sqrt = Parameter(q_sqrt, transform=triangular())
|
https://github.com/GPflow/GPflow/issues/1439
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
self.data = data_input_to_tensor(data)
X_data, Y_data = self.data
self.num_data = X_data.shape[0]
self.q_alpha = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
self.q_lambda = Parameter(
np.ones((self.num_data, self.num_latent_gps)),
transform=gpflow.utilities.positive(),
)
|
def __init__(
self,
data: RegressionData,
kernel: Kernel,
likelihood: Likelihood,
mean_function: Optional[MeanFunction] = None,
num_latent_gps: Optional[int] = None,
):
"""
data = (X, Y) contains the input points [N, D] and the observations [N, P]
kernel, likelihood, mean_function are appropriate GPflow objects
"""
if num_latent_gps is None:
num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
super().__init__(kernel, likelihood, mean_function, num_latent_gps)
X_data, Y_data = data
self.data = data
self.num_data = X_data.shape[0]
self.q_alpha = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
self.q_lambda = Parameter(
np.ones((self.num_data, self.num_latent_gps)),
transform=gpflow.utilities.positive(),
)
|
https://github.com/GPflow/GPflow/issues/1439
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def __init__(
    self,
    data: RegressionData,
    kernel: Kernel,
    likelihood: Likelihood,
    mean_function: Optional[MeanFunction] = None,
    num_latent_gps: Optional[int] = None,
):
    """
    GP with a non-Gaussian likelihood, whitened parameterization.

    data is a tuple (X, Y): X is a data matrix of size [N, D] and Y a data
    matrix of size [N, R]. kernel, likelihood and mean_function are the
    usual GPflow objects. The latent function values are represented by
    centered (whitened) variables:
        v ~ N(0, I),  f = Lv + m(x),  with L L^T = K
    """
    resolved_num_latent = (
        self.calc_num_latent_gps_from_data(data, kernel, likelihood)
        if num_latent_gps is None
        else num_latent_gps
    )
    super().__init__(kernel, likelihood, mean_function, resolved_num_latent)
    # Capture the data as constant tensors once, up front.
    self.data = data_input_to_tensor(data)
    self.num_data = self.data[0].shape[0]
    latent_shape = (self.num_data, self.num_latent_gps)
    self.V = Parameter(np.zeros(latent_shape))
    zero = to_default_float(0.0)
    one = to_default_float(1.0)
    self.V.prior = tfp.distributions.Normal(loc=zero, scale=one)
|
def __init__(
    self,
    data: RegressionData,
    kernel: Kernel,
    likelihood: Likelihood,
    mean_function: Optional[MeanFunction] = None,
    num_latent_gps: Optional[int] = None,
):
    """
    data is a tuple of X, Y with X, a data matrix, size [N, D] and Y, a data matrix, size [N, R]
    kernel, likelihood, mean_function are appropriate GPflow objects
    This is a vanilla implementation of a GP with a non-Gaussian
    likelihood. The latent function values are represented by centered
    (whitened) variables, so
    v ~ N(0, I)
    f = Lv + m(x)
    with
    L L^T = K
    """
    if num_latent_gps is None:
        num_latent_gps = self.calc_num_latent_gps_from_data(data, kernel, likelihood)
    super().__init__(kernel, likelihood, mean_function, num_latent_gps)
    # BUG FIX: wrap the (possibly numpy) data in constant tensors once.
    # Storing the raw arrays makes every tf.function retrace capture them
    # afresh, which fails for large datasets with
    # "Cannot create a tensor proto whose content is larger than 2GB".
    self.data = data_input_to_tensor(data)
    self.num_data = self.data[0].shape[0]
    # Whitened latent values v ~ N(0, I), one column per latent GP.
    self.V = Parameter(np.zeros((self.num_data, self.num_latent_gps)))
    self.V.prior = tfp.distributions.Normal(
        loc=to_default_float(0.0), scale=to_default_float(1.0)
    )
|
https://github.com/GPflow/GPflow/issues/1439
|
Traceback (most recent call last):
File "main.py", line 177, in <module>
main(args)
File "main.py", line 64, in main
build_allele(args)
File "/path/to/1_model_sim/drivers.py", line 226, in build_allele
opt_model_list(m)
File "/path/to/1_model_sim/model.py", line 355, in opt_model_list
m.trainable_variables)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 73, in minimize
func, initial_params, jac=True, method=method, **scipy_kwargs
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/_minimize.py", line 610, in minimize
callback=callback, **options)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 345, in _minimize_lbfgsb
f, g = func_and_grad(x)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/lbfgsb.py", line 295, in func_and_grad
f = fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 327, in function_wrapper
return function(*(wrapper_args + args))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/scipy/optimize/optimize.py", line 65, in __call__
fg = self.fun(x, *args)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py", line 95, in _eval
loss, grad = _tf_eval(tf.convert_to_tensor(x))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 568, in __call__
result = self._call(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 615, in _call
self._initialize(args, kwds, add_initializers_to=initializers)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 497, in _initialize
*args, **kwds))
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2389, in _get_concrete_function_internal_garbage_collected
graph_function, _, _ = self._maybe_define_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2703, in _maybe_define_function
graph_function = self._create_graph_function(args, kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/function.py", line 2593, in _create_graph_function
capture_by_value=self._capture_by_value),
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 978, in func_graph_from_py_func
func_outputs = python_func(*func_args, **func_kwargs)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/eager/def_function.py", line 439, in wrapped_fn
return weak_wrapped_fn().__wrapped__(*args, **kwds)
File "/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/func_graph.py", line 968, in wrapper
raise e.ag_error_metadata.to_exception(e)
ValueError: in converted code:
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:88 _tf_eval *
loss, grads = _compute_loss_and_gradients(closure, variables)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/optimizers/scipy.py:145 _compute_loss_and_gradients *
loss = loss_closure()
/path/to/1_model_sim/model.py:354 None *
opt.minimize(lambda: - m.log_marginal_likelihood(),
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/models/gpr.py:75 log_marginal_likelihood *
log_prob = multivariate_normal(Y, m, L)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/gpflow/logdensities.py:95 multivariate_normal *
d = x - mu
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/ops/math_ops.py:927 r_binary_op_wrapper
x = ops.convert_to_tensor(x, dtype=y.dtype.base_dtype, name="x")
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/ops.py:1314 convert_to_tensor
ret = conversion_func(value, dtype=dtype, name=name, as_ref=as_ref)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_conversion_registry.py:52 _default_conversion_function
return constant_op.constant(value, dtype, name=name)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:258 constant
allow_broadcast=True)
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/constant_op.py:296 _constant_impl
allow_broadcast=allow_broadcast))
/path/to/1_model_sim/venv/lib/python3.6/site-packages/tensorflow_core/python/framework/tensor_util.py:522 make_tensor_proto
"Cannot create a tensor proto whose content is larger than 2GB.")
ValueError: Cannot create a tensor proto whose content is larger than 2GB.
|
ValueError
|
def K(self, X: tf.Tensor, X2: Optional[tf.Tensor] = None) -> tf.Tensor:
    """
    Changepoint kernel evaluation: each constituent kernel is faded in by a
    "starter" sigmoid and faded out by a "stopper" sigmoid around the
    changepoints, and the faded kernels are summed.
    """
    sig_X = self._sigmoids(X)  # N1 x 1 x Ncp
    if X2 is None:
        sig_X2 = sig_X
        N2 = tf.shape(X)[0]
    else:
        sig_X2 = self._sigmoids(X2)  # N2 x 1 x Ncp
        N2 = tf.shape(X2)[0]
    N1 = tf.shape(X)[0]
    # Sigmoids rising 0 -> 1 switch a kernel on; their complements
    # (falling 1 -> 0) switch it off.  Products broadcast to N1 x N2 x Ncp.
    switch_on = sig_X * tf.transpose(sig_X2, perm=(1, 0, 2))
    switch_off = (1 - sig_X) * tf.transpose((1 - sig_X2), perm=(1, 0, 2))
    # The first kernel never switches on and the last never switches off,
    # so pad with ones on the respective side.
    pad = tf.ones((N1, N2, 1), dtype=X.dtype)
    switch_on = tf.concat([pad, switch_on], axis=2)
    switch_off = tf.concat([switch_off, pad], axis=2)
    stacked = tf.stack([k(X, X2) for k in self.kernels], axis=2)
    return tf.reduce_sum(stacked * switch_on * switch_off, axis=2)
|
def K(self, X: tf.Tensor, X2: Optional[tf.Tensor] = None) -> tf.Tensor:
    """
    Changepoint kernel: constituent kernels are switched on/off by
    sigmoids around the changepoints and summed.

    :param X: first input set, [N1, D]
    :param X2: optional second input set, [N2, D]; defaults to X
    :return: covariance matrix, [N1, N2]
    """
    sig_X = self._sigmoids(X)  # N1 x 1 x Ncp
    sig_X2 = self._sigmoids(X2) if X2 is not None else sig_X  # N2 x 1 x Ncp
    # `starters` are the sigmoids going from 0 -> 1, whilst `stoppers` go
    # from 1 -> 0, dimensions are N1 x N2 x Ncp
    starters = sig_X * tf.transpose(sig_X2, perm=(1, 0, 2))
    stoppers = (1 - sig_X) * tf.transpose((1 - sig_X2), perm=(1, 0, 2))
    # prepend `starters` with ones and append ones to `stoppers` since the
    # first kernel has no start and the last kernel has no end.
    # BUG FIX: the padding must be shaped [N1, N2, 1], not [N, N, 1] --
    # when X2 has a different number of rows than X, the square padding
    # made tf.concat fail with a shape mismatch.
    N1 = tf.shape(X)[0]
    N2 = tf.shape(X2)[0] if X2 is not None else N1
    ones = tf.ones((N1, N2, 1), dtype=X.dtype)
    starters = tf.concat([ones, starters], axis=2)
    stoppers = tf.concat([stoppers, ones], axis=2)
    # now combine with the underlying kernels
    kernel_stack = tf.stack([k(X, X2) for k in self.kernels], axis=2)
    return tf.reduce_sum(kernel_stack * starters * stoppers, axis=2)
|
https://github.com/GPflow/GPflow/issues/1440
|
---------------------------------------------------------------------------
InvalidArgumentError Traceback (most recent call last)
<ipython-input-25-d1dbc7941bae> in <module>
----> 1 k(X, xx)
~/Code/GPflow/gpflow/kernels/base.py in __call__(self, X, X2, full_cov, presliced)
170
171 else:
--> 172 return self.K(X, X2)
173
174 def __add__(self, other):
~/Code/GPflow/gpflow/kernels/changepoints.py in K(self, X, X2)
83 N = tf.shape(X)[0]
84 ones = tf.ones((N, N, 1), dtype=X.dtype)
---> 85 starters = tf.concat([ones, starters], axis=2)
86 stoppers = tf.concat([stoppers, ones], axis=2)
87
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/util/dispatch.py in wrapper(*args, **kwargs)
178 """Call target, and fall back on dispatchers if there is a TypeError."""
179 try:
--> 180 return target(*args, **kwargs)
181 except (TypeError, ValueError):
182 # Note: convert_to_eager_tensor currently raises a ValueError, not a
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/ops/array_ops.py in concat(values, axis, name)
1515 dtype=dtypes.int32).get_shape().assert_has_rank(0)
1516 return identity(values[0], name=name)
-> 1517 return gen_array_ops.concat_v2(values=values, axis=axis, name=name)
1518
1519
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/ops/gen_array_ops.py in concat_v2(values, axis, name)
1116 pass # Add nodes to the TensorFlow graph.
1117 except _core._NotOkStatusException as e:
-> 1118 _ops.raise_from_not_ok_status(e, name)
1119 # Add nodes to the TensorFlow graph.
1120 if not isinstance(values, (list, tuple)):
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/tensorflow_core/python/framework/ops.py in raise_from_not_ok_status(e, name)
6604 message = e.message + (" name: " + name if name is not None else "")
6605 # pylint: disable=protected-access
-> 6606 six.raise_from(core._status_to_exception(e.code, message), None)
6607 # pylint: enable=protected-access
6608
~/anaconda3/envs/gpflux2/lib/python3.7/site-packages/six.py in raise_from(value, from_value)
InvalidArgumentError: ConcatOp : Dimensions of inputs should match: shape[0] = [100,100,1] vs. shape[1] = [100,25,1] [Op:ConcatV2] name: concat
|
InvalidArgumentError
|
def autoflow(*af_args, **af_kwargs):
    """
    Decorator factory: compiles the wrapped method once per object into the
    object's graph and thereafter runs the cached tensors in a session.
    """
    def autoflow_wrapper(method):
        @functools.wraps(method)
        def runnable(obj, *args, **kwargs):
            if not isinstance(obj, Node):
                raise GPflowError("AutoFlow works only with node-like objects.")
            if obj.is_built_coherence(obj.graph) is Build.NO:
                raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
            method_name = method.__name__
            storage = AutoFlow.get_autoflow(obj, method_name)
            session = obj.enquire_session(session=kwargs.pop("session", None))
            # Build (first call only) and run inside the object's graph.
            with session.graph.as_default(), tf.name_scope(_name_scope_name(obj, method_name)):
                if not storage:
                    _setup_storage(storage, *af_args, **af_kwargs)
                    _build_method(method, obj, storage)
                return _session_run(session, obj, storage, *args, **kwargs)
        return runnable
    return autoflow_wrapper
|
def autoflow(*af_args, **af_kwargs):
    """
    Decorator factory: compiles the wrapped method once per object into the
    object's graph and thereafter runs the cached tensors in a session.
    """
    def autoflow_wrapper(method):
        @functools.wraps(method)
        def runnable(obj, *args, **kwargs):
            if not isinstance(obj, Node):
                raise GPflowError("AutoFlow works only with node-like objects.")
            if obj.is_built_coherence(obj.graph) is Build.NO:
                raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
            name = method.__name__
            store = AutoFlow.get_autoflow(obj, name)
            session = kwargs.pop("session", None)
            session = obj.enquire_session(session=session)
            scope_name = _name_scope_name(obj, name)
            # BUG FIX: enter the object's graph for the run as well as the
            # build.  _session_run may need to create ops (e.g. initializers
            # for dataholders backed by tf.data iterators), and those must
            # live in the same graph as the cached computation; previously
            # the scope was only entered on the first (building) call.
            with session.graph.as_default(), tf.name_scope(scope_name):
                if not store:
                    _setup_storage(store, *af_args, **af_kwargs)
                    _build_method(method, obj, store)
                return _session_run(session, obj, store, *args, **kwargs)
        return runnable
    return autoflow_wrapper
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def autoflow_wrapper(method):
    """Wrap `method` so it is compiled once per object, then re-run cheaply."""
    @functools.wraps(method)
    def runnable(obj, *args, **kwargs):
        if not isinstance(obj, Node):
            raise GPflowError("AutoFlow works only with node-like objects.")
        if obj.is_built_coherence(obj.graph) is Build.NO:
            raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
        method_name = method.__name__
        storage = AutoFlow.get_autoflow(obj, method_name)
        session = obj.enquire_session(session=kwargs.pop("session", None))
        # Both building (first call) and running happen inside the graph.
        with session.graph.as_default(), tf.name_scope(_name_scope_name(obj, method_name)):
            if not storage:
                _setup_storage(storage, *af_args, **af_kwargs)
                _build_method(method, obj, storage)
            return _session_run(session, obj, storage, *args, **kwargs)
    return runnable
|
def autoflow_wrapper(method):
    """Wrap `method` so it is compiled once per object, then re-run cheaply."""
    @functools.wraps(method)
    def runnable(obj, *args, **kwargs):
        if not isinstance(obj, Node):
            raise GPflowError("AutoFlow works only with node-like objects.")
        if obj.is_built_coherence(obj.graph) is Build.NO:
            raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
        name = method.__name__
        store = AutoFlow.get_autoflow(obj, name)
        session = kwargs.pop("session", None)
        session = obj.enquire_session(session=session)
        scope_name = _name_scope_name(obj, name)
        # BUG FIX: the graph/name scope must be entered on every call, not
        # only when the store is first built -- _session_run can create ops
        # (e.g. iterator initializers) that must live in the same graph.
        with session.graph.as_default(), tf.name_scope(scope_name):
            if not store:
                _setup_storage(store, *af_args, **af_kwargs)
                _build_method(method, obj, store)
            return _session_run(session, obj, store, *args, **kwargs)
    return runnable
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def runnable(obj, *args, **kwargs):
    """Validate the object, then build (once) and run the cached tensors."""
    if not isinstance(obj, Node):
        raise GPflowError("AutoFlow works only with node-like objects.")
    if obj.is_built_coherence(obj.graph) is Build.NO:
        raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
    method_name = method.__name__
    storage = AutoFlow.get_autoflow(obj, method_name)
    session = obj.enquire_session(session=kwargs.pop("session", None))
    with session.graph.as_default(), tf.name_scope(_name_scope_name(obj, method_name)):
        if not storage:
            _setup_storage(storage, *af_args, **af_kwargs)
            _build_method(method, obj, storage)
        return _session_run(session, obj, storage, *args, **kwargs)
|
def runnable(obj, *args, **kwargs):
    """Validate the object, then build (once) and run the cached tensors."""
    if not isinstance(obj, Node):
        raise GPflowError("AutoFlow works only with node-like objects.")
    if obj.is_built_coherence(obj.graph) is Build.NO:
        raise GPflowError('Not built with "{graph}".'.format(graph=obj.graph))
    name = method.__name__
    store = AutoFlow.get_autoflow(obj, name)
    session = kwargs.pop("session", None)
    session = obj.enquire_session(session=session)
    scope_name = _name_scope_name(obj, name)
    # BUG FIX: enter the graph/name scope unconditionally, not only when the
    # store is first built -- _session_run can create ops (e.g. iterator
    # initializers) that must live in the same graph as the cached tensors.
    with session.graph.as_default(), tf.name_scope(scope_name):
        if not store:
            _setup_storage(store, *af_args, **af_kwargs)
            _build_method(method, obj, store)
        return _session_run(session, obj, store, *args, **kwargs)
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def initialize_variables(variables=None, session=None, force=False, **run_kwargs):
    """
    Run initializer ops for `variables` in `session`.

    With `variables=None` all global variables are initialized.  With
    `force=True` every candidate is (re-)initialized; otherwise only those
    found to be uninitialized are.  `run_kwargs` are forwarded to
    `session.run`.
    """
    if session is None:
        session = tf.get_default_session()
    if variables is None:
        init_op = tf.global_variables_initializer()
    else:
        candidates = (
            _initializable_tensors(variables)
            if force
            else _find_initializable_tensors(variables, session)
        )
        targets = list(candidates)
        if not targets:
            return
        init_op = tf.variables_initializer(targets)
    session.run(init_op, **run_kwargs)
|
def initialize_variables(variables=None, session=None, force=False, **run_kwargs):
    """
    Run initializer ops for `variables` in `session`.

    :param variables: iterable of initializable objects; None means all
        global variables.
    :param session: session to run in; defaults to the default session.
    :param force: when True, (re-)initialize every candidate regardless of
        its current state.
    :param run_kwargs: extra keyword arguments forwarded to `session.run`.
    """
    session = tf.get_default_session() if session is None else session
    if variables is None:
        initializer = tf.global_variables_initializer()
    else:
        if force:
            vars_for_init = list(variables)
        else:
            # BUG FIX: tf.report_uninitialized_variables assumes every entry
            # is a tf.Variable and crashes on other initializable objects
            # (e.g. tf.data Iterators, which have no `dtype`).  Query the
            # uninitialized names for real variables only, and always
            # initialize the remaining (non-Variable) objects.
            true_vars = [v for v in variables if isinstance(v, tf.Variable)]
            others = [v for v in variables if not isinstance(v, tf.Variable)]
            names = set()
            if true_vars:
                uninitialized = tf.report_uninitialized_variables(var_list=true_vars)
                names = {uv.decode("utf-8") for uv in session.run(uninitialized)}
            vars_for_init = [v for v in true_vars if v.name.split(":")[0] in names]
            vars_for_init.extend(others)
        if not vars_for_init:
            return
        initializer = tf.variables_initializer(vars_for_init)
    session.run(initializer, **run_kwargs)
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _clear(self):
    """Drop the compiled-graph state so the dataholder can be rebuilt."""
    self._reset_name()
    for cached in ("_initial_value_tensor", "_dataholder_tensor", "_is_initialized_tensor"):
        setattr(self, cached, None)
|
def _clear(self):
    """Reset cached graph state so the dataholder can be rebuilt.

    BUG FIX: also drop the cached is-initialized tensor; keeping a tensor
    from a cleared graph would leave a dangling reference after clearing.
    """
    self._reset_name()
    self._initial_value_tensor = None
    self._dataholder_tensor = None
    self._is_initialized_tensor = None
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _build(self):
    """Compile the dataholder tensor and cache its is-initialized op."""
    built = self._build_parameter()
    self._dataholder_tensor = built
    # Cache the initialization-status op once at build time so later
    # checks do not have to add new nodes to the graph.
    self._is_initialized_tensor = tf.is_variable_initialized(built)
|
def _build(self):
self._dataholder_tensor = self._build_parameter() # pylint: disable=W0201
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _init_parameter_defaults(self):
self._initial_value_tensor = None
self._dataholder_tensor = None
self._is_initialized_tensor = None
|
def _init_parameter_defaults(self):
self._initial_value_tensor = None
self._dataholder_tensor = None
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def initializables(self):
if self._externally_defined:
return None
return [(self.parameter_tensor, self.is_initialized_tensor)]
|
def initializables(self):
if self._externally_defined:
return None
return [self.parameter_tensor]
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def read_value(self, session=None):
if session is not None and not isinstance(session, tf.Session):
raise ValueError("TensorFlow session expected as an argument.")
if session is None and self._externally_defined:
raise GPflowError("Externally defined parameter requires session.")
elif session:
is_built = self.is_built_coherence(session.graph)
if is_built is Build.YES:
return self._read_parameter_tensor(session)
return self._value
|
def read_value(self, session=None):
if session is not None:
if not isinstance(session, tf.Session):
raise ValueError("TensorFlow session expected as session argument.")
is_built = self.is_built_coherence(session.graph)
if is_built is Build.YES:
return self._read_parameter_tensor(session)
elif self._externally_defined:
raise GPflowError("Externally defined parameter requires session.")
return self._value
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _clear(self):
self._reset_name()
self._externally_defined = False
self._is_initialized_tensor = None
self._initial_value_tensor = None
self._unconstrained_tensor = None
self._constrained_tensor = None
self._prior_tensor = None
|
def _clear(self):
self._reset_name()
self._externally_defined = False # pylint: disable=W0201
self._initial_value_tensor = None # pylint: disable=W0201
self._unconstrained_tensor = None # pylint: disable=W0201
self._constrained_tensor = None # pylint: disable=W0201
self._prior_tensor = None # pylint: disable=W0201
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _build(self):
unconstrained = self._build_parameter()
constrained = self._build_constrained(unconstrained)
prior = self._build_prior(unconstrained, constrained)
self._is_initialized_tensor = tf.is_variable_initialized(unconstrained)
self._unconstrained_tensor = unconstrained
self._constrained_tensor = constrained
self._prior_tensor = prior
|
def _build(self):
unconstrained = self._build_parameter()
constrained = self._build_constrained(unconstrained)
prior = self._build_prior(unconstrained, constrained)
self._unconstrained_tensor = unconstrained # pylint: disable=W0201
self._constrained_tensor = constrained # pylint: disable=W0201
self._prior_tensor = prior # pylint: disable=W0201
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _build_parameter(self):
if self._externally_defined:
self._check_tensor_trainable(self.parameter_tensor)
return self.parameter_tensor
name = self._parameter_name()
tensor = misc.get_variable_by_name(name)
if tensor is not None:
raise GPflowError(
'Tensor with name "{name}" already exists, {tensor}.'.format(
name=name, tensor=tensor
)
)
value = self._apply_transform(self._value)
shape = value.shape if self.fixed_shape else None
init = tf.placeholder(self.dtype, shape=shape, name="initial_unconstrained_value")
self._initial_value_tensor = init
if self.fixed_shape:
args = dict(trainable=self.trainable)
else:
args = dict(validate_shape=False, trainable=self.trainable)
variable = tf.get_variable(name, initializer=init, **args)
return variable
|
def _build_parameter(self):
if self._externally_defined:
self._check_tensor_trainable(self.parameter_tensor)
return self.parameter_tensor
name = self._parameter_name()
tensor = misc.get_variable_by_name(name)
if tensor is not None:
raise GPflowError(
'Tensor with name "{name}" already exists, {tensor}.'.format(
name=name, tensor=tensor
)
)
value = self._apply_transform(self._value)
shape = value.shape if self.fixed_shape else None
init = tf.placeholder(self.dtype, shape=shape, name="initial_unconstrained_value")
self._initial_value_tensor = init
if self.fixed_shape:
return tf.get_variable(name, initializer=init, trainable=self.trainable)
return tf.get_variable(
name, initializer=init, validate_shape=False, trainable=self.trainable
)
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def _init_parameter_defaults(self):
self._is_initialized_tensor = None
self._initial_value_tensor = None
self._unconstrained_tensor = None
self._prior_tensor = None
self._constrained_tensor = None
|
def _init_parameter_defaults(self):
self._initial_value_tensor = None
self._unconstrained_tensor = None
self._prior_tensor = None
self._constrained_tensor = None
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def minimize(
self,
model,
session=None,
var_list=None,
feed_dict=None,
maxiter=1000,
initialize=False,
anchor=True,
**kwargs,
):
"""
Minimizes objective function of the model.
:param model: GPflow model with objective tensor.
:param session: Session where optimization will be run.
:param var_list: List of extra variables which should be trained during optimization.
:param feed_dict: Feed dictionary of tensors passed to session run method.
:param maxiter: Number of run interation.
:param initialize: If `True` model parameters will be re-initialized even if they were
initialized before for gotten session.
:param anchor: If `True` trained variable values computed during optimization at
particular session will be synchronized with internal parameter values.
:param kwargs: This is a dictionary of extra parameters for session run method.
"""
if model is None or not isinstance(model, Model):
raise ValueError("Unknown type passed for optimization.")
session = model.enquire_session(session)
self._model = model
objective = model.objective
with session.graph.as_default():
full_var_list = self._gen_var_list(model, var_list)
# Create optimizer variables before initialization.
self._minimize_operation = self.optimizer.minimize(
objective, var_list=full_var_list, **kwargs
)
model.initialize(session=session, force=initialize)
self._initialize_optimizer(session, full_var_list)
feed_dict = self._gen_feed_dict(model, feed_dict)
for _i in range(maxiter):
session.run(self.minimize_operation, feed_dict=feed_dict)
if anchor:
model.anchor(session)
|
def minimize(
self,
model,
session=None,
var_list=None,
feed_dict=None,
maxiter=1000,
initialize=True,
anchor=True,
**kwargs,
):
"""
Minimizes objective function of the model.
:param model: GPflow model with objective tensor.
:param session: Session where optimization will be run.
:param var_list: List of extra variables which should be trained during optimization.
:param feed_dict: Feed dictionary of tensors passed to session run method.
:param maxiter: Number of run interation.
:param initialize: If `True` model parameters will be re-initialized even if they were
initialized before for gotten session.
:param anchor: If `True` trained variable values computed during optimization at
particular session will be synchronized with internal parameter values.
:param kwargs: This is a dictionary of extra parameters for session run method.
"""
if model is None or not isinstance(model, Model):
raise ValueError("Unknown type passed for optimization.")
session = model.enquire_session(session)
self._model = model
objective = model.objective
with session.graph.as_default():
full_var_list = self._gen_var_list(model, var_list)
# Create optimizer variables before initialization.
self._minimize_operation = self.optimizer.minimize(
objective, var_list=full_var_list, **kwargs
)
model.initialize(session=session, force=initialize)
self._initialize_optimizer(session, full_var_list)
feed_dict = self._gen_feed_dict(model, feed_dict)
for _i in range(maxiter):
session.run(self.minimize_operation, feed_dict=feed_dict)
if anchor:
model.anchor(session)
|
https://github.com/GPflow/GPflow/issues/561
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-3-e3c07a8fceb7> in <module>()
10 np.random.randn(M, D),
11 minibatch_size=2)
---> 12 model.compute_log_likelihood()
/[...]/GPflow/gpflow/decors.py in runnable(obj, *args, **kwargs)
152 _setup_storage(store, *af_args, **af_kwargs)
153 _build_method(method, obj, store)
--> 154 return _session_run(session, obj, store, *args, **kwargs)
155 return runnable
156 return autoflow_wrapper
/[...]/GPflow/gpflow/decors.py in _session_run(session, obj, store, *args, **kwargs)
189 feed_dict.update(obj.feeds)
190 initialize = kwargs.pop('initialize', False)
--> 191 obj.initialize(session=session, force=initialize)
192 return session.run(store['result'], **kwargs)
193
/[...]/GPflow/gpflow/core/node.py in initialize(self, session, force)
82 session=session,
83 force=force,
---> 84 feed_dict=self.initializable_feeds)
85
86 def clear(self):
/[...]/GPflow/gpflow/misc.py in initialize_variables(variables, session, force, **run_kwargs)
82 initializer = tf.variables_initializer(variables)
83 else:
---> 84 uninitialized = tf.report_uninitialized_variables(var_list=variables)
85 def uninitialized_names():
86 for uv in session.run(uninitialized):
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/util/tf_should_use.py in wrapped(*args, **kwargs)
105 """
106 def wrapped(*args, **kwargs):
--> 107 return _add_should_use_warning(fn(*args, **kwargs))
108 return tf_decorator.make_decorator(
109 fn, wrapped, 'should_use_result',
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in report_uninitialized_variables(var_list, name)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/variables.py in <listcomp>(.0)
1519 variables_mask = math_ops.logical_not(
1520 array_ops.stack(
-> 1521 [state_ops.is_variable_initialized(v) for v in var_list]))
1522 # Get a 1-D string tensor containing all the variable names.
1523 variable_names_tensor = array_ops.constant(
/[...]/anaconda3/envs/py35cpu/lib/python3.5/site-packages/tensorflow/python/ops/state_ops.py in is_variable_initialized(ref, name)
182 A `Tensor` of type `bool`.
183 """
--> 184 if ref.dtype._is_ref_dtype:
185 return gen_state_ops.is_variable_initialized(ref=ref, name=name)
186 # Handle resource variables.
AttributeError: 'Iterator' object has no attribute 'dtype'
|
AttributeError
|
def prob_is_largest(self, Y, mu, var, gh_x, gh_w):
    """Probability that the latent function indicated by label ``Y`` is the
    largest of the ``self.num_classes`` latent functions, approximated by
    Gauss-Hermite quadrature.

    :param Y: integer class labels (flattened internally)
    :param mu: per-class latent means
    :param var: per-class latent variances, same shape as ``mu``
    :param gh_x: Gauss-Hermite quadrature abscissae
    :param gh_w: Gauss-Hermite quadrature weights
    :return: column vector of probabilities, one per data point

    NOTE(review): assumes ``mu``/``var`` are (num_data, num_classes) --
    confirm against the caller.
    """
    # work out what the mean and variance is of the indicated latent function.
    # oh_on is a one-hot mask selecting the labelled class per data point.
    oh_on = tf.cast(
        tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 1.0, 0.0), tf.float64
    )
    mu_selected = tf.reduce_sum(oh_on * mu, 1)
    var_selected = tf.reduce_sum(oh_on * var, 1)
    # generate Gauss Hermite grid
    # Variance is clipped away from zero before the sqrt for numerical safety.
    X = tf.reshape(mu_selected, (-1, 1)) + gh_x * tf.reshape(
        tf.sqrt(tf.clip_by_value(2.0 * var_selected, 1e-10, np.inf)), (-1, 1)
    )
    # compute the CDF of the Gaussian between the latent functions and the grid (including the selected function)
    dist = (tf.expand_dims(X, 1) - tf.expand_dims(mu, 2)) / tf.expand_dims(
        tf.sqrt(tf.clip_by_value(var, 1e-10, np.inf)), 2
    )
    cdfs = 0.5 * (1.0 + tf.erf(dist / np.sqrt(2.0)))
    # Squash CDF values slightly away from 0 and 1 to avoid degenerate products.
    cdfs = cdfs * (1 - 2e-4) + 1e-4
    # blank out all the distances on the selected latent function
    oh_off = tf.cast(
        tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 0.0, 1.0), tf.float64
    )
    cdfs = cdfs * tf.expand_dims(oh_off, 2) + tf.expand_dims(oh_on, 2)
    # take the product over the latent functions, and the sum over the GH grid.
    return tf.matmul(
        tf.reduce_prod(cdfs, reduction_indices=[1]),
        tf.reshape(gh_w / np.sqrt(np.pi), (-1, 1)),
    )
|
def prob_is_largest(self, Y, mu, var, gh_x, gh_w):
    """Probability that the latent function indicated by label ``Y`` is the
    largest of the ``self.num_classes`` latent functions, approximated by
    Gauss-Hermite quadrature.

    :param Y: integer class labels (flattened internally)
    :param mu: per-class latent means
    :param var: per-class latent variances, same shape as ``mu``
    :param gh_x: Gauss-Hermite quadrature abscissae
    :param gh_w: Gauss-Hermite quadrature weights
    :return: column vector of probabilities, one per data point
    """
    # work out what the mean and variance is of the indicated latent function.
    # oh_on is a one-hot mask selecting the labelled class per data point.
    oh_on = tf.cast(
        tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 1.0, 0.0), tf.float64
    )
    mu_selected = tf.reduce_sum(oh_on * mu, 1)
    var_selected = tf.reduce_sum(oh_on * var, 1)
    # generate Gauss Hermite grid
    # Variance is clipped away from zero before the sqrt for numerical safety.
    X = tf.reshape(mu_selected, (-1, 1)) + gh_x * tf.reshape(
        tf.sqrt(tf.clip_by_value(2.0 * var_selected, 1e-10, np.inf)), (-1, 1)
    )
    # compute the CDF of the Gaussian between the latent functions and the grid (including the selected function)
    dist = (tf.expand_dims(X, 1) - tf.expand_dims(mu, 2)) / tf.expand_dims(
        tf.sqrt(tf.clip_by_value(var, 1e-10, np.inf)), 2
    )
    cdfs = 0.5 * (1.0 + tf.erf(dist / np.sqrt(2.0)))
    # Squash CDF values slightly away from 0 and 1 to avoid degenerate products.
    cdfs = cdfs * (1 - 2e-4) + 1e-4
    # blank out all the distances on the selected latent function
    oh_off = tf.cast(
        tf.one_hot(tf.reshape(Y, (-1,)), self.num_classes, 0.0, 1.0), tf.float64
    )
    cdfs = cdfs * tf.expand_dims(oh_off, 2) + tf.expand_dims(oh_on, 2)
    # take the product over the latent functions, and the sum over the GH grid.
    # BUGFIX: pass the reduction axis as `reduction_indices=[1]` (rank-1 list).
    # A bare scalar axis broke tf.gradients through the Prod op with
    # "ValueError: Shape () must have rank 1" (see GPflow#161 traceback).
    return tf.matmul(
        tf.reduce_prod(cdfs, reduction_indices=[1]),
        tf.reshape(gh_w / np.sqrt(np.pi), (-1, 1)),
    )
|
https://github.com/GPflow/GPflow/issues/161
|
python
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-e4eebd086840> in <module>()
1 m.kern.white.variance.fixed = True
2 m.Z.fixed = True
----> 3 _ = m.optimize()
/Users/danmarthaler/GPflow/GPflow/model.pyc in optimize(self, method, tol, callback, maxiter, **kw)
207
208 if type(method) is str:
--> 209 return self._optimize_np(method, tol, callback, maxiter, **kw)
210 else:
211 return self._optimize_tf(method, callback, maxiter, **kw)
/Users/danmarthaler/GPflow/GPflow/model.pyc in _optimize_np(self, method, tol, callback, maxiter, **kw)
265 """
266 if self._needs_recompile:
--> 267 self._compile()
268
269 options = dict(disp=True, maxiter=maxiter)
/Users/danmarthaler/GPflow/GPflow/model.pyc in _compile(self, optimizer)
127 with self.tf_mode():
128 f = self.build_likelihood() + self.build_prior()
--> 129 g, = tf.gradients(f, self._free_vars)
130
131 self._minusF = tf.neg(f, name='objective')
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/gradients.pyc in gradients(ys, xs, grad_ys, name, colocate_gradients_with_ops, gate_gradients, aggregation_method)
476 # If grad_fn was found, do not use SymbolicGradient even for
477 # functions.
--> 478 in_grads = _AsList(grad_fn(op, *out_grads))
479 else:
480 # For function call ops, we add a 'SymbolicGradient'
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/math_grad.pyc in _ProdGrad(op, grad)
128 reduced = math_ops.cast(op.inputs[1], dtypes.int32)
129 idx = math_ops.range(0, array_ops.rank(op.inputs[0]))
--> 130 other, _ = array_ops.listdiff(idx, reduced)
131 perm = array_ops.concat(0, [reduced, other])
132 reduced_num = math_ops.reduce_prod(array_ops.gather(input_shape, reduced))
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/gen_array_ops.pyc in list_diff(x, y, name)
1199 idx: A `Tensor` of type `int32`. 1-D. Positions of `x` values preserved in `out`.
1200 """
-> 1201 result = _op_def_lib.apply_op("ListDiff", x=x, y=y, name=name)
1202 return _ListDiffOutput._make(result)
1203
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/op_def_library.pyc in apply_op(self, op_type_name, name, **keywords)
701 op = g.create_op(op_type_name, inputs, output_types, name=scope,
702 input_types=input_types, attrs=attr_protos,
--> 703 op_def=op_def)
704 outputs = op.outputs
705 return _Restructure(ops.convert_n_to_tensor(outputs),
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in create_op(self, op_type, inputs, dtypes, input_types, name, attrs, op_def, compute_shapes, compute_device)
2310 original_op=self._default_original_op, op_def=op_def)
2311 if compute_shapes:
-> 2312 set_shapes_for_outputs(ret)
2313 self._add_op(ret)
2314 self._record_op_seen_by_control_dependencies(ret)
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/ops.pyc in set_shapes_for_outputs(op)
1702 raise RuntimeError("No shape function registered for standard op: %s"
1703 % op.type)
-> 1704 shapes = shape_func(op)
1705 if shapes is None:
1706 raise RuntimeError(
/usr/local/lib/python2.7/site-packages/tensorflow/python/ops/array_ops.pyc in _ListDiffShape(op)
1979 """Shape function for the ListDiff op."""
1980 op.inputs[0].get_shape().assert_has_rank(1)
-> 1981 op.inputs[1].get_shape().assert_has_rank(1)
1982 # TODO(mrry): Indicate that the length falls within an interval?
1983 return [tensor_shape.vector(None)] * 2
/usr/local/lib/python2.7/site-packages/tensorflow/python/framework/tensor_shape.pyc in assert_has_rank(self, rank)
619 """
620 if self.ndims not in (None, rank):
--> 621 raise ValueError("Shape %s must have rank %d" % (self, rank))
622
623 def with_rank(self, rank):
ValueError: Shape () must have rank 1
|
ValueError
|
def __call__(self, tf_method):
    """Wrap ``tf_method`` so it can be invoked with plain numpy arguments.

    The TF graph for the method is built once and cached on the model
    instance under ``_<method>_graph``; subsequent calls just feed the
    numpy inputs and the current free state into the session.
    """
    @wraps(tf_method)
    def runnable(instance, *np_args):
        cache_attr = "_" + tf_method.__name__ + "_graph"
        if not hasattr(instance, cache_attr):
            if instance._needs_recompile:
                # Recompiling refreshes the free-variable vector before we
                # capture it in the feed below.
                instance._compile()
            self.tf_args = [tf.placeholder(*spec) for spec in self.tf_arg_tuples]
            with instance.tf_mode():
                built_graph = tf_method(instance, *self.tf_args)
            setattr(instance, cache_attr, built_graph)
        feed = {ph: arr for ph, arr in zip(self.tf_args, np_args)}
        feed[instance._free_vars] = instance.get_free_state()
        return instance._session.run(getattr(instance, cache_attr), feed_dict=feed)

    return runnable
|
def __call__(self, tf_method):
    """Wrap ``tf_method`` so it can be invoked with plain numpy arguments.

    The TF graph for the method is built once and cached on the model
    instance; calls then feed the numpy inputs plus the current free
    state into the session.
    """
    @wraps(tf_method)
    def runnable(instance, *np_args):
        graph_name = "_" + tf_method.__name__ + "_graph"
        if not hasattr(instance, graph_name):
            # BUGFIX: compile only when the model reports it is needed.
            # This keeps `instance._free_vars` in sync with the current set
            # of (un)fixed parameters; the old unconditional compile left a
            # mismatched free-state vector and failed with "Cannot feed
            # value of shape (n,) for Tensor ... shape (m,)" (GPflow#54).
            if instance._needs_recompile:
                instance._compile()  # ensures free_vars is up-to-date.
            self.tf_args = [tf.placeholder(*a) for a in self.tf_arg_tuples]
            with instance.tf_mode():
                graph = tf_method(instance, *self.tf_args)
            setattr(instance, graph_name, graph)
        feed_dict = dict(zip(self.tf_args, np_args))
        feed_dict[instance._free_vars] = instance.get_free_state()
        graph = getattr(instance, graph_name)
        return instance._session.run(graph, feed_dict=feed_dict)

    return runnable
|
https://github.com/GPflow/GPflow/issues/54
|
Traceback (most recent call last):
File "/Users/mqbssaby/PrivateProjects/BranchedGP/runfile.py", line 29, in <module>
[mu,var] = m.predict_f(X)
File "/Users/mqbssaby/pythonlibs/GPflow/GPflow/model.py", line 82, in runnable
return instance._session.run(graph, feed_dict=feed_dict)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 340, in run
run_metadata_ptr)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 553, in _run
% (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))
ValueError: Cannot feed value of shape (4,) for Tensor u'Variable:0', which has shape '(5,)'
|
ValueError
|
def runnable(instance, *np_args):
    # NOTE(review): `tf_method` and `self` are closed-over from the enclosing
    # decorator; this fragment is not self-contained as shown.
    # The built graph is cached on the model instance, one attribute per method.
    graph_name = "_" + tf_method.__name__ + "_graph"
    if not hasattr(instance, graph_name):
        if instance._needs_recompile:
            instance._compile()  # ensures free_vars is up-to-date.
        self.tf_args = [tf.placeholder(*a) for a in self.tf_arg_tuples]
        with instance.tf_mode():
            graph = tf_method(instance, *self.tf_args)
        setattr(instance, graph_name, graph)
    # Feed the numpy arguments plus the current free state into the session.
    feed_dict = dict(zip(self.tf_args, np_args))
    feed_dict[instance._free_vars] = instance.get_free_state()
    graph = getattr(instance, graph_name)
    return instance._session.run(graph, feed_dict=feed_dict)
|
def runnable(instance, *np_args):
    # NOTE(review): `tf_method` and `self` are closed-over from the enclosing
    # decorator; this fragment is not self-contained as shown.
    graph_name = "_" + tf_method.__name__ + "_graph"
    if not hasattr(instance, graph_name):
        # BUGFIX: compile only when the model reports it is needed, which
        # refreshes `instance._free_vars` after parameters were fixed/freed.
        # The old unconditional compile left a stale free-state vector and
        # failed with "Cannot feed value of shape (n,) for Tensor ... shape
        # (m,)" (GPflow#54).
        if instance._needs_recompile:
            instance._compile()  # ensures free_vars is up-to-date.
        self.tf_args = [tf.placeholder(*a) for a in self.tf_arg_tuples]
        with instance.tf_mode():
            graph = tf_method(instance, *self.tf_args)
        setattr(instance, graph_name, graph)
    feed_dict = dict(zip(self.tf_args, np_args))
    feed_dict[instance._free_vars] = instance.get_free_state()
    graph = getattr(instance, graph_name)
    return instance._session.run(graph, feed_dict=feed_dict)
|
https://github.com/GPflow/GPflow/issues/54
|
Traceback (most recent call last):
File "/Users/mqbssaby/PrivateProjects/BranchedGP/runfile.py", line 29, in <module>
[mu,var] = m.predict_f(X)
File "/Users/mqbssaby/pythonlibs/GPflow/GPflow/model.py", line 82, in runnable
return instance._session.run(graph, feed_dict=feed_dict)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 340, in run
run_metadata_ptr)
File "/Users/mqbssaby/anaconda/lib/python2.7/site-packages/tensorflow/python/client/session.py", line 553, in _run
% (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))
ValueError: Cannot feed value of shape (4,) for Tensor u'Variable:0', which has shape '(5,)'
|
ValueError
|
def browse(self, uri):
    """Browse ``uri`` and return a sorted list of child refs.

    Directories sort before files; dotfiles, excluded extensions,
    unfollowed symlinks and entries escaping the configured base dirs
    are skipped.  Browsing a file URI is rejected with an error log.
    """
    logger.debug("Browsing files at: %s", uri)
    result = []
    local_path = path.uri_to_path(uri)
    if str(local_path) == "root":
        # Virtual top level: list the configured media dirs.
        return list(self._get_media_dirs_refs())
    if not self._is_in_basedir(local_path):
        logger.warning(
            "Rejected attempt to browse path (%s) outside dirs defined "
            "in file/media_dirs config.",
            uri,
        )
        return []
    # Reuse the already-parsed `local_path` instead of re-parsing the URI.
    # iterdir() on a regular file would raise NotADirectoryError.
    if local_path.is_file():
        logger.error("Rejected attempt to browse file (%s)", uri)
        return []
    for dir_entry in local_path.iterdir():
        child_path = dir_entry.resolve()
        uri = path.path_to_uri(child_path)
        if not self._show_dotfiles and dir_entry.name.startswith("."):
            continue
        if (
            self._excluded_file_extensions
            and dir_entry.suffix in self._excluded_file_extensions
        ):
            continue
        if child_path.is_symlink() and not self._follow_symlinks:
            logger.debug("Ignoring symlink: %s", uri)
            continue
        if not self._is_in_basedir(child_path):
            logger.debug("Ignoring symlink to outside base dir: %s", uri)
            continue
        if child_path.is_dir():
            result.append(models.Ref.directory(name=dir_entry.name, uri=uri))
        elif child_path.is_file():
            result.append(models.Ref.track(name=dir_entry.name, uri=uri))

    def order(item):
        # Directories (False) sort before non-directories (True), then by name.
        return (item.type != models.Ref.DIRECTORY, item.name)

    result.sort(key=order)
    return result
|
def browse(self, uri):
    """Browse ``uri`` and return a sorted list of child refs.

    Directories sort before files; dotfiles, excluded extensions,
    unfollowed symlinks and entries escaping the configured base dirs
    are skipped.  Browsing a file URI is rejected with an error log.
    """
    logger.debug("Browsing files at: %s", uri)
    result = []
    local_path = path.uri_to_path(uri)
    if str(local_path) == "root":
        # Virtual top level: list the configured media dirs.
        return list(self._get_media_dirs_refs())
    if not self._is_in_basedir(local_path):
        logger.warning(
            "Rejected attempt to browse path (%s) outside dirs defined "
            "in file/media_dirs config.",
            uri,
        )
        return []
    # BUGFIX: guard against file URIs before iterating.  iterdir() on a
    # regular file raises NotADirectoryError and crashed the backend
    # (mopidy#1906).
    if local_path.is_file():
        logger.error("Rejected attempt to browse file (%s)", uri)
        return []
    for dir_entry in local_path.iterdir():
        child_path = dir_entry.resolve()
        uri = path.path_to_uri(child_path)
        if not self._show_dotfiles and dir_entry.name.startswith("."):
            continue
        if (
            self._excluded_file_extensions
            and dir_entry.suffix in self._excluded_file_extensions
        ):
            continue
        if child_path.is_symlink() and not self._follow_symlinks:
            logger.debug("Ignoring symlink: %s", uri)
            continue
        if not self._is_in_basedir(child_path):
            logger.debug("Ignoring symlink to outside base dir: %s", uri)
            continue
        if child_path.is_dir():
            result.append(models.Ref.directory(name=dir_entry.name, uri=uri))
        elif child_path.is_file():
            result.append(models.Ref.track(name=dir_entry.name, uri=uri))

    def order(item):
        # Directories (False) sort before non-directories (True), then by name.
        return (item.type != models.Ref.DIRECTORY, item.name)

    result.sort(key=order)
    return result
|
https://github.com/mopidy/mopidy/issues/1906
|
FileBackend-2 DEBUG 2020-05-07 14:06:45,889 Browsing files at: file:///home/nick/Music/Tall%20Ships%20-%20Chemistry.mp3
Core-7 ERROR 2020-05-07 14:06:45,889 FileBackend backend caused an exception.
Traceback (most recent call last):
File "/home/nick/Dev/mopidy-dev/mopidy/mopidy/core/library.py", line 17, in _backend_error_handling
yield
File "/home/nick/Dev/mopidy-dev/mopidy/mopidy/core/library.py", line 114, in _browse
result = backend.library.browse(uri).get()
File "/usr/lib/python3/dist-packages/pykka/_threading.py", line 45, in get
_compat.reraise(*self._data['exc_info'])
File "/usr/lib/python3/dist-packages/pykka/_compat/__init__.py", line 29, in reraise
raise value
File "/usr/lib/python3/dist-packages/pykka/_actor.py", line 193, in _actor_loop
response = self._handle_receive(envelope.message)
File "/usr/lib/python3/dist-packages/pykka/_actor.py", line 299, in _handle_receive
return callee(*message.args, **message.kwargs)
File "/home/nick/Dev/mopidy-dev/mopidy/mopidy/file/library.py", line 55, in browse
for dir_entry in local_path.iterdir():
File "/usr/lib/python3.8/pathlib.py", line 1113, in iterdir
for name in self._accessor.listdir(self):
NotADirectoryError: [Errno 20] Not a directory: '/home/nick/Music/Tall Ships - Chemistry.mp3'
|
NotADirectoryError
|
def on_error(self, error, debug):
    # Handle an ERROR message from the GStreamer bus and stop playback.
    # `error.message` is a str on Python 3; no decoding is needed.
    gst_logger.error(f"GStreamer error: {error.message}")
    gst_logger.debug(f"Got ERROR bus message: error={error!r} debug={debug!r}")
    # TODO: is this needed?
    self._audio.stop_playback()
|
def on_error(self, error, debug):
    """Handle an ERROR message from the GStreamer bus and stop playback."""
    # BUGFIX: `error` is a GLib.Error whose `message` is already a str on
    # Python 3; calling str(...).decode() raised
    # AttributeError: 'str' object has no attribute 'decode' (mopidy#1851).
    gst_logger.debug("Got ERROR bus message: error=%r debug=%r", error, debug)
    gst_logger.error("GStreamer error: %s", error.message)
    # TODO: is this needed?
    self._audio.stop_playback()
|
https://github.com/mopidy/mopidy/issues/1851
|
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 219, in on_message
self.on_error(error, debug)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 328, in on_error
error_msg = str(error).decode()
AttributeError: 'str' object has no attribute 'decode'
|
AttributeError
|
def on_warning(self, error, debug):
    # Log a WARNING message from the GStreamer bus; no state change.
    # `error.message` is a str on Python 3; no decoding is needed.
    gst_logger.warning(f"GStreamer warning: {error.message}")
    gst_logger.debug(f"Got WARNING bus message: error={error!r} debug={debug!r}")
|
def on_warning(self, error, debug):
    """Log a WARNING message from the GStreamer bus; no state change."""
    # BUGFIX: `error` is a GLib.Error whose `message` is already a str on
    # Python 3; calling str(...).decode() raised
    # AttributeError: 'str' object has no attribute 'decode' (mopidy#1851).
    gst_logger.warning("GStreamer warning: %s", error.message)
    gst_logger.debug("Got WARNING bus message: error=%r debug=%r", error, debug)
|
https://github.com/mopidy/mopidy/issues/1851
|
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 219, in on_message
self.on_error(error, debug)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 328, in on_error
error_msg = str(error).decode()
AttributeError: 'str' object has no attribute 'decode'
|
AttributeError
|
def _unwrap_stream(uri, timeout, scanner, requests_session):
    """
    Get a stream URI from a playlist URI, ``uri``.
    Unwraps nested playlists until something that's not a playlist is found or
    the ``timeout`` is reached.

    :param uri: playlist or stream URI to unwrap
    :param timeout: overall time budget for the whole unwrapping loop
    :param scanner: GStreamer scanner used to probe each candidate URI
    :param requests_session: HTTP session used to download playlist bodies
    :returns: ``(uri, scan_result)``; either element may be ``None``

    NOTE(review): ``timeout`` is logged as ms and divided by 1000 for the
    download call, but added directly to ``time.time()`` (seconds) for the
    deadline -- confirm the intended unit with callers.
    """
    original_uri = uri
    seen_uris = set()  # guards against playlists that reference themselves
    deadline = time.time() + timeout
    while time.time() < deadline:
        if uri in seen_uris:
            logger.info(
                "Unwrapping stream from URI (%s) failed: playlist referenced itself",
                uri,
            )
            return None, None
        else:
            seen_uris.add(uri)
        logger.debug("Unwrapping stream from URI: %s", uri)
        try:
            # Remaining budget for this scan; bail out if already exhausted.
            scan_timeout = deadline - time.time()
            if scan_timeout < 0:
                logger.info(
                    "Unwrapping stream from URI (%s) failed: timed out in %sms",
                    uri,
                    timeout,
                )
                return None, None
            scan_result = scanner.scan(uri, timeout=scan_timeout)
        except exceptions.ScannerError as exc:
            logger.debug("GStreamer failed scanning URI (%s): %s", uri, exc)
            scan_result = None
        if scan_result is not None:
            # text/* and application/* MIME types typically indicate a
            # playlist or markup document rather than an audio stream.
            # `mime` may be None, so it is checked before startswith().
            has_interesting_mime = (
                scan_result.mime is not None
                and not scan_result.mime.startswith("text/")
                and not scan_result.mime.startswith("application/")
            )
            if scan_result.playable or has_interesting_mime:
                logger.debug("Unwrapped potential %s stream: %s", scan_result.mime, uri)
                return uri, scan_result
        # Not playable: treat the URI as a playlist, download and parse it.
        download_timeout = deadline - time.time()
        if download_timeout < 0:
            logger.info(
                "Unwrapping stream from URI (%s) failed: timed out in %sms",
                uri,
                timeout,
            )
            return None, None
        content = http.download(requests_session, uri, timeout=download_timeout / 1000)
        if content is None:
            logger.info(
                "Unwrapping stream from URI (%s) failed: error downloading URI %s",
                original_uri,
                uri,
            )
            return None, None
        uris = playlists.parse(content)
        if not uris:
            logger.debug(
                "Failed parsing URI (%s) as playlist; found potential stream.", uri
            )
            return uri, None
        # TODO Test streams and return first that seems to be playable
        # Resolve relative playlist entries against the playlist's own URI.
        logger.debug("Parsed playlist (%s) and found new URI: %s", uri, uris[0])
        uri = urllib.parse.urljoin(uri, uris[0])
|
def _unwrap_stream(uri, timeout, scanner, requests_session):
    """
    Get a stream URI from a playlist URI, ``uri``.
    Unwraps nested playlists until something that's not a playlist is found or
    the ``timeout`` is reached.

    :param uri: playlist or stream URI to unwrap
    :param timeout: overall time budget for the whole unwrapping loop
    :param scanner: GStreamer scanner used to probe each candidate URI
    :param requests_session: HTTP session used to download playlist bodies
    :returns: ``(uri, scan_result)``; either element may be ``None``
    """
    original_uri = uri
    seen_uris = set()  # guards against playlists that reference themselves
    deadline = time.time() + timeout
    while time.time() < deadline:
        if uri in seen_uris:
            logger.info(
                "Unwrapping stream from URI (%s) failed: playlist referenced itself",
                uri,
            )
            return None, None
        else:
            seen_uris.add(uri)
        logger.debug("Unwrapping stream from URI: %s", uri)
        try:
            scan_timeout = deadline - time.time()
            if scan_timeout < 0:
                logger.info(
                    "Unwrapping stream from URI (%s) failed: timed out in %sms",
                    uri,
                    timeout,
                )
                return None, None
            scan_result = scanner.scan(uri, timeout=scan_timeout)
        except exceptions.ScannerError as exc:
            logger.debug("GStreamer failed scanning URI (%s): %s", uri, exc)
            scan_result = None
        if scan_result is not None:
            # BUGFIX: `scan_result.mime` may be None, which crashed with
            # AttributeError: 'NoneType' object has no attribute 'startswith'
            # (mopidy#1760).  Check for None before inspecting the MIME type.
            has_interesting_mime = (
                scan_result.mime is not None
                and not scan_result.mime.startswith("text/")
                and not scan_result.mime.startswith("application/")
            )
            if scan_result.playable or has_interesting_mime:
                logger.debug("Unwrapped potential %s stream: %s", scan_result.mime, uri)
                return uri, scan_result
        # Not playable: treat the URI as a playlist, download and parse it.
        download_timeout = deadline - time.time()
        if download_timeout < 0:
            logger.info(
                "Unwrapping stream from URI (%s) failed: timed out in %sms",
                uri,
                timeout,
            )
            return None, None
        content = http.download(requests_session, uri, timeout=download_timeout / 1000)
        if content is None:
            logger.info(
                "Unwrapping stream from URI (%s) failed: error downloading URI %s",
                original_uri,
                uri,
            )
            return None, None
        uris = playlists.parse(content)
        if not uris:
            logger.debug(
                "Failed parsing URI (%s) as playlist; found potential stream.", uri
            )
            return uri, None
        # TODO Test streams and return first that seems to be playable
        logger.debug("Parsed playlist (%s) and found new URI: %s", uri, uris[0])
        uri = urllib.parse.urljoin(uri, uris[0])
|
https://github.com/mopidy/mopidy/issues/1760
|
ERROR StreamBackend backend caused an exception.
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/mopidy/core/library.py", line 19, in _backend_error_handling
yield
File "/usr/lib64/python2.7/site-packages/mopidy/core/library.py", line 237, in lookup
result = future.get()
File "/usr/lib64/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/lib64/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/lib64/python2.7/site-packages/mopidy/stream/actor.py", line 65, in lookup
requests_session=self.backend._session)
File "/usr/lib64/python2.7/site-packages/mopidy/stream/actor.py", line 131, in _unwrap_stream
not scan_result.mime.startswith('text/') and
AttributeError: 'NoneType' object has no attribute 'startswith'
|
AttributeError
|
def listplaylist(context, name):
    """
    *musicpd.org, stored playlists section:*
    ``listplaylist {NAME}``
    Lists the files in the playlist ``NAME.m3u``.
    Output format::
    file: relative/path/to/file1.flac
    file: relative/path/to/file2.ogg
    file: relative/path/to/file3.mp3
    """
    playlist = _get_playlist(context, name)
    lines = []
    for track in playlist.tracks:
        lines.append(translator.uri_to_mpd_format(track.uri))
    return lines
|
def listplaylist(context, name):
    """
    *musicpd.org, stored playlists section:*
    ``listplaylist {NAME}``
    Lists the files in the playlist ``NAME.m3u``.
    Output format::
    file: relative/path/to/file1.flac
    file: relative/path/to/file2.ogg
    file: relative/path/to/file3.mp3
    """
    playlist = _get_playlist(context, name)
    # BUGFIX: delegate the "file: ..." line to the shared translator helper
    # instead of hand-rolled "%s" formatting, which produced
    # UnicodeDecodeError for non-ASCII URIs in the response dispatcher
    # (mopidy#1759).
    # NOTE(review): assumes `translator` is imported in this module like in
    # the sibling MPD protocol handlers -- confirm.
    return [translator.uri_to_mpd_format(t.uri) for t in playlist.tracks]
|
https://github.com/mopidy/mopidy/issues/1759
|
ERROR Unhandled exception in MpdSession (urn:uuid:76575e20-c10f-46e2-bc60-404ed1cffc27):
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/usr/lib64/python2.7/site-packages/mopidy/internal/network.py", line 423, in on_receive
self.on_line_received(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 77, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 87, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 106, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 135, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 148, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 160, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 184, in _format_response
formatted_response.extend(self._format_lines(element))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 210, in _format_lines
return ['%s: %s' % (key, value)]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 38: ordinal not in range(128)
|
UnicodeDecodeError
|
def track_to_mpd_format(track, position=None, stream_title=None):
    """
    Format track for output to MPD client.
    :param track: the track
    :type track: :class:`mopidy.models.Track` or :class:`mopidy.models.TlTrack`
    :param position: track's position in playlist
    :type position: integer
    :param stream_title: the current streams title
    :type stream_title: string
    :rtype: list of two-tuples
    """
    # Accept either a TlTrack (carries a tlid) or a bare Track.
    if isinstance(track, TlTrack):
        (tlid, track) = track
    else:
        (tlid, track) = (None, track)
    if not track.uri:
        logger.warning("Ignoring track without uri")
        return []
    result = [
        # The "file: ..." line is delegated to the shared helper.
        uri_to_mpd_format(track.uri),
        ("Time", track.length and (track.length // 1000) or 0),
        ("Artist", concat_multi_values(track.artists, "name")),
        ("Album", track.album and track.album.name or ""),
    ]
    # When playing a stream, the stream title goes in "Title" and the track
    # name (the station name) goes in "Name" instead.
    if stream_title is not None:
        result.append(("Title", stream_title))
        if track.name:
            result.append(("Name", track.name))
    else:
        result.append(("Title", track.name or ""))
    if track.date:
        result.append(("Date", track.date))
    # MPD reports "Track" as "no/total" when the album track count is known.
    if track.album is not None and track.album.num_tracks is not None:
        result.append(
            ("Track", "%d/%d" % (track.track_no or 0, track.album.num_tracks))
        )
    else:
        result.append(("Track", track.track_no or 0))
    if position is not None and tlid is not None:
        result.append(("Pos", position))
        result.append(("Id", tlid))
    if track.album is not None and track.album.musicbrainz_id is not None:
        result.append(("MUSICBRAINZ_ALBUMID", track.album.musicbrainz_id))
    if track.album is not None and track.album.artists:
        result.append(("AlbumArtist", concat_multi_values(track.album.artists, "name")))
        musicbrainz_ids = concat_multi_values(track.album.artists, "musicbrainz_id")
        if musicbrainz_ids:
            result.append(("MUSICBRAINZ_ALBUMARTISTID", musicbrainz_ids))
    if track.artists:
        musicbrainz_ids = concat_multi_values(track.artists, "musicbrainz_id")
        if musicbrainz_ids:
            result.append(("MUSICBRAINZ_ARTISTID", musicbrainz_ids))
    if track.composers:
        result.append(("Composer", concat_multi_values(track.composers, "name")))
    if track.performers:
        result.append(("Performer", concat_multi_values(track.performers, "name")))
    if track.genre:
        result.append(("Genre", track.genre))
    if track.disc_no:
        result.append(("Disc", track.disc_no))
    if track.last_modified:
        # last_modified is stored in ms since the epoch; MPD wants ISO-8601 UTC.
        datestring = datetime.datetime.utcfromtimestamp(
            track.last_modified // 1000
        ).isoformat()
        result.append(("Last-Modified", datestring + "Z"))
    if track.musicbrainz_id is not None:
        result.append(("MUSICBRAINZ_TRACKID", track.musicbrainz_id))
    if track.album and track.album.uri:
        result.append(("X-AlbumUri", track.album.uri))
    if track.album and track.album.images:
        images = ";".join(i for i in track.album.images if i != "")
        result.append(("X-AlbumImage", images))
    # Drop entries whose value is empty so they are not sent to the client.
    result = [element for element in result if _has_value(*element)]
    return result
|
def track_to_mpd_format(track, position=None, stream_title=None):
    """
    Format track for output to MPD client.
    :param track: the track
    :type track: :class:`mopidy.models.Track` or :class:`mopidy.models.TlTrack`
    :param position: track's position in playlist
    :type position: integer
    :param stream_title: the current streams title
    :type stream_title: string
    :rtype: list of two-tuples
    """
    # Accept either a TlTrack (carries a tlid) or a bare Track.
    if isinstance(track, TlTrack):
        (tlid, track) = track
    else:
        (tlid, track) = (None, track)
    if not track.uri:
        logger.warning("Ignoring track without uri")
        return []
    result = [
        # NOTE(review): the raw URI here later feeds '%s: %s' formatting in
        # the response dispatcher; non-ASCII URIs have triggered
        # UnicodeDecodeError there (mopidy#1759).  Consider routing this
        # through a dedicated URI formatter.
        ("file", track.uri),
        ("Time", track.length and (track.length // 1000) or 0),
        ("Artist", concat_multi_values(track.artists, "name")),
        ("Album", track.album and track.album.name or ""),
    ]
    # When playing a stream, the stream title goes in "Title" and the track
    # name (the station name) goes in "Name" instead.
    if stream_title is not None:
        result.append(("Title", stream_title))
        if track.name:
            result.append(("Name", track.name))
    else:
        result.append(("Title", track.name or ""))
    if track.date:
        result.append(("Date", track.date))
    # MPD reports "Track" as "no/total" when the album track count is known.
    if track.album is not None and track.album.num_tracks is not None:
        result.append(
            ("Track", "%d/%d" % (track.track_no or 0, track.album.num_tracks))
        )
    else:
        result.append(("Track", track.track_no or 0))
    if position is not None and tlid is not None:
        result.append(("Pos", position))
        result.append(("Id", tlid))
    if track.album is not None and track.album.musicbrainz_id is not None:
        result.append(("MUSICBRAINZ_ALBUMID", track.album.musicbrainz_id))
    if track.album is not None and track.album.artists:
        result.append(("AlbumArtist", concat_multi_values(track.album.artists, "name")))
        musicbrainz_ids = concat_multi_values(track.album.artists, "musicbrainz_id")
        if musicbrainz_ids:
            result.append(("MUSICBRAINZ_ALBUMARTISTID", musicbrainz_ids))
    if track.artists:
        musicbrainz_ids = concat_multi_values(track.artists, "musicbrainz_id")
        if musicbrainz_ids:
            result.append(("MUSICBRAINZ_ARTISTID", musicbrainz_ids))
    if track.composers:
        result.append(("Composer", concat_multi_values(track.composers, "name")))
    if track.performers:
        result.append(("Performer", concat_multi_values(track.performers, "name")))
    if track.genre:
        result.append(("Genre", track.genre))
    if track.disc_no:
        result.append(("Disc", track.disc_no))
    if track.last_modified:
        # last_modified is stored in ms since the epoch; MPD wants ISO-8601 UTC.
        datestring = datetime.datetime.utcfromtimestamp(
            track.last_modified // 1000
        ).isoformat()
        result.append(("Last-Modified", datestring + "Z"))
    if track.musicbrainz_id is not None:
        result.append(("MUSICBRAINZ_TRACKID", track.musicbrainz_id))
    if track.album and track.album.uri:
        result.append(("X-AlbumUri", track.album.uri))
    if track.album and track.album.images:
        images = ";".join(i for i in track.album.images if i != "")
        result.append(("X-AlbumImage", images))
    # Drop entries whose value is empty so they are not sent to the client.
    result = [element for element in result if _has_value(*element)]
    return result
|
https://github.com/mopidy/mopidy/issues/1759
|
ERROR Unhandled exception in MpdSession (urn:uuid:76575e20-c10f-46e2-bc60-404ed1cffc27):
Traceback (most recent call last):
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib64/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/usr/lib64/python2.7/site-packages/mopidy/internal/network.py", line 423, in on_receive
self.on_line_received(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 77, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 87, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 106, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 135, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 148, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 69, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 160, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 184, in _format_response
formatted_response.extend(self._format_lines(element))
File "/usr/lib64/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 210, in _format_lines
return ['%s: %s' % (key, value)]
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 38: ordinal not in range(128)
|
UnicodeDecodeError
|
def _get_user_dirs(xdg_config_dir):
    """Returns a dict of XDG dirs read from
    ``$XDG_CONFIG_HOME/user-dirs.dirs``.
    This is used at import time for most users of :mod:`mopidy`. By rolling our
    own implementation instead of using :meth:`glib.get_user_special_dir` we
    make it possible for many extensions to run their test suites, which are
    importing parts of :mod:`mopidy`, in a virtualenv with global site-packages
    disabled, and thus no :mod:`glib` available.
    """
    # NOTE(review): this variant works entirely in bytes, so ``xdg_config_dir``
    # is presumably a bytestring — confirm with callers.
    dirs_file = os.path.join(xdg_config_dir, b"user-dirs.dirs")
    if not os.path.exists(dirs_file):
        # No user-dirs.dirs file means no special dirs are configured.
        return {}
    with open(dirs_file, "rb") as fh:
        data = fh.read()
    # Prepend a section header so configparser accepts the bare KEY="value"
    # lines that xdg-user-dirs writes.
    data = b"[XDG_USER_DIRS]\n" + data
    # Expand the literal "$HOME" placeholder and drop the surrounding quotes.
    data = data.replace(b"$HOME", os.path.expanduser(b"~"))
    data = data.replace(b'"', b"")
    config = configparser.RawConfigParser()
    # NOTE(review): feeding bytes through BytesIO only works on Python 2;
    # Python 3's configparser requires text — confirm the target runtime.
    config.readfp(io.BytesIO(data))
    return {
        # configparser lowercases option names; upper() restores the
        # conventional XDG_*_DIR spelling.
        k.upper().decode("utf-8"): os.path.abspath(v)
        for k, v in config.items("XDG_USER_DIRS")
        if v is not None
    }
|
def _get_user_dirs(xdg_config_dir):
"""Returns a dict of XDG dirs read from
``$XDG_CONFIG_HOME/user-dirs.dirs``.
This is used at import time for most users of :mod:`mopidy`. By rolling our
own implementation instead of using :meth:`glib.get_user_special_dir` we
make it possible for many extensions to run their test suites, which are
importing parts of :mod:`mopidy`, in a virtualenv with global site-packages
disabled, and thus no :mod:`glib` available.
"""
dirs_file = os.path.join(xdg_config_dir, b"user-dirs.dirs")
if not os.path.exists(dirs_file):
return {}
with open(dirs_file, "rb") as fh:
data = fh.read().decode("utf-8")
data = "[XDG_USER_DIRS]\n" + data
data = data.replace("$HOME", os.path.expanduser("~"))
data = data.replace('"', "")
config = configparser.RawConfigParser()
config.readfp(io.StringIO(data))
return {
k.upper(): os.path.abspath(v)
for k, v in config.items("XDG_USER_DIRS")
if v is not None
}
|
https://github.com/mopidy/mopidy/issues/1725
|
ERROR FileBackend backend caused an exception.
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/core/library.py", line 19, in _backend_error_handling
yield
File "/usr/lib/python2.7/dist-packages/mopidy/core/library.py", line 112, in _browse
result = backend.library.browse(uri).get()
File "/usr/lib/python2.7/dist-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/lib/python2.7/dist-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 53, in browse
if not self._is_in_basedir(os.path.realpath(local_path)):
File "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 146, in _is_in_basedir
for media_dir in self._media_dirs)
File "/usr/lib/python2.7/dist-packages/mopidy/file/library.py", line 146, in <genexpr>
for media_dir in self._media_dirs)
File "/usr/lib/python2.7/dist-packages/mopidy/internal/path.py", line
210, in is_path_inside_base_dir
raise ValueError('base_path is not a bytestring')
ValueError: base_path is not a bytestring
|
ValueError
|
def validate(self, value):
    """Validate *value*, UTF-8 encode text values, and intern the result."""
    checked = super(Identifier, self).validate(value)
    if isinstance(checked, compat.text_type):
        checked = checked.encode("utf-8")
    return compat.intern(checked)
|
def validate(self, value):
    """Validate *value* and return an interned copy.

    ``str()`` on a unicode value implicitly encodes with the ASCII codec and
    raises ``UnicodeEncodeError`` for non-ASCII input, so text values are
    explicitly UTF-8 encoded instead.
    """
    value = super(Identifier, self).validate(value)
    if isinstance(value, compat.text_type):
        value = value.encode("utf-8")
    return compat.intern(value)
|
https://github.com/mopidy/mopidy/issues/1508
|
INFO Scanned 3500 of 5494 files in 25s, ~14s left.
ERROR 'ascii' codec can't encode character u'\ufeff' in position 0: ordinal not in range(128)
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/mopidy/__main__.py", line 134, in main
return args.command.run(args, proxied_config)
File "/usr/local/lib/python2.7/site-packages/mopidy/local/commands.py", line 150, in run
track = tags.convert_tags_to_track(result.tags).replace(
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 81, in convert_tags_to_track
'musicbrainz-sortname')
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 137, in _artists
return [Artist(**attrs)]
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 158, in __call__
*args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 34, in __init__
self._set_field(key, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 186, in _set_field
object.__setattr__(self, name, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 50, in __set__
value = self.validate(value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 98, in validate
return compat.intern(str(super(Identifier, self).validate(value)))
UnicodeEncodeError: 'ascii' codec can't encode character u'\ufeff' in position 0: ordinal not in range(128)
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==2.0.0', 'console_scripts', 'mopidy')()
File "/usr/local/lib/python2.7/site-packages/mopidy/__main__.py", line 134, in main
return args.command.run(args, proxied_config)
File "/usr/local/lib/python2.7/site-packages/mopidy/local/commands.py", line 150, in run
track = tags.convert_tags_to_track(result.tags).replace(
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 81, in convert_tags_to_track
'musicbrainz-sortname')
File "/usr/local/lib/python2.7/site-packages/mopidy/audio/tags.py", line 137, in _artists
return [Artist(**attrs)]
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 158, in __call__
*args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 34, in __init__
self._set_field(key, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/immutable.py", line 186, in _set_field
object.__setattr__(self, name, value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 50, in __set__
value = self.validate(value)
File "/usr/local/lib/python2.7/site-packages/mopidy/models/fields.py", line 98, in validate
return compat.intern(str(super(Identifier, self).validate(value)))
UnicodeEncodeError: 'ascii' codec can't encode character u'\ufeff' in position 0: ordinal not in range(128)
|
UnicodeEncodeError
|
def on_stream_start(self):
    """Handle GStreamer's STREAM_START bus message.

    Notifies listeners that the stream changed, then publishes any tags
    that were postponed while waiting for the new stream to start.
    """
    gst_logger.debug("Got STREAM_START bus message")
    uri = self._audio._pending_uri
    logger.debug("Audio event: stream_changed(uri=%r)", uri)
    AudioListener.send("stream_changed", uri=uri)
    # Emit any postponed tags that we got after about-to-finish.
    tags, self._audio._pending_tags = self._audio._pending_tags, None
    # Fall back to an empty dict so later lookups on _tags never see None.
    self._audio._tags = tags or {}
    if tags:
        logger.debug("Audio event: tags_changed(tags=%r)", tags.keys())
        AudioListener.send("tags_changed", tags=tags.keys())
|
def on_stream_start(self):
    """Handle GStreamer's STREAM_START bus message.

    Notifies listeners that the stream changed, then publishes any tags
    that were postponed while waiting for the new stream to start.
    """
    gst_logger.debug("Got STREAM_START bus message")
    uri = self._audio._pending_uri
    logger.debug("Audio event: stream_changed(uri=%r)", uri)
    AudioListener.send("stream_changed", uri=uri)
    # Emit any postponed tags that we got after about-to-finish.
    tags, self._audio._pending_tags = self._audio._pending_tags, None
    # _pending_tags may be None; fall back to an empty dict so later
    # lookups such as _tags.get(...) don't crash with an AttributeError.
    self._audio._tags = tags or {}
    if tags:
        logger.debug("Audio event: tags_changed(tags=%r)", tags.keys())
        AudioListener.send("tags_changed", tags=tags.keys())
|
https://github.com/mopidy/mopidy/issues/1449
|
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 225, in on_message
self.on_tag(taglist)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 340, in on_tag
if self._audio._tags.get(key, unique) != value:
AttributeError: 'NoneType' object has no attribute 'get'
Traceback (most recent call last):
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 225, in on_message
self.on_tag(taglist)
File "/home/jodal/mopidy-dev/mopidy/mopidy/audio/actor.py", line 340, in on_tag
if self._audio._tags.get(key, unique) != value:
AttributeError: 'NoneType' object has no attribute 'get'
|
AttributeError
|
def on_playbin_state_changed(self, old_state, new_state, pending_state):
    """Handle GStreamer's STATE_CHANGED bus message for the playbin.

    Translates GStreamer states into Mopidy playback states and notifies
    listeners, ignoring intermediate and GStreamer-internal transitions.
    """
    gst_logger.debug(
        "Got STATE_CHANGED bus message: old=%s new=%s pending=%s",
        old_state.value_name,
        new_state.value_name,
        pending_state.value_name,
    )
    if new_state == Gst.State.READY and pending_state == Gst.State.NULL:
        # XXX: We're not called on the last state change when going down to
        # NULL, so we rewrite the second to last call to get the expected
        # behavior.
        new_state = Gst.State.NULL
        pending_state = Gst.State.VOID_PENDING
    if pending_state != Gst.State.VOID_PENDING:
        return  # Ignore intermediate state changes
    if new_state == Gst.State.READY:
        return  # Ignore READY state as it's GStreamer specific
    # Map the GStreamer state onto Mopidy's playback state.
    new_state = _GST_STATE_MAPPING[new_state]
    old_state, self._audio.state = self._audio.state, new_state
    # _target_state may be a GStreamer-only state with no Mopidy mapping,
    # hence .get() rather than direct indexing.
    target_state = _GST_STATE_MAPPING.get(self._audio._target_state)
    if target_state is None:
        # XXX: Workaround for #1430, to be fixed properly by #1222.
        logger.debug("Race condition happened. See #1222 and #1430.")
        return
    if target_state == new_state:
        target_state = None
    logger.debug(
        "Audio event: state_changed(old_state=%s, new_state=%s, target_state=%s)",
        old_state,
        new_state,
        target_state,
    )
    AudioListener.send(
        "state_changed",
        old_state=old_state,
        new_state=new_state,
        target_state=target_state,
    )
    if new_state == PlaybackState.STOPPED:
        logger.debug("Audio event: stream_changed(uri=None)")
        AudioListener.send("stream_changed", uri=None)
    if "GST_DEBUG_DUMP_DOT_DIR" in os.environ:
        # Dump the pipeline graph for debugging when requested.
        Gst.debug_bin_to_dot_file(
            self._audio._playbin, Gst.DebugGraphDetails.ALL, "mopidy"
        )
|
def on_playbin_state_changed(self, old_state, new_state, pending_state):
    """Handle GStreamer's STATE_CHANGED bus message for the playbin.

    Translates GStreamer states into Mopidy playback states and notifies
    listeners, ignoring intermediate and GStreamer-internal transitions.
    """
    gst_logger.debug(
        "Got STATE_CHANGED bus message: old=%s new=%s pending=%s",
        old_state.value_name,
        new_state.value_name,
        pending_state.value_name,
    )
    if new_state == Gst.State.READY and pending_state == Gst.State.NULL:
        # XXX: We're not called on the last state change when going down to
        # NULL, so we rewrite the second to last call to get the expected
        # behavior.
        new_state = Gst.State.NULL
        pending_state = Gst.State.VOID_PENDING
    if pending_state != Gst.State.VOID_PENDING:
        return  # Ignore intermediate state changes
    if new_state == Gst.State.READY:
        return  # Ignore READY state as it's GStreamer specific
    # Map the GStreamer state onto Mopidy's playback state.
    new_state = _GST_STATE_MAPPING[new_state]
    old_state, self._audio.state = self._audio.state, new_state
    # _target_state may be a GStreamer-only state (e.g. GST_STATE_READY)
    # with no Mopidy mapping; direct indexing raised KeyError here, so use
    # .get() and bail out when there is no mapping.
    target_state = _GST_STATE_MAPPING.get(self._audio._target_state)
    if target_state is None:
        # XXX: Workaround for #1430, to be fixed properly by #1222.
        logger.debug("Race condition happened. See #1222 and #1430.")
        return
    if target_state == new_state:
        target_state = None
    logger.debug(
        "Audio event: state_changed(old_state=%s, new_state=%s, target_state=%s)",
        old_state,
        new_state,
        target_state,
    )
    AudioListener.send(
        "state_changed",
        old_state=old_state,
        new_state=new_state,
        target_state=target_state,
    )
    if new_state == PlaybackState.STOPPED:
        logger.debug("Audio event: stream_changed(uri=None)")
        AudioListener.send("stream_changed", uri=None)
    if "GST_DEBUG_DUMP_DOT_DIR" in os.environ:
        # Dump the pipeline graph for debugging when requested.
        Gst.debug_bin_to_dot_file(
            self._audio._playbin, Gst.DebugGraphDetails.ALL, "mopidy"
        )
|
https://github.com/mopidy/mopidy/issues/1430
|
Traceback (most recent call last):
File "/home/trygve/dev/mopidy/mopidy/mopidy/audio/actor.py", line 210, in on_message
self.on_playbin_state_changed(old_state, new_state, pending_state)
File "/home/trygve/dev/mopidy/mopidy/mopidy/audio/actor.py", line 260, in on_playbin_state_changed
target_state = _GST_STATE_MAPPING[self._audio._target_state]
KeyError: <enum GST_STATE_READY of type GstState>
|
KeyError
|
def playlist_uri_from_name(self, name):
    """
    Helper function to retrieve a playlist URI from its unique MPD name.
    """
    if name in self._uri_from_name:
        return self._uri_from_name.get(name)
    # Unknown name: rebuild the mapping before the final lookup.
    self.refresh_playlists_mapping()
    return self._uri_from_name.get(name)
|
def playlist_uri_from_name(self, name):
    """
    Helper function to retrieve a playlist URI from its unique MPD name.

    Refreshes the name-to-URI mapping whenever *name* is not present, so a
    stale (but non-empty) mapping is also updated; the previous check only
    refreshed when the mapping was completely empty.
    """
    if name not in self._uri_from_name:
        self.refresh_playlists_mapping()
    return self._uri_from_name.get(name)
|
https://github.com/mopidy/mopidy/issues/1348
|
2015-12-04 23:41:33,959 ERROR [MpdSession-13] /home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py:269
pykka Unhandled exception in MpdSession (urn:uuid:093fbff0-33df-4e39-ba0b-c7259431372c):
Traceback (most recent call last):
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/home/adamcik/dev/mopidy/mopidy/internal/network.py", line 370, in on_receive
self.on_line_received(line)
File "/home/adamcik/dev/mopidy/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 89, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 134, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/home/adamcik/dev/mopidy/mopidy/mpd/dispatcher.py", line 174, in _call_handler
return protocol.commands.call(tokens, context=self.context)
File "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/__init__.py", line 180, in call
return self.handlers[tokens[0]](context, *tokens[1:])
File "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/__init__.py", line 158, in validate
return func(**callargs)
File "/home/adamcik/dev/mopidy/mopidy/mpd/protocol/stored_playlists.py", line 331, in rm
context.core.playlists.delete(uri).get()
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/home/adamcik/dev/mopidy-virtualenv/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/home/adamcik/dev/mopidy/mopidy/core/playlists.py", line 176, in delete
validation.check_uri(uri)
File "/home/adamcik/dev/mopidy/mopidy/internal/validation.py", line 98, in check_uri
raise exceptions.ValidationError(msg.format(arg=arg))
ValidationError: Expected a valid URI, not None
|
ValidationError
|
def _get_library(args, config):
    """Look up and instantiate the configured local library backend.

    Returns the library instance, or ``None`` when the configured name is
    not registered.
    """
    registry = {library.name: library for library in args.registry["local:library"]}
    library_name = config["local"]["library"]
    if library_name not in registry:
        logger.error("Local library %s not found", library_name)
        return None
    logger.debug("Using %s as the local library", library_name)
    return registry[library_name](config)
|
def _get_library(args, config):
    """Look up and instantiate the configured local library backend.

    :returns: the library instance, or ``None`` when the configured
        library name is not registered
    """
    libraries = dict((l.name, l) for l in args.registry["local:library"])
    library_name = config["local"]["library"]
    if library_name not in libraries:
        # Returning the int 1 here made callers crash later with
        # "AttributeError: 'int' object has no attribute 'load'". A missing
        # library is fatal, so log at error level and return None for
        # callers to check explicitly.
        logger.error("Local library %s not found", library_name)
        return None
    logger.debug("Using %s as the local library", library_name)
    return libraries[library_name](config)
|
https://github.com/mopidy/mopidy/issues/1298
|
INFO Starting Mopidy 1.1.1
INFO Loading config from builtin defaults
INFO Loading config from /etc/mopidy/mopidy.conf
INFO Loading config from command line options
INFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud
INFO Disabled extensions: none
WARNING Local library images not found
INFO Found 8597 files in media_dir.
ERROR 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==1.1.1', 'console_scripts', 'mopidy')()
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
|
AttributeError
|
def run(self, args, config):
    """Interactively clear the local library; returns a process exit code."""
    library = _get_library(args, config)
    if library is None:
        return 1
    answer = compat.input("\nAre you sure you want to clear the library? [y/N] ")
    if answer.lower() != "y":
        print("Clearing library aborted.")
        return 0
    if library.clear():
        print("Library successfully cleared.")
        return 0
    print("Unable to clear library.")
    return 1
|
def run(self, args, config):
    """Interactively clear the local library.

    :returns: 0 on success or user abort, 1 on failure
    """
    library = _get_library(args, config)
    if library is None:
        # Without this guard a missing library backend crashes below with an
        # AttributeError instead of exiting cleanly.
        return 1
    prompt = "\nAre you sure you want to clear the library? [y/N] "
    if compat.input(prompt).lower() != "y":
        print("Clearing library aborted.")
        return 0
    if library.clear():
        print("Library successfully cleared.")
        return 0
    print("Unable to clear library.")
    return 1
|
https://github.com/mopidy/mopidy/issues/1298
|
INFO Starting Mopidy 1.1.1
INFO Loading config from builtin defaults
INFO Loading config from /etc/mopidy/mopidy.conf
INFO Loading config from command line options
INFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud
INFO Disabled extensions: none
WARNING Local library images not found
INFO Found 8597 files in media_dir.
ERROR 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==1.1.1', 'console_scripts', 'mopidy')()
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
|
AttributeError
|
def run(self, args, config):
    """Scan the local media_dir and update the local library.

    Removes tracks whose files are gone, rescans files that are new or
    modified (or all files with ``--force``), and flushes progress
    periodically.

    :returns: 0 on success, 1 when no usable local library is configured
    """
    media_dir = config["local"]["media_dir"]
    scan_timeout = config["local"]["scan_timeout"]
    flush_threshold = config["local"]["scan_flush_threshold"]
    excluded_file_extensions = config["local"]["excluded_file_extensions"]
    # Normalize extensions for the case-insensitive endswith() check below.
    excluded_file_extensions = tuple(
        bytes(file_ext.lower()) for file_ext in excluded_file_extensions
    )
    library = _get_library(args, config)
    if library is None:
        # No usable library backend; _get_library already logged the error.
        return 1
    file_mtimes, file_errors = path.find_mtimes(
        media_dir, follow=config["local"]["scan_follow_symlinks"]
    )
    logger.info("Found %d files in media_dir.", len(file_mtimes))
    if file_errors:
        logger.warning(
            "Encountered %d errors while scanning media_dir.", len(file_errors)
        )
    for name in file_errors:
        logger.debug("Scan error %r for %r", file_errors[name], name)
    num_tracks = library.load()
    logger.info("Checking %d tracks from library.", num_tracks)
    uris_to_update = set()
    uris_to_remove = set()
    uris_in_library = set()
    # Compare the library's tracks against the files found on disk.
    for track in library.begin():
        abspath = translator.local_track_uri_to_path(track.uri, media_dir)
        mtime = file_mtimes.get(abspath)
        if mtime is None:
            logger.debug("Missing file %s", track.uri)
            uris_to_remove.add(track.uri)
        elif mtime > track.last_modified or args.force:
            uris_to_update.add(track.uri)
        uris_in_library.add(track.uri)
    logger.info("Removing %d missing tracks.", len(uris_to_remove))
    for uri in uris_to_remove:
        library.remove(uri)
    # Queue files on disk that the library does not know about yet.
    for abspath in file_mtimes:
        relpath = os.path.relpath(abspath, media_dir)
        uri = translator.path_to_local_track_uri(relpath)
        if b"/." in relpath:
            logger.debug("Skipped %s: Hidden directory/file.", uri)
        elif relpath.lower().endswith(excluded_file_extensions):
            logger.debug("Skipped %s: File extension excluded.", uri)
        elif uri not in uris_in_library:
            uris_to_update.add(uri)
    logger.info("Found %d tracks which need to be updated.", len(uris_to_update))
    logger.info("Scanning...")
    # Deterministic order, optionally capped by --limit.
    uris_to_update = sorted(uris_to_update, key=lambda v: v.lower())
    uris_to_update = uris_to_update[: args.limit]
    scanner = scan.Scanner(scan_timeout)
    progress = _Progress(flush_threshold, len(uris_to_update))
    for uri in uris_to_update:
        try:
            relpath = translator.local_track_uri_to_path(uri, media_dir)
            file_uri = path.path_to_uri(os.path.join(media_dir, relpath))
            result = scanner.scan(file_uri)
            tags, duration = result.tags, result.duration
            if not result.playable:
                logger.warning("Failed %s: No audio found in file.", uri)
            elif duration < MIN_DURATION_MS:
                logger.warning(
                    "Failed %s: Track shorter than %dms", uri, MIN_DURATION_MS
                )
            else:
                mtime = file_mtimes.get(os.path.join(media_dir, relpath))
                track = utils.convert_tags_to_track(tags).replace(
                    uri=uri, length=duration, last_modified=mtime
                )
                if library.add_supports_tags_and_duration:
                    library.add(track, tags=tags, duration=duration)
                else:
                    library.add(track)
                logger.debug("Added %s", track.uri)
        except exceptions.ScannerError as error:
            logger.warning("Failed %s: %s", uri, error)
        # Periodically flush progress so a long scan survives interruption.
        if progress.increment():
            progress.log()
            if library.flush():
                logger.debug("Progress flushed.")
    progress.log()
    library.close()
    logger.info("Done scanning.")
    return 0
|
def run(self, args, config):
    """Scan the local media_dir and update the local library.

    Removes tracks whose files are gone, rescans files that are new or
    modified (or all files with ``--force``), and flushes progress
    periodically.

    :returns: 0 on success, 1 when no usable local library is configured
    """
    media_dir = config["local"]["media_dir"]
    scan_timeout = config["local"]["scan_timeout"]
    flush_threshold = config["local"]["scan_flush_threshold"]
    excluded_file_extensions = config["local"]["excluded_file_extensions"]
    # Normalize extensions for the case-insensitive endswith() check below.
    excluded_file_extensions = tuple(
        bytes(file_ext.lower()) for file_ext in excluded_file_extensions
    )
    library = _get_library(args, config)
    if library is None:
        # Guard against a missing library backend; without this check the
        # code below crashed with "AttributeError: 'int' object has no
        # attribute 'load'" when _get_library failed.
        return 1
    file_mtimes, file_errors = path.find_mtimes(
        media_dir, follow=config["local"]["scan_follow_symlinks"]
    )
    logger.info("Found %d files in media_dir.", len(file_mtimes))
    if file_errors:
        logger.warning(
            "Encountered %d errors while scanning media_dir.", len(file_errors)
        )
    for name in file_errors:
        logger.debug("Scan error %r for %r", file_errors[name], name)
    num_tracks = library.load()
    logger.info("Checking %d tracks from library.", num_tracks)
    uris_to_update = set()
    uris_to_remove = set()
    uris_in_library = set()
    # Compare the library's tracks against the files found on disk.
    for track in library.begin():
        abspath = translator.local_track_uri_to_path(track.uri, media_dir)
        mtime = file_mtimes.get(abspath)
        if mtime is None:
            logger.debug("Missing file %s", track.uri)
            uris_to_remove.add(track.uri)
        elif mtime > track.last_modified or args.force:
            uris_to_update.add(track.uri)
        uris_in_library.add(track.uri)
    logger.info("Removing %d missing tracks.", len(uris_to_remove))
    for uri in uris_to_remove:
        library.remove(uri)
    # Queue files on disk that the library does not know about yet.
    for abspath in file_mtimes:
        relpath = os.path.relpath(abspath, media_dir)
        uri = translator.path_to_local_track_uri(relpath)
        if b"/." in relpath:
            logger.debug("Skipped %s: Hidden directory/file.", uri)
        elif relpath.lower().endswith(excluded_file_extensions):
            logger.debug("Skipped %s: File extension excluded.", uri)
        elif uri not in uris_in_library:
            uris_to_update.add(uri)
    logger.info("Found %d tracks which need to be updated.", len(uris_to_update))
    logger.info("Scanning...")
    # Deterministic order, optionally capped by --limit.
    uris_to_update = sorted(uris_to_update, key=lambda v: v.lower())
    uris_to_update = uris_to_update[: args.limit]
    scanner = scan.Scanner(scan_timeout)
    progress = _Progress(flush_threshold, len(uris_to_update))
    for uri in uris_to_update:
        try:
            relpath = translator.local_track_uri_to_path(uri, media_dir)
            file_uri = path.path_to_uri(os.path.join(media_dir, relpath))
            result = scanner.scan(file_uri)
            tags, duration = result.tags, result.duration
            if not result.playable:
                logger.warning("Failed %s: No audio found in file.", uri)
            elif duration < MIN_DURATION_MS:
                logger.warning(
                    "Failed %s: Track shorter than %dms", uri, MIN_DURATION_MS
                )
            else:
                mtime = file_mtimes.get(os.path.join(media_dir, relpath))
                track = utils.convert_tags_to_track(tags).replace(
                    uri=uri, length=duration, last_modified=mtime
                )
                if library.add_supports_tags_and_duration:
                    library.add(track, tags=tags, duration=duration)
                else:
                    library.add(track)
                logger.debug("Added %s", track.uri)
        except exceptions.ScannerError as error:
            logger.warning("Failed %s: %s", uri, error)
        # Periodically flush progress so a long scan survives interruption.
        if progress.increment():
            progress.log()
            if library.flush():
                logger.debug("Progress flushed.")
    progress.log()
    library.close()
    logger.info("Done scanning.")
    return 0
|
https://github.com/mopidy/mopidy/issues/1298
|
INFO Starting Mopidy 1.1.1
INFO Loading config from builtin defaults
INFO Loading config from /etc/mopidy/mopidy.conf
INFO Loading config from command line options
INFO Enabled extensions: mpd, http, stream, podcast-gpodder, m3u, podcast-itunes, softwaremixer, file, musicbox_webclient, podcast, local, tunein, soundcloud
INFO Disabled extensions: none
WARNING Local library images not found
INFO Found 8597 files in media_dir.
ERROR 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
Traceback (most recent call last):
File "/usr/bin/mopidy", line 9, in <module>
load_entry_point('Mopidy==1.1.1', 'console_scripts', 'mopidy')()
File "/usr/lib/python2.7/dist-packages/mopidy/__main__.py", line 158, in main
return args.command.run(args, proxied_config)
File "/usr/lib/python2.7/dist-packages/mopidy/local/commands.py", line 91, in run
num_tracks = library.load()
AttributeError: 'int' object has no attribute 'load'
|
AttributeError
|
def parse_urilist(data):
    """Parse a bytes playlist/urilist, skipping blanks and ``#`` comments.

    Returns the list of URI lines, or an empty list if any line fails URI
    validation.
    """
    uris = []
    for raw_line in data.splitlines():
        if raw_line.startswith(b"#") or not raw_line.strip():
            continue
        try:
            validation.check_uri(raw_line)
        except ValueError:
            return []
        uris.append(raw_line)
    return uris
|
def parse_urilist(data):
    """Parse a plain-text URI list (one URI per line) from raw bytes.

    :param bytes data: raw playlist payload
    :returns: list of URI lines, or ``[]`` if any line fails validation
    """
    result = []
    for line in data.splitlines():
        # Skip blank lines and comments. Compare against a *bytes* literal:
        # ``data`` is raw bytes, and matching it against the text ``"#"``
        # forces an implicit ASCII decode that raises UnicodeDecodeError on
        # non-ASCII payloads (mopidy#1265).
        if not line.strip() or line.startswith(b"#"):
            continue
        try:
            validation.check_uri(line)
        except ValueError:
            # One bad line invalidates the whole list.
            return []
        result.append(line)
    return result
|
https://github.com/mopidy/mopidy/issues/1265
|
INFO 2015-08-22 22:19:26,991 [855:MpdSession-31] mopidy.mpd.session
New MPD connection from [::ffff:127.0.0.1]:50701
DEBUG 2015-08-22 22:19:26,993 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: command_list_begin
DEBUG 2015-08-22 22:19:26,993 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: add "http://feedproxy.google.com/~r/WelcomeToNightVale/~5/tXeJa4IGs-8/23-EternalScouts.mp3"
DEBUG 2015-08-22 22:19:26,994 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: play "0"
DEBUG 2015-08-22 22:19:26,994 [855:MpdSession-31] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50701: command_list_end
DEBUG 2015-08-22 22:19:28,176 [855:Core-27] mopidy.core.tracklist
Triggering event: tracklist_changed()
DEBUG 2015-08-22 22:19:28,177 [855:MainThread] mopidy.listener
Sending tracklist_changed to CoreListener: {}
DEBUG 2015-08-22 22:19:28,177 [855:Core-27] mopidy.core.playback
Changing state: stopped -> playing
DEBUG 2015-08-22 22:19:28,177 [855:Core-27] mopidy.core.playback
Triggering playback state change event
DEBUG 2015-08-22 22:19:28,179 [855:MainThread] mopidy.listener
Sending playback_state_changed to CoreListener: {'old_state': u'stopped', 'new_state': u'playing'}
DEBUG 2015-08-22 22:19:28,179 [855:Audio-2] mopidy.audio.gst
State change to GST_STATE_READY: result=GST_STATE_CHANGE_SUCCESS
DEBUG 2015-08-22 22:19:28,179 [855:MainThread] mopidy.audio.gst
Got state-changed message: old=GST_STATE_NULL new=GST_STATE_READY pending=GST_STATE_VOID_PENDING
INFO 2015-08-22 22:19:34,545 [855:MpdSession-32] mopidy.mpd.session
New MPD connection from [::ffff:127.0.0.1]:50713
DEBUG 2015-08-22 22:19:34,547 [855:MpdSession-32] mopidy.mpd.session
Request from [::ffff:127.0.0.1]:50713: status
ERROR 2015-08-22 22:19:38,324 [855:MpdSession-31] pykka
Unhandled exception in MpdSession (urn:uuid:8a894042-6120-4236-a944-cd336bd7c8b3):
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 304, in _handle_receive
return self.on_receive(message)
File "/usr/local/lib/python2.7/site-packages/mopidy/internal/network.py", line 370, in on_receive
self.on_line_received(line)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/session.py", line 34, in on_line_received
response = self.dispatcher.handle_request(line)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 86, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 134, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 174, in _call_handler
return protocol.commands.call(tokens, context=self.context)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 180, in call
return self.handlers[tokens[0]](context, *tokens[1:])
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 158, in validate
return func(**callargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/command_list.py", line 42, in command_list_end
command, current_command_list_index=index)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 47, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 76, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 86, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 105, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 134, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 147, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 68, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 159, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 174, in _call_handler
return protocol.commands.call(tokens, context=self.context)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 180, in call
return self.handlers[tokens[0]](context, *tokens[1:])
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 158, in validate
return func(**callargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/playback.py", line 181, in play
return context.core.playback.play(tl_track).get()
File "/usr/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/local/lib/python2.7/site-packages/mopidy/core/playback.py", line 305, in play
self._play(tl_track=tl_track, tlid=tlid, on_error_step=1)
File "/usr/local/lib/python2.7/site-packages/mopidy/core/playback.py", line 348, in _play
backend.playback.change_track(tl_track.track).get() and
File "/usr/local/lib/python2.7/site-packages/pykka/threading.py", line 52, in get
compat.reraise(*self._data['exc_info'])
File "/usr/local/lib/python2.7/site-packages/pykka/compat.py", line 12, in reraise
exec('raise tp, value, tb')
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 201, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 295, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/local/lib/python2.7/site-packages/mopidy/backend.py", line 245, in change_track
uri = self.translate_uri(track.uri)
File "/usr/local/lib/python2.7/site-packages/mopidy/stream/actor.py", line 90, in translate_uri
tracks = list(playlists.parse(content))
File "/usr/local/lib/python2.7/site-packages/mopidy/internal/playlists.py", line 28, in parse
return parse_urilist(data) # Fallback
File "/usr/local/lib/python2.7/site-packages/mopidy/internal/playlists.py", line 125, in parse_urilist
if not line.strip() or line.startswith('#'):
UnicodeDecodeError: 'ascii' codec can't decode byte 0xdf in position 154: ordinal not in range(128)
DEBUG 2015-08-22 22:19:38,326 [855:Audio-2] mopidy.audio.actor
Position query failed
|
UnicodeDecodeError
|
def send(self, data):
    """Send ``data`` to the client socket; return whatever was not sent."""
    try:
        sent_count = self.sock.send(data)
    except socket.error as e:
        # Transient conditions: report everything as still pending.
        if e.errno in (errno.EWOULDBLOCK, errno.EINTR):
            return data
        self.stop("Unexpected client error: %s" % encoding.locale_decode(e))
        return b""
    return data[sent_count:]
|
def send(self, data):
    """Send data to client, return any unsent data."""
    try:
        sent = self.sock.send(data)
        # send() may write only part of the buffer; hand the rest back so
        # the caller can retry later.
        return data[sent:]
    except socket.error as e:
        if e.errno in (errno.EWOULDBLOCK, errno.EINTR):
            # Transient condition: nothing was sent, retry with all data.
            return data
        # NOTE(review): interpolating ``e`` directly can raise
        # UnicodeDecodeError on Python 2 when the OS error message contains
        # non-ASCII bytes — consider decoding via the locale first (see
        # mopidy#971).
        self.stop("Unexpected client error: %s" % e)
        return b""
|
https://github.com/mopidy/mopidy/issues/971
|
2015-02-04 06:30:11,901 ERROR [3714:MpdSession-27] pykka: Unhandled exception in MpdSession (urn:uuid:9595028c-486c-4c89-813a-785c3cafc057):
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 200, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 303, in _handle_receive
return self.on_receive(message)
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 366, in on_receive
self.on_line_received(line)
File "/usr/lib/python2.7/dist-packages/mopidy/mpd/session.py", line 41, in on_line_received
self.send_lines(response)
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 428, in send_lines
self.connection.queue_send(self.encode(data))
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 189, in queue_send
self.send_buffer = self.send(self.send_buffer + data)
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 202, in send
self.stop('Unexpected client error: %s' % e)
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128)
|
UnicodeDecodeError
|
def _find_worker(relative, follow, done, work, results, errors):
    """Worker thread for collecting stat() results.

    :param str relative: directory to make results relative to
    :param bool follow: if symlinks should be followed
    :param threading.Event done: event indicating that all work has been done
    :param queue.Queue work: queue of paths to process
    :param dict results: shared dictionary for storing all the stat() results
    :param dict errors: shared dictionary for storing any per path errors
    """
    while not done.is_set():
        try:
            # Non-blocking get so the thread notices ``done`` promptly.
            entry, parents = work.get(block=False)
        except queue.Empty:
            continue
        if relative:
            path = os.path.relpath(entry, relative)
        else:
            path = entry
        try:
            if follow:
                st = os.stat(entry)
            else:
                st = os.lstat(entry)
            # ``parents`` carries the (device, inode) pairs seen on the way
            # down this branch; revisiting one means a sym/hardlink cycle.
            if (st.st_dev, st.st_ino) in parents:
                errors[path] = exceptions.FindError("Sym/hardlink loop found.")
                continue
            # Build a new list (not in-place) so sibling entries do not
            # share the extended ancestry.
            parents = parents + [(st.st_dev, st.st_ino)]
            if stat.S_ISDIR(st.st_mode):
                for e in os.listdir(entry):
                    work.put((os.path.join(entry, e), parents))
            elif stat.S_ISREG(st.st_mode):
                results[path] = st
            elif stat.S_ISLNK(st.st_mode):
                errors[path] = exceptions.FindError("Not following symlinks.")
            else:
                errors[path] = exceptions.FindError("Not a file or directory.")
        except OSError as e:
            # Decode the OS error message with the locale encoding so it is
            # safe to interpolate into text later.
            errors[path] = exceptions.FindError(
                encoding.locale_decode(e.strerror), e.errno
            )
        finally:
            work.task_done()
|
def _find_worker(relative, follow, done, work, results, errors):
    """Worker thread for collecting stat() results.

    :param str relative: directory to make results relative to
    :param bool follow: if symlinks should be followed
    :param threading.Event done: event indicating that all work has been done
    :param queue.Queue work: queue of paths to process
    :param dict results: shared dictionary for storing all the stat() results
    :param dict errors: shared dictionary for storing any per path errors
    """
    while not done.is_set():
        try:
            # Non-blocking get so the thread notices ``done`` promptly.
            entry, parents = work.get(block=False)
        except queue.Empty:
            continue
        if relative:
            path = os.path.relpath(entry, relative)
        else:
            path = entry
        try:
            if follow:
                st = os.stat(entry)
            else:
                st = os.lstat(entry)
            # ``parents`` carries the (device, inode) pairs seen on the way
            # down this branch; revisiting one means a sym/hardlink cycle.
            if (st.st_dev, st.st_ino) in parents:
                errors[path] = exceptions.FindError("Sym/hardlink loop found.")
                continue
            parents = parents + [(st.st_dev, st.st_ino)]
            if stat.S_ISDIR(st.st_mode):
                for e in os.listdir(entry):
                    work.put((os.path.join(entry, e), parents))
            elif stat.S_ISREG(st.st_mode):
                results[path] = st
            elif stat.S_ISLNK(st.st_mode):
                errors[path] = exceptions.FindError("Not following symlinks.")
            else:
                errors[path] = exceptions.FindError("Not a file or directory.")
        except OSError as e:
            # NOTE(review): ``e.strerror`` is passed through undecoded; on
            # Python 2 a localized, non-ASCII message can later trigger
            # UnicodeDecodeError when interpolated — confirm callers decode.
            errors[path] = exceptions.FindError(e.strerror, e.errno)
        finally:
            work.task_done()
|
https://github.com/mopidy/mopidy/issues/971
|
2015-02-04 06:30:11,901 ERROR [3714:MpdSession-27] pykka: Unhandled exception in MpdSession (urn:uuid:9595028c-486c-4c89-813a-785c3cafc057):
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 200, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib/python2.7/dist-packages/pykka/actor.py", line 303, in _handle_receive
return self.on_receive(message)
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 366, in on_receive
self.on_line_received(line)
File "/usr/lib/python2.7/dist-packages/mopidy/mpd/session.py", line 41, in on_line_received
self.send_lines(response)
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 428, in send_lines
self.connection.queue_send(self.encode(data))
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 189, in queue_send
self.send_buffer = self.send(self.send_buffer + data)
File "/usr/lib/python2.7/dist-packages/mopidy/utils/network.py", line 202, in send
self.stop('Unexpected client error: %s' % e)
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc3 in position 16: ordinal not in range(128)
|
UnicodeDecodeError
|
def push(self, buffer_):
    """Feed *buffer_* into the appsrc element.

    Passing ``None`` signals end-of-stream instead of pushing data.
    Returns ``True`` when the element accepted the buffer/event, and
    ``False`` when the source is gone or rejected it.
    """
    # Guard: the source element may already have been torn down.
    if self._source is None:
        return False
    if buffer_ is not None:
        return self._source.emit("push-buffer", buffer_) == gst.FLOW_OK
    gst_logger.debug("Sending appsrc end-of-stream event.")
    return self._source.emit("end-of-stream") == gst.FLOW_OK
|
def push(self, buffer_):
    """Push a buffer into the GStreamer appsrc.

    :param buffer_: audio data, or ``None`` to signal end-of-stream
    :returns: ``True`` if the element accepted the data/event
    """
    # The appsrc may already have been torn down (e.g. during shutdown);
    # emitting on ``None`` raised AttributeError (mopidy#985).
    if self._source is None:
        return False
    if buffer_ is None:
        gst_logger.debug("Sending appsrc end-of-stream event.")
        return self._source.emit("end-of-stream") == gst.FLOW_OK
    else:
        return self._source.emit("push-buffer", buffer_) == gst.FLOW_OK
|
https://github.com/mopidy/mopidy/issues/985
|
^CINFO Interrupted. Exiting...
INFO Stopping Mopidy frontends
INFO Stopping Mopidy core
INFO Stopping Mopidy backends
From callback <function music_delivery at 0x7fa14c3c8938>:
Traceback (most recent call last):
File "/home/jodal/dev/pyspotify2/spotify/session.py", line 989, in music_delivery
spotify._session_instance, audio_format, frames_bytes, num_frames)
File "/home/jodal/dev/pyspotify2/spotify/utils.py", line 108, in call
return listener.callback(*args)
File "/home/jodal/dev/mopidy-spotify/mopidy_spotify/playback.py", line 159, in music_delivery_callback
if audio_actor.emit_data(buffer_).get():
File "build/bdist.linux-x86_64/egg/pykka/future.py", line 299, in get
File "build/bdist.linux-x86_64/egg/pykka/actor.py", line 200, in _actor_loop
File "build/bdist.linux-x86_64/egg/pykka/actor.py", line 294, in _handle_receive
File "/home/jodal/dev/mopidy/mopidy/audio/actor.py", line 587, in emit_data
return self._appsrc.push(buffer_)
File "/home/jodal/dev/mopidy/mopidy/audio/actor.py", line 135, in push
return self._source.emit('push-buffer', buffer_) == gst.FLOW_OK
AttributeError: 'NoneType' object has no attribute 'emit'
INFO Stopping Mopidy audio
INFO Stopping Mopidy mixer
|
AttributeError
|
def find_exact(tracks, query=None, uris=None):
    """Filter ``tracks`` down to those matching every field of ``query``
    exactly, returning them wrapped in a ``SearchResult``.

    Each query field maps to one or more values; filters are applied
    successively, so a track must satisfy all of them to survive.
    """
    # TODO Only return results within URI roots given by ``uris``
    if query is None:
        query = {}
    _validate_query(query)
    for field, values in query.items():
        if not hasattr(values, "__iter__"):
            values = [values]
        # FIXME this is bound to be slow for large libraries
        for value in values:
            if field == "track_no":
                q = _convert_to_int(value)
            else:
                q = value.strip()
            # One predicate per searchable field; each closes over ``q``.
            uri_filter = lambda t: q == t.uri
            track_name_filter = lambda t: q == t.name
            # Chained getattr so tracks whose ``album`` is None (or absent)
            # simply fail the match instead of raising AttributeError.
            album_filter = lambda t: q == getattr(
                getattr(t, "album", None), "name", None
            )
            artist_filter = lambda t: filter(lambda a: q == a.name, t.artists)
            albumartist_filter = lambda t: any(
                [q == a.name for a in getattr(t.album, "artists", [])]
            )
            composer_filter = lambda t: any(
                [q == a.name for a in getattr(t, "composers", [])]
            )
            performer_filter = lambda t: any(
                [q == a.name for a in getattr(t, "performers", [])]
            )
            track_no_filter = lambda t: q == t.track_no
            genre_filter = lambda t: t.genre and q == t.genre
            date_filter = lambda t: q == t.date
            comment_filter = lambda t: q == t.comment
            any_filter = lambda t: (
                uri_filter(t)
                or track_name_filter(t)
                or album_filter(t)
                or artist_filter(t)
                or albumartist_filter(t)
                or composer_filter(t)
                or track_no_filter(t)
                or genre_filter(t)
                or date_filter(t)
                or comment_filter(t)
                or albumartist_filter(t)
                or composer_filter(t)
                or performer_filter(t)
            ) if False else (
                uri_filter(t)
                or track_name_filter(t)
                or album_filter(t)
                or artist_filter(t)
                or albumartist_filter(t)
                or composer_filter(t)
                or performer_filter(t)
                or track_no_filter(t)
                or genre_filter(t)
                or date_filter(t)
                or comment_filter(t)
            )
            if field == "uri":
                tracks = filter(uri_filter, tracks)
            elif field == "track_name":
                tracks = filter(track_name_filter, tracks)
            elif field == "album":
                tracks = filter(album_filter, tracks)
            elif field == "artist":
                tracks = filter(artist_filter, tracks)
            elif field == "albumartist":
                tracks = filter(albumartist_filter, tracks)
            elif field == "composer":
                tracks = filter(composer_filter, tracks)
            elif field == "performer":
                tracks = filter(performer_filter, tracks)
            elif field == "track_no":
                tracks = filter(track_no_filter, tracks)
            elif field == "genre":
                tracks = filter(genre_filter, tracks)
            elif field == "date":
                tracks = filter(date_filter, tracks)
            elif field == "comment":
                tracks = filter(comment_filter, tracks)
            elif field == "any":
                tracks = filter(any_filter, tracks)
            else:
                raise LookupError("Invalid lookup field: %s" % field)
    # TODO: add local:search:<query>
    return SearchResult(uri="local:search", tracks=tracks)
|
def find_exact(tracks, query=None, uris=None):
    """Filter ``tracks`` down to those matching every field of ``query``
    exactly, returning them wrapped in a ``SearchResult``.

    Each query field maps to one or more values; filters are applied
    successively, so a track must satisfy all of them to survive.
    """
    # TODO Only return results within URI roots given by ``uris``
    if query is None:
        query = {}
    _validate_query(query)
    for field, values in query.items():
        if not hasattr(values, "__iter__"):
            values = [values]
        # FIXME this is bound to be slow for large libraries
        for value in values:
            if field == "track_no":
                q = _convert_to_int(value)
            else:
                q = value.strip()
            uri_filter = lambda t: q == t.uri
            track_name_filter = lambda t: q == t.name
            # Chained getattr so tracks whose ``album`` is None simply fail
            # the match; ``getattr(t, "album", Album()).name`` crashed with
            # AttributeError when album was explicitly None (mopidy#930).
            album_filter = lambda t: q == getattr(
                getattr(t, "album", None), "name", None
            )
            artist_filter = lambda t: filter(lambda a: q == a.name, t.artists)
            albumartist_filter = lambda t: any(
                [q == a.name for a in getattr(t.album, "artists", [])]
            )
            composer_filter = lambda t: any(
                [q == a.name for a in getattr(t, "composers", [])]
            )
            performer_filter = lambda t: any(
                [q == a.name for a in getattr(t, "performers", [])]
            )
            track_no_filter = lambda t: q == t.track_no
            genre_filter = lambda t: t.genre and q == t.genre
            date_filter = lambda t: q == t.date
            comment_filter = lambda t: q == t.comment
            any_filter = lambda t: (
                uri_filter(t)
                or track_name_filter(t)
                or album_filter(t)
                or artist_filter(t)
                or albumartist_filter(t)
                or composer_filter(t)
                or performer_filter(t)
                or track_no_filter(t)
                or genre_filter(t)
                or date_filter(t)
                or comment_filter(t)
            )
            if field == "uri":
                tracks = filter(uri_filter, tracks)
            elif field == "track_name":
                tracks = filter(track_name_filter, tracks)
            elif field == "album":
                tracks = filter(album_filter, tracks)
            elif field == "artist":
                tracks = filter(artist_filter, tracks)
            elif field == "albumartist":
                tracks = filter(albumartist_filter, tracks)
            elif field == "composer":
                tracks = filter(composer_filter, tracks)
            elif field == "performer":
                tracks = filter(performer_filter, tracks)
            elif field == "track_no":
                tracks = filter(track_no_filter, tracks)
            elif field == "genre":
                tracks = filter(genre_filter, tracks)
            elif field == "date":
                tracks = filter(date_filter, tracks)
            elif field == "comment":
                tracks = filter(comment_filter, tracks)
            elif field == "any":
                tracks = filter(any_filter, tracks)
            else:
                raise LookupError("Invalid lookup field: %s" % field)
    # TODO: add local:search:<query>
    return SearchResult(uri="local:search", tracks=tracks)
|
https://github.com/mopidy/mopidy/issues/930
|
INFO New MPD connection from [::1]:55924
ERROR Unhandled exception in MpdSession (urn:uuid:127f99a4-9753-4960-9b72-368255948637):
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 200, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 303, in _handle_receive
return self.on_receive(message)
File "/usr/local/lib/python2.7/site-packages/mopidy/utils/network.py", line 366, in on_receive
self.on_line_received(line)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/session.py", line 33, in on_line_received
response = self.dispatcher.handle_request(line)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 46, in handle_request
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 67, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 75, in _catch_mpd_ack_errors_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 67, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 85, in _authenticate_filter
return self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 67, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 104, in _command_list_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 67, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 133, in _idle_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 67, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 146, in _add_ok_filter
response = self._call_next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 67, in _call_next_filter
return next_filter(request, response, filter_chain)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 158, in _call_handler_filter
response = self._format_response(self._call_handler(request))
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/dispatcher.py", line 167, in _call_handler
return protocol.commands.call(tokens, context=self.context)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 178, in call
return self.handlers[tokens[0]](context, *tokens[1:])
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/__init__.py", line 140, in validate
return func(*args, **kwargs)
File "/usr/local/lib/python2.7/site-packages/mopidy/mpd/protocol/music_db.py", line 135, in find
results = context.core.library.find_exact(**query).get()
File "/usr/local/lib/python2.7/site-packages/pykka/future.py", line 299, in get
exec('raise exc_info[0], exc_info[1], exc_info[2]')
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 200, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 294, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/local/lib/python2.7/site-packages/mopidy/core/library.py", line 118, in find_exact
return [result for result in pykka.get_all(futures) if result]
File "/usr/local/lib/python2.7/site-packages/pykka/future.py", line 330, in get_all
return [future.get(timeout=timeout) for future in futures]
File "/usr/local/lib/python2.7/site-packages/pykka/future.py", line 299, in get
exec('raise exc_info[0], exc_info[1], exc_info[2]')
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 200, in _actor_loop
response = self._handle_receive(message)
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 294, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/local/lib/python2.7/site-packages/mopidy/local/library.py", line 47, in find_exact
return self._library.search(query=query, uris=uris, exact=True)
File "/usr/local/lib/python2.7/site-packages/mopidy/local/json.py", line 162, in search
return search.find_exact(tracks, query=query, uris=uris)
File "/usr/local/lib/python2.7/site-packages/mopidy/local/search.py", line 60, in find_exact
tracks = filter(album_filter, tracks)
File "/usr/local/lib/python2.7/site-packages/mopidy/local/search.py", line 26, in <lambda>
album_filter = lambda t: q == getattr(t, 'album', Album()).name
AttributeError: 'NoneType' object has no attribute 'name'
Traceback (most recent call last):
File "/usr/local/lib/python2.7/site-packages/mopidy/utils/network.py", line 272, in recv_callback
self.actor_ref.tell({'close': True})
File "/usr/local/lib/python2.7/site-packages/pykka/actor.py", line 437, in tell
raise _ActorDeadError('%s not found' % self)
pykka.exceptions.ActorDeadError: MpdSession (urn:uuid:127f99a4-9753-4960-9b72-368255948637) not found
|
AttributeError
|
def validate_extension(extension):
    """Verify extension's dependencies and environment.

    :param extension: an extension to check
    :returns: if extension should be run
    """
    logger.debug("Validating extension: %s", extension.ext_name)
    # The entry point name and the declared extension name must agree.
    if extension.ext_name != extension.entry_point.name:
        logger.warning(
            "Disabled extension %(ep)s: entry point name (%(ep)s) "
            "does not match extension name (%(ext)s)",
            {"ep": extension.entry_point.name, "ext": extension.ext_name},
        )
        return False
    try:
        # Resolve the extension's declared package dependencies.
        extension.entry_point.require()
    except pkg_resources.DistributionNotFound as ex:
        logger.info(
            "Disabled extension %s: Dependency %s not found", extension.ext_name, ex
        )
        return False
    except pkg_resources.VersionConflict as ex:
        # VersionConflict is not guaranteed to carry a (found, required)
        # pair; unconditional unpacking crashed with ValueError, so fall
        # back to the generic message otherwise.
        if len(ex.args) == 2:
            found, required = ex.args
            logger.info(
                "Disabled extension %s: %s required, but found %s at %s",
                extension.ext_name,
                required,
                found,
                found.location,
            )
        else:
            logger.info("Disabled extension %s: %s", extension.ext_name, ex)
        return False
    try:
        # Let the extension probe its own runtime requirements.
        extension.validate_environment()
    except exceptions.ExtensionError as ex:
        logger.info("Disabled extension %s: %s", extension.ext_name, ex.message)
        return False
    return True
|
def validate_extension(extension):
    """Verify extension's dependencies and environment.

    :param extension: an extension to check
    :returns: if extension should be run
    """
    logger.debug("Validating extension: %s", extension.ext_name)
    if extension.ext_name != extension.entry_point.name:
        logger.warning(
            "Disabled extension %(ep)s: entry point name (%(ep)s) "
            "does not match extension name (%(ext)s)",
            {"ep": extension.entry_point.name, "ext": extension.ext_name},
        )
        return False
    try:
        extension.entry_point.require()
    except pkg_resources.DistributionNotFound as ex:
        logger.info(
            "Disabled extension %s: Dependency %s not found", extension.ext_name, ex
        )
        return False
    except pkg_resources.VersionConflict as ex:
        # VersionConflict is not guaranteed to carry a (found, required)
        # pair; unconditional unpacking crashed with ValueError
        # ("need more than 1 value to unpack", mopidy#911).
        if len(ex.args) == 2:
            found, required = ex.args
            logger.info(
                "Disabled extension %s: %s required, but found %s at %s",
                extension.ext_name,
                required,
                found,
                found.location,
            )
        else:
            logger.info("Disabled extension %s: %s", extension.ext_name, ex)
        return False
    try:
        extension.validate_environment()
    except exceptions.ExtensionError as ex:
        logger.info("Disabled extension %s: %s", extension.ext_name, ex.message)
        return False
    return True
|
https://github.com/mopidy/mopidy/issues/911
|
2014-12-15 19:54:53,052 INFO [7923:MainThread] mopidy.__main__: Starting Mopidy 0.19.4
2014-12-15 19:54:53,433 INFO [7923:MainThread] mopidy.config: Loading config from: builtin defaults, /usr/share/mopidy/conf.d, /etc/mopidy/mopidy.conf, command line options
2014-12-15 19:54:54,860 ERROR [7923:MainThread] mopidy.__main__: need more than 1 value to unpack
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/mopidy/__main__.py", line 79, in main
if not ext.validate_extension(extension):
File "/usr/local/lib/python2.7/dist-packages/mopidy/ext.py", line 190, in validate_extension
found, required = ex.args
ValueError: need more than 1 value to unpack
|
ValueError
|
def recv_callback(self, fd, flags):
    # GObject IO watch callback for incoming data on the client socket.
    # Always returns True so the event source stays registered (GLib
    # removes a watch whose callback returns False).
    if flags & (gobject.IO_ERR | gobject.IO_HUP):
        self.stop("Bad client flags: %s" % flags)
        return True
    try:
        data = self.sock.recv(4096)
    except socket.error as e:
        # EWOULDBLOCK/EINTR are transient; anything else ends the session.
        if e.errno not in (errno.EWOULDBLOCK, errno.EINTR):
            self.stop("Unexpected client error: %s" % e)
        return True
    if not data:
        # Peer closed the connection: stop watching for reads *first*,
        # then ask the actor to shut the session down.
        self.disable_recv()
        self.actor_ref.tell({"close": True})
        return True
    try:
        self.actor_ref.tell({"received": data})
    except pykka.ActorDeadError:
        self.stop("Actor is dead.")
    return True
|
def recv_callback(self, fd, flags):
    """GObject IO watch callback: read available data and forward it to
    the session actor.

    :param fd: file descriptor the event fired for (``self.sock`` is read
        directly)
    :param flags: gobject IO condition flags
    :returns: ``True`` so the watch stays installed
    """
    if flags & (gobject.IO_ERR | gobject.IO_HUP):
        self.stop("Bad client flags: %s" % flags)
        return True
    try:
        data = self.sock.recv(4096)
    except socket.error as e:
        if e.errno not in (errno.EWOULDBLOCK, errno.EINTR):
            self.stop("Unexpected client error: %s" % e)
        return True
    if not data:
        # Peer closed the connection. Remove this watch *before* telling
        # the actor to close: notifying first can tear down state that
        # disable_recv() still needs (mopidy#781).
        self.disable_recv()
        self.actor_ref.tell({"close": True})
        return True
    try:
        self.actor_ref.tell({"received": data})
    except pykka.ActorDeadError:
        self.stop("Actor is dead.")
    return True
|
https://github.com/mopidy/mopidy/issues/781
|
Traceback (most recent call last):
File "/home/adamcik/dev/mopidy/mopidy/utils/network.py", line 272, in recv_callback
self.disable_recv()
File "/home/adamcik/dev/mopidy/mopidy/utils/network.py", line 236, in disable_recv
gobject.source_remove(self.recv_id)
TypeError: an integer is required
|
TypeError
|
def publish(self):
if not dbus:
logger.debug("Zeroconf publish failed: dbus not installed.")
return False
try:
bus = dbus.SystemBus()
except dbus.exceptions.DBusException as e:
logger.debug("Zeroconf publish failed: %s", e)
return False
if not bus.name_has_owner("org.freedesktop.Avahi"):
logger.debug("Zeroconf publish failed: Avahi service not running.")
return False
server = dbus.Interface(
bus.get_object("org.freedesktop.Avahi", "/"), "org.freedesktop.Avahi.Server"
)
self.group = dbus.Interface(
bus.get_object("org.freedesktop.Avahi", server.EntryGroupNew()),
"org.freedesktop.Avahi.EntryGroup",
)
try:
text = [_convert_text_to_dbus_bytes(t) for t in self.text]
self.group.AddService(
_AVAHI_IF_UNSPEC,
_AVAHI_PROTO_UNSPEC,
dbus.UInt32(_AVAHI_PUBLISHFLAGS_NONE),
self.name,
self.stype,
self.domain,
self.host,
dbus.UInt16(self.port),
text,
)
except dbus.exceptions.DBusException as e:
logger.debug("Zeroconf publish failed: %s", e)
return False
self.group.Commit()
return True
|
def publish(self):
if not dbus:
logger.debug("Zeroconf publish failed: dbus not installed.")
return False
try:
bus = dbus.SystemBus()
except dbus.exceptions.DBusException as e:
logger.debug("Zeroconf publish failed: %s", e)
return False
if not bus.name_has_owner("org.freedesktop.Avahi"):
logger.debug("Zeroconf publish failed: Avahi service not running.")
return False
server = dbus.Interface(
bus.get_object("org.freedesktop.Avahi", "/"), "org.freedesktop.Avahi.Server"
)
self.group = dbus.Interface(
bus.get_object("org.freedesktop.Avahi", server.EntryGroupNew()),
"org.freedesktop.Avahi.EntryGroup",
)
text = [_convert_text_to_dbus_bytes(t) for t in self.text]
self.group.AddService(
_AVAHI_IF_UNSPEC,
_AVAHI_PROTO_UNSPEC,
dbus.UInt32(_AVAHI_PUBLISHFLAGS_NONE),
self.name,
self.stype,
self.domain,
self.host,
dbus.UInt16(self.port),
text,
)
self.group.Commit()
return True
|
https://github.com/mopidy/mopidy/issues/576
|
INFO 2013-11-13 23:26:28,001 [4304:MainThread] mopidy.frontends.mpd
MPD server running at [::]:6600
ERROR 2013-11-13 23:26:28,013 [4304:MpdFrontend-5] pykka
Unhandled exception in MpdFrontend (urn:uuid:93b3b156-44ca-42de-a0cf-602474d1c582):
Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/Pykka-1.2.0-py2.7.egg/pykka/actor.py", line 191, in _actor_loop
self.on_start()
File "/home/zenith/dev/mopidy/mopidy/frontends/mpd/actor.py", line 49, in on_start
if self.zeroconf_service.publish():
File "/home/zenith/dev/mopidy/mopidy/utils/zeroconf.py", line 73, in publish
dbus.UInt16(self.port), text)
File "/usr/lib/python2.7/dist-packages/dbus/proxies.py", line 70, in __call__
return self._proxy_method(*args, **keywords)
File "/usr/lib/python2.7/dist-packages/dbus/proxies.py", line 145, in __call__
**keywords)
File "/usr/lib/python2.7/dist-packages/dbus/connection.py", line 651, in call_blocking
message, timeout)
DBusException: org.freedesktop.Avahi.InvalidHostNameError: Invalid host name
|
DBusException
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
# NOTE Python 2.6: To support Python versions < 2.6.2rc1 we must use
# bytestrings for the first argument to ``add_option``
# See https://github.com/mopidy/mopidy/issues/302 for details
parser.add_option(
b"--help-gst",
action="store_true",
dest="help_gst",
help="show GStreamer help options",
)
parser.add_option(
b"-i",
"--interactive",
action="store_true",
dest="interactive",
help="ask interactively for required settings which are missing",
)
parser.add_option(
b"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
b"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
parser.add_option(
b"--save-debug-log",
action="store_true",
dest="save_debug_log",
help='save debug log to "./mopidy.log"',
)
parser.add_option(
b"--list-settings",
action="callback",
callback=settings_utils.list_settings_optparse_callback,
help="list current settings",
)
parser.add_option(
b"--list-deps",
action="callback",
callback=deps.list_deps_optparse_callback,
help="list dependencies and their versions",
)
parser.add_option(
b"--debug-thread",
action="store_true",
dest="debug_thread",
help="run background thread that dumps tracebacks on SIGUSR1",
)
return parser.parse_args(args=mopidy_args)[0]
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
parser.add_option(
"--help-gst",
action="store_true",
dest="help_gst",
help="show GStreamer help options",
)
parser.add_option(
"-i",
"--interactive",
action="store_true",
dest="interactive",
help="ask interactively for required settings which are missing",
)
parser.add_option(
"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
parser.add_option(
"--save-debug-log",
action="store_true",
dest="save_debug_log",
help='save debug log to "./mopidy.log"',
)
parser.add_option(
"--list-settings",
action="callback",
callback=settings_utils.list_settings_optparse_callback,
help="list current settings",
)
parser.add_option(
"--list-deps",
action="callback",
callback=deps.list_deps_optparse_callback,
help="list dependencies and their versions",
)
parser.add_option(
"--debug-thread",
action="store_true",
dest="debug_thread",
help="run background thread that dumps tracebacks on SIGUSR1",
)
return parser.parse_args(args=mopidy_args)[0]
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def _convert_mpd_data(data, tracks, music_dir):
if not data:
return
# NOTE: kwargs are explicitly made bytestrings to work on Python
# 2.6.0/2.6.1. See https://github.com/mopidy/mopidy/issues/302 for details.
track_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
albumartist_kwargs = {}
if "track" in data:
if "/" in data["track"]:
album_kwargs[b"num_tracks"] = int(data["track"].split("/")[1])
track_kwargs[b"track_no"] = int(data["track"].split("/")[0])
else:
track_kwargs[b"track_no"] = int(data["track"])
if "artist" in data:
artist_kwargs[b"name"] = data["artist"]
albumartist_kwargs[b"name"] = data["artist"]
if "albumartist" in data:
albumartist_kwargs[b"name"] = data["albumartist"]
if "album" in data:
album_kwargs[b"name"] = data["album"]
if "title" in data:
track_kwargs[b"name"] = data["title"]
if "date" in data:
track_kwargs[b"date"] = data["date"]
if "musicbrainz_trackid" in data:
track_kwargs[b"musicbrainz_id"] = data["musicbrainz_trackid"]
if "musicbrainz_albumid" in data:
album_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumid"]
if "musicbrainz_artistid" in data:
artist_kwargs[b"musicbrainz_id"] = data["musicbrainz_artistid"]
if "musicbrainz_albumartistid" in data:
albumartist_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumartistid"]
if data["file"][0] == "/":
path = data["file"][1:]
else:
path = data["file"]
path = urllib.unquote(path)
if artist_kwargs:
artist = Artist(**artist_kwargs)
track_kwargs[b"artists"] = [artist]
if albumartist_kwargs:
albumartist = Artist(**albumartist_kwargs)
album_kwargs[b"artists"] = [albumartist]
if album_kwargs:
album = Album(**album_kwargs)
track_kwargs[b"album"] = album
track_kwargs[b"uri"] = path_to_uri(music_dir, path)
track_kwargs[b"length"] = int(data.get("time", 0)) * 1000
track = Track(**track_kwargs)
tracks.add(track)
|
def _convert_mpd_data(data, tracks, music_dir):
if not data:
return
track_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
albumartist_kwargs = {}
if "track" in data:
if "/" in data["track"]:
album_kwargs["num_tracks"] = int(data["track"].split("/")[1])
track_kwargs["track_no"] = int(data["track"].split("/")[0])
else:
track_kwargs["track_no"] = int(data["track"])
if "artist" in data:
artist_kwargs["name"] = data["artist"]
albumartist_kwargs["name"] = data["artist"]
if "albumartist" in data:
albumartist_kwargs["name"] = data["albumartist"]
if "album" in data:
album_kwargs["name"] = data["album"]
if "title" in data:
track_kwargs["name"] = data["title"]
if "date" in data:
track_kwargs["date"] = data["date"]
if "musicbrainz_trackid" in data:
track_kwargs["musicbrainz_id"] = data["musicbrainz_trackid"]
if "musicbrainz_albumid" in data:
album_kwargs["musicbrainz_id"] = data["musicbrainz_albumid"]
if "musicbrainz_artistid" in data:
artist_kwargs["musicbrainz_id"] = data["musicbrainz_artistid"]
if "musicbrainz_albumartistid" in data:
albumartist_kwargs["musicbrainz_id"] = data["musicbrainz_albumartistid"]
if data["file"][0] == "/":
path = data["file"][1:]
else:
path = data["file"]
path = urllib.unquote(path)
if artist_kwargs:
artist = Artist(**artist_kwargs)
track_kwargs["artists"] = [artist]
if albumartist_kwargs:
albumartist = Artist(**albumartist_kwargs)
album_kwargs["artists"] = [albumartist]
if album_kwargs:
album = Album(**album_kwargs)
track_kwargs["album"] = album
track_kwargs["uri"] = path_to_uri(music_dir, path)
track_kwargs["length"] = int(data.get("time", 0)) * 1000
track = Track(**track_kwargs)
tracks.add(track)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def translator(data):
albumartist_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
track_kwargs = {}
# NOTE: kwargs are explicitly made bytestrings to work on Python
# 2.6.0/2.6.1. See https://github.com/mopidy/mopidy/issues/302 for
# details.
def _retrieve(source_key, target_key, target):
if source_key in data:
target[str(target_key)] = data[source_key]
_retrieve(gst.TAG_ALBUM, "name", album_kwargs)
_retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
_retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
if gst.TAG_DATE in data and data[gst.TAG_DATE]:
date = data[gst.TAG_DATE]
try:
date = datetime.date(date.year, date.month, date.day)
except ValueError:
pass # Ignore invalid dates
else:
track_kwargs[b"date"] = date.isoformat()
_retrieve(gst.TAG_TITLE, "name", track_kwargs)
_retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
# Following keys don't seem to have TAG_* constant.
_retrieve("album-artist", "name", albumartist_kwargs)
_retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
_retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
_retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
_retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
if albumartist_kwargs:
album_kwargs[b"artists"] = [Artist(**albumartist_kwargs)]
track_kwargs["uri"] = data["uri"]
track_kwargs["length"] = data[gst.TAG_DURATION]
track_kwargs["album"] = Album(**album_kwargs)
track_kwargs["artists"] = [Artist(**artist_kwargs)]
return Track(**track_kwargs)
|
def translator(data):
albumartist_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
track_kwargs = {}
def _retrieve(source_key, target_key, target):
if source_key in data:
target[target_key] = data[source_key]
_retrieve(gst.TAG_ALBUM, "name", album_kwargs)
_retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
_retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
if gst.TAG_DATE in data and data[gst.TAG_DATE]:
date = data[gst.TAG_DATE]
try:
date = datetime.date(date.year, date.month, date.day)
except ValueError:
pass # Ignore invalid dates
else:
track_kwargs["date"] = date.isoformat()
_retrieve(gst.TAG_TITLE, "name", track_kwargs)
_retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
# Following keys don't seem to have TAG_* constant.
_retrieve("album-artist", "name", albumartist_kwargs)
_retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
_retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
_retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
_retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
if albumartist_kwargs:
album_kwargs["artists"] = [Artist(**albumartist_kwargs)]
track_kwargs["uri"] = data["uri"]
track_kwargs["length"] = data[gst.TAG_DURATION]
track_kwargs["album"] = Album(**album_kwargs)
track_kwargs["artists"] = [Artist(**artist_kwargs)]
return Track(**track_kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def _retrieve(source_key, target_key, target):
if source_key in data:
target[str(target_key)] = data[source_key]
|
def _retrieve(source_key, target_key, target):
if source_key in data:
target[target_key] = data[source_key]
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def translator(data):
albumartist_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
track_kwargs = {}
# NOTE: kwargs are explicitly made bytestrings to work on Python
# 2.6.0/2.6.1. See https://github.com/mopidy/mopidy/issues/302 for
# details.
def _retrieve(source_key, target_key, target):
if source_key in data:
target[str(target_key)] = data[source_key]
_retrieve(gst.TAG_ALBUM, "name", album_kwargs)
_retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
_retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
if gst.TAG_DATE in data and data[gst.TAG_DATE]:
date = data[gst.TAG_DATE]
try:
date = datetime.date(date.year, date.month, date.day)
except ValueError:
pass # Ignore invalid dates
else:
track_kwargs[b"date"] = date.isoformat()
_retrieve(gst.TAG_TITLE, "name", track_kwargs)
_retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
# Following keys don't seem to have TAG_* constant.
_retrieve("album-artist", "name", albumartist_kwargs)
_retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
_retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
_retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
_retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
if albumartist_kwargs:
album_kwargs[b"artists"] = [Artist(**albumartist_kwargs)]
track_kwargs[b"uri"] = data["uri"]
track_kwargs[b"length"] = data[gst.TAG_DURATION]
track_kwargs[b"album"] = Album(**album_kwargs)
track_kwargs[b"artists"] = [Artist(**artist_kwargs)]
return Track(**track_kwargs)
|
def translator(data):
albumartist_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
track_kwargs = {}
# NOTE: kwargs are explicitly made bytestrings to work on Python
# 2.6.0/2.6.1. See https://github.com/mopidy/mopidy/issues/302 for
# details.
def _retrieve(source_key, target_key, target):
if source_key in data:
target[str(target_key)] = data[source_key]
_retrieve(gst.TAG_ALBUM, "name", album_kwargs)
_retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
_retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
if gst.TAG_DATE in data and data[gst.TAG_DATE]:
date = data[gst.TAG_DATE]
try:
date = datetime.date(date.year, date.month, date.day)
except ValueError:
pass # Ignore invalid dates
else:
track_kwargs[b"date"] = date.isoformat()
_retrieve(gst.TAG_TITLE, "name", track_kwargs)
_retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
# Following keys don't seem to have TAG_* constant.
_retrieve("album-artist", "name", albumartist_kwargs)
_retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
_retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
_retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
_retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
if albumartist_kwargs:
album_kwargs[b"artists"] = [Artist(**albumartist_kwargs)]
track_kwargs["uri"] = data["uri"]
track_kwargs["length"] = data[gst.TAG_DURATION]
track_kwargs["album"] = Album(**album_kwargs)
track_kwargs["artists"] = [Artist(**artist_kwargs)]
return Track(**track_kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
# NOTE Python 2.6: To support Python versions < 2.6.2rc1 we must use
# bytestrings for the first argument to ``add_option``
# See https://github.com/mopidy/mopidy/issues/302 for details
parser.add_option(
b"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
b"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
return parser.parse_args(args=mopidy_args)[0]
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
parser.add_option(
"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
return parser.parse_args(args=mopidy_args)[0]
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, core):
super(MpdFrontend, self).__init__()
hostname = network.format_hostname(settings.MPD_SERVER_HOSTNAME)
port = settings.MPD_SERVER_PORT
# NOTE: dict key must be bytestring to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
try:
network.Server(
hostname,
port,
protocol=session.MpdSession,
protocol_kwargs={b"core": core},
max_connections=settings.MPD_SERVER_MAX_CONNECTIONS,
timeout=settings.MPD_SERVER_CONNECTION_TIMEOUT,
)
except IOError as error:
logger.error("MPD server startup failed: %s", encoding.locale_decode(error))
sys.exit(1)
logger.info("MPD server running at [%s]:%s", hostname, port)
|
def __init__(self, core):
super(MpdFrontend, self).__init__()
hostname = network.format_hostname(settings.MPD_SERVER_HOSTNAME)
port = settings.MPD_SERVER_PORT
try:
network.Server(
hostname,
port,
protocol=session.MpdSession,
protocol_kwargs={"core": core},
max_connections=settings.MPD_SERVER_MAX_CONNECTIONS,
timeout=settings.MPD_SERVER_CONNECTION_TIMEOUT,
)
except IOError as error:
logger.error("MPD server startup failed: %s", encoding.locale_decode(error))
sys.exit(1)
logger.info("MPD server running at [%s]:%s", hostname, port)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def handle_request(pattern, auth_required=True):
"""
Decorator for connecting command handlers to command requests.
If you use named groups in the pattern, the decorated method will get the
groups as keyword arguments. If the group is optional, remember to give the
argument a default value.
For example, if the command is ``do that thing`` the ``what`` argument will
be ``this thing``::
@handle_request('^do (?P<what>.+)$')
def do(what):
...
:param pattern: regexp pattern for matching commands
:type pattern: string
"""
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(
MpdCommand(name=match.group(), auth_required=auth_required)
)
# NOTE: Make pattern a bytestring to get bytestring keys in the dict
# returned from matches.groupdict(), which is again used as a **kwargs
# dict. This is needed to work on Python < 2.6.5. See
# https://github.com/mopidy/mopidy/issues/302 for details.
bytestring_pattern = pattern.encode("utf-8")
compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError(
"Tried to redefine handler for %s with %s" % (pattern, func)
)
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
return decorator
|
def handle_request(pattern, auth_required=True):
"""
Decorator for connecting command handlers to command requests.
If you use named groups in the pattern, the decorated method will get the
groups as keyword arguments. If the group is optional, remember to give the
argument a default value.
For example, if the command is ``do that thing`` the ``what`` argument will
be ``this thing``::
@handle_request('^do (?P<what>.+)$')
def do(what):
...
:param pattern: regexp pattern for matching commands
:type pattern: string
"""
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(
MpdCommand(name=match.group(), auth_required=auth_required)
)
compiled_pattern = re.compile(pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError(
"Tried to redefine handler for %s with %s" % (pattern, func)
)
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
return decorator
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(MpdCommand(name=match.group(), auth_required=auth_required))
# NOTE: Make pattern a bytestring to get bytestring keys in the dict
# returned from matches.groupdict(), which is again used as a **kwargs
# dict. This is needed to work on Python < 2.6.5. See
# https://github.com/mopidy/mopidy/issues/302 for details.
bytestring_pattern = pattern.encode("utf-8")
compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError("Tried to redefine handler for %s with %s" % (pattern, func))
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
|
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(MpdCommand(name=match.group(), auth_required=auth_required))
compiled_pattern = re.compile(pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError("Tried to redefine handler for %s with %s" % (pattern, func))
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
# NOTE First argument to add_option must be bytestrings on Python < 2.6.2
# See https://github.com/mopidy/mopidy/issues/302 for details
parser.add_option(
b"--help-gst",
action="store_true",
dest="help_gst",
help="show GStreamer help options",
)
parser.add_option(
b"-i",
"--interactive",
action="store_true",
dest="interactive",
help="ask interactively for required settings which are missing",
)
parser.add_option(
b"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
b"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
parser.add_option(
b"--save-debug-log",
action="store_true",
dest="save_debug_log",
help='save debug log to "./mopidy.log"',
)
parser.add_option(
b"--list-settings",
action="callback",
callback=settings_utils.list_settings_optparse_callback,
help="list current settings",
)
parser.add_option(
b"--list-deps",
action="callback",
callback=deps.list_deps_optparse_callback,
help="list dependencies and their versions",
)
parser.add_option(
b"--debug-thread",
action="store_true",
dest="debug_thread",
help="run background thread that dumps tracebacks on SIGUSR1",
)
return parser.parse_args(args=mopidy_args)[0]
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
# NOTE Python 2.6: To support Python versions < 2.6.2rc1 we must use
# bytestrings for the first argument to ``add_option``
# See https://github.com/mopidy/mopidy/issues/302 for details
parser.add_option(
b"--help-gst",
action="store_true",
dest="help_gst",
help="show GStreamer help options",
)
parser.add_option(
b"-i",
"--interactive",
action="store_true",
dest="interactive",
help="ask interactively for required settings which are missing",
)
parser.add_option(
b"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
b"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
parser.add_option(
b"--save-debug-log",
action="store_true",
dest="save_debug_log",
help='save debug log to "./mopidy.log"',
)
parser.add_option(
b"--list-settings",
action="callback",
callback=settings_utils.list_settings_optparse_callback,
help="list current settings",
)
parser.add_option(
b"--list-deps",
action="callback",
callback=deps.list_deps_optparse_callback,
help="list dependencies and their versions",
)
parser.add_option(
b"--debug-thread",
action="store_true",
dest="debug_thread",
help="run background thread that dumps tracebacks on SIGUSR1",
)
return parser.parse_args(args=mopidy_args)[0]
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def _convert_mpd_data(data, tracks, music_dir):
    """Convert one parsed MPD tag-cache entry into a ``Track``.

    Builds ``Artist``/``Album``/``Track`` model objects from the ``data``
    dict and adds the resulting track to the ``tracks`` set.  ``music_dir``
    is used to turn the entry's relative file path into a URI.  No-op when
    ``data`` is empty.
    """
    if not data:
        return
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details.
    track_kwargs = {}
    album_kwargs = {}
    artist_kwargs = {}
    albumartist_kwargs = {}
    if "track" in data:
        # "track" may be either "no" or "no/total", e.g. "7/12"
        if "/" in data["track"]:
            album_kwargs[b"num_tracks"] = int(data["track"].split("/")[1])
            track_kwargs[b"track_no"] = int(data["track"].split("/")[0])
        else:
            track_kwargs[b"track_no"] = int(data["track"])
    if "artist" in data:
        # The track artist doubles as album artist unless overridden below
        artist_kwargs[b"name"] = data["artist"]
        albumartist_kwargs[b"name"] = data["artist"]
    if "albumartist" in data:
        albumartist_kwargs[b"name"] = data["albumartist"]
    if "album" in data:
        album_kwargs[b"name"] = data["album"]
    if "title" in data:
        track_kwargs[b"name"] = data["title"]
    if "date" in data:
        track_kwargs[b"date"] = data["date"]
    if "musicbrainz_trackid" in data:
        track_kwargs[b"musicbrainz_id"] = data["musicbrainz_trackid"]
    if "musicbrainz_albumid" in data:
        album_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumid"]
    if "musicbrainz_artistid" in data:
        artist_kwargs[b"musicbrainz_id"] = data["musicbrainz_artistid"]
    if "musicbrainz_albumartistid" in data:
        albumartist_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumartistid"]
    # Strip a leading slash so the path stays relative to music_dir
    if data["file"][0] == "/":
        path = data["file"][1:]
    else:
        path = data["file"]
    path = urllib.unquote(path)
    if artist_kwargs:
        artist = Artist(**artist_kwargs)
        track_kwargs[b"artists"] = [artist]
    if albumartist_kwargs:
        albumartist = Artist(**albumartist_kwargs)
        album_kwargs[b"artists"] = [albumartist]
    if album_kwargs:
        album = Album(**album_kwargs)
        track_kwargs[b"album"] = album
    track_kwargs[b"uri"] = path_to_uri(music_dir, path)
    # "time" is in seconds (per the MPD protocol); scale to milliseconds
    track_kwargs[b"length"] = int(data.get("time", 0)) * 1000
    track = Track(**track_kwargs)
    tracks.add(track)
|
def _convert_mpd_data(data, tracks, music_dir):
    """Convert one parsed MPD tag-cache entry into a ``Track``.

    Builds ``Artist``/``Album``/``Track`` model objects from the ``data``
    dict and adds the resulting track to the ``tracks`` set.  No-op when
    ``data`` is empty.
    """
    if not data:
        return
    # NOTE: kwargs are explicitly made bytestrings to work on Python
    # 2.6.0/2.6.1. See https://github.com/mopidy/mopidy/issues/302 for details.
    track_kwargs = {}
    album_kwargs = {}
    artist_kwargs = {}
    albumartist_kwargs = {}
    if "track" in data:
        # "track" may be either "no" or "no/total", e.g. "7/12"
        if "/" in data["track"]:
            album_kwargs[b"num_tracks"] = int(data["track"].split("/")[1])
            track_kwargs[b"track_no"] = int(data["track"].split("/")[0])
        else:
            track_kwargs[b"track_no"] = int(data["track"])
    if "artist" in data:
        # The track artist doubles as album artist unless overridden below
        artist_kwargs[b"name"] = data["artist"]
        albumartist_kwargs[b"name"] = data["artist"]
    if "albumartist" in data:
        albumartist_kwargs[b"name"] = data["albumartist"]
    if "album" in data:
        album_kwargs[b"name"] = data["album"]
    if "title" in data:
        track_kwargs[b"name"] = data["title"]
    if "date" in data:
        track_kwargs[b"date"] = data["date"]
    if "musicbrainz_trackid" in data:
        track_kwargs[b"musicbrainz_id"] = data["musicbrainz_trackid"]
    if "musicbrainz_albumid" in data:
        album_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumid"]
    if "musicbrainz_artistid" in data:
        artist_kwargs[b"musicbrainz_id"] = data["musicbrainz_artistid"]
    if "musicbrainz_albumartistid" in data:
        albumartist_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumartistid"]
    # Strip a leading slash so the path stays relative to music_dir
    if data["file"][0] == "/":
        path = data["file"][1:]
    else:
        path = data["file"]
    path = urllib.unquote(path)
    if artist_kwargs:
        artist = Artist(**artist_kwargs)
        track_kwargs[b"artists"] = [artist]
    if albumartist_kwargs:
        albumartist = Artist(**albumartist_kwargs)
        album_kwargs[b"artists"] = [albumartist]
    if album_kwargs:
        album = Album(**album_kwargs)
        track_kwargs[b"album"] = album
    track_kwargs[b"uri"] = path_to_uri(music_dir, path)
    # "time" is in seconds (per the MPD protocol); scale to milliseconds
    track_kwargs[b"length"] = int(data.get("time", 0)) * 1000
    track = Track(**track_kwargs)
    tracks.add(track)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, core):
    """Start the MPD server frontend.

    Binds a ``network.Server`` on the configured hostname/port and hands
    each session a reference to ``core``.  Exits the whole process when
    the server socket cannot be set up.
    """
    super(MpdFrontend, self).__init__()
    hostname = network.format_hostname(settings.MPD_SERVER_HOSTNAME)
    port = settings.MPD_SERVER_PORT
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details.
    try:
        network.Server(
            hostname,
            port,
            protocol=session.MpdSession,
            protocol_kwargs={b"core": core},
            max_connections=settings.MPD_SERVER_MAX_CONNECTIONS,
            timeout=settings.MPD_SERVER_CONNECTION_TIMEOUT,
        )
    except IOError as error:
        # Startup failure is fatal: log and terminate the process
        logger.error("MPD server startup failed: %s", encoding.locale_decode(error))
        sys.exit(1)
    logger.info("MPD server running at [%s]:%s", hostname, port)
|
def __init__(self, core):
    """Start the MPD server frontend.

    Binds a ``network.Server`` on the configured hostname/port and hands
    each session a reference to ``core``.  Exits the whole process when
    the server socket cannot be set up.
    """
    super(MpdFrontend, self).__init__()
    hostname = network.format_hostname(settings.MPD_SERVER_HOSTNAME)
    port = settings.MPD_SERVER_PORT
    # NOTE: dict key must be bytestring to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    try:
        network.Server(
            hostname,
            port,
            protocol=session.MpdSession,
            protocol_kwargs={b"core": core},
            max_connections=settings.MPD_SERVER_MAX_CONNECTIONS,
            timeout=settings.MPD_SERVER_CONNECTION_TIMEOUT,
        )
    except IOError as error:
        # Startup failure is fatal: log and terminate the process
        logger.error("MPD server startup failed: %s", encoding.locale_decode(error))
        sys.exit(1)
    logger.info("MPD server running at [%s]:%s", hostname, port)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def handle_request(pattern, auth_required=True):
    """
    Decorator for connecting command handlers to command requests.
    If you use named groups in the pattern, the decorated method will get the
    groups as keyword arguments. If the group is optional, remember to give the
    argument a default value.
    For example, if the command is ``do that thing`` the ``what`` argument will
    be ``this thing``::
    @handle_request('^do (?P<what>.+)$')
    def do(what):
    ...
    :param pattern: regexp pattern for matching commands
    :type pattern: string
    :param auth_required: whether the command requires authentication
    :type auth_required: bool
    """
    def decorator(func):
        # The first run of [a-z_]+ in the pattern is taken as the command name
        match = re.search("([a-z_]+)", pattern)
        if match is not None:
            mpd_commands.add(
                MpdCommand(name=match.group(), auth_required=auth_required)
            )
        # NOTE Make pattern a bytestring to get bytestring keys in the dict
        # returned from matches.groupdict(), which is again used as a **kwargs
        # dict. This is needed to work on Python < 2.6.5.
        # See https://github.com/mopidy/mopidy/issues/302 for details.
        bytestring_pattern = pattern.encode("utf-8")
        compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
        if compiled_pattern in request_handlers:
            raise ValueError(
                "Tried to redefine handler for %s with %s" % (pattern, func)
            )
        request_handlers[compiled_pattern] = func
        # Prepend the matched pattern to the handler's docs for reference docs
        func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
        return func
    return decorator
|
def handle_request(pattern, auth_required=True):
    """
    Decorator for connecting command handlers to command requests.
    If you use named groups in the pattern, the decorated method will get the
    groups as keyword arguments. If the group is optional, remember to give the
    argument a default value.
    For example, if the command is ``do that thing`` the ``what`` argument will
    be ``this thing``::
    @handle_request('^do (?P<what>.+)$')
    def do(what):
    ...
    :param pattern: regexp pattern for matching commands
    :type pattern: string
    :param auth_required: whether the command requires authentication
    :type auth_required: bool
    """
    def decorator(func):
        # The first run of [a-z_]+ in the pattern is taken as the command name
        match = re.search("([a-z_]+)", pattern)
        if match is not None:
            mpd_commands.add(
                MpdCommand(name=match.group(), auth_required=auth_required)
            )
        # NOTE: Make pattern a bytestring to get bytestring keys in the dict
        # returned from matches.groupdict(), which is again used as a **kwargs
        # dict. This is needed to work on Python < 2.6.5. See
        # https://github.com/mopidy/mopidy/issues/302 for details.
        bytestring_pattern = pattern.encode("utf-8")
        compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
        if compiled_pattern in request_handlers:
            raise ValueError(
                "Tried to redefine handler for %s with %s" % (pattern, func)
            )
        request_handlers[compiled_pattern] = func
        # Prepend the matched pattern to the handler's docs for reference docs
        func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
        return func
    return decorator
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def decorator(func):
    """Register ``func`` as the handler for the closed-over ``pattern``.

    Closure over ``pattern`` and ``auth_required`` from the enclosing
    ``handle_request`` decorator factory.
    """
    # The first run of [a-z_]+ in the pattern is taken as the command name
    match = re.search("([a-z_]+)", pattern)
    if match is not None:
        mpd_commands.add(MpdCommand(name=match.group(), auth_required=auth_required))
    # NOTE Make pattern a bytestring to get bytestring keys in the dict
    # returned from matches.groupdict(), which is again used as a **kwargs
    # dict. This is needed to work on Python < 2.6.5.
    # See https://github.com/mopidy/mopidy/issues/302 for details.
    bytestring_pattern = pattern.encode("utf-8")
    compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
    if compiled_pattern in request_handlers:
        raise ValueError("Tried to redefine handler for %s with %s" % (pattern, func))
    request_handlers[compiled_pattern] = func
    # Prepend the matched pattern to the handler's docs for reference docs
    func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
    return func
|
def decorator(func):
    """Register ``func`` as the handler for the closed-over ``pattern``.

    Closure over ``pattern`` and ``auth_required`` from the enclosing
    ``handle_request`` decorator factory.
    """
    # The first run of [a-z_]+ in the pattern is taken as the command name
    match = re.search("([a-z_]+)", pattern)
    if match is not None:
        mpd_commands.add(MpdCommand(name=match.group(), auth_required=auth_required))
    # NOTE: Make pattern a bytestring to get bytestring keys in the dict
    # returned from matches.groupdict(), which is again used as a **kwargs
    # dict. This is needed to work on Python < 2.6.5. See
    # https://github.com/mopidy/mopidy/issues/302 for details.
    bytestring_pattern = pattern.encode("utf-8")
    compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
    if compiled_pattern in request_handlers:
        raise ValueError("Tried to redefine handler for %s with %s" % (pattern, func))
    request_handlers[compiled_pattern] = func
    # Prepend the matched pattern to the handler's docs for reference docs
    func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
    return func
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def query_from_mpd_list_format(field, mpd_query):
    """
    Converts an MPD ``list`` query to a Mopidy query.

    :param field: the field the MPD client is listing, e.g. ``album``
    :param mpd_query: the raw MPD query string, or ``None``
    :rtype: dict mapping query field names to lists of values
    """
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    if mpd_query is None:
        return {}
    try:
        # shlex does not seem to be friends with unicode objects
        tokens = shlex.split(mpd_query.encode("utf-8"))
    except ValueError as error:
        if str(error) == "No closing quotation":
            raise MpdArgError("Invalid unquoted character", command="list")
        else:
            raise
    tokens = [t.decode("utf-8") for t in tokens]
    if len(tokens) == 1:
        # Single token: only valid as ``list album <artist>``
        if field == "album":
            if not tokens[0]:
                raise ValueError
            return {b"artist": [tokens[0]]}  # See above NOTE
        else:
            raise MpdArgError('should be "Album" for 3 arguments', command="list")
    elif len(tokens) % 2 == 0:
        # Even number of tokens: alternating field/value pairs
        query = {}
        while tokens:
            key = str(tokens[0].lower())  # See above NOTE
            value = tokens[1]
            tokens = tokens[2:]
            if key not in ("artist", "album", "date", "genre"):
                raise MpdArgError("not able to parse args", command="list")
            if not value:
                raise ValueError
            if key in query:
                query[key].append(value)
            else:
                query[key] = [value]
        return query
    else:
        raise MpdArgError("not able to parse args", command="list")
|
def query_from_mpd_list_format(field, mpd_query):
    """
    Converts an MPD ``list`` query to a Mopidy query.

    :param field: the field the MPD client is listing, e.g. ``album``
    :param mpd_query: the raw MPD query string, or ``None``
    :rtype: dict mapping query field names to lists of values
    """
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5,
    # since the returned dict is used as a **kwargs dict.  Plain string
    # literals here are unicode (presumably due to ``from __future__ import
    # unicode_literals`` -- TODO confirm), hence the explicit b''/str().
    # See https://github.com/mopidy/mopidy/issues/302 for details
    if mpd_query is None:
        return {}
    try:
        # shlex does not seem to be friends with unicode objects
        tokens = shlex.split(mpd_query.encode("utf-8"))
    except ValueError as error:
        if str(error) == "No closing quotation":
            raise MpdArgError("Invalid unquoted character", command="list")
        else:
            raise
    tokens = [t.decode("utf-8") for t in tokens]
    if len(tokens) == 1:
        # Single token: only valid as ``list album <artist>``
        if field == "album":
            if not tokens[0]:
                raise ValueError
            return {b"artist": [tokens[0]]}  # See above NOTE
        else:
            raise MpdArgError('should be "Album" for 3 arguments', command="list")
    elif len(tokens) % 2 == 0:
        # Even number of tokens: alternating field/value pairs
        query = {}
        while tokens:
            key = str(tokens[0].lower())  # See above NOTE
            value = tokens[1]
            tokens = tokens[2:]
            if key not in ("artist", "album", "date", "genre"):
                raise MpdArgError("not able to parse args", command="list")
            if not value:
                raise ValueError
            if key in query:
                query[key].append(value)
            else:
                query[key] = [value]
        return query
    else:
        raise MpdArgError("not able to parse args", command="list")
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def copy(self, **values):
    """
    Copy the model with ``field`` updated to new value.
    Examples::
    # Returns a track with a new name
    Track(name='foo').copy(name='bar')
    # Return an album with a new number of tracks
    Album(num_tracks=2).copy(num_tracks=5)
    :param values: the model fields to modify
    :type values: dict
    :rtype: new instance of the model being copied
    """
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    data = {}
    # Existing fields: strip the leading underscore of private attributes
    # and take the overriding value from ``values`` when given
    for key in self.__dict__.keys():
        public_key = key.lstrip("_")
        data[str(public_key)] = values.pop(public_key, self.__dict__[key])
    # Remaining values that match a (possibly computed) attribute
    # NOTE values.pop() during iteration relies on Python 2's .keys()
    # returning a list copy rather than a live view
    for key in values.keys():
        if hasattr(self, key):
            data[str(key)] = values.pop(key)
    if values:
        raise TypeError('copy() got an unexpected keyword argument "%s"' % key)
    return self.__class__(**data)
|
def copy(self, **values):
    """
    Copy the model with ``field`` updated to new value.
    Examples::
    # Returns a track with a new name
    Track(name='foo').copy(name='bar')
    # Return an album with a new number of tracks
    Album(num_tracks=2).copy(num_tracks=5)
    :param values: the model fields to modify
    :type values: dict
    :rtype: new instance of the model being copied
    """
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5,
    # so every key is forced through str() before being used as a **kwargs
    # key below.  See https://github.com/mopidy/mopidy/issues/302.
    data = {}
    # Existing fields: strip the leading underscore of private attributes
    # and take the overriding value from ``values`` when given
    for key in self.__dict__.keys():
        public_key = key.lstrip("_")
        data[str(public_key)] = values.pop(public_key, self.__dict__[key])
    # Remaining values that match a (possibly computed) attribute
    for key in values.keys():
        if hasattr(self, key):
            data[str(key)] = values.pop(key)
    if values:
        raise TypeError('copy() got an unexpected keyword argument "%s"' % key)
    return self.__class__(**data)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
    """Pop ``artists`` out of ``kwargs`` and store it as a frozenset."""
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    # Written straight into __dict__, bypassing __setattr__ -- presumably
    # because the immutable base class forbids normal assignment; confirm.
    self.__dict__[b"artists"] = frozenset(kwargs.pop("artists", []))
    super(Album, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
    """Pop ``artists`` out of ``kwargs`` and store it as a frozenset."""
    # NOTE dict keys must be bytestrings to work on Python < 2.6.5; the
    # plain literal is unicode (presumably due to ``from __future__ import
    # unicode_literals`` -- TODO confirm), hence the b'' prefix.
    # See https://github.com/mopidy/mopidy/issues/302 for details
    self.__dict__[b"artists"] = frozenset(kwargs.pop("artists", []))
    super(Album, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
    """Pop ``artists`` out of ``kwargs`` and store it as a frozenset."""
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    # Written straight into __dict__, bypassing __setattr__ -- presumably
    # because the immutable base class forbids normal assignment; confirm.
    self.__dict__[b"artists"] = frozenset(kwargs.pop("artists", []))
    super(Track, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
    """Pop ``artists`` out of ``kwargs`` and store it as a frozenset."""
    # NOTE dict keys must be bytestrings to work on Python < 2.6.5; the
    # plain literal is unicode (presumably due to ``from __future__ import
    # unicode_literals`` -- TODO confirm), hence the b'' prefix.
    # See https://github.com/mopidy/mopidy/issues/302 for details
    self.__dict__[b"artists"] = frozenset(kwargs.pop("artists", []))
    super(Track, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
    """Support positional construction, i.e. ``TlTrack(tlid, track)``."""
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    # Rewrite the two positional arguments as keyword arguments
    if len(args) == 2 and len(kwargs) == 0:
        kwargs[b"tlid"] = args[0]
        kwargs[b"track"] = args[1]
        args = []
    super(TlTrack, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
    """Support positional construction, i.e. ``TlTrack(tlid, track)``."""
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5;
    # plain literals are unicode (presumably due to ``from __future__
    # import unicode_literals`` -- TODO confirm), hence the b'' prefixes.
    # See https://github.com/mopidy/mopidy/issues/302 for details
    if len(args) == 2 and len(kwargs) == 0:
        kwargs[b"tlid"] = args[0]
        kwargs[b"track"] = args[1]
        args = []
    super(TlTrack, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
    """Pop ``tracks`` out of ``kwargs`` and store it as a tuple."""
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    # Written straight into __dict__, bypassing __setattr__ -- presumably
    # because the immutable base class forbids normal assignment; confirm.
    self.__dict__[b"tracks"] = tuple(kwargs.pop("tracks", []))
    super(Playlist, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
    """Pop ``tracks`` out of ``kwargs`` and store it as a tuple."""
    # NOTE dict keys must be bytestrings to work on Python < 2.6.5; the
    # plain literal is unicode (presumably due to ``from __future__ import
    # unicode_literals`` -- TODO confirm), hence the b'' prefix.
    # See https://github.com/mopidy/mopidy/issues/302 for details
    self.__dict__[b"tracks"] = tuple(kwargs.pop("tracks", []))
    super(Playlist, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
    """Pop ``tracks``/``artists``/``albums`` out of ``kwargs`` as tuples."""
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details
    # Written straight into __dict__, bypassing __setattr__ -- presumably
    # because the immutable base class forbids normal assignment; confirm.
    self.__dict__[b"tracks"] = tuple(kwargs.pop("tracks", []))
    self.__dict__[b"artists"] = tuple(kwargs.pop("artists", []))
    self.__dict__[b"albums"] = tuple(kwargs.pop("albums", []))
    super(SearchResult, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
    """Pop ``tracks``/``artists``/``albums`` out of ``kwargs`` as tuples."""
    # NOTE dict keys must be bytestrings to work on Python < 2.6.5; plain
    # literals are unicode (presumably due to ``from __future__ import
    # unicode_literals`` -- TODO confirm), hence the b'' prefixes.
    # See https://github.com/mopidy/mopidy/issues/302 for details
    self.__dict__[b"tracks"] = tuple(kwargs.pop("tracks", []))
    self.__dict__[b"artists"] = tuple(kwargs.pop("artists", []))
    self.__dict__[b"albums"] = tuple(kwargs.pop("albums", []))
    super(SearchResult, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def parse_options():
    """Parse Mopidy's command line options and return the options object."""
    parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
    # NOTE First argument to add_option must be bytestrings on Python < 2.6.2
    # See https://github.com/mopidy/mopidy/issues/302 for details
    parser.add_option(
        b"-q",
        "--quiet",
        action="store_const",
        const=0,
        dest="verbosity_level",
        help="less output (warning level)",
    )
    parser.add_option(
        b"-v",
        "--verbose",
        action="count",
        default=1,
        dest="verbosity_level",
        help="more output (debug level)",
    )
    # parse_args() returns (options, args); only the options are needed
    return parser.parse_args(args=mopidy_args)[0]
|
def parse_options():
    """Parse Mopidy's command line options and return the options object."""
    parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
    # NOTE Python 2.6: To support Python versions < 2.6.2rc1 we must use
    # bytestrings for the first argument to ``add_option``
    # See https://github.com/mopidy/mopidy/issues/302 for details
    parser.add_option(
        b"-q",
        "--quiet",
        action="store_const",
        const=0,
        dest="verbosity_level",
        help="less output (warning level)",
    )
    parser.add_option(
        b"-v",
        "--verbose",
        action="count",
        default=1,
        dest="verbosity_level",
        help="more output (debug level)",
    )
    # parse_args() returns (options, args); only the options are needed
    return parser.parse_args(args=mopidy_args)[0]
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def translator(data):
    """Convert a dict of GStreamer tags (``data``) into a ``Track`` model."""
    albumartist_kwargs = {}
    album_kwargs = {}
    artist_kwargs = {}
    track_kwargs = {}
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details.
    def _retrieve(source_key, target_key, target):
        # Copy data[source_key] into target under a forced-bytestring key
        if source_key in data:
            target[str(target_key)] = data[source_key]
    _retrieve(gst.TAG_ALBUM, "name", album_kwargs)
    _retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
    _retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
    if gst.TAG_DATE in data and data[gst.TAG_DATE]:
        date = data[gst.TAG_DATE]
        try:
            date = datetime.date(date.year, date.month, date.day)
        except ValueError:
            pass  # Ignore invalid dates
        else:
            track_kwargs[b"date"] = date.isoformat()
    _retrieve(gst.TAG_TITLE, "name", track_kwargs)
    _retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
    # Following keys don't seem to have TAG_* constant.
    _retrieve("album-artist", "name", albumartist_kwargs)
    _retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
    _retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
    _retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
    _retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
    if albumartist_kwargs:
        album_kwargs[b"artists"] = [Artist(**albumartist_kwargs)]
    track_kwargs[b"uri"] = data["uri"]
    track_kwargs[b"length"] = data[gst.TAG_DURATION]
    track_kwargs[b"album"] = Album(**album_kwargs)
    track_kwargs[b"artists"] = [Artist(**artist_kwargs)]
    return Track(**track_kwargs)
|
def translator(data):
    """Convert a dict of GStreamer tags (``data``) into a ``Track`` model."""
    albumartist_kwargs = {}
    album_kwargs = {}
    artist_kwargs = {}
    track_kwargs = {}
    # NOTE: kwargs are explicitly made bytestrings to work on Python
    # 2.6.0/2.6.1. See https://github.com/mopidy/mopidy/issues/302 for
    # details.
    def _retrieve(source_key, target_key, target):
        # Copy data[source_key] into target under a forced-bytestring key
        if source_key in data:
            target[str(target_key)] = data[source_key]
    _retrieve(gst.TAG_ALBUM, "name", album_kwargs)
    _retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
    _retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
    if gst.TAG_DATE in data and data[gst.TAG_DATE]:
        date = data[gst.TAG_DATE]
        try:
            date = datetime.date(date.year, date.month, date.day)
        except ValueError:
            pass  # Ignore invalid dates
        else:
            track_kwargs[b"date"] = date.isoformat()
    _retrieve(gst.TAG_TITLE, "name", track_kwargs)
    _retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
    # Following keys don't seem to have TAG_* constant.
    _retrieve("album-artist", "name", albumartist_kwargs)
    _retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
    _retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
    _retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
    _retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
    if albumartist_kwargs:
        album_kwargs[b"artists"] = [Artist(**albumartist_kwargs)]
    track_kwargs[b"uri"] = data["uri"]
    track_kwargs[b"length"] = data[gst.TAG_DURATION]
    track_kwargs[b"album"] = Album(**album_kwargs)
    track_kwargs[b"artists"] = [Artist(**artist_kwargs)]
    return Track(**track_kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def model_json_decoder(dct):
    """
    Automatically deserialize Mopidy models from JSON.
    Usage::
    >>> import json
    >>> json.loads(
    ...     '{"a_track": {"__model__": "Track", "name": "name"}}',
    ...     object_hook=model_json_decoder)
    {u'a_track': Track(artists=[], name=u'name')}
    """
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
    # See https://github.com/mopidy/mopidy/issues/302 for details.
    if "__model__" in dct:
        model_name = dct.pop("__model__")
        cls = globals().get(model_name, None)
        # NOTE(review): for an unknown model name cls is None and
        # issubclass(None, ...) raises TypeError -- confirm callers only
        # feed known model names here.
        if issubclass(cls, ImmutableObject):
            kwargs = {}
            for key, value in dct.items():
                kwargs[str(key)] = value
            return cls(**kwargs)
    return dct
|
def model_json_decoder(dct):
    """
    Automatically deserialize Mopidy models from JSON.
    Usage::
    >>> import json
    >>> json.loads(
    ...     '{"a_track": {"__model__": "Track", "name": "name"}}',
    ...     object_hook=model_json_decoder)
    {u'a_track': Track(artists=[], name=u'name')}
    """
    # NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5;
    # json gives us unicode keys, so force each through str() before the
    # **kwargs call.  See https://github.com/mopidy/mopidy/issues/302.
    if "__model__" in dct:
        model_name = dct.pop("__model__")
        cls = globals().get(model_name, None)
        if issubclass(cls, ImmutableObject):
            kwargs = {}
            for key, value in dct.items():
                kwargs[str(key)] = value
            return cls(**kwargs)
    return dct
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def _convert_mpd_data(data, tracks, music_dir):
if not data:
return
track_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
albumartist_kwargs = {}
if "track" in data:
if "/" in data["track"]:
album_kwargs["num_tracks"] = int(data["track"].split("/")[1])
track_kwargs["track_no"] = int(data["track"].split("/")[0])
else:
track_kwargs["track_no"] = int(data["track"])
if "artist" in data:
artist_kwargs["name"] = data["artist"]
albumartist_kwargs["name"] = data["artist"]
if "albumartist" in data:
albumartist_kwargs["name"] = data["albumartist"]
if "album" in data:
album_kwargs["name"] = data["album"]
if "title" in data:
track_kwargs["name"] = data["title"]
if "date" in data:
track_kwargs["date"] = data["date"]
if "musicbrainz_trackid" in data:
track_kwargs["musicbrainz_id"] = data["musicbrainz_trackid"]
if "musicbrainz_albumid" in data:
album_kwargs["musicbrainz_id"] = data["musicbrainz_albumid"]
if "musicbrainz_artistid" in data:
artist_kwargs["musicbrainz_id"] = data["musicbrainz_artistid"]
if "musicbrainz_albumartistid" in data:
albumartist_kwargs["musicbrainz_id"] = data["musicbrainz_albumartistid"]
if artist_kwargs:
artist = Artist(**artist_kwargs)
track_kwargs["artists"] = [artist]
if albumartist_kwargs:
albumartist = Artist(**albumartist_kwargs)
album_kwargs["artists"] = [albumartist]
if album_kwargs:
album = Album(**album_kwargs)
track_kwargs["album"] = album
if data["file"][0] == "/":
path = data["file"][1:]
else:
path = data["file"]
path = urllib.unquote(path.encode("utf-8"))
if isinstance(music_dir, unicode):
music_dir = music_dir.encode("utf-8")
# Make sure we only pass bytestrings to path_to_uri to avoid implicit
# decoding of bytestrings to unicode strings
track_kwargs["uri"] = path_to_uri(music_dir, path)
track_kwargs["length"] = int(data.get("time", 0)) * 1000
track = Track(**track_kwargs)
tracks.add(track)
|
def _convert_mpd_data(data, tracks, music_dir):
if not data:
return
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details.
track_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
albumartist_kwargs = {}
if "track" in data:
if "/" in data["track"]:
album_kwargs[b"num_tracks"] = int(data["track"].split("/")[1])
track_kwargs[b"track_no"] = int(data["track"].split("/")[0])
else:
track_kwargs[b"track_no"] = int(data["track"])
if "artist" in data:
artist_kwargs[b"name"] = data["artist"]
albumartist_kwargs[b"name"] = data["artist"]
if "albumartist" in data:
albumartist_kwargs[b"name"] = data["albumartist"]
if "album" in data:
album_kwargs[b"name"] = data["album"]
if "title" in data:
track_kwargs[b"name"] = data["title"]
if "date" in data:
track_kwargs[b"date"] = data["date"]
if "musicbrainz_trackid" in data:
track_kwargs[b"musicbrainz_id"] = data["musicbrainz_trackid"]
if "musicbrainz_albumid" in data:
album_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumid"]
if "musicbrainz_artistid" in data:
artist_kwargs[b"musicbrainz_id"] = data["musicbrainz_artistid"]
if "musicbrainz_albumartistid" in data:
albumartist_kwargs[b"musicbrainz_id"] = data["musicbrainz_albumartistid"]
if artist_kwargs:
artist = Artist(**artist_kwargs)
track_kwargs[b"artists"] = [artist]
if albumartist_kwargs:
albumartist = Artist(**albumartist_kwargs)
album_kwargs[b"artists"] = [albumartist]
if album_kwargs:
album = Album(**album_kwargs)
track_kwargs[b"album"] = album
if data["file"][0] == "/":
path = data["file"][1:]
else:
path = data["file"]
path = urllib.unquote(path.encode("utf-8"))
if isinstance(music_dir, unicode):
music_dir = music_dir.encode("utf-8")
# Make sure we only pass bytestrings to path_to_uri to avoid implicit
# decoding of bytestrings to unicode strings
track_kwargs[b"uri"] = path_to_uri(music_dir, path)
track_kwargs[b"length"] = int(data.get("time", 0)) * 1000
track = Track(**track_kwargs)
tracks.add(track)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, config, core):
super(MpdFrontend, self).__init__()
hostname = network.format_hostname(config["mpd"]["hostname"])
port = config["mpd"]["port"]
try:
network.Server(
hostname,
port,
protocol=session.MpdSession,
protocol_kwargs={
"config": config,
"core": core,
},
max_connections=config["mpd"]["max_connections"],
timeout=config["mpd"]["connection_timeout"],
)
except IOError as error:
logger.error("MPD server startup failed: %s", encoding.locale_decode(error))
sys.exit(1)
logger.info("MPD server running at [%s]:%s", hostname, port)
|
def __init__(self, config, core):
super(MpdFrontend, self).__init__()
hostname = network.format_hostname(config["mpd"]["hostname"])
port = config["mpd"]["port"]
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details.
try:
network.Server(
hostname,
port,
protocol=session.MpdSession,
protocol_kwargs={
b"config": config,
b"core": core,
},
max_connections=config["mpd"]["max_connections"],
timeout=config["mpd"]["connection_timeout"],
)
except IOError as error:
logger.error("MPD server startup failed: %s", encoding.locale_decode(error))
sys.exit(1)
logger.info("MPD server running at [%s]:%s", hostname, port)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def handle_request(pattern, auth_required=True):
"""
Decorator for connecting command handlers to command requests.
If you use named groups in the pattern, the decorated method will get the
groups as keyword arguments. If the group is optional, remember to give the
argument a default value.
For example, if the command is ``do that thing`` the ``what`` argument will
be ``this thing``::
@handle_request('^do (?P<what>.+)$')
def do(what):
...
:param pattern: regexp pattern for matching commands
:type pattern: string
"""
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(
MpdCommand(name=match.group(), auth_required=auth_required)
)
compiled_pattern = re.compile(pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError(
"Tried to redefine handler for %s with %s" % (pattern, func)
)
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
return decorator
|
def handle_request(pattern, auth_required=True):
"""
Decorator for connecting command handlers to command requests.
If you use named groups in the pattern, the decorated method will get the
groups as keyword arguments. If the group is optional, remember to give the
argument a default value.
For example, if the command is ``do that thing`` the ``what`` argument will
be ``this thing``::
@handle_request('^do (?P<what>.+)$')
def do(what):
...
:param pattern: regexp pattern for matching commands
:type pattern: string
"""
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(
MpdCommand(name=match.group(), auth_required=auth_required)
)
# NOTE Make pattern a bytestring to get bytestring keys in the dict
# returned from matches.groupdict(), which is again used as a **kwargs
# dict. This is needed to work on Python < 2.6.5.
# See https://github.com/mopidy/mopidy/issues/302 for details.
bytestring_pattern = pattern.encode("utf-8")
compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError(
"Tried to redefine handler for %s with %s" % (pattern, func)
)
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
return decorator
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(MpdCommand(name=match.group(), auth_required=auth_required))
compiled_pattern = re.compile(pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError("Tried to redefine handler for %s with %s" % (pattern, func))
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
|
def decorator(func):
match = re.search("([a-z_]+)", pattern)
if match is not None:
mpd_commands.add(MpdCommand(name=match.group(), auth_required=auth_required))
# NOTE Make pattern a bytestring to get bytestring keys in the dict
# returned from matches.groupdict(), which is again used as a **kwargs
# dict. This is needed to work on Python < 2.6.5.
# See https://github.com/mopidy/mopidy/issues/302 for details.
bytestring_pattern = pattern.encode("utf-8")
compiled_pattern = re.compile(bytestring_pattern, flags=re.UNICODE)
if compiled_pattern in request_handlers:
raise ValueError("Tried to redefine handler for %s with %s" % (pattern, func))
request_handlers[compiled_pattern] = func
func.__doc__ = " - *Pattern:* ``%s``\n\n%s" % (pattern, func.__doc__ or "")
return func
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def query_from_mpd_list_format(field, mpd_query):
"""
Converts an MPD ``list`` query to a Mopidy query.
"""
if mpd_query is None:
return {}
try:
# shlex does not seem to be friends with unicode objects
tokens = shlex.split(mpd_query.encode("utf-8"))
except ValueError as error:
if str(error) == "No closing quotation":
raise MpdArgError("Invalid unquoted character", command="list")
else:
raise
tokens = [t.decode("utf-8") for t in tokens]
if len(tokens) == 1:
if field == "album":
if not tokens[0]:
raise ValueError
return {"artist": [tokens[0]]} # See above NOTE
else:
raise MpdArgError('should be "Album" for 3 arguments', command="list")
elif len(tokens) % 2 == 0:
query = {}
while tokens:
key = str(tokens[0].lower()) # See above NOTE
value = tokens[1]
tokens = tokens[2:]
if key not in ("artist", "album", "date", "genre"):
raise MpdArgError("not able to parse args", command="list")
if not value:
raise ValueError
if key in query:
query[key].append(value)
else:
query[key] = [value]
return query
else:
raise MpdArgError("not able to parse args", command="list")
|
def query_from_mpd_list_format(field, mpd_query):
"""
Converts an MPD ``list`` query to a Mopidy query.
"""
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
if mpd_query is None:
return {}
try:
# shlex does not seem to be friends with unicode objects
tokens = shlex.split(mpd_query.encode("utf-8"))
except ValueError as error:
if str(error) == "No closing quotation":
raise MpdArgError("Invalid unquoted character", command="list")
else:
raise
tokens = [t.decode("utf-8") for t in tokens]
if len(tokens) == 1:
if field == "album":
if not tokens[0]:
raise ValueError
return {b"artist": [tokens[0]]} # See above NOTE
else:
raise MpdArgError('should be "Album" for 3 arguments', command="list")
elif len(tokens) % 2 == 0:
query = {}
while tokens:
key = str(tokens[0].lower()) # See above NOTE
value = tokens[1]
tokens = tokens[2:]
if key not in ("artist", "album", "date", "genre"):
raise MpdArgError("not able to parse args", command="list")
if not value:
raise ValueError
if key in query:
query[key].append(value)
else:
query[key] = [value]
return query
else:
raise MpdArgError("not able to parse args", command="list")
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def copy(self, **values):
"""
Copy the model with ``field`` updated to new value.
Examples::
# Returns a track with a new name
Track(name='foo').copy(name='bar')
# Return an album with a new number of tracks
Album(num_tracks=2).copy(num_tracks=5)
:param values: the model fields to modify
:type values: dict
:rtype: new instance of the model being copied
"""
data = {}
for key in self.__dict__.keys():
public_key = key.lstrip("_")
data[public_key] = values.pop(public_key, self.__dict__[key])
for key in values.keys():
if hasattr(self, key):
data[key] = values.pop(key)
if values:
raise TypeError('copy() got an unexpected keyword argument "%s"' % key)
return self.__class__(**data)
|
def copy(self, **values):
"""
Copy the model with ``field`` updated to new value.
Examples::
# Returns a track with a new name
Track(name='foo').copy(name='bar')
# Return an album with a new number of tracks
Album(num_tracks=2).copy(num_tracks=5)
:param values: the model fields to modify
:type values: dict
:rtype: new instance of the model being copied
"""
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
data = {}
for key in self.__dict__.keys():
public_key = key.lstrip("_")
data[str(public_key)] = values.pop(public_key, self.__dict__[key])
for key in values.keys():
if hasattr(self, key):
data[str(key)] = values.pop(key)
if values:
raise TypeError('copy() got an unexpected keyword argument "%s"' % key)
return self.__class__(**data)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def model_json_decoder(dct):
"""
Automatically deserialize Mopidy models from JSON.
Usage::
>>> import json
>>> json.loads(
... '{"a_track": {"__model__": "Track", "name": "name"}}',
... object_hook=model_json_decoder)
{u'a_track': Track(artists=[], name=u'name')}
"""
if "__model__" in dct:
model_name = dct.pop("__model__")
cls = globals().get(model_name, None)
if issubclass(cls, ImmutableObject):
kwargs = {}
for key, value in dct.items():
kwargs[key] = value
return cls(**kwargs)
return dct
|
def model_json_decoder(dct):
"""
Automatically deserialize Mopidy models from JSON.
Usage::
>>> import json
>>> json.loads(
... '{"a_track": {"__model__": "Track", "name": "name"}}',
... object_hook=model_json_decoder)
{u'a_track': Track(artists=[], name=u'name')}
"""
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details.
if "__model__" in dct:
model_name = dct.pop("__model__")
cls = globals().get(model_name, None)
if issubclass(cls, ImmutableObject):
kwargs = {}
for key, value in dct.items():
kwargs[str(key)] = value
return cls(**kwargs)
return dct
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
self.__dict__["artists"] = frozenset(kwargs.pop("artists", []))
self.__dict__["images"] = frozenset(kwargs.pop("images", []))
super(Album, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
self.__dict__[b"artists"] = frozenset(kwargs.pop("artists", []))
self.__dict__[b"images"] = frozenset(kwargs.pop("images", []))
super(Album, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
self.__dict__["artists"] = frozenset(kwargs.pop("artists", []))
super(Track, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
self.__dict__[b"artists"] = frozenset(kwargs.pop("artists", []))
super(Track, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
if len(args) == 2 and len(kwargs) == 0:
kwargs["tlid"] = args[0]
kwargs["track"] = args[1]
args = []
super(TlTrack, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
if len(args) == 2 and len(kwargs) == 0:
kwargs[b"tlid"] = args[0]
kwargs[b"track"] = args[1]
args = []
super(TlTrack, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
self.__dict__["tracks"] = tuple(kwargs.pop("tracks", []))
super(Playlist, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
self.__dict__[b"tracks"] = tuple(kwargs.pop("tracks", []))
super(Playlist, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def __init__(self, *args, **kwargs):
self.__dict__["tracks"] = tuple(kwargs.pop("tracks", []))
self.__dict__["artists"] = tuple(kwargs.pop("artists", []))
self.__dict__["albums"] = tuple(kwargs.pop("albums", []))
super(SearchResult, self).__init__(*args, **kwargs)
|
def __init__(self, *args, **kwargs):
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details
self.__dict__[b"tracks"] = tuple(kwargs.pop("tracks", []))
self.__dict__[b"artists"] = tuple(kwargs.pop("artists", []))
self.__dict__[b"albums"] = tuple(kwargs.pop("albums", []))
super(SearchResult, self).__init__(*args, **kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
parser.add_option(
"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
return parser.parse_args(args=mopidy_args)[0]
|
def parse_options():
parser = optparse.OptionParser(version="Mopidy %s" % versioning.get_version())
# NOTE First argument to add_option must be bytestrings on Python < 2.6.2
# See https://github.com/mopidy/mopidy/issues/302 for details
parser.add_option(
b"-q",
"--quiet",
action="store_const",
const=0,
dest="verbosity_level",
help="less output (warning level)",
)
parser.add_option(
b"-v",
"--verbose",
action="count",
default=1,
dest="verbosity_level",
help="more output (debug level)",
)
return parser.parse_args(args=mopidy_args)[0]
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def translator(data):
albumartist_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
track_kwargs = {}
def _retrieve(source_key, target_key, target):
if source_key in data:
target[target_key] = data[source_key]
_retrieve(gst.TAG_ALBUM, "name", album_kwargs)
_retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
_retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
if gst.TAG_DATE in data and data[gst.TAG_DATE]:
date = data[gst.TAG_DATE]
try:
date = datetime.date(date.year, date.month, date.day)
except ValueError:
pass # Ignore invalid dates
else:
track_kwargs["date"] = date.isoformat()
_retrieve(gst.TAG_TITLE, "name", track_kwargs)
_retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
# Following keys don't seem to have TAG_* constant.
_retrieve("album-artist", "name", albumartist_kwargs)
_retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
_retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
_retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
_retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
if albumartist_kwargs:
album_kwargs["artists"] = [Artist(**albumartist_kwargs)]
track_kwargs["uri"] = data["uri"]
track_kwargs["length"] = data[gst.TAG_DURATION]
track_kwargs["album"] = Album(**album_kwargs)
track_kwargs["artists"] = [Artist(**artist_kwargs)]
return Track(**track_kwargs)
|
def translator(data):
albumartist_kwargs = {}
album_kwargs = {}
artist_kwargs = {}
track_kwargs = {}
# NOTE kwargs dict keys must be bytestrings to work on Python < 2.6.5
# See https://github.com/mopidy/mopidy/issues/302 for details.
def _retrieve(source_key, target_key, target):
if source_key in data:
target[str(target_key)] = data[source_key]
_retrieve(gst.TAG_ALBUM, "name", album_kwargs)
_retrieve(gst.TAG_TRACK_COUNT, "num_tracks", album_kwargs)
_retrieve(gst.TAG_ARTIST, "name", artist_kwargs)
if gst.TAG_DATE in data and data[gst.TAG_DATE]:
date = data[gst.TAG_DATE]
try:
date = datetime.date(date.year, date.month, date.day)
except ValueError:
pass # Ignore invalid dates
else:
track_kwargs[b"date"] = date.isoformat()
_retrieve(gst.TAG_TITLE, "name", track_kwargs)
_retrieve(gst.TAG_TRACK_NUMBER, "track_no", track_kwargs)
# Following keys don't seem to have TAG_* constant.
_retrieve("album-artist", "name", albumartist_kwargs)
_retrieve("musicbrainz-trackid", "musicbrainz_id", track_kwargs)
_retrieve("musicbrainz-artistid", "musicbrainz_id", artist_kwargs)
_retrieve("musicbrainz-albumid", "musicbrainz_id", album_kwargs)
_retrieve("musicbrainz-albumartistid", "musicbrainz_id", albumartist_kwargs)
if albumartist_kwargs:
album_kwargs[b"artists"] = [Artist(**albumartist_kwargs)]
track_kwargs[b"uri"] = data["uri"]
track_kwargs[b"length"] = data[gst.TAG_DURATION]
track_kwargs[b"album"] = Album(**album_kwargs)
track_kwargs[b"artists"] = [Artist(**artist_kwargs)]
return Track(**track_kwargs)
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def _retrieve(source_key, target_key, target):
if source_key in data:
target[target_key] = data[source_key]
|
def _retrieve(source_key, target_key, target):
if source_key in data:
target[str(target_key)] = data[source_key]
|
https://github.com/mopidy/mopidy/issues/302
|
erik@faust:~$ mopidy
Traceback (most recent call last):
File "/usr/local/bin/mopidy", line 5, in <module>
main()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 48, in main
options = parse_options()
File "/usr/local/lib64/python2.6/site-packages/mopidy/__main__.py", line 85, in parse_options
help='show GStreamer help options')
File "/usr/lib64/python2.6/optparse.py", line 1012, in add_option
raise TypeError, "invalid arguments"
TypeError: invalid arguments
erik@faust:~$ python --version
Python 2.6
erik@faust:~$ uname -a
Linux faust 3.0.13-0.27-default #1 SMP Wed Feb 15 13:33:49 UTC 2012 (d73692b) x86_64 x86_64 x86_64 GNU/Linux
erik@faust:~$ cat /etc/SuSE-release
SUSE Linux Enterprise Server 11 (x86_64)
VERSION = 11
PATCHLEVEL = 2
erik@faust:~$
|
TypeError
|
def _select_mixer_track(self, mixer, track_label):
# Ignore tracks without volumes, then look for track with
# label == settings.MIXER_TRACK, otherwise fallback to first usable
# track hoping the mixer gave them to us in a sensible order.
usable_tracks = []
for track in mixer.list_tracks():
if not mixer.get_volume(track):
continue
if track_label and track.label == track_label:
return track
elif track.flags & (
gst.interfaces.MIXER_TRACK_MASTER | gst.interfaces.MIXER_TRACK_OUTPUT
):
usable_tracks.append(track)
if usable_tracks:
return usable_tracks[0]
|
def _select_mixer_track(self, mixer, track_label):
# Look for track with label == MIXER_TRACK, otherwise fallback to
# master track which is also an output.
for track in mixer.list_tracks():
if track_label:
if track.label == track_label:
return track
elif track.flags & (
gst.interfaces.MIXER_TRACK_MASTER | gst.interfaces.MIXER_TRACK_OUTPUT
):
return track
|
https://github.com/mopidy/mopidy/issues/307
|
INFO 2013-01-03 15:47:01,091 [7328:MainThread] mopidy.utils.log
Starting Mopidy 0.11.1
INFO 2013-01-03 15:47:01,095 [7328:MainThread] mopidy.utils.log
Platform: Linux-3.2.0-4-amd64-x86_64-with-debian-7.0
INFO 2013-01-03 15:47:01,095 [7328:MainThread] mopidy.utils.log
Python: CPython 2.7.3rc2
DEBUG 2013-01-03 15:47:01,097 [7328:MainThread] mopidy.utils
Loading: mopidy.backends.local.LocalBackend
WARNING 2013-01-03 15:47:01,101 [7328:MainThread] mopidy.backends.local
Could not open tag cache: [Errno 2] No such file or directory: u'/home/levesqu6/.local/share/mopidy/tag_cache'
INFO 2013-01-03 15:47:01,101 [7328:MainThread] mopidy.backends.local
Loading tracks from /home/levesqu6/None using /home/levesqu6/.local/share/mopidy/tag_cache
INFO 2013-01-03 15:47:01,102 [7328:MainThread] mopidy.backends.local
Loading playlists from /home/levesqu6/.local/share/mopidy/playlists
DEBUG 2013-01-03 15:47:01,104 [7328:MainThread] mopidy.utils
Loading: mopidy.backends.spotify.SpotifyBackend
INFO 2013-01-03 15:47:01,104 [7328:Audio-1] mopidy.audio
Audio output set to "alsasink"
DEBUG 2013-01-03 15:47:01,123 [7328:Audio-1] mopidy.audio.mixers.auto
AutoAudioMixer chose: alsamixerelement1
INFO 2013-01-03 15:47:01,124 [7328:Audio-1] mopidy.audio
Audio mixer set to "alsamixer" using track "Bass Boost"
INFO 2013-01-03 15:47:01,127 [7328:SpotifyBackend-4] mopidy.backends.spotify
Mopidy uses SPOTIFY(R) CORE
DEBUG 2013-01-03 15:47:01,129 [7328:SpotifyBackend-4] mopidy.backends.spotify
Connecting to Spotify
DEBUG 2013-01-03 15:47:01,132 [7328:SpotifyThread] mopidy.utils.process
SpotifyThread: Starting thread
DEBUG 2013-01-03 15:47:01,134 [7328:SpotifyThread] mopidy.backends.spotify
System message: 20:47:01.134 I [offline_authorizer.cpp:297] Unable to login offline: no such user
DEBUG 2013-01-03 15:47:01,134 [7328:SpotifyThread] pyspotify.manager.session
No message received before timeout. Processing events
DEBUG 2013-01-03 15:47:01,134 [7328:Dummy-5] mopidy.backends.spotify
System message: 20:47:01.134 I [ap:1752] Connecting to AP ap.spotify.com:4070
DEBUG 2013-01-03 15:47:01,135 [7328:SpotifyThread] pyspotify.manager.session
Will wait 300.041s for next message
ERROR 2013-01-03 15:47:01,139 [7328:MainThread] mopidy.main
float division by zero
Traceback (most recent call last):
File "/usr/lib/pymodules/python2.7/mopidy/__main__.py", line 61, in main
core = setup_core(audio, backends)
File "/usr/lib/pymodules/python2.7/mopidy/__main__.py", line 164, in setup_core
return Core.start(audio=audio, backends=backends).proxy()
File "/usr/lib/pymodules/python2.7/pykka/actor.py", line 461, in proxy
return _ActorProxy(self)
File "/usr/lib/pymodules/python2.7/pykka/proxy.py", line 99, in __init__
self._known_attrs = self._get_attributes()
File "/usr/lib/pymodules/python2.7/pykka/proxy.py", line 111, in _get_attributes
attr = self._actor._get_attribute_from_path(attr_path)
File "/usr/lib/pymodules/python2.7/pykka/actor.py", line 296, in _get_attribute_from_path
attr = getattr(attr, attr_name)
File "/usr/lib/pymodules/python2.7/mopidy/core/playback.py", line 280, in get_volume
return self.audio.get_volume().get()
File "/usr/lib/pymodules/python2.7/pykka/future.py", line 116, in get
'raise exc_info[0], exc_info[1], exc_info[2]')
File "/usr/lib/pymodules/python2.7/pykka/actor.py", line 194, in _actor_loop
response = self._handle_receive(message)
File "/usr/lib/pymodules/python2.7/pykka/actor.py", line 265, in _handle_receive
return callee(*message['args'], **message['kwargs'])
File "/usr/lib/pymodules/python2.7/mopidy/audio/actor.py", line 393, in get_volume
avg_volume = float(sum(volumes)) / len(volumes)
ZeroDivisionError: float division by zero
DEBUG 2013-01-03 15:47:01,141 [7328:MainThread] mopidy.utils
Loading: mopidy.frontends.mpd.MpdFrontend
DEBUG 2013-01-03 15:47:01,183 [7328:MainThread] mopidy.utils.process
Stopping 0 instance(s) of MpdFrontend
DEBUG 2013-01-03 15:47:01,183 [7328:MainThread] mopidy.utils
Loading: mopidy.frontends.lastfm.LastfmFrontend
DEBUG 2013-01-03 15:47:01,199 [7328:MainThread] mopidy.utils.process
Stopping 0 instance(s) of LastfmFrontend
DEBUG 2013-01-03 15:47:01,200 [7328:Dummy-5] mopidy.backends.spotify
System message: 20:47:01.184 I [ap:1226] Connected to AP: 193.182.8.35:4070
DEBUG 2013-01-03 15:47:01,200 [7328:MainThread] mopidy.utils
Loading: mopidy.frontends.mpris.MprisFrontend
DEBUG 2013-01-03 15:47:01,247 [7328:MainThread] mopidy.utils.process
Stopping 0 instance(s) of MprisFrontend
DEBUG 2013-01-03 15:47:01,248 [7328:MainThread] mopidy.utils.process
Stopping 1 instance(s) of Core
DEBUG 2013-01-03 15:47:01,248 [7328:MainThread] mopidy.utils
Loading: mopidy.backends.local.LocalBackend
DEBUG 2013-01-03 15:47:01,248 [7328:MainThread] mopidy.utils.process
Stopping 1 instance(s) of LocalBackend
DEBUG 2013-01-03 15:47:01,248 [7328:MainThread] mopidy.utils
Loading: mopidy.backends.spotify.SpotifyBackend
DEBUG 2013-01-03 15:47:01,249 [7328:MainThread] mopidy.utils.process
Stopping 1 instance(s) of SpotifyBackend
DEBUG 2013-01-03 15:47:01,249 [7328:SpotifyBackend-4] mopidy.backends.spotify
Logging out from Spotify
DEBUG 2013-01-03 15:47:01,249 [7328:MainThread] mopidy.utils.process
Stopping 1 instance(s) of Audio
DEBUG 2013-01-03 15:47:01,250 [7328:MainThread] mopidy.utils.process
All actors stopped.
|
ZeroDivisionError
|
def parse_m3u(file_path, music_folder):
"""
Convert M3U file list of uris
Example M3U data::
# This is a comment
Alternative\Band - Song.mp3
Classical\Other Band - New Song.mp3
Stuff.mp3
D:\More Music\Foo.mp3
http://www.example.com:8000/Listen.pls
http://www.example.com/~user/Mine.mp3
- Relative paths of songs should be with respect to location of M3U.
- Paths are normaly platform specific.
- Lines starting with # should be ignored.
- m3u files are latin-1.
- This function does not bother with Extended M3U directives.
"""
uris = []
try:
with open(file_path) as m3u:
contents = m3u.readlines()
except IOError as error:
logger.error("Couldn't open m3u: %s", locale_decode(error))
return uris
for line in contents:
line = line.strip().decode("latin1")
if line.startswith("#"):
continue
# FIXME what about other URI types?
if line.startswith("file://"):
uris.append(line)
else:
path = path_to_uri(music_folder, line)
uris.append(path)
return uris
|
def parse_m3u(file_path):
"""
Convert M3U file list of uris
Example M3U data::
# This is a comment
Alternative\Band - Song.mp3
Classical\Other Band - New Song.mp3
Stuff.mp3
D:\More Music\Foo.mp3
http://www.example.com:8000/Listen.pls
http://www.example.com/~user/Mine.mp3
- Relative paths of songs should be with respect to location of M3U.
- Paths are normaly platform specific.
- Lines starting with # should be ignored.
- m3u files are latin-1.
- This function does not bother with Extended M3U directives.
"""
uris = []
folder = os.path.dirname(file_path)
try:
with open(file_path) as m3u:
contents = m3u.readlines()
except IOError as error:
logger.error("Couldn't open m3u: %s", locale_decode(error))
return uris
for line in contents:
line = line.strip().decode("latin1")
if line.startswith("#"):
continue
# FIXME what about other URI types?
if line.startswith("file://"):
uris.append(line)
else:
path = path_to_uri(folder, line)
uris.append(path)
return uris
|
https://github.com/mopidy/mopidy/issues/189
|
$ PYTHONPATH=. ./bin/mopidy-scan
INFO 2012-09-11 15:56:32,431 [15354:MainThread] root
Scanning None
Traceback (most recent call last):
File "./bin/mopidy-scan", line 26, in <module>
scanner = Scanner(settings.LOCAL_MUSIC_PATH, store, debug)
File "/home/jodal/dev/mopidy/mopidy/scanner.py", line 55, in __init__
self.uris = [path_to_uri(f) for f in find_files(folder)]
File "/home/jodal/dev/mopidy/mopidy/utils/path.py", line 53, in find_files
if os.path.isfile(path):
File "/home/jodal/dev/virtualenvs/mopidy/lib/python2.7/genericpath.py", line 29, in isfile
st = os.stat(path)
TypeError: coercing to Unicode: need string or buffer, NoneType found
|
TypeError
|
def __init__(self, folder, data_callback, error_callback=None):
self.files = find_files(folder)
self.data_callback = data_callback
self.error_callback = error_callback
self.loop = gobject.MainLoop()
fakesink = gst.element_factory_make("fakesink")
self.uribin = gst.element_factory_make("uridecodebin")
self.uribin.set_property("caps", gst.Caps("audio/x-raw-int"))
self.uribin.connect("pad-added", self.process_new_pad, fakesink.get_pad("sink"))
self.pipe = gst.element_factory_make("pipeline")
self.pipe.add(self.uribin)
self.pipe.add(fakesink)
bus = self.pipe.get_bus()
bus.add_signal_watch()
bus.connect("message::tag", self.process_tags)
bus.connect("message::error", self.process_error)
|
def __init__(self, folder, data_callback, error_callback=None):
self.uris = [path_to_uri(f) for f in find_files(folder)]
self.data_callback = data_callback
self.error_callback = error_callback
self.loop = gobject.MainLoop()
fakesink = gst.element_factory_make("fakesink")
self.uribin = gst.element_factory_make("uridecodebin")
self.uribin.set_property("caps", gst.Caps("audio/x-raw-int"))
self.uribin.connect("pad-added", self.process_new_pad, fakesink.get_pad("sink"))
self.pipe = gst.element_factory_make("pipeline")
self.pipe.add(self.uribin)
self.pipe.add(fakesink)
bus = self.pipe.get_bus()
bus.add_signal_watch()
bus.connect("message::tag", self.process_tags)
bus.connect("message::error", self.process_error)
|
https://github.com/mopidy/mopidy/issues/189
|
$ PYTHONPATH=. ./bin/mopidy-scan
INFO 2012-09-11 15:56:32,431 [15354:MainThread] root
Scanning None
Traceback (most recent call last):
File "./bin/mopidy-scan", line 26, in <module>
scanner = Scanner(settings.LOCAL_MUSIC_PATH, store, debug)
File "/home/jodal/dev/mopidy/mopidy/scanner.py", line 55, in __init__
self.uris = [path_to_uri(f) for f in find_files(folder)]
File "/home/jodal/dev/mopidy/mopidy/utils/path.py", line 53, in find_files
if os.path.isfile(path):
File "/home/jodal/dev/virtualenvs/mopidy/lib/python2.7/genericpath.py", line 29, in isfile
st = os.stat(path)
TypeError: coercing to Unicode: need string or buffer, NoneType found
|
TypeError
|
def next_uri(self):
try:
uri = path_to_uri(self.files.next())
except StopIteration:
self.stop()
return False
self.pipe.set_state(gst.STATE_NULL)
self.uribin.set_property("uri", uri)
self.pipe.set_state(gst.STATE_PAUSED)
return True
|
def next_uri(self):
if not self.uris:
return self.stop()
self.pipe.set_state(gst.STATE_NULL)
self.uribin.set_property("uri", self.uris.pop())
self.pipe.set_state(gst.STATE_PAUSED)
|
https://github.com/mopidy/mopidy/issues/189
|
$ PYTHONPATH=. ./bin/mopidy-scan
INFO 2012-09-11 15:56:32,431 [15354:MainThread] root
Scanning None
Traceback (most recent call last):
File "./bin/mopidy-scan", line 26, in <module>
scanner = Scanner(settings.LOCAL_MUSIC_PATH, store, debug)
File "/home/jodal/dev/mopidy/mopidy/scanner.py", line 55, in __init__
self.uris = [path_to_uri(f) for f in find_files(folder)]
File "/home/jodal/dev/mopidy/mopidy/utils/path.py", line 53, in find_files
if os.path.isfile(path):
File "/home/jodal/dev/virtualenvs/mopidy/lib/python2.7/genericpath.py", line 29, in isfile
st = os.stat(path)
TypeError: coercing to Unicode: need string or buffer, NoneType found
|
TypeError
|
def start(self):
if self.next_uri():
self.loop.run()
|
def start(self):
if not self.uris:
return
self.next_uri()
self.loop.run()
|
https://github.com/mopidy/mopidy/issues/189
|
$ PYTHONPATH=. ./bin/mopidy-scan
INFO 2012-09-11 15:56:32,431 [15354:MainThread] root
Scanning None
Traceback (most recent call last):
File "./bin/mopidy-scan", line 26, in <module>
scanner = Scanner(settings.LOCAL_MUSIC_PATH, store, debug)
File "/home/jodal/dev/mopidy/mopidy/scanner.py", line 55, in __init__
self.uris = [path_to_uri(f) for f in find_files(folder)]
File "/home/jodal/dev/mopidy/mopidy/utils/path.py", line 53, in find_files
if os.path.isfile(path):
File "/home/jodal/dev/virtualenvs/mopidy/lib/python2.7/genericpath.py", line 29, in isfile
st = os.stat(path)
TypeError: coercing to Unicode: need string or buffer, NoneType found
|
TypeError
|
def import_targets(request):
context = {}
context["import_target_li"] = "active"
context["target_data_active"] = "true"
if request.method == "POST":
if "txtFile" in request.FILES:
txt_file = request.FILES["txtFile"]
if txt_file.content_type == "text/plain":
target_count = 0
txt_content = txt_file.read().decode("UTF-8")
io_string = io.StringIO(txt_content)
for target in io_string:
if validators.domain(target):
Domain.objects.create(
domain_name=target.rstrip("\n"), insert_date=timezone.now()
)
target_count += 1
if target_count:
messages.add_message(
request,
messages.SUCCESS,
str(target_count) + " targets added successfully!",
)
return http.HttpResponseRedirect(reverse("list_target"))
else:
messages.add_message(
request,
messages.ERROR,
"Oops! File format was invalid, could not import any targets.",
)
else:
messages.add_message(request, messages.ERROR, "Invalid File type!")
elif "csvFile" in request.FILES:
csv_file = request.FILES["csvFile"]
if csv_file.content_type == "text/csv":
target_count = 0
csv_content = csv_file.read().decode("UTF-8")
io_string = io.StringIO(csv_content)
for column in csv.reader(io_string, delimiter=","):
if validators.domain(column[0]):
Domain.objects.create(
domain_name=column[0],
domain_description=column[1],
insert_date=timezone.now(),
)
target_count += 1
if target_count:
messages.add_message(
request,
messages.SUCCESS,
str(target_count) + " targets added successfully!",
)
return http.HttpResponseRedirect(reverse("list_target"))
else:
messages.add_message(
request,
messages.ERROR,
"Oops! File format was invalid, could not import any targets.",
)
else:
messages.add_message(request, messages.ERROR, "Invalid File type!")
return render(request, "target/import.html", context)
|
def import_targets(request):
context = {}
context["import_target_li"] = "active"
context["target_data_active"] = "true"
if request.method == "POST":
if "txtFile" in request.FILES:
txt_file = request.FILES["txtFile"]
if txt_file.content_type == "text/plain":
target_count = 0
txt_content = txt_file.read().decode("UTF-8")
io_string = io.StringIO(txt_content)
for target in io_string:
if validators.domain(target):
Domain.objects.create(
domain_name=target, insert_date=timezone.now()
)
target_count += 1
if target_count:
messages.add_message(
request,
messages.SUCCESS,
str(target_count) + " targets added successfully!",
)
return http.HttpResponseRedirect(reverse("list_target"))
else:
messages.add_message(
request,
messages.ERROR,
"Oops! File format was invalid, could not import any targets.",
)
else:
messages.add_message(request, messages.ERROR, "Invalid File type!")
elif "csvFile" in request.FILES:
csv_file = request.FILES["csvFile"]
if csv_file.content_type == "text/csv":
target_count = 0
csv_content = csv_file.read().decode("UTF-8")
io_string = io.StringIO(csv_content)
for column in csv.reader(io_string, delimiter=","):
if validators.domain(column[0]):
Domain.objects.create(
domain_name=column[0],
domain_description=column[1],
insert_date=timezone.now(),
)
target_count += 1
if target_count:
messages.add_message(
request,
messages.SUCCESS,
str(target_count) + " targets added successfully!",
)
return http.HttpResponseRedirect(reverse("list_target"))
else:
messages.add_message(
request,
messages.ERROR,
"Oops! File format was invalid, could not import any targets.",
)
else:
messages.add_message(request, messages.ERROR, "Invalid File type!")
return render(request, "target/import.html", context)
|
https://github.com/yogeshojha/rengine/issues/228
|
Running migrations:
No migrations to apply.
Installed 3 object(s) from 1 fixture(s)
[2020-09-07 09:47:08 +0000] [1] [INFO] Starting gunicorn 20.0.4
[2020-09-07 09:47:08 +0000] [1] [INFO] Listening at: http://0.0.0.0:8000 (1)
[2020-09-07 09:47:08 +0000] [1] [INFO] Using worker: sync
[2020-09-07 09:47:08 +0000] [12] [INFO] Booting worker with pid: 12
cat: can't open '/*.txt': No such file or directory
sh: twitter.com: not found
sh: _2020_09_07_09_47_35: not found
Traceback (most recent call last):
File "/app/startScan/views.py", line 184, in doScan
with open(subdomain_scan_results_file) as subdomain_list:
FileNotFoundError: [Errno 2] No such file or directory: '/app/tools/scan_results/twitter.com\n_2020_09_07_09_47_35/sorted_subdomain_collection.txt'
|
FileNotFoundError
|
def __init__(self, key_prefixes, runtime_dirs=get_runtime_dirs()):
key_prefixes = map(self._sanitize, key_prefixes)
# compute read and write dirs from base runtime dirs: the first base
# dir is selected for writes and prefered for reads
self._read_dirs = [os.path.join(x, *key_prefixes) for x in runtime_dirs]
self._write_dir = self._read_dirs[0]
os.makedirs(self._write_dir, exist_ok=True)
if sys.platform == "linux":
# set the sticky bit to prevent removal during cleanup
os.chmod(self._write_dir, 0o1700)
_LOGGER.debug("data in %s", self._write_dir)
|
def __init__(self, key_prefixes):
key_prefixes = map(self._sanitize, key_prefixes)
# compute read and write dirs from base runtime dirs: the first base
# dir is selected for writes and prefered for reads
self._read_dirs = [os.path.join(x, *key_prefixes) for x in get_runtime_dirs()]
self._write_dir = self._read_dirs[0]
os.makedirs(self._write_dir, exist_ok=True)
if sys.platform == "linux":
# set the sticky bit to prevent removal during cleanup
os.chmod(self._write_dir, 0o1700)
_LOGGER.debug("data in %s", self._write_dir)
|
https://github.com/liquidctl/liquidctl/issues/278
|
C:\>liquidctl list --verbose
Device ID 0: Corsair H100i PRO XT (experimental)
βββ Vendor ID: 0x1b1c
βββ Product ID: 0x0c20
βββ Release number: 0x0100
βββ Bus: hid
βββ Address: \\?\hid#vid_1b1c&pid_0c20#7&1e4e78f5&0&0000#{4d1e55b2-f16f-11cf-88cb-001111000030}
βββ Driver: HydroPlatinum using module hid
C:\>liquidctl initialize all
ERROR: Unexpected error with Corsair H100i PRO XT (experimental)
Traceback (most recent call last):
ValueError: source code string cannot contain null bytes
|
ValueError
|
def load(self, key):
for base in self._read_dirs:
path = os.path.join(base, key)
if not os.path.isfile(path):
continue
try:
with open(path, mode="r") as f:
data = f.read().strip()
if len(data) == 0:
value = None
else:
value = literal_eval(data)
_LOGGER.debug("loaded %s=%r (from %s)", key, value, path)
except OSError as err:
_LOGGER.warning("%s exists but could not be read: %s", path, err)
except ValueError as err:
_LOGGER.warning("%s exists but was corrupted: %s", key, err)
else:
return value
_LOGGER.debug("no data (file) found for %s", key)
return None
|
def load(self, key):
for base in self._read_dirs:
path = os.path.join(base, key)
if not os.path.isfile(path):
continue
try:
with open(path, mode="r") as f:
data = f.read().strip()
if len(data) == 0:
value = None
else:
value = literal_eval(data)
_LOGGER.debug("loaded %s=%r (from %s)", key, value, path)
except OSError as err:
_LOGGER.warning("%s exists but cannot be read: %s", path, err)
continue
return value
_LOGGER.debug("no data (file) found for %s", key)
return None
|
https://github.com/liquidctl/liquidctl/issues/278
|
C:\>liquidctl list --verbose
Device ID 0: Corsair H100i PRO XT (experimental)
βββ Vendor ID: 0x1b1c
βββ Product ID: 0x0c20
βββ Release number: 0x0100
βββ Bus: hid
βββ Address: \\?\hid#vid_1b1c&pid_0c20#7&1e4e78f5&0&0000#{4d1e55b2-f16f-11cf-88cb-001111000030}
βββ Driver: HydroPlatinum using module hid
C:\>liquidctl initialize all
ERROR: Unexpected error with Corsair H100i PRO XT (experimental)
Traceback (most recent call last):
ValueError: source code string cannot contain null bytes
|
ValueError
|
def _write(self, data):
assert len(data) <= _REPORT_LENGTH
packet = bytearray(1 + _REPORT_LENGTH)
packet[1 : 1 + len(data)] = data # device doesn't use numbered reports
self.device.write(packet)
|
def _write(self, data):
padding = [0x0] * (_WRITE_LENGTH - len(data))
self.device.write(data + padding)
|
https://github.com/liquidctl/liquidctl/issues/166
|
PS E:\liquidctl> .\liquidctl status
NZXT Kraken X (X42, X52, X62 or X72)
βββ Liquid temperature 30.4 Β°C
βββ Fan speed 1100 rpm
βββ Pump speed 2703 rpm
βββ Firmware version 6.0.2
ERROR: Unexpected error with NZXT E850 (experimental)
Traceback (most recent call last):
AssertionError: invalid response (attempts=3)
And here is the debug output:
[DEBUG] __main__: device: NZXT E850 (experimental)
[DEBUG] liquidctl.driver.usb: discarded 0 previously enqueued reports
[DEBUG] liquidctl.driver.usb: writting report 0xad with 63 bytes: 00:03:01:60:fc:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: read 64 bytes: aa:04:02:08:d2:45:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: writting report 0xad with 63 bytes: 00:03:01:60:fc:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: read 64 bytes: aa:04:02:08:d2:45:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: writting report 0xad with 63 bytes: 00:03:01:60:fc:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: read 64 bytes: aa:04:02:08:d2:45:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[ERROR] __main__: Unexpected error with NZXT E850 (experimental)
Traceback (most recent call last):
File "liquidctl\cli.py", line 302, in main
File "liquidctl\driver\nzxt_epsu.py", line 59, in get_status
File "liquidctl\driver\nzxt_epsu.py", line 145, in _get_fw_versions
File "liquidctl\driver\nzxt_epsu.py", line 113, in _exec_read
AssertionError: invalid response (attempts=3)
|
AssertionError
|
def _read(self):
return self.device.read(_REPORT_LENGTH)
|
def _read(self):
return self.device.read(_READ_LENGTH)
|
https://github.com/liquidctl/liquidctl/issues/166
|
PS E:\liquidctl> .\liquidctl status
NZXT Kraken X (X42, X52, X62 or X72)
βββ Liquid temperature 30.4 Β°C
βββ Fan speed 1100 rpm
βββ Pump speed 2703 rpm
βββ Firmware version 6.0.2
ERROR: Unexpected error with NZXT E850 (experimental)
Traceback (most recent call last):
AssertionError: invalid response (attempts=3)
And here is the debug output:
[DEBUG] __main__: device: NZXT E850 (experimental)
[DEBUG] liquidctl.driver.usb: discarded 0 previously enqueued reports
[DEBUG] liquidctl.driver.usb: writting report 0xad with 63 bytes: 00:03:01:60:fc:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: read 64 bytes: aa:04:02:08:d2:45:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: writting report 0xad with 63 bytes: 00:03:01:60:fc:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: read 64 bytes: aa:04:02:08:d2:45:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: writting report 0xad with 63 bytes: 00:03:01:60:fc:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[DEBUG] liquidctl.driver.usb: read 64 bytes: aa:04:02:08:d2:45:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00:00
[ERROR] __main__: Unexpected error with NZXT E850 (experimental)
Traceback (most recent call last):
File "liquidctl\cli.py", line 302, in main
File "liquidctl\driver\nzxt_epsu.py", line 59, in get_status
File "liquidctl\driver\nzxt_epsu.py", line 145, in _get_fw_versions
File "liquidctl\driver\nzxt_epsu.py", line 113, in _exec_read
AssertionError: invalid response (attempts=3)
|
AssertionError
|
def connect(self, **kwargs):
"""Connect to the device."""
super().connect(**kwargs)
ids = f"vid{self.vendor_id:04x}_pid{self.product_id:04x}"
# must use the HID path because there is no serial number; however,
# these can be quite long on Windows and macOS, so only take the
# numbers, since they are likely the only parts that vary between two
# devices of the same model
loc = "loc" + "_".join(re.findall(r"\d+", self.address))
self._data = RuntimeStorage(key_prefixes=[ids, loc])
self._sequence = _sequence(self._data)
|
def connect(self, **kwargs):
"""Connect to the device."""
super().connect(**kwargs)
ids = f"vid{self.vendor_id:04x}_pid{self.product_id:04x}"
# must use the HID path because there is no serial number; however,
# these can be quite long on Windows and macOS, so only take the
# numbers, since they are likely the only parts that vary between two
# devices of the same model
loc = "loc" + "_".join((num.decode() for num in re.findall(b"\\d+", self.address)))
self._data = RuntimeStorage(key_prefixes=[ids, loc])
self._sequence = _sequence(self._data)
|
https://github.com/liquidctl/liquidctl/issues/162
|
[DEBUG] __main__: device: Corsair H115i PRO XT (experimental)
self.address: "/dev/hidraw1"
Traceback (most recent call last):
File "/usr/lib/python3.8/runpy.py", line 194, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/usr/lib/python3.8/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/home/meta/src/liquidctl/liquidctl/cli.py", line 327, in <module>
main()
File "/home/meta/src/liquidctl/liquidctl/cli.py", line 297, in main
dev.connect(**opts)
File "/home/meta/src/liquidctl/liquidctl/driver/hydro_platinum.py", line 158, in connect
loc = 'loc' + '_'.join((num.decode() for num in re.findall(b'\\d+', self.address)))
File "/usr/lib/python3.8/re.py", line 241, in findall
return _compile(pattern, flags).findall(string)
TypeError: cannot use a bytes pattern on a string-like object
|
TypeError
|
def connect(self, **kwargs):
"""Connect to the device.
Enables the device to send data to the host."""
super().connect(**kwargs)
self._configure_flow_control(clear_to_send=True)
|
def connect(self, **kwargs):
"""Connect to the device."""
super().connect()
try:
self._open()
except usb.core.USBError as err:
LOGGER.warning("report: failed to open right away, will close first")
LOGGER.debug(err, exc_info=True)
self._close()
self._open()
finally:
self.device.release()
|
https://github.com/liquidctl/liquidctl/issues/42
|
# extra/liquiddump --product 0xb200 --interval 3
{"Asetek 690LC (assuming EVGA CLC)": [["Liquid temperature", 28.8, "\u00b0C"], ["Fan speed", 420, "rpm"], ["Pump speed", 2670, "rpm"], ["Firmware version", "2.10.0.0", ""]]}
{"Asetek 690LC (assuming EVGA CLC)": [["Liquid temperature", 28.8, "\u00b0C"], ["Fan speed", 480, "rpm"], ["Pump speed", 2610, "rpm"], ["Firmware version", "2.10.0.0", ""]]}
{"Asetek 690LC (assuming EVGA CLC)": [["Liquid temperature", 28.8, "\u00b0C"], ["Fan speed", 420, "rpm"], ["Pump speed", 2640, "rpm"], ["Firmware version", "2.10.0.0", ""]]}
Unexpected error
Traceback (most recent call last):
File "extra/liquiddump", line 92, in <module>
status[d.description] = d.get_status()
File "/usr/lib/python3.7/site-packages/liquidctl/driver/asetek.py", line 221, in get_status
msg = self._end_transaction_and_read()
File "/usr/lib/python3.7/site-packages/liquidctl/driver/asetek.py", line 140, in _end_transaction_and_read
msg = self.device.read(_READ_ENDPOINT, _READ_LENGTH, _READ_TIMEOUT)
File "/usr/lib/python3.7/site-packages/liquidctl/driver/usb.py", line 260, in read
return self.usbdev.read(endpoint, length, timeout=timeout)
File "/usr/lib/python3.7/site-packages/usb/core.py", line 988, in read
self.__get_timeout(timeout))
File "/usr/lib/python3.7/site-packages/usb/backend/libusb1.py", line 833, in bulk_read
timeout)
File "/usr/lib/python3.7/site-packages/usb/backend/libusb1.py", line 936, in __read
_check(retval)
File "/usr/lib/python3.7/site-packages/usb/backend/libusb1.py", line 595, in _check
raise USBError(_strerror(ret), ret, _libusb_errno[ret])
usb.core.USBError: [Errno 110] Operation timed out
$ echo $?
0
|
usb.core.USBError
|
def disconnect(self, **kwargs):
"""Disconnect from the device.
Implementation note: unlike SI_Close is supposed to do,ΒΉ do not send
_USBXPRESS_NOT_CLEAR_TO_SEND to the device. This allows one program to
disconnect without sotping reads from another.
Surrounding device.read() with _USBXPRESS_[NOT_]CLEAR_TO_SEND would
make more sense, but there seems to be a yet unknown minimum delay
necessary for that to work well.
ΒΉ https://github.com/craigshelley/SiUSBXp/blob/master/SiUSBXp.c
"""
super().disconnect(**kwargs)
|
def disconnect(self, **kwargs):
"""Disconnect from the device."""
self._close()
super().disconnect()
self.device.release()
|
https://github.com/liquidctl/liquidctl/issues/42
|
# extra/liquiddump --product 0xb200 --interval 3
{"Asetek 690LC (assuming EVGA CLC)": [["Liquid temperature", 28.8, "\u00b0C"], ["Fan speed", 420, "rpm"], ["Pump speed", 2670, "rpm"], ["Firmware version", "2.10.0.0", ""]]}
{"Asetek 690LC (assuming EVGA CLC)": [["Liquid temperature", 28.8, "\u00b0C"], ["Fan speed", 480, "rpm"], ["Pump speed", 2610, "rpm"], ["Firmware version", "2.10.0.0", ""]]}
{"Asetek 690LC (assuming EVGA CLC)": [["Liquid temperature", 28.8, "\u00b0C"], ["Fan speed", 420, "rpm"], ["Pump speed", 2640, "rpm"], ["Firmware version", "2.10.0.0", ""]]}
Unexpected error
Traceback (most recent call last):
File "extra/liquiddump", line 92, in <module>
status[d.description] = d.get_status()
File "/usr/lib/python3.7/site-packages/liquidctl/driver/asetek.py", line 221, in get_status
msg = self._end_transaction_and_read()
File "/usr/lib/python3.7/site-packages/liquidctl/driver/asetek.py", line 140, in _end_transaction_and_read
msg = self.device.read(_READ_ENDPOINT, _READ_LENGTH, _READ_TIMEOUT)
File "/usr/lib/python3.7/site-packages/liquidctl/driver/usb.py", line 260, in read
return self.usbdev.read(endpoint, length, timeout=timeout)
File "/usr/lib/python3.7/site-packages/usb/core.py", line 988, in read
self.__get_timeout(timeout))
File "/usr/lib/python3.7/site-packages/usb/backend/libusb1.py", line 833, in bulk_read
timeout)
File "/usr/lib/python3.7/site-packages/usb/backend/libusb1.py", line 936, in __read
_check(retval)
File "/usr/lib/python3.7/site-packages/usb/backend/libusb1.py", line 595, in _check
raise USBError(_strerror(ret), ret, _libusb_errno[ret])
usb.core.USBError: [Errno 110] Operation timed out
$ echo $?
0
|
usb.core.USBError
|
def dump(self):
"""
Returns the string that represents the nyan file.
"""
fileinfo_str = f"# NYAN FILE\nversion {FILE_VERSION}\n\n"
import_str = ""
objects_str = ""
for nyan_object in self.nyan_objects:
objects_str += nyan_object.dump(import_tree=self.import_tree)
# Removes one empty newline at the end of the objects definition
objects_str = objects_str[:-1]
import_aliases = self.import_tree.get_import_dict()
self.import_tree.clear_marks()
for alias, fqon in import_aliases.items():
import_str += "import "
import_str += ".".join(fqon)
import_str += f" as {alias}\n"
import_str += "\n"
output_str = fileinfo_str + import_str + objects_str
return output_str
|
def dump(self):
"""
Returns the string that represents the nyan file.
"""
output_str = f"# NYAN FILE\nversion {FILE_VERSION}\n\n"
import_aliases = self.import_tree.establish_import_dict(
self, ignore_names=["type", "types"]
)
for alias, fqon in import_aliases.items():
output_str += "import "
output_str += ".".join(fqon)
output_str += f" as {alias}\n"
output_str += "\n"
for nyan_object in self.nyan_objects:
output_str += nyan_object.dump(import_tree=self.import_tree)
self.import_tree.clear_marks()
# Removes one empty line at the end of the file
output_str = output_str[:-1]
return output_str
|
https://github.com/SFTtech/openage/issues/1355
|
INFO [py] launching openage v0.4.1-377-g2be30ba76
INFO [py] compiled by GNU 10.2.0
Do you want to convert assets? [Y/n]
Y
Should we call wine to determine an AOE installation? [Y/n]
n
Could not find any installation directory automatically.
Please enter an AOE2 install path manually.
/home/schatzi/nvme/Steam/steamapps/common/AoE2DE/
converting from '/home/schatzi/nvme/Steam/steamapps/common/AoE2DE'
INFO [py] Game edition detected:
INFO [py] * Age of Empires 2: Definitive Edition
INFO [py] converting metadata
INFO [py] [0] palette
INFO [py] [1] empires.dat
INFO [py] Starting conversion...
INFO [py] Extracting Genie data...
INFO [py] Creating API-like objects...
INFO [py] Linking API-like objects...
INFO [py] Generating auxiliary objects...
INFO [py] Creating nyan objects...
Traceback (most recent call last):
File "/usr/bin/openage", line 15, in <module>
main()
File "/usr/lib/python3.9/site-packages/openage/__main__.py", line 132, in main
return args.entrypoint(args, cli.error)
File "/usr/lib/python3.9/site-packages/openage/game/main.py", line 71, in main
used_asset_path = convert_assets(
File "/usr/lib/python3.9/site-packages/openage/convert/main.py", line 100, in convert_assets
for current_item in convert(args):
File "/usr/lib/python3.9/site-packages/openage/convert/tool/driver.py", line 48, in convert
yield from convert_metadata(args)
File "/usr/lib/python3.9/site-packages/openage/convert/tool/driver.py", line 96, in convert_metadata
modpacks = args.converter.convert(gamespec,
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/processor.py", line 63, in convert
modpacks = cls._post_processor(dataset)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/processor.py", line 153, in _post_processor
DE2NyanSubprocessor.convert(full_data_set)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/nyan_subprocessor.py", line 38, in convert
cls._process_game_entities(gamedata)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/nyan_subprocessor.py", line 120, in _process_game_entities
cls.tech_group_to_tech(tech_group)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/nyan_subprocessor.py", line 596, in tech_group_to_tech
patches.extend(DE2TechSubprocessor.get_patches(tech_group))
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/tech_subprocessor.py", line 147, in get_patches
patches.extend(cls.resource_modify_effect(converter_group,
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/tech_subprocessor.py", line 276, in resource_modify_effect
upgrade_func = DE2TechSubprocessor.upgrade_resource_funcs[resource_id]
KeyError: 208
|
KeyError
|
def convert_assets(assets, args, srcdir=None, prev_source_dir_path=None):
"""
Perform asset conversion.
Requires original assets and stores them in usable and free formats.
assets must be a filesystem-like object pointing at the game's asset dir.
srcdir must be None, or point at some source directory.
If gen_extra_files is True, some more files, mostly for debugging purposes,
are created.
This method prepares srcdir and targetdir to allow a pleasant, unified
conversion experience, then passes them to .driver.convert().
"""
# acquire conversion source directory
if srcdir is None:
srcdir = acquire_conversion_source_dir(prev_source_dir_path)
converted_path = assets / "converted"
converted_path.mkdirs()
targetdir = DirectoryCreator(converted_path).root
# Set compression level for media output if it was not set
if "compression_level" not in vars(args):
args.compression_level = 1
# Set verbosity for debug output
if "debug_info" not in vars(args) or not args.debug_info:
if args.devmode:
args.debug_info = 3
else:
args.debug_info = 0
# add a dir for debug info
debug_log_path = (
converted_path / "debug" / datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
)
debugdir = DirectoryCreator(debug_log_path).root
args.debugdir = AccessSynchronizer(debugdir).root
# Create CLI args info
debug_cli_args(args.debugdir, args.debug_info, args)
# Initialize game versions data
auxiliary_files_dir = args.cfg_dir / "converter" / "games"
args.avail_game_eds, args.avail_game_exps = create_version_objects(
auxiliary_files_dir
)
# Acquire game version info
args.game_version = get_game_version(
srcdir, args.avail_game_eds, args.avail_game_exps
)
debug_game_version(args.debugdir, args.debug_info, args)
# Mount assets into conversion folder
data_dir = mount_asset_dirs(srcdir, args.game_version)
if not data_dir:
return None
# make srcdir and targetdir safe for threaded conversion
args.srcdir = AccessSynchronizer(data_dir).root
args.targetdir = AccessSynchronizer(targetdir).root
# Create mountpoint info
debug_mounts(args.debugdir, args.debug_info, args)
def flag(name):
"""
Convenience function for accessing boolean flags in args.
Flags default to False if they don't exist.
"""
return getattr(args, name, False)
args.flag = flag
# import here so codegen.py doesn't depend on it.
from .tool.driver import convert
converted_count = 0
total_count = None
for current_item in convert(args):
if isinstance(current_item, int):
# convert is informing us about the estimated number of remaining
# items.
total_count = current_item + converted_count
continue
# TODO a GUI would be nice here.
if total_count is None:
info("[%s] %s", converted_count, current_item)
else:
info("[%s] %s", format_progress(converted_count, total_count), current_item)
converted_count += 1
# clean args
del args.srcdir
del args.targetdir
return data_dir.resolve_native_path()
|
def convert_assets(assets, args, srcdir=None, prev_source_dir_path=None):
"""
Perform asset conversion.
Requires original assets and stores them in usable and free formats.
assets must be a filesystem-like object pointing at the game's asset dir.
srcdir must be None, or point at some source directory.
If gen_extra_files is True, some more files, mostly for debugging purposes,
are created.
This method prepares srcdir and targetdir to allow a pleasant, unified
conversion experience, then passes them to .driver.convert().
"""
# acquire conversion source directory
if srcdir is None:
srcdir = acquire_conversion_source_dir(prev_source_dir_path)
converted_path = assets / "converted"
converted_path.mkdirs()
targetdir = DirectoryCreator(converted_path).root
# Set compression level for media output if it was not set
if "compression_level" not in vars(args):
args.compression_level = 1
# Set verbosity for debug output
if "debug_info" not in vars(args):
if args.devmode:
args.debug_info = 3
else:
args.debug_info = 0
# add a dir for debug info
debug_log_path = (
converted_path / "debug" / datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
)
debugdir = DirectoryCreator(debug_log_path).root
args.debugdir = AccessSynchronizer(debugdir).root
# Create CLI args info
debug_cli_args(args.debugdir, args.debug_info, args)
# Initialize game versions data
auxiliary_files_dir = args.cfg_dir / "converter" / "games"
args.avail_game_eds, args.avail_game_exps = create_version_objects(
auxiliary_files_dir
)
# Acquire game version info
args.game_version = get_game_version(
srcdir, args.avail_game_eds, args.avail_game_exps
)
debug_game_version(args.debugdir, args.debug_info, args)
# Mount assets into conversion folder
data_dir = mount_asset_dirs(srcdir, args.game_version)
if not data_dir:
return None
# make srcdir and targetdir safe for threaded conversion
args.srcdir = AccessSynchronizer(data_dir).root
args.targetdir = AccessSynchronizer(targetdir).root
# Create mountpoint info
debug_mounts(args.debugdir, args.debug_info, args)
def flag(name):
"""
Convenience function for accessing boolean flags in args.
Flags default to False if they don't exist.
"""
return getattr(args, name, False)
args.flag = flag
# import here so codegen.py doesn't depend on it.
from .tool.driver import convert
converted_count = 0
total_count = None
for current_item in convert(args):
if isinstance(current_item, int):
# convert is informing us about the estimated number of remaining
# items.
total_count = current_item + converted_count
continue
# TODO a GUI would be nice here.
if total_count is None:
info("[%s] %s", converted_count, current_item)
else:
info("[%s] %s", format_progress(converted_count, total_count), current_item)
converted_count += 1
# clean args
del args.srcdir
del args.targetdir
return data_dir.resolve_native_path()
|
https://github.com/SFTtech/openage/issues/1355
|
INFO [py] launching openage v0.4.1-377-g2be30ba76
INFO [py] compiled by GNU 10.2.0
Do you want to convert assets? [Y/n]
Y
Should we call wine to determine an AOE installation? [Y/n]
n
Could not find any installation directory automatically.
Please enter an AOE2 install path manually.
/home/schatzi/nvme/Steam/steamapps/common/AoE2DE/
converting from '/home/schatzi/nvme/Steam/steamapps/common/AoE2DE'
INFO [py] Game edition detected:
INFO [py] * Age of Empires 2: Definitive Edition
INFO [py] converting metadata
INFO [py] [0] palette
INFO [py] [1] empires.dat
INFO [py] Starting conversion...
INFO [py] Extracting Genie data...
INFO [py] Creating API-like objects...
INFO [py] Linking API-like objects...
INFO [py] Generating auxiliary objects...
INFO [py] Creating nyan objects...
Traceback (most recent call last):
File "/usr/bin/openage", line 15, in <module>
main()
File "/usr/lib/python3.9/site-packages/openage/__main__.py", line 132, in main
return args.entrypoint(args, cli.error)
File "/usr/lib/python3.9/site-packages/openage/game/main.py", line 71, in main
used_asset_path = convert_assets(
File "/usr/lib/python3.9/site-packages/openage/convert/main.py", line 100, in convert_assets
for current_item in convert(args):
File "/usr/lib/python3.9/site-packages/openage/convert/tool/driver.py", line 48, in convert
yield from convert_metadata(args)
File "/usr/lib/python3.9/site-packages/openage/convert/tool/driver.py", line 96, in convert_metadata
modpacks = args.converter.convert(gamespec,
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/processor.py", line 63, in convert
modpacks = cls._post_processor(dataset)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/processor.py", line 153, in _post_processor
DE2NyanSubprocessor.convert(full_data_set)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/nyan_subprocessor.py", line 38, in convert
cls._process_game_entities(gamedata)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/nyan_subprocessor.py", line 120, in _process_game_entities
cls.tech_group_to_tech(tech_group)
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/nyan_subprocessor.py", line 596, in tech_group_to_tech
patches.extend(DE2TechSubprocessor.get_patches(tech_group))
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/tech_subprocessor.py", line 147, in get_patches
patches.extend(cls.resource_modify_effect(converter_group,
File "/usr/lib/python3.9/site-packages/openage/convert/processor/conversion/de2/tech_subprocessor.py", line 276, in resource_modify_effect
upgrade_func = DE2TechSubprocessor.upgrade_resource_funcs[resource_id]
KeyError: 208
|
KeyError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.