after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def _parse_args(self, *args, **kwargs):
"""
Parses an args list for data-header pairs. args can contain any
mixture of the following entries:
* tuples of data,header
* data, header not in a tuple
* filename, which will be read
* directory, from which all files will be read
* glob, from which all files will be read
* url, which will be downloaded and read
* lists containing any of the above.
Example
-------
self._parse_args(data, header,
(data, header),
['file1', 'file2', 'file3'],
'file4',
'directory1',
'*.fits')
"""
data_header_pairs = list()
already_maps = list()
# Account for nested lists of items
args = expand_list(args)
# For each of the arguments, handle each of the cases
i = 0
while i < len(args):
arg = args[i]
# Data-header pair in a tuple
if (
(type(arg) in [tuple, list])
and len(arg) == 2
and isinstance(arg[0], np.ndarray)
and self._validate_meta(arg[1])
):
arg[1] = OrderedDict(arg[1])
data_header_pairs.append(arg)
# Data-header pair not in a tuple
elif isinstance(arg, np.ndarray) and self._validate_meta(args[i + 1]):
pair = (args[i], OrderedDict(args[i + 1]))
data_header_pairs.append(pair)
i += 1 # an extra increment to account for the data-header pairing
# File name
elif isinstance(arg, six.string_types) and os.path.isfile(
os.path.expanduser(arg)
):
path = os.path.expanduser(arg)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# Directory
elif isinstance(arg, six.string_types) and os.path.isdir(
os.path.expanduser(arg)
):
path = os.path.expanduser(arg)
files = [os.path.join(path, elem) for elem in os.listdir(path)]
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Glob
elif isinstance(arg, six.string_types) and "*" in arg:
files = glob.glob(os.path.expanduser(arg))
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Already a Map
elif isinstance(arg, GenericMap):
already_maps.append(arg)
# A URL
elif isinstance(arg, six.string_types) and _is_url(arg):
default_dir = sunpy.config.get("downloads", "download_dir")
url = arg
path = download_file(url, default_dir)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# A database Entry
elif isinstance(arg, DatabaseEntry):
data_header_pairs += self._read_file(arg.path, **kwargs)
else:
raise ValueError("File not found or invalid input")
i += 1
# TODO:
# In the end, if there are already maps it should be put in the same
# order as the input, currently they are not.
return data_header_pairs, already_maps
|
def _parse_args(self, *args, **kwargs):
"""
Parses an args list for data-header pairs. args can contain any mixture
of the following entries:
* tuples of data,header
* data, header not in a tuple
* filename, which will be read
* directory, from which all files will be read
* glob, from which all files will be read
* url, which will be downloaded and read
* lists containing any of the above.
Example
-------
self._parse_args(data, header,
(data, header),
['file1', 'file2', 'file3'],
'file4',
'directory1',
'*.fits')
"""
data_header_pairs = list()
already_maps = list()
# Account for nested lists of items
args = expand_list(args)
# For each of the arguments, handle each of the cases
i = 0
while i < len(args):
arg = args[i]
# Data-header pair in a tuple
if (
(type(arg) in [tuple, list])
and len(arg) == 2
and isinstance(arg[0], np.ndarray)
and isinstance(arg[1], dict)
):
data_header_pairs.append(arg)
# Data-header pair not in a tuple
elif isinstance(arg, np.ndarray) and isinstance(args[i + 1], dict):
pair = (args[i], args[i + 1])
data_header_pairs.append(pair)
i += 1 # an extra increment to account for the data-header pairing
# File name
elif isinstance(arg, six.string_types) and os.path.isfile(
os.path.expanduser(arg)
):
path = os.path.expanduser(arg)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# Directory
elif isinstance(arg, six.string_types) and os.path.isdir(
os.path.expanduser(arg)
):
path = os.path.expanduser(arg)
files = [os.path.join(path, elem) for elem in os.listdir(path)]
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Glob
elif isinstance(arg, six.string_types) and "*" in arg:
files = glob.glob(os.path.expanduser(arg))
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Already a Map
elif isinstance(arg, GenericMap):
already_maps.append(arg)
# A URL
elif isinstance(arg, six.string_types) and _is_url(arg):
default_dir = sunpy.config.get("downloads", "download_dir")
url = arg
path = download_file(url, default_dir)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# A database Entry
elif isinstance(arg, DatabaseEntry):
data_header_pairs += self._read_file(arg.path, **kwargs)
else:
raise ValueError("File not found or invalid input")
i += 1
# TODO:
# In the end, if there are already maps it should be put in the same
# order as the input, currently they are not.
return data_header_pairs, already_maps
|
https://github.com/sunpy/sunpy/issues/1664
|
f = fits.open(files[0])
data = f[0].data[0]
header = f[0].header
m = sunpy.map.Map((data, header))
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-25-656e1b1b7829> in <module>()
2 data = f[0].data[0]
3 header = f[0].header
----> 4 m = sunpy.map.Map((data, header))
/home/stuart/GitHub/sunpy/sunpy/map/map_factory.py in __call__(self, *args, **kwargs)
239 silence_errors = kwargs.pop('silence_errors', False)
240
--> 241 data_header_pairs, already_maps = self._parse_args(*args, **kwargs)
242
243 new_maps = list()
/home/stuart/GitHub/sunpy/sunpy/map/map_factory.py in _parse_args(self, *args, **kwargs)
202
203 else:
--> 204 raise ValueError("File not found or invalid input")
205
206 i += 1
ValueError: File not found or invalid input
|
ValueError
|
def _parse_args(self, *args, **kwargs):
"""
Parses an args list for data-header pairs. args can contain any
mixture of the following entries:
* tuples of data,header
* data, header not in a tuple
* filename, which will be read
* directory, from which all files will be read
* glob, from which all files will be read
* url, which will be downloaded and read
* lists containing any of the above.
Example
-------
self._parse_args(data, header,
(data, header),
['file1', 'file2', 'file3'],
'file4',
'directory1',
'*.fits')
"""
data_header_pairs = list()
already_maps = list()
# Account for nested lists of items
args = expand_list(args)
# For each of the arguments, handle each of the cases
i = 0
while i < len(args):
arg = args[i]
# Data-header pair in a tuple
if (
(type(arg) in [tuple, list])
and len(arg) == 2
and isinstance(arg[0], np.ndarray)
and self._validate_meta(arg[1])
):
arg[1] = OrderedDict(arg[1])
data_header_pairs.append(arg)
# Data-header pair not in a tuple
elif isinstance(arg, np.ndarray) and self._validate_meta(args[i + 1]):
pair = (args[i], OrderedDict(args[i + 1]))
data_header_pairs.append(pair)
i += 1 # an extra increment to account for the data-header pairing
# File name
elif isinstance(arg, basestring) and os.path.isfile(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# Directory
elif isinstance(arg, basestring) and os.path.isdir(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
files = [os.path.join(path, elem) for elem in os.listdir(path)]
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Glob
elif isinstance(arg, basestring) and "*" in arg:
files = glob.glob(os.path.expanduser(arg))
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Already a Map
elif isinstance(arg, GenericMap):
already_maps.append(arg)
# A URL
elif isinstance(arg, basestring) and _is_url(arg):
default_dir = sunpy.config.get("downloads", "download_dir")
url = arg
path = download_file(url, default_dir)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# A database Entry
elif isinstance(arg, DatabaseEntry):
data_header_pairs += self._read_file(arg.path, **kwargs)
else:
raise ValueError("File not found or invalid input")
i += 1
# TODO:
# In the end, if there are already maps it should be put in the same
# order as the input, currently they are not.
return data_header_pairs, already_maps
|
def _parse_args(self, *args, **kwargs):
"""
Parses an args list for data-header pairs. args can contain any mixture
of the following entries:
* tuples of data,header
* data, header not in a tuple
* filename, which will be read
* directory, from which all files will be read
* glob, from which all files will be read
* url, which will be downloaded and read
* lists containing any of the above.
Example
-------
self._parse_args(data, header,
(data, header),
['file1', 'file2', 'file3'],
'file4',
'directory1',
'*.fits')
"""
data_header_pairs = list()
already_maps = list()
# Account for nested lists of items
args = expand_list(args)
# For each of the arguments, handle each of the cases
i = 0
while i < len(args):
arg = args[i]
# Data-header pair in a tuple
if (
(type(arg) in [tuple, list])
and len(arg) == 2
and isinstance(arg[0], np.ndarray)
and isinstance(arg[1], dict)
):
data_header_pairs.append(arg)
# Data-header pair not in a tuple
elif isinstance(arg, np.ndarray) and isinstance(args[i + 1], dict):
pair = (args[i], args[i + 1])
data_header_pairs.append(pair)
i += 1 # an extra increment to account for the data-header pairing
# File name
elif isinstance(arg, basestring) and os.path.isfile(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# Directory
elif isinstance(arg, basestring) and os.path.isdir(os.path.expanduser(arg)):
path = os.path.expanduser(arg)
files = [os.path.join(path, elem) for elem in os.listdir(path)]
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Glob
elif isinstance(arg, basestring) and "*" in arg:
files = glob.glob(os.path.expanduser(arg))
for afile in files:
data_header_pairs += self._read_file(afile, **kwargs)
# Already a Map
elif isinstance(arg, GenericMap):
already_maps.append(arg)
# A URL
elif isinstance(arg, basestring) and _is_url(arg):
default_dir = sunpy.config.get("downloads", "download_dir")
url = arg
path = download_file(url, default_dir)
pairs = self._read_file(path, **kwargs)
data_header_pairs += pairs
# A database Entry
elif isinstance(arg, DatabaseEntry):
data_header_pairs += self._read_file(arg.path, **kwargs)
else:
raise ValueError("File not found or invalid input")
i += 1
# TODO:
# In the end, if there are already maps it should be put in the same
# order as the input, currently they are not.
return data_header_pairs, already_maps
|
https://github.com/sunpy/sunpy/issues/1664
|
f = fits.open(files[0])
data = f[0].data[0]
header = f[0].header
m = sunpy.map.Map((data, header))
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-25-656e1b1b7829> in <module>()
2 data = f[0].data[0]
3 header = f[0].header
----> 4 m = sunpy.map.Map((data, header))
/home/stuart/GitHub/sunpy/sunpy/map/map_factory.py in __call__(self, *args, **kwargs)
239 silence_errors = kwargs.pop('silence_errors', False)
240
--> 241 data_header_pairs, already_maps = self._parse_args(*args, **kwargs)
242
243 new_maps = list()
/home/stuart/GitHub/sunpy/sunpy/map/map_factory.py in _parse_args(self, *args, **kwargs)
202
203 else:
--> 204 raise ValueError("File not found or invalid input")
205
206 i += 1
ValueError: File not found or invalid input
|
ValueError
|
def from_file(cls, filename):
filename = os.path.expanduser(filename)
header, data = cls._parse_filepath(filename)
if data.empty == True:
raise ValueError("No data found!")
else:
return cls(data, header)
|
def from_file(cls, filename):
filename = os.path.expanduser(filename)
header, data = cls._parse_filepath(filename)
return cls(data, header)
|
https://github.com/sunpy/sunpy/issues/471
|
In[15] import sunpy
In[16] from sunpy.time import TimeRange
In[17] times = TimeRange('2010/03/04 00:10', '2010/03/04 00:20')
In[18] goes = sunpy.lightcurve.GOESLightCurve.create(times)
In[19] goes.peek()
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-19-0dabb23c4f23> in <module>()
----> 1 goes.peek()
/usr/lib/python3.3/site-packages/sunpy-0.2.0-py3.3-linux-x86_64.egg/sunpy/lightcurve/sources/goes.py in peek(self, title, **kwargs)
40 dates = matplotlib.dates.date2num(self.data.index)
41
---> 42 axes.plot_date(dates, self.data['A_FLUX'], '-',
43 label='0.5--4.0 $\AA$', color='blue', lw=2)
44 axes.plot_date(dates, self.data['B_FLUX'], '-',
/usr/lib/python3.3/site-packages/pandas/core/frame.py in __getitem__(self, key)
1926 else:
1927 # get column
-> 1928 return self._get_item_cache(key)
1929
1930 def _getitem_slice(self, key):
/usr/lib/python3.3/site-packages/pandas/core/generic.py in _get_item_cache(self, item)
568 return cache[item]
569 except Exception:
--> 570 values = self._data.get(item)
571 res = self._box_item_values(item, values)
572 cache[item] = res
/usr/lib/python3.3/site-packages/pandas/core/internals.py in get(self, item)
1382
1383 def get(self, item):
-> 1384 _, block = self._find_block(item)
1385 return block.get(item)
1386
/usr/lib/python3.3/site-packages/pandas/core/internals.py in _find_block(self, item)
1524
1525 def _find_block(self, item):
-> 1526 self._check_have(item)
1527 for i, block in enumerate(self.blocks):
1528 if item in block:
/usr/lib/python3.3/site-packages/pandas/core/internals.py in _check_have(self, item)
1531 def _check_have(self, item):
1532 if item not in self.items:
-> 1533 raise KeyError('no item named %s' % com.pprint_thing(item))
1534
1535 def reindex_axis(self, new_axis, method=None, axis=0, copy=True):
KeyError: 'no item named A_FLUX'
In[20] goes.data
Out[20]
Empty DataFrame
Columns: []
Index: []
In[21] goes.header
Out[21] ''
|
KeyError
|
def _parse_csv(filepath):
"""Parses an GOES CSV"""
with open(filepath, "rb") as fp:
return "", read_csv(fp, sep=",", index_col=0, parse_dates=True)
|
def _parse_csv(filepath):
"""Parses an GOES CSV"""
with open(filepath, "rb") as fp:
# @todo: check for:
# "No-Data-Found for the time period requested..." error
return "", read_csv(fp, sep=",", index_col=0, parse_dates=True)
|
https://github.com/sunpy/sunpy/issues/471
|
In[15] import sunpy
In[16] from sunpy.time import TimeRange
In[17] times = TimeRange('2010/03/04 00:10', '2010/03/04 00:20')
In[18] goes = sunpy.lightcurve.GOESLightCurve.create(times)
In[19] goes.peek()
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-19-0dabb23c4f23> in <module>()
----> 1 goes.peek()
/usr/lib/python3.3/site-packages/sunpy-0.2.0-py3.3-linux-x86_64.egg/sunpy/lightcurve/sources/goes.py in peek(self, title, **kwargs)
40 dates = matplotlib.dates.date2num(self.data.index)
41
---> 42 axes.plot_date(dates, self.data['A_FLUX'], '-',
43 label='0.5--4.0 $\AA$', color='blue', lw=2)
44 axes.plot_date(dates, self.data['B_FLUX'], '-',
/usr/lib/python3.3/site-packages/pandas/core/frame.py in __getitem__(self, key)
1926 else:
1927 # get column
-> 1928 return self._get_item_cache(key)
1929
1930 def _getitem_slice(self, key):
/usr/lib/python3.3/site-packages/pandas/core/generic.py in _get_item_cache(self, item)
568 return cache[item]
569 except Exception:
--> 570 values = self._data.get(item)
571 res = self._box_item_values(item, values)
572 cache[item] = res
/usr/lib/python3.3/site-packages/pandas/core/internals.py in get(self, item)
1382
1383 def get(self, item):
-> 1384 _, block = self._find_block(item)
1385 return block.get(item)
1386
/usr/lib/python3.3/site-packages/pandas/core/internals.py in _find_block(self, item)
1524
1525 def _find_block(self, item):
-> 1526 self._check_have(item)
1527 for i, block in enumerate(self.blocks):
1528 if item in block:
/usr/lib/python3.3/site-packages/pandas/core/internals.py in _check_have(self, item)
1531 def _check_have(self, item):
1532 if item not in self.items:
-> 1533 raise KeyError('no item named %s' % com.pprint_thing(item))
1534
1535 def reindex_axis(self, new_axis, method=None, axis=0, copy=True):
KeyError: 'no item named A_FLUX'
In[20] goes.data
Out[20]
Empty DataFrame
Columns: []
Index: []
In[21] goes.header
Out[21] ''
|
KeyError
|
def to_angstrom(value, unit):
"""Given a value with a unit (given in a string), convert to angstroms"""
value_quantity = value * units.Unit(unit)
return value_quantity.to(units.angstrom, equivalencies=units.spectral()).value
|
def to_angstrom(value, unit):
C = 299792458.0
ANGSTROM = units["Angstrom"][1]
try:
type_, n = units[unit]
except KeyError:
raise ValueError("Cannot convert %s to Angstrom" % unit)
if type_ == "wavelength":
x = n / ANGSTROM
return value / x
elif type_ == "frequency":
x = 1 / ANGSTROM / n
return x * (C / value)
elif type_ == "energy":
x = 1 / (ANGSTROM / 1e-2) / n
return x * (1 / (8065.53 * value))
else:
raise ValueError("Unable to convert %s to Angstrom" % type_)
|
https://github.com/sunpy/sunpy/issues/471
|
In[15] import sunpy
In[16] from sunpy.time import TimeRange
In[17] times = TimeRange('2010/03/04 00:10', '2010/03/04 00:20')
In[18] goes = sunpy.lightcurve.GOESLightCurve.create(times)
In[19] goes.peek()
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-19-0dabb23c4f23> in <module>()
----> 1 goes.peek()
/usr/lib/python3.3/site-packages/sunpy-0.2.0-py3.3-linux-x86_64.egg/sunpy/lightcurve/sources/goes.py in peek(self, title, **kwargs)
40 dates = matplotlib.dates.date2num(self.data.index)
41
---> 42 axes.plot_date(dates, self.data['A_FLUX'], '-',
43 label='0.5--4.0 $\AA$', color='blue', lw=2)
44 axes.plot_date(dates, self.data['B_FLUX'], '-',
/usr/lib/python3.3/site-packages/pandas/core/frame.py in __getitem__(self, key)
1926 else:
1927 # get column
-> 1928 return self._get_item_cache(key)
1929
1930 def _getitem_slice(self, key):
/usr/lib/python3.3/site-packages/pandas/core/generic.py in _get_item_cache(self, item)
568 return cache[item]
569 except Exception:
--> 570 values = self._data.get(item)
571 res = self._box_item_values(item, values)
572 cache[item] = res
/usr/lib/python3.3/site-packages/pandas/core/internals.py in get(self, item)
1382
1383 def get(self, item):
-> 1384 _, block = self._find_block(item)
1385 return block.get(item)
1386
/usr/lib/python3.3/site-packages/pandas/core/internals.py in _find_block(self, item)
1524
1525 def _find_block(self, item):
-> 1526 self._check_have(item)
1527 for i, block in enumerate(self.blocks):
1528 if item in block:
/usr/lib/python3.3/site-packages/pandas/core/internals.py in _check_have(self, item)
1531 def _check_have(self, item):
1532 if item not in self.items:
-> 1533 raise KeyError('no item named %s' % com.pprint_thing(item))
1534
1535 def reindex_axis(self, new_axis, method=None, axis=0, copy=True):
KeyError: 'no item named A_FLUX'
In[20] goes.data
Out[20]
Empty DataFrame
Columns: []
Index: []
In[21] goes.header
Out[21] ''
|
KeyError
|
def _call_metadata_identity_endpoint(self, request):
"""Request ID token from metadata identity endpoint.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Returns:
Tuple[str, datetime.datetime]: The ID token and the expiry of the ID token.
Raises:
google.auth.exceptions.RefreshError: If the Compute Engine metadata
service can't be reached or if the instance has no credentials.
ValueError: If extracting expiry from the obtained ID token fails.
"""
try:
id_token = _metadata.get(
request,
"instance/service-accounts/default/identity?audience={}&format=full".format(
self._target_audience
),
)
except exceptions.TransportError as caught_exc:
new_exc = exceptions.RefreshError(caught_exc)
six.raise_from(new_exc, caught_exc)
_, payload, _, _ = jwt._unverified_decode(id_token)
return id_token, datetime.datetime.fromtimestamp(payload["exp"])
|
def _call_metadata_identity_endpoint(self, request):
"""Request ID token from metadata identity endpoint.
Args:
request (google.auth.transport.Request): The object used to make
HTTP requests.
Raises:
google.auth.exceptions.RefreshError: If the Compute Engine metadata
service can't be reached or if the instance has no credentials.
ValueError: If extracting expiry from the obtained ID token fails.
"""
try:
id_token = _metadata.get(
request,
"instance/service-accounts/default/identity?audience={}&format=full".format(
self._target_audience
),
)
except exceptions.TransportError as caught_exc:
new_exc = exceptions.RefreshError(caught_exc)
six.raise_from(new_exc, caught_exc)
_, payload, _, _ = jwt._unverified_decode(id_token)
return id_token, payload["exp"]
|
https://github.com/googleapis/google-auth-library-python/issues/479
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.8/site-packages/google/auth/credentials.py", line 66, in expired
skewed_expiry = self.expiry - _helpers.CLOCK_SKEW
TypeError: unsupported operand type(s) for -: 'int' and 'datetime.timedelta'
|
TypeError
|
def __init__(
self,
source_credentials,
target_principal,
target_scopes,
delegates=None,
lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
):
"""
Args:
source_credentials (google.auth.Credentials): The source credential
used as to acquire the impersonated credentials.
target_principal (str): The service account to impersonate.
target_scopes (Sequence[str]): Scopes to request during the
authorization grant.
delegates (Sequence[str]): The chained list of delegates required
to grant the final access_token. If set, the sequence of
identities must have "Service Account Token Creator" capability
granted to the prceeding identity. For example, if set to
[serviceAccountB, serviceAccountC], the source_credential
must have the Token Creator role on serviceAccountB.
serviceAccountB must have the Token Creator on
serviceAccountC.
Finally, C must have Token Creator on target_principal.
If left unset, source_credential must have that role on
target_principal.
lifetime (int): Number of seconds the delegated credential should
be valid for (upto 3600).
"""
super(Credentials, self).__init__()
self._source_credentials = copy.copy(source_credentials)
# Service account source credentials must have the _IAM_SCOPE
# added to refresh correctly. User credentials cannot have
# their original scopes modified.
if isinstance(self._source_credentials, credentials.Scoped):
self._source_credentials = self._source_credentials.with_scopes(_IAM_SCOPE)
self._target_principal = target_principal
self._target_scopes = target_scopes
self._delegates = delegates
self._lifetime = lifetime
self.token = None
self.expiry = _helpers.utcnow()
|
def __init__(
self,
source_credentials,
target_principal,
target_scopes,
delegates=None,
lifetime=_DEFAULT_TOKEN_LIFETIME_SECS,
):
"""
Args:
source_credentials (google.auth.Credentials): The source credential
used as to acquire the impersonated credentials.
target_principal (str): The service account to impersonate.
target_scopes (Sequence[str]): Scopes to request during the
authorization grant.
delegates (Sequence[str]): The chained list of delegates required
to grant the final access_token. If set, the sequence of
identities must have "Service Account Token Creator" capability
granted to the prceeding identity. For example, if set to
[serviceAccountB, serviceAccountC], the source_credential
must have the Token Creator role on serviceAccountB.
serviceAccountB must have the Token Creator on
serviceAccountC.
Finally, C must have Token Creator on target_principal.
If left unset, source_credential must have that role on
target_principal.
lifetime (int): Number of seconds the delegated credential should
be valid for (upto 3600).
"""
super(Credentials, self).__init__()
self._source_credentials = copy.copy(source_credentials)
self._source_credentials._scopes = _IAM_SCOPE
self._target_principal = target_principal
self._target_scopes = target_scopes
self._delegates = delegates
self._lifetime = lifetime
self.token = None
self.expiry = _helpers.utcnow()
|
https://github.com/googleapis/google-auth-library-python/issues/416
|
Traceback (most recent call last):
File "main.py", line 13, in <module>
creds.refresh(Request())
File "/google/lib/python3.7/site-packages/google/auth/impersonated_credentials.py", line 218, in refresh
self._update_token(request)
File "/google/lib/python3.7/site-packages/google/auth/impersonated_credentials.py", line 234, in _update_token
self._source_credentials.refresh(request)
File "/google/lib/python3.7/site-packages/google/oauth2/credentials.py", line 152, in refresh
self._scopes,
File "/google/lib/python3.7/site-packages/google/oauth2/_client.py", line 241, in refresh_grant
response_data = _token_endpoint_request(request, token_uri, body)
File "/google/lib/python3.7/site-packages/google/oauth2/_client.py", line 115, in _token_endpoint_request
_handle_error_response(response_body)
File "/google/lib/python3.7/site-packages/google/oauth2/_client.py", line 60, in _handle_error_response
raise exceptions.RefreshError(error_details, response_body)
google.auth.exceptions.RefreshError: ('invalid_scope: Bad Request', '{\n "error": "invalid_scope",\n "error_description": "Bad Request"\n}')
|
google.auth.exceptions.RefreshError
|
def _get_data_points(
sdk_metric_record: MetricRecord, data_point_class: Type[DataPointT]
) -> List[DataPointT]:
if isinstance(sdk_metric_record.aggregator, SumAggregator):
value = sdk_metric_record.aggregator.checkpoint
elif isinstance(sdk_metric_record.aggregator, MinMaxSumCountAggregator):
# FIXME: How are values to be interpreted from this aggregator?
raise Exception("MinMaxSumCount aggregator data not supported")
elif isinstance(sdk_metric_record.aggregator, HistogramAggregator):
# FIXME: How are values to be interpreted from this aggregator?
raise Exception("Histogram aggregator data not supported")
elif isinstance(sdk_metric_record.aggregator, LastValueAggregator):
value = sdk_metric_record.aggregator.checkpoint
elif isinstance(sdk_metric_record.aggregator, ValueObserverAggregator):
value = sdk_metric_record.aggregator.checkpoint.last
return [
data_point_class(
labels=[
StringKeyValue(key=str(label_key), value=str(label_value))
for label_key, label_value in sdk_metric_record.labels
],
value=value,
start_time_unix_nano=(
sdk_metric_record.aggregator.initial_checkpoint_timestamp
),
time_unix_nano=(sdk_metric_record.aggregator.last_update_timestamp),
)
]
|
def _get_data_points(
sdk_metric: MetricRecord, data_point_class: Type[DataPointT]
) -> List[DataPointT]:
data_points = []
for (
label,
bound_counter,
) in sdk_metric.instrument.bound_instruments.items():
string_key_values = []
for label_key, label_value in label:
string_key_values.append(StringKeyValue(key=label_key, value=label_value))
for view_data in bound_counter.view_datas:
if view_data.labels == label:
data_points.append(
data_point_class(
labels=string_key_values,
value=view_data.aggregator.current,
start_time_unix_nano=(
view_data.aggregator.last_checkpoint_timestamp
),
time_unix_nano=(view_data.aggregator.last_update_timestamp),
)
)
break
return data_points
|
https://github.com/open-telemetry/opentelemetry-python/issues/1236
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/ocelotl/.pyenv/versions/3.8.3/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 48, in run
self.tick()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 60, in tick
self.exporter.export(self.meter.processor.checkpoint_set())
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 248, in export
return self._export(metrics)
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/exporter.py", line 166, in _export
request=self._translate_data(data),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 201, in _translate_data
data_points=_get_data_points(sdk_metric, data_point_class),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 71, in _get_data_points
) in sdk_metric.instrument.bound_instruments.items():
AttributeError: 'SumObserver' object has no attribute 'bound_instruments'
|
AttributeError
|
def _translate_data(self, data: Sequence[MetricRecord]) -> ExportMetricsServiceRequest:
# pylint: disable=too-many-locals,no-member
# pylint: disable=attribute-defined-outside-init
sdk_resource_instrumentation_library_metrics = {}
# The criteria to decide how to translate data is based on this table
# taken directly from OpenTelemetry Proto v0.5.0:
# TODO: Update table after the decision on:
# https://github.com/open-telemetry/opentelemetry-specification/issues/731.
# By default, metrics recording using the OpenTelemetry API are exported as
# (the table does not include MeasurementValueType to avoid extra rows):
#
# Instrument Type
# ----------------------------------------------
# Counter Sum(aggregation_temporality=delta;is_monotonic=true)
# UpDownCounter Sum(aggregation_temporality=delta;is_monotonic=false)
# ValueRecorder TBD
# SumObserver Sum(aggregation_temporality=cumulative;is_monotonic=true)
# UpDownSumObserver Sum(aggregation_temporality=cumulative;is_monotonic=false)
# ValueObserver Gauge()
for sdk_metric_record in data:
if sdk_metric_record.resource not in (
sdk_resource_instrumentation_library_metrics.keys()
):
sdk_resource_instrumentation_library_metrics[sdk_metric_record.resource] = (
InstrumentationLibraryMetrics()
)
type_class = {
int: {
"sum": {"class": IntSum, "argument": "int_sum"},
"gauge": {"class": IntGauge, "argument": "int_gauge"},
"data_point_class": IntDataPoint,
},
float: {
"sum": {"class": DoubleSum, "argument": "double_sum"},
"gauge": {
"class": DoubleGauge,
"argument": "double_gauge",
},
"data_point_class": DoubleDataPoint,
},
}
value_type = sdk_metric_record.instrument.value_type
sum_class = type_class[value_type]["sum"]["class"]
gauge_class = type_class[value_type]["gauge"]["class"]
data_point_class = type_class[value_type]["data_point_class"]
if isinstance(sdk_metric_record.instrument, Counter):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric_record, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA
),
is_monotonic=True,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric_record.instrument, UpDownCounter):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric_record, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA
),
is_monotonic=False,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric_record.instrument, (ValueRecorder)):
logger.warning("Skipping exporting of ValueRecorder metric")
continue
elif isinstance(sdk_metric_record.instrument, SumObserver):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric_record, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE
),
is_monotonic=True,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric_record.instrument, UpDownSumObserver):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric_record, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE
),
is_monotonic=False,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric_record.instrument, (ValueObserver)):
otlp_metric_data = gauge_class(
data_points=_get_data_points(sdk_metric_record, data_point_class)
)
argument = type_class[value_type]["gauge"]["argument"]
sdk_resource_instrumentation_library_metrics[
sdk_metric_record.resource
].metrics.append(
OTLPMetric(
**{
"name": sdk_metric_record.instrument.name,
"description": (sdk_metric_record.instrument.description),
"unit": sdk_metric_record.instrument.unit,
argument: otlp_metric_data,
}
)
)
return ExportMetricsServiceRequest(
resource_metrics=_get_resource_data(
sdk_resource_instrumentation_library_metrics,
ResourceMetrics,
"metrics",
)
)
|
def _translate_data(self, data: Sequence[MetricRecord]) -> ExportMetricsServiceRequest:
# pylint: disable=too-many-locals,no-member
# pylint: disable=attribute-defined-outside-init
sdk_resource_instrumentation_library_metrics = {}
# The criteria to decide how to translate data is based on this table
# taken directly from OpenTelemetry Proto v0.5.0:
# TODO: Update table after the decision on:
# https://github.com/open-telemetry/opentelemetry-specification/issues/731.
# By default, metrics recording using the OpenTelemetry API are exported as
# (the table does not include MeasurementValueType to avoid extra rows):
#
# Instrument Type
# ----------------------------------------------
# Counter Sum(aggregation_temporality=delta;is_monotonic=true)
# UpDownCounter Sum(aggregation_temporality=delta;is_monotonic=false)
# ValueRecorder TBD
# SumObserver Sum(aggregation_temporality=cumulative;is_monotonic=true)
# UpDownSumObserver Sum(aggregation_temporality=cumulative;is_monotonic=false)
# ValueObserver Gauge()
for sdk_metric in data:
if sdk_metric.resource not in (
sdk_resource_instrumentation_library_metrics.keys()
):
sdk_resource_instrumentation_library_metrics[sdk_metric.resource] = (
InstrumentationLibraryMetrics()
)
type_class = {
int: {
"sum": {"class": IntSum, "argument": "int_sum"},
"gauge": {"class": IntGauge, "argument": "int_gauge"},
"data_point_class": IntDataPoint,
},
float: {
"sum": {"class": DoubleSum, "argument": "double_sum"},
"gauge": {
"class": DoubleGauge,
"argument": "double_gauge",
},
"data_point_class": DoubleDataPoint,
},
}
value_type = sdk_metric.instrument.value_type
sum_class = type_class[value_type]["sum"]["class"]
gauge_class = type_class[value_type]["gauge"]["class"]
data_point_class = type_class[value_type]["data_point_class"]
if isinstance(sdk_metric.instrument, Counter):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA
),
is_monotonic=True,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric.instrument, UpDownCounter):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA
),
is_monotonic=False,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric.instrument, (ValueRecorder)):
logger.warning("Skipping exporting of ValueRecorder metric")
continue
elif isinstance(sdk_metric.instrument, SumObserver):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE
),
is_monotonic=True,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric.instrument, UpDownSumObserver):
otlp_metric_data = sum_class(
data_points=_get_data_points(sdk_metric, data_point_class),
aggregation_temporality=(
AggregationTemporality.AGGREGATION_TEMPORALITY_CUMULATIVE
),
is_monotonic=False,
)
argument = type_class[value_type]["sum"]["argument"]
elif isinstance(sdk_metric.instrument, (ValueObserver)):
otlp_metric_data = gauge_class(
data_points=_get_data_points(sdk_metric, data_point_class)
)
argument = type_class[value_type]["gauge"]["argument"]
sdk_resource_instrumentation_library_metrics[
sdk_metric.resource
].metrics.append(
OTLPMetric(
**{
"name": sdk_metric.instrument.name,
"description": sdk_metric.instrument.description,
"unit": sdk_metric.instrument.unit,
argument: otlp_metric_data,
}
)
)
return ExportMetricsServiceRequest(
resource_metrics=_get_resource_data(
sdk_resource_instrumentation_library_metrics,
ResourceMetrics,
"metrics",
)
)
|
https://github.com/open-telemetry/opentelemetry-python/issues/1236
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/ocelotl/.pyenv/versions/3.8.3/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 48, in run
self.tick()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 60, in tick
self.exporter.export(self.meter.processor.checkpoint_set())
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 248, in export
return self._export(metrics)
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/exporter.py", line 166, in _export
request=self._translate_data(data),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 201, in _translate_data
data_points=_get_data_points(sdk_metric, data_point_class),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 71, in _get_data_points
) in sdk_metric.instrument.bound_instruments.items():
AttributeError: 'SumObserver' object has no attribute 'bound_instruments'
|
AttributeError
|
def __init__(self, config=None):
self._lock = threading.Lock()
self.last_update_timestamp = 0
self.initial_checkpoint_timestamp = 0
self.checkpointed = True
if config is not None:
self.config = config
else:
self.config = {}
|
def __init__(self, config=None):
self._lock = threading.Lock()
self.last_update_timestamp = 0
self.last_checkpoint_timestamp = 0
if config is not None:
self.config = config
else:
self.config = {}
|
https://github.com/open-telemetry/opentelemetry-python/issues/1236
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/ocelotl/.pyenv/versions/3.8.3/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 48, in run
self.tick()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 60, in tick
self.exporter.export(self.meter.processor.checkpoint_set())
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 248, in export
return self._export(metrics)
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/exporter.py", line 166, in _export
request=self._translate_data(data),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 201, in _translate_data
data_points=_get_data_points(sdk_metric, data_point_class),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 71, in _get_data_points
) in sdk_metric.instrument.bound_instruments.items():
AttributeError: 'SumObserver' object has no attribute 'bound_instruments'
|
AttributeError
|
def update(self, value):
"""Updates the current with the new value."""
if self.checkpointed:
self.initial_checkpoint_timestamp = time_ns()
self.checkpointed = False
self.last_update_timestamp = time_ns()
|
def update(self, value):
"""Updates the current with the new value."""
self.last_update_timestamp = time_ns()
|
https://github.com/open-telemetry/opentelemetry-python/issues/1236
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/ocelotl/.pyenv/versions/3.8.3/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 48, in run
self.tick()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 60, in tick
self.exporter.export(self.meter.processor.checkpoint_set())
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 248, in export
return self._export(metrics)
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/exporter.py", line 166, in _export
request=self._translate_data(data),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 201, in _translate_data
data_points=_get_data_points(sdk_metric, data_point_class),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 71, in _get_data_points
) in sdk_metric.instrument.bound_instruments.items():
AttributeError: 'SumObserver' object has no attribute 'bound_instruments'
|
AttributeError
|
def take_checkpoint(self):
"""Stores a snapshot of the current value."""
self.checkpointed = True
|
def take_checkpoint(self):
"""Stores a snapshot of the current value."""
self.last_checkpoint_timestamp = time_ns()
|
https://github.com/open-telemetry/opentelemetry-python/issues/1236
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/ocelotl/.pyenv/versions/3.8.3/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 48, in run
self.tick()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 60, in tick
self.exporter.export(self.meter.processor.checkpoint_set())
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 248, in export
return self._export(metrics)
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/exporter.py", line 166, in _export
request=self._translate_data(data),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 201, in _translate_data
data_points=_get_data_points(sdk_metric, data_point_class),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 71, in _get_data_points
) in sdk_metric.instrument.bound_instruments.items():
AttributeError: 'SumObserver' object has no attribute 'bound_instruments'
|
AttributeError
|
def merge(self, other):
"""Combines two aggregator values."""
self.last_update_timestamp = max(
self.last_update_timestamp, other.last_update_timestamp
)
self.initial_checkpoint_timestamp = max(
self.initial_checkpoint_timestamp,
other.initial_checkpoint_timestamp,
)
|
def merge(self, other):
"""Combines two aggregator values."""
self.last_update_timestamp = max(
self.last_update_timestamp, other.last_update_timestamp
)
self.last_checkpoint_timestamp = max(
self.last_checkpoint_timestamp, other.last_checkpoint_timestamp
)
|
https://github.com/open-telemetry/opentelemetry-python/issues/1236
|
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/ocelotl/.pyenv/versions/3.8.3/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 48, in run
self.tick()
File "/home/ocelotl/codeboten/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/metrics/export/controller.py", line 60, in tick
self.exporter.export(self.meter.processor.checkpoint_set())
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 248, in export
return self._export(metrics)
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/exporter.py", line 166, in _export
request=self._translate_data(data),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 201, in _translate_data
data_points=_get_data_points(sdk_metric, data_point_class),
File "/home/ocelotl/codeboten/opentelemetry-python/exporter/opentelemetry-exporter-otlp/src/opentelemetry/exporter/otlp/metrics_exporter/__init__.py", line 71, in _get_data_points
) in sdk_metric.instrument.bound_instruments.items():
AttributeError: 'SumObserver' object has no attribute 'bound_instruments'
|
AttributeError
|
def started(self, event: monitoring.CommandStartedEvent):
"""Method to handle a pymongo CommandStartedEvent"""
if not self.is_enabled:
return
command = event.command.get(event.command_name, "")
name = DATABASE_TYPE + "." + event.command_name
statement = event.command_name
if command:
name += "." + str(command)
statement += " " + str(command)
try:
span = self._tracer.start_span(name, kind=SpanKind.CLIENT)
span.set_attribute("component", DATABASE_TYPE)
span.set_attribute("db.type", DATABASE_TYPE)
span.set_attribute("db.instance", event.database_name)
span.set_attribute("db.statement", statement)
if event.connection_id is not None:
span.set_attribute("net.peer.name", event.connection_id[0])
span.set_attribute("net.peer.port", event.connection_id[1])
# pymongo specific, not specified by spec
span.set_attribute("db.mongo.operation_id", event.operation_id)
span.set_attribute("db.mongo.request_id", event.request_id)
for attr in COMMAND_ATTRIBUTES:
_attr = event.command.get(attr)
if _attr is not None:
span.set_attribute("db.mongo." + attr, str(_attr))
# Add Span to dictionary
self._span_dict[_get_span_dict_key(event)] = span
except Exception as ex: # noqa pylint: disable=broad-except
if span is not None:
span.set_status(Status(StatusCanonicalCode.INTERNAL, str(ex)))
span.end()
self._pop_span(event)
|
def started(self, event: monitoring.CommandStartedEvent):
"""Method to handle a pymongo CommandStartedEvent"""
if not self.is_enabled:
return
command = event.command.get(event.command_name, "")
name = DATABASE_TYPE + "." + event.command_name
statement = event.command_name
if command:
name += "." + command
statement += " " + command
try:
span = self._tracer.start_span(name, kind=SpanKind.CLIENT)
span.set_attribute("component", DATABASE_TYPE)
span.set_attribute("db.type", DATABASE_TYPE)
span.set_attribute("db.instance", event.database_name)
span.set_attribute("db.statement", statement)
if event.connection_id is not None:
span.set_attribute("net.peer.name", event.connection_id[0])
span.set_attribute("net.peer.port", event.connection_id[1])
# pymongo specific, not specified by spec
span.set_attribute("db.mongo.operation_id", event.operation_id)
span.set_attribute("db.mongo.request_id", event.request_id)
for attr in COMMAND_ATTRIBUTES:
_attr = event.command.get(attr)
if _attr is not None:
span.set_attribute("db.mongo." + attr, str(_attr))
# Add Span to dictionary
self._span_dict[_get_span_dict_key(event)] = span
except Exception as ex: # noqa pylint: disable=broad-except
if span is not None:
span.set_status(Status(StatusCanonicalCode.INTERNAL, str(ex)))
span.end()
self._pop_span(event)
|
https://github.com/open-telemetry/opentelemetry-python/issues/1012
|
Traceback (most recent call last):
File "/Users/drubin/cargurus/analytics/snowblower/.venv/lib/python3.7/site-packages/pymongo/monitoring.py", line 1266, in publish_command_start
subscriber.started(event)
File "/Users/drubin/cargurus/analytics/snowblower/.venv/lib/python3.7/site-packages/opentelemetry/instrumentation/pymongo/__init__.py", line 69, in started
name += "." + command
TypeError: can only concatenate str (not "int") to str
|
TypeError
|
def _common_request( # pylint: disable=too-many-locals
self,
args_name,
traced_args,
operation_name,
original_func,
instance,
args,
kwargs,
):
endpoint_name = getattr(instance, "host").split(".")[0]
with self._tracer.start_as_current_span(
"{}.command".format(endpoint_name),
kind=SpanKind.CONSUMER,
) as span:
if args:
http_method = args[0]
span.resource = Resource(
labels={
"endpoint": endpoint_name,
"http_method": http_method.lower(),
}
)
else:
span.resource = Resource(labels={"endpoint": endpoint_name})
add_span_arg_tags(
span,
endpoint_name,
args,
args_name,
traced_args,
)
# Obtaining region name
region_name = _get_instance_region_name(instance)
meta = {
"aws.agent": "boto",
"aws.operation": operation_name,
}
if region_name:
meta["aws.region"] = region_name
for key, value in meta.items():
span.set_attribute(key, value)
# Original func returns a boto.connection.HTTPResponse object
result = original_func(*args, **kwargs)
span.set_attribute("http.status_code", getattr(result, "status"))
span.set_attribute("http.method", getattr(result, "_method"))
return result
|
def _common_request( # pylint: disable=too-many-locals
self,
args_name,
traced_args,
operation_name,
original_func,
instance,
args,
kwargs,
):
endpoint_name = getattr(instance, "host").split(".")[0]
with self._tracer.start_as_current_span(
"{}.command".format(endpoint_name),
kind=SpanKind.CONSUMER,
) as span:
if args:
http_method = args[0]
span.resource = "%s.%s" % (endpoint_name, http_method.lower())
else:
span.resource = endpoint_name
add_span_arg_tags(
span,
endpoint_name,
args,
args_name,
traced_args,
)
# Obtaining region name
region_name = _get_instance_region_name(instance)
meta = {
"aws.agent": "boto",
"aws.operation": operation_name,
}
if region_name:
meta["aws.region"] = region_name
for key, value in meta.items():
span.set_attribute(key, value)
# Original func returns a boto.connection.HTTPResponse object
result = original_func(*args, **kwargs)
span.set_attribute("http.status_code", getattr(result, "status"))
span.set_attribute("http.method", getattr(result, "_method"))
return result
|
https://github.com/open-telemetry/opentelemetry-python/issues/817
|
Traceback (most recent call last):
File "/opt/cadre/web/.venv/lib/python3.7/site-packages/opentelemetry/sdk/trace/export/__init__.py", line 80, in on_end
self.span_exporter.export((span,))
File "/opt/cadre/web/.venv/lib/python3.7/site-packages/opentelemetry/ext/jaeger/__init__.py", line 156, in export
jaeger_spans = _translate_to_jaeger(spans)
File "/opt/cadre/web/.venv/lib/python3.7/site-packages/opentelemetry/ext/jaeger/__init__.py", line 200, in _translate_to_jaeger
tags.extend(_extract_tags(span.resource.labels))
AttributeError: 'str' object has no attribute 'labels'
|
AttributeError
|
def _patched_api_call(self, original_func, instance, args, kwargs):
endpoint_name = deep_getattr(instance, "_endpoint._endpoint_prefix")
with self._tracer.start_as_current_span(
"{}.command".format(endpoint_name),
kind=SpanKind.CONSUMER,
) as span:
operation = None
if args:
operation = args[0]
span.resource = Resource(
labels={
"endpoint": endpoint_name,
"operation": operation.lower(),
}
)
else:
span.resource = Resource(labels={"endpoint": endpoint_name})
add_span_arg_tags(
span,
endpoint_name,
args,
("action", "params", "path", "verb"),
{"params", "path", "verb"},
)
region_name = deep_getattr(instance, "meta.region_name")
meta = {
"aws.agent": "botocore",
"aws.operation": operation,
"aws.region": region_name,
}
for key, value in meta.items():
span.set_attribute(key, value)
result = original_func(*args, **kwargs)
span.set_attribute(
"http.status_code",
result["ResponseMetadata"]["HTTPStatusCode"],
)
span.set_attribute(
"retry_attempts",
result["ResponseMetadata"]["RetryAttempts"],
)
return result
|
def _patched_api_call(self, original_func, instance, args, kwargs):
endpoint_name = deep_getattr(instance, "_endpoint._endpoint_prefix")
with self._tracer.start_as_current_span(
"{}.command".format(endpoint_name),
kind=SpanKind.CONSUMER,
) as span:
operation = None
if args:
operation = args[0]
span.resource = "%s.%s" % (endpoint_name, operation.lower())
else:
span.resource = endpoint_name
add_span_arg_tags(
span,
endpoint_name,
args,
("action", "params", "path", "verb"),
{"params", "path", "verb"},
)
region_name = deep_getattr(instance, "meta.region_name")
meta = {
"aws.agent": "botocore",
"aws.operation": operation,
"aws.region": region_name,
}
for key, value in meta.items():
span.set_attribute(key, value)
result = original_func(*args, **kwargs)
span.set_attribute(
"http.status_code",
result["ResponseMetadata"]["HTTPStatusCode"],
)
span.set_attribute(
"retry_attempts",
result["ResponseMetadata"]["RetryAttempts"],
)
return result
|
https://github.com/open-telemetry/opentelemetry-python/issues/817
|
Traceback (most recent call last):
File "/opt/cadre/web/.venv/lib/python3.7/site-packages/opentelemetry/sdk/trace/export/__init__.py", line 80, in on_end
self.span_exporter.export((span,))
File "/opt/cadre/web/.venv/lib/python3.7/site-packages/opentelemetry/ext/jaeger/__init__.py", line 156, in export
jaeger_spans = _translate_to_jaeger(spans)
File "/opt/cadre/web/.venv/lib/python3.7/site-packages/opentelemetry/ext/jaeger/__init__.py", line 200, in _translate_to_jaeger
tags.extend(_extract_tags(span.resource.labels))
AttributeError: 'str' object has no attribute 'labels'
|
AttributeError
|
def __init__(self, thrift_url="", auth=None):
self.thrift_url = thrift_url
self.auth = auth
self.http_transport = THttpClient.THttpClient(uri_or_host=self.thrift_url)
self.protocol = TBinaryProtocol.TBinaryProtocol(self.http_transport)
# set basic auth header
if auth is not None:
auth_header = "{}:{}".format(*auth)
decoded = base64.b64encode(auth_header.encode()).decode("ascii")
basic_auth = dict(Authorization="Basic {}".format(decoded))
self.http_transport.setCustomHeaders(basic_auth)
|
def __init__(
self,
thrift_url="",
auth=None,
client=jaeger.Client,
http_transport=THttpClient.THttpClient,
):
self.thrift_url = thrift_url
self.auth = auth
self.http_transport = http_transport(uri_or_host=thrift_url)
self.client = client(
iprot=TBinaryProtocol.TBinaryProtocol(trans=self.http_transport)
)
# set basic auth header
if auth is not None:
auth_header = "{}:{}".format(*auth)
decoded = base64.b64encode(auth_header.encode()).decode("ascii")
basic_auth = dict(Authorization="Basic {}".format(decoded))
self.http_transport.setCustomHeaders(basic_auth)
|
https://github.com/open-telemetry/opentelemetry-python/issues/493
|
Exception while exporting Span.
Traceback (most recent call last):
File "/home/tsutsumi/workspace/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/trace/export/__init__.py", line 81, in on_end
self.span_exporter.export((span,))
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/__init__.py", line 118, in export
self.collector.submit(batch)
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/__init__.py", line 377, in submit
self.client.submitBatches([batch])
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/gen/jaeger/Collector.py", line 46, in submitBatches
return self.recv_submitBatches()
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/gen/jaeger/Collector.py", line 60, in recv_submitBatches
(fname, mtype, rseqid) = iprot.readMessageBegin()
File "/home/tsutsumi/.pyenv/versions/3.8.1/envs/otel/lib/python3.8/site-packages/thrift/protocol/TBinaryProtocol.py", line 148, in readMessageBegin
name = self.trans.readAll(sz)
File "/home/tsutsumi/.pyenv/versions/3.8.1/envs/otel/lib/python3.8/site-packages/thrift/transport/TTransport.py", line 68, in readAll
raise EOFError()
EOFError
|
EOFError
|
def submit(self, batch: jaeger.Batch):
"""Submits batches to Thrift HTTP Server through Binary Protocol.
Args:
batch: Object to emit Jaeger spans.
"""
batch.write(self.protocol)
self.http_transport.flush()
code = self.http_transport.code
msg = self.http_transport.message
if code >= 300 or code < 200:
logger.error(
"Traces cannot be uploaded; HTTP status code: %s, message: %s",
code,
msg,
)
|
def submit(self, batch: jaeger.Batch):
"""Submits batches to Thrift HTTP Server through Binary Protocol.
Args:
batch: Object to emit Jaeger spans.
"""
try:
self.client.submitBatches([batch])
# it will call http_transport.flush() and
# status code and message will be updated
code = self.http_transport.code
msg = self.http_transport.message
if code >= 300 or code < 200:
logger.error(
"Traces cannot be uploaded; HTTP status code: %s, message %s",
code,
msg,
)
finally:
if self.http_transport.isOpen():
self.http_transport.close()
|
https://github.com/open-telemetry/opentelemetry-python/issues/493
|
Exception while exporting Span.
Traceback (most recent call last):
File "/home/tsutsumi/workspace/opentelemetry-python/opentelemetry-sdk/src/opentelemetry/sdk/trace/export/__init__.py", line 81, in on_end
self.span_exporter.export((span,))
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/__init__.py", line 118, in export
self.collector.submit(batch)
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/__init__.py", line 377, in submit
self.client.submitBatches([batch])
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/gen/jaeger/Collector.py", line 46, in submitBatches
return self.recv_submitBatches()
File "/home/tsutsumi/workspace/opentelemetry-python/ext/opentelemetry-ext-jaeger/src/opentelemetry/ext/jaeger/gen/jaeger/Collector.py", line 60, in recv_submitBatches
(fname, mtype, rseqid) = iprot.readMessageBegin()
File "/home/tsutsumi/.pyenv/versions/3.8.1/envs/otel/lib/python3.8/site-packages/thrift/protocol/TBinaryProtocol.py", line 148, in readMessageBegin
name = self.trans.readAll(sz)
File "/home/tsutsumi/.pyenv/versions/3.8.1/envs/otel/lib/python3.8/site-packages/thrift/transport/TTransport.py", line 68, in readAll
raise EOFError()
EOFError
|
EOFError
|
def get_or_create_warehouse(apps):
Warehouse = apps.get_model("warehouse", "Warehouse")
ShippingZone = apps.get_model("shipping", "ShippingZone")
Site = apps.get_model("sites", "Site")
warehouses = Warehouse.objects.annotate(
zones_count=models.Count("shipping_zones")
).filter(zones_count=ShippingZone.objects.count())
if warehouses.first() is not None:
return warehouses.first()
site_settings = Site.objects.get_current().settings
address = getattr(site_settings, "company_address", None)
if address is None:
Address = apps.get_model("account", "Address")
address = Address.objects.create()
warehouse = Warehouse.objects.create(name="Default warehouse", address=address)
warehouse.shipping_zones.add(*ShippingZone.objects.all())
return warehouse
|
def get_or_create_warehouse(apps):
Warehouse = apps.get_model("warehouse", "Warehouse")
ShippingZone = apps.get_model("shipping", "ShippingZone")
Site = apps.get_model("sites", "Site")
warehouses = Warehouse.objects.annotate(
zones_count=models.Count("shipping_zones")
).filter(zones_count=ShippingZone.objects.count())
if warehouses.first() is not None:
return warehouses.first()
site_settings = Site.objects.get_current().settings
address = getattr(site_settings, "company_address", None)
if address is None:
Address = apps.get_model("account", "Address")
address = Address.objects.create()
warehouse = Warehouse.objects.create(address=address)
warehouse.shipping_zones.add(*ShippingZone.objects.all())
return warehouse
|
https://github.com/mirumee/saleor/issues/5607
|
Applying warehouse.0003_warehouse_slug...Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/Users/timur/Code/saleor/saleor/warehouse/migrations/0003_warehouse_slug.py", line 17, in create_unique_slug_for_warehouses
first_char = warehouse.name[0].lower()
IndexError: string index out of range
|
IndexError
|
def create_unique_slug_for_warehouses(apps, schema_editor):
Warehouse = apps.get_model("warehouse", "Warehouse")
warehouses = (
Warehouse.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = None
slug_values = []
for warehouse in warehouses:
if warehouse.name:
first_char = warehouse.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = list(
Warehouse.objects.filter(slug__istartswith=first_char).values_list(
"slug", flat=True
)
)
elif previous_char is None:
previous_char = ""
slug_values = list(
Warehouse.objects.filter(
slug__istartswith=DEFAULT_SLUG_VALUE
).values_list("slug", flat=True)
)
slug = generate_unique_slug(warehouse, slug_values)
warehouse.slug = slug
warehouse.save(update_fields=["slug"])
slug_values.append(slug)
|
def create_unique_slug_for_warehouses(apps, schema_editor):
Warehouse = apps.get_model("warehouse", "Warehouse")
warehouses = (
Warehouse.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for warehouse in warehouses:
first_char = warehouse.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = list(
Warehouse.objects.filter(slug__istartswith=first_char).values_list(
"slug", flat=True
)
)
slug = generate_unique_slug(warehouse, slug_values)
warehouse.slug = slug
warehouse.save(update_fields=["slug"])
slug_values.append(slug)
|
https://github.com/mirumee/saleor/issues/5607
|
Applying warehouse.0003_warehouse_slug...Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/Users/timur/Code/saleor/saleor/warehouse/migrations/0003_warehouse_slug.py", line 17, in create_unique_slug_for_warehouses
first_char = warehouse.name[0].lower()
IndexError: string index out of range
|
IndexError
|
def generate_unique_slug(instance, slug_values):
slug = slugify(instance.name) if instance.name else DEFAULT_SLUG_VALUE
unique_slug = slug
extension = 1
while unique_slug in slug_values:
extension += 1
unique_slug = f"{slug}-{extension}"
return unique_slug
|
def generate_unique_slug(instance, slug_values):
slug = slugify(instance.name)
unique_slug = slug
extension = 1
while unique_slug in slug_values:
extension += 1
unique_slug = f"{slug}-{extension}"
return unique_slug
|
https://github.com/mirumee/saleor/issues/5607
|
Applying warehouse.0003_warehouse_slug...Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/Users/timur/Code/saleor/saleor/warehouse/migrations/0003_warehouse_slug.py", line 17, in create_unique_slug_for_warehouses
first_char = warehouse.name[0].lower()
IndexError: string index out of range
|
IndexError
|
def create_allocations(apps, schema_editor):
Allocation = apps.get_model("warehouse", "Allocation")
OrderLine = apps.get_model("order", "OrderLine")
Warehouse = apps.get_model("warehouse", "Warehouse")
for warehouse in Warehouse.objects.iterator():
shipping_zone = warehouse.shipping_zones.first()
if not shipping_zone:
continue
shipping_zone_pk = shipping_zone.pk
for order_line in OrderLine.objects.filter(
order__shipping_method__shipping_zone__pk=shipping_zone_pk,
).iterator():
quantity_unfulfilled = order_line.quantity - order_line.quantity_fulfilled
if quantity_unfulfilled > 0 and order_line.variant:
create_allocation(
order_line.variant,
warehouse,
order_line,
quantity_unfulfilled,
Allocation,
)
|
def create_allocations(apps, schema_editor):
Allocation = apps.get_model("warehouse", "Allocation")
OrderLine = apps.get_model("order", "OrderLine")
Warehouse = apps.get_model("warehouse", "Warehouse")
for warehouse in Warehouse.objects.iterator():
shipping_zone_pk = warehouse.shipping_zones.first().pk
for order_line in OrderLine.objects.filter(
order__shipping_method__shipping_zone__pk=shipping_zone_pk,
).iterator():
quantity_unfulfilled = order_line.quantity - order_line.quantity_fulfilled
if quantity_unfulfilled > 0 and order_line.variant:
create_allocation(
order_line.variant,
warehouse,
order_line,
quantity_unfulfilled,
Allocation,
)
|
https://github.com/mirumee/saleor/issues/5607
|
Applying warehouse.0003_warehouse_slug...Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/Users/timur/Code/pyenv/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/Users/timur/Code/saleor/saleor/warehouse/migrations/0003_warehouse_slug.py", line 17, in create_unique_slug_for_warehouses
first_char = warehouse.name[0].lower()
IndexError: string index out of range
|
IndexError
|
def create_unique_slugs_for_producttypes(apps, schema_editor):
ProductType = apps.get_model("product", "ProductType")
product_types = (
ProductType.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for product_type in product_types:
first_char = product_type.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = list(
ProductType.objects.filter(slug__istartswith=first_char).values_list(
"slug", flat=True
)
)
slug = generate_unique_slug(product_type, slug_values)
product_type.slug = slug
product_type.save(update_fields=["slug"])
slug_values.append(slug)
|
def create_unique_slugs_for_producttypes(apps, schema_editor):
ProductType = apps.get_model("product", "ProductType")
product_types = (
ProductType.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for product_type in product_types:
first_char = product_type.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = list(
ProductType.objects.filter(slug__istartswith=first_char).values_list(
"slug", flat=True
)
)
slug = generate_unique_slug(product_type, slug_values)
product_type.slug = slug
slug_values.append(slug)
|
https://github.com/mirumee/saleor/issues/5592
|
Applying product.0111_auto_20191209_0437... OK
Applying product.0112_auto_20200129_0050...Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.NotNullViolation: column "slug" contains null values
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/fields.py", line 249, in database_forwards
schema_editor.alter_field(from_model, from_field, to_field)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 564, in alter_field
self._alter_field(model, old_field, new_field, old_type, new_type,
File "/usr/local/lib/python3.8/site-packages/django/db/backends/postgresql/schema.py", line 152, in _alter_field
super()._alter_field(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 710, in _alter_field
self.execute(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 142, in execute
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 100, in execute
return super().execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
django.db.utils.IntegrityError: column "slug" contains null values
|
django.db.utils.IntegrityError
|
def update_non_unique_slugs_for_models(apps, schema_editor):
models_to_update = ["Category", "Collection"]
for model in models_to_update:
Model = apps.get_model("product", model)
duplicated_slugs = (
Model.objects.all()
.values("slug")
.annotate(duplicated_slug_num=models.Count("slug"))
.filter(duplicated_slug_num__gt=1)
)
slugs_counter = defaultdict(int)
for data in duplicated_slugs:
slugs_counter[data["slug"]] = data["duplicated_slug_num"]
queryset = Model.objects.filter(slug__in=slugs_counter.keys()).order_by("name")
for instance in queryset:
slugs_counter[instance.slug] -= 1
slug = update_slug_to_unique_value(instance.slug, slugs_counter)
instance.slug = slug
instance.save(update_fields=["slug"])
slugs_counter[slug] += 1
|
def update_non_unique_slugs_for_models(apps, schema_editor):
models_to_update = ["Category", "Collection"]
for model in models_to_update:
Model = apps.get_model("product", model)
duplicated_slugs = (
Model.objects.all()
.values("slug")
.annotate(duplicated_slug_num=models.Count("slug"))
.filter(duplicated_slug_num__gt=1)
)
slugs_counter = defaultdict(int)
for data in duplicated_slugs:
slugs_counter[data["slug"]] = data["duplicated_slug_num"]
queryset = Model.objects.filter(slug__in=slugs_counter.keys()).order_by("name")
for instance in queryset:
slugs_counter[instance.slug] -= 1
slug = update_slug_to_unique_value(instance.slug, slugs_counter)
instance.slug = slug
slugs_counter[slug] += 1
|
https://github.com/mirumee/saleor/issues/5592
|
Applying product.0111_auto_20191209_0437... OK
Applying product.0112_auto_20200129_0050...Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.NotNullViolation: column "slug" contains null values
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/fields.py", line 249, in database_forwards
schema_editor.alter_field(from_model, from_field, to_field)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 564, in alter_field
self._alter_field(model, old_field, new_field, old_type, new_type,
File "/usr/local/lib/python3.8/site-packages/django/db/backends/postgresql/schema.py", line 152, in _alter_field
super()._alter_field(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 710, in _alter_field
self.execute(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 142, in execute
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 100, in execute
return super().execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
django.db.utils.IntegrityError: column "slug" contains null values
|
django.db.utils.IntegrityError
|
def create_unique_slug_for_products(apps, schema_editor):
Product = apps.get_model("product", "Product")
products = (
Product.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for product in products:
first_char = product.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = list(
Product.objects.filter(slug__istartswith=first_char).values_list(
"slug", flat=True
)
)
slug = generate_unique_slug(product, slug_values)
product.slug = slug
product.save(update_fields=["slug"])
slug_values.append(slug)
|
def create_unique_slug_for_products(apps, schema_editor):
Product = apps.get_model("product", "Product")
products = (
Product.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for product in products:
first_char = product.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = Product.objects.filter(
slug__istartswith=first_char
).values_list("slug", flat=True)
slug = generate_unique_slug(product, slug_values)
product.slug = slug
slug_values.append(slug)
|
https://github.com/mirumee/saleor/issues/5592
|
Applying product.0111_auto_20191209_0437... OK
Applying product.0112_auto_20200129_0050...Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.NotNullViolation: column "slug" contains null values
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/fields.py", line 249, in database_forwards
schema_editor.alter_field(from_model, from_field, to_field)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 564, in alter_field
self._alter_field(model, old_field, new_field, old_type, new_type,
File "/usr/local/lib/python3.8/site-packages/django/db/backends/postgresql/schema.py", line 152, in _alter_field
super()._alter_field(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 710, in _alter_field
self.execute(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 142, in execute
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 100, in execute
return super().execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
django.db.utils.IntegrityError: column "slug" contains null values
|
django.db.utils.IntegrityError
|
def create_unique_slug_for_warehouses(apps, schema_editor):
Warehouse = apps.get_model("warehouse", "Warehouse")
warehouses = (
Warehouse.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for warehouse in warehouses:
first_char = warehouse.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = list(
Warehouse.objects.filter(slug__istartswith=first_char).values_list(
"slug", flat=True
)
)
slug = generate_unique_slug(warehouse, slug_values)
warehouse.slug = slug
warehouse.save(update_fields=["slug"])
slug_values.append(slug)
|
def create_unique_slug_for_warehouses(apps, schema_editor):
Warehouse = apps.get_model("warehouse", "Warehouse")
warehouses = (
Warehouse.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for warehouse in warehouses:
first_char = warehouse.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = Warehouse.objects.filter(
slug__istartswith=first_char
).values_list("slug", flat=True)
slug = generate_unique_slug(warehouse, slug_values)
warehouse.slug = slug
slug_values.append(slug)
|
https://github.com/mirumee/saleor/issues/5592
|
Applying product.0111_auto_20191209_0437... OK
Applying product.0112_auto_20200129_0050...Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.NotNullViolation: column "slug" contains null values
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/fields.py", line 249, in database_forwards
schema_editor.alter_field(from_model, from_field, to_field)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 564, in alter_field
self._alter_field(model, old_field, new_field, old_type, new_type,
File "/usr/local/lib/python3.8/site-packages/django/db/backends/postgresql/schema.py", line 152, in _alter_field
super()._alter_field(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 710, in _alter_field
self.execute(
File "/usr/local/lib/python3.8/site-packages/django/db/backends/base/schema.py", line 142, in execute
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 100, in execute
return super().execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
django.db.utils.IntegrityError: column "slug" contains null values
|
django.db.utils.IntegrityError
|
def resolve_category(root: models.Product, info):
category_id = root.category_id
if category_id is None:
return None
return CategoryByIdLoader(info.context).load(category_id)
|
def resolve_category(root: models.Product, info):
return CategoryByIdLoader(info.context).load(root.category_id)
|
https://github.com/mirumee/saleor/issues/5589
|
{
"errors": [
{
"message": "The loader.load() function must be called with a value,but got: None.",
"locations": [
{
"line": 82,
"column": 3
}
],
"path": [
"productCreate",
"product",
"category"
],
"extensions": {
"exception": {
"code": "TypeError",
"stacktrace": [
"Traceback (most recent call last):",
" File \"/Users/anders/.pyenv/versions/saleor/lib/python3.8/site-packages/promise/promise.py\", line 489, in _resolve_from_executor",
" executor(resolve, reject)",
" File \"/Users/anders/.pyenv/versions/saleor/lib/python3.8/site-packages/promise/promise.py\", line 756, in executor",
" return resolve(f(*args, **kwargs))",
" File \"/Users/anders/.pyenv/versions/saleor/lib/python3.8/site-packages/graphql/execution/middleware.py\", line 75, in make_it_promise",
" return next(*args, **kwargs)",
" File \"/Users/anders/projects/saleor/saleor/graphql/product/types/products.py\", line 462, in resolve_category",
" return CategoryByIdLoader(info.context).load(root.category_id)",
" File \"/Users/anders/.pyenv/versions/saleor/lib/python3.8/site-packages/promise/dataloader.py\", line 86, in load",
" raise TypeError(",
"TypeError: The loader.load() function must be called with a value,but got: None."
]
}
}
}
],
"data": {
"productCreate": {
"errors": [],
"product": {
"id": "UHJvZHVjdDoxMTk=",
"attributes": [
{
"attribute": {
"id": "QXR0cmlidXRlOjIx",
"slug": "abv",
"name": "ABV",
"inputType": "DROPDOWN",
"valueRequired": false,
"values": [
{
"id": "QXR0cmlidXRlVmFsdWU6Njg=",
"name": "5.1%",
"slug": "51",
"__typename": "AttributeValue"
},
{
"id": "QXR0cmlidXRlVmFsdWU6Njk=",
"name": "6.7%",
"slug": "67",
"__typename": "AttributeValue"
},
{
"id": "QXR0cmlidXRlVmFsdWU6ODI=",
"name": "1%",
"slug": "1",
"__typename": "AttributeValue"
}
],
"__typename": "Attribute"
},
"values": [
{
"id": "QXR0cmlidXRlVmFsdWU6ODI=",
"name": "1%",
"slug": "1",
"__typename": "AttributeValue"
}
],
"__typename": "SelectedAttribute"
}
],
"productType": {
"id": "UHJvZHVjdFR5cGU6MTE=",
"variantAttributes": [],
"__typename": "ProductType",
"name": "Beer",
"hasVariants": false
},
"__typename": "Product",
"name": "piwko",
"descriptionJson": "{}",
"seoTitle": "",
"seoDescription": "",
"category": null,
"collections": [],
"basePrice": {
"amount": 0,
"currency": "USD",
"__typename": "Money"
},
"margin": {
"start": 0,
"stop": 0,
"__typename": "Margin"
},
"purchaseCost": {
"start": {
"amount": 0,
"currency": "USD",
"__typename": "Money"
},
"stop": {
"amount": 0,
"currency": "USD",
"__typename": "Money"
},
"__typename": "MoneyRange"
},
"isAvailable": false,
"isPublished": false,
"chargeTaxes": false,
"publicationDate": null,
"pricing": {
"priceRange": {
"start": {
"net": {
"amount": 0,
"currency": "USD",
"__typename": "Money"
},
"__typename": "TaxedMoney"
},
"stop": {
"net": {
"amount": 0,
"currency": "USD",
"__typename": "Money"
},
"__typename": "TaxedMoney"
},
"__typename": "TaxedMoneyRange"
},
"__typename": "ProductPricingInfo"
},
"images": [],
"variants": [
{
"id": "UHJvZHVjdFZhcmlhbnQ6MzE2",
"sku": "3123",
"name": "",
"priceOverride": null,
"margin": null,
"stocks": [
{
"id": "U3RvY2s6NjI3",
"quantity": 9,
"quantityAllocated": 0,
"warehouse": {
"id": "V2FyZWhvdXNlOmUyZjAyNDlmLTc1MzEtNDU2Ny1hODExLTM4NmY4ZGJkNzlkNQ==",
"name": "Americas",
"__typename": "Warehouse"
},
"__typename": "Stock"
}
],
"trackInventory": false,
"__typename": "ProductVariant"
}
]
},
"__typename": "ProductCreate"
}
}
}
|
TypeError
|
def add_users_to_groups_based_on_users_permissions(apps, schema_editor):
"""Add every user to group with "user_permissions" if exists, else create new one.
For each user, if the group with the exact scope of permissions exists,
add the user to it, else create a new group with this scope of permissions
and add the user to it.
"""
User = apps.get_model("account", "User")
Group = apps.get_model("auth", "Group")
groups = Group.objects.all().prefetch_related("permissions")
counter = get_counter_value(Group)
mapping = create_permissions_mapping(User)
for perms, users in mapping.items():
group = get_group_with_given_permissions(perms, groups)
if group:
group.user_set.add(*users)
continue
group = create_group_with_given_permissions(perms, counter, Group)
group.user_set.add(*users)
counter += 1
|
def add_users_to_groups_based_on_users_permissions(apps, schema_editor):
"""Add every user to group with "user_permissions" if exists, else create new one.
For each user, if the group with the exact scope of permissions exists,
add the user to it, else create a new group with this scope of permissions
and add the user to it.
"""
User = apps.get_model("account", "User")
Group = apps.get_model("auth", "Group")
GroupData = namedtuple("GroupData", ["users", "group_name"])
groups = Group.objects.all().prefetch_related("permissions")
mapping = create_permissions_mapping(User, GroupData)
for perms, group_data in mapping.items():
group = get_group_with_given_permissions(perms, groups)
users = group_data.users
if group:
group.user_set.add(*users)
continue
group = create_group_with_given_permissions(perms, group_data.group_name, Group)
group.user_set.add(*users)
|
https://github.com/mirumee/saleor/issues/5555
|
Running migrations:
Applying account.0041_permissions_to_groups...Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.StringDataRightTruncation: value too long for type character varying(150)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 26, in add_users_to_groups_based_on_users_permissions
group = create_group_with_given_permissions(perms, group_data.group_name, Group)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 65, in create_group_with_given_permissions
group = Group.objects.create(name=group_name)
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 433, in create
obj.save(force_insert=True, using=self.db)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 745, in save
self.save_base(using=using, force_insert=force_insert,
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 782, in save_base
updated = self._save_table(
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 887, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 924, in _do_insert
return manager._insert(
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1204, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1391, in execute_sql
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 26, in add_users_to_groups_based_on_users_permissions
group = create_group_with_given_permissions(perms, group_data.group_name, Group)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 65, in create_group_with_given_permissions
group = Group.objects.create(name=group_name)
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 433, in create
obj.save(force_insert=True, using=self.db)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 745, in save
self.save_base(using=using, force_insert=force_insert,
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 782, in save_base
updated = self._save_table(
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 887, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 924, in _do_insert
return manager._insert(
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1204, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1391, in execute_sql
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
django.db.utils.DataError: value too long for type character varying(150)
|
django.db.utils.DataError
|
def create_permissions_mapping(User):
"""Create mapping permissions to users and potential new group name."""
mapping = defaultdict(set)
users = (
User.objects.filter(user_permissions__isnull=False)
.distinct()
.prefetch_related("user_permissions")
)
for user in users:
permissions = user.user_permissions.all().order_by("pk")
perm_pks = tuple([perm.pk for perm in permissions])
mapping[perm_pks].add(user.pk)
user.user_permissions.clear()
return mapping
|
def create_permissions_mapping(User, GroupData):
"""Create mapping permissions to users and potential new group name."""
mapping = {}
users = User.objects.filter(user_permissions__isnull=False).prefetch_related(
"user_permissions"
)
for user in users:
permissions = user.user_permissions.all()
perm_pks = (perm.pk for perm in permissions)
if perm_pks not in mapping:
group_name = create_group_name(permissions)
mapping[perm_pks] = GroupData({user.pk}, group_name)
else:
mapping[perm_pks].users.add(user.pk)
user.user_permissions.clear()
return mapping
|
https://github.com/mirumee/saleor/issues/5555
|
Running migrations:
Applying account.0041_permissions_to_groups...Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.StringDataRightTruncation: value too long for type character varying(150)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 26, in add_users_to_groups_based_on_users_permissions
group = create_group_with_given_permissions(perms, group_data.group_name, Group)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 65, in create_group_with_given_permissions
group = Group.objects.create(name=group_name)
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 433, in create
obj.save(force_insert=True, using=self.db)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 745, in save
self.save_base(using=using, force_insert=force_insert,
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 782, in save_base
updated = self._save_table(
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 887, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 924, in _do_insert
return manager._insert(
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1204, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1391, in execute_sql
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 26, in add_users_to_groups_based_on_users_permissions
group = create_group_with_given_permissions(perms, group_data.group_name, Group)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 65, in create_group_with_given_permissions
group = Group.objects.create(name=group_name)
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 433, in create
obj.save(force_insert=True, using=self.db)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 745, in save
self.save_base(using=using, force_insert=force_insert,
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 782, in save_base
updated = self._save_table(
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 887, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 924, in _do_insert
return manager._insert(
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1204, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1391, in execute_sql
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
django.db.utils.DataError: value too long for type character varying(150)
|
django.db.utils.DataError
|
def create_group_with_given_permissions(perm_pks, counter, Group):
"""Create new group with given set of permissions."""
group_name = f"Group {counter:03d}"
group = Group.objects.create(name=group_name)
group.permissions.add(*perm_pks)
return group
|
def create_group_with_given_permissions(perm_pks, group_name, Group):
"""Create new group with given set of permissions."""
group = Group.objects.create(name=group_name)
group.permissions.add(*perm_pks)
return group
|
https://github.com/mirumee/saleor/issues/5555
|
Running migrations:
Applying account.0041_permissions_to_groups...Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
psycopg2.errors.StringDataRightTruncation: value too long for type character varying(150)
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 26, in add_users_to_groups_based_on_users_permissions
group = create_group_with_given_permissions(perms, group_data.group_name, Group)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 65, in create_group_with_given_permissions
group = Group.objects.create(name=group_name)
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 433, in create
obj.save(force_insert=True, using=self.db)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 745, in save
self.save_base(using=using, force_insert=force_insert,
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 782, in save_base
updated = self._save_table(
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 887, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 924, in _do_insert
return manager._insert(
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1204, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1391, in execute_sql
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 26, in add_users_to_groups_based_on_users_permissions
group = create_group_with_given_permissions(perms, group_data.group_name, Group)
File "/app/saleor/account/migrations/0041_permissions_to_groups.py", line 65, in create_group_with_given_permissions
group = Group.objects.create(name=group_name)
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 433, in create
obj.save(force_insert=True, using=self.db)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 745, in save
self.save_base(using=using, force_insert=force_insert,
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 782, in save_base
updated = self._save_table(
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 887, in _save_table
results = self._do_insert(cls._base_manager, using, fields, returning_fields, raw)
File "/usr/local/lib/python3.8/site-packages/django/db/models/base.py", line 924, in _do_insert
return manager._insert(
File "/usr/local/lib/python3.8/site-packages/django/db/models/manager.py", line 82, in manager_method
return getattr(self.get_queryset(), name)(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/db/models/query.py", line 1204, in _insert
return query.get_compiler(using=using).execute_sql(returning_fields)
File "/usr/local/lib/python3.8/site-packages/django/db/models/sql/compiler.py", line 1391, in execute_sql
cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 68, in execute
return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 77, in _execute_with_wrappers
return executor(sql, params, many, context)
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
File "/usr/local/lib/python3.8/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/usr/local/lib/python3.8/site-packages/django/db/backends/utils.py", line 86, in _execute
return self.cursor.execute(sql, params)
django.db.utils.DataError: value too long for type character varying(150)
|
django.db.utils.DataError
|
def order_created(self, order: "Order", previous_value: Any) -> Any:
if not self.active:
return previous_value
data = get_order_tax_data(order, self.config, force_refresh=True)
transaction_url = urljoin(
get_api_url(self.config.use_sandbox), "transactions/createoradjust"
)
api_post_request_task.delay(transaction_url, data, asdict(self.config))
return previous_value
|
def order_created(self, order: "Order", previous_value: Any) -> Any:
if not self.active:
return previous_value
data = get_order_tax_data(order, self.config, force_refresh=True)
transaction_url = urljoin(
get_api_url(self.config.use_sandbox), "transactions/createoradjust"
)
api_post_request_task.delay(transaction_url, data, self.config)
return previous_value
|
https://github.com/mirumee/saleor/issues/5490
|
web_1 | Traceback (most recent call last):
web_1 | File "/usr/local/lib/python3.8/site-packages/promise/promise.py", line 489, in _resolve_from_executor
web_1 | executor(resolve, reject)
web_1 | File "/usr/local/lib/python3.8/site-packages/promise/promise.py", line 756, in executor
web_1 | return resolve(f(*args, **kwargs))
web_1 | File "/usr/local/lib/python3.8/site-packages/graphql/execution/middleware.py", line 75, in make_it_promise
web_1 | return next(*args, **kwargs)
web_1 | File "/app/saleor/graphql/core/mutations.py", line 284, in mutate
web_1 | response = cls.perform_mutation(root, info, **data)
web_1 | File "/app/saleor/graphql/checkout/mutations.py", line 777, in perform_mutation
web_1 | order = create_order(
web_1 | File "/usr/local/lib/python3.8/contextlib.py", line 75, in inner
web_1 | return func(*args, **kwds)
web_1 | File "/app/saleor/checkout/utils.py", line 758, in create_order
web_1 | order_created(order=order, user=user)
web_1 | File "/app/saleor/order/actions.py", line 34, in order_created
web_1 | manager.order_created(order)
web_1 | File "/app/saleor/extensions/manager.py", line 215, in order_created
web_1 | return self.__run_method_on_plugins("order_created", default_value, order)
web_1 | File "/app/saleor/extensions/manager.py", line 56, in __run_method_on_plugins
web_1 | value = self.__run_method_on_single_plugin(
web_1 | File "/app/saleor/extensions/manager.py", line 79, in __run_method_on_single_plugin
web_1 | returned_value = plugin_method(*args, **kwargs, previous_value=previous_value)
web_1 | File "/app/saleor/extensions/plugins/avatax/plugin.py", line 258, in order_created
web_1 | api_post_request_task.delay(transaction_url, data, self.config)
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/task.py", line 425, in delay
web_1 | return self.apply_async(args, kwargs)
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/task.py", line 564, in apply_async
web_1 | return app.send_task(
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/base.py", line 775, in send_task
web_1 | amqp.send_task_message(P, name, message, **options)
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/amqp.py", line 550, in send_task_message
web_1 | ret = producer.publish(
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 167, in publish
web_1 | body, content_type, content_encoding = self._prepare(
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 252, in _prepare
web_1 | body) = dumps(body, serializer=serializer)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 221, in dumps
web_1 | payload = encoder(data)
web_1 | File "/usr/local/lib/python3.8/contextlib.py", line 131, in __exit__
web_1 | self.gen.throw(type, value, traceback)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 54, in _reraise_errors
web_1 | reraise(wrapper, wrapper(exc), sys.exc_info()[2])
web_1 | File "/usr/local/lib/python3.8/site-packages/vine/five.py", line 194, in reraise
web_1 | raise value.with_traceback(tb)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 50, in _reraise_errors
web_1 | yield
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 221, in dumps
web_1 | payload = encoder(data)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/utils/json.py", line 69, in dumps
web_1 | return _dumps(s, cls=cls or _default_encoder,
web_1 | File "/usr/local/lib/python3.8/json/__init__.py", line 234, in dumps
web_1 | return cls(
web_1 | File "/usr/local/lib/python3.8/json/encoder.py", line 199, in encode
web_1 | chunks = self.iterencode(o, _one_shot=True)
web_1 | File "/usr/local/lib/python3.8/json/encoder.py", line 257, in iterencode
web_1 | return _iterencode(o, 0)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/utils/json.py", line 59, in default
web_1 | return super(JSONEncoder, self).default(o)
web_1 | File "/usr/local/lib/python3.8/json/encoder.py", line 179, in default
web_1 | raise TypeError(f'Object of type {o.__class__.__name__} '
web_1 | kombu.exceptions.EncodeError: Object of type AvataxConfiguration is not JSON serializable
|
TypeError
|
def api_post_request_task(transaction_url, data, config):
config = AvataxConfiguration(**config)
api_post_request(transaction_url, data, config)
|
def api_post_request_task(transaction_url, data, config):
api_post_request(transaction_url, data, config)
|
https://github.com/mirumee/saleor/issues/5490
|
web_1 | Traceback (most recent call last):
web_1 | File "/usr/local/lib/python3.8/site-packages/promise/promise.py", line 489, in _resolve_from_executor
web_1 | executor(resolve, reject)
web_1 | File "/usr/local/lib/python3.8/site-packages/promise/promise.py", line 756, in executor
web_1 | return resolve(f(*args, **kwargs))
web_1 | File "/usr/local/lib/python3.8/site-packages/graphql/execution/middleware.py", line 75, in make_it_promise
web_1 | return next(*args, **kwargs)
web_1 | File "/app/saleor/graphql/core/mutations.py", line 284, in mutate
web_1 | response = cls.perform_mutation(root, info, **data)
web_1 | File "/app/saleor/graphql/checkout/mutations.py", line 777, in perform_mutation
web_1 | order = create_order(
web_1 | File "/usr/local/lib/python3.8/contextlib.py", line 75, in inner
web_1 | return func(*args, **kwds)
web_1 | File "/app/saleor/checkout/utils.py", line 758, in create_order
web_1 | order_created(order=order, user=user)
web_1 | File "/app/saleor/order/actions.py", line 34, in order_created
web_1 | manager.order_created(order)
web_1 | File "/app/saleor/extensions/manager.py", line 215, in order_created
web_1 | return self.__run_method_on_plugins("order_created", default_value, order)
web_1 | File "/app/saleor/extensions/manager.py", line 56, in __run_method_on_plugins
web_1 | value = self.__run_method_on_single_plugin(
web_1 | File "/app/saleor/extensions/manager.py", line 79, in __run_method_on_single_plugin
web_1 | returned_value = plugin_method(*args, **kwargs, previous_value=previous_value)
web_1 | File "/app/saleor/extensions/plugins/avatax/plugin.py", line 258, in order_created
web_1 | api_post_request_task.delay(transaction_url, data, self.config)
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/task.py", line 425, in delay
web_1 | return self.apply_async(args, kwargs)
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/task.py", line 564, in apply_async
web_1 | return app.send_task(
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/base.py", line 775, in send_task
web_1 | amqp.send_task_message(P, name, message, **options)
web_1 | File "/usr/local/lib/python3.8/site-packages/celery/app/amqp.py", line 550, in send_task_message
web_1 | ret = producer.publish(
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 167, in publish
web_1 | body, content_type, content_encoding = self._prepare(
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/messaging.py", line 252, in _prepare
web_1 | body) = dumps(body, serializer=serializer)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 221, in dumps
web_1 | payload = encoder(data)
web_1 | File "/usr/local/lib/python3.8/contextlib.py", line 131, in __exit__
web_1 | self.gen.throw(type, value, traceback)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 54, in _reraise_errors
web_1 | reraise(wrapper, wrapper(exc), sys.exc_info()[2])
web_1 | File "/usr/local/lib/python3.8/site-packages/vine/five.py", line 194, in reraise
web_1 | raise value.with_traceback(tb)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 50, in _reraise_errors
web_1 | yield
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/serialization.py", line 221, in dumps
web_1 | payload = encoder(data)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/utils/json.py", line 69, in dumps
web_1 | return _dumps(s, cls=cls or _default_encoder,
web_1 | File "/usr/local/lib/python3.8/json/__init__.py", line 234, in dumps
web_1 | return cls(
web_1 | File "/usr/local/lib/python3.8/json/encoder.py", line 199, in encode
web_1 | chunks = self.iterencode(o, _one_shot=True)
web_1 | File "/usr/local/lib/python3.8/json/encoder.py", line 257, in iterencode
web_1 | return _iterencode(o, 0)
web_1 | File "/usr/local/lib/python3.8/site-packages/kombu/utils/json.py", line 59, in default
web_1 | return super(JSONEncoder, self).default(o)
web_1 | File "/usr/local/lib/python3.8/json/encoder.py", line 179, in default
web_1 | raise TypeError(f'Object of type {o.__class__.__name__} '
web_1 | kombu.exceptions.EncodeError: Object of type AvataxConfiguration is not JSON serializable
|
TypeError
|
def update_products_minimal_variant_prices_of_catalogues(
product_ids=None, category_ids=None, collection_ids=None
):
# Building the matching products query
q_list = []
if product_ids:
q_list.append(Q(pk__in=product_ids))
if category_ids:
q_list.append(Q(category_id__in=category_ids))
if collection_ids:
q_list.append(Q(collectionproduct__collection_id__in=collection_ids))
# Asserting that the function was called with some ids
if q_list:
# Querying the products
q_or = reduce(operator.or_, q_list)
products = Product.objects.filter(q_or).distinct()
update_products_minimal_variant_prices(products)
|
def update_products_minimal_variant_prices_of_catalogues(
product_ids=None, category_ids=None, collection_ids=None
):
# Building the matching products query
q_list = []
if product_ids:
q_list.append(Q(pk__in=product_ids))
if category_ids:
q_list.append(Q(category_id__in=category_ids))
if collection_ids:
q_list.append(Q(collectionproduct__collection_id__in=collection_ids))
# Asserting that the function was called with some ids
if not q_list:
raise ValueError(
"Provide at least one of the ID lists:\n"
"\tproduct_ids,\n"
"\tcategory_ids,\n"
"\tcollection_ids."
)
# Querying the products
q_or = reduce(operator.or_, q_list)
products = Product.objects.filter(q_or).distinct()
update_products_minimal_variant_prices(products)
|
https://github.com/mirumee/saleor/issues/5351
|
ERROR celery.app.trace Task saleor.product.tasks.update_products_minimal_variant_prices_of_discount_task[4ec46245-d1f1-47ae-ab23-0c0ab73a9981] raised unexpected: ValueError('Provide at least one of the ID lists:\n\tproduct_ids,\n\tcategory_ids,\n\tcollection_ids.') [PID:31316:Thread-175]
Traceback (most recent call last):
File "/Users/marcin/.pyenv/versions/saleor3.8.1/lib/python3.8/site-packages/celery/app/trace.py", line 385, in trace_task
R = retval = fun(*args, **kwargs)
File "/Users/marcin/mirumee/saleor-platform/saleor/saleor/product/tasks.py", line 64, in update_products_minimal_variant_prices_of_discount_task
update_products_minimal_variant_prices_of_discount(discount)
File "/Users/marcin/mirumee/saleor-platform/saleor/saleor/product/utils/variant_prices.py", line 76, in update_products_minimal_variant_prices_of_discount
update_products_minimal_variant_prices_of_catalogues(
File "/Users/marcin/mirumee/saleor-platform/saleor/saleor/product/utils/variant_prices.py", line 62, in update_products_minimal_variant_prices_of_catalogues
raise ValueError(
ValueError: Provide at least one of the ID lists:
product_ids,
category_ids,
collection_ids.
|
ValueError
|
def validate_image_file(file, field_name):
"""Validate if the file is an image."""
if not file:
raise ValidationError(
{field_name: ValidationError("File is required", code="required")}
)
if not file.content_type.startswith("image/"):
raise ValidationError(
{field_name: ValidationError("Invalid file type", code="invalid")}
)
|
def validate_image_file(file, field_name):
"""Validate if the file is an image."""
if not file.content_type.startswith("image/"):
raise ValidationError(
{field_name: ValidationError("Invalid file type", code="invalid")}
)
|
https://github.com/mirumee/saleor/issues/5230
|
Traceback (most recent call last):
File "./saleor/graphql/core/mutations.py", line 279, in mutate
response = cls.perform_mutation(root, info, **data)
File "./saleor/graphql/product/mutations/products.py", line 1400, in perform_mutation
validate_image_file(image_data, "image")
File "./saleor/graphql/core/utils/__init__.py", line 33, in validate_image_file
if not file.content_type.startswith("image/"):
AttributeError: 'NoneType' object has no attribute 'content_type'
|
AttributeError
|
def create_unique_slugs_for_producttypes(apps, schema_editor):
ProductType = apps.get_model("product", "ProductType")
product_types = (
ProductType.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for product_type in product_types:
first_char = product_type.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = list(
ProductType.objects.filter(slug__istartswith=first_char).values_list(
"slug", flat=True
)
)
slug = generate_unique_slug(product_type, slug_values)
product_type.slug = slug
slug_values.append(slug)
|
def create_unique_slugs_for_producttypes(apps, schema_editor):
ProductType = apps.get_model("product", "ProductType")
product_types = (
ProductType.objects.filter(slug__isnull=True).order_by(Lower("name")).iterator()
)
previous_char = ""
slug_values = []
for product_type in product_types:
first_char = product_type.name[0].lower()
if first_char != previous_char:
previous_char = first_char
slug_values = ProductType.objects.filter(
slug__istartswith=first_char
).values_list("slug", flat=True)
slug = generate_unique_slug(product_type, slug_values)
product_type.slug = slug
slug_values.append(slug)
|
https://github.com/mirumee/saleor/issues/5391
|
Operations to perform:
Apply all migrations: account, auth, checkout, contenttypes, core, discount, django_prices_openexchangerates, django_prices_vatlayer, extensions, giftcard, menu, order, page, payment, product, shipping, site, sites, warehouse, webhook, wishlist
Running migrations:
Applying product.0112_auto_20200129_0050...Traceback (most recent call last):
File "manage.py", line 10, in <module>
execute_from_command_line(sys.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 401, in execute_from_command_line
utility.execute()
File "/usr/local/lib/python3.8/site-packages/django/core/management/__init__.py", line 395, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 328, in run_from_argv
self.execute(*args, **cmd_options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 369, in execute
output = self.handle(*args, **options)
File "/usr/local/lib/python3.8/site-packages/django/core/management/base.py", line 83, in wrapped
res = handle_func(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/django/core/management/commands/migrate.py", line 231, in handle
post_migrate_state = executor.migrate(
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 117, in migrate
state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 147, in _migrate_all_forwards
state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/executor.py", line 245, in apply_migration
state = migration.apply(state, schema_editor)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/migration.py", line 124, in apply
operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
File "/usr/local/lib/python3.8/site-packages/django/db/migrations/operations/special.py", line 190, in database_forwards
self.code(from_state.apps, schema_editor)
File "/app/saleor/product/migrations/0112_auto_20200129_0050.py", line 28, in create_unique_slugs_for_producttypes
slug_values.append(slug)
AttributeError: 'QuerySet' object has no attribute 'append'
|
AttributeError
|
def create_permission_groups():
super_users = User.objects.filter(is_superuser=True)
if not super_users:
super_users = create_staff_users(1, True)
group = create_group("Full Access", get_permissions(), super_users)
yield f"Group: {group}"
staff_users = create_staff_users()
customer_support_codenames = [
perm.codename
for enum in [CheckoutPermissions, OrderPermissions, GiftcardPermissions]
for perm in enum
]
customer_support_codenames.append(AccountPermissions.MANAGE_USERS.codename)
customer_support_permissions = Permission.objects.filter(
codename__in=customer_support_codenames
)
group = create_group("Customer Support", customer_support_permissions, staff_users)
yield f"Group: {group}"
|
def create_permission_groups():
super_users = User.objects.filter(is_superuser=True)
if not super_users:
super_users = create_staff_users(1, True)
group = create_group("Full Access", Permission.objects.all(), super_users)
yield f"Group: {group}"
staff_users = create_staff_users()
customer_support_codenames = [
perm.codename
for enum in [CheckoutPermissions, OrderPermissions, GiftcardPermissions]
for perm in enum
]
customer_support_codenames.append(AccountPermissions.MANAGE_USERS.codename)
customer_support_permissions = Permission.objects.filter(
codename__in=customer_support_codenames
)
group = create_group("Customer Support", customer_support_permissions, staff_users)
yield f"Group: {group}"
|
https://github.com/mirumee/saleor/issues/5340
|
{
"errors": [
{
"message": "'account.add_address' is not a valid PermissionEnum",
"locations": [
{
"line": 34,
"column": 3
}
],
"path": [
"permissionGroup",
"permissions"
],
"extensions": {
"exception": {
"code": "ValueError",
"stacktrace": [
"ValueError: 'account.add_address' is not a valid PermissionEnum",
"",
"During handling of the above exception, another exception occurred:",
"Traceback (most recent call last):",
" File \"/usr/local/lib/python3.8/site-packages/promise/promise.py\", line 489, in _resolve_from_executor",
" executor(resolve, reject)",
" File \"/usr/local/lib/python3.8/site-packages/promise/promise.py\", line 756, in executor",
" return resolve(f(*args, **kwargs))",
" File \"/usr/local/lib/python3.8/site-packages/graphql/execution/middleware.py\", line 75, in make_it_promise",
" return next(*args, **kwargs)",
" File \"/app/saleor/graphql/account/types.py\", line 474, in resolve_permissions",
" return format_permissions_for_display(permissions)",
" File \"/app/saleor/graphql/utils.py\", line 175, in format_permissions_for_display",
" PermissionDisplay(code=PermissionEnum.get(codename), name=permission.name)",
" File \"/usr/local/lib/python3.8/site-packages/graphene/types/enum.py\", line 38, in get",
" return cls._meta.enum(value)",
" File \"/usr/local/lib/python3.8/enum.py\", line 304, in __call__",
" return cls.__new__(cls, value)",
" File \"/usr/local/lib/python3.8/enum.py\", line 595, in __new__",
" raise exc",
" File \"/usr/local/lib/python3.8/enum.py\", line 579, in __new__",
" result = cls._missing_(value)",
" File \"/usr/local/lib/python3.8/enum.py\", line 608, in _missing_",
" raise ValueError(\"%r is not a valid %s\" % (value, cls.__name__))",
"ValueError: 'account.add_address' is not a valid PermissionEnum"
]
}
}
}
],
"data": {
"permissionGroup": {
"id": "R3JvdXA6MQ==",
"name": "Full Access",
"users": [
{
"id": "VXNlcjoyMQ==",
"firstName": "",
"lastName": "",
"__typename": "User",
"email": "admin@example.com",
"isActive": true,
"avatar": {
"url": "http://localhost:8000/media/user-avatars/avatar8.png",
"__typename": "Image"
}
}
],
"__typename": "Group",
"permissions": null
}
}
}
|
ValueError
|
def category_delete(request, pk):
category = get_object_or_404(Category, pk=pk)
if request.method == "POST":
descendants = category.get_descendants()
menus = get_menus_that_need_update(categories=descendants)
category.delete()
if menus:
update_menus(menus)
messages.success(
request,
pgettext_lazy("Dashboard message", "Removed category %s") % category,
)
root_pk = None
if category.parent:
root_pk = category.parent.pk
if root_pk:
if request.is_ajax():
response = {
"redirectUrl": reverse(
"dashboard:category-details", kwargs={"pk": root_pk}
)
}
return JsonResponse(response)
return redirect("dashboard:category-details", pk=root_pk)
else:
if request.is_ajax():
response = {"redirectUrl": reverse("dashboard:category-list")}
return JsonResponse(response)
return redirect("dashboard:category-list")
ctx = {
"category": category,
"descendants": list(category.get_descendants()),
"products_count": len(category.products.all()),
}
return TemplateResponse(
request, "dashboard/category/modal/confirm_delete.html", ctx
)
|
def category_delete(request, pk):
category = get_object_or_404(Category, pk=pk)
if request.method == "POST":
descendants = category.get_descendants()
menus = get_menus_that_needs_update(categories=descendants)
category.delete()
if menus:
update_menus(menus)
messages.success(
request,
pgettext_lazy("Dashboard message", "Removed category %s") % category,
)
root_pk = None
if category.parent:
root_pk = category.parent.pk
if root_pk:
if request.is_ajax():
response = {
"redirectUrl": reverse(
"dashboard:category-details", kwargs={"pk": root_pk}
)
}
return JsonResponse(response)
return redirect("dashboard:category-details", pk=root_pk)
else:
if request.is_ajax():
response = {"redirectUrl": reverse("dashboard:category-list")}
return JsonResponse(response)
return redirect("dashboard:category-list")
ctx = {
"category": category,
"descendants": list(category.get_descendants()),
"products_count": len(category.products.all()),
}
return TemplateResponse(
request, "dashboard/category/modal/confirm_delete.html", ctx
)
|
https://github.com/mirumee/saleor/issues/4471
|
Traceback (most recent call last):
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 487, in _resolve_from_executor
executor(resolve, reject)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 754, in executor
return resolve(f(*args, **kwargs))
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/graphql/execution/middleware.py", line 75, in make_it_promise
return next(*args, **kwargs)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 207, in mutate
response = cls.perform_mutation(root, info, **data)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 354, in perform_mutation
instance = cls.construct_instance(instance, cleaned_input)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 184, in construct_instance
f.save_form_data(instance, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/__init__.py", line 853, in save_form_data
setattr(instance, self.name, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/related_descriptors.py", line 211, in __set__
self.field.remote_field.model._meta.object_name,
ValueError: Cannot assign "<Page: About>": "MenuItem.category" must be a "Category" instance.
|
ValueError
|
def collection_delete(request, pk=None):
collection = get_object_or_404(Collection, pk=pk)
if request.method == "POST":
menus = get_menus_that_need_update(collection=collection)
collection.delete()
if menus:
update_menus(menus)
msg = pgettext_lazy("Collection message", "Deleted collection")
messages.success(request, msg)
if request.is_ajax():
response = {"redirectUrl": reverse("dashboard:collection-list")}
return JsonResponse(response)
return redirect("dashboard:collection-list")
ctx = {"collection": collection}
return TemplateResponse(request, "dashboard/collection/confirm_delete.html", ctx)
|
def collection_delete(request, pk=None):
collection = get_object_or_404(Collection, pk=pk)
if request.method == "POST":
menus = get_menus_that_needs_update(collection=collection)
collection.delete()
if menus:
update_menus(menus)
msg = pgettext_lazy("Collection message", "Deleted collection")
messages.success(request, msg)
if request.is_ajax():
response = {"redirectUrl": reverse("dashboard:collection-list")}
return JsonResponse(response)
return redirect("dashboard:collection-list")
ctx = {"collection": collection}
return TemplateResponse(request, "dashboard/collection/confirm_delete.html", ctx)
|
https://github.com/mirumee/saleor/issues/4471
|
Traceback (most recent call last):
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 487, in _resolve_from_executor
executor(resolve, reject)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 754, in executor
return resolve(f(*args, **kwargs))
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/graphql/execution/middleware.py", line 75, in make_it_promise
return next(*args, **kwargs)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 207, in mutate
response = cls.perform_mutation(root, info, **data)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 354, in perform_mutation
instance = cls.construct_instance(instance, cleaned_input)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 184, in construct_instance
f.save_form_data(instance, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/__init__.py", line 853, in save_form_data
setattr(instance, self.name, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/related_descriptors.py", line 211, in __set__
self.field.remote_field.model._meta.object_name,
ValueError: Cannot assign "<Page: About>": "MenuItem.category" must be a "Category" instance.
|
ValueError
|
def page_delete(request, pk):
page = get_object_or_404(Page, pk=pk)
if request.POST:
menus = get_menus_that_need_update(page=page)
page.delete()
if menus:
update_menus(menus)
msg = pgettext_lazy("Dashboard message", "Removed page %s") % (page.title,)
messages.success(request, msg)
return redirect("dashboard:page-list")
ctx = {"page": page}
return TemplateResponse(request, "dashboard/page/modal_delete.html", ctx)
|
def page_delete(request, pk):
page = get_object_or_404(Page, pk=pk)
if request.POST:
menus = get_menus_that_needs_update(page=page)
page.delete()
if menus:
update_menus(menus)
msg = pgettext_lazy("Dashboard message", "Removed page %s") % (page.title,)
messages.success(request, msg)
return redirect("dashboard:page-list")
ctx = {"page": page}
return TemplateResponse(request, "dashboard/page/modal_delete.html", ctx)
|
https://github.com/mirumee/saleor/issues/4471
|
Traceback (most recent call last):
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 487, in _resolve_from_executor
executor(resolve, reject)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 754, in executor
return resolve(f(*args, **kwargs))
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/graphql/execution/middleware.py", line 75, in make_it_promise
return next(*args, **kwargs)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 207, in mutate
response = cls.perform_mutation(root, info, **data)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 354, in perform_mutation
instance = cls.construct_instance(instance, cleaned_input)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 184, in construct_instance
f.save_form_data(instance, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/__init__.py", line 853, in save_form_data
setattr(instance, self.name, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/related_descriptors.py", line 211, in __set__
self.field.remote_field.model._meta.object_name,
ValueError: Cannot assign "<Page: About>": "MenuItem.category" must be a "Category" instance.
|
ValueError
|
def clean_input(cls, info, instance, data):
cleaned_input = super().clean_input(info, instance, data)
_validate_menu_item_instance(cleaned_input, "page", page_models.Page)
_validate_menu_item_instance(cleaned_input, "collection", product_models.Collection)
_validate_menu_item_instance(cleaned_input, "category", product_models.Category)
items = [
cleaned_input.get("page"),
cleaned_input.get("collection"),
cleaned_input.get("url"),
cleaned_input.get("category"),
]
items = [item for item in items if item is not None]
if len(items) > 1:
raise ValidationError("More than one item provided.")
return cleaned_input
|
def clean_input(cls, info, instance, data):
cleaned_input = super().clean_input(info, instance, data)
items = [
cleaned_input.get("page"),
cleaned_input.get("collection"),
cleaned_input.get("url"),
cleaned_input.get("category"),
]
items = [item for item in items if item is not None]
if len(items) > 1:
raise ValidationError({"items": "More than one item provided."})
return cleaned_input
|
https://github.com/mirumee/saleor/issues/4471
|
Traceback (most recent call last):
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 487, in _resolve_from_executor
executor(resolve, reject)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 754, in executor
return resolve(f(*args, **kwargs))
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/graphql/execution/middleware.py", line 75, in make_it_promise
return next(*args, **kwargs)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 207, in mutate
response = cls.perform_mutation(root, info, **data)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 354, in perform_mutation
instance = cls.construct_instance(instance, cleaned_input)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 184, in construct_instance
f.save_form_data(instance, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/__init__.py", line 853, in save_form_data
setattr(instance, self.name, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/related_descriptors.py", line 211, in __set__
self.field.remote_field.model._meta.object_name,
ValueError: Cannot assign "<Page: About>": "MenuItem.category" must be a "Category" instance.
|
ValueError
|
def perform_mutation(cls, _root, info, menu, moves):
_type, menu_id = from_global_id(menu) # type: str, int
assert _type == "Menu", "Expected a menu of type Menu"
operations = cls.clean_moves(info, menu_id, moves)
for operation in operations:
cls.perform_operation(operation)
menu = models.Menu.objects.get(pk=menu_id)
update_menu(menu)
return cls(menu=menu)
|
def perform_mutation(cls, _root, info, menu, moves):
_type, menu_id = from_global_id(menu) # type: str, int
assert _type == "Menu", "Expected a menu of type Menu"
operations = cls.clean_moves(info, menu_id, moves)
for operation in operations:
cls.perform_operation(operation)
return cls(menu=models.Menu.objects.get(pk=menu_id))
|
https://github.com/mirumee/saleor/issues/4471
|
Traceback (most recent call last):
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 487, in _resolve_from_executor
executor(resolve, reject)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/promise/promise.py", line 754, in executor
return resolve(f(*args, **kwargs))
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/graphql/execution/middleware.py", line 75, in make_it_promise
return next(*args, **kwargs)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 207, in mutate
response = cls.perform_mutation(root, info, **data)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 354, in perform_mutation
instance = cls.construct_instance(instance, cleaned_input)
File "/Users/mikail/Development/saleor/saleor/graphql/core/mutations.py", line 184, in construct_instance
f.save_form_data(instance, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/__init__.py", line 853, in save_form_data
setattr(instance, self.name, data)
File "/Users/mikail/Development/saleor-venv/lib/python3.7/site-packages/django/db/models/fields/related_descriptors.py", line 211, in __set__
self.field.remote_field.model._meta.object_name,
ValueError: Cannot assign "<Page: About>": "MenuItem.category" must be a "Category" instance.
|
ValueError
|
def try_payment_action(self, action):
amount = self.cleaned_data["amount"]
try:
action(amount.gross)
except (PaymentError, ValueError) as e:
self.payment_error(str(e))
return False
return True
|
def try_payment_action(self, action):
amount = self.cleaned_data["amount"]
try:
action(amount.gross)
except (PaymentError, ValueError) as e:
self.payment_error(e.message)
return False
return True
|
https://github.com/mirumee/saleor/issues/1667
|
Traceback (most recent call last):
File "py3venv/lib/python3.6/site-packages/django/core/handlers/exception.py", line 41, in inner
response = get_response(request)
File "py3venv/lib/python3.6/site-packages/django/core/handlers/base.py", line 187, in _get_response
response = self.process_exception_by_middleware(e, request)
File "py3venv/lib/python3.6/site-packages/django/core/handlers/base.py", line 185, in _get_response
response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "py3venv/lib/python3.6/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapped_view
return view_func(request, *args, **kwargs)
File "py3venv/lib/python3.6/site-packages/django/contrib/auth/decorators.py", line 23, in _wrapped_view
return view_func(request, *args, **kwargs)
File "development/saleor/saleor/dashboard/order/views.py", line 137, in refund_payment
if form.is_valid() and form.refund():
File "development/saleor/saleor/dashboard/order/forms.py", line 97, in refund
return self.try_payment_action(self.payment.refund)
File "development/saleor/saleor/dashboard/order/forms.py", line 75, in try_payment_action
self.payment_error(e.message)
AttributeError: 'ValueError' object has no attribute 'message'
|
AttributeError
|
def done(self):
arr = numpy.array(self.data, dtype=self.dtype)
if self.shape:
if len(arr.shape) != len(self.shape):
try:
arr = arr.reshape(self.shape)
except ValueError:
raise ValueError(
"Reshape error. What is defined in data layer is {}, but receive {}".format(
self.shape, arr.shape
)
)
# else:
# self._check_shape(arr.shape)
t = core.LoDTensor()
t.set(arr, self.place)
if self.lod_level > 0:
t.set_recursive_sequence_lengths(self.lod)
return t
|
def done(self):
arr = numpy.array(self.data, dtype=self.dtype)
if self.shape:
if len(arr.shape) != len(self.shape):
try:
arr = arr.reshape(self.shape)
except ValueError:
raise ValueError(
"Reshape error. What is defined in data layer is {}, but receive {}".format(
self.shape, arr.shape
)
)
else:
self._check_shape(arr.shape)
t = core.LoDTensor()
t.set(arr, self.place)
if self.lod_level > 0:
t.set_recursive_sequence_lengths(self.lod)
return t
|
https://github.com/PaddlePaddle/Paddle/issues/15317
|
W0114 15:51:07.217496 116714 device_context.cc:262] Please NOTE: device: 0, CUDA Capability: 70, Driver API Version: 9.0, Runtime API Version: 9.0
W0114 15:51:07.217563 116714 device_context.cc:270] device: 0, cuDNN Version: 7.0.
W0114 15:51:07.217572 116714 device_context.cc:294] WARNING: device: 0. The installed Paddle is compiled with CUDNN 7.1, but CUDNN version in your machine is 7.0, which may cause serious incompatible bug. Please recompile or reinstall Paddle with compatible CUDNN version.
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/paddle/anaconda2/lib/python2.7/threading.py", line 801, in __bootstrap_inner
self.run()
File "/home/paddle/anaconda2/lib/python2.7/threading.py", line 754, in run
self.__target(*self.__args, **self.__kwargs)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/layers/io.py", line 563, in __provider_thread__
for tensors in func():
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/layers/io.py", line 610, in __tensor_provider__
for slots in paddle_reader():
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 287, in __reader_creator__
yield self.feed(item)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 206, in feed
ret_dict[each_name] = each_converter.done()
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 92, in done
self._check_shape(arr.shape)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 79, in _check_shape
format(self.shape, shape))
ValueError: Shape not match. What is defined in data layer is (-1L, 3L, 1333L, 1333L), but receive (1, 3, 800, 1205)
|
ValueError
|
def generate_proposal_labels(
rpn_rois,
gt_classes,
is_crowd,
gt_boxes,
im_info,
batch_size_per_im=256,
fg_fraction=0.25,
fg_thresh=0.25,
bg_thresh_hi=0.5,
bg_thresh_lo=0.0,
bbox_reg_weights=[0.1, 0.1, 0.2, 0.2],
class_nums=None,
use_random=True,
):
"""
** Generate Proposal Labels of Faster-RCNN **
This operator can be, for given the GenerateProposalOp output bounding boxes and groundtruth,
to sample foreground boxes and background boxes, and compute loss target.
RpnRois is the output boxes of RPN and was processed by generate_proposal_op, these boxes
were combined with groundtruth boxes and sampled according to batch_size_per_im and fg_fraction,
If an instance with a groundtruth overlap greater than fg_thresh, then it was considered as a foreground sample.
If an instance with a groundtruth overlap greater than bg_thresh_lo and lower than bg_thresh_hi,
then it was considered as a background sample.
After all foreground and background boxes are chosen (so called Rois),
then we apply random sampling to make sure
the number of foreground boxes is no more than batch_size_per_im * fg_fraction.
For each box in Rois, we assign the classification (class label) and regression targets (box label) to it.
Finally BboxInsideWeights and BboxOutsideWeights are used to specify whether it would contribute to training loss.
Args:
rpn_rois(Variable): A 2-D LoDTensor with shape [N, 4]. N is the number of the GenerateProposalOp's output, each element is a bounding box with [xmin, ymin, xmax, ymax] format.
gt_classes(Variable): A 2-D LoDTensor with shape [M, 1]. M is the number of groundtruth, each element is a class label of groundtruth.
is_crowd(Variable): A 2-D LoDTensor with shape [M, 1]. M is the number of groundtruth, each element is a flag indicates whether a groundtruth is crowd.
gt_boxes(Variable): A 2-D LoDTensor with shape [M, 4]. M is the number of groundtruth, each element is a bounding box with [xmin, ymin, xmax, ymax] format.
im_info(Variable): A 2-D LoDTensor with shape [B, 3]. B is the number of input images, each element consists of im_height, im_width, im_scale.
batch_size_per_im(int): Batch size of rois per images.
fg_fraction(float): Foreground fraction in total batch_size_per_im.
fg_thresh(float): Overlap threshold which is used to chose foreground sample.
bg_thresh_hi(float): Overlap threshold upper bound which is used to chose background sample.
bg_thresh_lo(float): Overlap threshold lower bound which is used to chose background sample.
bbox_reg_weights(list|tuple): Box regression weights.
class_nums(int): Class number.
use_random(bool): Use random sampling to choose foreground and background boxes.
"""
helper = LayerHelper("generate_proposal_labels", **locals())
rois = helper.create_variable_for_type_inference(dtype=rpn_rois.dtype)
labels_int32 = helper.create_variable_for_type_inference(dtype=gt_classes.dtype)
bbox_targets = helper.create_variable_for_type_inference(dtype=rpn_rois.dtype)
bbox_inside_weights = helper.create_variable_for_type_inference(
dtype=rpn_rois.dtype
)
bbox_outside_weights = helper.create_variable_for_type_inference(
dtype=rpn_rois.dtype
)
helper.append_op(
type="generate_proposal_labels",
inputs={
"RpnRois": rpn_rois,
"GtClasses": gt_classes,
"IsCrowd": is_crowd,
"GtBoxes": gt_boxes,
"ImInfo": im_info,
},
outputs={
"Rois": rois,
"LabelsInt32": labels_int32,
"BboxTargets": bbox_targets,
"BboxInsideWeights": bbox_inside_weights,
"BboxOutsideWeights": bbox_outside_weights,
},
attrs={
"batch_size_per_im": batch_size_per_im,
"fg_fraction": fg_fraction,
"fg_thresh": fg_thresh,
"bg_thresh_hi": bg_thresh_hi,
"bg_thresh_lo": bg_thresh_lo,
"bbox_reg_weights": bbox_reg_weights,
"class_nums": class_nums,
"use_random": use_random,
},
)
rois.stop_gradient = True
labels_int32.stop_gradient = True
bbox_targets.stop_gradient = True
bbox_inside_weights.stop_gradient = True
bbox_outside_weights.stop_gradient = True
return rois, labels_int32, bbox_targets, bbox_inside_weights, bbox_outside_weights
|
def generate_proposal_labels(
rpn_rois,
gt_classes,
is_crowd,
gt_boxes,
im_info,
batch_size_per_im=256,
fg_fraction=0.25,
fg_thresh=0.25,
bg_thresh_hi=0.5,
bg_thresh_lo=0.0,
bbox_reg_weights=[0.1, 0.1, 0.2, 0.2],
class_nums=None,
use_random=True,
):
"""
** Generate proposal labels Faster-RCNN **
This operator can be, for given the GenerateProposalOp output bounding boxes and groundtruth,
to sample foreground boxes and background boxes, and compute loss target.
RpnRois is the output boxes of RPN and was processed by generate_proposal_op, these boxes
were combined with groundtruth boxes and sampled according to batch_size_per_im and fg_fraction,
If an instance with a groundtruth overlap greater than fg_thresh, then it was considered as a foreground sample.
If an instance with a groundtruth overlap greater than bg_thresh_lo and lower than bg_thresh_hi,
then it was considered as a background sample.
After all foreground and background boxes are chosen (so called Rois),
then we apply random sampling to make sure
the number of foreground boxes is no more than batch_size_per_im * fg_fraction.
For each box in Rois, we assign the classification (class label) and regression targets (box label) to it.
Finally BboxInsideWeights and BboxOutsideWeights are used to specify whether it would contribute to training loss.
Args:
rpn_rois(Variable): A 2-D LoDTensor with shape [N, 4]. N is the number of the GenerateProposalOp's output, each element is a bounding box with [xmin, ymin, xmax, ymax] format.
gt_classes(Variable): A 2-D LoDTensor with shape [M, 1]. M is the number of groundtruth, each element is a class label of groundtruth.
is_crowd(Variable): A 2-D LoDTensor with shape [M, 1]. M is the number of groundtruth, each element is a flag indicates whether a groundtruth is crowd.
gt_boxes(Variable): A 2-D LoDTensor with shape [M, 4]. M is the number of groundtruth, each element is a bounding box with [xmin, ymin, xmax, ymax] format.
im_info(Variable): A 2-D LoDTensor with shape [B, 3]. B is the number of input images, each element consists of im_height, im_width, im_scale.
batch_size_per_im(int): Batch size of rois per images.
fg_fraction(float): Foreground fraction in total batch_size_per_im.
fg_thresh(float): Overlap threshold which is used to chose foreground sample.
bg_thresh_hi(float): Overlap threshold upper bound which is used to chose background sample.
bg_thresh_lo(float): Overlap threshold lower bound which is used to chose background sample.
bbox_reg_weights(list|tuple): Box regression weights.
class_nums(int): Class number.
use_random(bool): Use random sampling to choose foreground and background boxes.
"""
helper = LayerHelper("generate_proposal_labels", **locals())
rois = helper.create_variable_for_type_inference(dtype=rpn_rois.dtype)
labels_int32 = helper.create_variable_for_type_inference(dtype=gt_classes.dtype)
bbox_targets = helper.create_variable_for_type_inference(dtype=rpn_rois.dtype)
bbox_inside_weights = helper.create_variable_for_type_inference(
dtype=rpn_rois.dtype
)
bbox_outside_weights = helper.create_variable_for_type_inference(
dtype=rpn_rois.dtype
)
helper.append_op(
type="generate_proposal_labels",
inputs={
"RpnRois": rpn_rois,
"GtClasses": gt_classes,
"IsCrowd": is_crowd,
"GtBoxes": gt_boxes,
"ImInfo": im_info,
},
outputs={
"Rois": rois,
"LabelsInt32": labels_int32,
"BboxTargets": bbox_targets,
"BboxInsideWeights": bbox_inside_weights,
"BboxOutsideWeights": bbox_outside_weights,
},
attrs={
"batch_size_per_im": batch_size_per_im,
"fg_fraction": fg_fraction,
"fg_thresh": fg_thresh,
"bg_thresh_hi": bg_thresh_hi,
"bg_thresh_lo": bg_thresh_lo,
"bbox_reg_weights": bbox_reg_weights,
"class_nums": class_nums,
"use_random": use_random,
},
)
rois.stop_gradient = True
labels_int32.stop_gradient = True
bbox_targets.stop_gradient = True
bbox_inside_weights.stop_gradient = True
bbox_outside_weights.stop_gradient = True
return rois, labels_int32, bbox_targets, bbox_inside_weights, bbox_outside_weights
|
https://github.com/PaddlePaddle/Paddle/issues/15317
|
W0114 15:51:07.217496 116714 device_context.cc:262] Please NOTE: device: 0, CUDA Capability: 70, Driver API Version: 9.0, Runtime API Version: 9.0
W0114 15:51:07.217563 116714 device_context.cc:270] device: 0, cuDNN Version: 7.0.
W0114 15:51:07.217572 116714 device_context.cc:294] WARNING: device: 0. The installed Paddle is compiled with CUDNN 7.1, but CUDNN version in your machine is 7.0, which may cause serious incompatible bug. Please recompile or reinstall Paddle with compatible CUDNN version.
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/paddle/anaconda2/lib/python2.7/threading.py", line 801, in __bootstrap_inner
self.run()
File "/home/paddle/anaconda2/lib/python2.7/threading.py", line 754, in run
self.__target(*self.__args, **self.__kwargs)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/layers/io.py", line 563, in __provider_thread__
for tensors in func():
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/layers/io.py", line 610, in __tensor_provider__
for slots in paddle_reader():
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 287, in __reader_creator__
yield self.feed(item)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 206, in feed
ret_dict[each_name] = each_converter.done()
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 92, in done
self._check_shape(arr.shape)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 79, in _check_shape
format(self.shape, shape))
ValueError: Shape not match. What is defined in data layer is (-1L, 3L, 1333L, 1333L), but receive (1, 3, 800, 1205)
|
ValueError
|
def sigmoid_cross_entropy_with_logits(
x, label, ignore_index=kIgnoreIndex, name=None, normalize=False
):
"""
${comment}
Args:
x(${x_type}): ${x_comment}
label(${label_type}): ${label_comment}
ignore_index(&{ignore_index}): ${ignore_index_comment}
name(basestring|None): Name of the output.
normalize(bool): If true, divide the output by the number of
targets != ignore_index.
Returns:
out(${out_type}): ${out_comment}
Examples:
.. code-block:: python
input = fluid.layers.data(
name='data', shape=[10], dtype='float32')
label = fluid.layers.data(
name='data', shape=[10], dtype='float32')
loss = fluid.layers.sigmoid_cross_entropy_with_logits(
x=input,
label=label,
ignore_index=-1,
normalize=True) # or False
# loss = fluid.layers.reduce_sum(loss) # summation of loss
"""
helper = LayerHelper("sigmoid_cross_entropy_with_logits", **locals())
if name is None:
out = helper.create_variable_for_type_inference(dtype=x.dtype)
else:
out = helper.create_variable(name=name, dtype=x.dtype, persistable=False)
helper.append_op(
type="sigmoid_cross_entropy_with_logits",
inputs={"X": x, "Label": label},
attrs={"ignore_index": ignore_index, "normalize": normalize},
outputs={"Out": out},
)
return out
|
def sigmoid_cross_entropy_with_logits(x, label, ignore_index=kIgnoreIndex, name=None):
"""
${comment}
Args:
x(${x_type}): ${x_comment}
label(${label_type}): ${label_comment}
ignore_index(&{ignore_index}): ${ignore_index_comment}
name(basestring|None): Name of the output.
Returns:
out(${out_type}): ${out_comment}
"""
helper = LayerHelper("sigmoid_cross_entropy_with_logits", **locals())
if name is None:
out = helper.create_variable_for_type_inference(dtype=x.dtype)
else:
out = helper.create_variable(name=name, dtype=x.dtype, persistable=False)
helper.append_op(
type="sigmoid_cross_entropy_with_logits",
inputs={"X": x, "Label": label},
attrs={"ignore_index": ignore_index},
outputs={"Out": out},
)
return out
|
https://github.com/PaddlePaddle/Paddle/issues/15317
|
W0114 15:51:07.217496 116714 device_context.cc:262] Please NOTE: device: 0, CUDA Capability: 70, Driver API Version: 9.0, Runtime API Version: 9.0
W0114 15:51:07.217563 116714 device_context.cc:270] device: 0, cuDNN Version: 7.0.
W0114 15:51:07.217572 116714 device_context.cc:294] WARNING: device: 0. The installed Paddle is compiled with CUDNN 7.1, but CUDNN version in your machine is 7.0, which may cause serious incompatible bug. Please recompile or reinstall Paddle with compatible CUDNN version.
Exception in thread Thread-1:
Traceback (most recent call last):
File "/home/paddle/anaconda2/lib/python2.7/threading.py", line 801, in __bootstrap_inner
self.run()
File "/home/paddle/anaconda2/lib/python2.7/threading.py", line 754, in run
self.__target(*self.__args, **self.__kwargs)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/layers/io.py", line 563, in __provider_thread__
for tensors in func():
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/layers/io.py", line 610, in __tensor_provider__
for slots in paddle_reader():
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 287, in __reader_creator__
yield self.feed(item)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 206, in feed
ret_dict[each_name] = each_converter.done()
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 92, in done
self._check_shape(arr.shape)
File "/home/paddle/anaconda2/lib/python2.7/site-packages/paddle/fluid/data_feeder.py", line 79, in _check_shape
format(self.shape, shape))
ValueError: Shape not match. What is defined in data layer is (-1L, 3L, 1333L, 1333L), but receive (1, 3, 800, 1205)
|
ValueError
|
def get_trainer_program(self):
# remove optimize ops and add a send op to main_program
self.program.global_block().delete_ops(self.optimize_ops)
# FIXME(typhoonzero): serialize once will fix error occurs when clone.
self.program.__str__()
return self.program
|
def get_trainer_program(self):
# remove optimize ops and add a send op to main_program
self.program.global_block().delete_ops(self.optimize_ops)
return self.program
|
https://github.com/PaddlePaddle/Paddle/issues/9019
|
Traceback (most recent call last):
File "dist_test.py", line 281, in <module>
main(False, False, "conv", False)
File "dist_test.py", line 237, in main
params_filename=params_filename)
File "dist_test.py", line 177, in train
pserver_prog = t.get_pserver_program(current_endpoint)
File "/paddle/build/python/build/lib-python/paddle/fluid/distribute_transpiler.py", line 312, in get_pserver_program
self._append_pserver_ops(optimize_block, op, endpoint)
File "/paddle/build/python/build/lib-python/paddle/fluid/distribute_transpiler.py", line 579, in _append_pserver_ops
new_inputs[key] = pserver_block.vars[opt_op.input(key)[0]]
KeyError: u'learning_rate_0'
|
KeyError
|
def get_pserver_program(self, endpoint):
"""
Get pserver side program using the endpoint.
NOTE: assume blocks of the same variable is not distributed
on the same pserver, only change param/grad varnames for
trainers to fetch.
"""
# step1
pserver_program = Program()
# step2
recv_inputs = []
for v in self.param_grad_ep_mapping[endpoint]["params"]:
self._clone_var(pserver_program.global_block(), v)
for v in self.param_grad_ep_mapping[endpoint]["grads"]:
# create vars for each trainer in global scope, so
# we don't need to create them when grad arrives.
# change client side var name to origin name by
# removing ".trainer_%d" suffix
suff_idx = v.name.find(".trainer_")
if suff_idx >= 0:
orig_var_name = v.name[:suff_idx]
pserver_program.global_block().create_var(
name=orig_var_name,
persistable=True,
type=v.type,
dtype=v.dtype,
shape=v.shape,
)
for trainer_id in xrange(self.trainers):
var = pserver_program.global_block().create_var(
name="%s.trainer_%d" % (orig_var_name, trainer_id),
persistable=False,
type=v.type,
dtype=v.dtype,
shape=v.shape,
)
recv_inputs.append(var)
# step3
optimize_block = pserver_program.create_block(0)
# step 4
# Create a union-find data struct from optimize ops,
# If two ops are connected, we could add these two ops
# into one set.
ufind = self._create_ufind(self.optimize_ops)
# step 4.2
# Iterate through the ops and append optimize op which
# located on current pserver
opt_op_on_pserver = []
for _, op in enumerate(self.optimize_ops):
if self._is_opt_op(op) and self._is_opt_op_on_pserver(endpoint, op):
opt_op_on_pserver.append(op)
# step 4.3
# Iterate through the ops, and if an op and the optimize ops
# which located on current pserver are in one set, then
# append it into the sub program.
for _, op in enumerate(self.optimize_ops):
for _, opt_op in enumerate(opt_op_on_pserver):
if ufind.is_connected(op, opt_op):
if self._is_opt_op(op):
self._append_pserver_ops(
optimize_block, op, endpoint, default_main_program()
)
else:
self._append_pserver_non_opt_ops(optimize_block, op)
break
# step5 append the listen_and_serv op
pserver_program.global_block().append_op(
type="listen_and_serv",
inputs={"X": recv_inputs},
outputs={},
attrs={
"OptimizeBlock": optimize_block,
"endpoint": endpoint,
"Fanin": self.trainers,
},
)
pserver_program.sync_with_cpp()
return pserver_program
|
def get_pserver_program(self, endpoint):
"""
Get pserver side program using the endpoint.
NOTE: assume blocks of the same variable is not distributed
on the same pserver, only change param/grad varnames for
trainers to fetch.
"""
# step1
pserver_program = Program()
# step2
recv_inputs = []
for v in self.param_grad_ep_mapping[endpoint]["params"]:
self._clone_var(pserver_program.global_block(), v)
for v in self.param_grad_ep_mapping[endpoint]["grads"]:
# create vars for each trainer in global scope, so
# we don't need to create them when grad arrives.
# change client side var name to origin name by
# removing ".trainer_%d" suffix
suff_idx = v.name.find(".trainer_")
if suff_idx >= 0:
orig_var_name = v.name[:suff_idx]
pserver_program.global_block().create_var(
name=orig_var_name,
persistable=True,
type=v.type,
dtype=v.dtype,
shape=v.shape,
)
for trainer_id in xrange(self.trainers):
var = pserver_program.global_block().create_var(
name="%s.trainer_%d" % (orig_var_name, trainer_id),
persistable=False,
type=v.type,
dtype=v.dtype,
shape=v.shape,
)
recv_inputs.append(var)
# step3
optimize_block = pserver_program.create_block(0)
# step 4
# Create a union-find data struct from optimize ops,
# If two ops are connected, we could add these two ops
# into one set.
ufind = self._create_ufind(self.optimize_ops)
# step 4.2
# Iterate through the ops and append optimize op which
# located on current pserver
opt_op_on_pserver = []
for _, op in enumerate(self.optimize_ops):
if self._is_opt_op(op) and self._is_opt_op_on_pserver(endpoint, op):
opt_op_on_pserver.append(op)
# step 4.3
# Iterate through the ops, and if an op and the optimize ops
# which located on current pserver are in one set, then
# append it into the sub program.
for _, op in enumerate(self.optimize_ops):
for _, opt_op in enumerate(opt_op_on_pserver):
if ufind.is_connected(op, opt_op):
if self._is_opt_op(op):
self._append_pserver_ops(optimize_block, op, endpoint)
else:
self._append_pserver_non_opt_ops(optimize_block, op)
break
# step5 append the listen_and_serv op
pserver_program.global_block().append_op(
type="listen_and_serv",
inputs={"X": recv_inputs},
outputs={},
attrs={
"OptimizeBlock": optimize_block,
"endpoint": endpoint,
"Fanin": self.trainers,
},
)
pserver_program.sync_with_cpp()
return pserver_program
|
https://github.com/PaddlePaddle/Paddle/issues/9019
|
Traceback (most recent call last):
File "dist_test.py", line 281, in <module>
main(False, False, "conv", False)
File "dist_test.py", line 237, in main
params_filename=params_filename)
File "dist_test.py", line 177, in train
pserver_prog = t.get_pserver_program(current_endpoint)
File "/paddle/build/python/build/lib-python/paddle/fluid/distribute_transpiler.py", line 312, in get_pserver_program
self._append_pserver_ops(optimize_block, op, endpoint)
File "/paddle/build/python/build/lib-python/paddle/fluid/distribute_transpiler.py", line 579, in _append_pserver_ops
new_inputs[key] = pserver_block.vars[opt_op.input(key)[0]]
KeyError: u'learning_rate_0'
|
KeyError
|
def _append_pserver_ops(self, optimize_block, opt_op, endpoint, origin_program):
program = optimize_block.program
pserver_block = program.global_block()
new_inputs = dict()
# update param/grad shape first, then other inputs like
# moment can use the updated shape
for key in opt_op.input_names:
if key == "Grad":
grad_block = None
for g in self.param_grad_ep_mapping[endpoint]["grads"]:
if same_or_split_var(self._orig_varname(g.name), opt_op.input(key)[0]):
grad_block = g
break
if not grad_block:
# do not append this op if current endpoint
# is not dealing with this grad block
return
merged_var = pserver_block.vars[self._orig_varname(grad_block.name)]
if self.trainers > 1:
vars2merge = []
for i in xrange(self.trainers):
per_trainer_name = "%s.trainer_%d" % (
self._orig_varname(grad_block.name),
i,
)
vars2merge.append(pserver_block.vars[per_trainer_name])
optimize_block.append_op(
type="sum", inputs={"X": vars2merge}, outputs={"Out": merged_var}
)
if not merged_var.type == core.VarDesc.VarType.SELECTED_ROWS:
optimize_block.append_op(
type="scale",
inputs={"X": merged_var},
outputs={"Out": merged_var},
attrs={"scale": 1.0 / float(self.trainers)},
)
new_inputs[key] = merged_var
elif key == "Param":
# param is already created on global program
param_block = None
for p in self.param_grad_ep_mapping[endpoint]["params"]:
if same_or_split_var(p.name, opt_op.input(key)[0]):
param_block = p
break
if not param_block:
return
tmpvar = pserver_block.create_var(
name=param_block.name,
persistable=True,
dtype=param_block.dtype,
shape=param_block.shape,
)
new_inputs[key] = tmpvar
elif key == "LearningRate":
# leraning rate variable has already be created by non-optimize op,
# don't create it once again.
lr_varname = opt_op.input(key)[0]
if pserver_block.vars.has_key(lr_varname):
new_inputs[key] = pserver_block.vars[opt_op.input(key)[0]]
else:
origin_var = origin_program.global_block().vars[lr_varname]
tmpvar = pserver_block.create_var(
name=origin_var.name,
persistable=origin_var.persistable,
dtype=origin_var.dtype,
shape=origin_var.shape,
)
new_inputs[key] = tmpvar
for key in opt_op.input_names:
new_shape = None
if key in ["Param", "Grad", "LearningRate"]:
continue
var = self.program.global_block().vars[opt_op.input(key)[0]]
# update accumulator variable shape
param_shape = new_inputs["Param"].shape
new_shape = self._get_optimizer_input_shape(
opt_op.type, key, var.shape, param_shape
)
tmpvar = pserver_block.create_var(
name=var.name, persistable=var.persistable, dtype=var.dtype, shape=new_shape
)
new_inputs[key] = tmpvar
# change output's ParamOut variable
outputs = self._get_output_map_from_op(self.program.global_block().vars, opt_op)
outputs["ParamOut"] = new_inputs["Param"]
optimize_block.append_op(
type=opt_op.type, inputs=new_inputs, outputs=outputs, attrs=opt_op.attrs
)
|
def _append_pserver_ops(self, optimize_block, opt_op, endpoint):
program = optimize_block.program
pserver_block = program.global_block()
new_inputs = dict()
# update param/grad shape first, then other inputs like
# moment can use the updated shape
for key in opt_op.input_names:
if key == "Grad":
grad_block = None
for g in self.param_grad_ep_mapping[endpoint]["grads"]:
if same_or_split_var(self._orig_varname(g.name), opt_op.input(key)[0]):
grad_block = g
break
if not grad_block:
# do not append this op if current endpoint
# is not dealing with this grad block
return
merged_var = pserver_block.vars[self._orig_varname(grad_block.name)]
if self.trainers > 1:
vars2merge = []
for i in xrange(self.trainers):
per_trainer_name = "%s.trainer_%d" % (
self._orig_varname(grad_block.name),
i,
)
vars2merge.append(pserver_block.vars[per_trainer_name])
optimize_block.append_op(
type="sum", inputs={"X": vars2merge}, outputs={"Out": merged_var}
)
if not merged_var.type == core.VarDesc.VarType.SELECTED_ROWS:
optimize_block.append_op(
type="scale",
inputs={"X": merged_var},
outputs={"Out": merged_var},
attrs={"scale": 1.0 / float(self.trainers)},
)
new_inputs[key] = merged_var
elif key == "Param":
# param is already created on global program
param_block = None
for p in self.param_grad_ep_mapping[endpoint]["params"]:
if same_or_split_var(p.name, opt_op.input(key)[0]):
param_block = p
break
if not param_block:
return
tmpvar = pserver_block.create_var(
name=param_block.name,
persistable=True,
dtype=param_block.dtype,
shape=param_block.shape,
)
new_inputs[key] = tmpvar
elif key == "LearningRate":
# leraning rate variable has already be created by non-optimize op,
# don't create it once again.
new_inputs[key] = pserver_block.vars[opt_op.input(key)[0]]
for key in opt_op.input_names:
new_shape = None
if key in ["Param", "Grad", "LearningRate"]:
continue
var = self.program.global_block().vars[opt_op.input(key)[0]]
# update accumulator variable shape
param_shape = new_inputs["Param"].shape
new_shape = self._get_optimizer_input_shape(
opt_op.type, key, var.shape, param_shape
)
tmpvar = pserver_block.create_var(
name=var.name, persistable=var.persistable, dtype=var.dtype, shape=new_shape
)
new_inputs[key] = tmpvar
# change output's ParamOut variable
outputs = self._get_output_map_from_op(self.program.global_block().vars, opt_op)
outputs["ParamOut"] = new_inputs["Param"]
optimize_block.append_op(
type=opt_op.type, inputs=new_inputs, outputs=outputs, attrs=opt_op.attrs
)
|
https://github.com/PaddlePaddle/Paddle/issues/9019
|
Traceback (most recent call last):
File "dist_test.py", line 281, in <module>
main(False, False, "conv", False)
File "dist_test.py", line 237, in main
params_filename=params_filename)
File "dist_test.py", line 177, in train
pserver_prog = t.get_pserver_program(current_endpoint)
File "/paddle/build/python/build/lib-python/paddle/fluid/distribute_transpiler.py", line 312, in get_pserver_program
self._append_pserver_ops(optimize_block, op, endpoint)
File "/paddle/build/python/build/lib-python/paddle/fluid/distribute_transpiler.py", line 579, in _append_pserver_ops
new_inputs[key] = pserver_block.vars[opt_op.input(key)[0]]
KeyError: u'learning_rate_0'
|
KeyError
|
def begin_parse():
init_config_environment()
for hook in _parse_config_hooks:
hook()
logger.findCaller = find_caller
logger.fatal = my_fatal
g_config.model_config.type = "nn"
global g_current_submodel, g_root_submodel
g_root_submodel = g_config.model_config.sub_models.add()
g_root_submodel.name = "root"
g_root_submodel.is_recurrent_layer_group = False
g_current_submodel = g_root_submodel
|
def begin_parse(config_arg_str=""):
"""
@param config_arg_str: a string of the form var1=val1,var2=val2. It will be
passed to config script as a dictionary CONFIG_ARGS
"""
init_config_environment()
for hook in _parse_config_hooks:
hook()
logger.findCaller = find_caller
logger.fatal = my_fatal
g_config.model_config.type = "nn"
global g_current_submodel, g_root_submodel
g_root_submodel = g_config.model_config.sub_models.add()
g_root_submodel.name = "root"
g_root_submodel.is_recurrent_layer_group = False
g_current_submodel = g_root_submodel
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def parse_config(trainer_config, config_arg_str):
"""
@param config_arg_str: a string of the form var1=val1,var2=val2. It will be
passed to config script as a dictionary CONFIG_ARGS
"""
begin_parse()
config_args = {}
if config_arg_str:
config_args = dict([f.split("=") for f in config_arg_str.split(",")])
global g_command_config_args
g_command_config_args.update(config_args)
extension_module_name = config_args.get("extension_module_name")
if extension_module_name:
global g_extended_config_funcs
extension_module = importlib(extension_module_name)
g_extended_config_funcs = extension_module.get_config_funcs(g_config)
if hasattr(trainer_config, "__call__"):
trainer_config.func_globals.update(make_config_environment("", config_args))
trainer_config()
else:
execfile(trainer_config, make_config_environment(trainer_config, config_args))
return update_g_config()
|
def parse_config(trainer_config, config_arg_str):
begin_parse(config_arg_str)
config_args = {}
if config_arg_str:
config_args = dict([f.split("=") for f in config_arg_str.split(",")])
global g_command_config_args
g_command_config_args.update(config_args)
extension_module_name = config_args.get("extension_module_name")
if extension_module_name:
global g_extended_config_funcs
extension_module = importlib(extension_module_name)
g_extended_config_funcs = extension_module.get_config_funcs(g_config)
if hasattr(trainer_config, "__call__"):
trainer_config.func_globals.update(make_config_environment("", config_args))
trainer_config()
else:
execfile(trainer_config, make_config_environment(trainer_config, config_args))
return update_g_config()
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def beam_search(
step,
input,
bos_id,
eos_id,
beam_size,
max_length=500,
name=None,
num_results_per_sample=None,
):
"""
Beam search is a heuristic search algorithm used in sequence generation.
It explores a graph by expanding the most promising nodes in a limited set
to maintain tractability.
The example usage is:
.. code-block:: python
def rnn_step(input):
last_time_step_output = memory(name='rnn', size=512)
with mixed_layer(size=512, name='rnn') as simple_rnn:
simple_rnn += full_matrix_projection(input)
simple_rnn += last_time_step_output
return simple_rnn
generated_word_embedding = GeneratedInput(
size=target_dictionary_dim,
embedding_name="target_language_embedding",
embedding_size=word_vector_dim)
beam_gen = beam_search(name="decoder",
step=rnn_step,
input=[StaticInput(encoder_last),
generated_word_embedding],
bos_id=0,
eos_id=1,
beam_size=5)
Please see the following demo for more details:
- machine translation : demo/seqToseq/translation/gen.conf \
demo/seqToseq/seqToseq_net.py
:param name: Name of the recurrent unit that generates sequences.
:type name: base string
:param step: A callable function that defines the calculation in a time
step, and it is applied to sequences with arbitrary length by
sharing a same set of weights.
You can refer to the first parameter of recurrent_group, or
demo/seqToseq/seqToseq_net.py for more details.
:type step: callable
:param input: Input data for the recurrent unit, which should include the
previously generated words as a GeneratedInput object.
:type input: list
:param bos_id: Index of the start symbol in the dictionary. The start symbol
is a special token for NLP task, which indicates the
beginning of a sequence. In the generation task, the start
symbol is essential, since it is used to initialize the RNN
internal state.
:type bos_id: int
:param eos_id: Index of the end symbol in the dictionary. The end symbol is
a special token for NLP task, which indicates the end of a
sequence. The generation process will stop once the end
symbol is generated, or a pre-defined max iteration number
is exceeded.
:type eos_id: int
:param max_length: Max generated sequence length.
:type max_length: int
:param beam_size: Beam search for sequence generation is an iterative search
algorithm. To maintain tractability, every iteration only
only stores a predetermined number, called the beam_size,
of the most promising next words. The greater the beam
size, the fewer candidate words are pruned.
:type beam_size: int
:param num_results_per_sample: Number of the generated results per input
sequence. This number must always be less than
beam size.
:type num_results_per_sample: int
:return: The generated word index.
:rtype: LayerOutput
"""
if num_results_per_sample is None:
num_results_per_sample = beam_size
if num_results_per_sample > beam_size:
logger.warning("num_results_per_sample should be less than beam_size")
if isinstance(input, StaticInput) or isinstance(input, BaseGeneratedInput):
input = [input]
generated_input_index = -1
real_input = []
for i, each_input in enumerate(input):
assert isinstance(each_input, StaticInput) or isinstance(
each_input, BaseGeneratedInput
)
if isinstance(each_input, BaseGeneratedInput):
assert generated_input_index == -1
generated_input_index = i
else:
real_input.append(each_input)
assert generated_input_index != -1
gipt = input[generated_input_index]
gipt.bos_id = bos_id
gipt.eos_id = eos_id
def __real_step__(*args):
eos_name = "__%s_eos_layer__" % name
RecurrentLayerGroupSetGenerator(
Generator(
eos_layer_name=eos_name,
max_num_frames=max_length,
beam_size=beam_size,
num_results_per_sample=num_results_per_sample,
)
)
args = list(args)
args.insert(generated_input_index, gipt.before_real_step())
predict = gipt.after_real_step(step(*args))
eos_layer(input=predict, eos_id=eos_id, name=eos_name)
return predict
tmp = recurrent_group(
step=__real_step__,
input=real_input,
reverse=False,
name=name,
is_generating=True,
)
return tmp
|
def beam_search(
step,
input,
bos_id,
eos_id,
beam_size,
max_length=500,
name=None,
num_results_per_sample=None,
):
"""
Beam search is a heuristic search algorithm used in sequence generation.
It explores a graph by expanding the most promising nodes in a limited set
to maintain tractability.
The example usage is:
.. code-block:: python
def rnn_step(input):
last_time_step_output = memory(name='rnn', size=512)
with mixed_layer(size=512, name='rnn') as simple_rnn:
simple_rnn += full_matrix_projection(input)
simple_rnn += last_time_step_output
return simple_rnn
generated_word_embedding = GeneratedInput(
size=target_dictionary_dim,
embedding_name="target_language_embedding",
embedding_size=word_vector_dim)
beam_gen = beam_search(name="decoder",
step=rnn_step,
input=[StaticInput(encoder_last),
generated_word_embedding],
bos_id=0,
eos_id=1,
beam_size=5)
Please see the following demo for more details:
- machine translation : demo/seqToseq/translation/gen.conf \
demo/seqToseq/seqToseq_net.py
:param name: Name of the recurrent unit that generates sequences.
:type name: base string
:param step: A callable function that defines the calculation in a time
step, and it is applied to sequences with arbitrary length by
sharing a same set of weights.
You can refer to the first parameter of recurrent_group, or
demo/seqToseq/seqToseq_net.py for more details.
:type step: callable
:param input: Input data for the recurrent unit, which should include the
previously generated words as a GeneratedInput object.
:type input: list
:param bos_id: Index of the start symbol in the dictionary. The start symbol
is a special token for NLP task, which indicates the
beginning of a sequence. In the generation task, the start
symbol is essential, since it is used to initialize the RNN
internal state.
:type bos_id: int
:param eos_id: Index of the end symbol in the dictionary. The end symbol is
a special token for NLP task, which indicates the end of a
sequence. The generation process will stop once the end
symbol is generated, or a pre-defined max iteration number
is exceeded.
:type eos_id: int
:param max_length: Max generated sequence length.
:type max_length: int
:param beam_size: Beam search for sequence generation is an iterative search
algorithm. To maintain tractability, every iteration only
only stores a predetermined number, called the beam_size,
of the most promising next words. The greater the beam
size, the fewer candidate words are pruned.
:type beam_size: int
:param num_results_per_sample: Number of the generated results per input
sequence. This number must always be less than
beam size.
:type num_results_per_sample: int
:return: The generated word index.
:rtype: LayerOutput
"""
if num_results_per_sample is None:
num_results_per_sample = beam_size
if num_results_per_sample > beam_size:
logger.warning("num_results_per_sample should be less than beam_size")
if isinstance(input, StaticInput) or isinstance(input, BaseGeneratedInput):
input = [input]
generated_input_index = -1
real_input = []
for i, each_input in enumerate(input):
assert isinstance(each_input, StaticInput) or isinstance(
each_input, BaseGeneratedInput
)
if isinstance(each_input, BaseGeneratedInput):
assert generated_input_index == -1
generated_input_index = i
else:
real_input.append(each_input)
assert generated_input_index != -1
gipt = input[generated_input_index]
assert isinstance(gipt, BaseGeneratedInput)
gipt.bos_id = bos_id
gipt.eos_id = eos_id
def __real_step__(*args):
eos_name = "__%s_eos_layer__" % name
RecurrentLayerGroupSetGenerator(
Generator(
eos_layer_name=eos_name,
max_num_frames=max_length,
beam_size=beam_size,
num_results_per_sample=num_results_per_sample,
)
)
args = list(args)
args.insert(generated_input_index, gipt.before_real_step())
predict = gipt.after_real_step(step(*args))
eos_layer(input=predict, eos_id=eos_id, name=eos_name)
return predict
tmp = recurrent_group(
step=__real_step__,
input=real_input,
reverse=False,
name=name,
is_generating=True,
)
return tmp
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def __need_to_keep__(name):
return name in [
"StaticInput",
"SubsequenceInput",
"GeneratedInput",
"LayerType",
"layer_support",
]
|
def __need_to_keep__(name):
if name in ["StaticInput", "LayerType", "layer_support"]:
return False
return True
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def __convert_name__(inname):
if __need_to_keep__(inname):
return inname
if inname == "maxid_layer":
return "max_id"
elif (
inname.endswith("memory")
or inname.endswith("_seq")
or inname.endswith("_sim")
or inname == "hsigmoid"
):
return inname
elif inname in [
"cross_entropy",
"multi_binary_label_cross_entropy",
"cross_entropy_with_selfnorm",
]:
return inname + "_cost"
elif inname.endswith("_cost"):
return inname
elif inname.endswith("_layer"):
return inname[: -len("_layer")]
else:
return inname
|
def __convert_name__(inname):
if inname == "maxid_layer":
return "max_id"
elif (
inname.endswith("memory")
or inname.endswith("_seq")
or inname.endswith("_sim")
or inname == "hsigmoid"
):
return inname
elif inname in [
"cross_entropy",
"multi_binary_label_cross_entropy",
"cross_entropy_with_selfnorm",
]:
return inname + "_cost"
elif inname.endswith("_cost"):
return inname
elif inname.endswith("_layer"):
return inname[: -len("_layer")]
else:
return inname
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def __get_used_layers__(output_layers):
layer_names = set()
parents = {}
def add_parent(child, parent):
if child in parents:
parents[child].append(parent)
else:
parents[child] = [parent]
def add_additional_parents():
for sub_model in cp.g_config.model_config.sub_models:
if sub_model.name == "root":
continue
for link in sub_model.in_links:
add_parent(link.link_name, link.layer_name)
add_parent(sub_model.name, link.layer_name)
for link in sub_model.out_links:
add_parent(link.link_name, link.layer_name)
add_parent(link.link_name, sub_model.name)
for mem in sub_model.memories:
if mem.boot_layer_name:
add_parent(mem.layer_name, mem.boot_layer_name)
add_parent(mem.link_name, mem.layer_name)
if sub_model.HasField("generator"):
# according to the implementation of text generation
# in recurrent layer group, the generated word must be
# the first out link
add_parent(
sub_model.out_links[0].layer_name,
sub_model.generator.eos_layer_name,
)
def dfs_travel(layer_name):
if layer_name in layer_names:
return
layer_names.add(layer_name)
layer = cp.g_layer_map[layer_name]
for inp in layer.inputs:
dfs_travel(inp.input_layer_name)
if layer.name in parents:
for p in parents[layer.name]:
dfs_travel(p)
add_additional_parents()
for layer in output_layers:
dfs_travel(layer.full_name)
# print layer needs to be specially handled because no other
# layer depends on it. It is used to print the result of some
# layers when running the model for debug purpose. So we explicitly
# add a print layer to the topolty if its input is in the toplogy.
for layer in cp.g_config.model_config.layers:
if layer.type == "print":
used = True
for inp in layer.inputs:
if inp.input_layer_name not in layer_names:
used = False
break
if used:
layer_names.add(layer.name)
return layer_names
|
def __get_used_layers__(output_layers, extra_layers=None):
layer_names = set()
parents = {}
def add_parent(child, parent):
if child in parents:
parents[child].append(parent)
else:
parents[child] = [parent]
def add_additional_parents():
for sub_model in cp.g_config.model_config.sub_models:
if sub_model.name == "root":
continue
for link in sub_model.in_links:
add_parent(link.link_name, link.layer_name)
add_parent(sub_model.name, link.layer_name)
for link in sub_model.out_links:
add_parent(link.link_name, link.layer_name)
add_parent(link.link_name, sub_model.name)
for mem in sub_model.memories:
if mem.boot_layer_name:
add_parent(mem.layer_name, mem.boot_layer_name)
add_parent(mem.link_name, mem.layer_name)
def dfs_travel(layer_name):
if layer_name in layer_names:
return
layer_names.add(layer_name)
layer = cp.g_layer_map[layer_name]
for inp in layer.inputs:
dfs_travel(inp.input_layer_name)
if layer.name in parents:
for p in parents[layer.name]:
dfs_travel(p)
add_additional_parents()
for layer in output_layers:
dfs_travel(layer.full_name)
# print layer needs to be specially handled because no other
# layer depends on it. It is used to print the result of some
# layers when running the model for debug purpose. So we explicitly
# add a print layer to the topolty if its input is in the toplogy.
for layer in cp.g_config.model_config.layers:
if layer.type == "print":
used = True
for inp in layer.inputs:
if inp.input_layer_name not in layer_names:
used = False
break
if used:
layer_names.add(layer.name)
return layer_names
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def add_additional_parents():
for sub_model in cp.g_config.model_config.sub_models:
if sub_model.name == "root":
continue
for link in sub_model.in_links:
add_parent(link.link_name, link.layer_name)
add_parent(sub_model.name, link.layer_name)
for link in sub_model.out_links:
add_parent(link.link_name, link.layer_name)
add_parent(link.link_name, sub_model.name)
for mem in sub_model.memories:
if mem.boot_layer_name:
add_parent(mem.layer_name, mem.boot_layer_name)
add_parent(mem.link_name, mem.layer_name)
if sub_model.HasField("generator"):
# according to the implementation of text generation
# in recurrent layer group, the generated word must be
# the first out link
add_parent(
sub_model.out_links[0].layer_name, sub_model.generator.eos_layer_name
)
|
def add_additional_parents():
for sub_model in cp.g_config.model_config.sub_models:
if sub_model.name == "root":
continue
for link in sub_model.in_links:
add_parent(link.link_name, link.layer_name)
add_parent(sub_model.name, link.layer_name)
for link in sub_model.out_links:
add_parent(link.link_name, link.layer_name)
add_parent(link.link_name, sub_model.name)
for mem in sub_model.memories:
if mem.boot_layer_name:
add_parent(mem.layer_name, mem.boot_layer_name)
add_parent(mem.link_name, mem.layer_name)
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def parse_network(output_layers, extra_layers=None):
if not isinstance(output_layers, collections.Sequence):
output_layers = [output_layers]
if extra_layers is not None:
if not isinstance(extra_layers, collections.Sequence):
extra_layers = [extra_layers]
else:
extra_layers = []
layer_names = __get_used_layers__(output_layers + extra_layers)
submodel_names = __get_used_submodels__(layer_names)
submodel_names.add("root")
evaluator_names = __get_used_evaluators__(layer_names)
input_layer_names = set()
output_layer_names = set()
model_config = ModelConfig()
model_config.type = cp.g_config.model_config.type
for layer in output_layers:
model_config.output_layer_names.append(layer.full_name)
output_layer_names.add(layer.full_name)
for l in cp.g_config.model_config.layers:
if l.name not in layer_names:
continue
model_config.layers.extend([l])
if l.type == "data":
if l.name in model_config.output_layer_names:
"""
In text generation, the outlink to save the generated word
indices is a data_layer defined in recurrent_group. This
data_layer is sure to be the output of the network in text
generation task, so this statement excludes such a special
data_layer from being inputs of the network, otherwise an error
will occur during data feeding.
"""
continue
model_config.input_layer_names.append(l.name)
input_layer_names.add(l.name)
for e in cp.g_config.model_config.evaluators:
if e.name in evaluator_names:
model_config.evaluators.extend([e])
for s in cp.g_config.model_config.sub_models:
if s.name in submodel_names:
s = __trim_submodel__(
s, layer_names, input_layer_names, output_layer_names, evaluator_names
)
model_config.sub_models.extend([s])
parameter_names = __get_used_parameters__(layer_names, model_config.sub_models)
for p in cp.g_config.model_config.parameters:
if p.name in parameter_names:
model_config.parameters.extend([p])
return model_config
|
def parse_network(output_layers, extra_layers=None):
if not isinstance(output_layers, collections.Sequence):
output_layers = [output_layers]
if extra_layers is not None and not isinstance(extra_layers, collections.Sequence):
extra_layers = [extra_layers]
else:
extra_layers = []
layer_names = __get_used_layers__(output_layers + extra_layers)
submodel_names = __get_used_submodels__(layer_names)
submodel_names.add("root")
evaluator_names = __get_used_evaluators__(layer_names)
input_layer_names = set()
output_layer_names = set()
model_config = ModelConfig()
model_config.type = cp.g_config.model_config.type
for l in cp.g_config.model_config.layers:
if l.name not in layer_names:
continue
model_config.layers.extend([l])
if l.type == "data":
model_config.input_layer_names.append(l.name)
input_layer_names.add(l.name)
for layer in output_layers:
model_config.output_layer_names.append(layer.full_name)
output_layer_names.add(layer.full_name)
for e in cp.g_config.model_config.evaluators:
if e.name in evaluator_names:
model_config.evaluators.extend([e])
for s in cp.g_config.model_config.sub_models:
if s.name in submodel_names:
s = __trim_submodel__(
s, layer_names, input_layer_names, output_layer_names, evaluator_names
)
model_config.sub_models.extend([s])
parameter_names = __get_used_parameters__(layer_names, model_config.sub_models)
for p in cp.g_config.model_config.parameters:
if p.name in parameter_names:
model_config.parameters.extend([p])
return model_config
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def __init__(self, layers, extra_layers=None):
def __check__(layers):
if not isinstance(layers, collections.Sequence):
layers = [layers]
for layer in layers:
__check_layer_type__(layer)
return layers
layers = __check__(layers)
self.layers = layers
if extra_layers is not None:
extra_layers = __check__(extra_layers)
self.__model_config__ = v2_layer.parse_network(layers, extra_layers=extra_layers)
if extra_layers is not None:
self.layers.extend(extra_layers)
assert isinstance(self.__model_config__, ModelConfig)
|
def __init__(self, layers, extra_layers=None):
def __check__(layers):
if not isinstance(layers, collections.Sequence):
__check_layer_type__(layers)
layers = [layers]
for layer in layers:
__check_layer_type__(layer)
return layers
layers = __check__(layers)
self.layers = layers
if extra_layers is not None:
extra_layers = __check__(extra_layers)
self.__model_config__ = v2_layer.parse_network(layers, extra_layers=extra_layers)
if extra_layers is not None:
self.layers.extend(extra_layers)
assert isinstance(self.__model_config__, ModelConfig)
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def __check__(layers):
if not isinstance(layers, collections.Sequence):
layers = [layers]
for layer in layers:
__check_layer_type__(layer)
return layers
|
def __check__(layers):
if not isinstance(layers, collections.Sequence):
__check_layer_type__(layers)
layers = [layers]
for layer in layers:
__check_layer_type__(layer)
return layers
|
https://github.com/PaddlePaddle/Paddle/issues/2349
|
AttributeError Traceback (most recent call last)
<ipython-input-5-5ce86945bbbe> in <module>()
----> 1 cost = seqToseq_net(source_dict_dim, target_dict_dim)
/Users/liling.tan/seqtoseq.py in seqToseq_net(source_dict_dim, target_dict_dim, is_generating)
160
161 decoder_group_name = "decoder_group"
--> 162 group_input1 = paddle.layer.StaticInputV2(input=encoded_vector, is_seq=True)
163 group_input2 = StaticInputV2(input=encoded_proj, is_seq=True)
164 group_inputs = [group_input1, group_input2]
AttributeError: 'module' object has no attribute 'StaticInputV2'
|
AttributeError
|
def serialize(self, name, f):
"""
:param name:
:param f:
:type f: file
:return:
"""
param = self.get(name)
size = reduce(lambda a, b: a * b, param.shape)
f.write(struct.pack("IIQ", 0, 4, size))
param = param.astype(np.float32)
f.write(param.tostring())
|
def serialize(self, name, f):
"""
:param name:
:param f:
:type f: file
:return:
"""
param = self.get(name)
size = reduce(lambda a, b: a * b, param.shape)
f.write(struct.pack("IIQ", 0, 4, size))
param = param.astype(np.float32)
f.write(param.tobytes())
|
https://github.com/PaddlePaddle/Paddle/issues/2036
|
/paddle/build {develop} ctest -R test_v2_api -V
UpdateCTestConfiguration from :/paddle/build/DartConfiguration.tcl
UpdateCTestConfiguration from :/paddle/build/DartConfiguration.tcl
Test project /paddle/build
Constructing a list of tests
Done constructing a list of tests
Checking test dependency graph...
Checking test dependency graph end
test 67
Start 67: test_v2_api
67: Test command: /bin/bash "/paddle/python/paddle/v2/tests/run_tests.sh" "/usr/bin/python2.7"
67: Test timeout computed to be: 9.99988e+06
67: Processing /paddle/paddle/dist/py_paddle-0.10.0-py2-none-any.whl
67: Requirement already satisfied: protobuf==3.1 in /usr/local/lib/python2.7/dist-packages (from py-paddle==0.10.0)
67: Requirement already satisfied: numpy>=1.8.0 in /usr/lib/python2.7/dist-packages (from py-paddle==0.10.0)
67: Requirement already satisfied: nltk>=3.2.2 in /usr/local/lib/python2.7/dist-packages (from py-paddle==0.10.0)
67: Requirement already satisfied: six>=1.9 in /usr/local/lib/python2.7/dist-packages (from protobuf==3.1->py-paddle==0.10.0)
67: Requirement already satisfied: setuptools in /usr/local/lib/python2.7/dist-packages (from protobuf==3.1->py-paddle==0.10.0)
67: Requirement already satisfied: appdirs>=1.4.0 in /usr/local/lib/python2.7/dist-packages (from setuptools->protobuf==3.1->py-paddle==0.10.0)
67: Requirement already satisfied: packaging>=16.8 in /usr/local/lib/python2.7/dist-packages (from setuptools->protobuf==3.1->py-paddle==0.10.0)
67: Requirement already satisfied: pyparsing in /usr/local/lib/python2.7/dist-packages (from packaging>=16.8->setuptools->protobuf==3.1->py-paddle==0.10.0)
67: Installing collected packages: py-paddle
67: Successfully installed py-paddle-0.10.0
67: test test_data_feeder.py
67: I0506 10:55:05.557036 24457 Util.cpp:166] commandline: --use_gpu=0
67: .......
67: ----------------------------------------------------------------------
67: Ran 7 tests in 0.011s
67:
67: OK
67: .......
67: ----------------------------------------------------------------------
67: Ran 7 tests in 0.359s
67:
67: OK
67: test test_parameters.py
67: E
67: ======================================================================
67: ERROR: test_serialization (__main__.TestParameters)
67: ----------------------------------------------------------------------
67: Traceback (most recent call last):
67: File "test_parameters.py", line 46, in test_serialization
67: params.to_tar(tmp_file)
67: File "/paddle/python/paddle/v2/parameters.py", line 270, in to_tar
67: self.serialize(nm, buf)
67: File "/paddle/python/paddle/v2/parameters.py", line 252, in serialize
67: f.write(param.tobytes())
67: AttributeError: 'numpy.ndarray' object has no attribute 'tobytes'
67:
67: ----------------------------------------------------------------------
67: Ran 1 test in 0.007s
67:
67: FAILED (errors=1)
1/1 Test #67: test_v2_api ......................***Failed 2.17 sec
0% tests passed, 1 tests failed out of 1
|
AttributeError
|
def array_back(
param, nodes, vul_function=None, file_path=None, isback=None
): # 回溯数组定义赋值
"""
递归回溯数组赋值定义
:param isback:
:param file_path:
:param vul_function:
:param param:
:param nodes:
:return:
"""
param_name = param.node.name
param_expr = param.expr
is_co = 3
cp = param
expr_lineno = param.lineno
for node in nodes[::-1]:
if isinstance(node, php.Assignment):
param_node_name = get_node_name(node.node)
param_node = node.node
param_node_expr = node.expr
if (
param_node_name == param_name or param == param_node
): # 处理数组中值被改变的问题
if isinstance(param_node_expr, php.Array):
for p_node in node.expr.nodes:
if p_node.key == param_expr:
if isinstance(
p_node.value, php.ArrayOffset
): # 如果赋值值仍然是数组,先经过判断在进入递归
is_co, cp = is_controllable(p_node.value.node.name)
if is_co != 1:
is_co, cp, expr_lineno = array_back(
param, nodes, file_path=file_path, isback=isback
)
else:
n_node = php.Variable(p_node.value)
is_co, cp, expr_lineno = parameters_back(
n_node,
nodes,
vul_function=vul_function,
file_path=file_path,
isback=isback,
)
# if param == param_node: # 处理数组一次性赋值,左值为数组
if isinstance(
param_node_expr, php.ArrayOffset
): # 如果赋值值仍然是数组,先经过判断在进入递归
is_co, cp = is_controllable(param_node_expr.node.name)
if is_co != 1:
is_co, cp, expr_lineno = array_back(
param, nodes, file_path=file_path, isback=isback
)
else:
is_co, cp = is_controllable(param_node_expr)
if is_co != 1 and is_co != -1:
n_node = php.Variable(param_node_expr.node.value)
is_co, cp, expr_lineno = parameters_back(
n_node,
nodes,
vul_function=vul_function,
file_path=file_path,
isback=isback,
)
return is_co, cp, expr_lineno
|
def array_back(
param, nodes, vul_function=None, file_path=None, isback=None
): # 回溯数组定义赋值
"""
递归回溯数组赋值定义
:param isback:
:param file_path:
:param vul_function:
:param param:
:param nodes:
:return:
"""
param_name = param.node.name
param_expr = param.expr
is_co = 3
cp = param
expr_lineno = 0
# print nodes
for node in nodes[::-1]:
if isinstance(node, php.Assignment):
param_node_name = get_node_name(node.node)
param_node = node.node
param_node_expr = node.expr
if param_node_name == param_name: # 处理数组中值被改变的问题
if isinstance(node.expr, php.Array):
for p_node in node.expr.nodes:
if p_node.key == param_expr:
if isinstance(
p_node.value, php.ArrayOffset
): # 如果赋值值仍然是数组,先经过判断在进入递归
is_co, cp = is_controllable(p_node.value.node.name)
if is_co != 1:
is_co, cp, expr_lineno = array_back(
param, nodes, file_path=file_path, isback=isback
)
else:
n_node = php.Variable(p_node.value)
is_co, cp, expr_lineno = parameters_back(
n_node,
nodes,
vul_function=vul_function,
file_path=file_path,
isback=isback,
)
if param == param_node: # 处理数组一次性赋值,左值为数组
if isinstance(
param_node_expr, php.ArrayOffset
): # 如果赋值值仍然是数组,先经过判断在进入递归
is_co, cp = is_controllable(param_node_expr.node.name)
if is_co != 1:
is_co, cp, expr_lineno = array_back(
param, nodes, file_path=file_path, isback=isback
)
else:
is_co, cp = is_controllable(param_node_expr)
if is_co != 1 and is_co != -1:
n_node = php.Variable(param_node_expr.node.value)
is_co, cp, expr_lineno = parameters_back(
n_node,
nodes,
vul_function=vul_function,
file_path=file_path,
isback=isback,
)
return is_co, cp, expr_lineno
|
https://github.com/LoRexxar/Kunlun-M/issues/65
|
[DEBUG] [MainThread] [18:13:28] [engine.py:801] [RULE_MATCH] ['mysql_query', 'mysql_db_query']
[DEBUG] [MainThread] [18:13:28] [parser.py:1316] [AST] vul_function:mysql_query
[DEBUG] [MainThread] [18:13:28] [parser.py:1123] [AST] AST to find param Variable('$c')
[DEBUG] [MainThread] [18:13:28] [parser.py:598] [BT] param=Variable('$c'),nodes=[Function('random', [FormalParameter('$val', None, False, None)], [Assignment(Variable('$b'), ArrayOffset(Variable('$_GET'), 'maple'), False), Assignment(Variable('$c'), ArrayOffset(Variable('$b'), 0), False), FunctionCall('mysql_query', [Parameter(Variable('$c'), False)])], False)],function_params=None, lineno=6,function_flag=0,vul_function=mysql_query,file_path=/root/cobra/tests/vulnerabilities/sql.php,isback=False,parent_node=0
[DEBUG] [MainThread] [18:13:28] [parser.py:793] [AST] param $c line 6 in function random line 3, start ast in function
[DEBUG] [MainThread] [18:13:28] [parser.py:598] [BT] param=Variable('$c'),nodes=[Assignment(Variable('$b'), ArrayOffset(Variable('$_GET'), 'maple'), False), Assignment(Variable('$c'), ArrayOffset(Variable('$b'), 0), False)],function_params=[FormalParameter('$val', None, False, None)], lineno=3,function_flag=1,vul_function=mysql_query,file_path=/root/cobra/tests/vulnerabilities/sql.php,isback=False,parent_node=None
[DEBUG] [MainThread] [18:13:28] [parser.py:641] [AST] Find $c=$b in line 5, start ast for param $b
[DEBUG] [MainThread] [18:13:28] [parser.py:598] [BT] param=ArrayOffset(Variable('$b'), 0),nodes=[Assignment(Variable('$b'), ArrayOffset(Variable('$_GET'), 'maple'), False)],function_params=[FormalParameter('$val', None, False, None)], lineno=3,function_flag=1,vul_function=mysql_query,file_path=/root/cobra/tests/vulnerabilities/sql.php,isback=False,parent_node=0
[DEBUG] [MainThread] [18:13:28] [parser.py:615] [AST] AST analysis for ArrayOffset in line 5
[DEBUG] [MainThread] [18:13:28] [parser.py:1169] Traceback (most recent call last):
File "/root/Cobra-W/cobra/core_engine/php/parser.py", line 1155, in anlysis_function
file_path=file_path)
File "/root/Cobra-W/cobra/core_engine/php/parser.py", line 1322, in analysis_variable_node
is_co, cp, expr_lineno, chain = anlysis_params(param, file_path, param_lineno, vul_function=vul_function)
File "/root/Cobra-W/cobra/core_engine/php/parser.py", line 1133, in anlysis_params
vul_function=vul_function)
File "/root/Cobra-W/cobra/core_engine/php/parser.py", line 967, in deep_parameters_back
file_path=file_path, isback=isback, parent_node=0)
File "/root/Cobra-W/cobra/core_engine/php/parser.py", line 812, in parameters_back
if node_param.name == cp.name:
AttributeError: 'ArrayOffset' object has no attribute 'name'
[DEBUG] [MainThread] [18:13:28] [engine.py:809] [AST] [RET] []
|
AttributeError
|
def serve(
panels,
port=0,
address=None,
websocket_origin=None,
loop=None,
show=True,
start=True,
title=None,
verbose=True,
location=True,
threaded=False,
**kwargs,
):
"""
Allows serving one or more panel objects on a single server.
The panels argument should be either a Panel object or a function
returning a Panel object or a dictionary of these two. If a
dictionary is supplied the keys represent the slugs at which
each app is served, e.g. `serve({'app': panel1, 'app2': panel2})`
will serve apps at /app and /app2 on the server.
Arguments
---------
panel: Viewable, function or {str: Viewable or function}
A Panel object, a function returning a Panel object or a
dictionary mapping from the URL slug to either.
port: int (optional, default=0)
Allows specifying a specific port
address : str
The address the server should listen on for HTTP requests.
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
loop : tornado.ioloop.IOLoop (optional, default=IOLoop.current())
The tornado IOLoop to run the Server on
show : boolean (optional, default=False)
Whether to open the server in a new browser tab on start
start : boolean(optional, default=False)
Whether to start the Server
title: str or {str: str} (optional, default=None)
An HTML title for the application or a dictionary mapping
from the URL slug to a customized title
verbose: boolean (optional, default=True)
Whether to print the address and port
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
threaded: boolean (default=False)
Whether to start the server on a new Thread
kwargs: dict
Additional keyword arguments to pass to Server instance
"""
kwargs = dict(
kwargs,
**dict(
port=port,
address=address,
websocket_origin=websocket_origin,
loop=loop,
show=show,
start=start,
title=title,
verbose=verbose,
location=location,
),
)
if threaded:
from tornado.ioloop import IOLoop
kwargs["loop"] = loop = IOLoop() if loop is None else loop
server = StoppableThread(
target=get_server, io_loop=loop, args=(panels,), kwargs=kwargs
)
server.start()
else:
server = get_server(panels, **kwargs)
return server
|
def serve(
panels,
port=0,
address=None,
websocket_origin=None,
loop=None,
show=True,
start=True,
title=None,
verbose=True,
location=True,
**kwargs,
):
"""
Allows serving one or more panel objects on a single server.
The panels argument should be either a Panel object or a function
returning a Panel object or a dictionary of these two. If a
dictionary is supplied the keys represent the slugs at which
each app is served, e.g. `serve({'app': panel1, 'app2': panel2})`
will serve apps at /app and /app2 on the server.
Arguments
---------
panel: Viewable, function or {str: Viewable or function}
A Panel object, a function returning a Panel object or a
dictionary mapping from the URL slug to either.
port: int (optional, default=0)
Allows specifying a specific port
address : str
The address the server should listen on for HTTP requests.
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
loop : tornado.ioloop.IOLoop (optional, default=IOLoop.current())
The tornado IOLoop to run the Server on
show : boolean (optional, default=False)
Whether to open the server in a new browser tab on start
start : boolean(optional, default=False)
Whether to start the Server
title: str or {str: str} (optional, default=None)
An HTML title for the application or a dictionary mapping
from the URL slug to a customized title
verbose: boolean (optional, default=True)
Whether to print the address and port
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
kwargs: dict
Additional keyword arguments to pass to Server instance
"""
return get_server(
panels,
port,
address,
websocket_origin,
loop,
show,
start,
title,
verbose,
location,
**kwargs,
)
|
https://github.com/holoviz/panel/issues/1447
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-15-da8b0df4fb70> in <module>
----> 1 color_mapper.update(high=100)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in update(self, **kwargs)
368 '''
369 for k,v in kwargs.items():
--> 370 setattr(self, k, v)
371
372 def update_from_json(self, json_attributes, models=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in __setattr__(self, name, value)
272
273 if name in props or (descriptor is not None and descriptor.fset is not None):
--> 274 super().__setattr__(name, value)
275 else:
276 matches, text = difflib.get_close_matches(name.lower(), props), "similar"
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in __set__(self, obj, value, setter)
537 raise RuntimeError("%s.%s is a readonly property" % (obj.__class__.__name__, self.name))
538
--> 539 self._internal_set(obj, value, setter=setter)
540
541 def __delete__(self, obj):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _internal_set(self, obj, value, hint, setter)
761
762 old = self.__get__(obj, obj.__class__)
--> 763 self._real_set(obj, old, value, hint=hint, setter=setter)
764
765 def _real_set(self, obj, old, value, hint=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _real_set(self, obj, old, value, hint, setter)
830
831 # for notification purposes, "old" should be the logical old
--> 832 self._trigger(obj, old, value, hint=hint, setter=setter)
833
834 # called when a container is mutated "behind our back" and
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _trigger(self, obj, old, value, hint, setter)
907 '''
908 if hasattr(obj, 'trigger'):
--> 909 obj.trigger(self.name, old, value, hint, setter)
910
911
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/model.py in trigger(self, attr, old, new, hint, setter)
659 self._document._invalidate_all_models()
660 # chain up to invoke callbacks
--> 661 super().trigger(attr, old, new, hint=hint, setter=setter)
662
663 def _attach_document(self, doc):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/util/callback_manager.py in trigger(self, attr, old, new, hint, setter)
155 callback(attr, old, new)
156 if hasattr(self, '_document') and self._document is not None:
--> 157 self._document._notify_change(self, attr, old, new, hint, setter, invoke)
158 else:
159 invoke()
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _notify_change(self, model, attr, old, new, hint, setter, callback_invoker)
1040
1041 event = ModelChangedEvent(self, model, attr, old, new, serializable_new, hint, setter, callback_invoker)
-> 1042 self._trigger_on_change(event)
1043
1044 def _push_all_models_freeze(self):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _trigger_on_change(self, event)
1135 for cb in self._callbacks.values():
1136 cb(event)
-> 1137 self._with_self_as_curdoc(invoke_callbacks)
1138
1139 def _with_self_as_curdoc(self, f):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _with_self_as_curdoc(self, f)
1148 else:
1149 set_curdoc(self)
-> 1150 return f()
1151 finally:
1152 set_curdoc(old_doc)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in invoke_callbacks()
1134 def invoke_callbacks():
1135 for cb in self._callbacks.values():
-> 1136 cb(event)
1137 self._with_self_as_curdoc(invoke_callbacks)
1138
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in <lambda>(event)
702 def on_change_dispatch_to(self, receiver):
703 if not receiver in self._callbacks:
--> 704 self._callbacks[receiver] = lambda event: event.dispatch(receiver)
705
706 def on_session_destroyed(self, *callbacks):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
267
268 '''
--> 269 super().dispatch(receiver)
270 if hasattr(receiver, '_document_model_changed'):
271 receiver._document_model_changed(self)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
122 super().dispatch(receiver)
123 if hasattr(receiver, '_document_patched'):
--> 124 receiver._document_patched(self)
125
126 def generate(self, references, buffers):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/server/session.py in _document_patched(self, event)
216
217 if self._pending_writes is None:
--> 218 raise RuntimeError("_pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes")
219
220 # TODO (havocp): our "change sync" protocol is flawed because if both
RuntimeError: _pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes
|
RuntimeError
|
def get_server(
panel,
port=0,
address=None,
websocket_origin=None,
loop=None,
show=False,
start=False,
title=None,
verbose=False,
location=True,
static_dirs={},
oauth_provider=None,
oauth_key=None,
oauth_secret=None,
oauth_extra_params={},
cookie_secret=None,
oauth_encryption_key=None,
**kwargs,
):
"""
Returns a Server instance with this panel attached as the root
app.
Arguments
---------
panel: Viewable, function or {str: Viewable}
A Panel object, a function returning a Panel object or a
dictionary mapping from the URL slug to either.
port: int (optional, default=0)
Allows specifying a specific port
address : str
The address the server should listen on for HTTP requests.
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
loop : tornado.ioloop.IOLoop (optional, default=IOLoop.current())
The tornado IOLoop to run the Server on.
show : boolean (optional, default=False)
Whether to open the server in a new browser tab on start.
start : boolean(optional, default=False)
Whether to start the Server.
title : str or {str: str} (optional, default=None)
An HTML title for the application or a dictionary mapping
from the URL slug to a customized title.
verbose: boolean (optional, default=False)
Whether to report the address and port.
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
static_dirs: dict (optional, default={})
A dictionary of routes and local paths to serve as static file
directories on those routes.
oauth_provider: str
One of the available OAuth providers
oauth_key: str (optional, default=None)
The public OAuth identifier
oauth_secret: str (optional, default=None)
The client secret for the OAuth provider
oauth_extra_params: dict (optional, default={})
Additional information for the OAuth provider
cookie_secret: str (optional, default=None)
A random secret string to sign cookies (required for OAuth)
oauth_encryption_key: str (optional, default=False)
A random encryption key used for encrypting OAuth user
information and access tokens.
kwargs: dict
Additional keyword arguments to pass to Server instance.
Returns
-------
server : bokeh.server.server.Server
Bokeh Server instance running this panel
"""
server_id = kwargs.pop("server_id", uuid.uuid4().hex)
kwargs["extra_patterns"] = extra_patterns = kwargs.get("extra_patterns", [])
if isinstance(panel, dict):
apps = {}
for slug, app in panel.items():
if isinstance(title, dict):
try:
title_ = title[slug]
except KeyError:
raise KeyError(
"Keys of the title dictionnary and of the apps "
f"dictionary must match. No {slug} key found in the "
"title dictionnary."
)
else:
title_ = title
slug = slug if slug.startswith("/") else "/" + slug
if "flask" in sys.modules:
from flask import Flask
if isinstance(app, Flask):
wsgi = WSGIContainer(app)
if slug == "/":
raise ValueError("Flask apps must be served on a subpath.")
if not slug.endswith("/"):
slug += "/"
extra_patterns.append(
(
"^" + slug + ".*",
ProxyFallbackHandler,
dict(fallback=wsgi, proxy=slug),
)
)
continue
apps[slug] = partial(_eval_panel, app, server_id, title_, location)
else:
apps = {"/": partial(_eval_panel, panel, server_id, title, location)}
dist_dir = os.path.join(os.path.split(os.path.dirname(__file__))[0], "dist")
static_dirs = dict(static_dirs, panel_dist=dist_dir)
extra_patterns += get_static_routes(static_dirs)
opts = dict(kwargs)
if loop:
loop.make_current()
opts["io_loop"] = loop
elif opts.get("num_procs", 1) == 1:
opts["io_loop"] = IOLoop.current()
if "index" not in opts:
opts["index"] = INDEX_HTML
if address is not None:
opts["address"] = address
if websocket_origin:
if not isinstance(websocket_origin, list):
websocket_origin = [websocket_origin]
opts["allow_websocket_origin"] = websocket_origin
# Configure OAuth
from ..config import config
if config.oauth_provider:
from ..auth import OAuthProvider
opts["auth_provider"] = OAuthProvider()
if oauth_provider:
config.oauth_provider = oauth_provider
if oauth_key:
config.oauth_key = oauth_key
if oauth_extra_params:
config.oauth_extra_params = oauth_extra_params
if cookie_secret:
config.cookie_secret = cookie_secret
opts["cookie_secret"] = config.cookie_secret
server = Server(apps, port=port, **opts)
if verbose:
address = server.address or "localhost"
print("Launching server at http://%s:%s" % (address, server.port))
state._servers[server_id] = (server, panel, [])
if show:
def show_callback():
server.show("/login" if config.oauth_provider else "/")
server.io_loop.add_callback(show_callback)
def sig_exit(*args, **kwargs):
server.io_loop.add_callback_from_signal(do_stop)
def do_stop(*args, **kwargs):
server.io_loop.stop()
try:
signal.signal(signal.SIGINT, sig_exit)
except ValueError:
pass # Can't use signal on a thread
if start:
server.start()
try:
server.io_loop.start()
except RuntimeError:
pass
return server
|
def get_server(
panel,
port=0,
address=None,
websocket_origin=None,
loop=None,
show=False,
start=False,
title=None,
verbose=False,
location=True,
static_dirs={},
oauth_provider=None,
oauth_key=None,
oauth_secret=None,
oauth_extra_params={},
cookie_secret=None,
oauth_encryption_key=None,
**kwargs,
):
"""
Returns a Server instance with this panel attached as the root
app.
Arguments
---------
panel: Viewable, function or {str: Viewable}
A Panel object, a function returning a Panel object or a
dictionary mapping from the URL slug to either.
port: int (optional, default=0)
Allows specifying a specific port
address : str
The address the server should listen on for HTTP requests.
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
loop : tornado.ioloop.IOLoop (optional, default=IOLoop.current())
The tornado IOLoop to run the Server on.
show : boolean (optional, default=False)
Whether to open the server in a new browser tab on start.
start : boolean(optional, default=False)
Whether to start the Server.
title : str or {str: str} (optional, default=None)
An HTML title for the application or a dictionary mapping
from the URL slug to a customized title.
verbose: boolean (optional, default=False)
Whether to report the address and port.
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
static_dirs: dict (optional, default={})
A dictionary of routes and local paths to serve as static file
directories on those routes.
oauth_provider: str
One of the available OAuth providers
oauth_key: str (optional, default=None)
The public OAuth identifier
oauth_secret: str (optional, default=None)
The client secret for the OAuth provider
oauth_extra_params: dict (optional, default={})
Additional information for the OAuth provider
cookie_secret: str (optional, default=None)
A random secret string to sign cookies (required for OAuth)
oauth_encryption_key: str (optional, default=False)
A random encryption key used for encrypting OAuth user
information and access tokens.
kwargs: dict
Additional keyword arguments to pass to Server instance.
Returns
-------
server : bokeh.server.server.Server
Bokeh Server instance running this panel
"""
from tornado.ioloop import IOLoop
server_id = kwargs.pop("server_id", uuid.uuid4().hex)
kwargs["extra_patterns"] = extra_patterns = kwargs.get("extra_patterns", [])
if isinstance(panel, dict):
apps = {}
for slug, app in panel.items():
if isinstance(title, dict):
try:
title_ = title[slug]
except KeyError:
raise KeyError(
"Keys of the title dictionnary and of the apps "
f"dictionary must match. No {slug} key found in the "
"title dictionnary."
)
else:
title_ = title
slug = slug if slug.startswith("/") else "/" + slug
if "flask" in sys.modules:
from flask import Flask
if isinstance(app, Flask):
wsgi = WSGIContainer(app)
if slug == "/":
raise ValueError("Flask apps must be served on a subpath.")
if not slug.endswith("/"):
slug += "/"
extra_patterns.append(
(
"^" + slug + ".*",
ProxyFallbackHandler,
dict(fallback=wsgi, proxy=slug),
)
)
continue
apps[slug] = partial(_eval_panel, app, server_id, title_, location)
else:
apps = {"/": partial(_eval_panel, panel, server_id, title, location)}
dist_dir = os.path.join(os.path.split(os.path.dirname(__file__))[0], "dist")
static_dirs = dict(static_dirs, panel_dist=dist_dir)
extra_patterns += get_static_routes(static_dirs)
opts = dict(kwargs)
if loop:
loop.make_current()
opts["io_loop"] = loop
elif opts.get("num_procs", 1) == 1:
opts["io_loop"] = IOLoop.current()
if "index" not in opts:
opts["index"] = INDEX_HTML
if address is not None:
opts["address"] = address
if websocket_origin:
if not isinstance(websocket_origin, list):
websocket_origin = [websocket_origin]
opts["allow_websocket_origin"] = websocket_origin
# Configure OAuth
from ..config import config
if config.oauth_provider:
from ..auth import OAuthProvider
opts["auth_provider"] = OAuthProvider()
if oauth_provider:
config.oauth_provider = oauth_provider
if oauth_key:
config.oauth_key = oauth_key
if oauth_extra_params:
config.oauth_extra_params = oauth_extra_params
if cookie_secret:
config.cookie_secret = cookie_secret
opts["cookie_secret"] = config.cookie_secret
server = Server(apps, port=port, **opts)
if verbose:
address = server.address or "localhost"
print("Launching server at http://%s:%s" % (address, server.port))
state._servers[server_id] = (server, panel, [])
if show:
def show_callback():
server.show("/login" if config.oauth_provider else "/")
server.io_loop.add_callback(show_callback)
def sig_exit(*args, **kwargs):
server.io_loop.add_callback_from_signal(do_stop)
def do_stop(*args, **kwargs):
server.io_loop.stop()
try:
signal.signal(signal.SIGINT, sig_exit)
except ValueError:
pass # Can't use signal on a thread
if start:
server.start()
try:
server.io_loop.start()
except RuntimeError:
pass
return server
|
https://github.com/holoviz/panel/issues/1447
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-15-da8b0df4fb70> in <module>
----> 1 color_mapper.update(high=100)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in update(self, **kwargs)
368 '''
369 for k,v in kwargs.items():
--> 370 setattr(self, k, v)
371
372 def update_from_json(self, json_attributes, models=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in __setattr__(self, name, value)
272
273 if name in props or (descriptor is not None and descriptor.fset is not None):
--> 274 super().__setattr__(name, value)
275 else:
276 matches, text = difflib.get_close_matches(name.lower(), props), "similar"
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in __set__(self, obj, value, setter)
537 raise RuntimeError("%s.%s is a readonly property" % (obj.__class__.__name__, self.name))
538
--> 539 self._internal_set(obj, value, setter=setter)
540
541 def __delete__(self, obj):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _internal_set(self, obj, value, hint, setter)
761
762 old = self.__get__(obj, obj.__class__)
--> 763 self._real_set(obj, old, value, hint=hint, setter=setter)
764
765 def _real_set(self, obj, old, value, hint=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _real_set(self, obj, old, value, hint, setter)
830
831 # for notification purposes, "old" should be the logical old
--> 832 self._trigger(obj, old, value, hint=hint, setter=setter)
833
834 # called when a container is mutated "behind our back" and
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _trigger(self, obj, old, value, hint, setter)
907 '''
908 if hasattr(obj, 'trigger'):
--> 909 obj.trigger(self.name, old, value, hint, setter)
910
911
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/model.py in trigger(self, attr, old, new, hint, setter)
659 self._document._invalidate_all_models()
660 # chain up to invoke callbacks
--> 661 super().trigger(attr, old, new, hint=hint, setter=setter)
662
663 def _attach_document(self, doc):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/util/callback_manager.py in trigger(self, attr, old, new, hint, setter)
155 callback(attr, old, new)
156 if hasattr(self, '_document') and self._document is not None:
--> 157 self._document._notify_change(self, attr, old, new, hint, setter, invoke)
158 else:
159 invoke()
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _notify_change(self, model, attr, old, new, hint, setter, callback_invoker)
1040
1041 event = ModelChangedEvent(self, model, attr, old, new, serializable_new, hint, setter, callback_invoker)
-> 1042 self._trigger_on_change(event)
1043
1044 def _push_all_models_freeze(self):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _trigger_on_change(self, event)
1135 for cb in self._callbacks.values():
1136 cb(event)
-> 1137 self._with_self_as_curdoc(invoke_callbacks)
1138
1139 def _with_self_as_curdoc(self, f):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _with_self_as_curdoc(self, f)
1148 else:
1149 set_curdoc(self)
-> 1150 return f()
1151 finally:
1152 set_curdoc(old_doc)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in invoke_callbacks()
1134 def invoke_callbacks():
1135 for cb in self._callbacks.values():
-> 1136 cb(event)
1137 self._with_self_as_curdoc(invoke_callbacks)
1138
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in <lambda>(event)
702 def on_change_dispatch_to(self, receiver):
703 if not receiver in self._callbacks:
--> 704 self._callbacks[receiver] = lambda event: event.dispatch(receiver)
705
706 def on_session_destroyed(self, *callbacks):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
267
268 '''
--> 269 super().dispatch(receiver)
270 if hasattr(receiver, '_document_model_changed'):
271 receiver._document_model_changed(self)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
122 super().dispatch(receiver)
123 if hasattr(receiver, '_document_patched'):
--> 124 receiver._document_patched(self)
125
126 def generate(self, references, buffers):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/server/session.py in _document_patched(self, event)
216
217 if self._pending_writes is None:
--> 218 raise RuntimeError("_pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes")
219
220 # TODO (havocp): our "change sync" protocol is flawed because if both
RuntimeError: _pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes
|
RuntimeError
|
def sync_busy(self, indicator):
"""
Syncs the busy state with an indicator with a boolean value
parameter.
Arguments
---------
indicator: An BooleanIndicator to sync with the busy property
"""
if not isinstance(indicator.param.value, param.Boolean):
raise ValueError("Busy indicator must have a value parameterof Boolean type.")
self._indicators.append(indicator)
|
def sync_busy(self, indicator):
"""
Syncs the busy state with an indicator with a boolean value
parameter.
"""
self._indicators.append(indicator)
|
https://github.com/holoviz/panel/issues/1447
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-15-da8b0df4fb70> in <module>
----> 1 color_mapper.update(high=100)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in update(self, **kwargs)
368 '''
369 for k,v in kwargs.items():
--> 370 setattr(self, k, v)
371
372 def update_from_json(self, json_attributes, models=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in __setattr__(self, name, value)
272
273 if name in props or (descriptor is not None and descriptor.fset is not None):
--> 274 super().__setattr__(name, value)
275 else:
276 matches, text = difflib.get_close_matches(name.lower(), props), "similar"
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in __set__(self, obj, value, setter)
537 raise RuntimeError("%s.%s is a readonly property" % (obj.__class__.__name__, self.name))
538
--> 539 self._internal_set(obj, value, setter=setter)
540
541 def __delete__(self, obj):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _internal_set(self, obj, value, hint, setter)
761
762 old = self.__get__(obj, obj.__class__)
--> 763 self._real_set(obj, old, value, hint=hint, setter=setter)
764
765 def _real_set(self, obj, old, value, hint=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _real_set(self, obj, old, value, hint, setter)
830
831 # for notification purposes, "old" should be the logical old
--> 832 self._trigger(obj, old, value, hint=hint, setter=setter)
833
834 # called when a container is mutated "behind our back" and
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _trigger(self, obj, old, value, hint, setter)
907 '''
908 if hasattr(obj, 'trigger'):
--> 909 obj.trigger(self.name, old, value, hint, setter)
910
911
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/model.py in trigger(self, attr, old, new, hint, setter)
659 self._document._invalidate_all_models()
660 # chain up to invoke callbacks
--> 661 super().trigger(attr, old, new, hint=hint, setter=setter)
662
663 def _attach_document(self, doc):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/util/callback_manager.py in trigger(self, attr, old, new, hint, setter)
155 callback(attr, old, new)
156 if hasattr(self, '_document') and self._document is not None:
--> 157 self._document._notify_change(self, attr, old, new, hint, setter, invoke)
158 else:
159 invoke()
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _notify_change(self, model, attr, old, new, hint, setter, callback_invoker)
1040
1041 event = ModelChangedEvent(self, model, attr, old, new, serializable_new, hint, setter, callback_invoker)
-> 1042 self._trigger_on_change(event)
1043
1044 def _push_all_models_freeze(self):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _trigger_on_change(self, event)
1135 for cb in self._callbacks.values():
1136 cb(event)
-> 1137 self._with_self_as_curdoc(invoke_callbacks)
1138
1139 def _with_self_as_curdoc(self, f):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _with_self_as_curdoc(self, f)
1148 else:
1149 set_curdoc(self)
-> 1150 return f()
1151 finally:
1152 set_curdoc(old_doc)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in invoke_callbacks()
1134 def invoke_callbacks():
1135 for cb in self._callbacks.values():
-> 1136 cb(event)
1137 self._with_self_as_curdoc(invoke_callbacks)
1138
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in <lambda>(event)
702 def on_change_dispatch_to(self, receiver):
703 if not receiver in self._callbacks:
--> 704 self._callbacks[receiver] = lambda event: event.dispatch(receiver)
705
706 def on_session_destroyed(self, *callbacks):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
267
268 '''
--> 269 super().dispatch(receiver)
270 if hasattr(receiver, '_document_model_changed'):
271 receiver._document_model_changed(self)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
122 super().dispatch(receiver)
123 if hasattr(receiver, '_document_patched'):
--> 124 receiver._document_patched(self)
125
126 def generate(self, references, buffers):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/server/session.py in _document_patched(self, event)
216
217 if self._pending_writes is None:
--> 218 raise RuntimeError("_pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes")
219
220 # TODO (havocp): our "change sync" protocol is flawed because if both
RuntimeError: _pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes
|
RuntimeError
|
def add_periodic_callback(
self, callback, period=500, count=None, timeout=None, start=True
):
"""
Schedules a periodic callback to be run at an interval set by
the period. Returns a PeriodicCallback object with the option
to stop and start the callback.
Arguments
---------
callback: callable
Callable function to be executed at periodic interval.
period: int
Interval in milliseconds at which callback will be executed.
count: int
Maximum number of times callback will be invoked.
timeout: int
Timeout in seconds when the callback should be stopped.
start: boolean (default=True)
Whether to start callback immediately.
Returns
-------
Return a PeriodicCallback object with start and stop methods.
"""
self.param.warning(
"Calling add_periodic_callback on a Panel component is "
"deprecated and will be removed in the next minor release. "
"Use the pn.state.add_periodic_callback API instead."
)
cb = PeriodicCallback(
callback=callback, period=period, count=count, timeout=timeout
)
if start:
cb.start()
return cb
|
def add_periodic_callback(
self, callback, period=500, count=None, timeout=None, start=True
):
"""
Schedules a periodic callback to be run at an interval set by
the period. Returns a PeriodicCallback object with the option
to stop and start the callback.
Arguments
---------
callback: callable
Callable function to be executed at periodic interval.
period: int
Interval in milliseconds at which callback will be executed.
count: int
Maximum number of times callback will be invoked.
timeout: int
Timeout in seconds when the callback should be stopped.
start: boolean (default=True)
Whether to start callback immediately.
Returns
-------
Return a PeriodicCallback object with start and stop methods.
"""
cb = PeriodicCallback(
callback=callback, period=period, count=count, timeout=timeout
)
if start:
cb.start()
return cb
|
https://github.com/holoviz/panel/issues/1447
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-15-da8b0df4fb70> in <module>
----> 1 color_mapper.update(high=100)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in update(self, **kwargs)
368 '''
369 for k,v in kwargs.items():
--> 370 setattr(self, k, v)
371
372 def update_from_json(self, json_attributes, models=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in __setattr__(self, name, value)
272
273 if name in props or (descriptor is not None and descriptor.fset is not None):
--> 274 super().__setattr__(name, value)
275 else:
276 matches, text = difflib.get_close_matches(name.lower(), props), "similar"
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in __set__(self, obj, value, setter)
537 raise RuntimeError("%s.%s is a readonly property" % (obj.__class__.__name__, self.name))
538
--> 539 self._internal_set(obj, value, setter=setter)
540
541 def __delete__(self, obj):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _internal_set(self, obj, value, hint, setter)
761
762 old = self.__get__(obj, obj.__class__)
--> 763 self._real_set(obj, old, value, hint=hint, setter=setter)
764
765 def _real_set(self, obj, old, value, hint=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _real_set(self, obj, old, value, hint, setter)
830
831 # for notification purposes, "old" should be the logical old
--> 832 self._trigger(obj, old, value, hint=hint, setter=setter)
833
834 # called when a container is mutated "behind our back" and
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _trigger(self, obj, old, value, hint, setter)
907 '''
908 if hasattr(obj, 'trigger'):
--> 909 obj.trigger(self.name, old, value, hint, setter)
910
911
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/model.py in trigger(self, attr, old, new, hint, setter)
659 self._document._invalidate_all_models()
660 # chain up to invoke callbacks
--> 661 super().trigger(attr, old, new, hint=hint, setter=setter)
662
663 def _attach_document(self, doc):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/util/callback_manager.py in trigger(self, attr, old, new, hint, setter)
155 callback(attr, old, new)
156 if hasattr(self, '_document') and self._document is not None:
--> 157 self._document._notify_change(self, attr, old, new, hint, setter, invoke)
158 else:
159 invoke()
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _notify_change(self, model, attr, old, new, hint, setter, callback_invoker)
1040
1041 event = ModelChangedEvent(self, model, attr, old, new, serializable_new, hint, setter, callback_invoker)
-> 1042 self._trigger_on_change(event)
1043
1044 def _push_all_models_freeze(self):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _trigger_on_change(self, event)
1135 for cb in self._callbacks.values():
1136 cb(event)
-> 1137 self._with_self_as_curdoc(invoke_callbacks)
1138
1139 def _with_self_as_curdoc(self, f):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _with_self_as_curdoc(self, f)
1148 else:
1149 set_curdoc(self)
-> 1150 return f()
1151 finally:
1152 set_curdoc(old_doc)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in invoke_callbacks()
1134 def invoke_callbacks():
1135 for cb in self._callbacks.values():
-> 1136 cb(event)
1137 self._with_self_as_curdoc(invoke_callbacks)
1138
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in <lambda>(event)
702 def on_change_dispatch_to(self, receiver):
703 if not receiver in self._callbacks:
--> 704 self._callbacks[receiver] = lambda event: event.dispatch(receiver)
705
706 def on_session_destroyed(self, *callbacks):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
267
268 '''
--> 269 super().dispatch(receiver)
270 if hasattr(receiver, '_document_model_changed'):
271 receiver._document_model_changed(self)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
122 super().dispatch(receiver)
123 if hasattr(receiver, '_document_patched'):
--> 124 receiver._document_patched(self)
125
126 def generate(self, references, buffers):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/server/session.py in _document_patched(self, event)
216
217 if self._pending_writes is None:
--> 218 raise RuntimeError("_pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes")
219
220 # TODO (havocp): our "change sync" protocol is flawed because if both
RuntimeError: _pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes
|
RuntimeError
|
def show(
self,
title=None,
port=0,
address=None,
websocket_origin=None,
threaded=False,
verbose=True,
open=True,
location=True,
**kwargs,
):
"""
Starts a Bokeh server and displays the Viewable in a new tab.
Arguments
---------
title : str
A string title to give the Document (if served as an app)
port: int (optional, default=0)
Allows specifying a specific port
address : str
The address the server should listen on for HTTP requests.
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
threaded: boolean (optional, default=False)
Whether to launch the Server on a separate thread, allowing
interactive use.
verbose: boolean (optional, default=True)
Whether to print the address and port
open : boolean (optional, default=True)
Whether to open the server in a new browser tab
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
Returns
-------
server: bokeh.server.Server or threading.Thread
Returns the Bokeh server instance or the thread the server
was launched on (if threaded=True)
"""
return serve(
self,
port=port,
address=address,
websocket_origin=websocket_origin,
show=open,
start=True,
title=title,
verbose=verbose,
location=location,
threaded=threaded,
**kwargs,
)
|
def show(
self,
title=None,
port=0,
address=None,
websocket_origin=None,
threaded=False,
verbose=True,
open=True,
location=True,
**kwargs,
):
"""
Starts a Bokeh server and displays the Viewable in a new tab.
Arguments
---------
title : str
A string title to give the Document (if served as an app)
port: int (optional, default=0)
Allows specifying a specific port
address : str
The address the server should listen on for HTTP requests.
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
threaded: boolean (optional, default=False)
Whether to launch the Server on a separate thread, allowing
interactive use.
verbose: boolean (optional, default=True)
Whether to print the address and port
open : boolean (optional, default=True)
Whether to open the server in a new browser tab
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
Returns
-------
server: bokeh.server.Server or threading.Thread
Returns the Bokeh server instance or the thread the server
was launched on (if threaded=True)
"""
if threaded:
from tornado.ioloop import IOLoop
loop = IOLoop()
server = StoppableThread(
target=self._get_server,
io_loop=loop,
args=(
port,
address,
websocket_origin,
loop,
open,
True,
title,
verbose,
location,
),
kwargs=kwargs,
)
server.start()
else:
server = self._get_server(
port,
address,
websocket_origin,
show=open,
start=True,
title=title,
verbose=verbose,
location=location,
**kwargs,
)
return server
|
https://github.com/holoviz/panel/issues/1447
|
---------------------------------------------------------------------------
RuntimeError Traceback (most recent call last)
<ipython-input-15-da8b0df4fb70> in <module>
----> 1 color_mapper.update(high=100)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in update(self, **kwargs)
368 '''
369 for k,v in kwargs.items():
--> 370 setattr(self, k, v)
371
372 def update_from_json(self, json_attributes, models=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/has_props.py in __setattr__(self, name, value)
272
273 if name in props or (descriptor is not None and descriptor.fset is not None):
--> 274 super().__setattr__(name, value)
275 else:
276 matches, text = difflib.get_close_matches(name.lower(), props), "similar"
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in __set__(self, obj, value, setter)
537 raise RuntimeError("%s.%s is a readonly property" % (obj.__class__.__name__, self.name))
538
--> 539 self._internal_set(obj, value, setter=setter)
540
541 def __delete__(self, obj):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _internal_set(self, obj, value, hint, setter)
761
762 old = self.__get__(obj, obj.__class__)
--> 763 self._real_set(obj, old, value, hint=hint, setter=setter)
764
765 def _real_set(self, obj, old, value, hint=None, setter=None):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _real_set(self, obj, old, value, hint, setter)
830
831 # for notification purposes, "old" should be the logical old
--> 832 self._trigger(obj, old, value, hint=hint, setter=setter)
833
834 # called when a container is mutated "behind our back" and
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/core/property/descriptors.py in _trigger(self, obj, old, value, hint, setter)
907 '''
908 if hasattr(obj, 'trigger'):
--> 909 obj.trigger(self.name, old, value, hint, setter)
910
911
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/model.py in trigger(self, attr, old, new, hint, setter)
659 self._document._invalidate_all_models()
660 # chain up to invoke callbacks
--> 661 super().trigger(attr, old, new, hint=hint, setter=setter)
662
663 def _attach_document(self, doc):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/util/callback_manager.py in trigger(self, attr, old, new, hint, setter)
155 callback(attr, old, new)
156 if hasattr(self, '_document') and self._document is not None:
--> 157 self._document._notify_change(self, attr, old, new, hint, setter, invoke)
158 else:
159 invoke()
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _notify_change(self, model, attr, old, new, hint, setter, callback_invoker)
1040
1041 event = ModelChangedEvent(self, model, attr, old, new, serializable_new, hint, setter, callback_invoker)
-> 1042 self._trigger_on_change(event)
1043
1044 def _push_all_models_freeze(self):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _trigger_on_change(self, event)
1135 for cb in self._callbacks.values():
1136 cb(event)
-> 1137 self._with_self_as_curdoc(invoke_callbacks)
1138
1139 def _with_self_as_curdoc(self, f):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in _with_self_as_curdoc(self, f)
1148 else:
1149 set_curdoc(self)
-> 1150 return f()
1151 finally:
1152 set_curdoc(old_doc)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in invoke_callbacks()
1134 def invoke_callbacks():
1135 for cb in self._callbacks.values():
-> 1136 cb(event)
1137 self._with_self_as_curdoc(invoke_callbacks)
1138
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/document.py in <lambda>(event)
702 def on_change_dispatch_to(self, receiver):
703 if not receiver in self._callbacks:
--> 704 self._callbacks[receiver] = lambda event: event.dispatch(receiver)
705
706 def on_session_destroyed(self, *callbacks):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
267
268 '''
--> 269 super().dispatch(receiver)
270 if hasattr(receiver, '_document_model_changed'):
271 receiver._document_model_changed(self)
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/document/events.py in dispatch(self, receiver)
122 super().dispatch(receiver)
123 if hasattr(receiver, '_document_patched'):
--> 124 receiver._document_patched(self)
125
126 def generate(self, references, buffers):
~/miniconda3/envs/PyX/lib/python3.7/site-packages/bokeh/server/session.py in _document_patched(self, event)
216
217 if self._pending_writes is None:
--> 218 raise RuntimeError("_pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes")
219
220 # TODO (havocp): our "change sync" protocol is flawed because if both
RuntimeError: _pending_writes should be non-None when we have a document lock, and we should have the lock when the document changes
|
RuntimeError
|
def get_server(
panel,
port=0,
websocket_origin=None,
loop=None,
show=False,
start=False,
title=None,
verbose=False,
location=True,
static_dirs={},
**kwargs,
):
"""
Returns a Server instance with this panel attached as the root
app.
Arguments
---------
panel: Viewable, function or {str: Viewable}
A Panel object, a function returning a Panel object or a
dictionary mapping from the URL slug to either.
port: int (optional, default=0)
Allows specifying a specific port
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
loop : tornado.ioloop.IOLoop (optional, default=IOLoop.current())
The tornado IOLoop to run the Server on
show : boolean (optional, default=False)
Whether to open the server in a new browser tab on start
start : boolean(optional, default=False)
Whether to start the Server
title: str or {str: str} (optional, default=None)
An HTML title for the application or a dictionary mapping
from the URL slug to a customized title
verbose: boolean (optional, default=False)
Whether to report the address and port
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
static_dirs: dict (optional, default={})
A dictionary of routes and local paths to serve as static file
directories on those routes
kwargs: dict
Additional keyword arguments to pass to Server instance
Returns
-------
server : bokeh.server.server.Server
Bokeh Server instance running this panel
"""
from tornado.ioloop import IOLoop
server_id = kwargs.pop("server_id", uuid.uuid4().hex)
kwargs["extra_patterns"] = extra_patterns = kwargs.get("extra_patterns", [])
if isinstance(panel, dict):
apps = {}
for slug, app in panel.items():
if isinstance(title, dict):
try:
title_ = title[slug]
except KeyError:
raise KeyError(
"Keys of the title dictionnary and of the apps "
f"dictionary must match. No {slug} key found in the "
"title dictionnary."
)
else:
title_ = title
slug = slug if slug.startswith("/") else "/" + slug
if "flask" in sys.modules:
from flask import Flask
if isinstance(app, Flask):
wsgi = WSGIContainer(app)
if slug == "/":
raise ValueError("Flask apps must be served on a subpath.")
if not slug.endswith("/"):
slug += "/"
extra_patterns.append(
(
"^" + slug + ".*",
ProxyFallbackHandler,
dict(fallback=wsgi, proxy=slug),
)
)
continue
apps[slug] = partial(_eval_panel, app, server_id, title_, location)
else:
apps = {"/": partial(_eval_panel, panel, server_id, title, location)}
extra_patterns += get_static_routes(static_dirs)
opts = dict(kwargs)
if loop:
loop.make_current()
opts["io_loop"] = loop
elif opts.get("num_procs", 1) == 1:
opts["io_loop"] = IOLoop.current()
if "index" not in opts:
opts["index"] = INDEX_HTML
if websocket_origin:
if not isinstance(websocket_origin, list):
websocket_origin = [websocket_origin]
opts["allow_websocket_origin"] = websocket_origin
server = Server(apps, port=port, **opts)
if verbose:
address = server.address or "localhost"
print("Launching server at http://%s:%s" % (address, server.port))
state._servers[server_id] = (server, panel, [])
if show:
def show_callback():
server.show("/")
server.io_loop.add_callback(show_callback)
def sig_exit(*args, **kwargs):
server.io_loop.add_callback_from_signal(do_stop)
def do_stop(*args, **kwargs):
server.io_loop.stop()
try:
signal.signal(signal.SIGINT, sig_exit)
except ValueError:
pass # Can't use signal on a thread
if start:
server.start()
try:
server.io_loop.start()
except RuntimeError:
pass
return server
|
def get_server(
panel,
port=0,
websocket_origin=None,
loop=None,
show=False,
start=False,
title=None,
verbose=False,
location=True,
static_dirs={},
**kwargs,
):
"""
Returns a Server instance with this panel attached as the root
app.
Arguments
---------
panel: Viewable, function or {str: Viewable}
A Panel object, a function returning a Panel object or a
dictionary mapping from the URL slug to either.
port: int (optional, default=0)
Allows specifying a specific port
websocket_origin: str or list(str) (optional)
A list of hosts that can connect to the websocket.
This is typically required when embedding a server app in
an external web site.
If None, "localhost" is used.
loop : tornado.ioloop.IOLoop (optional, default=IOLoop.current())
The tornado IOLoop to run the Server on
show : boolean (optional, default=False)
Whether to open the server in a new browser tab on start
start : boolean(optional, default=False)
Whether to start the Server
title: str or {str: str} (optional, default=None)
An HTML title for the application or a dictionary mapping
from the URL slug to a customized title
verbose: boolean (optional, default=False)
Whether to report the address and port
location : boolean or panel.io.location.Location
Whether to create a Location component to observe and
set the URL location.
static_dirs: dict (optional, default={})
A dictionary of routes and local paths to serve as static file
directories on those routes
kwargs: dict
Additional keyword arguments to pass to Server instance
Returns
-------
server : bokeh.server.server.Server
Bokeh Server instance running this panel
"""
from tornado.ioloop import IOLoop
server_id = kwargs.pop("server_id", uuid.uuid4().hex)
kwargs["extra_patterns"] = extra_patterns = kwargs.get("extra_patterns", [])
if isinstance(panel, dict):
apps = {}
for slug, app in panel.items():
if isinstance(title, dict):
try:
title_ = title[slug]
except KeyError:
raise KeyError(
"Keys of the title dictionnary and of the apps "
f"dictionary must match. No {slug} key found in the "
"title dictionnary."
)
else:
title_ = title
slug = slug if slug.startswith("/") else "/" + slug
if "flask" in sys.modules:
from flask import Flask
if isinstance(app, Flask):
wsgi = WSGIContainer(app)
if slug == "/":
raise ValueError("Flask apps must be served on a subpath.")
if not slug.endswith("/"):
slug += "/"
extra_patterns.append(
(
"^" + slug + ".*",
ProxyFallbackHandler,
dict(fallback=wsgi, proxy=slug),
)
)
continue
apps[slug] = partial(_eval_panel, app, server_id, title_, location)
else:
apps = {"/": partial(_eval_panel, panel, server_id, title, location)}
extra_patterns += get_static_routes(static_dirs)
opts = dict(kwargs)
if loop:
loop.make_current()
opts["io_loop"] = loop
else:
opts["io_loop"] = IOLoop.current()
if "index" not in opts:
opts["index"] = INDEX_HTML
if websocket_origin:
if not isinstance(websocket_origin, list):
websocket_origin = [websocket_origin]
opts["allow_websocket_origin"] = websocket_origin
server = Server(apps, port=port, **opts)
if verbose:
address = server.address or "localhost"
print("Launching server at http://%s:%s" % (address, server.port))
state._servers[server_id] = (server, panel, [])
if show:
def show_callback():
server.show("/")
server.io_loop.add_callback(show_callback)
def sig_exit(*args, **kwargs):
server.io_loop.add_callback_from_signal(do_stop)
def do_stop(*args, **kwargs):
server.io_loop.stop()
try:
signal.signal(signal.SIGINT, sig_exit)
except ValueError:
pass # Can't use signal on a thread
if start:
server.start()
try:
server.io_loop.start()
except RuntimeError:
pass
return server
|
https://github.com/holoviz/panel/issues/1405
|
$ docker run -it --entrypoint=//bin/bash --rm python:3.7.7-stretch
root@4074b09e57fc:/# pip install panel ipython
Collecting panel
Downloading panel-0.9.5-py2.py3-none-any.whl (1.3 MB)
|████████████████████████████████| 1.3 MB 762 kB/s
Collecting ipython
Downloading ipython-7.15.0-py3-none-any.whl (783 kB)
|████████████████████████████████| 783 kB 2.0 MB/s
Collecting param>=1.9.3
Downloading param-1.9.3-py2.py3-none-any.whl (70 kB)
|████████████████████████████████| 70 kB 1.4 MB/s
Collecting pyviz-comms>=0.7.4
Downloading pyviz_comms-0.7.5-py2.py3-none-any.whl (10 kB)
Collecting pyct>=0.4.4
Downloading pyct-0.4.6-py2.py3-none-any.whl (12 kB)
Collecting bokeh>=2.0.0
Downloading bokeh-2.0.2.tar.gz (8.6 MB)
|████████████████████████████████| 8.6 MB 1.5 MB/s
Collecting markdown
Downloading Markdown-3.2.2-py3-none-any.whl (88 kB)
|████████████████████████████████| 88 kB 1.8 MB/s
Collecting tqdm
Downloading tqdm-4.46.1-py2.py3-none-any.whl (63 kB)
|████████████████████████████████| 63 kB 1.2 MB/s
Requirement already satisfied: setuptools>=18.5 in /usr/local/lib/python3.7/site-packages (from ipython) (47.1.1)
Collecting jedi>=0.10
Downloading jedi-0.17.0-py2.py3-none-any.whl (1.1 MB)
|████████████████████████████████| 1.1 MB 1.7 MB/s
Collecting prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0
Downloading prompt_toolkit-3.0.5-py3-none-any.whl (351 kB)
|████████████████████████████████| 351 kB 1.9 MB/s
Collecting pygments
Downloading Pygments-2.6.1-py3-none-any.whl (914 kB)
|████████████████████████████████| 914 kB 2.0 MB/s
Collecting pexpect; sys_platform != "win32"
Downloading pexpect-4.8.0-py2.py3-none-any.whl (59 kB)
|████████████████████████████████| 59 kB 2.3 MB/s
Collecting decorator
Downloading decorator-4.4.2-py2.py3-none-any.whl (9.2 kB)
Collecting traitlets>=4.2
Downloading traitlets-4.3.3-py2.py3-none-any.whl (75 kB)
|████████████████████████████████| 75 kB 2.4 MB/s
Collecting pickleshare
Downloading pickleshare-0.7.5-py2.py3-none-any.whl (6.9 kB)
Collecting backcall
Downloading backcall-0.2.0-py2.py3-none-any.whl (11 kB)
Collecting PyYAML>=3.10
Downloading PyYAML-5.3.1.tar.gz (269 kB)
|████████████████████████████████| 269 kB 2.1 MB/s
Collecting python-dateutil>=2.1
Downloading python_dateutil-2.8.1-py2.py3-none-any.whl (227 kB)
|████████████████████████████████| 227 kB 2.1 MB/s
Collecting Jinja2>=2.7
Downloading Jinja2-2.11.2-py2.py3-none-any.whl (125 kB)
|████████████████████████████████| 125 kB 2.1 MB/s
Collecting numpy>=1.11.3
Downloading numpy-1.18.5-cp37-cp37m-manylinux1_x86_64.whl (20.1 MB)
|████████████████████████████████| 20.1 MB 2.8 MB/s
Collecting pillow>=4.0
Downloading Pillow-7.1.2-cp37-cp37m-manylinux1_x86_64.whl (2.1 MB)
|████████████████████████████████| 2.1 MB 2.7 MB/s
Collecting packaging>=16.8
Downloading packaging-20.4-py2.py3-none-any.whl (37 kB)
Collecting tornado>=5
Downloading tornado-6.0.4.tar.gz (496 kB)
|████████████████████████████████| 496 kB 2.1 MB/s
Collecting typing_extensions>=3.7.4
Downloading typing_extensions-3.7.4.2-py3-none-any.whl (22 kB)
Collecting importlib-metadata; python_version < "3.8"
Downloading importlib_metadata-1.6.1-py2.py3-none-any.whl (31 kB)
Collecting parso>=0.7.0
Downloading parso-0.7.0-py2.py3-none-any.whl (100 kB)
|████████████████████████████████| 100 kB 1.6 MB/s
Collecting wcwidth
Downloading wcwidth-0.2.4-py2.py3-none-any.whl (30 kB)
Collecting ptyprocess>=0.5
Downloading ptyprocess-0.6.0-py2.py3-none-any.whl (39 kB)
Collecting ipython-genutils
Downloading ipython_genutils-0.2.0-py2.py3-none-any.whl (26 kB)
Collecting six
Downloading six-1.15.0-py2.py3-none-any.whl (10 kB)
Collecting MarkupSafe>=0.23
Downloading MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl (27 kB)
Collecting pyparsing>=2.0.2
Downloading pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
|████████████████████████████████| 67 kB 1.8 MB/s
Collecting zipp>=0.5
Downloading zipp-3.1.0-py3-none-any.whl (4.9 kB)
Building wheels for collected packages: bokeh, PyYAML, tornado
Building wheel for bokeh (setup.py) ... done
Created wheel for bokeh: filename=bokeh-2.0.2-py3-none-any.whl size=9072535 sha256=92af4d4c38b6492b4801df51c3ab39a797e03c82ca40c8cac75aced96b17f95f
Stored in directory: /root/.cache/pip/wheels/5e/9a/e0/2ce591d3bc02114f16ae1cdd7a4e34ff4d3b99eca54eeee53a
Building wheel for PyYAML (setup.py) ... done
Created wheel for PyYAML: filename=PyYAML-5.3.1-cp37-cp37m-linux_x86_64.whl size=411549 sha256=2ba23d0576a1b8efe148f7e6e188a6c664c9d35344b7146be82263a39de35a3c
Stored in directory: /root/.cache/pip/wheels/5e/03/1e/e1e954795d6f35dfc7b637fe2277bff021303bd9570ecea653
Building wheel for tornado (setup.py) ... done
Created wheel for tornado: filename=tornado-6.0.4-cp37-cp37m-linux_x86_64.whl size=428633 sha256=86bcd8bfc85c4fa27c9424006dee442777a61091ab26909db3cca11a76412a9d
Stored in directory: /root/.cache/pip/wheels/7d/14/fa/d88fb5da77d813ea0ffca38a2ab2a052874e9e1142bad0b348
Successfully built bokeh PyYAML tornado
Installing collected packages: param, pyviz-comms, pyct, PyYAML, six, python-dateutil, MarkupSafe, Jinja2, numpy, pillow, pyparsing, packaging, tornado, typing-extensions, bokeh, zipp, importlib-metadata, markdown, tqdm, panel, parso, jedi, wcwidth, prompt-toolkit, pygments, ptyprocess, pexpect, decorator, ipython-genutils, traitlets, pickleshare, backcall, ipython
Successfully installed Jinja2-2.11.2 MarkupSafe-1.1.1 PyYAML-5.3.1 backcall-0.2.0 bokeh-2.0.2 decorator-4.4.2 importlib-metadata-1.6.1 ipython-7.15.0 ipython-genutils-0.2.0 jedi-0.17.0 markdown-3.2.2 numpy-1.18.5 packaging-20.4 panel-0.9.5 param-1.9.3 parso-0.7.0 pexpect-4.8.0 pickleshare-0.7.5 pillow-7.1.2 prompt-toolkit-3.0.5 ptyprocess-0.6.0 pyct-0.4.6 pygments-2.6.1 pyparsing-2.4.7 python-dateutil-2.8.1 pyviz-comms-0.7.5 six-1.15.0 tornado-6.0.4 tqdm-4.46.1 traitlets-4.3.3 typing-extensions-3.7.4.2 wcwidth-0.2.4 zipp-3.1.0
root@4074b09e57fc:/# ipython
Python 3.7.7 (default, Jun 9 2020, 18:17:41)
Type 'copyright', 'credits' or 'license' for more information
IPython 7.15.0 -- An enhanced Interactive Python. Type '?' for help.
In [1]: import panel as pn
In [2]: def view():
...: return pn.pane.Markdown("# Hello World")
...:
In [3]: app_routes = {"hello-world": view}
In [4]: pn.serve(app_routes, port=14033, dev=False, title="Panel App", num_procs=0)
Launching server at http://localhost:14033
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
/usr/local/lib/python3.7/asyncio/selector_events.py in _add_reader(self, fd, callback, *args)
255 try:
--> 256 key = self._selector.get_key(fd)
257 except KeyError:
/usr/local/lib/python3.7/selectors.py in get_key(self, fileobj)
191 except KeyError:
--> 192 raise KeyError("{!r} is not registered".format(fileobj)) from None
193
KeyError: '14 is not registered'
During handling of the above exception, another exception occurred:
FileExistsError Traceback (most recent call last)
<ipython-input-4-df09cb8ef7a7> in <module>
----> 1 pn.serve(app_routes, port=14033, dev=False, title="Panel App", num_procs=0)
/usr/local/lib/python3.7/site-packages/panel/io/server.py in serve(panels, port, websocket_origin, loop, show, start, title, verbose, **kwargs)
139 """
140 return get_server(panels, port, websocket_origin, loop, show, start,
--> 141 title, verbose, **kwargs)
142
143
/usr/local/lib/python3.7/site-packages/panel/io/server.py in get_server(panel, port, websocket_origin, loop, show, start, title, verbose, **kwargs)
235 opts['allow_websocket_origin'] = websocket_origin
236
--> 237 server = Server(apps, port=port, **opts)
238 if verbose:
239 address = server.address or 'localhost'
/usr/local/lib/python3.7/site-packages/bokeh/server/server.py in __init__(self, applications, io_loop, http_server_kwargs, **kwargs)
402
403 http_server.start(opts.num_procs)
--> 404 http_server.add_sockets(sockets)
405
406 except Exception:
/usr/local/lib/python3.7/site-packages/tornado/tcpserver.py in add_sockets(self, sockets)
164 self._sockets[sock.fileno()] = sock
165 self._handlers[sock.fileno()] = add_accept_handler(
--> 166 sock, self._handle_connection
167 )
168
/usr/local/lib/python3.7/site-packages/tornado/netutil.py in add_accept_handler(sock, callback)
277 removed[0] = True
278
--> 279 io_loop.add_handler(sock, accept_handler, IOLoop.READ)
280 return remove_handler
281
/usr/local/lib/python3.7/site-packages/tornado/platform/asyncio.py in add_handler(self, fd, handler, events)
98 self.handlers[fd] = (fileobj, handler)
99 if events & IOLoop.READ:
--> 100 self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
101 self.readers.add(fd)
102 if events & IOLoop.WRITE:
/usr/local/lib/python3.7/asyncio/selector_events.py in add_reader(self, fd, callback, *args)
327 """Add a reader callback."""
328 self._ensure_fd_no_transport(fd)
--> 329 return self._add_reader(fd, callback, *args)
330
331 def remove_reader(self, fd):
/usr/local/lib/python3.7/asyncio/selector_events.py in _add_reader(self, fd, callback, *args)
257 except KeyError:
258 self._selector.register(fd, selectors.EVENT_READ,
--> 259 (handle, None))
260 else:
261 mask, (reader, writer) = key.events, key.data
/usr/local/lib/python3.7/selectors.py in register(self, fileobj, events, data)
357 poller_events |= self._EVENT_WRITE
358 try:
--> 359 self._selector.register(key.fd, poller_events)
360 except:
361 super().unregister(fileobj)
FileExistsError: [Errno 17] File exists
|
KeyError
|
def jslink(self, target, code=None, args=None, bidirectional=False, **links):
"""
Links properties on the source object to those on the target
object in JS code. Supports two modes, either specify a
mapping between the source and target model properties as
keywords or provide a dictionary of JS code snippets which
maps from the source parameter to a JS code snippet which is
executed when the property changes.
Arguments
----------
target: HoloViews object or bokeh Model or panel Viewable
The target to link the value to.
code: dict
Custom code which will be executed when the widget value
changes.
bidirectional: boolean
Whether to link source and target bi-directionally
**links: dict
A mapping between properties on the source model and the
target model property to link it to.
Returns
-------
link: GenericLink
The GenericLink which can be used unlink the widget and
the target model.
"""
if links and code:
raise ValueError(
"Either supply a set of properties to "
"link as keywords or a set of JS code "
"callbacks, not both."
)
elif not links and not code:
raise ValueError(
"Declare parameters to link or a set of callbacks, neither was defined."
)
if args is None:
args = {}
mapping = code or links
for k in mapping:
if k.startswith("event:"):
continue
elif hasattr(self, "object") and isinstance(self.object, LayoutDOM):
current = self.object
for attr in k.split("."):
if not hasattr(current, attr):
raise ValueError(
f"Could not resolve {k} on "
f"{self.object} model. Ensure "
"you jslink an attribute that "
"exists on the bokeh model."
)
current = getattr(current, attr)
elif k not in self.param and k not in list(self._rename.values()):
matches = difflib.get_close_matches(k, list(self.param))
if matches:
matches = " Similar parameters include: %r" % matches
else:
matches = ""
raise ValueError(
"Could not jslink %r parameter (or property) "
"on %s object because it was not found.%s"
% (k, type(self).__name__, matches)
)
elif (
self._source_transforms.get(k, False) is None
or self._rename.get(k, False) is None
):
raise ValueError(
"Cannot jslink %r parameter on %s object, "
"the parameter requires a live Python kernel "
"to have an effect." % (k, type(self).__name__)
)
if isinstance(target, Syncable) and code is None:
for k, p in mapping.items():
if k.startswith("event:"):
continue
elif p not in target.param and p not in list(target._rename.values()):
matches = difflib.get_close_matches(p, list(target.param))
if matches:
matches = " Similar parameters include: %r" % matches
else:
matches = ""
raise ValueError(
"Could not jslink %r parameter (or property) "
"on %s object because it was not found.%s"
% (p, type(self).__name__, matches)
)
elif (
target._source_transforms.get(p, False) is None
or target._rename.get(p, False) is None
):
raise ValueError(
"Cannot jslink %r parameter on %s object "
"to %r parameter on %s object. It requires "
"a live Python kernel to have an effect."
% (k, type(self).__name__, p, type(target).__name__)
)
from .links import Link
return Link(
self,
target,
properties=links,
code=code,
args=args,
bidirectional=bidirectional,
)
|
def jslink(self, target, code=None, args=None, bidirectional=False, **links):
"""
Links properties on the source object to those on the target
object in JS code. Supports two modes, either specify a
mapping between the source and target model properties as
keywords or provide a dictionary of JS code snippets which
maps from the source parameter to a JS code snippet which is
executed when the property changes.
Arguments
----------
target: HoloViews object or bokeh Model or panel Viewable
The target to link the value to.
code: dict
Custom code which will be executed when the widget value
changes.
bidirectional: boolean
Whether to link source and target bi-directionally
**links: dict
A mapping between properties on the source model and the
target model property to link it to.
Returns
-------
link: GenericLink
The GenericLink which can be used unlink the widget and
the target model.
"""
if links and code:
raise ValueError(
"Either supply a set of properties to "
"link as keywords or a set of JS code "
"callbacks, not both."
)
elif not links and not code:
raise ValueError(
"Declare parameters to link or a set of callbacks, neither was defined."
)
if args is None:
args = {}
mapping = code or links
for k in mapping:
if k.startswith("event:"):
continue
elif k not in self.param and k not in list(self._rename.values()):
matches = difflib.get_close_matches(k, list(self.param))
if matches:
matches = " Similar parameters include: %r" % matches
else:
matches = ""
raise ValueError(
"Could not jslink %r parameter (or property) "
"on %s object because it was not found.%s"
% (k, type(self).__name__, matches)
)
elif (
self._source_transforms.get(k, False) is None
or self._rename.get(k, False) is None
):
raise ValueError(
"Cannot jslink %r parameter on %s object, "
"the parameter requires a live Python kernel "
"to have an effect." % (k, type(self).__name__)
)
if isinstance(target, Syncable) and code is None:
for k, p in mapping.items():
if k.startswith("event:"):
continue
elif p not in target.param and p not in list(target._rename.values()):
matches = difflib.get_close_matches(p, list(target.param))
if matches:
matches = " Similar parameters include: %r" % matches
else:
matches = ""
raise ValueError(
"Could not jslink %r parameter (or property) "
"on %s object because it was not found.%s"
% (p, type(self).__name__, matches)
)
elif (
target._source_transforms.get(p, False) is None
or target._rename.get(p, False) is None
):
raise ValueError(
"Cannot jslink %r parameter on %s object "
"to %r parameter on %s object. It requires "
"a live Python kernel to have an effect."
% (k, type(self).__name__, p, type(target).__name__)
)
from .links import Link
return Link(
self,
target,
properties=links,
code=code,
args=args,
bidirectional=bidirectional,
)
|
https://github.com/holoviz/panel/issues/1346
|
ValueError Traceback (most recent call last)
<ipython-input-27-627e90bb010e> in <module>
16 '''
17 #pp.jslink(m, code={'x_range.start': jsupdateinfo})
---> 18 pp.jslink(s, **{'x_range.start': 'value'})
19
20 # params -> plot
~/anaconda3/envs/panel/lib/python3.8/site-packages/panel/viewable.py in jslink(self, target, code, args, bidirectional, **links)
1083 else:
1084 matches = ''
-> 1085 raise ValueError("Could not jslink %r parameter (or property) "
1086 "on %s object because it was not found.%s"
1087 % (k, type(self).__name__, matches))
ValueError: Could not jslink 'x_range.start' parameter (or property) on Bokeh object because it was not found.
|
ValueError
|
def __init__(self, root_model, link, source, target=None, arg_overrides={}):
self.root_model = root_model
self.link = link
self.source = source
self.target = target
self.arg_overrides = arg_overrides
self.validate()
specs = self._get_specs(link, source, target)
for src_spec, tgt_spec, code in specs:
try:
self._init_callback(
root_model, link, source, src_spec, target, tgt_spec, code
)
except Exception:
pass
|
def __init__(self, root_model, link, source, target=None, arg_overrides={}):
self.root_model = root_model
self.link = link
self.source = source
self.target = target
self.arg_overrides = arg_overrides
self.validate()
specs = self._get_specs(link, source, target)
for src_spec, tgt_spec, code in specs:
self._init_callback(root_model, link, source, src_spec, target, tgt_spec, code)
|
https://github.com/holoviz/panel/issues/1084
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
~/PythonWorkspace/py37/lib/python3.7/site-packages/IPython/core/formatters.py in __call__(self, obj, include, exclude)
968
969 if method is not None:
--> 970 return method(include=include, exclude=exclude)
971 return None
972 else:
~/PythonWorkspace/modules/panel/panel/template.py in _repr_mimebundle_(self, include, exclude)
156 doc = _Document()
157 comm = state._comm_manager.get_server_comm()
--> 158 self._init_doc(doc, comm, notebook=True)
159 return render_template(doc, comm)
160
~/PythonWorkspace/modules/panel/panel/template.py in _init_doc(self, doc, comm, title, notebook)
113 ref = preprocess_root.ref['id']
114 for name, (obj, tags) in self._render_items.items():
--> 115 model = obj.get_root(doc, comm)
116 doc.on_session_destroyed(obj._server_destroy)
117 for sub in obj.select(Viewable):
~/PythonWorkspace/modules/panel/panel/viewable.py in get_root(self, doc, comm)
527 doc = doc or _curdoc()
528 root = self._get_model(doc, comm=comm)
--> 529 self._preprocess(root)
530 ref = root.ref['id']
531 state._views[ref] = (self, root, doc, comm)
~/PythonWorkspace/modules/panel/panel/viewable.py in _preprocess(self, root)
348 """
349 for hook in self._preprocessing_hooks:
--> 350 hook(self, root)
351
352 def _render_model(self, doc=None, comm=None):
~/PythonWorkspace/modules/panel/panel/links.py in _process_callbacks(cls, root_view, root_model)
126 overrides = arg_overrides.get(id(link), {})
127 callbacks.append(cb(root_model, link, src, tgt,
--> 128 arg_overrides=overrides))
129 return callbacks
130
~/PythonWorkspace/modules/panel/panel/links.py in __init__(self, root_model, link, source, target, arg_overrides)
205 specs = self._get_specs(link, source, target)
206 for src_spec, tgt_spec, code in specs:
--> 207 self._init_callback(root_model, link, source, src_spec, target, tgt_spec, code)
208
209 @classmethod
~/PythonWorkspace/modules/panel/panel/links.py in _init_callback(self, root_model, link, source, src_spec, target, tgt_spec, code)
270
271 for k, v in dict(link.args, **self.arg_overrides).items():
--> 272 arg_model = self._resolve_model(root_model, v, None)
273 if arg_model is not None:
274 references[k] = arg_model
~/PythonWorkspace/modules/panel/panel/links.py in _resolve_model(cls, root_model, obj, model_spec)
243 model = obj.handles[handle_spec]
244 elif isinstance(obj, Viewable):
--> 245 model, _ = obj._models[root_model.ref['id']]
246 elif isinstance(obj, BkModel):
247 model = obj
KeyError: '1046'
|
KeyError
|
def init(self):
"""
Registers the Callback
"""
if self.source in self.registry:
links = self.registry[self.source]
params = {k: v for k, v in self.get_param_values() if k != "name"}
for link in links:
link_params = {k: v for k, v in link.get_param_values() if k != "name"}
if not hasattr(link, "target"):
pass
elif (
type(link) is type(self)
and link.source is self.source
and link.target is self.target
and params == link_params
):
return
self.registry[self.source].append(self)
else:
self.registry[self.source] = [self]
|
def init(self):
"""
Registers the Callback
"""
if self.source in self.registry:
links = self.registry[self.source]
params = {k: v for k, v in self.get_param_values() if k != "name"}
for link in links:
link_params = {k: v for k, v in link.get_param_values() if k != "name"}
if (
type(link) is type(self)
and link.source is self.source
and link.target is self.target
and params == link_params
):
return
self.registry[self.source].append(self)
else:
self.registry[self.source] = [self]
|
https://github.com/holoviz/panel/issues/830
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-9-885dd74dd613> in <module>
3 mkd2 = pn.pane.Markdown(object=ti.value)
4 ti.jscallback(args={'mkd1': mkd1},value="mkd1.text = cb_obj.value")
----> 5 ti.jscallback(args={'mkd2': mkd2},value="mkd2.text = cb_obj.value")
6 pn.Row(ti, pn.Column(mkd1, mkd2))
~/PythonWorkspace/py37/lib/python3.7/site-packages/panel/viewable.py in jscallback(self, args, **callbacks)
890 for k, v in list(callbacks.items()):
891 callbacks[k] = self._rename.get(v, v)
--> 892 return Callback(self, code=callbacks, args=args)
893
894 def jslink(self, target, code=None, args=None, bidirectional=False, **links):
~/PythonWorkspace/py37/lib/python3.7/site-packages/panel/links.py in __init__(self, source, target, **params)
47 self._source = None if source is None else weakref.ref(source)
48 super(Callback, self).__init__(**params)
---> 49 self.init()
50
51 def init(self):
~/PythonWorkspace/py37/lib/python3.7/site-packages/panel/links.py in init(self)
61 k: v for k, v in link.get_param_values() if k != 'name'}
62 if (type(link) is type(self) and link.source is self.source
---> 63 and link.target is self.target and params == link_params):
64 return
65 self.registry[self.source].append(self)
AttributeError: 'Callback' object has no attribute 'target'
|
AttributeError
|
def _get_model(self, doc, root=None, parent=None, comm=None):
"""
Should return the bokeh model to be rendered.
"""
if "panel.models.vtk" not in sys.modules:
if isinstance(comm, JupyterComm):
self.param.warning(
"VTKVolumePlot was not imported on instantiation "
"and may not render in a notebook. Restart "
"the notebook kernel and ensure you load "
"it as part of the extension using:"
"\n\npn.extension('vtk')\n"
)
from ...models.vtk import VTKVolumePlot
else:
VTKVolumePlot = getattr(sys.modules["panel.models.vtk"], "VTKVolumePlot")
props = self._process_param_change(self._init_properties())
volume_data = self._get_volume_data()
model = VTKVolumePlot(data=volume_data, **props)
if root is None:
root = model
self._models[root.ref["id"]] = (model, parent)
return model
|
def _get_model(self, doc, root=None, parent=None, comm=None):
"""
Should return the bokeh model to be rendered.
"""
if "panel.models.vtk" not in sys.modules:
if isinstance(comm, JupyterComm):
self.param.warning(
"VTKVolumePlot was not imported on instantiation "
"and may not render in a notebook. Restart "
"the notebook kernel and ensure you load "
"it as part of the extension using:"
"\n\npn.extension('vtk')\n"
)
from ...models.vtk import VTKVolumePlot
else:
VTKVolumePlot = getattr(sys.modules["panel.models.vtk"], "VTKVolumePlot")
props = self._process_param_change(self._init_properties())
volume_data = self._get_volume_data()
model = VTKVolumePlot(data=volume_data, **props)
if root is None:
root = model
self._link_props(model, ["data"], doc, root, comm)
self._models[root.ref["id"]] = (model, parent)
return model
|
https://github.com/holoviz/panel/issues/819
|
2019-11-27 14:11:22,010 Exception in callback functools.partial(<function wrap.<locals>.null_wrapper at 0x0000
019A696F59D8>, <Future finished exception=ValueError("'data' is not a parameter of VTK00004")>)
Traceback (most recent call last):
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\ioloop.py", line 758, in _run_callback
ret = callback()
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\stack_context.py", line 300, in null_wrappe
r
return fn(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\ioloop.py", line 779, in _discard_future_re
sult
future.result()
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\gen.py", line 1147, in run
yielded = self.gen.send(value)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\server\session.py", line 70, in _needs_docume
nt_lock_wrapper
result = yield yield_for_all_futures(func(self, *args, **kwargs))
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\server\session.py", line 191, in with_documen
t_locked
return func(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 1127, in wrapper
return doc._with_self_as_curdoc(invoke)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 1113, in _with_se
lf_as_curdoc
return f()
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 1126, in invoke
return f(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 916, in remove_th
en_invoke
return callback(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\panel\viewable.py", line 714, in _change_event
self._process_events(events)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\panel\viewable.py", line 704, in _process_events
self.set_param(**self._process_property_change(events))
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\param\parameterized.py", line 1219, in inner
return fn(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\param\parameterized.py", line 2572, in set_param
return self_or_cls.param.set_param(*args,**kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\param\parameterized.py", line 1356, in set_param
raise ValueError("'%s' is not a parameter of %s" % (k, self_or_cls.name))
ValueError: 'data' is not a parameter of VTK00004
|
ValueError
|
def _get_model(self, doc, root=None, parent=None, comm=None):
"""
Should return the bokeh model to be rendered.
"""
if "panel.models.vtk" not in sys.modules:
if isinstance(comm, JupyterComm):
self.param.warning(
"VTKPlot was not imported on instantiation "
"and may not render in a notebook. Restart "
"the notebook kernel and ensure you load "
"it as part of the extension using:"
"\n\npn.extension('vtk')\n"
)
from ...models.vtk import VTKPlot
else:
VTKPlot = getattr(sys.modules["panel.models.vtk"], "VTKPlot")
vtkjs = self._get_vtkjs()
data = base64encode(vtkjs) if vtkjs is not None else vtkjs
props = self._process_param_change(self._init_properties())
model = VTKPlot(data=data, **props)
if root is None:
root = model
self._link_props(
model, ["camera", "enable_keybindings", "orientation_widget"], doc, root, comm
)
self._models[root.ref["id"]] = (model, parent)
return model
|
def _get_model(self, doc, root=None, parent=None, comm=None):
"""
Should return the bokeh model to be rendered.
"""
if "panel.models.vtk" not in sys.modules:
if isinstance(comm, JupyterComm):
self.param.warning(
"VTKPlot was not imported on instantiation "
"and may not render in a notebook. Restart "
"the notebook kernel and ensure you load "
"it as part of the extension using:"
"\n\npn.extension('vtk')\n"
)
from ...models.vtk import VTKPlot
else:
VTKPlot = getattr(sys.modules["panel.models.vtk"], "VTKPlot")
vtkjs = self._get_vtkjs()
data = base64encode(vtkjs) if vtkjs is not None else vtkjs
props = self._process_param_change(self._init_properties())
model = VTKPlot(data=data, **props)
if root is None:
root = model
self._link_props(
model,
["data", "camera", "enable_keybindings", "orientation_widget"],
doc,
root,
comm,
)
self._models[root.ref["id"]] = (model, parent)
return model
|
https://github.com/holoviz/panel/issues/819
|
2019-11-27 14:11:22,010 Exception in callback functools.partial(<function wrap.<locals>.null_wrapper at 0x0000
019A696F59D8>, <Future finished exception=ValueError("'data' is not a parameter of VTK00004")>)
Traceback (most recent call last):
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\ioloop.py", line 758, in _run_callback
ret = callback()
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\stack_context.py", line 300, in null_wrappe
r
return fn(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\ioloop.py", line 779, in _discard_future_re
sult
future.result()
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\tornado\gen.py", line 1147, in run
yielded = self.gen.send(value)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\server\session.py", line 70, in _needs_docume
nt_lock_wrapper
result = yield yield_for_all_futures(func(self, *args, **kwargs))
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\server\session.py", line 191, in with_documen
t_locked
return func(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 1127, in wrapper
return doc._with_self_as_curdoc(invoke)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 1113, in _with_se
lf_as_curdoc
return f()
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 1126, in invoke
return f(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\bokeh\document\document.py", line 916, in remove_th
en_invoke
return callback(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\panel\viewable.py", line 714, in _change_event
self._process_events(events)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\panel\viewable.py", line 704, in _process_events
self.set_param(**self._process_property_change(events))
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\param\parameterized.py", line 1219, in inner
return fn(*args, **kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\param\parameterized.py", line 2572, in set_param
return self_or_cls.param.set_param(*args,**kwargs)
File "D:\Python\Python37-64\pyenv\py37\lib\site-packages\param\parameterized.py", line 1356, in set_param
raise ValueError("'%s' is not a parameter of %s" % (k, self_or_cls.name))
ValueError: 'data' is not a parameter of VTK00004
|
ValueError
|
def _get_sources(self, json, sources):
datasets = json.get("datasets", {})
for name in list(datasets):
if name in sources or isinstance(datasets[name], dict):
continue
data = datasets.pop(name)
columns = set(data[0]) if data else []
if self.is_altair(self.object):
import altair as alt
if not isinstance(
self.object.data, (alt.Data, alt.UrlData)
) and columns == set(self.object.data):
data = ColumnDataSource.from_df(self.object.data)
else:
data = ds_as_cds(data)
sources[name] = ColumnDataSource(data=data)
else:
sources[name] = ColumnDataSource(data=ds_as_cds(data))
data = json.get("data", {}).pop("values", {})
if data:
sources["data"] = ColumnDataSource(data=ds_as_cds(data))
|
def _get_sources(self, json, sources):
datasets = json.get("datasets", {})
for name in list(datasets):
if name in sources or isinstance(datasets[name], dict):
continue
data = datasets.pop(name)
columns = set(data[0]) if data else []
if self.is_altair(self.object):
import altair as alt
if not isinstance(self.object.data, alt.Data) and columns == set(
self.object.data
):
data = ColumnDataSource.from_df(self.object.data)
else:
data = ds_as_cds(data)
sources[name] = ColumnDataSource(data=data)
else:
sources[name] = ColumnDataSource(data=ds_as_cds(data))
data = json.get("data", {}).pop("values", {})
if data:
sources["data"] = ColumnDataSource(data=ds_as_cds(data))
|
https://github.com/holoviz/panel/issues/780
|
KeyError Traceback (most recent call last)
/opt/conda/lib/python3.6/site-packages/IPython/core/formatters.py in __call__(self, obj, include, exclude)
968
969 if method is not None:
--> 970 return method(include=include, exclude=exclude)
971 return None
972 else:
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in _repr_mimebundle_(self, include, exclude)
292 comm = state._comm_manager.get_server_comm()
293 doc = _Document()
--> 294 model = self._render_model(doc, comm)
295 if config.embed:
296 return render_model(model)
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in _render_model(self, doc, comm)
261 if comm is None:
262 comm = state._comm_manager.get_server_comm()
--> 263 model = self.get_root(doc, comm)
264
265 if config.embed:
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in get_root(self, doc, comm)
416 """
417 doc = doc or _curdoc()
--> 418 root = self._get_model(doc, comm=comm)
419 self._preprocess(root)
420 ref = root.ref['id']
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_model(self, doc, root, parent, comm)
113 if root is None:
114 root = model
--> 115 objects = self._get_objects(model, [], doc, root, comm)
116 props = dict(self._init_properties(), objects=objects)
117 model.update(**self._process_param_change(props))
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_objects(self, model, old_objects, doc, root, comm)
105 child, _ = pane._models[root.ref['id']]
106 else:
--> 107 child = pane._get_model(doc, root, model, comm)
108 new_models.append(child)
109 return new_models
/opt/conda/lib/python3.6/site-packages/panel/param.py in _get_model(self, doc, root, parent, comm)
614 if ref in self._models:
615 self._cleanup(root)
--> 616 model = self._inner_layout._get_model(doc, root, parent, comm)
617 self._models[ref] = (model, parent)
618 return model
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_model(self, doc, root, parent, comm)
113 if root is None:
114 root = model
--> 115 objects = self._get_objects(model, [], doc, root, comm)
116 props = dict(self._init_properties(), objects=objects)
117 model.update(**self._process_param_change(props))
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_objects(self, model, old_objects, doc, root, comm)
105 child, _ = pane._models[root.ref['id']]
106 else:
--> 107 child = pane._get_model(doc, root, model, comm)
108 new_models.append(child)
109 return new_models
/opt/conda/lib/python3.6/site-packages/panel/pane/vega.py in _get_model(self, doc, root, parent, comm)
104 else:
105 json = self._to_json(self.object)
--> 106 self._get_sources(json, sources)
107 props = self._process_param_change(self._init_properties())
108 model = VegaPlot(data=json, data_sources=sources, **props)
/opt/conda/lib/python3.6/site-packages/panel/pane/vega.py in _get_sources(self, json, sources)
76 import altair as alt
77 if (not isinstance(self.object.data, alt.Data) and
---> 78 columns == set(self.object.data)):
79 data = ColumnDataSource.from_df(self.object.data)
80 else:
/opt/conda/lib/python3.6/site-packages/altair/utils/schemapi.py in __getitem__(self, item)
233
234 def __getitem__(self, item):
--> 235 return self._kwds[item]
236
237 def __setitem__(self, item, val):
KeyError: 0
Row
[0] Column
[0] Select(name='Area type:', options=['Country', 'Sub-Region', ...], value='Country')
[1] ParamFunction(function)
|
KeyError
|
def _get_model(self, doc, root=None, parent=None, comm=None):
if "panel.models.vega" not in sys.modules:
if isinstance(comm, JupyterComm):
self.param.warning(
"VegaPlot was not imported on instantiation "
"and may not render in a notebook. Restart "
"the notebook kernel and ensure you load "
"it as part of the extension using:"
"\n\npn.extension('vega')\n"
)
from ..models.vega import VegaPlot
else:
VegaPlot = getattr(sys.modules["panel.models.vega"], "VegaPlot")
sources = {}
if self.object is None:
json = None
else:
json = self._to_json(self.object)
self._get_sources(json, sources)
props = self._process_param_change(self._init_properties())
self._get_dimensions(json, props)
model = VegaPlot(data=json, data_sources=sources, **props)
if root is None:
root = model
self._models[root.ref["id"]] = (model, parent)
return model
|
def _get_model(self, doc, root=None, parent=None, comm=None):
if "panel.models.vega" not in sys.modules:
if isinstance(comm, JupyterComm):
self.param.warning(
"VegaPlot was not imported on instantiation "
"and may not render in a notebook. Restart "
"the notebook kernel and ensure you load "
"it as part of the extension using:"
"\n\npn.extension('vega')\n"
)
from ..models.vega import VegaPlot
else:
VegaPlot = getattr(sys.modules["panel.models.vega"], "VegaPlot")
sources = {}
if self.object is None:
json = None
else:
json = self._to_json(self.object)
self._get_sources(json, sources)
props = self._process_param_change(self._init_properties())
model = VegaPlot(data=json, data_sources=sources, **props)
if root is None:
root = model
self._models[root.ref["id"]] = (model, parent)
return model
|
https://github.com/holoviz/panel/issues/780
|
KeyError Traceback (most recent call last)
/opt/conda/lib/python3.6/site-packages/IPython/core/formatters.py in __call__(self, obj, include, exclude)
968
969 if method is not None:
--> 970 return method(include=include, exclude=exclude)
971 return None
972 else:
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in _repr_mimebundle_(self, include, exclude)
292 comm = state._comm_manager.get_server_comm()
293 doc = _Document()
--> 294 model = self._render_model(doc, comm)
295 if config.embed:
296 return render_model(model)
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in _render_model(self, doc, comm)
261 if comm is None:
262 comm = state._comm_manager.get_server_comm()
--> 263 model = self.get_root(doc, comm)
264
265 if config.embed:
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in get_root(self, doc, comm)
416 """
417 doc = doc or _curdoc()
--> 418 root = self._get_model(doc, comm=comm)
419 self._preprocess(root)
420 ref = root.ref['id']
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_model(self, doc, root, parent, comm)
113 if root is None:
114 root = model
--> 115 objects = self._get_objects(model, [], doc, root, comm)
116 props = dict(self._init_properties(), objects=objects)
117 model.update(**self._process_param_change(props))
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_objects(self, model, old_objects, doc, root, comm)
105 child, _ = pane._models[root.ref['id']]
106 else:
--> 107 child = pane._get_model(doc, root, model, comm)
108 new_models.append(child)
109 return new_models
/opt/conda/lib/python3.6/site-packages/panel/param.py in _get_model(self, doc, root, parent, comm)
614 if ref in self._models:
615 self._cleanup(root)
--> 616 model = self._inner_layout._get_model(doc, root, parent, comm)
617 self._models[ref] = (model, parent)
618 return model
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_model(self, doc, root, parent, comm)
113 if root is None:
114 root = model
--> 115 objects = self._get_objects(model, [], doc, root, comm)
116 props = dict(self._init_properties(), objects=objects)
117 model.update(**self._process_param_change(props))
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_objects(self, model, old_objects, doc, root, comm)
105 child, _ = pane._models[root.ref['id']]
106 else:
--> 107 child = pane._get_model(doc, root, model, comm)
108 new_models.append(child)
109 return new_models
/opt/conda/lib/python3.6/site-packages/panel/pane/vega.py in _get_model(self, doc, root, parent, comm)
104 else:
105 json = self._to_json(self.object)
--> 106 self._get_sources(json, sources)
107 props = self._process_param_change(self._init_properties())
108 model = VegaPlot(data=json, data_sources=sources, **props)
/opt/conda/lib/python3.6/site-packages/panel/pane/vega.py in _get_sources(self, json, sources)
76 import altair as alt
77 if (not isinstance(self.object.data, alt.Data) and
---> 78 columns == set(self.object.data)):
79 data = ColumnDataSource.from_df(self.object.data)
80 else:
/opt/conda/lib/python3.6/site-packages/altair/utils/schemapi.py in __getitem__(self, item)
233
234 def __getitem__(self, item):
--> 235 return self._kwds[item]
236
237 def __setitem__(self, item, val):
KeyError: 0
Row
[0] Column
[0] Select(name='Area type:', options=['Country', 'Sub-Region', ...], value='Country')
[1] ParamFunction(function)
|
KeyError
|
def _update(self, model):
if self.object is None:
json = None
else:
json = self._to_json(self.object)
self._get_sources(json, model.data_sources)
props = {
p: getattr(self, p)
for p in list(Layoutable.param)
if getattr(self, p) is not None
}
self._get_dimensions(json, props)
props["data"] = json
model.update(**props)
|
def _update(self, model):
if self.object is None:
json = None
else:
json = self._to_json(self.object)
self._get_sources(json, model.data_sources)
model.data = json
|
https://github.com/holoviz/panel/issues/780
|
KeyError Traceback (most recent call last)
/opt/conda/lib/python3.6/site-packages/IPython/core/formatters.py in __call__(self, obj, include, exclude)
968
969 if method is not None:
--> 970 return method(include=include, exclude=exclude)
971 return None
972 else:
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in _repr_mimebundle_(self, include, exclude)
292 comm = state._comm_manager.get_server_comm()
293 doc = _Document()
--> 294 model = self._render_model(doc, comm)
295 if config.embed:
296 return render_model(model)
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in _render_model(self, doc, comm)
261 if comm is None:
262 comm = state._comm_manager.get_server_comm()
--> 263 model = self.get_root(doc, comm)
264
265 if config.embed:
/opt/conda/lib/python3.6/site-packages/panel/viewable.py in get_root(self, doc, comm)
416 """
417 doc = doc or _curdoc()
--> 418 root = self._get_model(doc, comm=comm)
419 self._preprocess(root)
420 ref = root.ref['id']
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_model(self, doc, root, parent, comm)
113 if root is None:
114 root = model
--> 115 objects = self._get_objects(model, [], doc, root, comm)
116 props = dict(self._init_properties(), objects=objects)
117 model.update(**self._process_param_change(props))
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_objects(self, model, old_objects, doc, root, comm)
105 child, _ = pane._models[root.ref['id']]
106 else:
--> 107 child = pane._get_model(doc, root, model, comm)
108 new_models.append(child)
109 return new_models
/opt/conda/lib/python3.6/site-packages/panel/param.py in _get_model(self, doc, root, parent, comm)
614 if ref in self._models:
615 self._cleanup(root)
--> 616 model = self._inner_layout._get_model(doc, root, parent, comm)
617 self._models[ref] = (model, parent)
618 return model
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_model(self, doc, root, parent, comm)
113 if root is None:
114 root = model
--> 115 objects = self._get_objects(model, [], doc, root, comm)
116 props = dict(self._init_properties(), objects=objects)
117 model.update(**self._process_param_change(props))
/opt/conda/lib/python3.6/site-packages/panel/layout.py in _get_objects(self, model, old_objects, doc, root, comm)
105 child, _ = pane._models[root.ref['id']]
106 else:
--> 107 child = pane._get_model(doc, root, model, comm)
108 new_models.append(child)
109 return new_models
/opt/conda/lib/python3.6/site-packages/panel/pane/vega.py in _get_model(self, doc, root, parent, comm)
104 else:
105 json = self._to_json(self.object)
--> 106 self._get_sources(json, sources)
107 props = self._process_param_change(self._init_properties())
108 model = VegaPlot(data=json, data_sources=sources, **props)
/opt/conda/lib/python3.6/site-packages/panel/pane/vega.py in _get_sources(self, json, sources)
76 import altair as alt
77 if (not isinstance(self.object.data, alt.Data) and
---> 78 columns == set(self.object.data)):
79 data = ColumnDataSource.from_df(self.object.data)
80 else:
/opt/conda/lib/python3.6/site-packages/altair/utils/schemapi.py in __getitem__(self, item)
233
234 def __getitem__(self, item):
--> 235 return self._kwds[item]
236
237 def __setitem__(self, item, val):
KeyError: 0
Row
[0] Column
[0] Select(name='Area type:', options=['Country', 'Sub-Region', ...], value='Country')
[1] ParamFunction(function)
|
KeyError
|
def widgets_from_dimensions(cls, object, widget_types=None, widgets_type="individual"):
from holoviews.core import Dimension, DynamicMap
from holoviews.core.options import SkipRendering
from holoviews.core.util import isnumeric, unicode, datetime_types, unique_iterator
from holoviews.core.traversal import unique_dimkeys
from holoviews.plotting.plot import Plot, GenericCompositePlot
from holoviews.plotting.util import get_dynamic_mode
from ..widgets import (
Widget,
DiscreteSlider,
Select,
FloatSlider,
DatetimeInput,
IntSlider,
)
if widget_types is None:
widget_types = {}
if isinstance(object, GenericCompositePlot):
object = object.layout
elif isinstance(object, Plot):
object = object.hmap
if isinstance(object, DynamicMap) and object.unbounded:
dims = ", ".join("%r" % dim for dim in object.unbounded)
msg = (
"DynamicMap cannot be displayed without explicit indexing "
"as {dims} dimension(s) are unbounded. "
"\nSet dimensions bounds with the DynamicMap redim.range "
"or redim.values methods."
)
raise SkipRendering(msg.format(dims=dims))
dynamic, bounded = get_dynamic_mode(object)
dims, keys = unique_dimkeys(object)
if dims == [Dimension("Frame")] and keys == [(0,)]:
return [], {}
nframes = 1
values = dict() if dynamic else dict(zip(dims, zip(*keys)))
dim_values = OrderedDict()
widgets = []
dims = [
d
for d in dims
if values.get(d) is not None or d.values or d.range != (None, None)
]
for i, dim in enumerate(dims):
widget_type, widget, widget_kwargs = None, None, {}
if widgets_type == "individual":
if i == 0 and i == (len(dims) - 1):
margin = (20, 20, 20, 20)
elif i == 0:
margin = (20, 20, 5, 20)
elif i == (len(dims) - 1):
margin = (5, 20, 20, 20)
else:
margin = (0, 20, 5, 20)
kwargs = {"margin": margin, "width": 250}
else:
kwargs = {}
vals = dim.values or values.get(dim, None)
if vals is not None:
vals = list(unique_iterator(vals))
dim_values[dim.name] = vals
if widgets_type == "scrubber":
if not vals:
raise ValueError(
"Scrubber widget may only be used if all dimensions define values."
)
nframes *= len(vals)
elif dim.name in widget_types:
widget = widget_types[dim.name]
if isinstance(widget, Widget):
widget.set_param(**kwargs)
if not widget.name:
widget.name = dim.label
widgets.append(widget)
continue
elif isinstance(widget, dict):
widget_type = widget.get("type", widget_type)
widget_kwargs = dict(widget)
elif isinstance(widget, type) and issubclass(widget, Widget):
widget_type = widget
else:
raise ValueError(
"Explicit widget definitions expected "
"to be a widget instance or type, %s "
"dimension widget declared as %s." % (dim, widget)
)
widget_kwargs.update(kwargs)
if vals:
if (
all(isnumeric(v) or isinstance(v, datetime_types) for v in vals)
and len(vals) > 1
):
vals = sorted(vals)
labels = [unicode(dim.pprint_value(v)) for v in vals]
options = OrderedDict(zip(labels, vals))
widget_type = widget_type or DiscreteSlider
else:
options = list(vals)
widget_type = widget_type or Select
default = vals[0] if dim.default is None else dim.default
widget_kwargs = dict(
dict(name=dim.label, options=options, value=default), **widget_kwargs
)
widget = widget_type(**widget_kwargs)
elif dim.range != (None, None):
start, end = dim.range
if start == end:
continue
default = start if dim.default is None else dim.default
if widget_type is not None:
pass
elif all(isinstance(v, int) for v in (start, end, default)):
widget_type = IntSlider
step = 1 if dim.step is None else dim.step
elif isinstance(default, datetime_types):
widget_type = DatetimeInput
else:
widget_type = FloatSlider
step = 0.1 if dim.step is None else dim.step
widget_kwargs = dict(
dict(
step=step,
name=dim.label,
start=dim.range[0],
end=dim.range[1],
value=default,
),
**widget_kwargs,
)
widget = widget_type(**widget_kwargs)
if widget is not None:
widgets.append(widget)
if widgets_type == "scrubber":
widgets = [Player(length=nframes, width=550)]
return widgets, dim_values
|
def widgets_from_dimensions(cls, object, widget_types=None, widgets_type="individual"):
from holoviews.core import Dimension, DynamicMap
from holoviews.core.options import SkipRendering
from holoviews.core.util import isnumeric, unicode, datetime_types, unique_iterator
from holoviews.core.traversal import unique_dimkeys
from holoviews.plotting.plot import Plot, GenericCompositePlot
from holoviews.plotting.util import get_dynamic_mode
from ..widgets import (
Widget,
DiscreteSlider,
Select,
FloatSlider,
DatetimeInput,
IntSlider,
)
if widget_types is None:
widget_types = {}
if isinstance(object, GenericCompositePlot):
object = object.layout
elif isinstance(object, Plot):
object = object.hmap
if isinstance(object, DynamicMap) and object.unbounded:
dims = ", ".join("%r" % dim for dim in object.unbounded)
msg = (
"DynamicMap cannot be displayed without explicit indexing "
"as {dims} dimension(s) are unbounded. "
"\nSet dimensions bounds with the DynamicMap redim.range "
"or redim.values methods."
)
raise SkipRendering(msg.format(dims=dims))
dynamic, bounded = get_dynamic_mode(object)
dims, keys = unique_dimkeys(object)
if dims == [Dimension("Frame")] and keys == [(0,)]:
return [], {}
nframes = 1
values = dict() if dynamic else dict(zip(dims, zip(*keys)))
dim_values = OrderedDict()
widgets = []
dims = [
d
for d in dims
if values.get(d) is not None or d.values or d.range != (None, None)
]
for i, dim in enumerate(dims):
widget_type, widget, widget_kwargs = None, None, {}
if widgets_type == "individual":
if i == 0 and i == (len(dims) - 1):
margin = (20, 20, 20, 20)
elif i == 0:
margin = (20, 20, 5, 20)
elif i == (len(dims) - 1):
margin = (5, 20, 20, 20)
else:
margin = (0, 20, 5, 20)
kwargs = {"margin": margin, "width": 250}
else:
kwargs = {}
vals = dim.values or values.get(dim, None)
if vals is not None:
vals = list(unique_iterator(vals))
dim_values[dim.name] = vals
if widgets_type == "scrubber":
if not vals:
raise ValueError(
"Scrubber widget may only be used if all dimensions define values."
)
nframes *= len(vals)
elif dim.name in widget_types:
widget = widget_types[dim.name]
if isinstance(widget, Widget):
widgets.append(widget)
continue
elif isinstance(widget, dict):
widget_type = widget.get("type", widget_type)
widget_kwargs = dict(widget)
elif isinstance(widget, type) and issubclass(widget, Widget):
widget_type = widget
else:
raise ValueError(
"Explicit widget definitions expected "
"to be a widget instance or type, %s "
"dimension widget declared as %s." % (dim, widget)
)
widget_kwargs.update(kwargs)
if vals:
if (
all(isnumeric(v) or isinstance(v, datetime_types) for v in vals)
and len(vals) > 1
):
vals = sorted(vals)
labels = [unicode(dim.pprint_value(v)) for v in vals]
options = OrderedDict(zip(labels, vals))
widget_type = widget_type or DiscreteSlider
else:
options = list(vals)
widget_type = widget_type or Select
default = vals[0] if dim.default is None else dim.default
widget_kwargs = dict(
dict(name=dim.label, options=options, value=default), **widget_kwargs
)
widget = widget_type(**widget_kwargs)
elif dim.range != (None, None):
start, end = dim.range
if start == end:
continue
default = start if dim.default is None else dim.default
if widget_type is not None:
pass
elif all(isinstance(v, int) for v in (start, end, default)):
widget_type = IntSlider
step = 1 if dim.step is None else dim.step
elif isinstance(default, datetime_types):
widget_type = DatetimeInput
else:
widget_type = FloatSlider
step = 0.1 if dim.step is None else dim.step
widget_kwargs = dict(
dict(
step=step,
name=dim.label,
start=dim.range[0],
end=dim.range[1],
value=default,
),
**widget_kwargs,
)
widget = widget_type(**widget_kwargs)
if widget is not None:
widgets.append(widget)
if widgets_type == "scrubber":
widgets = [Player(length=nframes, width=550)]
return widgets, dim_values
|
https://github.com/holoviz/panel/issues/759
|
Traceback (most recent call last):
File "/home/travis/miniconda/envs/test-environment/lib/python3.6/site-packages/holoviews/plotting/util.py", line 273, in get_plot_frame
return map_obj[key]
File "/home/travis/miniconda/envs/test-environment/lib/python3.6/site-packages/holoviews/core/spaces.py", line 1324, in __getitem__
val = self._execute_callback(*tuple_key)
File "/home/travis/miniconda/envs/test-environment/lib/python3.6/site-packages/holoviews/core/spaces.py", line 1079, in _execute_callback
self._validate_key(args) # Validate input key
File "/home/travis/miniconda/envs/test-environment/lib/python3.6/site-packages/holoviews/core/spaces.py", line 1022, in _validate_key
if val < low:
TypeError: '>' not supported between instances of 'float' and 'NoneType'
|
TypeError
|
def _from_numpy(self, data):
from scipy.io import wavfile
buffer = BytesIO()
wavfile.write(buffer, self.sample_rate, data)
return buffer
|
def _from_numpy(self, data):
buffer = BytesIO()
wavfile.write(buffer, self.sample_rate, data)
return buffer
|
https://github.com/holoviz/panel/issues/720
|
$ conda create -n panel -c pyviz/label/dev panel
...
$ conda activate panel
(panel) $ python
Python 3.7.4 (default, Aug 13 2019, 15:17:50)
[Clang 4.0.1 (tags/RELEASE_401/final)] :: Anaconda, Inc. on darwin
Type "help", "copyright", "credits" or "license" for more information.
import panel
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Users/rditlsc9/miniconda/envs/panel/lib/python3.7/site-packages/panel/__init__.py", line 6, in <module>
from . import links # noqa
File "/Users/rditlsc9/miniconda/envs/panel/lib/python3.7/site-packages/panel/links.py", line 12, in <module>
from .pane.holoviews import HoloViews, generate_panel_bokeh_map, is_bokeh_element_plot
File "/Users/rditlsc9/miniconda/envs/panel/lib/python3.7/site-packages/panel/pane/__init__.py", line 13, in <module>
from .holoviews import HoloViews # noqa
File "/Users/rditlsc9/miniconda/envs/panel/lib/python3.7/site-packages/panel/pane/holoviews.py", line 20, in <module>
from ..widgets import Player
File "/Users/rditlsc9/miniconda/envs/panel/lib/python3.7/site-packages/panel/widgets/__init__.py", line 12, in <module>
from .misc import Audio, VideoStream # noqa
File "/Users/rditlsc9/miniconda/envs/panel/lib/python3.7/site-packages/panel/widgets/misc.py", line 14, in <module>
from scipy.io import wavfile
ModuleNotFoundError: No module named 'scipy'
|
ModuleNotFoundError
|
def _get_model(self, doc, root=None, parent=None, comm=None):
model = self._widget_type(**self._process_param_change(self._init_properties()))
if root is None:
root = model
# Link parameters and bokeh model
values = dict(self.get_param_values())
properties = self._filter_properties(list(self._process_param_change(values)))
self._models[root.ref["id"]] = (model, parent)
self._link_props(model, properties, doc, root, comm)
return model
|
def _get_model(self, doc, root=None, parent=None, comm=None):
model = self._widget_type(**self._process_param_change(self._init_properties()))
if root is None:
root = model
# Link parameters and bokeh model
values = dict(self.get_param_values())
properties = list(self._process_param_change(values))
self._models[root.ref["id"]] = (model, parent)
self._link_props(model, properties, doc, root, comm)
return model
|
https://github.com/holoviz/panel/issues/548
|
Traceback (most recent call last):
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/ioloop.py", line 743, in _run_callback
ret = callback()
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/ioloop.py", line 767, in _discard_future_result
future.result()
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/gen.py", line 742, in run
yielded = self.gen.send(value)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/server/session.py", line 70, in _needs_document_lock_wrapper
result = yield yield_for_all_futures(func(self, *args, **kwargs))
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/server/session.py", line 191, in with_document_locked
return func(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1127, in wrapper
return doc._with_self_as_curdoc(invoke)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1113, in _with_self_as_curdoc
return f()
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1126, in invoke
return f(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 916, in remove_then_invoke
return callback(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/panel/viewable.py", line 653, in _change_event
self.set_param(**self._process_property_change(events))
File "/users/huang/anaconda3/lib/python3.7/site-packages/panel/widgets/input.py", line 59, in _process_property_change
header, content = msg['value'].split(",", 1)
ValueError: not enough values to unpack (expected 2, got 1)
|
ValueError
|
def _process_param_change(self, msg):
msg = super(FileInput, self)._process_param_change(msg)
if "value" in msg:
msg.pop("value")
if "mime_type" in msg:
msg.pop("mime_type")
return msg
|
def _process_param_change(self, msg):
msg = super(FileInput, self)._process_param_change(msg)
if "value" in msg:
if self.mime_type:
template = "data:{mime};base64,{data}"
data = b64encode(msg["value"])
msg["value"] = template.format(
data=data.decode("utf-8"), mime=self.mime_type
)
else:
msg["value"] = ""
return msg
|
https://github.com/holoviz/panel/issues/548
|
Traceback (most recent call last):
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/ioloop.py", line 743, in _run_callback
ret = callback()
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/ioloop.py", line 767, in _discard_future_result
future.result()
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/gen.py", line 742, in run
yielded = self.gen.send(value)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/server/session.py", line 70, in _needs_document_lock_wrapper
result = yield yield_for_all_futures(func(self, *args, **kwargs))
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/server/session.py", line 191, in with_document_locked
return func(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1127, in wrapper
return doc._with_self_as_curdoc(invoke)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1113, in _with_self_as_curdoc
return f()
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1126, in invoke
return f(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 916, in remove_then_invoke
return callback(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/panel/viewable.py", line 653, in _change_event
self.set_param(**self._process_property_change(events))
File "/users/huang/anaconda3/lib/python3.7/site-packages/panel/widgets/input.py", line 59, in _process_property_change
header, content = msg['value'].split(",", 1)
ValueError: not enough values to unpack (expected 2, got 1)
|
ValueError
|
def _process_property_change(self, msg):
msg = super(FileInput, self)._process_property_change(msg)
if "value" in msg:
msg["value"] = b64decode(msg["value"])
return msg
|
def _process_property_change(self, msg):
msg = super(FileInput, self)._process_property_change(msg)
if "value" in msg:
header, content = msg["value"].split(",", 1)
msg["mime_type"] = header.split(":")[1].split(";")[0]
msg["value"] = b64decode(content)
return msg
|
https://github.com/holoviz/panel/issues/548
|
Traceback (most recent call last):
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/ioloop.py", line 743, in _run_callback
ret = callback()
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/ioloop.py", line 767, in _discard_future_result
future.result()
File "/users/huang/anaconda3/lib/python3.7/site-packages/tornado/gen.py", line 742, in run
yielded = self.gen.send(value)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/server/session.py", line 70, in _needs_document_lock_wrapper
result = yield yield_for_all_futures(func(self, *args, **kwargs))
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/server/session.py", line 191, in with_document_locked
return func(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1127, in wrapper
return doc._with_self_as_curdoc(invoke)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1113, in _with_self_as_curdoc
return f()
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 1126, in invoke
return f(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/bokeh/document/document.py", line 916, in remove_then_invoke
return callback(*args, **kwargs)
File "/users/huang/anaconda3/lib/python3.7/site-packages/panel/viewable.py", line 653, in _change_event
self.set_param(**self._process_property_change(events))
File "/users/huang/anaconda3/lib/python3.7/site-packages/panel/widgets/input.py", line 59, in _process_property_change
header, content = msg['value'].split(",", 1)
ValueError: not enough values to unpack (expected 2, got 1)
|
ValueError
|
def append(self, pane):
from .pane import panel
name = None
if isinstance(pane, tuple):
name, pane = pane
new_objects = list(self)
new_objects.append(panel(pane, name=name))
name = param_name(new_objects[-1].name) if name is None else name
self._names.append(name)
self.objects = new_objects
|
def append(self, pane):
from .pane import panel
name = None
if isinstance(pane, tuple):
name, pane = pane
new_objects = list(self)
new_objects.append(panel(pane, name=name))
name = param_name(new_objects[-1].name) if name is None else name
self._names[-1] = name
self.objects = new_objects
|
https://github.com/holoviz/panel/issues/280
|
---------------------------------------------------------------------------
ModuleNotFoundError Traceback (most recent call last)
<ipython-input-100-8bd431d8f26d> in <module>
----> 1 pn.Column([])
~/panel/panel/layout.py in __init__(self, *objects, **params)
33 def __init__(self, *objects, **params):
34 from .pane import panel
---> 35 objects = [panel(pane) for pane in objects]
36 super(Panel, self).__init__(objects=objects, **params)
37
~/panel/panel/layout.py in <listcomp>(.0)
33 def __init__(self, *objects, **params):
34 from .pane import panel
---> 35 objects = [panel(pane) for pane in objects]
36 super(Panel, self).__init__(objects=objects, **params)
37
~/panel/panel/pane.py in panel(obj, **kwargs)
45 if kwargs.get('name', False) is None:
46 kwargs.pop('name')
---> 47 pane = PaneBase.get_pane_type(obj)(obj, **kwargs)
48 if len(pane.layout) == 1 and pane._unpack:
49 return pane.layout[0]
~/panel/panel/plotly.py in __init__(self, object, layout, **params)
41
42 def __init__(self, object, layout=None, **params):
---> 43 super(Plotly, self).__init__(self._to_figure(object, layout),
44 plotly_layout=layout, **params)
45
~/panel/panel/plotly.py in _to_figure(self, obj, layout)
50
51 def _to_figure(self, obj, layout={}):
---> 52 import plotly.graph_objs as go
53 if isinstance(obj, go.Figure):
54 fig = obj
ModuleNotFoundError: No module named 'plotly'
|
ModuleNotFoundError
|
def applies(cls, obj):
return (
isinstance(obj, list) and obj and all(cls.applies(o) for o in obj)
) or hasattr(obj, "to_plotly_json")
|
def applies(cls, obj):
return (isinstance(obj, list) and all(cls.applies(o) for o in obj)) or hasattr(
obj, "to_plotly_json"
)
|
https://github.com/holoviz/panel/issues/280
|
---------------------------------------------------------------------------
ModuleNotFoundError Traceback (most recent call last)
<ipython-input-100-8bd431d8f26d> in <module>
----> 1 pn.Column([])
~/panel/panel/layout.py in __init__(self, *objects, **params)
33 def __init__(self, *objects, **params):
34 from .pane import panel
---> 35 objects = [panel(pane) for pane in objects]
36 super(Panel, self).__init__(objects=objects, **params)
37
~/panel/panel/layout.py in <listcomp>(.0)
33 def __init__(self, *objects, **params):
34 from .pane import panel
---> 35 objects = [panel(pane) for pane in objects]
36 super(Panel, self).__init__(objects=objects, **params)
37
~/panel/panel/pane.py in panel(obj, **kwargs)
45 if kwargs.get('name', False) is None:
46 kwargs.pop('name')
---> 47 pane = PaneBase.get_pane_type(obj)(obj, **kwargs)
48 if len(pane.layout) == 1 and pane._unpack:
49 return pane.layout[0]
~/panel/panel/plotly.py in __init__(self, object, layout, **params)
41
42 def __init__(self, object, layout=None, **params):
---> 43 super(Plotly, self).__init__(self._to_figure(object, layout),
44 plotly_layout=layout, **params)
45
~/panel/panel/plotly.py in _to_figure(self, obj, layout)
50
51 def _to_figure(self, obj, layout={}):
---> 52 import plotly.graph_objs as go
53 if isinstance(obj, go.Figure):
54 fig = obj
ModuleNotFoundError: No module named 'plotly'
|
ModuleNotFoundError
|
def main(args):
jax_config.update("jax_platform_name", args.device)
print("Start vanilla HMC...")
vanilla_samples = mcmc(
args.num_warmup,
args.num_samples,
init_params=np.array([2.0, 0.0]),
potential_fn=dual_moon_pe,
progbar=True,
)
opt_init, opt_update, get_params = optimizers.adam(0.001)
rng_guide, rng_init, rng_train = random.split(random.PRNGKey(1), 3)
guide = AutoIAFNormal(
rng_guide, dual_moon_model, get_params, hidden_dims=[args.num_hidden]
)
svi_init, svi_update, _ = svi(
dual_moon_model, guide, elbo, opt_init, opt_update, get_params
)
opt_state, _ = svi_init(rng_init)
def body_fn(val, i):
opt_state_, rng_ = val
loss, opt_state_, rng_ = svi_update(i, rng_, opt_state_)
return (opt_state_, rng_), loss
print("Start training guide...")
(last_state, _), losses = lax.scan(
body_fn, (opt_state, rng_train), np.arange(args.num_iters)
)
print("Finish training guide. Extract samples...")
guide_samples = guide.sample_posterior(
random.PRNGKey(0), last_state, sample_shape=(args.num_samples,)
)
transform = guide.get_transform(last_state)
unpack_fn = guide.unpack_latent
_, potential_fn, constrain_fn = initialize_model(random.PRNGKey(0), dual_moon_model)
transformed_potential_fn = make_transformed_pe(potential_fn, transform, unpack_fn)
transformed_constrain_fn = lambda x: constrain_fn(unpack_fn(transform(x))) # noqa: E731
init_params = np.zeros(guide.latent_size)
print("\nStart NeuTra HMC...")
zs = mcmc(
args.num_warmup,
args.num_samples,
init_params,
potential_fn=transformed_potential_fn,
)
print("Transform samples into unwarped space...")
samples = vmap(transformed_constrain_fn)(zs)
summary(tree_map(lambda x: x[None, ...], samples))
# make plots
# IAF guide samples (for plotting)
iaf_base_samples = dist.Normal(np.zeros(2), 1.0).sample(random.PRNGKey(0), (1000,))
iaf_trans_samples = vmap(transformed_constrain_fn)(iaf_base_samples)["x"]
x1 = np.linspace(-3, 3, 100)
x2 = np.linspace(-3, 3, 100)
X1, X2 = np.meshgrid(x1, x2)
P = np.clip(np.exp(-dual_moon_pe(np.stack([X1, X2], axis=-1))), a_min=0.0)
fig = plt.figure(figsize=(12, 16), constrained_layout=True)
gs = GridSpec(3, 2, figure=fig)
ax1 = fig.add_subplot(gs[0, 0])
ax2 = fig.add_subplot(gs[0, 1])
ax3 = fig.add_subplot(gs[1, 0])
ax4 = fig.add_subplot(gs[1, 1])
ax5 = fig.add_subplot(gs[2, 0])
ax6 = fig.add_subplot(gs[2, 1])
ax1.plot(np.log(losses[1000:]))
ax1.set_title("Autoguide training log loss (after 1000 steps)")
ax2.contourf(X1, X2, P, cmap="OrRd")
sns.kdeplot(
guide_samples["x"][:, 0].copy(),
guide_samples["x"][:, 1].copy(),
n_levels=30,
ax=ax2,
)
ax2.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="Posterior using AutoIAFNormal guide",
)
sns.scatterplot(
iaf_base_samples[:, 0],
iaf_base_samples[:, 1],
ax=ax3,
hue=iaf_trans_samples[:, 0] < 0.0,
)
ax3.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="AutoIAFNormal base samples (True=left moon; False=right moon)",
)
ax4.contourf(X1, X2, P, cmap="OrRd")
sns.kdeplot(
vanilla_samples[:, 0].copy(), vanilla_samples[:, 1].copy(), n_levels=30, ax=ax4
)
ax4.plot(vanilla_samples[-50:, 0], vanilla_samples[-50:, 1], "bo-", alpha=0.5)
ax4.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="Posterior using vanilla HMC sampler",
)
sns.scatterplot(
zs[:, 0],
zs[:, 1],
ax=ax5,
hue=samples["x"][:, 0] < 0.0,
s=30,
alpha=0.5,
edgecolor="none",
)
ax5.set(
xlim=[-5, 5],
ylim=[-5, 5],
xlabel="x0",
ylabel="x1",
title="Samples from the warped posterior - p(z)",
)
ax6.contourf(X1, X2, P, cmap="OrRd")
sns.kdeplot(
samples["x"][:, 0].copy(), samples["x"][:, 1].copy(), n_levels=30, ax=ax6
)
ax6.plot(samples["x"][-50:, 0], samples["x"][-50:, 1], "bo-", alpha=0.2)
ax6.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="Posterior using NeuTra HMC sampler",
)
plt.savefig("neutra.pdf")
plt.close()
|
def main(args):
jax_config.update("jax_platform_name", args.device)
print("Start vanilla HMC...")
vanilla_samples = mcmc(
args.num_warmup,
args.num_samples,
init_params=np.array([2.0, 0.0]),
potential_fn=dual_moon_pe,
progbar=True,
)
opt_init, opt_update, get_params = optimizers.adam(0.001)
rng_guide, rng_init, rng_train = random.split(random.PRNGKey(1), 3)
guide = AutoIAFNormal(
rng_guide, dual_moon_model, get_params, hidden_dims=[args.num_hidden]
)
svi_init, svi_update, _ = svi(
dual_moon_model, guide, elbo, opt_init, opt_update, get_params
)
opt_state, _ = svi_init(rng_init)
def body_fn(val):
i, loss, opt_state_, rng_ = val
loss, opt_state_, rng_ = svi_update(i, rng_, opt_state_)
return i + 1, loss, opt_state_, rng_
print("Start training guide...")
# TODO: remove the warning when the issue is fixed upstream
warnings.warn(
"Due to the bug https://github.com/google/jax/issues/939, to"
" train AutoIAFNormal we should set the environment flag"
' "XLA_FLAGS=--xla_cpu_enable_fast_math=false".'
)
losses, opt_states = fori_collect(
0,
args.num_iters,
jit(body_fn),
(0, 0.0, opt_state, rng_train),
transform=lambda x: (x[1], x[2]),
progbar=False,
)
last_state = tree_map(lambda x: x[-1], opt_states)
print("Finish training guide. Extract samples...")
guide_samples = guide.sample_posterior(
random.PRNGKey(0), last_state, sample_shape=(args.num_samples,)
)
transform = guide.get_transform(last_state)
unpack_fn = lambda u: guide.unpack_latent(u, transform=False) # noqa: E731
_, potential_fn, constrain_fn = initialize_model(random.PRNGKey(0), dual_moon_model)
transformed_potential_fn = make_transformed_pe(potential_fn, transform, unpack_fn)
transformed_constrain_fn = lambda x: constrain_fn(unpack_fn(transform(x))) # noqa: E731
init_params = np.zeros(guide.latent_size)
print("\nStart NeuTra HMC...")
zs = mcmc(
args.num_warmup,
args.num_samples,
init_params,
potential_fn=transformed_potential_fn,
)
print("Transform samples into unwarped space...")
samples = vmap(transformed_constrain_fn)(zs)
summary(tree_map(lambda x: x[None, ...], samples))
# make plots
# IAF guide samples (for plotting)
iaf_base_samples = dist.Normal(np.zeros(2), 1.0).sample(random.PRNGKey(0), (1000,))
iaf_trans_samples = vmap(transformed_constrain_fn)(iaf_base_samples)["x"]
x1 = np.linspace(-3, 3, 100)
x2 = np.linspace(-3, 3, 100)
X1, X2 = np.meshgrid(x1, x2)
P = np.clip(np.exp(-dual_moon_pe(np.stack([X1, X2], axis=-1))), a_min=0.0)
fig = plt.figure(figsize=(12, 16), constrained_layout=True)
gs = GridSpec(3, 2, figure=fig)
ax1 = fig.add_subplot(gs[0, 0])
ax2 = fig.add_subplot(gs[0, 1])
ax3 = fig.add_subplot(gs[1, 0])
ax4 = fig.add_subplot(gs[1, 1])
ax5 = fig.add_subplot(gs[2, 0])
ax6 = fig.add_subplot(gs[2, 1])
ax1.plot(np.log(losses[1000:]))
ax1.set_title("Autoguide training log loss (after 1000 steps)")
ax2.contourf(X1, X2, P, cmap="OrRd")
sns.kdeplot(
guide_samples["x"][:, 0].copy(),
guide_samples["x"][:, 1].copy(),
n_levels=30,
ax=ax2,
)
ax2.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="Posterior using AutoIAFNormal guide",
)
sns.scatterplot(
iaf_base_samples[:, 0],
iaf_base_samples[:, 1],
ax=ax3,
hue=iaf_trans_samples[:, 0] < 0.0,
)
ax3.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="AutoIAFNormal base samples (True=left moon; False=right moon)",
)
ax4.contourf(X1, X2, P, cmap="OrRd")
sns.kdeplot(
vanilla_samples[:, 0].copy(), vanilla_samples[:, 1].copy(), n_levels=30, ax=ax4
)
ax4.plot(vanilla_samples[-50:, 0], vanilla_samples[-50:, 1], "bo-", alpha=0.5)
ax4.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="Posterior using vanilla HMC sampler",
)
sns.scatterplot(
zs[:, 0],
zs[:, 1],
ax=ax5,
hue=samples["x"][:, 0] < 0.0,
s=30,
alpha=0.5,
edgecolor="none",
)
ax5.set(
xlim=[-5, 5],
ylim=[-5, 5],
xlabel="x0",
ylabel="x1",
title="Samples from the warped posterior - p(z)",
)
ax6.contourf(X1, X2, P, cmap="OrRd")
sns.kdeplot(
samples["x"][:, 0].copy(), samples["x"][:, 1].copy(), n_levels=30, ax=ax6
)
ax6.plot(samples["x"][-50:, 0], samples["x"][-50:, 1], "bo-", alpha=0.2)
ax6.set(
xlim=[-3, 3],
ylim=[-3, 3],
xlabel="x0",
ylabel="x1",
title="Posterior using NeuTra HMC sampler",
)
plt.savefig("neutra.pdf")
plt.close()
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def body_fn(val, i):
opt_state_, rng_ = val
loss, opt_state_, rng_ = svi_update(i, rng_, opt_state_)
return (opt_state_, rng_), loss
|
def body_fn(val):
i, loss, opt_state_, rng_ = val
loss, opt_state_, rng_ = svi_update(i, rng_, opt_state_)
return i + 1, loss, opt_state_, rng_
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def decoder(hidden_dim, out_dim):
return stax.serial(
stax.Dense(hidden_dim, W_init=stax.randn()),
stax.Softplus,
stax.Dense(out_dim, W_init=stax.randn()),
stax.Sigmoid,
)
|
def decoder(hidden_dim, out_dim):
return stax.serial(
stax.Dense(hidden_dim, W_init=stax.randn()),
stax.Softplus,
stax.Dense(out_dim, W_init=stax.randn()),
Sigmoid,
)
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def main(args):
encoder_init, encode = encoder(args.hidden_dim, args.z_dim)
decoder_init, decode = decoder(args.hidden_dim, 28 * 28)
opt_init, opt_update, get_params = optimizers.adam(args.learning_rate)
svi_init, svi_update, svi_eval = svi(
model,
guide,
elbo,
opt_init,
opt_update,
get_params,
encode=encode,
decode=decode,
z_dim=args.z_dim,
)
rng = PRNGKey(0)
train_init, train_fetch = load_dataset(
MNIST, batch_size=args.batch_size, split="train"
)
test_init, test_fetch = load_dataset(
MNIST, batch_size=args.batch_size, split="test"
)
num_train, train_idx = train_init()
rng, rng_enc, rng_dec, rng_binarize, rng_init = random.split(rng, 5)
_, encoder_params = encoder_init(rng_enc, (args.batch_size, 28 * 28))
_, decoder_params = decoder_init(rng_dec, (args.batch_size, args.z_dim))
params = {"encoder": encoder_params, "decoder": decoder_params}
sample_batch = binarize(rng_binarize, train_fetch(0, train_idx)[0])
opt_state, constrain_fn = svi_init(
rng_init, (sample_batch,), (sample_batch,), params
)
@jit
def epoch_train(opt_state, rng):
def body_fn(i, val):
loss_sum, opt_state, rng = val
rng, rng_binarize = random.split(rng)
batch = binarize(rng_binarize, train_fetch(i, train_idx)[0])
# TODO: we will want to merge (i, rng, opt_state) into `svi_state`
# Here the index `i` is reseted after each epoch, which causes no
# problem for static learning rate, but it is not a right way for
# scheduled learning rate.
loss, opt_state, rng = svi_update(
i,
rng,
opt_state,
(batch,),
(batch,),
)
loss_sum += loss
return loss_sum, opt_state, rng
return lax.fori_loop(0, num_train, body_fn, (0.0, opt_state, rng))
@jit
def eval_test(opt_state, rng):
def body_fun(i, val):
loss_sum, rng = val
rng, rng_binarize, rng_eval = random.split(rng, 3)
batch = binarize(rng_binarize, test_fetch(i, test_idx)[0])
loss = svi_eval(rng_eval, opt_state, (batch,), (batch,)) / len(batch)
loss_sum += loss
return loss_sum, rng
loss, _ = lax.fori_loop(0, num_test, body_fun, (0.0, rng))
loss = loss / num_test
return loss
def reconstruct_img(epoch, rng):
img = test_fetch(0, test_idx)[0][0]
plt.imsave(
os.path.join(RESULTS_DIR, "original_epoch={}.png".format(epoch)),
img,
cmap="gray",
)
rng_binarize, rng_sample = random.split(rng)
test_sample = binarize(rng_binarize, img)
params = get_params(opt_state)
z_mean, z_var = encode(params["encoder"], test_sample.reshape([1, -1]))
z = dist.Normal(z_mean, z_var).sample(rng_sample)
img_loc = decode(params["decoder"], z).reshape([28, 28])
plt.imsave(
os.path.join(RESULTS_DIR, "recons_epoch={}.png".format(epoch)),
img_loc,
cmap="gray",
)
for i in range(args.num_epochs):
t_start = time.time()
num_train, train_idx = train_init()
_, opt_state, rng = epoch_train(opt_state, rng)
rng, rng_test, rng_reconstruct = random.split(rng, 3)
num_test, test_idx = test_init()
test_loss = eval_test(opt_state, rng_test)
reconstruct_img(i, rng_reconstruct)
print(
"Epoch {}: loss = {} ({:.2f} s.)".format(
i, test_loss, time.time() - t_start
)
)
|
def main(args):
encoder_init, encode = encoder(args.hidden_dim, args.z_dim)
decoder_init, decode = decoder(args.hidden_dim, 28 * 28)
opt_init, opt_update, get_params = optimizers.adam(args.learning_rate)
svi_init, svi_update, svi_eval = svi(
model,
guide,
elbo,
opt_init,
opt_update,
get_params,
encode=encode,
decode=decode,
z_dim=args.z_dim,
)
rng = PRNGKey(0)
train_init, train_fetch = load_dataset(
MNIST, batch_size=args.batch_size, split="train"
)
test_init, test_fetch = load_dataset(
MNIST, batch_size=args.batch_size, split="test"
)
num_train, train_idx = train_init()
rng, rng_enc, rng_dec, rng_binarize, rng_init = random.split(rng, 5)
_, encoder_params = encoder_init(rng_enc, (args.batch_size, 28 * 28))
_, decoder_params = decoder_init(rng_dec, (args.batch_size, args.z_dim))
params = {"encoder": encoder_params, "decoder": decoder_params}
sample_batch = binarize(rng_binarize, train_fetch(0, train_idx)[0])
opt_state, constrain_fn = svi_init(
rng_init, (sample_batch,), (sample_batch,), params
)
@jit
def epoch_train(opt_state, rng):
def body_fn(i, val):
loss_sum, opt_state, rng = val
rng, rng_binarize = random.split(rng)
batch = binarize(rng_binarize, train_fetch(i, train_idx)[0])
loss, opt_state, rng = svi_update(
i,
rng,
opt_state,
(batch,),
(batch,),
)
loss_sum += loss
return loss_sum, opt_state, rng
return lax.fori_loop(0, num_train, body_fn, (0.0, opt_state, rng))
@jit
def eval_test(opt_state, rng):
def body_fun(i, val):
loss_sum, rng = val
rng, rng_binarize, rng_eval = random.split(rng, 3)
batch = binarize(rng_binarize, test_fetch(i, test_idx)[0])
loss = svi_eval(rng_eval, opt_state, (batch,), (batch,)) / len(batch)
loss_sum += loss
return loss_sum, rng
loss, _ = lax.fori_loop(0, num_test, body_fun, (0.0, rng))
loss = loss / num_test
return loss
def reconstruct_img(epoch, rng):
img = test_fetch(0, test_idx)[0][0]
plt.imsave(
os.path.join(RESULTS_DIR, "original_epoch={}.png".format(epoch)),
img,
cmap="gray",
)
rng_binarize, rng_sample = random.split(rng)
test_sample = binarize(rng_binarize, img)
params = get_params(opt_state)
z_mean, z_var = encode(params["encoder"], test_sample.reshape([1, -1]))
z = dist.Normal(z_mean, z_var).sample(rng_sample)
img_loc = decode(params["decoder"], z).reshape([28, 28])
plt.imsave(
os.path.join(RESULTS_DIR, "recons_epoch={}.png".format(epoch)),
img_loc,
cmap="gray",
)
for i in range(args.num_epochs):
t_start = time.time()
num_train, train_idx = train_init()
_, opt_state, rng = epoch_train(opt_state, rng)
rng, rng_test, rng_reconstruct = random.split(rng, 3)
num_test, test_idx = test_init()
test_loss = eval_test(opt_state, rng_test)
reconstruct_img(i, rng_reconstruct)
print(
"Epoch {}: loss = {} ({:.2f} s.)".format(
i, test_loss, time.time() - t_start
)
)
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def epoch_train(opt_state, rng):
def body_fn(i, val):
loss_sum, opt_state, rng = val
rng, rng_binarize = random.split(rng)
batch = binarize(rng_binarize, train_fetch(i, train_idx)[0])
# TODO: we will want to merge (i, rng, opt_state) into `svi_state`
# Here the index `i` is reseted after each epoch, which causes no
# problem for static learning rate, but it is not a right way for
# scheduled learning rate.
loss, opt_state, rng = svi_update(
i,
rng,
opt_state,
(batch,),
(batch,),
)
loss_sum += loss
return loss_sum, opt_state, rng
return lax.fori_loop(0, num_train, body_fn, (0.0, opt_state, rng))
|
def epoch_train(opt_state, rng):
def body_fn(i, val):
loss_sum, opt_state, rng = val
rng, rng_binarize = random.split(rng)
batch = binarize(rng_binarize, train_fetch(i, train_idx)[0])
loss, opt_state, rng = svi_update(
i,
rng,
opt_state,
(batch,),
(batch,),
)
loss_sum += loss
return loss_sum, opt_state, rng
return lax.fori_loop(0, num_train, body_fn, (0.0, opt_state, rng))
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def body_fn(i, val):
loss_sum, opt_state, rng = val
rng, rng_binarize = random.split(rng)
batch = binarize(rng_binarize, train_fetch(i, train_idx)[0])
# TODO: we will want to merge (i, rng, opt_state) into `svi_state`
# Here the index `i` is reseted after each epoch, which causes no
# problem for static learning rate, but it is not a right way for
# scheduled learning rate.
loss, opt_state, rng = svi_update(
i,
rng,
opt_state,
(batch,),
(batch,),
)
loss_sum += loss
return loss_sum, opt_state, rng
|
def body_fn(i, val):
loss_sum, opt_state, rng = val
rng, rng_binarize = random.split(rng)
batch = binarize(rng_binarize, train_fetch(i, train_idx)[0])
loss, opt_state, rng = svi_update(
i,
rng,
opt_state,
(batch,),
(batch,),
)
loss_sum += loss
return loss_sum, opt_state, rng
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def get_dtypes(*args):
return [canonicalize_dtype(lax.dtype(arg)) for arg in args]
|
def get_dtypes(*args):
return [canonicalize_dtype(onp.result_type(arg)) for arg in args]
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def matrix_to_tril_vec(x, diagonal=0):
idxs = np.tril_indices(x.shape[-1], diagonal)
return x[..., idxs[0], idxs[1]]
|
def matrix_to_tril_vec(x, diagonal=0):
idxs = onp.tril_indices(x.shape[-1], diagonal)
return x[..., idxs[0], idxs[1]]
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def vec_to_tril_matrix(t, diagonal=0):
# NB: the following formula only works for diagonal <= 0
n = round((math.sqrt(1 + 8 * t.shape[-1]) - 1) / 2) - diagonal
n2 = n * n
idx = np.reshape(np.arange(n2), (n, n))[np.tril_indices(n, diagonal)]
x = lax.scatter_add(
np.zeros(t.shape[:-1] + (n2,)),
np.expand_dims(idx, axis=-1),
t,
lax.ScatterDimensionNumbers(
update_window_dims=range(t.ndim - 1),
inserted_window_dims=(t.ndim - 1,),
scatter_dims_to_operand_dims=(t.ndim - 1,),
),
)
return np.reshape(x, x.shape[:-1] + (n, n))
|
def vec_to_tril_matrix(t, diagonal=0):
# NB: the following formula only works for diagonal <= 0
n = round((math.sqrt(1 + 8 * t.shape[-1]) - 1) / 2) - diagonal
n2 = n * n
idx = np.reshape(np.arange(n2), (n, n))[onp.tril_indices(n, diagonal)]
x = lax.scatter_add(
np.zeros(t.shape[:-1] + (n2,)),
np.expand_dims(idx, axis=-1),
t,
lax.ScatterDimensionNumbers(
update_window_dims=range(t.ndim - 1),
inserted_window_dims=(t.ndim - 1,),
scatter_dims_to_operand_dims=(t.ndim - 1,),
),
)
return np.reshape(x, x.shape[:-1] + (n, n))
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def consensus(subposteriors, num_draws=None, diagonal=False, rng=None):
"""
Merges subposteriors following consensus Monte Carlo algorithm.
**References:**
1. *Bayes and big data: The consensus Monte Carlo algorithm*,
Steven L. Scott, Alexander W. Blocker, Fernando V. Bonassi, Hugh A. Chipman,
Edward I. George, Robert E. McCulloch
:param list subposteriors: a list in which each element is a collection of samples.
:param int num_draws: number of draws from the merged posterior.
:param bool diagonal: whether to compute weights using variance or covariance, defaults to
`False` (using covariance).
:param jax.random.PRNGKey rng: source of the randomness, defaults to `jax.random.PRNGKey(0)`.
:return: if `num_draws` is None, merges subposteriors without resampling; otherwise, returns
a collection of `num_draws` samples with the same data structure as each subposterior.
"""
# stack subposteriors
joined_subposteriors = tree_multimap(lambda *args: np.stack(args), *subposteriors)
# shape of joined_subposteriors: n_subs x n_samples x sample_shape
joined_subposteriors = vmap(vmap(lambda sample: ravel_pytree(sample)[0]))(
joined_subposteriors
)
if num_draws is not None:
rng = random.PRNGKey(0) if rng is None else rng
# randomly gets num_draws from subposteriors
n_subs = len(subposteriors)
n_samples = tree_flatten(subposteriors[0])[0][0].shape[0]
# shape of draw_idxs: n_subs x num_draws x sample_shape
draw_idxs = random.randint(
rng, shape=(n_subs, num_draws), minval=0, maxval=n_samples
)
joined_subposteriors = vmap(lambda x, idx: x[idx])(
joined_subposteriors, draw_idxs
)
if diagonal:
# compute weights for each subposterior (ref: Section 3.1 of [1])
weights = vmap(lambda x: 1 / np.var(x, ddof=1, axis=0))(joined_subposteriors)
normalized_weights = weights / np.sum(weights, axis=0)
# get weighted samples
samples_flat = np.einsum("ij,ikj->kj", normalized_weights, joined_subposteriors)
else:
weights = vmap(lambda x: np.linalg.inv(np.cov(x.T)))(joined_subposteriors)
normalized_weights = np.matmul(np.linalg.inv(np.sum(weights, axis=0)), weights)
samples_flat = np.einsum(
"ijk,ilk->lj", normalized_weights, joined_subposteriors
)
# unravel_fn acts on 1 sample of a subposterior
_, unravel_fn = ravel_pytree(tree_map(lambda x: x[0], subposteriors[0]))
return vmap(lambda x: unravel_fn(x))(samples_flat)
|
def consensus(subposteriors, num_draws=None, diagonal=False, rng=None):
"""
Merges subposteriors following consensus Monte Carlo algorithm.
**References:**
1. *Bayes and big data: The consensus Monte Carlo algorithm*,
Steven L. Scott, Alexander W. Blocker, Fernando V. Bonassi, Hugh A. Chipman,
Edward I. George, Robert E. McCulloch
:param list subposteriors: a list in which each element is a collection of samples.
:param int num_draws: number of draws from the merged posterior.
:param bool diagonal: whether to compute weights using variance or covariance, defaults to
`False` (using covariance).
:param jax.random.PRNGKey rng: source of the randomness, defaults to `jax.random.PRNGKey(0)`.
:return: if `num_draws` is None, merges subposteriors without resampling; otherwise, returns
a collection of `num_draws` samples with the same data structure as each subposterior.
"""
# stack subposteriors
joined_subposteriors = tree_multimap(lambda *args: np.stack(args), *subposteriors)
# shape of joined_subposteriors: n_subs x n_samples x sample_shape
joined_subposteriors = vmap(vmap(lambda sample: ravel_pytree(sample)[0]))(
joined_subposteriors
)
if num_draws is not None:
rng = random.PRNGKey(0) if rng is None else rng
# randomly gets num_draws from subposteriors
n_subs = len(subposteriors)
n_samples = tree_flatten(subposteriors[0])[0][0].shape[0]
# shape of draw_idxs: n_subs x num_draws x sample_shape
draw_idxs = random.randint(
rng, shape=(n_subs, num_draws), minval=0, maxval=n_samples
)
joined_subposteriors = vmap(lambda x, idx: x[idx])(
joined_subposteriors, draw_idxs
)
if diagonal:
# compute weights for each subposterior (ref: Section 3.1 of [1])
weights = vmap(lambda x: (1 - 1 / n_samples) / np.var(x, axis=0))(
joined_subposteriors
)
normalized_weights = weights / np.sum(weights, axis=0)
# get weighted samples
samples_flat = np.einsum("ij,ikj->kj", normalized_weights, joined_subposteriors)
else:
weights = vmap(lambda x: np.linalg.inv(_cov(x)))(joined_subposteriors)
normalized_weights = np.matmul(np.linalg.inv(np.sum(weights, axis=0)), weights)
samples_flat = np.einsum(
"ijk,ilk->lj", normalized_weights, joined_subposteriors
)
# unravel_fn acts on 1 sample of a subposterior
_, unravel_fn = ravel_pytree(tree_map(lambda x: x[0], subposteriors[0]))
return vmap(lambda x: unravel_fn(x))(samples_flat)
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def parametric(subposteriors, diagonal=False):
"""
Merges subposteriors following (embarrassingly parallel) parametric Monte Carlo algorithm.
**References:**
1. *Asymptotically Exact, Embarrassingly Parallel MCMC*,
Willie Neiswanger, Chong Wang, Eric Xing
:param list subposteriors: a list in which each element is a collection of samples.
:param bool diagonal: whether to compute weights using variance or covariance, defaults to
`False` (using covariance).
:return: the estimated mean and variance/covariance parameters of the joined posterior
"""
joined_subposteriors = tree_multimap(lambda *args: np.stack(args), *subposteriors)
joined_subposteriors = vmap(vmap(lambda sample: ravel_pytree(sample)[0]))(
joined_subposteriors
)
submeans = np.mean(joined_subposteriors, axis=1)
if diagonal:
# NB: jax.numpy.var does not support ddof=1, so we do it manually
weights = vmap(lambda x: 1 / np.var(x, ddof=1, axis=0))(joined_subposteriors)
var = 1 / np.sum(weights, axis=0)
normalized_weights = var * weights
# comparing to consensus implementation, we compute weighted mean here
mean = np.einsum("ij,ij->j", normalized_weights, submeans)
return mean, var
else:
weights = vmap(lambda x: np.linalg.inv(np.cov(x.T)))(joined_subposteriors)
cov = np.linalg.inv(np.sum(weights, axis=0))
normalized_weights = np.matmul(cov, weights)
# comparing to consensus implementation, we compute weighted mean here
mean = np.einsum("ijk,ik->j", normalized_weights, submeans)
return mean, cov
|
def parametric(subposteriors, diagonal=False):
"""
Merges subposteriors following (embarrassingly parallel) parametric Monte Carlo algorithm.
**References:**
1. *Asymptotically Exact, Embarrassingly Parallel MCMC*,
Willie Neiswanger, Chong Wang, Eric Xing
:param list subposteriors: a list in which each element is a collection of samples.
:param bool diagonal: whether to compute weights using variance or covariance, defaults to
`False` (using covariance).
:return: the estimated mean and variance/covariance parameters of the joined posterior
"""
joined_subposteriors = tree_multimap(lambda *args: np.stack(args), *subposteriors)
joined_subposteriors = vmap(vmap(lambda sample: ravel_pytree(sample)[0]))(
joined_subposteriors
)
submeans = np.mean(joined_subposteriors, axis=1)
n_samples = tree_flatten(subposteriors[0])[0][0].shape[0]
if diagonal:
# NB: jax.numpy.var does not support ddof=1, so we do it manually
weights = vmap(lambda x: (1 - 1 / n_samples) / np.var(x, axis=0))(
joined_subposteriors
)
var = 1 / np.sum(weights, axis=0)
normalized_weights = var * weights
# comparing to consensus implementation, we compute weighted mean here
mean = np.einsum("ij,ij->j", normalized_weights, submeans)
return mean, var
else:
weights = vmap(lambda x: np.linalg.inv(_cov(x)))(joined_subposteriors)
cov = np.linalg.inv(np.sum(weights, axis=0))
normalized_weights = np.matmul(cov, weights)
# comparing to consensus implementation, we compute weighted mean here
mean = np.einsum("ijk,ik->j", normalized_weights, submeans)
return mean, cov
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def init_to_median(site, num_samples=15, skip_param=False):
"""
Initialize to the prior median.
"""
if site["type"] == "sample" and not site["is_observed"]:
if isinstance(site["fn"], dist.TransformedDistribution):
fn = site["fn"].base_dist
else:
fn = site["fn"]
samples = sample("_init", fn, sample_shape=(num_samples,))
return np.median(samples, axis=0)
if site["type"] == "param" and not skip_param:
# return base value of param site
constraint = site["kwargs"].pop("constraint", real)
transform = biject_to(constraint)
value = site["args"][0]
if isinstance(transform, ComposeTransform):
base_transform = transform.parts[0]
value = base_transform(transform.inv(value))
return value
|
def init_to_median(site, num_samples=15, skip_param=False):
"""
Initialize to the prior median.
"""
if site["type"] == "sample" and not site["is_observed"]:
if isinstance(site["fn"], dist.TransformedDistribution):
fn = site["fn"].base_dist
else:
fn = site["fn"]
samples = sample("_init", fn, sample_shape=(num_samples,))
# TODO: use np.median when it is available upstream
return np.mean(samples, axis=0)
if site["type"] == "param" and not skip_param:
# return base value of param site
constraint = site["kwargs"].pop("constraint", real)
transform = biject_to(constraint)
value = site["args"][0]
if isinstance(transform, ComposeTransform):
base_transform = transform.parts[0]
value = base_transform(transform.inv(value))
return value
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def mcmc(
num_warmup,
num_samples,
init_params,
num_chains=1,
sampler="hmc",
constrain_fn=None,
print_summary=True,
**sampler_kwargs,
):
"""
Convenience wrapper for MCMC samplers -- runs warmup, prints
diagnostic summary and returns a collections of samples
from the posterior.
:param num_warmup: Number of warmup steps.
:param num_samples: Number of samples to generate from the Markov chain.
:param init_params: Initial parameters to begin sampling. The type can
must be consistent with the input type to `potential_fn`.
:param sampler: currently, only `hmc` is implemented (default).
:param constrain_fn: Callable that converts a collection of unconstrained
sample values returned from the sampler to constrained values that
lie within the support of the sample sites.
:param print_summary: Whether to print diagnostics summary for
each sample site. Default is ``True``.
:param `**sampler_kwargs`: Sampler specific keyword arguments.
- *HMC*: Refer to :func:`~numpyro.mcmc.hmc` and
:func:`~numpyro.mcmc.hmc.init_kernel` for accepted arguments. Note
that all arguments must be provided as keywords.
:return: collection of samples from the posterior.
.. testsetup::
import jax
from jax import random
import jax.numpy as np
import numpyro.distributions as dist
from numpyro.handlers import sample
from numpyro.hmc_util import initialize_model
from numpyro.mcmc import hmc
from numpyro.util import fori_collect
.. doctest::
>>> true_coefs = np.array([1., 2., 3.])
>>> data = random.normal(random.PRNGKey(2), (2000, 3))
>>> dim = 3
>>> labels = dist.Bernoulli(logits=(true_coefs * data).sum(-1)).sample(random.PRNGKey(3))
>>>
>>> def model(data, labels):
... coefs_mean = np.zeros(dim)
... coefs = sample('beta', dist.Normal(coefs_mean, np.ones(3)))
... intercept = sample('intercept', dist.Normal(0., 10.))
... return sample('y', dist.Bernoulli(logits=(coefs * data + intercept).sum(-1)), obs=labels)
>>>
>>> init_params, potential_fn, constrain_fn = initialize_model(random.PRNGKey(0), model,
... data, labels)
>>> num_warmup, num_samples = 1000, 1000
>>> samples = mcmc(num_warmup, num_samples, init_params,
... potential_fn=potential_fn,
... constrain_fn=constrain_fn) # doctest: +SKIP
warmup: 100%|██████████| 1000/1000 [00:09<00:00, 109.40it/s, 1 steps of size 5.83e-01. acc. prob=0.79]
sample: 100%|██████████| 1000/1000 [00:00<00:00, 1252.39it/s, 1 steps of size 5.83e-01. acc. prob=0.85]
mean sd 5.5% 94.5% n_eff Rhat
coefs[0] 0.96 0.07 0.85 1.07 455.35 1.01
coefs[1] 2.05 0.09 1.91 2.20 332.00 1.01
coefs[2] 3.18 0.13 2.96 3.37 320.27 1.00
intercept -0.03 0.02 -0.06 0.00 402.53 1.00
"""
sequential_chain = False
if xla_bridge.device_count() < num_chains:
sequential_chain = True
warnings.warn(
"There are not enough devices to run parallel chains: expected {} but got {}."
" Chains will be drawn sequentially. If you are running `mcmc` in CPU,"
" consider to disable XLA intra-op parallelism by setting the environment"
' flag "XLA_FLAGS=--xla_force_host_platform_device_count={}".'.format(
num_chains, xla_bridge.device_count(), num_chains
)
)
progbar = sampler_kwargs.pop("progbar", True)
if num_chains > 1:
progbar = False
if sampler == "hmc":
if constrain_fn is None:
constrain_fn = identity
potential_fn = sampler_kwargs.pop("potential_fn")
kinetic_fn = sampler_kwargs.pop("kinetic_fn", None)
algo = sampler_kwargs.pop("algo", "NUTS")
if num_chains > 1:
rngs = sampler_kwargs.pop("rng", vmap(PRNGKey)(np.arange(num_chains)))
else:
rng = sampler_kwargs.pop("rng", PRNGKey(0))
init_kernel, sample_kernel = hmc(potential_fn, kinetic_fn, algo)
if progbar:
hmc_state = init_kernel(
init_params, num_warmup, progbar=progbar, rng=rng, **sampler_kwargs
)
samples_flat = fori_collect(
0,
num_samples,
sample_kernel,
hmc_state,
transform=lambda x: constrain_fn(x.z),
progbar=progbar,
diagnostics_fn=get_diagnostics_str,
progbar_desc="sample",
)
samples = tree_map(lambda x: x[np.newaxis, ...], samples_flat)
else:
def single_chain_mcmc(rng, init_params):
hmc_state = init_kernel(
init_params, num_warmup, run_warmup=False, rng=rng, **sampler_kwargs
)
samples = fori_collect(
num_warmup,
num_warmup + num_samples,
sample_kernel,
hmc_state,
transform=lambda x: constrain_fn(x.z),
progbar=progbar,
)
return samples
if num_chains == 1:
samples_flat = single_chain_mcmc(rng, init_params)
samples = tree_map(lambda x: x[np.newaxis, ...], samples_flat)
else:
if sequential_chain:
samples = lax.map(
lambda args: single_chain_mcmc(*args), (rngs, init_params)
)
else:
samples = pmap(single_chain_mcmc)(rngs, init_params)
samples_flat = tree_map(
lambda x: np.reshape(x, (-1,) + x.shape[2:]), samples
)
if print_summary:
summary(samples)
return samples_flat
else:
raise ValueError("sampler: {} not recognized".format(sampler))
|
def mcmc(
num_warmup,
num_samples,
init_params,
num_chains=1,
sampler="hmc",
constrain_fn=None,
print_summary=True,
**sampler_kwargs,
):
"""
Convenience wrapper for MCMC samplers -- runs warmup, prints
diagnostic summary and returns a collections of samples
from the posterior.
:param num_warmup: Number of warmup steps.
:param num_samples: Number of samples to generate from the Markov chain.
:param init_params: Initial parameters to begin sampling. The type can
must be consistent with the input type to `potential_fn`.
:param sampler: currently, only `hmc` is implemented (default).
:param constrain_fn: Callable that converts a collection of unconstrained
sample values returned from the sampler to constrained values that
lie within the support of the sample sites.
:param print_summary: Whether to print diagnostics summary for
each sample site. Default is ``True``.
:param `**sampler_kwargs`: Sampler specific keyword arguments.
- *HMC*: Refer to :func:`~numpyro.mcmc.hmc` and
:func:`~numpyro.mcmc.hmc.init_kernel` for accepted arguments. Note
that all arguments must be provided as keywords.
:return: collection of samples from the posterior.
.. testsetup::
import jax
from jax import random
import jax.numpy as np
import numpyro.distributions as dist
from numpyro.handlers import sample
from numpyro.hmc_util import initialize_model
from numpyro.mcmc import hmc
from numpyro.util import fori_collect
.. doctest::
>>> true_coefs = np.array([1., 2., 3.])
>>> data = random.normal(random.PRNGKey(2), (2000, 3))
>>> dim = 3
>>> labels = dist.Bernoulli(logits=(true_coefs * data).sum(-1)).sample(random.PRNGKey(3))
>>>
>>> def model(data, labels):
... coefs_mean = np.zeros(dim)
... coefs = sample('beta', dist.Normal(coefs_mean, np.ones(3)))
... intercept = sample('intercept', dist.Normal(0., 10.))
... return sample('y', dist.Bernoulli(logits=(coefs * data + intercept).sum(-1)), obs=labels)
>>>
>>> init_params, potential_fn, constrain_fn = initialize_model(random.PRNGKey(0), model,
... data, labels)
>>> num_warmup, num_samples = 1000, 1000
>>> samples = mcmc(num_warmup, num_samples, init_params,
... potential_fn=potential_fn,
... constrain_fn=constrain_fn) # doctest: +SKIP
warmup: 100%|██████████| 1000/1000 [00:09<00:00, 109.40it/s, 1 steps of size 5.83e-01. acc. prob=0.79]
sample: 100%|██████████| 1000/1000 [00:00<00:00, 1252.39it/s, 1 steps of size 5.83e-01. acc. prob=0.85]
mean sd 5.5% 94.5% n_eff Rhat
coefs[0] 0.96 0.07 0.85 1.07 455.35 1.01
coefs[1] 2.05 0.09 1.91 2.20 332.00 1.01
coefs[2] 3.18 0.13 2.96 3.37 320.27 1.00
intercept -0.03 0.02 -0.06 0.00 402.53 1.00
"""
sequential_chain = False
if xla_bridge.device_count() < num_chains:
sequential_chain = True
warnings.warn(
"There are not enough devices to run parallel chains: expected {} but got {}."
" Chains will be drawn sequentially. If you are running `mcmc` in CPU,"
" consider to disable XLA intra-op parallelism by setting the environment"
' flag "XLA_FLAGS=--xla_force_host_platform_device_count={}".'.format(
num_chains, xla_bridge.device_count(), num_chains
)
)
progbar = sampler_kwargs.pop("progbar", True)
if num_chains > 1:
progbar = False
if sampler == "hmc":
if constrain_fn is None:
constrain_fn = identity
potential_fn = sampler_kwargs.pop("potential_fn")
kinetic_fn = sampler_kwargs.pop("kinetic_fn", None)
algo = sampler_kwargs.pop("algo", "NUTS")
if num_chains > 1:
rngs = sampler_kwargs.pop("rng", vmap(PRNGKey)(np.arange(num_chains)))
else:
rng = sampler_kwargs.pop("rng", PRNGKey(0))
init_kernel, sample_kernel = hmc(potential_fn, kinetic_fn, algo)
if progbar:
hmc_state = init_kernel(
init_params, num_warmup, progbar=progbar, rng=rng, **sampler_kwargs
)
samples_flat = fori_collect(
0,
num_samples,
sample_kernel,
hmc_state,
transform=lambda x: constrain_fn(x.z),
progbar=progbar,
diagnostics_fn=get_diagnostics_str,
progbar_desc="sample",
)
samples = tree_map(lambda x: x[np.newaxis, ...], samples_flat)
else:
def single_chain_mcmc(rng, init_params):
hmc_state = init_kernel(
init_params, num_warmup, run_warmup=False, rng=rng, **sampler_kwargs
)
samples = fori_collect(
num_warmup,
num_warmup + num_samples,
sample_kernel,
hmc_state,
transform=lambda x: constrain_fn(x.z),
progbar=progbar,
)
return samples
if num_chains == 1:
samples_flat = single_chain_mcmc(rng, init_params)
samples = tree_map(lambda x: x[np.newaxis, ...], samples_flat)
else:
if sequential_chain:
samples = []
for i in range(num_chains):
init_params_i = tree_map(lambda x: x[i], init_params)
samples.append(jit(single_chain_mcmc)(rngs[i], init_params_i))
samples = tree_multimap(lambda *args: np.stack(args), *samples)
else:
samples = pmap(single_chain_mcmc)(rngs, init_params)
samples_flat = tree_map(
lambda x: np.reshape(x, (-1,) + x.shape[2:]), samples
)
if print_summary:
summary(samples)
return samples_flat
else:
raise ValueError("sampler: {} not recognized".format(sampler))
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def while_loop(cond_fun, body_fun, init_val):
if _DISABLE_CONTROL_FLOW_PRIM:
val = init_val
while cond_fun(val):
val = body_fun(val)
return val
else:
return lax.while_loop(cond_fun, body_fun, init_val)
|
def while_loop(cond_fun, body_fun, init_val):
if _DISABLE_CONTROL_FLOW_PRIM:
val = init_val
while cond_fun(val):
val = body_fun(val)
return val
else:
# TODO: consider jitting while_loop similar to fori_loop
return lax.while_loop(cond_fun, body_fun, init_val)
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def fori_loop(lower, upper, body_fun, init_val):
if _DISABLE_CONTROL_FLOW_PRIM:
val = init_val
for i in range(int(lower), int(upper)):
val = body_fun(i, val)
return val
else:
return lax.fori_loop(lower, upper, body_fun, init_val)
|
def fori_loop(lower, upper, body_fun, init_val):
if _DISABLE_CONTROL_FLOW_PRIM:
val = init_val
for i in range(int(lower), int(upper)):
val = body_fun(i, val)
return val
else:
return jit(lax.fori_loop, static_argnums=(2,))(lower, upper, body_fun, init_val)
|
https://github.com/pyro-ppl/numpyro/issues/279
|
test/test_examples.py::test_cpu[hmm.py --num-samples 100 --num-warmup 100 --num-chains 2] Running:
python examples/hmm.py --num-samples 100 --num-warmup 100 --num-chains 2
Simulating data...
Starting inference...
Traceback (most recent call last):
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 180, in <module>
main(args)
File "/home/travis/build/pyro-ppl/numpyro/examples/hmm.py", line 159, in main
supervised_words, unsupervised_words,
File "/home/travis/build/pyro-ppl/numpyro/numpyro/hmc_util.py", line 775, in initialize_model
raise RuntimeError("Cannot find valid initial parameters. Please check your model again.")
RuntimeError: Cannot find valid initial parameters. Please check your model again.
FAILED
|
RuntimeError
|
def _build_import_removals() -> Dict[MinVersion, Dict[str, Tuple[str, ...]]]:
ret = {}
future: Tuple[Tuple[MinVersion, Tuple[str, ...]], ...] = (
((2, 7), ("nested_scopes", "generators", "with_statement")),
(
(3,),
(
"absolute_import",
"division",
"print_function",
"unicode_literals",
),
),
((3, 6), ()),
((3, 7), ("generator_stop",)),
((3, 8), ()),
((3, 9), ()),
)
prev: Tuple[str, ...] = ()
for min_version, names in future:
prev += names
ret[min_version] = {"__future__": prev}
# see reorder_python_imports
for k, v in ret.items():
if k >= (3,):
v.update(
{
"builtins": (
"ascii",
"bytes",
"chr",
"dict",
"filter",
"hex",
"input",
"int",
"list",
"map",
"max",
"min",
"next",
"object",
"oct",
"open",
"pow",
"range",
"round",
"str",
"super",
"zip",
"*",
),
"io": ("open",),
"six": ("callable", "next"),
"six.moves": ("filter", "input", "map", "range", "zip"),
}
)
return ret
|
def _build_import_removals() -> Dict[MinVersion, Dict[str, Tuple[str, ...]]]:
ret = {}
future: Tuple[Tuple[MinVersion, Tuple[str, ...]], ...] = (
((2, 7), ("nested_scopes", "generators", "with_statement")),
(
(3,),
(
"absolute_import",
"division",
"print_function",
"unicode_literals",
),
),
((3, 6), ()),
((3, 7), ("generator_stop",)),
((3, 8), ()),
)
prev: Tuple[str, ...] = ()
for min_version, names in future:
prev += names
ret[min_version] = {"__future__": prev}
# see reorder_python_imports
for k, v in ret.items():
if k >= (3,):
v.update(
{
"builtins": (
"ascii",
"bytes",
"chr",
"dict",
"filter",
"hex",
"input",
"int",
"list",
"map",
"max",
"min",
"next",
"object",
"oct",
"open",
"pow",
"range",
"round",
"str",
"super",
"zip",
"*",
),
"io": ("open",),
"six": ("callable", "next"),
"six.moves": ("filter", "input", "map", "range", "zip"),
}
)
return ret
|
https://github.com/asottile/pyupgrade/issues/378
|
venv/bin/pyupgrade --py39-plus src/**.py
Traceback (most recent call last):
File "/Users/bgabor8/git/a/venv/bin/pyupgrade", line 8, in <module>
sys.exit(main())
File "/Users/bgabor8/git/a/venv/lib/python3.9/site-packages/pyupgrade.py", line 2823, in main
ret |= _fix_file(filename, args)
File "/Users/bgabor8/git/a/venv/lib/python3.9/site-packages/pyupgrade.py", line 2770, in _fix_file
contents_text = _fix_tokens(contents_text, min_version=args.min_version)
File "/Users/bgabor8/git/a/venv/lib/python3.9/site-packages/pyupgrade.py", line 808, in _fix_tokens
_fix_import_removals(tokens, i, min_version)
File "/Users/bgabor8/git/a/venv/lib/python3.9/site-packages/pyupgrade.py", line 732, in _fix_import_removals
if modname not in IMPORT_REMOVALS[min_version]:
KeyError: (3, 9)
|
KeyError
|
def visit_Call(self, node: ast.Call) -> None:
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
arg = node.args[1]
# _is_six() enforces this
assert isinstance(arg, (ast.Name, ast.Attribute))
self.six_type_ctx[_ast_to_offset(node.args[1])] = arg
elif self._is_six(node.func, ("b", "ensure_binary")):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS) and not _starargs(node):
self.six_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "next"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Call)
and self._is_six(
node.args[0].func,
("iteritems", "iterkeys", "itervalues"),
)
and not _starargs(node.args[0])
):
self.six_iter[_ast_to_offset(node.args[0])] = node.args[0]
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, ("raise_from",))
and not _starargs(node)
):
self.six_raise_from.add(_ast_to_offset(node))
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, ("reraise",))
and not _starargs(node)
):
self.six_reraise.add(_ast_to_offset(node))
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and isinstance(node.args[0], ast.Name)
and isinstance(node.args[1], ast.Name)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
(
self._is_six(node.func, SIX_NATIVE_STR)
or isinstance(node.func, ast.Name)
and node.func.id == "str"
)
and not node.keywords
and not _starargs(node)
and (
len(node.args) == 0
or (len(node.args) == 1 and isinstance(node.args[0], ast.Str))
)
):
self.native_literals.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "encode"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and _is_codec(node.args[0].s, "utf-8")
):
self.encode_calls[_ast_to_offset(node)] = node
elif self._is_io_open(node.func):
self.io_open_calls.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Name)
and node.func.id == "open"
and not _starargs(node)
and len(node.args) >= 2
and isinstance(node.args[1], ast.Str)
and (
node.args[1].s in U_MODE_REPLACE
or (len(node.args) == 2 and node.args[1].s in U_MODE_REMOVE)
)
):
self.open_mode_calls.add(_ast_to_offset(node))
elif not node.args and not node.keywords and self._is_lru_cache(node.func):
self.no_arg_decorators.add(_ast_to_offset(node))
self.generic_visit(node)
|
def visit_Call(self, node: ast.Call) -> None:
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
arg = node.args[1]
# _is_six() enforces this
assert isinstance(arg, (ast.Name, ast.Attribute))
self.six_type_ctx[_ast_to_offset(node.args[1])] = arg
elif self._is_six(node.func, ("b", "ensure_binary")):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS) and not _starargs(node):
self.six_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "next"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Call)
and self._is_six(
node.args[0].func,
("iteritems", "iterkeys", "itervalues"),
)
and not _starargs(node.args[0])
):
self.six_iter[_ast_to_offset(node.args[0])] = node.args[0]
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, ("raise_from",))
and not _starargs(node)
):
self.six_raise_from.add(_ast_to_offset(node))
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, ("reraise",))
and not _starargs(node)
):
self.six_reraise.add(_ast_to_offset(node))
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and isinstance(node.args[0], ast.Name)
and isinstance(node.args[1], ast.Name)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
(
self._is_six(node.func, SIX_NATIVE_STR)
or isinstance(node.func, ast.Name)
and node.func.id == "str"
)
and not node.keywords
and not _starargs(node)
and (
len(node.args) == 0
or (len(node.args) == 1 and isinstance(node.args[0], ast.Str))
)
):
self.native_literals.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "encode"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and _is_codec(node.args[0].s, "utf-8")
):
self.encode_calls[_ast_to_offset(node)] = node
elif self._is_io_open(node.func):
self.io_open_calls.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Name)
and node.func.id == "open"
and len(node.args) >= 2
and not _starargs(node)
and isinstance(node.args[1], ast.Str)
and node.args[1].s in U_MODE_ALL
):
self.open_mode_calls.add(_ast_to_offset(node))
elif not node.args and not node.keywords and self._is_lru_cache(node.func):
self.no_arg_decorators.add(_ast_to_offset(node))
self.generic_visit(node)
|
https://github.com/asottile/pyupgrade/issues/312
|
Traceback (most recent call last):
File "blah.py", line 1, in <module>
with open('blah.txt', 'utf-8') as fp:
ValueError: invalid mode: 'utf-8'
|
ValueError
|
def visit_Name(self, node: ast.Name) -> None:
if self._is_six(node, SIX_SIMPLE_ATTRS):
self.six_simple[_ast_to_offset(node)] = node
if self._scope_stack:
if isinstance(node.ctx, ast.Load):
self._scope_stack[-1].reads.add(node.id)
elif isinstance(node.ctx, (ast.Store, ast.Del)):
self._scope_stack[-1].writes.add(node.id)
else:
raise AssertionError(node)
self.generic_visit(node)
|
def visit_Name(self, node: ast.Name) -> None:
if self._is_six(node, SIX_SIMPLE_ATTRS):
self.six_simple[_ast_to_offset(node)] = node
if self._scope_stack:
if isinstance(node.ctx, ast.Load):
self._scope_stack[-1].reads.add(node.id)
elif isinstance(node.ctx, ast.Store):
self._scope_stack[-1].writes.add(node.id)
else:
raise AssertionError(node)
self.generic_visit(node)
|
https://github.com/asottile/pyupgrade/issues/306
|
Traceback (most recent call last):
File "./venv/bin/pyupgrade", line 8, in <module>
sys.exit(main())
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 2680, in main
ret |= _fix_file(filename, args)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 2638, in _fix_file
contents_text = _fix_py3_plus(contents_text, args.min_version)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 2002, in _fix_py3_plus
visitor.visit(ast_obj)
File "/usr/lib64/python3.8/ast.py", line 360, in visit
return visitor(node)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 1705, in generic_visit
super().generic_visit(node)
File "/usr/lib64/python3.8/ast.py", line 368, in generic_visit
self.visit(item)
File "/usr/lib64/python3.8/ast.py", line 360, in visit
return visitor(node)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 1418, in _visit_sync_func
self._visit_func(node)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 1414, in _visit_func
self.generic_visit(node)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 1705, in generic_visit
super().generic_visit(node)
File "/usr/lib64/python3.8/ast.py", line 368, in generic_visit
self.visit(item)
File "/usr/lib64/python3.8/ast.py", line 360, in visit
return visitor(node)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 1705, in generic_visit
super().generic_visit(node)
File "/usr/lib64/python3.8/ast.py", line 368, in generic_visit
self.visit(item)
File "/usr/lib64/python3.8/ast.py", line 360, in visit
return visitor(node)
File "/home/jon/venv/lib64/python3.8/site-packages/pyupgrade.py", line 1454, in visit_Name
raise AssertionError(node)
AssertionError: <_ast.Name object at 0x7f6b26fd0a90>
|
AssertionError
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
arg = node.args[1]
# _is_six() enforces this
assert isinstance(arg, (ast.Name, ast.Attribute))
self.six_type_ctx[_ast_to_offset(node.args[1])] = arg
elif self._is_six(node.func, ("b", "ensure_binary")):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS) and not _starargs(node):
self.six_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "next"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Call)
and self._is_six(
node.args[0].func,
("iteritems", "iterkeys", "itervalues"),
)
and not _starargs(node.args[0])
):
self.six_iter[_ast_to_offset(node.args[0])] = node.args[0]
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, ("raise_from",))
and not _starargs(node)
):
self.six_raise_from.add(_ast_to_offset(node))
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, ("reraise",))
and not _starargs(node)
):
self.six_reraise.add(_ast_to_offset(node))
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and isinstance(node.args[0], ast.Name)
and isinstance(node.args[1], ast.Name)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
(
self._is_six(node.func, ("ensure_str", "ensure_text"))
or isinstance(node.func, ast.Name)
and node.func.id == "str"
)
and not node.keywords
and not _starargs(node)
and (
len(node.args) == 0
or (len(node.args) == 1 and isinstance(node.args[0], ast.Str))
)
):
self.native_literals.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "encode"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and _is_codec(node.args[0].s, "utf-8")
):
self.encode_calls[_ast_to_offset(node)] = node
elif self._is_io_open(node.func):
self.io_open_calls[_ast_to_offset(node)] = node
self.generic_visit(node)
|
def visit_Call(self, node): # type: (ast.Call) -> None
if (
isinstance(node.func, ast.Name)
and node.func.id in {"isinstance", "issubclass"}
and len(node.args) == 2
and self._is_six(node.args[1], SIX_TYPE_CTX_ATTRS)
):
arg = node.args[1]
# _is_six() enforces this
assert isinstance(arg, (ast.Name, ast.Attribute))
self.six_type_ctx[_ast_to_offset(node.args[1])] = arg
elif self._is_six(node.func, ("b", "ensure_binary")):
self.six_b.add(_ast_to_offset(node))
elif self._is_six(node.func, SIX_CALLS) and not _starargs(node):
self.six_calls[_ast_to_offset(node)] = node
elif (
isinstance(node.func, ast.Name)
and node.func.id == "next"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Call)
and self._is_six(
node.args[0].func,
("iteritems", "iterkeys", "itervalues"),
)
and not _starargs(node.args[0])
):
self.six_iter[_ast_to_offset(node.args[0])] = node.args[0]
elif (
isinstance(self._previous_node, ast.Expr)
and self._is_six(node.func, SIX_RAISES)
and not _starargs(node)
):
self.six_raises[_ast_to_offset(node)] = node
elif (
not self._in_comp
and self._class_info_stack
and self._class_info_stack[-1].def_depth == 1
and isinstance(node.func, ast.Name)
and node.func.id == "super"
and len(node.args) == 2
and isinstance(node.args[0], ast.Name)
and isinstance(node.args[1], ast.Name)
and node.args[0].id == self._class_info_stack[-1].name
and node.args[1].id == self._class_info_stack[-1].first_arg_name
):
self.super_calls[_ast_to_offset(node)] = node
elif (
(
self._is_six(node.func, ("ensure_str", "ensure_text"))
or isinstance(node.func, ast.Name)
and node.func.id == "str"
)
and not node.keywords
and not _starargs(node)
and (
len(node.args) == 0
or (len(node.args) == 1 and isinstance(node.args[0], ast.Str))
)
):
self.native_literals.add(_ast_to_offset(node))
elif (
isinstance(node.func, ast.Attribute)
and isinstance(node.func.value, ast.Str)
and node.func.attr == "encode"
and not _starargs(node)
and len(node.args) == 1
and isinstance(node.args[0], ast.Str)
and _is_codec(node.args[0].s, "utf-8")
):
self.encode_calls[_ast_to_offset(node)] = node
elif self._is_io_open(node.func):
self.io_open_calls[_ast_to_offset(node)] = node
self.generic_visit(node)
|
https://github.com/asottile/pyupgrade/issues/246
|
Traceback (most recent call last):
File ".../venv/bin/pyupgrade", line 10, in <module>
sys.exit(main())
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 2318, in main
ret |= _fix_file(filename, args)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 2280, in _fix_file
contents_text = _fix_py3_plus(contents_text)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 1984, in _fix_py3_plus
_replace_call(tokens, i, end, func_args, template)
File ".../venv/lib64/python3.8/site-packages/pyupgrade.py", line 1849, in _replace_call
src = tmpl.format(args=arg_strs, rest=rest)
IndexError: list index out of range
|
IndexError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.