after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def main(args=None):
args = args or sys.argv[1:]
parser, resolver_options_builder = configure_clp()
try:
separator = args.index("--")
args, cmdline = args[:separator], args[separator + 1 :]
except ValueError:
args, cmdline = args, []
options, reqs = parser.parse_args(args=args)
if options.pex_root:
ENV.set("PEX_ROOT", options.pex_root)
else:
options.pex_root = (
ENV.PEX_ROOT
) # If option not specified fallback to env variable.
# Don't alter cache if it is disabled.
if options.cache_dir:
options.cache_dir = make_relative_to_root(options.cache_dir)
options.interpreter_cache_dir = make_relative_to_root(options.interpreter_cache_dir)
with ENV.patch(PEX_VERBOSE=str(options.verbosity)):
with TRACER.timed("Building pex"):
pex_builder = build_pex(reqs, options, resolver_options_builder)
if options.pex_name is not None:
log("Saving PEX file to %s" % options.pex_name, v=options.verbosity)
tmp_name = options.pex_name + "~"
safe_delete(tmp_name)
pex_builder.build(tmp_name)
os.rename(tmp_name, options.pex_name)
return 0
if options.platform != Platform.current():
log("WARNING: attempting to run PEX with differing platform!")
pex_builder.freeze()
log(
"Running PEX file at %s with args %s" % (pex_builder.path(), cmdline),
v=options.verbosity,
)
pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
sys.exit(pex.run(args=list(cmdline)))
|
def main(args=None):
args = args or sys.argv[1:]
parser, resolver_options_builder = configure_clp()
try:
separator = args.index("--")
args, cmdline = args[:separator], args[separator + 1 :]
except ValueError:
args, cmdline = args, []
options, reqs = parser.parse_args(args=args)
if options.pex_root:
ENV.set("PEX_ROOT", options.pex_root)
else:
options.pex_root = (
ENV.PEX_ROOT
) # If option not specified fallback to env variable.
options.cache_dir = make_relative_to_root(options.cache_dir)
options.interpreter_cache_dir = make_relative_to_root(options.interpreter_cache_dir)
with ENV.patch(PEX_VERBOSE=str(options.verbosity)):
with TRACER.timed("Building pex"):
pex_builder = build_pex(reqs, options, resolver_options_builder)
if options.pex_name is not None:
log("Saving PEX file to %s" % options.pex_name, v=options.verbosity)
tmp_name = options.pex_name + "~"
safe_delete(tmp_name)
pex_builder.build(tmp_name)
os.rename(tmp_name, options.pex_name)
return 0
if options.platform != Platform.current():
log("WARNING: attempting to run PEX with differing platform!")
pex_builder.freeze()
log(
"Running PEX file at %s with args %s" % (pex_builder.path(), cmdline),
v=options.verbosity,
)
pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
sys.exit(pex.run(args=list(cmdline)))
|
https://github.com/pantsbuild/pex/issues/260
|
Traceback (most recent call last):
File ".tox/package/bin/pex", line 11, in <module>
sys.exit(main())
File ".../lib/python3.4/site-packages/pex/bin/pex.py", line 533, in main
options.cache_dir = make_relative_to_root(options.cache_dir)
File ".../.tox/package/lib/python3.4/site-packages/pex/bin/pex.py", line 514, in make_relative_to_root
return os.path.normpath(path.format(pex_root=ENV.PEX_ROOT))
AttributeError: 'list' object has no attribute 'format'
|
AttributeError
|
def main(args=None):
args = args or sys.argv[1:]
parser, resolver_options_builder = configure_clp()
try:
separator = args.index("--")
args, cmdline = args[:separator], args[separator + 1 :]
except ValueError:
args, cmdline = args, []
options, reqs = parser.parse_args(args=args)
if options.pex_root:
ENV.set("PEX_ROOT", options.pex_root)
else:
options.pex_root = (
ENV.PEX_ROOT
) # If option not specified fallback to env variable.
options.cache_dir = make_relative_to_root(options.cache_dir)
options.interpreter_cache_dir = make_relative_to_root(options.interpreter_cache_dir)
with ENV.patch(PEX_VERBOSE=str(options.verbosity)):
with TRACER.timed("Building pex"):
pex_builder = build_pex(reqs, options, resolver_options_builder)
if options.pex_name is not None:
log("Saving PEX file to %s" % options.pex_name, v=options.verbosity)
tmp_name = options.pex_name + "~"
safe_delete(tmp_name)
pex_builder.build(tmp_name)
os.rename(tmp_name, options.pex_name)
return 0
if options.platform != Platform.current():
log("WARNING: attempting to run PEX with differing platform!")
pex_builder.freeze()
log(
"Running PEX file at %s with args %s" % (pex_builder.path(), cmdline),
v=options.verbosity,
)
pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
sys.exit(pex.run(args=list(cmdline)))
|
def main(args):
parser, resolver_options_builder = configure_clp()
try:
separator = args.index("--")
args, cmdline = args[:separator], args[separator + 1 :]
except ValueError:
args, cmdline = args, []
options, reqs = parser.parse_args(args=args)
if options.pex_root:
ENV.set("PEX_ROOT", options.pex_root)
else:
options.pex_root = (
ENV.PEX_ROOT
) # If option not specified fallback to env variable.
options.cache_dir = make_relative_to_root(options.cache_dir)
options.interpreter_cache_dir = make_relative_to_root(options.interpreter_cache_dir)
with ENV.patch(PEX_VERBOSE=str(options.verbosity)):
with TRACER.timed("Building pex"):
pex_builder = build_pex(reqs, options, resolver_options_builder)
if options.pex_name is not None:
log("Saving PEX file to %s" % options.pex_name, v=options.verbosity)
tmp_name = options.pex_name + "~"
safe_delete(tmp_name)
pex_builder.build(tmp_name)
os.rename(tmp_name, options.pex_name)
return 0
if options.platform != Platform.current():
log("WARNING: attempting to run PEX with differing platform!")
pex_builder.freeze()
log(
"Running PEX file at %s with args %s" % (pex_builder.path(), cmdline),
v=options.verbosity,
)
pex = PEX(pex_builder.path(), interpreter=pex_builder.interpreter)
sys.exit(pex.run(args=list(cmdline)))
|
https://github.com/pantsbuild/pex/issues/252
|
$ tox -e py27-package
GLOB sdist-make: /Users/billg/ws/git/pex/setup.py
py27-package inst-nodeps: /Users/billg/ws/git/pex/.tox/dist/pex-1.1.6.zip
py27-package installed: funcsigs==1.0.2,mock==2.0.0,pbr==1.9.1,pex==1.1.6,py==1.4.31,pytest==2.9.1,six==1.10.0,twitter.common.contextutil==0.3.4,twitter.common.dirutil==0.3.4,twitter.common.lang==0.3.4,twitter.common.testing==0.3.4
py27-package runtests: PYTHONHASHSEED='374850053'
py27-package runtests: commands[0] | pex --cache-dir /Users/billg/ws/git/pex/.tox/py27-package/tmp/buildcache wheel requests . -o dist/pex27 -e pex.bin.pex:main -v
Traceback (most recent call last):
File ".tox/py27-package/bin/pex", line 11, in <module>
sys.exit(main())
TypeError: main() takes exactly 1 argument (0 given)
ERROR: InvocationError: '/Users/billg/ws/git/pex/.tox/py27-package/bin/pex --cache-dir /Users/billg/ws/git/pex/.tox/py27-package/tmp/buildcache wheel requests . -o dist/pex27 -e pex.bin.pex:main -v'
________________________________________________________________________________________ summary _________________________________________________________________________________________
ERROR: py27-package: commands failed
|
TypeError
|
def packages_from_requirement_cached(
local_iterator, ttl, iterator, requirement, *args, **kw
):
packages = packages_from_requirement(local_iterator, requirement, *args, **kw)
if packages:
# match with exact requirement, always accept.
if requirement_is_exact(requirement):
TRACER.log("Package cache hit: %s" % requirement, V=3)
return packages
# match with inexact requirement, consider if ttl supplied.
if ttl:
now = time.time()
packages = [
package
for package in packages
if package.remote
or package.local
and (now - os.path.getmtime(package.path)) < ttl
]
if packages:
TRACER.log("Package cache hit (inexact): %s" % requirement, V=3)
return packages
# no matches in the local cache
TRACER.log("Package cache miss: %s" % requirement, V=3)
return packages_from_requirement(iterator, requirement, *args, **kw)
|
def packages_from_requirement_cached(
local_iterator, ttl, iterator, requirement, *args, **kw
):
packages = packages_from_requirement(local_iterator, requirement, *args, **kw)
if packages:
# match with exact requirement, always accept.
if requirement_is_exact(requirement):
TRACER.log("Package cache hit: %s" % requirement, V=3)
return packages
# match with inexact requirement, consider if ttl supplied.
if ttl:
now = time.time()
packages = [
package
for package in packages
if (now - os.path.getmtime(package.path)) < ttl
]
if packages:
TRACER.log("Package cache hit (inexact): %s" % requirement, V=3)
return packages
# no matches in the local cache
TRACER.log("Package cache miss: %s" % requirement, V=3)
return packages_from_requirement(iterator, requirement, *args, **kw)
|
https://github.com/pantsbuild/pex/issues/29
|
mba=pex=; pex -r pytest -r setuptools -r py==1.4.25 -o /tmp/pt.pex --cache-ttl=3600 -v -v -v -v -v
pex: Package cache hit (inexact): pytest
pex: Package cache hit (inexact): setuptools
pex: Package cache miss: py==1.4.25
pex: Resolving distributions :: Fetching https://pypi.python.org/packages/source/p/py/py-1.4.25.tar.gz#md5pex: Validated py-1.4.25.tar.gz (md5=1f0b84dab1681fc1bf07c0528e091efb)
pex: Validated py-1.4.25.tar.gz (md5=1f0b84dab1681fc1bf07c0528e091efb)
pex: Resolving distributions :: Packaging py :: Installing /var/folders/rd/_tjz8zts3g14md1kmf38z6w80000gn/pex: Resolving distributions :: Packaging pytest pex: Resolving distributions :: Packaging pytest :: Installing /var/folders/rd/_tjz8zts3g14md1kmf38z6w8000pex: Resolving distributions :: Resolving py>=1.4.25 Traceback (most recent call last):
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 272, in execute
self.execute_entry(entry_point, args)
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 320, in execute_entry
runner(entry_point)
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 343, in execute_pkg_resources
runner()
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/bin/pex.py", line 325, in main
pex_builder = build_pex(args, options)
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/bin/pex.py", line 303, in build_pex
cache_ttl=options.cache_ttl)
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/resolver.py", line 215, in resolve
existing=distribution_set.get(requirement.key))
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/resolver.py", line 94, in packages_from_requirement_cached
packages = [package for package in packages if (now - os.path.getmtime(package.path)) < ttl]
File "/Users/wickman/Python/CPython-2.7.8/lib/python2.7/genericpath.py", line 54, in getmtime
return os.stat(filename).st_mtime
OSError: [Errno 2] No such file or directory: '/packages/source/p/py/py-1.4.25.tar.gz'
|
OSError
|
def resolve(
requirements,
fetchers=None,
translator=None,
interpreter=None,
platform=None,
context=None,
threads=1,
precedence=None,
cache=None,
cache_ttl=None,
):
"""Produce all distributions needed to (recursively) meet `requirements`
:param requirements: An iterator of Requirement-like things, either
:class:`pkg_resources.Requirement` objects or requirement strings.
:keyword fetchers: (optional) A list of :class:`Fetcher` objects for locating packages. If
unspecified, the default is to look for packages on PyPI.
:keyword translator: (optional) A :class:`Translator` object for translating packages into
distributions. If unspecified, the default is constructed from `Translator.default`.
:keyword interpreter: (optional) A :class:`PythonInterpreter` object to use for building
distributions and for testing distribution compatibility.
:keyword platform: (optional) A PEP425-compatible platform string to use for filtering
compatible distributions. If unspecified, the current platform is used, as determined by
`Platform.current()`.
:keyword context: (optional) A :class:`Context` object to use for network access. If
unspecified, the resolver will attempt to use the best available network context.
:keyword threads: (optional) A number of parallel threads to use for resolving distributions.
By default 1.
:type threads: int
:keyword precedence: (optional) An ordered list of allowable :class:`Package` classes
to be used for producing distributions. For example, if precedence is supplied as
``(WheelPackage, SourcePackage)``, wheels will be preferred over building from source, and
eggs will not be used at all. If ``(WheelPackage, EggPackage)`` is suppplied, both wheels and
eggs will be used, but the resolver will not resort to building anything from source.
:keyword cache: (optional) A directory to use to cache distributions locally.
:keyword cache_ttl: (optional integer in seconds) If specified, consider non-exact matches when
resolving requirements. For example, if ``setuptools==2.2`` is specified and setuptools 2.2 is
available in the cache, it will always be used. However, if a non-exact requirement such as
``setuptools>=2,<3`` is specified and there exists a setuptools distribution newer than
cache_ttl seconds that satisfies the requirement, then it will be used. If the distribution
is older than cache_ttl seconds, it will be ignored. If ``cache_ttl`` is not specified,
resolving inexact requirements will always result in making network calls through the
``context``.
:returns: List of :class:`pkg_resources.Distribution` instances meeting ``requirements``.
:raises Unsatisfiable: If ``requirements`` is not transitively satisfiable.
:raises Untranslateable: If no compatible distributions could be acquired for
a particular requirement.
This method improves upon the setuptools dependency resolution algorithm by maintaining sets of
all compatible distributions encountered for each requirement rather than the single best
distribution encountered for each requirement. This prevents situations where ``tornado`` and
``tornado==2.0`` could be treated as incompatible with each other because the "best
distribution" when encountering ``tornado`` was tornado 3.0. Instead, ``resolve`` maintains the
set of compatible distributions for each requirement as it is encountered, and iteratively filters
the set. If the set of distributions ever becomes empty, then ``Unsatisfiable`` is raised.
.. versionchanged:: 0.8
A number of keywords were added to make requirement resolution slightly easier to configure.
The optional ``obtainer`` keyword was replaced by ``fetchers``, ``translator``, ``context``,
``threads``, ``precedence``, ``cache`` and ``cache_ttl``, also all optional keywords.
"""
distributions = _DistributionCache()
interpreter = interpreter or PythonInterpreter.get()
platform = platform or Platform.current()
context = context or Context.get()
crawler = Crawler(context, threads=threads)
fetchers = fetchers[:] if fetchers is not None else [PyPIFetcher()]
translator = translator or Translator.default(
interpreter=interpreter, platform=platform
)
if cache:
local_fetcher = Fetcher([cache])
local_iterator = Iterator(
fetchers=[local_fetcher], crawler=crawler, precedence=precedence
)
package_iterator = partial(
packages_from_requirement_cached, local_iterator, cache_ttl
)
else:
package_iterator = packages_from_requirement
iterator = Iterator(fetchers=fetchers, crawler=crawler, precedence=precedence)
requirements = maybe_requirement_list(requirements)
distribution_set = defaultdict(list)
requirement_set = defaultdict(list)
processed_requirements = set()
def requires(package, requirement):
if not distributions.has(package):
with TRACER.timed("Fetching %s" % package.url, V=2):
local_package = Package.from_href(context.fetch(package, into=cache))
if package.remote:
# this was a remote resolution -- so if we copy from remote to local but the
# local already existed, update the mtime of the local so that it is correct
# with respect to cache_ttl.
os.utime(local_package.path, None)
with TRACER.timed(
"Translating %s into distribution" % local_package.path, V=2
):
dist = translator.translate(local_package, into=cache)
if dist is None:
raise Untranslateable("Package %s is not translateable." % package)
if not distribution_compatible(dist, interpreter, platform):
raise Untranslateable(
"Could not get distribution for %s on appropriate platform."
% package
)
distributions.put(package, dist)
dist = distributions.get(package)
return dist.requires(extras=requirement.extras)
while True:
while requirements:
requirement = requirements.pop(0)
requirement_set[requirement.key].append(requirement)
distribution_list = distribution_set[requirement.key] = package_iterator(
iterator,
requirement,
interpreter,
platform,
existing=distribution_set.get(requirement.key),
)
if not distribution_list:
raise Unsatisfiable(
"Cannot satisfy requirements: %s" % requirement_set[requirement.key]
)
# get their dependencies
for requirement_key, requirement_list in requirement_set.items():
new_requirements = OrderedSet()
highest_package = distribution_set[requirement_key][0]
for requirement in requirement_list:
if requirement in processed_requirements:
continue
new_requirements.update(requires(highest_package, requirement))
processed_requirements.add(requirement)
requirements.extend(list(new_requirements))
if not requirements:
break
to_activate = set()
for distribution_list in distribution_set.values():
to_activate.add(distributions.get(distribution_list[0]))
return to_activate
|
def resolve(
requirements,
fetchers=None,
translator=None,
interpreter=None,
platform=None,
context=None,
threads=1,
precedence=None,
cache=None,
cache_ttl=None,
):
"""Produce all distributions needed to (recursively) meet `requirements`
:param requirements: An iterator of Requirement-like things, either
:class:`pkg_resources.Requirement` objects or requirement strings.
:keyword fetchers: (optional) A list of :class:`Fetcher` objects for locating packages. If
unspecified, the default is to look for packages on PyPI.
:keyword translator: (optional) A :class:`Translator` object for translating packages into
distributions. If unspecified, the default is constructed from `Translator.default`.
:keyword interpreter: (optional) A :class:`PythonInterpreter` object to use for building
distributions and for testing distribution compatibility.
:keyword platform: (optional) A PEP425-compatible platform string to use for filtering
compatible distributions. If unspecified, the current platform is used, as determined by
`Platform.current()`.
:keyword context: (optional) A :class:`Context` object to use for network access. If
unspecified, the resolver will attempt to use the best available network context.
:keyword threads: (optional) A number of parallel threads to use for resolving distributions.
By default 1.
:type threads: int
:keyword precedence: (optional) An ordered list of allowable :class:`Package` classes
to be used for producing distributions. For example, if precedence is supplied as
``(WheelPackage, SourcePackage)``, wheels will be preferred over building from source, and
eggs will not be used at all. If ``(WheelPackage, EggPackage)`` is suppplied, both wheels and
eggs will be used, but the resolver will not resort to building anything from source.
:keyword cache: (optional) A directory to use to cache distributions locally.
:keyword cache_ttl: (optional integer in seconds) If specified, consider non-exact matches when
resolving requirements. For example, if ``setuptools==2.2`` is specified and setuptools 2.2 is
available in the cache, it will always be used. However, if a non-exact requirement such as
``setuptools>=2,<3`` is specified and there exists a setuptools distribution newer than
cache_ttl seconds that satisfies the requirement, then it will be used. If the distribution
is older than cache_ttl seconds, it will be ignored. If ``cache_ttl`` is not specified,
resolving inexact requirements will always result in making network calls through the
``context``.
:returns: List of :class:`pkg_resources.Distribution` instances meeting ``requirements``.
:raises Unsatisfiable: If ``requirements`` is not transitively satisfiable.
:raises Untranslateable: If no compatible distributions could be acquired for
a particular requirement.
This method improves upon the setuptools dependency resolution algorithm by maintaining sets of
all compatible distributions encountered for each requirement rather than the single best
distribution encountered for each requirement. This prevents situations where ``tornado`` and
``tornado==2.0`` could be treated as incompatible with each other because the "best
distribution" when encountering ``tornado`` was tornado 3.0. Instead, ``resolve`` maintains the
set of compatible distributions for each requirement as it is encountered, and iteratively filters
the set. If the set of distributions ever becomes empty, then ``Unsatisfiable`` is raised.
.. versionchanged:: 0.8
A number of keywords were added to make requirement resolution slightly easier to configure.
The optional ``obtainer`` keyword was replaced by ``fetchers``, ``translator``, ``context``,
``threads``, ``precedence``, ``cache`` and ``cache_ttl``, also all optional keywords.
"""
distributions = _DistributionCache()
interpreter = interpreter or PythonInterpreter.get()
platform = platform or Platform.current()
context = context or Context.get()
crawler = Crawler(context, threads=threads)
fetchers = fetchers[:] if fetchers is not None else [PyPIFetcher()]
translator = translator or Translator.default(
interpreter=interpreter, platform=platform
)
if cache:
local_fetcher = Fetcher([cache])
local_iterator = Iterator(
fetchers=[local_fetcher], crawler=crawler, precedence=precedence
)
package_iterator = partial(
packages_from_requirement_cached, local_iterator, cache_ttl
)
else:
package_iterator = packages_from_requirement
iterator = Iterator(fetchers=fetchers, crawler=crawler, precedence=precedence)
requirements = maybe_requirement_list(requirements)
distribution_set = defaultdict(list)
requirement_set = defaultdict(list)
processed_requirements = set()
def requires(package, requirement):
if not distributions.has(package):
local_package = Package.from_href(context.fetch(package, into=cache))
if package.remote:
# this was a remote resolution -- so if we copy from remote to local but the
# local already existed, update the mtime of the local so that it is correct
# with respect to cache_ttl.
os.utime(local_package.path, None)
dist = translator.translate(local_package, into=cache)
if dist is None:
raise Untranslateable("Package %s is not translateable." % package)
if not distribution_compatible(dist, interpreter, platform):
raise Untranslateable(
"Could not get distribution for %s on appropriate platform."
% package
)
distributions.put(package, dist)
dist = distributions.get(package)
return dist.requires(extras=requirement.extras)
while True:
while requirements:
requirement = requirements.pop(0)
requirement_set[requirement.key].append(requirement)
distribution_list = distribution_set[requirement.key] = package_iterator(
iterator,
requirement,
interpreter,
platform,
existing=distribution_set.get(requirement.key),
)
if not distribution_list:
raise Unsatisfiable(
"Cannot satisfy requirements: %s" % requirement_set[requirement.key]
)
# get their dependencies
for requirement_key, requirement_list in requirement_set.items():
new_requirements = OrderedSet()
highest_package = distribution_set[requirement_key][0]
for requirement in requirement_list:
if requirement in processed_requirements:
continue
new_requirements.update(requires(highest_package, requirement))
processed_requirements.add(requirement)
requirements.extend(list(new_requirements))
if not requirements:
break
to_activate = set()
for distribution_list in distribution_set.values():
to_activate.add(distributions.get(distribution_list[0]))
return to_activate
|
https://github.com/pantsbuild/pex/issues/29
|
mba=pex=; pex -r pytest -r setuptools -r py==1.4.25 -o /tmp/pt.pex --cache-ttl=3600 -v -v -v -v -v
pex: Package cache hit (inexact): pytest
pex: Package cache hit (inexact): setuptools
pex: Package cache miss: py==1.4.25
pex: Resolving distributions :: Fetching https://pypi.python.org/packages/source/p/py/py-1.4.25.tar.gz#md5pex: Validated py-1.4.25.tar.gz (md5=1f0b84dab1681fc1bf07c0528e091efb)
pex: Validated py-1.4.25.tar.gz (md5=1f0b84dab1681fc1bf07c0528e091efb)
pex: Resolving distributions :: Packaging py :: Installing /var/folders/rd/_tjz8zts3g14md1kmf38z6w80000gn/pex: Resolving distributions :: Packaging pytest pex: Resolving distributions :: Packaging pytest :: Installing /var/folders/rd/_tjz8zts3g14md1kmf38z6w8000pex: Resolving distributions :: Resolving py>=1.4.25 Traceback (most recent call last):
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 272, in execute
self.execute_entry(entry_point, args)
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 320, in execute_entry
runner(entry_point)
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 343, in execute_pkg_resources
runner()
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/bin/pex.py", line 325, in main
pex_builder = build_pex(args, options)
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/bin/pex.py", line 303, in build_pex
cache_ttl=options.cache_ttl)
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/resolver.py", line 215, in resolve
existing=distribution_set.get(requirement.key))
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/resolver.py", line 94, in packages_from_requirement_cached
packages = [package for package in packages if (now - os.path.getmtime(package.path)) < ttl]
File "/Users/wickman/Python/CPython-2.7.8/lib/python2.7/genericpath.py", line 54, in getmtime
return os.stat(filename).st_mtime
OSError: [Errno 2] No such file or directory: '/packages/source/p/py/py-1.4.25.tar.gz'
|
OSError
|
def requires(package, requirement):
if not distributions.has(package):
with TRACER.timed("Fetching %s" % package.url, V=2):
local_package = Package.from_href(context.fetch(package, into=cache))
if package.remote:
# this was a remote resolution -- so if we copy from remote to local but the
# local already existed, update the mtime of the local so that it is correct
# with respect to cache_ttl.
os.utime(local_package.path, None)
with TRACER.timed("Translating %s into distribution" % local_package.path, V=2):
dist = translator.translate(local_package, into=cache)
if dist is None:
raise Untranslateable("Package %s is not translateable." % package)
if not distribution_compatible(dist, interpreter, platform):
raise Untranslateable(
"Could not get distribution for %s on appropriate platform." % package
)
distributions.put(package, dist)
dist = distributions.get(package)
return dist.requires(extras=requirement.extras)
|
def requires(package, requirement):
if not distributions.has(package):
local_package = Package.from_href(context.fetch(package, into=cache))
if package.remote:
# this was a remote resolution -- so if we copy from remote to local but the
# local already existed, update the mtime of the local so that it is correct
# with respect to cache_ttl.
os.utime(local_package.path, None)
dist = translator.translate(local_package, into=cache)
if dist is None:
raise Untranslateable("Package %s is not translateable." % package)
if not distribution_compatible(dist, interpreter, platform):
raise Untranslateable(
"Could not get distribution for %s on appropriate platform." % package
)
distributions.put(package, dist)
dist = distributions.get(package)
return dist.requires(extras=requirement.extras)
|
https://github.com/pantsbuild/pex/issues/29
|
mba=pex=; pex -r pytest -r setuptools -r py==1.4.25 -o /tmp/pt.pex --cache-ttl=3600 -v -v -v -v -v
pex: Package cache hit (inexact): pytest
pex: Package cache hit (inexact): setuptools
pex: Package cache miss: py==1.4.25
pex: Resolving distributions :: Fetching https://pypi.python.org/packages/source/p/py/py-1.4.25.tar.gz#md5pex: Validated py-1.4.25.tar.gz (md5=1f0b84dab1681fc1bf07c0528e091efb)
pex: Validated py-1.4.25.tar.gz (md5=1f0b84dab1681fc1bf07c0528e091efb)
pex: Resolving distributions :: Packaging py :: Installing /var/folders/rd/_tjz8zts3g14md1kmf38z6w80000gn/pex: Resolving distributions :: Packaging pytest pex: Resolving distributions :: Packaging pytest :: Installing /var/folders/rd/_tjz8zts3g14md1kmf38z6w8000pex: Resolving distributions :: Resolving py>=1.4.25 Traceback (most recent call last):
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 272, in execute
self.execute_entry(entry_point, args)
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 320, in execute_entry
runner(entry_point)
File "/Users/wickman/Local/bin/pex/.bootstrap/_pex/pex.py", line 343, in execute_pkg_resources
runner()
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/bin/pex.py", line 325, in main
pex_builder = build_pex(args, options)
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/bin/pex.py", line 303, in build_pex
cache_ttl=options.cache_ttl)
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/resolver.py", line 215, in resolve
existing=distribution_set.get(requirement.key))
File "/Users/wickman/.pex/install/pex-0.8.0.dev0-py2.py3-none-any.whl.0d5be588022f0aad6de2b84b77a8a890c43ad942/pex-0.8.0.dev0-py2.py3-none-any.whl/pex/resolver.py", line 94, in packages_from_requirement_cached
packages = [package for package in packages if (now - os.path.getmtime(package.path)) < ttl]
File "/Users/wickman/Python/CPython-2.7.8/lib/python2.7/genericpath.py", line 54, in getmtime
return os.stat(filename).st_mtime
OSError: [Errno 2] No such file or directory: '/packages/source/p/py/py-1.4.25.tar.gz'
|
OSError
|
def deserialize(collection, topological=True):
"""
Load a collection from file system.
:param collection: The collection to deserialize.
:param topological: If the collection list should be sorted by the
collection dict depth value or not.
:type topological: bool
"""
datastruct = deserialize_raw(collection.collection_types())
if topological and type(datastruct) == list:
datastruct.sort(key=lambda x: x["depth"])
if type(datastruct) == dict:
collection.from_dict(datastruct)
elif type(datastruct) == list:
collection.from_list(datastruct)
|
def deserialize(collection, topological=True):
"""
Load a collection from file system.
:param collection: The collection type the deserialize
:param topological: If the dict/list should be sorted or not.
:type topological: bool
"""
datastruct = deserialize_raw(collection.collection_types())
if topological and type(datastruct) == list:
# FIXME
# datastruct.sort(key=__depth_cmp)
pass
if type(datastruct) == dict:
collection.from_dict(datastruct)
elif type(datastruct) == list:
collection.from_list(datastruct)
|
https://github.com/cobbler/cobbler/issues/2259
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/manager.py", line 185, in deserialize
serializer.deserialize(collection)
File "/usr/local/lib/python3.7/dist-packages/cobbler/serializer.py", line 124, in deserialize
storage_module.deserialize(collection, topological)
File "/usr/local/lib/python3.7/dist-packages/cobbler/modules/serializers/file.py", line 187, in deserialize
collection.from_list(datastruct)
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/collection.py", line 179, in from_list
self.add(item)
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/collection.py", line 299, in add
ref.check_if_valid()
File "/usr/local/lib/python3.7/dist-packages/cobbler/items/profile.py", line 126, in check_if_valid
raise CX("Error with profile %s - distro is required" % (self.name))
cobbler.cexceptions.CX: 'Error with profile testSP - distro is required'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/bin/cobblerd", line 75, in main
api = cobbler_api.CobblerAPI(is_cobblerd=True)
File "/usr/local/lib/python3.7/dist-packages/cobbler/api.py", line 109, in __init__
self.deserialize()
File "/usr/local/lib/python3.7/dist-packages/cobbler/api.py", line 893, in deserialize
return self._collection_mgr.deserialize()
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/manager.py", line 187, in deserialize
raise CX("serializer: error loading collection %s: %s. Check /etc/cobbler/modules.conf" % (collection.collection_type(), e))
cobbler.cexceptions.CX: "serializer: error loading collection profile: 'Error with profile testSP - distro is required'. Check /etc/cobbler/modules.conf"
|
cobbler.cexceptions.CX
|
def deserialize(collection, topological=True):
"""
Load a collection from the database.
:param collection: The collection to deserialize.
:param topological: If the collection list should be sorted by the
collection dict depth value or not.
:type topological: bool
"""
datastruct = deserialize_raw(collection.collection_type())
if topological and type(datastruct) == list:
datastruct.sort(key=lambda x: x["depth"])
if type(datastruct) == dict:
collection.from_dict(datastruct)
elif type(datastruct) == list:
collection.from_list(datastruct)
|
def deserialize(collection, topological=True):
"""
Load a collection from the database.
:param collection: The collection to deserialize.
:param topological: This sorts the returned dict.
:type topological: bool
"""
datastruct = deserialize_raw(collection.collection_type())
if topological and type(datastruct) == list:
datastruct.sort(__depth_cmp)
if type(datastruct) == dict:
collection.from_dict(datastruct)
elif type(datastruct) == list:
collection.from_list(datastruct)
|
https://github.com/cobbler/cobbler/issues/2259
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/manager.py", line 185, in deserialize
serializer.deserialize(collection)
File "/usr/local/lib/python3.7/dist-packages/cobbler/serializer.py", line 124, in deserialize
storage_module.deserialize(collection, topological)
File "/usr/local/lib/python3.7/dist-packages/cobbler/modules/serializers/file.py", line 187, in deserialize
collection.from_list(datastruct)
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/collection.py", line 179, in from_list
self.add(item)
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/collection.py", line 299, in add
ref.check_if_valid()
File "/usr/local/lib/python3.7/dist-packages/cobbler/items/profile.py", line 126, in check_if_valid
raise CX("Error with profile %s - distro is required" % (self.name))
cobbler.cexceptions.CX: 'Error with profile testSP - distro is required'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/bin/cobblerd", line 75, in main
api = cobbler_api.CobblerAPI(is_cobblerd=True)
File "/usr/local/lib/python3.7/dist-packages/cobbler/api.py", line 109, in __init__
self.deserialize()
File "/usr/local/lib/python3.7/dist-packages/cobbler/api.py", line 893, in deserialize
return self._collection_mgr.deserialize()
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/manager.py", line 187, in deserialize
raise CX("serializer: error loading collection %s: %s. Check /etc/cobbler/modules.conf" % (collection.collection_type(), e))
cobbler.cexceptions.CX: "serializer: error loading collection profile: 'Error with profile testSP - distro is required'. Check /etc/cobbler/modules.conf"
|
cobbler.cexceptions.CX
|
def deserialize(collection, topological=True):
"""
Load a collection from disk.
:param collection: The Cobbler collection to know the type of the item.
:param topological: Sort collection based on each items' depth attribute
in the list of collection items. This ensures
properly ordered object loading from disk with
objects having parent/child relationships, i.e.
profiles/subprofiles. See cobbler/items/item.py
:type topological: bool
"""
__grab_lock()
storage_module = __get_storage_module(collection.collection_type())
storage_module.deserialize(collection, topological)
__release_lock()
|
def deserialize(collection, topological=True):
"""
Load a collection from disk.
:param collection: The Cobbler collection to know the type of the item.
:param topological: Unkown parameter.
:type topological: bool
"""
__grab_lock()
storage_module = __get_storage_module(collection.collection_type())
storage_module.deserialize(collection, topological)
__release_lock()
|
https://github.com/cobbler/cobbler/issues/2259
|
Traceback (most recent call last):
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/manager.py", line 185, in deserialize
serializer.deserialize(collection)
File "/usr/local/lib/python3.7/dist-packages/cobbler/serializer.py", line 124, in deserialize
storage_module.deserialize(collection, topological)
File "/usr/local/lib/python3.7/dist-packages/cobbler/modules/serializers/file.py", line 187, in deserialize
collection.from_list(datastruct)
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/collection.py", line 179, in from_list
self.add(item)
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/collection.py", line 299, in add
ref.check_if_valid()
File "/usr/local/lib/python3.7/dist-packages/cobbler/items/profile.py", line 126, in check_if_valid
raise CX("Error with profile %s - distro is required" % (self.name))
cobbler.cexceptions.CX: 'Error with profile testSP - distro is required'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/usr/local/bin/cobblerd", line 75, in main
api = cobbler_api.CobblerAPI(is_cobblerd=True)
File "/usr/local/lib/python3.7/dist-packages/cobbler/api.py", line 109, in __init__
self.deserialize()
File "/usr/local/lib/python3.7/dist-packages/cobbler/api.py", line 893, in deserialize
return self._collection_mgr.deserialize()
File "/usr/local/lib/python3.7/dist-packages/cobbler/cobbler_collections/manager.py", line 187, in deserialize
raise CX("serializer: error loading collection %s: %s. Check /etc/cobbler/modules.conf" % (collection.collection_type(), e))
cobbler.cexceptions.CX: "serializer: error loading collection profile: 'Error with profile testSP - distro is required'. Check /etc/cobbler/modules.conf"
|
cobbler.cexceptions.CX
|
def heats(diagrams, sampling, step_size, sigma):
# WARNING: modifies `diagrams` in place
heats_ = np.zeros((len(diagrams), len(sampling), len(sampling)), dtype=float)
# If the step size is zero, we return a trivial image
if step_size == 0:
return heats_
# Set the values outside of the sampling range
first_sampling, last_sampling = sampling[0, 0, 0], sampling[-1, 0, 0]
diagrams[diagrams < first_sampling] = first_sampling
diagrams[diagrams > last_sampling] = last_sampling
# Calculate the value of `sigma` in pixel units
sigma_pixel = sigma / step_size
for i, diagram in enumerate(diagrams):
nontrivial_points_idx = np.flatnonzero(diagram[:, 1] != diagram[:, 0])
diagram_nontrivial_pixel_coords = np.array(
(diagram - first_sampling) / step_size, dtype=int
)[nontrivial_points_idx]
image = heats_[i]
_sample_image(image, diagram_nontrivial_pixel_coords)
gaussian_filter(image, sigma_pixel, mode="constant", output=image)
heats_ -= np.transpose(heats_, (0, 2, 1))
heats_ /= step_size**2
heats_ = np.rot90(heats_, k=1, axes=(1, 2))
return heats_
|
def heats(diagrams, sampling, step_size, sigma):
heats_ = np.zeros((diagrams.shape[0], sampling.shape[0], sampling.shape[0]))
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
diagrams[diagrams > sampling[-1, 0]] = sampling[-1, 0]
diagrams = np.array((diagrams - sampling[0, 0]) / step_size, dtype=int)
[_heat(heats_[i], sampled_diag, sigma) for i, sampled_diag in enumerate(diagrams)]
heats_ = heats_ - np.transpose(heats_, (0, 2, 1))
heats_ = np.rot90(heats_, k=1, axes=(1, 2))
return heats_
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def persistence_images(diagrams, sampling, step_size, sigma, weights):
# For persistence images, `sampling` is a tall matrix with two columns
# (the first for birth and the second for persistence), and `step_size` is
# a 2d array
# WARNING: modifies `diagrams` in place
persistence_images_ = np.zeros(
(len(diagrams), len(sampling), len(sampling)), dtype=float
)
# If both step sizes are zero, we return a trivial image
if (step_size == 0).all():
return persistence_images_
# Transform diagrams from (birth, death, dim) to (birth, persistence, dim)
diagrams[:, :, 1] -= diagrams[:, :, 0]
sigma_pixel = []
first_samplings = sampling[0]
last_samplings = sampling[-1]
for ax in [0, 1]:
diagrams_ax = diagrams[:, :, ax]
# Set the values outside of the sampling range
diagrams_ax[diagrams_ax < first_samplings[ax]] = first_samplings[ax]
diagrams_ax[diagrams_ax > last_samplings[ax]] = last_samplings[ax]
# Calculate the value of the component of `sigma` in pixel units
sigma_pixel.append(sigma / step_size[ax])
# Sample the image, apply the weights, smoothen
for i, diagram in enumerate(diagrams):
nontrivial_points_idx = np.flatnonzero(diagram[:, 1])
diagram_nontrivial_pixel_coords = np.array(
(diagram - first_samplings) / step_size, dtype=int
)[nontrivial_points_idx]
image = persistence_images_[i]
_sample_image(image, diagram_nontrivial_pixel_coords)
image *= weights
gaussian_filter(image, sigma_pixel, mode="constant", output=image)
persistence_images_ = np.rot90(persistence_images_, k=1, axes=(1, 2))
persistence_images_ /= np.product(step_size)
return persistence_images_
|
def persistence_images(diagrams, sampling, step_size, weights, sigma):
persistence_images_ = np.zeros(
(diagrams.shape[0], sampling.shape[0], sampling.shape[0])
)
# Transform diagrams from (birth, death, dim) to (birth, persistence, dim)
diagrams[:, :, 1] = diagrams[:, :, 1] - diagrams[:, :, 0]
for axis in [0, 1]:
# Set the values outside of the sampling range to the sampling range.
diagrams[:, :, axis][diagrams[:, :, axis] < sampling[0, axis]] = sampling[
0, axis
]
diagrams[:, :, axis][diagrams[:, :, axis] > sampling[-1, axis]] = sampling[
-1, axis
]
# Convert into pixel
diagrams[:, :, axis] = np.array(
(diagrams[:, :, axis] - sampling[0, axis]) / step_size[axis], dtype=int
)
# Sample the image
[
_sample_image(persistence_images_[i], sampled_diag)
for i, sampled_diag in enumerate(diagrams)
]
# Apply the weights
persistence_images_ *= weights / np.max(weights)
# Smoothen the weighted-image
for i, image in enumerate(persistence_images_):
persistence_images_[i] = gaussian_filter(image, sigma, mode="reflect")
persistence_images_ = np.rot90(persistence_images_, k=1, axes=(1, 2))
return persistence_images_
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def silhouettes(diagrams, sampling, power, **kwargs):
"""Input: a batch of persistence diagrams with a sampling (3d array
returned by _bin) of a one-dimensional range.
"""
sampling = np.transpose(sampling, axes=(1, 2, 0))
weights = np.diff(diagrams, axis=2)
if power > 8.0:
weights = weights / np.max(weights, axis=1, keepdims=True)
weights = weights**power
total_weights = np.sum(weights, axis=1)
# Next line is a trick to avoid NaNs when computing `fibers_weighted_sum`
total_weights[total_weights == 0.0] = np.inf
midpoints = (diagrams[:, :, [1]] + diagrams[:, :, [0]]) / 2.0
heights = (diagrams[:, :, [1]] - diagrams[:, :, [0]]) / 2.0
fibers = np.maximum(-np.abs(sampling - midpoints) + heights, 0)
fibers_weighted_sum = np.sum(weights * fibers, axis=1) / total_weights
return fibers_weighted_sum
|
def silhouettes(diagrams, sampling, power, **kwargs):
"""Input: a batch of persistence diagrams with a sampling (3d array
returned by _bin) of a one-dimensional range.
"""
sampling = np.transpose(sampling, axes=(1, 2, 0))
weights = np.diff(diagrams, axis=2)[:, :, [0]]
if power > 8.0:
weights = weights / np.max(weights, axis=1, keepdims=True)
weights = weights**power
total_weights = np.sum(weights, axis=1)
midpoints = (diagrams[:, :, [1]] + diagrams[:, :, [0]]) / 2.0
heights = (diagrams[:, :, [1]] - diagrams[:, :, [0]]) / 2.0
fibers = np.maximum(-np.abs(sampling - midpoints) + heights, 0)
fibers_weighted_sum = np.sum(weights * fibers, axis=1) / total_weights
return fibers_weighted_sum
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def betti_distances(diagrams_1, diagrams_2, sampling, step_size, p=2.0, **kwargs):
step_size_factor = step_size ** (1 / p)
are_arrays_equal = np.array_equal(diagrams_1, diagrams_2)
betti_curves_1 = betti_curves(diagrams_1, sampling)
if are_arrays_equal:
distances = pdist(betti_curves_1, "minkowski", p=p)
distances *= step_size_factor
return squareform(distances)
betti_curves_2 = betti_curves(diagrams_2, sampling)
distances = cdist(betti_curves_1, betti_curves_2, "minkowski", p=p)
distances *= step_size_factor
return distances
|
def betti_distances(diagrams_1, diagrams_2, sampling, step_size, p=2.0, **kwargs):
betti_curves_1 = betti_curves(diagrams_1, sampling)
if np.array_equal(diagrams_1, diagrams_2):
unnorm_dist = squareform(pdist(betti_curves_1, "minkowski", p=p))
return (step_size ** (1 / p)) * unnorm_dist
betti_curves_2 = betti_curves(diagrams_2, sampling)
unnorm_dist = cdist(betti_curves_1, betti_curves_2, "minkowski", p=p)
return (step_size ** (1 / p)) * unnorm_dist
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def landscape_distances(
diagrams_1, diagrams_2, sampling, step_size, p=2.0, n_layers=1, **kwargs
):
step_size_factor = step_size ** (1 / p)
n_samples_1, n_points_1 = diagrams_1.shape[:2]
n_layers_1 = min(n_layers, n_points_1)
if np.array_equal(diagrams_1, diagrams_2):
ls_1 = landscapes(diagrams_1, sampling, n_layers_1).reshape(n_samples_1, -1)
distances = pdist(ls_1, "minkowski", p=p)
distances *= step_size_factor
return squareform(distances)
n_samples_2, n_points_2 = diagrams_2.shape[:2]
n_layers_2 = min(n_layers, n_points_2)
n_layers = max(n_layers_1, n_layers_2)
ls_1 = landscapes(diagrams_1, sampling, n_layers).reshape(n_samples_1, -1)
ls_2 = landscapes(diagrams_2, sampling, n_layers).reshape(n_samples_2, -1)
distances = cdist(ls_1, ls_2, "minkowski", p=p)
distances *= step_size_factor
return distances
|
def landscape_distances(
diagrams_1, diagrams_2, sampling, step_size, p=2.0, n_layers=1, **kwargs
):
n_samples_1, n_points_1 = diagrams_1.shape[:2]
n_layers_1 = min(n_layers, n_points_1)
if np.array_equal(diagrams_1, diagrams_2):
ls_1 = landscapes(diagrams_1, sampling, n_layers_1).reshape(n_samples_1, -1)
unnorm_dist = squareform(pdist(ls_1, "minkowski", p=p))
return (step_size ** (1 / p)) * unnorm_dist
n_samples_2, n_points_2 = diagrams_2.shape[:2]
n_layers_2 = min(n_layers, n_points_2)
n_layers = max(n_layers_1, n_layers_2)
ls_1 = landscapes(diagrams_1, sampling, n_layers).reshape(n_samples_1, -1)
ls_2 = landscapes(diagrams_2, sampling, n_layers).reshape(n_samples_2, -1)
unnorm_dist = cdist(ls_1, ls_2, "minkowski", p=p)
return (step_size ** (1 / p)) * unnorm_dist
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def heat_distances(
diagrams_1, diagrams_2, sampling, step_size, sigma=0.1, p=2.0, **kwargs
):
# WARNING: `heats` modifies `diagrams` in place
step_size_factor = step_size ** (2 / p)
are_arrays_equal = np.array_equal(diagrams_1, diagrams_2)
heats_1 = heats(diagrams_1, sampling, step_size, sigma).reshape(len(diagrams_1), -1)
if are_arrays_equal:
distances = pdist(heats_1, "minkowski", p=p)
distances *= step_size_factor
return squareform(distances)
heats_2 = heats(diagrams_2, sampling, step_size, sigma).reshape(len(diagrams_2), -1)
distances = cdist(heats_1, heats_2, "minkowski", p=p)
distances *= step_size_factor
return distances
|
def heat_distances(
diagrams_1, diagrams_2, sampling, step_size, sigma=1.0, p=2.0, **kwargs
):
heat_1 = heats(diagrams_1, sampling, step_size, sigma).reshape(
diagrams_1.shape[0], -1
)
if np.array_equal(diagrams_1, diagrams_2):
unnorm_dist = squareform(pdist(heat_1, "minkowski", p=p))
return (step_size ** (1 / p)) * unnorm_dist
heat_2 = heats(diagrams_2, sampling, step_size, sigma).reshape(
diagrams_2.shape[0], -1
)
unnorm_dist = cdist(heat_1, heat_2, "minkowski", p=p)
return (step_size ** (1 / p)) * unnorm_dist
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def persistence_image_distances(
diagrams_1,
diagrams_2,
sampling,
step_size,
sigma=0.1,
weight_function=np.ones_like,
p=2.0,
**kwargs,
):
# For persistence images, `sampling` is a tall matrix with two columns
# (the first for birth and the second for persistence), and `step_size` is
# a 2d array
weights = weight_function(sampling[:, 1])
step_sizes_factor = np.product(step_size) ** (1 / p)
# WARNING: `persistence_images` modifies `diagrams` in place
are_arrays_equal = np.array_equal(diagrams_1, diagrams_2)
persistence_images_1 = persistence_images(
diagrams_1, sampling, step_size, sigma, weights
).reshape(len(diagrams_1), -1)
if are_arrays_equal:
distances = pdist(persistence_images_1, "minkowski", p=p)
distances *= step_sizes_factor
return squareform(distances)
persistence_images_2 = persistence_images(
diagrams_2, sampling, step_size, sigma, weights
).reshape(len(diagrams_2), -1)
distances = cdist(persistence_images_1, persistence_images_2, "minkowski", p=p)
distances *= step_sizes_factor
return distances
|
def persistence_image_distances(
diagrams_1,
diagrams_2,
sampling,
step_size,
weight_function=lambda x: x,
sigma=1.0,
p=2.0,
**kwargs,
):
sampling_ = np.copy(sampling.reshape((-1,)))
weights = weight_function(sampling_ - sampling_[0])
persistence_image_1 = persistence_images(
diagrams_1, sampling_, step_size, weights, sigma
).reshape(diagrams_1.shape[0], -1)
if np.array_equal(diagrams_1, diagrams_2):
unnorm_dist = squareform(pdist(persistence_image_1, "minkowski", p=p))
return (step_size ** (1 / p)) * unnorm_dist
persistence_image_2 = persistence_images(
diagrams_2,
sampling_,
step_size,
weights,
sigma,
).reshape(diagrams_2.shape[0], -1)
unnorm_dist = cdist(persistence_image_1, persistence_image_2, "minkowski", p=p)
return (step_size ** (1 / p)) * unnorm_dist
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def silhouette_distances(
diagrams_1, diagrams_2, sampling, step_size, power=1.0, p=2.0, **kwargs
):
step_size_factor = step_size ** (1 / p)
are_arrays_equal = np.array_equal(diagrams_1, diagrams_2)
silhouettes_1 = silhouettes(diagrams_1, sampling, power)
if are_arrays_equal:
distances = pdist(silhouettes_1, "minkowski", p=p)
distances *= step_size_factor
return squareform(distances)
silhouettes_2 = silhouettes(diagrams_2, sampling, power)
distances = cdist(silhouettes_1, silhouettes_2, "minkowski", p=p)
distances *= step_size_factor
return distances
|
def silhouette_distances(
diagrams_1, diagrams_2, sampling, step_size, power=2.0, p=2.0, **kwargs
):
silhouette_1 = silhouettes(diagrams_1, sampling, power)
if np.array_equal(diagrams_1, diagrams_2):
unnorm_dist = squareform(pdist(silhouette_1, "minkowski", p=p))
else:
silhouette_2 = silhouettes(diagrams_2, sampling, power)
unnorm_dist = cdist(silhouette_1, silhouette_2, "minkowski", p=p)
return (step_size ** (1 / p)) * unnorm_dist
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def betti_amplitudes(diagrams, sampling, step_size, p=2.0, **kwargs):
step_size_factor = step_size ** (1 / p)
bcs = betti_curves(diagrams, sampling)
amplitudes = np.linalg.norm(bcs, axis=1, ord=p)
amplitudes *= step_size_factor
return amplitudes
|
def betti_amplitudes(diagrams, sampling, step_size, p=2.0, **kwargs):
bcs = betti_curves(diagrams, sampling)
return (step_size ** (1 / p)) * np.linalg.norm(bcs, axis=1, ord=p)
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def landscape_amplitudes(diagrams, sampling, step_size, p=2.0, n_layers=1, **kwargs):
step_size_factor = step_size ** (1 / p)
ls = landscapes(diagrams, sampling, n_layers).reshape(len(diagrams), -1)
amplitudes = np.linalg.norm(ls, axis=1, ord=p)
amplitudes *= step_size_factor
return amplitudes
|
def landscape_amplitudes(diagrams, sampling, step_size, p=2.0, n_layers=1, **kwargs):
ls = landscapes(diagrams, sampling, n_layers).reshape(len(diagrams), -1)
return (step_size ** (1 / p)) * np.linalg.norm(ls, axis=1, ord=p)
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def heat_amplitudes(diagrams, sampling, step_size, sigma=0.1, p=2.0, **kwargs):
# WARNING: `heats` modifies `diagrams` in place
step_size_factor = step_size ** (2 / p)
heats_ = heats(diagrams, sampling, step_size, sigma).reshape(len(diagrams), -1)
amplitudes = np.linalg.norm(heats_, axis=1, ord=p)
amplitudes *= step_size_factor
return amplitudes
|
def heat_amplitudes(diagrams, sampling, step_size, sigma=1.0, p=2.0, **kwargs):
heat = heats(diagrams, sampling, step_size, sigma)
return np.linalg.norm(heat, axis=(1, 2), ord=p)
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def persistence_image_amplitudes(
diagrams,
sampling,
step_size,
sigma=0.1,
weight_function=np.ones_like,
p=2.0,
**kwargs,
):
# For persistence images, `sampling` is a tall matrix with two columns
# (the first for birth and the second for persistence), and `step_size` is
# a 2d array
weights = weight_function(sampling[:, 1])
step_sizes_factor = np.product(step_size) ** (1 / p)
# WARNING: `persistence_images` modifies `diagrams` in place
persistence_images_ = persistence_images(
diagrams, sampling, step_size, sigma, weights
).reshape(len(diagrams), -1)
amplitudes = np.linalg.norm(persistence_images_, axis=1, ord=p)
amplitudes *= step_sizes_factor
return amplitudes
|
def persistence_image_amplitudes(
diagrams,
sampling,
step_size,
weight_function=lambda x: x,
sigma=1.0,
p=2.0,
**kwargs,
):
persistence_image = persistence_images(
diagrams, sampling, step_size, weight_function, sigma
)
return np.linalg.norm(persistence_image, axis=(1, 2), ord=p)
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def silhouette_amplitudes(diagrams, sampling, step_size, power=1.0, p=2.0, **kwargs):
step_size_factor = step_size ** (1 / p)
silhouettes_ = silhouettes(diagrams, sampling, power)
amplitudes = np.linalg.norm(silhouettes_, axis=1, ord=p)
amplitudes *= step_size_factor
return amplitudes
|
def silhouette_amplitudes(diagrams, sampling, step_size, power=2.0, p=2.0, **kwargs):
sht = silhouettes(diagrams, sampling, power)
return (step_size ** (1 / p)) * np.linalg.norm(sht, axis=1, ord=p)
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def _subdiagrams(X, homology_dimensions, remove_dim=False):
"""For each diagram in a collection, extract the subdiagrams in a given
list of homology dimensions. It is assumed that all diagrams in X contain
the same number of points in each homology dimension."""
n_samples = len(X)
X_0 = X[0]
def _subdiagrams_single_homology_dimension(homology_dimension):
n_features_in_dim = np.sum(X_0[:, 2] == homology_dimension)
try:
# In this case, reshape ensures copy
Xs = X[X[:, :, 2] == homology_dimension].reshape(
n_samples, n_features_in_dim, 3
)
return Xs
except ValueError as e:
if e.args[0].lower().startswith("cannot reshape array"):
raise ValueError(
f"All persistence diagrams in the collection must have "
f"the same number of birth-death-dimension triples in any "
f"given homology dimension. This is not true in homology "
f"dimension {homology_dimension}. Trivial triples for "
f"which birth = death may be added or removed to fulfill "
f"this requirement."
)
else:
raise e
if len(homology_dimensions) == 1:
Xs = _subdiagrams_single_homology_dimension(homology_dimensions[0])
else:
# np.concatenate will also create a copy
Xs = np.concatenate(
[
_subdiagrams_single_homology_dimension(dim)
for dim in homology_dimensions
],
axis=1,
)
if remove_dim:
Xs = Xs[:, :, :2]
return Xs
|
def _subdiagrams(X, homology_dimensions, remove_dim=False):
"""For each diagram in a collection, extract the subdiagrams in a given
list of homology dimensions. It is assumed that all diagrams in X contain
the same number of points in each homology dimension."""
n = len(X)
if len(homology_dimensions) == 1:
Xs = X[X[:, :, 2] == homology_dimensions[0]].reshape(n, -1, 3)
else:
Xs = np.concatenate(
[X[X[:, :, 2] == dim].reshape(n, -1, 3) for dim in homology_dimensions],
axis=1,
)
if remove_dim:
Xs = Xs[:, :, :2]
return Xs
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def _sample_image(image, diagram_pixel_coords):
# WARNING: Modifies `image` in-place
unique, counts = np.unique(diagram_pixel_coords, axis=0, return_counts=True)
unique = tuple(tuple(row) for row in unique.astype(np.int).T)
image[unique] = counts
|
def _sample_image(image, sampled_diag):
# NOTE: Modifies `image` in-place
unique, counts = np.unique(sampled_diag, axis=0, return_counts=True)
unique = tuple(tuple(row) for row in unique.astype(np.int).T)
image[unique] = counts
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def _filter(X, filtered_homology_dimensions, cutoff):
n = len(X)
homology_dimensions = sorted(np.unique(X[0, :, 2]))
unfiltered_homology_dimensions = [
dim for dim in homology_dimensions if dim not in filtered_homology_dimensions
]
if len(unfiltered_homology_dimensions) == 0:
Xuf = np.empty((n, 0, 3), dtype=X.dtype)
else:
Xuf = _subdiagrams(X, unfiltered_homology_dimensions)
# Compute a global 2D cutoff mask once
cutoff_mask = X[:, :, 1] - X[:, :, 0] > cutoff
Xf = []
for dim in filtered_homology_dimensions:
# Compute a 2D mask for persistence pairs in dimension dim
dim_mask = X[:, :, 2] == dim
# Need the indices relative to X of persistence triples in dimension
# dim surviving the cutoff
indices = np.nonzero(np.logical_and(dim_mask, cutoff_mask))
if not indices[0].size:
Xdim = np.tile([0.0, 0.0, dim], (n, 1, 1))
else:
# A unique element k is repeated N times *consecutively* in
# indices[0] iff there are exactly N valid persistence triples
# in the k-th diagram
unique, counts = np.unique(indices[0], return_counts=True)
max_n_points = np.max(counts)
# Make a global 2D array of all valid triples
X_indices = X[indices]
min_value = np.min(X_indices[:, 0]) # For padding
# Initialise the array of filtered subdiagrams in dimension m
Xdim = np.tile([min_value, min_value, dim], (n, max_n_points, 1))
# Since repeated indices in indices[0] are consecutive and we know
# the counts per unique index, we can fill the top portion of
# each 2D array entry of Xdim with the filtered triples from the
# corresponding entry of X
Xdim[indices[0], _multirange(counts)] = X_indices
Xf.append(Xdim)
Xf.append(Xuf)
Xf = np.concatenate(Xf, axis=1)
return Xf
|
def _filter(X, filtered_homology_dimensions, cutoff):
n = len(X)
homology_dimensions = sorted(list(set(X[0, :, 2])))
unfiltered_homology_dimensions = [
dim for dim in homology_dimensions if dim not in filtered_homology_dimensions
]
if len(unfiltered_homology_dimensions) == 0:
Xuf = np.empty((n, 0, 3), dtype=X.dtype)
else:
Xuf = _subdiagrams(X, unfiltered_homology_dimensions)
# Compute a global 2D cutoff mask once
cutoff_mask = X[:, :, 1] - X[:, :, 0] > cutoff
Xf = []
for dim in filtered_homology_dimensions:
# Compute a 2D mask for persistence pairs in dimension dim
dim_mask = X[:, :, 2] == dim
# Need the indices relative to X of persistence triples in dimension
# dim surviving the cutoff
indices = np.nonzero(np.logical_and(dim_mask, cutoff_mask))
if not indices[0].size:
Xdim = np.tile([0.0, 0.0, dim], (n, 1, 1))
else:
# A unique element k is repeated N times *consecutively* in
# indices[0] iff there are exactly N valid persistence triples
# in the k-th diagram
unique, counts = np.unique(indices[0], return_counts=True)
max_n_points = np.max(counts)
# Make a global 2D array of all valid triples
X_indices = X[indices]
min_value = np.min(X_indices[:, 0]) # For padding
# Initialise the array of filtered subdiagrams in dimension m
Xdim = np.tile([min_value, min_value, dim], (n, max_n_points, 1))
# Since repeated indices in indices[0] are consecutive and we know
# the counts per unique index, we can fill the top portion of
# each 2D array entry of Xdim with the filtered triples from the
# corresponding entry of X
Xdim[indices[0], _multirange(counts)] = X_indices
Xf.append(Xdim)
Xf.append(Xuf)
Xf = np.concatenate(Xf, axis=1)
return Xf
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def _bin(X, metric, n_bins=100, homology_dimensions=None, **kw_args):
if homology_dimensions is None:
homology_dimensions = sorted(np.unique(X[0, :, 2]))
# For some vectorizations, we force the values to be the same + widest
sub_diags = {
dim: _subdiagrams(X, [dim], remove_dim=True) for dim in homology_dimensions
}
# For persistence images, move into birth-persistence
if metric == "persistence_image":
for dim in homology_dimensions:
sub_diags[dim][:, :, [1]] = (
sub_diags[dim][:, :, [1]] - sub_diags[dim][:, :, [0]]
)
min_vals = {dim: np.min(sub_diags[dim], axis=(0, 1)) for dim in homology_dimensions}
max_vals = {dim: np.max(sub_diags[dim], axis=(0, 1)) for dim in homology_dimensions}
if metric in ["landscape", "betti", "heat", "silhouette"]:
# Taking the min(resp. max) of a tuple `m` amounts to extracting
# the birth (resp. death) value
min_vals = {d: np.array(2 * [np.min(m)]) for d, m in min_vals.items()}
max_vals = {d: np.array(2 * [np.max(m)]) for d, m in max_vals.items()}
# Scales between axes should be kept the same, but not between dimension
all_max_values = np.stack(list(max_vals.values()))
if len(homology_dimensions) == 1:
all_max_values = all_max_values.reshape(1, -1)
global_max_val = np.max(all_max_values, axis=0)
max_vals = {
dim: np.array(
[
max_vals[dim][k]
if (max_vals[dim][k] != min_vals[dim][k])
else global_max_val[k]
for k in range(2)
]
)
for dim in homology_dimensions
}
samplings = {}
step_sizes = {}
for dim in homology_dimensions:
samplings[dim], step_sizes[dim] = np.linspace(
min_vals[dim], max_vals[dim], retstep=True, num=n_bins
)
if metric in ["landscape", "betti", "heat", "silhouette"]:
for dim in homology_dimensions:
samplings[dim] = samplings[dim][:, [0], None]
step_sizes[dim] = step_sizes[dim][0]
return samplings, step_sizes
|
def _bin(X, metric, n_bins=100, **kw_args):
homology_dimensions = sorted(list(set(X[0, :, 2])))
# For some vectorizations, we force the values to be the same + widest
sub_diags = {
dim: _subdiagrams(X, [dim], remove_dim=True) for dim in homology_dimensions
}
# For persistence images, move into birth-persistence
if metric == "persistence_image":
for dim in homology_dimensions:
sub_diags[dim][:, :, [1]] = (
sub_diags[dim][:, :, [1]] - sub_diags[dim][:, :, [0]]
)
min_vals = {dim: np.min(sub_diags[dim], axis=(0, 1)) for dim in homology_dimensions}
max_vals = {dim: np.max(sub_diags[dim], axis=(0, 1)) for dim in homology_dimensions}
if metric in ["landscape", "betti", "heat", "silhouette"]:
# Taking the min(resp. max) of a tuple `m` amounts to extracting
# the birth (resp. death) value
min_vals = {d: np.array(2 * [np.min(m)]) for d, m in min_vals.items()}
max_vals = {d: np.array(2 * [np.max(m)]) for d, m in max_vals.items()}
# Scales between axes should be kept the same, but not between dimension
all_max_values = np.stack(list(max_vals.values()))
if len(homology_dimensions) == 1:
all_max_values = all_max_values.reshape(1, -1)
global_max_val = np.max(all_max_values, axis=0)
max_vals = {
dim: np.array(
[
max_vals[dim][k]
if (max_vals[dim][k] != min_vals[dim][k])
else global_max_val[k]
for k in range(2)
]
)
for dim in homology_dimensions
}
samplings = {}
step_sizes = {}
for dim in homology_dimensions:
samplings[dim], step_sizes[dim] = np.linspace(
min_vals[dim], max_vals[dim], retstep=True, num=n_bins
)
if metric in ["landscape", "betti", "heat", "silhouette"]:
for dim in homology_dimensions:
samplings[dim] = samplings[dim][:, [0], None]
step_sizes[dim] = step_sizes[dim][0]
return samplings, step_sizes
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and compute
:attr:`effective_metric_params`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples_fit, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of `X`.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
validate_params(self.effective_metric_params_, _AVAILABLE_METRICS[self.metric])
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
(
self.effective_metric_params_["samplings"],
self.effective_metric_params_["step_sizes"],
) = _bin(X, self.metric, **self.effective_metric_params_)
if self.metric == "persistence_image":
weight_function = self.effective_metric_params_.get("weight_function", None)
weight_function = np.ones_like if weight_function is None else weight_function
self.effective_metric_params_["weight_function"] = weight_function
self._X = X
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and compute
:attr:`effective_metric_params`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples_fit, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of `X`.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
validate_params(self.effective_metric_params_, _AVAILABLE_METRICS[self.metric])
self.homology_dimensions_ = sorted(set(X[0, :, 2]))
(
self.effective_metric_params_["samplings"],
self.effective_metric_params_["step_sizes"],
) = _bin(X, metric=self.metric, **self.effective_metric_params_)
if self.metric == "persistence_image":
self.effective_metric_params_["weights"] = _calculate_weights(
X, **self.effective_metric_params_
)
self._X = X
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of `X`.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
self._n_dimensions = len(self.homology_dimensions_)
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of `X`.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
self.homology_dimensions_ = sorted(set(X[0, :, 2]))
self._n_dimensions = len(self.homology_dimensions_)
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def __init__(self, metric="landscape", metric_params=None, order=None, n_jobs=None):
self.metric = metric
self.metric_params = metric_params
self.order = order
self.n_jobs = n_jobs
|
def __init__(self, metric="landscape", metric_params=None, order=2.0, n_jobs=None):
self.metric = metric
self.metric_params = metric_params
self.order = order
self.n_jobs = n_jobs
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and compute
:attr:`effective_metric_params`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
validate_params(
self.effective_metric_params_, _AVAILABLE_AMPLITUDE_METRICS[self.metric]
)
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
(
self.effective_metric_params_["samplings"],
self.effective_metric_params_["step_sizes"],
) = _bin(X, self.metric, **self.effective_metric_params_)
if self.metric == "persistence_image":
weight_function = self.effective_metric_params_.get("weight_function", None)
weight_function = np.ones_like if weight_function is None else weight_function
self.effective_metric_params_["weight_function"] = weight_function
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and compute
:attr:`effective_metric_params`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
validate_params(
self.effective_metric_params_, _AVAILABLE_AMPLITUDE_METRICS[self.metric]
)
self.homology_dimensions_ = sorted(set(X[0, :, 2]))
(
self.effective_metric_params_["samplings"],
self.effective_metric_params_["step_sizes"],
) = _bin(X, metric=self.metric, **self.effective_metric_params_)
if self.metric == "persistence_image":
self.effective_metric_params_["weights"] = _calculate_weights(
X, **self.effective_metric_params_
)
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and compute :attr:`scale_`.
Then, return the estimator.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
validate_params(
self.effective_metric_params_, _AVAILABLE_AMPLITUDE_METRICS[self.metric]
)
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
(
self.effective_metric_params_["samplings"],
self.effective_metric_params_["step_sizes"],
) = _bin(X, self.metric, **self.effective_metric_params_)
if self.metric == "persistence_image":
weight_function = self.effective_metric_params_.get("weight_function", None)
weight_function = np.ones_like if weight_function is None else weight_function
self.effective_metric_params_["weight_function"] = weight_function
amplitude_array = _parallel_amplitude(
X,
self.metric,
self.effective_metric_params_,
self.homology_dimensions_,
self.n_jobs,
)
self.scale_ = self.function(amplitude_array)
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and compute :attr:`scale_`.
Then, return the estimator.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.metric_params is None:
self.effective_metric_params_ = {}
else:
self.effective_metric_params_ = self.metric_params.copy()
validate_params(
self.effective_metric_params_, _AVAILABLE_AMPLITUDE_METRICS[self.metric]
)
self.homology_dimensions_ = sorted(set(X[0, :, 2]))
(
self.effective_metric_params_["samplings"],
self.effective_metric_params_["step_sizes"],
) = _bin(X, metric=self.metric, **self.effective_metric_params_)
if self.metric == "persistence_image":
self.effective_metric_params_["weights"] = _calculate_weights(
X, **self.effective_metric_params_
)
amplitude_array = _parallel_amplitude(
X,
self.metric,
self.effective_metric_params_,
self.homology_dimensions_,
self.n_jobs,
)
self.scale_ = self.function(amplitude_array)
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def transform(self, X, y=None):
"""Divide all birth and death values in `X` by :attr:`scale_`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xs : ndarray of shape (n_samples, n_features, 3)
Rescaled diagrams.
"""
check_is_fitted(self)
Xs = check_diagrams(X, copy=True)
Xs[:, :, :2] /= self.scale_
return Xs
|
def transform(self, X, y=None):
"""Divide all birth and death values in `X` by :attr:`scale_`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xs : ndarray of shape (n_samples, n_features, 3)
Rescaled diagrams.
"""
check_is_fitted(self)
Xs = check_diagrams(X)
Xs[:, :, :2] /= self.scale_
return Xs
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def inverse_transform(self, X):
"""Scale back the data to the original representation. Multiplies by
the scale found in :meth:`fit`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Data to apply the inverse transform to, c.f. :meth:`transform`.
Returns
-------
Xs : ndarray of shape (n_samples, n_features, 3)
Rescaled diagrams.
"""
check_is_fitted(self)
Xs = check_diagrams(X, copy=True)
Xs[:, :, :2] *= self.scale_
return Xs
|
def inverse_transform(self, X):
"""Scale back the data to the original representation. Multiplies by
the scale found in :meth:`fit`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Data to apply the inverse transform to, c.f. :meth:`transform`.
Returns
-------
Xs : ndarray of shape (n_samples, n_features, 3)
Rescaled diagrams.
"""
check_is_fitted(self)
Xs = check_diagrams(X)
Xs[:, :, :2] *= self.scale_
return Xs
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store relevant homology dimensions in
:attr:`homology_dimensions_`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of `X`.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters)
if self.homology_dimensions is None:
# Find the unique homology dimensions in the 3D array X passed to
# `fit` assuming that they can all be found in its zero-th entry
homology_dimensions = np.unique(X[0, :, 2])
else:
homology_dimensions = self.homology_dimensions
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(homology_dimensions)
return self
|
def fit(self, X, y=None):
"""Store relevant homology dimensions in
:attr:`homology_dimensions_`. Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of `X`.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters)
if self.homology_dimensions is None:
self.homology_dimensions_ = [int(dim) for dim in set(X[0, :, 2])]
else:
self.homology_dimensions_ = self.homology_dimensions
self.homology_dimensions_ = sorted(self.homology_dimensions_)
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, _ = _bin(
X, "betti", n_bins=self.n_bins, homology_dimensions=self.homology_dimensions_
)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
self.homology_dimensions_ = sorted(list(set(X[0, :, 2])))
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, _ = _bin(X, metric="betti", n_bins=self.n_bins)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def plot(self, Xt, sample=0, homology_dimensions=None, plotly_params=None):
"""Plot a sample from a collection of Betti curves arranged as in the
output of :meth:`transform`. Include homology in multiple dimensions.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
Collection of Betti curves, such as returned by :meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be plotted.
homology_dimensions : list, tuple or None, optional, default: ``None``
Which homology dimensions to include in the plot. ``None`` means
plotting all dimensions present in :attr:`homology_dimensions_`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
homology_dimensions_mapping = _make_homology_dimensions_mapping(
homology_dimensions, self.homology_dimensions_
)
layout_axes_common = {
"type": "linear",
"ticks": "outside",
"showline": True,
"zeroline": True,
"linewidth": 1,
"linecolor": "black",
"mirror": False,
"showexponent": "all",
"exponentformat": "e",
}
layout = {
"xaxis1": {
"title": "Filtration parameter",
"side": "bottom",
"anchor": "y1",
**layout_axes_common,
},
"yaxis1": {
"title": "Betti number",
"side": "left",
"anchor": "x1",
**layout_axes_common,
},
"plot_bgcolor": "white",
"title": f"Betti curves from diagram {sample}",
}
fig = Figure(layout=layout)
for ix, dim in homology_dimensions_mapping:
fig.add_trace(
Scatter(
x=self.samplings_[dim],
y=Xt[sample][ix],
mode="lines",
showlegend=True,
name=f"H{dim}",
)
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
def plot(self, Xt, sample=0, homology_dimensions=None, plotly_params=None):
"""Plot a sample from a collection of Betti curves arranged as in
the output of :meth:`transform`. Include homology in multiple
dimensions.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
Collection of Betti curves, such as returned by :meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be plotted.
homology_dimensions : list, tuple or None, optional, default: ``None``
Which homology dimensions to include in the plot. ``None`` means
plotting all dimensions present in :attr:`homology_dimensions_`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
if homology_dimensions is None:
_homology_dimensions = list(enumerate(self.homology_dimensions_))
else:
_homology_dimensions = []
for dim in homology_dimensions:
if dim not in self.homology_dimensions_:
raise ValueError(
f"All homology dimensions must be in "
f"self.homology_dimensions_ which is "
f"{self.homology_dimensions_}. {dim} is not."
)
else:
homology_dimensions_arr = np.array(self.homology_dimensions_)
ix = np.flatnonzero(homology_dimensions_arr == dim)[0]
_homology_dimensions.append((ix, dim))
layout = {
"xaxis1": {
"title": "Filtration parameter",
"side": "bottom",
"type": "linear",
"ticks": "outside",
"anchor": "x1",
"showline": True,
"zeroline": True,
"showexponent": "all",
"exponentformat": "e",
},
"yaxis1": {
"title": "Betti number",
"side": "left",
"type": "linear",
"ticks": "outside",
"anchor": "y1",
"showline": True,
"zeroline": True,
"showexponent": "all",
"exponentformat": "e",
},
"plot_bgcolor": "white",
"title": f"Betti curves from diagram {sample}",
}
fig = gobj.Figure(layout=layout)
fig.update_xaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
fig.update_yaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
for ix, dim in _homology_dimensions:
fig.add_trace(
gobj.Scatter(
x=self.samplings_[dim],
y=Xt[sample][ix],
mode="lines",
showlegend=True,
name=f"H{int(dim)}",
)
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, _ = _bin(
X,
"landscape",
n_bins=self.n_bins,
homology_dimensions=self.homology_dimensions_,
)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
self.homology_dimensions_ = sorted(list(set(X[0, :, 2])))
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, _ = _bin(X, metric="landscape", n_bins=self.n_bins)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def plot(self, Xt, sample=0, homology_dimensions=None, plotly_params=None):
"""Plot a sample from a collection of persistence landscapes arranged
as in the output of :meth:`transform`. Include homology in multiple
dimensions.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_layers, \
n_bins
Collection of persistence landscapes, such as returned by
:meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be plotted.
homology_dimensions : list, tuple or None, optional, default: ``None``
Homology dimensions for which the landscape should be plotted.
``None`` means plotting all dimensions present in
:attr:`homology_dimensions_`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
homology_dimensions_mapping = _make_homology_dimensions_mapping(
homology_dimensions, self.homology_dimensions_
)
layout_axes_common = {
"type": "linear",
"ticks": "outside",
"showline": True,
"zeroline": True,
"linewidth": 1,
"linecolor": "black",
"mirror": False,
"showexponent": "all",
"exponentformat": "e",
}
layout = {
"xaxis1": {"side": "bottom", "anchor": "y1", **layout_axes_common},
"yaxis1": {"side": "left", "anchor": "x1", **layout_axes_common},
"plot_bgcolor": "white",
}
Xt_sample = Xt[sample]
n_layers = Xt_sample.shape[1]
subplot_titles = [f"H{dim}" for _, dim in homology_dimensions_mapping]
fig = make_subplots(
rows=len(homology_dimensions_mapping), cols=1, subplot_titles=subplot_titles
)
has_many_homology_dim = len(homology_dimensions_mapping) - 1
for i, (inv_idx, dim) in enumerate(homology_dimensions_mapping):
hom_dim_str = f" ({subplot_titles[i]})" if has_many_homology_dim else ""
for layer in range(n_layers):
fig.add_trace(
Scatter(
x=self.samplings_[dim],
y=Xt_sample[inv_idx, layer],
mode="lines",
showlegend=True,
hoverinfo="none",
name=f"Layer {layer + 1}{hom_dim_str}",
),
row=i + 1,
col=1,
)
fig.update_layout(
title_text=f"Landscape representations of diagram {sample}", **layout.copy()
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
def plot(self, Xt, sample=0, homology_dimensions=None, plotly_params=None):
"""Plot a sample from a collection of persistence landscapes arranged
as in the output of :meth:`transform`. Include homology in multiple
dimensions.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_layers, \
n_bins
Collection of persistence landscapes, such as returned by
:meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be plotted.
homology_dimensions : list, tuple or None, optional, default: ``None``
Homology dimensions for which the landscape should be plotted.
``None`` means plotting all dimensions present in
:attr:`homology_dimensions_`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
if homology_dimensions is None:
_homology_dimensions = list(enumerate(self.homology_dimensions_))
else:
_homology_dimensions = []
for dim in homology_dimensions:
if dim not in self.homology_dimensions_:
raise ValueError(
f"All homology dimensions must be in "
f"self.homology_dimensions_ which is "
f"{self.homology_dimensions_}. {dim} is not."
)
else:
homology_dimensions_arr = np.array(self.homology_dimensions_)
ix = np.flatnonzero(homology_dimensions_arr == dim)[0]
_homology_dimensions.append((ix, dim))
layout = {
"xaxis1": {
"side": "bottom",
"type": "linear",
"ticks": "outside",
"anchor": "y1",
"showline": True,
"zeroline": True,
"showexponent": "all",
"exponentformat": "e",
},
"yaxis1": {
"side": "left",
"type": "linear",
"ticks": "outside",
"anchor": "x1",
"showline": True,
"zeroline": True,
"showexponent": "all",
"exponentformat": "e",
},
"plot_bgcolor": "white",
"title": f"Landscape representation of diagram {sample}",
}
Xt_sample = Xt[sample]
for ix, dim in _homology_dimensions:
layout_dim = layout.copy()
layout_dim["title"] = (
"Persistence landscape for homology " + "dimension {}".format(int(dim))
)
fig = gobj.Figure(layout=layout_dim)
fig.update_xaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
fig.update_yaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
n_layers = Xt_sample.shape[1]
for layer in range(n_layers):
fig.add_trace(
gobj.Scatter(
x=self.samplings_[dim],
y=Xt_sample[ix, layer],
mode="lines",
showlegend=True,
hoverinfo="none",
name=f"Layer {layer + 1}",
)
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def __init__(self, sigma=0.1, n_bins=100, n_jobs=None):
self.sigma = sigma
self.n_bins = n_bins
self.n_jobs = n_jobs
|
def __init__(self, sigma=1.0, n_bins=100, n_jobs=None):
self.sigma = sigma
self.n_bins = n_bins
self.n_jobs = n_jobs
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, self._step_size = _bin(
X, "heat", n_bins=self.n_bins, homology_dimensions=self.homology_dimensions_
)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
self.homology_dimensions_ = sorted(list(set(X[0, :, 2])))
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, self._step_size = _bin(X, metric="heat", n_bins=self.n_bins)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def plot(
self, Xt, sample=0, homology_dimension_idx=0, colorscale="blues", plotly_params=None
):
"""Plot a single channel –- corresponding to a given homology
dimension -- in a sample from a collection of heat kernel images.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Collection of multi-channel raster images, such as returned by
:meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be selected.
homology_dimension_idx : int, optional, default: ``0``
Index of the channel in the selected sample to be plotted. If `Xt`
is the result of a call to :meth:`transform` and this index is i,
the plot corresponds to the homology dimension given by the i-th
entry in :attr:`homology_dimensions_`.
colorscale : str, optional, default: ``"blues"``
Color scale to be used in the heat map. Can be anything allowed by
:class:`plotly.graph_objects.Heatmap`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"trace"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
homology_dimension = self.homology_dimensions_[homology_dimension_idx]
if homology_dimension != np.inf:
homology_dimension = int(homology_dimension)
x = self.samplings_[homology_dimension]
return plot_heatmap(
Xt[sample][homology_dimension_idx],
x=x,
y=x[::-1],
colorscale=colorscale,
origin="lower",
title=f"Heat kernel representation of diagram {sample} in "
f"homology dimension {homology_dimension}",
plotly_params=plotly_params,
)
|
def plot(
self, Xt, sample=0, homology_dimension_idx=0, colorscale="blues", plotly_params=None
):
"""Plot a single channel – corresponding to a given homology
dimension – in a sample from a collection of heat kernel images.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Collection of multi-channel raster images, such as returned by
:meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be selected.
homology_dimension_idx : int, optional, default: ``0``
Index of the channel in the selected sample to be plotted. If `Xt`
is the result of a call to :meth:`transform` and this index is i,
the plot corresponds to the homology dimension given by the i-th
entry in :attr:`homology_dimensions_`.
colorscale : str, optional, default: ``"blues"``
Color scale to be used in the heat map. Can be anything allowed by
:class:`plotly.graph_objects.Heatmap`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"trace"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
homology_dimension = self.homology_dimensions_[homology_dimension_idx]
if homology_dimension != np.inf:
homology_dimension = int(homology_dimension)
x = self.samplings_[homology_dimension]
return plot_heatmap(
Xt[sample][homology_dimension_idx],
x=x,
y=x[::-1],
colorscale=colorscale,
origin="lower",
title=f"Heat kernel representation of diagram {sample} in "
f"homology dimension {homology_dimension}",
plotly_params=plotly_params,
)
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def __init__(self, sigma=0.1, n_bins=100, weight_function=None, n_jobs=None):
self.sigma = sigma
self.n_bins = n_bins
self.weight_function = weight_function
self.n_jobs = n_jobs
|
def __init__(self, sigma=1.0, n_bins=100, weight_function=None, n_jobs=None):
self.sigma = sigma
self.n_bins = n_bins
self.weight_function = weight_function
self.n_jobs = n_jobs
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.weight_function is None:
self.effective_weight_function_ = np.ones_like
else:
self.effective_weight_function_ = self.weight_function
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, self._step_size = _bin(
X,
"persistence_image",
n_bins=self.n_bins,
homology_dimensions=self.homology_dimensions_,
)
self.weights_ = {
dim: self.effective_weight_function_(samplings_dim[:, 1])
for dim, samplings_dim in self._samplings.items()
}
self.samplings_ = {dim: s.T for dim, s in self._samplings.items()}
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
if self.weight_function is None:
self.effective_weight_function_ = identity
else:
self.effective_weight_function_ = self.weight_function
self.homology_dimensions_ = sorted(list(set(X[0, :, 2])))
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, self._step_size = _bin(
X, metric="persistence_image", n_bins=self.n_bins
)
self.samplings_ = {dim: s.transpose() for dim, s in self._samplings.items()}
self.weights_ = _calculate_weights(
X, self.effective_weight_function_, self._samplings
)
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def transform(self, X, y=None):
"""Compute multi-channel raster images from diagrams in `X` by
convolution with a Gaussian kernel.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Multi-channel raster images: one image per sample and one channel
per homology dimension seen in :meth:`fit`. Index i along axis 1
corresponds to the i-th homology dimension in
:attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X, copy=True)
Xt = Parallel(n_jobs=self.n_jobs, mmap_mode="c")(
delayed(persistence_images)(
_subdiagrams(X[s], [dim], remove_dim=True),
self._samplings[dim],
self._step_size[dim],
self.sigma,
self.weights_[dim],
)
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, len(X), self.n_bins, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
def transform(self, X, y=None):
"""Compute multi-channel raster images from diagrams in `X` by
convolution with a Gaussian kernel.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Multi-channel raster images: one image per sample and one channel
per homology dimension seen in :meth:`fit`. Index i along axis 1
corresponds to the i-th homology dimension in
:attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X, copy=True)
Xt = Parallel(n_jobs=self.n_jobs, mmap_mode="c")(
delayed(persistence_images)(
_subdiagrams(X[s], [dim], remove_dim=True),
self._samplings[dim],
self._step_size[dim],
self.weights_[dim],
self.sigma,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, len(X), self.n_bins, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def plot(
self, Xt, sample=0, homology_dimension_idx=0, colorscale="blues", plotly_params=None
):
"""Plot a single channel -– corresponding to a given homology
dimension -– in a sample from a collection of persistence images.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Collection of multi-channel raster images, such as returned by
:meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be selected.
homology_dimension_idx : int, optional, default: ``0``
Index of the channel in the selected sample to be plotted. If `Xt`
is the result of a call to :meth:`transform` and this index is i,
the plot corresponds to the homology dimension given by the i-th
entry in :attr:`homology_dimensions_`.
colorscale : str, optional, default: ``"blues"``
Color scale to be used in the heat map. Can be anything allowed by
:class:`plotly.graph_objects.Heatmap`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"trace"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
homology_dimension = self.homology_dimensions_[homology_dimension_idx]
if homology_dimension != np.inf:
homology_dimension = int(homology_dimension)
samplings_x, samplings_y = self.samplings_[homology_dimension]
return plot_heatmap(
Xt[sample][homology_dimension_idx],
x=samplings_x,
y=samplings_y[::-1],
colorscale=colorscale,
origin="lower",
title=f"Persistence image representation of diagram {sample} in "
f"homology dimension {homology_dimension}",
plotly_params=plotly_params,
)
|
def plot(
self, Xt, sample=0, homology_dimension_idx=0, colorscale="blues", plotly_params=None
):
"""Plot a single channel – corresponding to a given homology
dimension – in a sample from a collection of persistence images.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Collection of multi-channel raster images, such as returned by
:meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be selected.
homology_dimension_idx : int, optional, default: ``0``
Index of the channel in the selected sample to be plotted. If `Xt`
is the result of a call to :meth:`transform` and this index is i,
the plot corresponds to the homology dimension given by the i-th
entry in :attr:`homology_dimensions_`.
colorscale : str, optional, default: ``"blues"``
Color scale to be used in the heat map. Can be anything allowed by
:class:`plotly.graph_objects.Heatmap`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"trace"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
homology_dimension = self.homology_dimensions_[homology_dimension_idx]
if homology_dimension != np.inf:
homology_dimension = int(homology_dimension)
samplings_x, samplings_y = self.samplings_[homology_dimension]
return plot_heatmap(
Xt[sample][homology_dimension_idx],
x=samplings_x,
y=samplings_y,
colorscale=colorscale,
title=f"Persistence image representation of diagram {sample} in "
f"homology dimension {homology_dimension}",
plotly_params=plotly_params,
)
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
# Find the unique homology dimensions in the 3D array X passed to `fit`
# assuming that they can all be found in its zero-th entry
homology_dimensions_fit = np.unique(X[0, :, 2])
self.homology_dimensions_ = _homology_dimensions_to_sorted_ints(
homology_dimensions_fit
)
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, _ = _bin(
X,
"silhouette",
n_bins=self.n_bins,
homology_dimensions=self.homology_dimensions_,
)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
def fit(self, X, y=None):
"""Store all observed homology dimensions in
:attr:`homology_dimensions_` and, for each dimension separately,
store evenly sample filtration parameter values in :attr:`samplings_`.
Then, return the estimator.
This method is here to implement the usual scikit-learn API and hence
work in pipelines.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
It is important that, for each possible homology dimension, the
number of triples for which q equals that homology dimension is
constants across the entries of X.
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
self : object
"""
X = check_diagrams(X)
validate_params(self.get_params(), self._hyperparameters, exclude=["n_jobs"])
self.homology_dimensions_ = sorted(list(set(X[0, :, 2])))
self._n_dimensions = len(self.homology_dimensions_)
self._samplings, _ = _bin(X, metric="silhouette", n_bins=self.n_bins)
self.samplings_ = {dim: s.flatten() for dim, s in self._samplings.items()}
return self
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def plot(self, Xt, sample=0, homology_dimensions=None, plotly_params=None):
"""Plot a sample from a collection of silhouettes arranged as in the
output of :meth:`transform`. Include homology in multiple dimensions.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
Collection of silhouettes, such as returned by :meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be plotted.
homology_dimensions : list, tuple or None, optional, default: ``None``
Which homology dimensions to include in the plot. ``None`` means
plotting all dimensions present in :attr:`homology_dimensions_`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
homology_dimensions_mapping = _make_homology_dimensions_mapping(
homology_dimensions, self.homology_dimensions_
)
layout_axes_common = {
"type": "linear",
"ticks": "outside",
"showline": True,
"zeroline": True,
"linewidth": 1,
"linecolor": "black",
"mirror": False,
"showexponent": "all",
"exponentformat": "e",
}
layout = {
"xaxis1": {
"title": "Filtration parameter",
"side": "bottom",
"anchor": "y1",
**layout_axes_common,
},
"yaxis1": {"side": "left", "anchor": "x1", **layout_axes_common},
"plot_bgcolor": "white",
"title": f"Silhouette representation of diagram {sample}",
}
fig = Figure(layout=layout)
for ix, dim in homology_dimensions_mapping:
fig.add_trace(
Scatter(
x=self.samplings_[dim],
y=Xt[sample][ix],
mode="lines",
showlegend=True,
hoverinfo="none",
name=f"H{dim}",
)
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
def plot(self, Xt, sample=0, homology_dimensions=None, plotly_params=None):
"""Plot a sample from a collection of silhouettes arranged as in
the output of :meth:`transform`. Include homology in multiple
dimensions.
Parameters
----------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
Collection of silhouettes, such as returned by :meth:`transform`.
sample : int, optional, default: ``0``
Index of the sample in `Xt` to be plotted.
homology_dimensions : list, tuple or None, optional, default: ``None``
Which homology dimensions to include in the plot. ``None`` means
plotting all dimensions present in :attr:`homology_dimensions_`.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should
be dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Plotly figure.
"""
check_is_fitted(self)
if homology_dimensions is None:
_homology_dimensions = list(enumerate(self.homology_dimensions_))
else:
_homology_dimensions = []
for dim in homology_dimensions:
if dim not in self.homology_dimensions_:
raise ValueError(
f"All homology dimensions must be in "
f"self.homology_dimensions_ which is "
f"{self.homology_dimensions_}. {dim} is not."
)
else:
homology_dimensions_arr = np.array(self.homology_dimensions_)
ix = np.flatnonzero(homology_dimensions_arr == dim)[0]
_homology_dimensions.append((ix, dim))
layout = {
"xaxis1": {
"title": "Filtration parameter",
"side": "bottom",
"type": "linear",
"ticks": "outside",
"anchor": "x1",
"showline": True,
"zeroline": True,
"showexponent": "all",
"exponentformat": "e",
},
"yaxis1": {
"side": "left",
"type": "linear",
"ticks": "outside",
"anchor": "y1",
"showline": True,
"zeroline": True,
"showexponent": "all",
"exponentformat": "e",
},
"plot_bgcolor": "white",
"title": f"Silhouette representation of diagram {sample}",
}
fig = gobj.Figure(layout=layout)
fig.update_xaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
fig.update_yaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
for ix, dim in _homology_dimensions:
fig.add_trace(
gobj.Scatter(
x=self.samplings_[dim],
y=Xt[sample][ix],
mode="lines",
showlegend=True,
hoverinfo="none",
name=f"H{int(dim)}",
)
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def plot_diagram(diagram, homology_dimensions=None, plotly_params=None):
"""Plot a single persistence diagram.
Parameters
----------
diagram : ndarray of shape (n_points, 3)
The persistence diagram to plot, where the third dimension along axis 1
contains homology dimensions, and the first two contain (birth, death)
pairs to be used as coordinates in the two-dimensional plot.
homology_dimensions : list of int or None, optional, default: ``None``
Homology dimensions which will appear on the plot. If ``None``, all
homology dimensions which appear in `diagram` will be plotted.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should be
dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Figure representing the persistence diagram.
"""
# TODO: increase the marker size
if homology_dimensions is None:
homology_dimensions = np.unique(diagram[:, 2])
diagram_no_dims = diagram[:, :2]
max_val_display = np.max(
np.where(np.isposinf(diagram_no_dims), -np.inf, diagram_no_dims)
)
min_val_display = np.min(
np.where(np.isneginf(diagram_no_dims), np.inf, diagram_no_dims)
)
parameter_range = max_val_display - min_val_display
extra_space = 0.02 * parameter_range
min_val_display -= extra_space
max_val_display += extra_space
fig = gobj.Figure()
fig.add_trace(
gobj.Scatter(
x=[min_val_display, max_val_display],
y=[min_val_display, max_val_display],
mode="lines",
line={"dash": "dash", "width": 1, "color": "black"},
showlegend=False,
hoverinfo="none",
)
)
for dim in homology_dimensions:
name = f"H{int(dim)}" if dim != np.inf else "Any homology dimension"
subdiagram = diagram[diagram[:, 2] == dim]
diff = subdiagram[:, 1] != subdiagram[:, 0]
subdiagram = subdiagram[diff]
unique, inverse, counts = np.unique(
subdiagram, axis=0, return_inverse=True, return_counts=True
)
hovertext = [
f"{tuple(unique[unique_row_index][:2])}"
+ (
f", multiplicity: {counts[unique_row_index]}"
if counts[unique_row_index] > 1
else ""
)
for unique_row_index in inverse
]
fig.add_trace(
gobj.Scatter(
x=subdiagram[:, 0],
y=subdiagram[:, 1],
mode="markers",
hoverinfo="text",
hovertext=hovertext,
name=name,
)
)
fig.update_layout(
width=500,
height=500,
xaxis1={
"title": "Birth",
"side": "bottom",
"type": "linear",
"range": [min_val_display, max_val_display],
"autorange": False,
"ticks": "outside",
"showline": True,
"zeroline": True,
"linewidth": 1,
"linecolor": "black",
"mirror": False,
"showexponent": "all",
"exponentformat": "e",
},
yaxis1={
"title": "Death",
"side": "left",
"type": "linear",
"range": [min_val_display, max_val_display],
"autorange": False,
"scaleanchor": "x",
"scaleratio": 1,
"ticks": "outside",
"showline": True,
"zeroline": True,
"linewidth": 1,
"linecolor": "black",
"mirror": False,
"showexponent": "all",
"exponentformat": "e",
},
plot_bgcolor="white",
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
def plot_diagram(diagram, homology_dimensions=None, plotly_params=None):
"""Plot a single persistence diagram.
Parameters
----------
diagram : ndarray of shape (n_points, 3)
The persistence diagram to plot, where the third dimension along axis 1
contains homology dimensions, and the first two contain (birth, death)
pairs to be used as coordinates in the two-dimensional plot.
homology_dimensions : list of int or None, optional, default: ``None``
Homology dimensions which will appear on the plot. If ``None``, all
homology dimensions which appear in `diagram` will be plotted.
plotly_params : dict or None, optional, default: ``None``
Custom parameters to configure the plotly figure. Allowed keys are
``"traces"`` and ``"layout"``, and the corresponding values should be
dictionaries containing keyword arguments as would be fed to the
:meth:`update_traces` and :meth:`update_layout` methods of
:class:`plotly.graph_objects.Figure`.
Returns
-------
fig : :class:`plotly.graph_objects.Figure` object
Figure representing the persistence diagram.
"""
# TODO: increase the marker size
if homology_dimensions is None:
homology_dimensions = np.unique(diagram[:, 2])
diagram_no_dims = diagram[:, :2]
max_val_display = np.max(
np.where(np.isposinf(diagram_no_dims), -np.inf, diagram_no_dims)
)
min_val_display = np.min(
np.where(np.isneginf(diagram_no_dims), np.inf, diagram_no_dims)
)
parameter_range = max_val_display - min_val_display
extra_space = 0.02 * parameter_range
min_val_display -= extra_space
max_val_display += extra_space
fig = gobj.Figure()
fig.add_trace(
gobj.Scatter(
x=[min_val_display, max_val_display],
y=[min_val_display, max_val_display],
mode="lines",
line={"dash": "dash", "width": 1, "color": "black"},
showlegend=False,
hoverinfo="none",
)
)
for dim in homology_dimensions:
name = f"H{int(dim)}" if dim != np.inf else "Any homology dimension"
subdiagram = diagram[diagram[:, 2] == dim]
diff = subdiagram[:, 1] != subdiagram[:, 0]
subdiagram = subdiagram[diff]
fig.add_trace(
gobj.Scatter(
x=subdiagram[:, 0], y=subdiagram[:, 1], mode="markers", name=name
)
)
fig.update_layout(
width=500,
height=500,
xaxis1={
"title": "Birth",
"side": "bottom",
"type": "linear",
"range": [min_val_display, max_val_display],
"autorange": False,
"ticks": "outside",
"showline": True,
"zeroline": True,
"linewidth": 1,
"linecolor": "black",
"mirror": False,
"showexponent": "all",
"exponentformat": "e",
},
yaxis1={
"title": "Death",
"side": "left",
"type": "linear",
"range": [min_val_display, max_val_display],
"autorange": False,
"scaleanchor": "x",
"scaleratio": 1,
"ticks": "outside",
"showline": True,
"zeroline": True,
"linewidth": 1,
"linecolor": "black",
"mirror": False,
"showexponent": "all",
"exponentformat": "e",
},
plot_bgcolor="white",
)
# Update traces and layout according to user input
if plotly_params:
fig.update_traces(plotly_params.get("traces", None))
fig.update_layout(plotly_params.get("layout", None))
return fig
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def check_diagrams(X, copy=False):
"""Input validation for collections of persistence diagrams.
Basic type and sanity checks are run on the input collection and the
array is converted to float type before returning. In particular,
the input is checked to be an ndarray of shape ``(n_samples, n_points,
3)``.
Parameters
----------
X : object
Input object to check/convert.
copy : bool, optional, default: ``False``
Whether a forced copy should be triggered.
Returns
-------
X_validated : ndarray of shape (n_samples, n_points, 3)
The converted and validated array of persistence diagrams.
"""
X_array = np.asarray(X)
if X_array.ndim == 0:
raise ValueError(
f"Expected 3D array, got scalar array instead:\narray={X_array}."
)
if X_array.ndim != 3:
raise ValueError(f"Input should be a 3D ndarray, the shape is {X_array.shape}.")
if X_array.shape[2] != 3:
raise ValueError(
f"Input should be a 3D ndarray with a 3rd dimension of 3 "
f"components, but there are {X_array.shape[2]} components."
)
X_array = X_array.astype(float, copy=False)
homology_dimensions = sorted(np.unique(X_array[0, :, 2]))
for dim in homology_dimensions:
if dim == np.inf:
if len(homology_dimensions) != 1:
raise ValueError(
f"np.inf is a valid homology dimension for a stacked "
f"diagram but it should be the only one: "
f"homology_dimensions = {homology_dimensions}."
)
else:
if dim != int(dim):
raise ValueError(
f"All homology dimensions should be integer valued: "
f"{dim} can't be cast to an int of the same value."
)
if dim != np.abs(dim):
raise ValueError(
f"All homology dimensions should be integer valued: "
f"{dim} can't be cast to an int of the same value."
)
n_points_above_diag = np.sum(X_array[:, :, 1] >= X_array[:, :, 0])
n_points_global = X_array.shape[0] * X_array.shape[1]
if n_points_above_diag != n_points_global:
raise ValueError(
f"All points of all persistence diagrams should be above the "
f"diagonal, i.e. X[:,:,1] >= X[:,:,0]. "
f"{n_points_global - n_points_above_diag} points are under the "
f"diagonal."
)
if copy:
X_array = np.copy(X_array)
return X_array
|
def check_diagrams(X, copy=False):
"""Input validation for collections of persistence diagrams.
Basic type and sanity checks are run on the input collection and the
array is converted to float type before returning. In particular,
the input is checked to be an ndarray of shape ``(n_samples, n_points,
3)``.
Parameters
----------
X : object
Input object to check/convert.
copy : bool, optional, default: ``False``
Whether a forced copy should be triggered.
Returns
-------
X_validated : ndarray of shape (n_samples, n_points, 3)
The converted and validated array of persistence diagrams.
"""
X_array = np.asarray(X)
if X_array.ndim == 0:
raise ValueError(
f"Expected 3D array, got scalar array instead:\narray={X_array}."
)
if X_array.ndim != 3:
raise ValueError(f"Input should be a 3D ndarray, the shape is {X_array.shape}.")
if X_array.shape[2] != 3:
raise ValueError(
f"Input should be a 3D ndarray with a 3rd dimension of 3 "
f"components, but there are {X_array.shape[2]} components."
)
X_array = X_array.astype(float, copy=False)
homology_dimensions = sorted(list(set(X_array[0, :, 2])))
for dim in homology_dimensions:
if dim == np.inf:
if len(homology_dimensions) != 1:
raise ValueError(
f"np.inf is a valid homology dimension for a stacked "
f"diagram but it should be the only one: "
f"homology_dimensions = {homology_dimensions}."
)
else:
if dim != int(dim):
raise ValueError(
f"All homology dimensions should be integer valued: "
f"{dim} can't be cast to an int of the same value."
)
if dim != np.abs(dim):
raise ValueError(
f"All homology dimensions should be integer valued: "
f"{dim} can't be cast to an int of the same value."
)
n_points_above_diag = np.sum(X_array[:, :, 1] >= X_array[:, :, 0])
n_points_global = X_array.shape[0] * X_array.shape[1]
if n_points_above_diag != n_points_global:
raise ValueError(
f"All points of all persistence diagrams should be above the "
f"diagonal, i.e. X[:,:,1] >= X[:,:,0]. "
f"{n_points_global - n_points_above_diag} points are under the "
f"diagonal."
)
if copy:
X_array = np.copy(X_array)
return X_array
|
https://github.com/giotto-ai/giotto-tda/issues/438
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-5-5e233492b878> in <module>
----> 1 Amplitude(metric="persistence_image").fit_transform(Xd)
~\Workspace\giotto-tda_ulupo\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\envs\gtda-ulupo-py38\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
688 if y is None:
689 # fit method of arity 1 (unsupervised transformation)
--> 690 return self.fit(X, **fit_params).transform(X)
691 else:
692 # fit method of arity 2 (supervised transformation)
~\Workspace\giotto-tda_ulupo\gtda\diagrams\features.py in fit(self, X, y)
273 if self.metric == 'persistence_image':
274 self.effective_metric_params_['weights'] = \
--> 275 _calculate_weights(X, **self.effective_metric_params_)
276
277 return self
TypeError: _calculate_weights() missing 1 required positional argument: 'weight_function'
|
TypeError
|
def _heat(image, sampled_diag, sigma):
_sample_image(image, sampled_diag)
image[:] = gaussian_filter(image, sigma, mode="reflect")
|
def _heat(image, sampled_diag, sigma):
_sample_image(image, sampled_diag) # modifies `heat` inplace
image[:] = gaussian_filter(image, sigma, mode="reflect")
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def _parallel_pairwise(X1, X2, metric, metric_params, homology_dimensions, n_jobs):
metric_func = implemented_metric_recipes[metric]
effective_metric_params = metric_params.copy()
none_dict = {dim: None for dim in homology_dimensions}
samplings = effective_metric_params.pop("samplings", none_dict)
step_sizes = effective_metric_params.pop("step_sizes", none_dict)
if X2 is None:
X2 = X1
n_columns = len(X2)
distance_matrices = Parallel(n_jobs=n_jobs)(
delayed(metric_func)(
_subdiagrams(X1, [dim], remove_dim=True),
_subdiagrams(X2[s], [dim], remove_dim=True),
sampling=samplings[dim],
step_size=step_sizes[dim],
**effective_metric_params,
)
for dim in homology_dimensions
for s in gen_even_slices(n_columns, effective_n_jobs(n_jobs))
)
distance_matrices = np.concatenate(distance_matrices, axis=1)
distance_matrices = np.stack(
[
distance_matrices[:, i * n_columns : (i + 1) * n_columns]
for i in range(len(homology_dimensions))
],
axis=2,
)
return distance_matrices
|
def _parallel_pairwise(X1, X2, metric, metric_params, homology_dimensions, n_jobs):
metric_func = implemented_metric_recipes[metric]
effective_metric_params = metric_params.copy()
none_dict = {dim: None for dim in homology_dimensions}
samplings = effective_metric_params.pop("samplings", none_dict)
step_sizes = effective_metric_params.pop("step_sizes", none_dict)
if X2 is None:
X2 = X1
distance_matrices = Parallel(n_jobs=n_jobs)(
delayed(metric_func)(
_subdiagrams(X1, [dim], remove_dim=True),
_subdiagrams(X2[s], [dim], remove_dim=True),
sampling=samplings[dim],
step_size=step_sizes[dim],
**effective_metric_params,
)
for dim in homology_dimensions
for s in gen_even_slices(X2.shape[0], effective_n_jobs(n_jobs))
)
distance_matrices = np.concatenate(distance_matrices, axis=1)
distance_matrices = np.stack(
[
distance_matrices[:, i * X2.shape[0] : (i + 1) * X2.shape[0]]
for i in range(len(homology_dimensions))
],
axis=2,
)
return distance_matrices
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def _parallel_amplitude(X, metric, metric_params, homology_dimensions, n_jobs):
amplitude_func = implemented_amplitude_recipes[metric]
effective_metric_params = metric_params.copy()
none_dict = {dim: None for dim in homology_dimensions}
samplings = effective_metric_params.pop("samplings", none_dict)
step_sizes = effective_metric_params.pop("step_sizes", none_dict)
amplitude_arrays = Parallel(n_jobs=n_jobs)(
delayed(amplitude_func)(
_subdiagrams(X[s], [dim], remove_dim=True),
sampling=samplings[dim],
step_size=step_sizes[dim],
**effective_metric_params,
)
for dim in homology_dimensions
for s in gen_even_slices(_num_samples(X), effective_n_jobs(n_jobs))
)
amplitude_arrays = (
np.concatenate(amplitude_arrays).reshape(len(homology_dimensions), len(X)).T
)
return amplitude_arrays
|
def _parallel_amplitude(X, metric, metric_params, homology_dimensions, n_jobs):
amplitude_func = implemented_amplitude_recipes[metric]
effective_metric_params = metric_params.copy()
none_dict = {dim: None for dim in homology_dimensions}
samplings = effective_metric_params.pop("samplings", none_dict)
step_sizes = effective_metric_params.pop("step_sizes", none_dict)
amplitude_arrays = Parallel(n_jobs=n_jobs)(
delayed(amplitude_func)(
_subdiagrams(X, [dim], remove_dim=True)[s],
sampling=samplings[dim],
step_size=step_sizes[dim],
**effective_metric_params,
)
for dim in homology_dimensions
for s in gen_even_slices(_num_samples(X), effective_n_jobs(n_jobs))
)
amplitude_arrays = (
np.concatenate(amplitude_arrays).reshape(len(homology_dimensions), X.shape[0]).T
)
return amplitude_arrays
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def _sample_image(image, sampled_diag):
# NOTE: Modifies `image` in-place
unique, counts = np.unique(sampled_diag, axis=0, return_counts=True)
unique = tuple(tuple(row) for row in unique.astype(np.int).T)
image[unique] = counts
|
def _sample_image(image, sampled_diag):
unique, counts = np.unique(sampled_diag, axis=0, return_counts=True)
unique = tuple(tuple(row) for row in unique.astype(np.int).T)
image[unique] = counts
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def transform(self, X, y=None):
"""Compute the persistence entropies of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions)
Persistence entropies: one value per sample and per homology
dimension seen in :meth:`fit`. Index i along axis 1 corresponds
to the i-th homology dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
with np.errstate(divide="ignore", invalid="ignore"):
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(self._persistence_entropy)(_subdiagrams(X[s], [dim]))
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = np.concatenate(Xt).reshape(self._n_dimensions, X.shape[0]).T
return Xt
|
def transform(self, X, y=None):
"""Compute the persistence entropies of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions)
Persistence entropies: one value per sample and per homology
dimension seen in :meth:`fit`. Index i along axis 1 corresponds
to the i-th homology dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
with np.errstate(divide="ignore", invalid="ignore"):
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(self._persistence_entropy)(_subdiagrams(X, [dim])[s])
for dim in self.homology_dimensions_
for s in gen_even_slices(X.shape[0], effective_n_jobs(self.n_jobs))
)
Xt = np.concatenate(Xt).reshape(self._n_dimensions, X.shape[0]).T
return Xt
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def transform(self, X, y=None):
"""Compute the Betti curves of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
Betti curves: one curve (represented as a one-dimensional array
of integer values) per sample and per homology dimension seen
in :meth:`fit`. Index i along axis 1 corresponds to the i-th
homology dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(betti_curves)(
_subdiagrams(X[s], [dim], remove_dim=True), self._samplings[dim]
)
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = np.concatenate(Xt).reshape(self._n_dimensions, len(X), -1).transpose((1, 0, 2))
return Xt
|
def transform(self, X, y=None):
"""Compute the Betti curves of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
Betti curves: one curve (represented as a one-dimensional array
of integer values) per sample and per homology dimension seen
in :meth:`fit`. Index i along axis 1 corresponds to the i-th
homology dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(betti_curves)(
_subdiagrams(X, [dim], remove_dim=True)[s], self._samplings[dim]
)
for dim in self.homology_dimensions_
for s in gen_even_slices(X.shape[0], effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, X.shape[0], -1)
.transpose((1, 0, 2))
)
return Xt
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def transform(self, X, y=None):
"""Compute the persistence landscapes of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, \
n_layers, n_bins)
Persistence lanscapes: one landscape (represented as a
two-dimensional array) per sample and per homology dimension seen
in :meth:`fit`. Each landscape contains a number `n_layers` of
layers. Index i along axis 1 corresponds to the i-th homology
dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(landscapes)(
_subdiagrams(X[s], [dim], remove_dim=True),
self._samplings[dim],
self.n_layers,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, len(X), self.n_layers, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
def transform(self, X, y=None):
"""Compute the persistence landscapes of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, \
n_layers, n_bins)
Persistence lanscapes: one landscape (represented as a
two-dimensional array) per sample and per homology dimension seen
in :meth:`fit`. Each landscape contains a number `n_layers` of
layers. Index i along axis 1 corresponds to the i-th homology
dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(landscapes)(
_subdiagrams(X, [dim], remove_dim=True)[s],
self._samplings[dim],
self.n_layers,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(X.shape[0], effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, X.shape[0], self.n_layers, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def transform(self, X, y=None):
"""Compute multi-channel raster images from diagrams in `X` by
convolution with a Gaussian kernel and reflection about the diagonal.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Multi-channel raster images: one image per sample and one
channel per homology dimension seen in :meth:`fit`. Index i
along axis 1 corresponds to the i-th homology dimension in
:attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X, copy=True)
Xt = Parallel(n_jobs=self.n_jobs, mmap_mode="c")(
delayed(heats)(
_subdiagrams(X[s], [dim], remove_dim=True),
self._samplings[dim],
self._step_size[dim],
self.sigma,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, len(X), self.n_bins, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
def transform(self, X, y=None):
"""Compute multi-channel raster images from diagrams in `X` by
convolution with a Gaussian kernel and reflection about the diagonal.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Multi-channel raster images: one image per sample and one
channel per homology dimension seen in :meth:`fit`. Index i
along axis 1 corresponds to the i-th homology dimension in
:attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X, copy=True)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(heats)(
_subdiagrams(X, [dim], remove_dim=True)[s],
self._samplings[dim],
self._step_size[dim],
self.sigma,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(X.shape[0], effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, X.shape[0], self.n_bins, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def transform(self, X, y=None):
"""Compute multi-channel raster images from diagrams in `X` by
convolution with a Gaussian kernel.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Multi-channel raster images: one image per sample and one channel
per homology dimension seen in :meth:`fit`. Index i along axis 1
corresponds to the i-th homology dimension in
:attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X, copy=True)
Xt = Parallel(n_jobs=self.n_jobs, mmap_mode="c")(
delayed(persistence_images)(
_subdiagrams(X[s], [dim], remove_dim=True),
self._samplings[dim],
self._step_size[dim],
self.weights_[dim],
self.sigma,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, len(X), self.n_bins, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
def transform(self, X, y=None):
"""Compute multi-channel raster images from diagrams in `X` by
convolution with a Gaussian kernel.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins, \
n_bins)
Multi-channel raster images: one image per sample and one channel
per homology dimension seen in :meth:`fit`. Index i along axis 1
corresponds to the i-th homology dimension in
:attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X, copy=True)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(persistence_images)(
_subdiagrams(X, [dim], remove_dim=True)[s],
self._samplings[dim],
self._step_size[dim],
self.weights_[dim],
self.sigma,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(X.shape[0], effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, X.shape[0], self.n_bins, self.n_bins)
.transpose((1, 0, 2, 3))
)
return Xt
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def transform(self, X, y=None):
"""Compute silhouettes of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
One silhouette (represented as a one-dimensional array)
per sample and per homology dimension seen
in :meth:`fit`. Index i along axis 1 corresponds to the i-th
homology dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(silhouettes)(
_subdiagrams(X[s], [dim], remove_dim=True),
self._samplings[dim],
power=self.power,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(len(X), effective_n_jobs(self.n_jobs))
)
Xt = np.concatenate(Xt).reshape(self._n_dimensions, len(X), -1).transpose((1, 0, 2))
return Xt
|
def transform(self, X, y=None):
"""Compute silhouettes of diagrams in `X`.
Parameters
----------
X : ndarray of shape (n_samples, n_features, 3)
Input data. Array of persistence diagrams, each a collection of
triples [b, d, q] representing persistent topological features
through their birth (b), death (d) and homology dimension (q).
y : None
There is no need for a target in a transformer, yet the pipeline
API requires this parameter.
Returns
-------
Xt : ndarray of shape (n_samples, n_homology_dimensions, n_bins)
One silhouette (represented as a one-dimensional array)
per sample and per homology dimension seen
in :meth:`fit`. Index i along axis 1 corresponds to the i-th
homology dimension in :attr:`homology_dimensions_`.
"""
check_is_fitted(self)
X = check_diagrams(X)
Xt = Parallel(n_jobs=self.n_jobs)(
delayed(silhouettes)(
_subdiagrams(X, [dim], remove_dim=True)[s],
self._samplings[dim],
power=self.power,
)
for dim in self.homology_dimensions_
for s in gen_even_slices(X.shape[0], effective_n_jobs(self.n_jobs))
)
Xt = (
np.concatenate(Xt)
.reshape(self._n_dimensions, X.shape[0], -1)
.transpose((1, 0, 2))
)
return Xt
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def plot_diagram(diagram, homology_dimensions=None, **input_layout):
"""Plot a single persistence diagram.
Parameters
----------
diagram : ndarray of shape (n_points, 3)
The persistence diagram to plot, where the third dimension along axis 1
contains homology dimensions, and the first two contain (birth, death)
pairs to be used as coordinates in the two-dimensional plot.
homology_dimensions : list of int or None, optional, default: ``None``
Homology dimensions which will appear on the plot. If ``None``, all
homology dimensions which appear in `diagram` will be plotted.
"""
# TODO: increase the marker size
if homology_dimensions is None:
homology_dimensions = np.unique(diagram[:, 2])
max_filt_param = np.where(np.isinf(diagram), -np.inf, diagram).max()
layout = dict(
width=500,
height=500,
xaxis1=dict(
title="Birth",
side="bottom",
type="linear",
range=[0, 1.1 * max_filt_param],
ticks="outside",
anchor="y1",
showline=True,
zeroline=True,
showexponent="all",
exponentformat="e",
),
yaxis1=dict(
title="Death",
side="left",
type="linear",
range=[0, 1.1 * max_filt_param],
ticks="outside",
anchor="x1",
showline=True,
zeroline=True,
showexponent="all",
exponentformat="e",
),
plot_bgcolor="white",
)
layout.update(input_layout)
fig = gobj.Figure(layout=layout)
fig.update_xaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
fig.update_yaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
fig.add_trace(
gobj.Scatter(
x=np.array([-100 * max_filt_param, 100 * max_filt_param]),
y=np.array([-100 * max_filt_param, 100 * max_filt_param]),
mode="lines",
line=dict(dash="dash", width=1, color="black"),
showlegend=False,
hoverinfo="none",
)
)
for dim in homology_dimensions:
name = f"H{int(dim)}" if dim != np.inf else "Any homology dimension"
subdiagram = diagram[diagram[:, 2] == dim]
diff = subdiagram[:, 1] != subdiagram[:, 0]
subdiagram = subdiagram[diff]
fig.add_trace(
gobj.Scatter(
x=subdiagram[:, 0], y=subdiagram[:, 1], mode="markers", name=name
)
)
fig.show()
|
def plot_diagram(diagram, homology_dimensions=None, **input_layout):
"""Plot a single persistence diagram.
Parameters
----------
diagram : ndarray of shape (n_points, 3)
The persistence diagram to plot, where the third dimension along axis 1
contains homology dimensions, and the first two contain (birth, death)
pairs to be used as coordinates in the two-dimensional plot.
homology_dimensions : list of int or None, optional, default: ``None``
Homology dimensions which will appear on the plot. If ``None``, all
homology dimensions which appear in `diagram` will be plotted.
"""
from ..diagrams._utils import _subdiagrams # To avoid circular imports
# TODO: increase the marker size
if homology_dimensions is None:
homology_dimensions = np.unique(diagram[:, 2])
max_filt_param = np.where(np.isinf(diagram), -np.inf, diagram).max()
layout = dict(
width=500,
height=500,
xaxis1=dict(
title="Birth",
side="bottom",
type="linear",
range=[0, 1.1 * max_filt_param],
ticks="outside",
anchor="y1",
showline=True,
zeroline=True,
showexponent="all",
exponentformat="e",
),
yaxis1=dict(
title="Death",
side="left",
type="linear",
range=[0, 1.1 * max_filt_param],
ticks="outside",
anchor="x1",
showline=True,
zeroline=True,
showexponent="all",
exponentformat="e",
),
plot_bgcolor="white",
)
layout.update(input_layout)
fig = gobj.Figure(layout=layout)
fig.update_xaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
fig.update_yaxes(zeroline=True, linewidth=1, linecolor="black", mirror=False)
fig.add_trace(
gobj.Scatter(
x=np.array([-100 * max_filt_param, 100 * max_filt_param]),
y=np.array([-100 * max_filt_param, 100 * max_filt_param]),
mode="lines",
line=dict(dash="dash", width=1, color="black"),
showlegend=False,
hoverinfo="none",
)
)
for dim in homology_dimensions:
name = f"H{int(dim)}" if dim != np.inf else "Any homology dimension"
subdiagram = _subdiagrams(np.asarray([diagram]), [dim], remove_dim=True)[0]
diff = subdiagram[:, 1] != subdiagram[:, 0]
subdiagram = subdiagram[diff]
fig.add_trace(
gobj.Scatter(
x=subdiagram[:, 0], y=subdiagram[:, 1], mode="markers", name=name
)
)
fig.show()
|
https://github.com/giotto-ai/giotto-tda/issues/427
|
---------------------------------------------------------------------------
_RemoteTraceback Traceback (most recent call last)
_RemoteTraceback:
"""
Traceback (most recent call last):
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 418, in _process_worker
r = call_item()
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\externals\loky\process_executor.py", line 272, in __call__
return self.fn(*self.args, **self.kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\_parallel_backends.py", line 567, in __call__
return self.func(*args, **kwargs)
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in __call__
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\joblib\parallel.py", line 225, in <listcomp>
for func, args, kwargs in self.items]
File "C:\Users\nicho\Anaconda3\lib\site-packages\gtda\diagrams\_metrics.py", line 86, in heats
diagrams[diagrams < sampling[0, 0]] = sampling[0, 0]
ValueError: assignment destination is read-only
"""
The above exception was the direct cause of the following exception:
ValueError Traceback (most recent call last)
<ipython-input-132-73d0b2c011fe> in <module>
7 hk = HeatKernel(sigma=1, n_bins=n_bins, n_jobs=2)
8 num_dimensions = 1
----> 9 x_t = hk.fit_transform(diagrams)
~\Anaconda3\lib\site-packages\gtda\utils\_docs.py in fit_transform_wrapper(*args, **kwargs)
104 @wraps(original_fit_transform)
105 def fit_transform_wrapper(*args, **kwargs):
--> 106 return original_fit_transform(*args, **kwargs)
107 fit_transform_wrapper.__doc__ = \
108 make_fit_transform_docs(fit_docs, transform_docs)
~\Anaconda3\lib\site-packages\sklearn\base.py in fit_transform(self, X, y, **fit_params)
569 if y is None:
570 # fit method of arity 1 (unsupervised transformation)
--> 571 return self.fit(X, **fit_params).transform(X)
572 else:
573 # fit method of arity 2 (supervised transformation)
~\Anaconda3\lib\site-packages\gtda\diagrams\representations.py in transform(self, X, y)
591 heats)(_subdiagrams(X, [dim], remove_dim=True)[s],
592 self._samplings[dim], self._step_size[dim], self.sigma)
--> 593 for dim in self.homology_dimensions_
594 for s in gen_even_slices(X.shape[0],
595 effective_n_jobs(self.n_jobs)))
~\Anaconda3\lib\site-packages\joblib\parallel.py in __call__(self, iterable)
932
933 with self._backend.retrieval_context():
--> 934 self.retrieve()
935 # Make sure that we get a last message telling us we are done
936 elapsed_time = time.time() - self._start_time
~\Anaconda3\lib\site-packages\joblib\parallel.py in retrieve(self)
831 try:
832 if getattr(self._backend, 'supports_timeout', False):
--> 833 self._output.extend(job.get(timeout=self.timeout))
834 else:
835 self._output.extend(job.get())
~\Anaconda3\lib\site-packages\joblib\_parallel_backends.py in wrap_future_result(future, timeout)
519 AsyncResults.get from multiprocessing."""
520 try:
--> 521 return future.result(timeout=timeout)
522 except LokyTimeoutError:
523 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in result(self, timeout)
430 raise CancelledError()
431 elif self._state == FINISHED:
--> 432 return self.__get_result()
433 else:
434 raise TimeoutError()
~\Anaconda3\lib\concurrent\futures\_base.py in __get_result(self)
382 def __get_result(self):
383 if self._exception:
--> 384 raise self._exception
385 else:
386 return self._result
ValueError: assignment destination is read-only
|
ValueError
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]:
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
try:
frame_info = inspect.getframeinfo(frame)
# raise(IndexError) # TODO: Remove, only for testing
except Exception:
# Separate from the print below to guarantee we see at least this much.
print(
"EDMCLogging:EDMCContextFilter:caller_attributes(): Failed in `inspect.getframinfo(frame)`"
)
# We want to *attempt* to show something about the nature of 'frame',
# but at this point we can't trust it will work.
try:
print(f"frame: {frame}")
except Exception:
pass
# We've given up, so just return all '??' to signal we couldn't get the info
return "??", "??", "??"
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
name = frame_info.function
if name.startswith("__") and not name.endswith("__"):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = inspect.getattr_static(frame_class, name, None)
if fn is None:
# For some reason getattr_static cant grab this. Try and grab it with getattr, bail out
# if we get a RecursionError indicating a property
try:
fn = getattr(frame_class, name, None)
except RecursionError:
print(
"EDMCLogging:EDMCContextFilter:caller_attributes():"
"Failed to get attribute for function info. Bailing out"
)
return "??", "??", "??"
if fn is not None:
if isinstance(fn, property):
class_name = str(frame_class)
# If somehow you make your __class__ or __class__.__qualname__ recursive, I'll be impressed.
if hasattr(frame_class, "__class__") and hasattr(
frame_class.__class__, "__qualname__"
):
class_name = frame_class.__class__.__qualname__
caller_qualname = f"{class_name}.{name}(property)"
else:
caller_qualname = f"<property {name} on {class_name}>"
elif fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]:
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
try:
frame_info = inspect.getframeinfo(frame)
# raise(IndexError) # TODO: Remove, only for testing
except Exception:
# Separate from the print below to guarantee we see at least this much.
print(
"EDMCLogging:EDMCContextFilter:caller_attributes(): Failed in `inspect.getframinfo(frame)`"
)
# We want to *attempt* to show something about the nature of 'frame',
# but at this point we can't trust it will work.
try:
print(f"frame: {frame}")
except Exception:
pass
# We've given up, so just return all '??' to signal we couldn't get the info
return "??", "??", "??"
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
name = frame_info.function
if name.startswith("__") and not name.endswith("__"):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = getattr(frame_class, name, None)
if fn and fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
https://github.com/EDCD/EDMarketConnector/issues/808
|
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1514, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1523, in handle
if (not self.disabled) and self.filter(record):
File "/usr/local/lib/python3.7/logging/__init__.py", line 751, in filter
result = f.filter(record)
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 255, in filter
(class_name, qualname, module_name) = self.caller_attributes(module_name=getattr(record, 'module'))
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 317, in caller_attributes
fn = getattr(frame_class, name, None)
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1513, in _log
exc_info, func, extra, sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1483, in makeRecord
sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 316, in __init__
self.filename = os.path.basename(pathname)
File "/usr/local/lib/python3.7/posixpath.py", line 147, in basename
sep = _get_sep(p)
File "/usr/local/lib/python3.7/posixpath.py", line 42, in _get_sep
if isinstance(path, bytes):
RecursionError: maximum recursion depth exceeded while calling a Python object
|
RecursionError
|
def __init__(self):
logger.debug("A call from A.B.__init__")
self.__test()
_ = self.test_prop
|
def __init__(self):
logger.debug("A call from A.B.__init__")
self.__test()
|
https://github.com/EDCD/EDMarketConnector/issues/808
|
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1514, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1523, in handle
if (not self.disabled) and self.filter(record):
File "/usr/local/lib/python3.7/logging/__init__.py", line 751, in filter
result = f.filter(record)
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 255, in filter
(class_name, qualname, module_name) = self.caller_attributes(module_name=getattr(record, 'module'))
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 317, in caller_attributes
fn = getattr(frame_class, name, None)
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1513, in _log
exc_info, func, extra, sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1483, in makeRecord
sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 316, in __init__
self.filename = os.path.basename(pathname)
File "/usr/local/lib/python3.7/posixpath.py", line 147, in basename
sep = _get_sep(p)
File "/usr/local/lib/python3.7/posixpath.py", line 42, in _get_sep
if isinstance(path, bytes):
RecursionError: maximum recursion depth exceeded while calling a Python object
|
RecursionError
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]: # noqa: CCR001, E501 # this is as refactored as is sensible
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
name = frame_info.function
if name.startswith("__") and not name.endswith("__"):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = inspect.getattr_static(frame_class, name, None)
if fn is None:
# For some reason getattr_static cant grab this. Try and grab it with getattr, bail out
# if we get a RecursionError indicating a property
try:
fn = getattr(frame_class, name, None)
except RecursionError:
print(
"EDMCLogging:EDMCContextFilter:caller_attributes():"
"Failed to get attribute for function info. Bailing out"
)
return "??", "??", "??"
if fn is not None:
if isinstance(fn, property):
class_name = str(frame_class)
# If somehow you make your __class__ or __class__.__qualname__ recursive, I'll be impressed.
if hasattr(frame_class, "__class__") and hasattr(
frame_class.__class__, "__qualname__"
):
class_name = frame_class.__class__.__qualname__
caller_qualname = f"{class_name}.{name}(property)"
else:
caller_qualname = f"<property {name} on {class_name}>"
elif fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]: # noqa: CCR001, E501 # this is as refactored as is sensible
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
name = frame_info.function
if name.startswith("__") and not name.endswith("__"):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = getattr(frame_class, name, None)
if fn and fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
https://github.com/EDCD/EDMarketConnector/issues/808
|
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1514, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1523, in handle
if (not self.disabled) and self.filter(record):
File "/usr/local/lib/python3.7/logging/__init__.py", line 751, in filter
result = f.filter(record)
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 255, in filter
(class_name, qualname, module_name) = self.caller_attributes(module_name=getattr(record, 'module'))
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 317, in caller_attributes
fn = getattr(frame_class, name, None)
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1513, in _log
exc_info, func, extra, sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1483, in makeRecord
sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 316, in __init__
self.filename = os.path.basename(pathname)
File "/usr/local/lib/python3.7/posixpath.py", line 147, in basename
sep = _get_sep(p)
File "/usr/local/lib/python3.7/posixpath.py", line 42, in _get_sep
if isinstance(path, bytes):
RecursionError: maximum recursion depth exceeded while calling a Python object
|
RecursionError
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]:
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
name = frame_info.function
if name.startswith("__") and not name.endswith("__"):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = inspect.getattr_static(frame_class, name, None)
if fn is None:
# For some reason getattr_static cant grab this. Try and grab it with getattr, bail out
# if we get a RecursionError indicating a property
try:
fn = getattr(frame_class, name, None)
except RecursionError:
print(
"EDMCLogging:EDMCContextFilter:caller_attributes():"
"Failed to get attribute for function info. Bailing out"
)
return "??", "??", "??"
if fn is not None:
if isinstance(fn, property):
class_name = str(frame_class)
# If somehow you make your __class__ or __class__.__qualname__ recursive, I'll be impressed.
if hasattr(frame_class, "__class__") and hasattr(
frame_class.__class__, "__qualname__"
):
class_name = frame_class.__class__.__qualname__
caller_qualname = f"{class_name}.{name}(property)"
else:
caller_qualname = f"<property {name} on {class_name}>"
elif fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]:
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
name = frame_info.function
if name.startswith("__") and not name.endswith("__"):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = getattr(frame_class, name, None)
if fn and fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
https://github.com/EDCD/EDMarketConnector/issues/808
|
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1514, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1523, in handle
if (not self.disabled) and self.filter(record):
File "/usr/local/lib/python3.7/logging/__init__.py", line 751, in filter
result = f.filter(record)
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 255, in filter
(class_name, qualname, module_name) = self.caller_attributes(module_name=getattr(record, 'module'))
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 317, in caller_attributes
fn = getattr(frame_class, name, None)
File "/home/ash/.local/share/EDMarketConnector/plugins/edrodent/load.py", line 38, in ready_to_rat
logger.debug("ready_to_rat: %r %r %r %r %r", self.open, self.low_fuel, self.hud_in_analysis_mode, self.has_fuel_transfer_limpet_controller, self.limpets)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1366, in debug
self._log(DEBUG, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1513, in _log
exc_info, func, extra, sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1483, in makeRecord
sinfo)
File "/usr/local/lib/python3.7/logging/__init__.py", line 316, in __init__
self.filename = os.path.basename(pathname)
File "/usr/local/lib/python3.7/posixpath.py", line 147, in basename
sep = _get_sep(p)
File "/usr/local/lib/python3.7/posixpath.py", line 42, in _get_sep
if isinstance(path, bytes):
RecursionError: maximum recursion depth exceeded while calling a Python object
|
RecursionError
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]: # noqa: CCR001, E501 # this is as refactored as is sensible
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
if (name := frame_info.function).startswith("__") and not name.endswith(
"__"
):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = getattr(frame_class, name, None)
if fn and fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]: # noqa: CCR001, E501 # this is as refactored as is sensible
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class = value_dict[args[0]]
if frame_class:
# Find __qualname__ of the caller
fn = getattr(frame_class, frame_info.function)
if fn and fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
https://github.com/EDCD/EDMarketConnector/issues/764
|
Exception in thread Thread-2:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/threading.py", line 870, in run
self._target(*self._args, **self._kwargs)
File "/home/ash/.local/share/EDMarketConnector/plugins/edmcoverlay/_edmcoverlay.py", line 94, in __server
logger.info("edmcoverlay2: server running")
File "/usr/local/lib/python3.7/logging/__init__.py", line 1378, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1514, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1523, in handle
if (not self.disabled) and self.filter(record):
File "/usr/local/lib/python3.7/logging/__init__.py", line 751, in filter
result = f.filter(record)
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 255, in filter
(class_name, qualname, module_name) = self.caller_attributes(module_name=getattr(record, 'module'))
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 294, in caller_attributes
fn = getattr(frame_class, frame_info.function)
AttributeError: 'Overlay' object has no attribute '__server'
|
AttributeError
|
def __init__(self):
logger.debug("A call from A.B.__init__")
self.__test()
|
def __init__(self):
logger.debug("A call from A.B.__init__")
|
https://github.com/EDCD/EDMarketConnector/issues/764
|
Exception in thread Thread-2:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/threading.py", line 870, in run
self._target(*self._args, **self._kwargs)
File "/home/ash/.local/share/EDMarketConnector/plugins/edmcoverlay/_edmcoverlay.py", line 94, in __server
logger.info("edmcoverlay2: server running")
File "/usr/local/lib/python3.7/logging/__init__.py", line 1378, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1514, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1523, in handle
if (not self.disabled) and self.filter(record):
File "/usr/local/lib/python3.7/logging/__init__.py", line 751, in filter
result = f.filter(record)
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 255, in filter
(class_name, qualname, module_name) = self.caller_attributes(module_name=getattr(record, 'module'))
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 294, in caller_attributes
fn = getattr(frame_class, frame_info.function)
AttributeError: 'Overlay' object has no attribute '__server'
|
AttributeError
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]:
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class: "object" = value_dict[args[0]]
if frame_class:
# See https://en.wikipedia.org/wiki/Name_mangling#Python for how name mangling works.
if (name := frame_info.function).startswith("__") and not name.endswith(
"__"
):
name = f"_{frame_class.__class__.__name__}{frame_info.function}"
# Find __qualname__ of the caller
fn = getattr(frame_class, name, None)
if fn and fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
def caller_attributes(cls, module_name: str = "") -> Tuple[str, str, str]:
"""
Determine extra or changed fields for the caller.
1. qualname finds the relevant object and its __qualname__
2. caller_class_names is just the full class names of the calling
class if relevant.
3. module is munged if we detect the caller is an EDMC plugin,
whether internal or found.
"""
frame = cls.find_caller_frame()
caller_qualname = caller_class_names = ""
if frame:
# <https://stackoverflow.com/questions/2203424/python-how-to-retrieve-class-information-from-a-frame-object#2220759>
frame_info = inspect.getframeinfo(frame)
args, _, _, value_dict = inspect.getargvalues(frame)
if len(args) and args[0] in ("self", "cls"):
frame_class = value_dict[args[0]]
if frame_class:
# Find __qualname__ of the caller
fn = getattr(frame_class, frame_info.function)
if fn and fn.__qualname__:
caller_qualname = fn.__qualname__
# Find containing class name(s) of caller, if any
if frame_class.__class__ and frame_class.__class__.__qualname__:
caller_class_names = frame_class.__class__.__qualname__
# It's a call from the top level module file
elif frame_info.function == "<module>":
caller_class_names = "<none>"
caller_qualname = value_dict["__name__"]
elif frame_info.function != "":
caller_class_names = "<none>"
caller_qualname = frame_info.function
module_name = cls.munge_module_name(frame_info, module_name)
# https://docs.python.org/3.7/library/inspect.html#the-interpreter-stack
del frame
if caller_qualname == "":
print("ALERT! Something went wrong with finding caller qualname for logging!")
caller_qualname = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "qualname">'
)
if caller_class_names == "":
print(
"ALERT! Something went wrong with finding caller class name(s) for logging!"
)
caller_class_names = (
'<ERROR in EDMCLogging.caller_class_and_qualname() for "class">'
)
return caller_class_names, caller_qualname, module_name
|
https://github.com/EDCD/EDMarketConnector/issues/764
|
Exception in thread Thread-2:
Traceback (most recent call last):
File "/usr/local/lib/python3.7/threading.py", line 926, in _bootstrap_inner
self.run()
File "/usr/local/lib/python3.7/threading.py", line 870, in run
self._target(*self._args, **self._kwargs)
File "/home/ash/.local/share/EDMarketConnector/plugins/edmcoverlay/_edmcoverlay.py", line 94, in __server
logger.info("edmcoverlay2: server running")
File "/usr/local/lib/python3.7/logging/__init__.py", line 1378, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1514, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1523, in handle
if (not self.disabled) and self.filter(record):
File "/usr/local/lib/python3.7/logging/__init__.py", line 751, in filter
result = f.filter(record)
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 255, in filter
(class_name, qualname, module_name) = self.caller_attributes(module_name=getattr(record, 'module'))
File "/home/ash/src/EDMarketConnector/EDMCLogging.py", line 294, in caller_attributes
fn = getattr(frame_class, frame_info.function)
AttributeError: 'Overlay' object has no attribute '__server'
|
AttributeError
|
def export_outfitting(self, data: Mapping[str, Any], is_beta: bool) -> None:
"""
export_outfitting updates EDDN with the current (lastStarport) station's outfitting options, if any.
Once the send is complete, this.outfitting is updated with the given data.
:param data: dict containing the outfitting data
:param is_beta: whether or not we're currently in beta mode
"""
modules: Dict[str, Any] = data["lastStarport"].get("modules")
if not modules:
logger.debug("modules was None")
modules = {}
ships: Dict[str, Any] = data["lastStarport"].get("ships")
if not ships:
logger.debug("ships was None")
ships = {"shipyard_list": {}, "unavailable_list": []}
# Horizons flag - will hit at least Int_PlanetApproachSuite other than at engineer bases ("Colony"),
# prison or rescue Megaships, or under Pirate Attack etc
horizons: bool = is_horizons(
data["lastStarport"].get("economies", {}), modules, ships
)
to_search: Iterator[Mapping[str, Any]] = filter(
lambda m: self.MODULE_RE.search(m["name"])
and m.get("sku") in (None, HORIZ_SKU)
and m["name"] != "Int_PlanetApproachSuite",
modules.values(),
)
outfitting: List[str] = sorted(
self.MODULE_RE.sub(
lambda match: match.group(0).capitalize(), mod["name"].lower()
)
for mod in to_search
)
# Don't send empty modules list - schema won't allow it
if outfitting and this.outfitting != (horizons, outfitting):
self.send(
data["commander"]["name"],
{
"$schemaRef": f"https://eddn.edcd.io/schemas/outfitting/2{'/test' if is_beta else ''}",
"message": OrderedDict(
[
("timestamp", data["timestamp"]),
("systemName", data["lastSystem"]["name"]),
("stationName", data["lastStarport"]["name"]),
("marketId", data["lastStarport"]["id"]),
("horizons", horizons),
("modules", outfitting),
]
),
},
)
this.outfitting = (horizons, outfitting)
|
def export_outfitting(self, data: Mapping[str, Any], is_beta: bool) -> None:
"""
export_outfitting updates EDDN with the current (lastStarport) station's outfitting options, if any.
Once the send is complete, this.outfitting is updated with the given data.
:param data: dict containing the outfitting data
:param is_beta: whether or not we're currently in beta mode
"""
modules: Dict[str, Any] = data["lastStarport"].get("modules") or {}
# Horizons flag - will hit at least Int_PlanetApproachSuite other than at engineer bases ("Colony"),
# prison or rescue Megaships, or under Pirate Attack etc
horizons: bool = is_horizons(
data["lastStarport"].get("economies", {}),
modules,
data["lastStarport"].get(
"ships", {"shipyard_list": {}, "unavailable_list": []}
),
)
to_search: Iterator[Mapping[str, Any]] = filter(
lambda m: self.MODULE_RE.search(m["name"])
and m.get("sku") in (None, HORIZ_SKU)
and m["name"] != "Int_PlanetApproachSuite",
modules.values(),
)
outfitting: List[str] = sorted(
self.MODULE_RE.sub(
lambda match: match.group(0).capitalize(), mod["name"].lower()
)
for mod in to_search
)
# Don't send empty modules list - schema won't allow it
if outfitting and this.outfitting != (horizons, outfitting):
self.send(
data["commander"]["name"],
{
"$schemaRef": f"https://eddn.edcd.io/schemas/outfitting/2{'/test' if is_beta else ''}",
"message": OrderedDict(
[
("timestamp", data["timestamp"]),
("systemName", data["lastSystem"]["name"]),
("stationName", data["lastStarport"]["name"]),
("marketId", data["lastStarport"]["id"]),
("horizons", horizons),
("modules", outfitting),
]
),
},
)
this.outfitting = (horizons, outfitting)
|
https://github.com/EDCD/EDMarketConnector/issues/671
|
2020-08-21 13:06:28.140 - DEBUG - plugins.eddn.cmdr_data:726: Failed exporting data
Traceback (most recent call last):
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 716, in cmdr_data
this.eddn.export_shipyard(data, is_beta)
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 312, in export_shipyard
horizons: bool = is_horizons(
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 736, in is_horizons
any(module.get('sku') == HORIZ_SKU for module in modules.values()) or
AttributeError: 'list' object has no attribute 'values'
|
AttributeError
|
def export_shipyard(self, data: Dict[str, Any], is_beta: bool) -> None:
"""
export_shipyard updates EDDN with the current (lastStarport) station's outfitting options, if any.
once the send is complete, this.shipyard is updated to the new data.
:param data: dict containing the shipyard data
:param is_beta: whether or not we are in beta mode
"""
modules: Dict[str, Any] = data["lastStarport"].get("modules")
if not modules:
logger.debug("modules was None")
modules = {}
ships: Dict[str, Any] = data["lastStarport"].get("ships")
if not ships:
logger.debug("ships was None")
ships = {"shipyard_list": {}, "unavailable_list": []}
horizons: bool = is_horizons(
data["lastStarport"].get("economies", {}), modules, ships
)
shipyard: List[Mapping[str, Any]] = sorted(
itertools.chain(
(ship["name"].lower() for ship in (ships["shipyard_list"] or {}).values()),
ships["unavailable_list"],
)
)
# Don't send empty ships list - shipyard data is only guaranteed present if user has visited the shipyard.
if shipyard and this.shipyard != (horizons, shipyard):
self.send(
data["commander"]["name"],
{
"$schemaRef": f"https://eddn.edcd.io/schemas/shipyard/2{'/test' if is_beta else ''}",
"message": OrderedDict(
[
("timestamp", data["timestamp"]),
("systemName", data["lastSystem"]["name"]),
("stationName", data["lastStarport"]["name"]),
("marketId", data["lastStarport"]["id"]),
("horizons", horizons),
("ships", shipyard),
]
),
},
)
this.shipyard = (horizons, shipyard)
|
def export_shipyard(self, data: Dict[str, Any], is_beta: bool) -> None:
"""
export_shipyard updates EDDN with the current (lastStarport) station's outfitting options, if any.
once the send is complete, this.shipyard is updated to the new data.
:param data: dict containing the shipyard data
:param is_beta: whether or not we are in beta mode
"""
ships: Dict[str, Any] = data["lastStarport"].get(
"ships", {"shipyard_list": {}, "unavailable_list": []}
)
horizons: bool = is_horizons(
data["lastStarport"].get("economies", {}),
data["lastStarport"].get("modules", {}),
ships,
)
shipyard: List[Mapping[str, Any]] = sorted(
itertools.chain(
(ship["name"].lower() for ship in (ships["shipyard_list"] or {}).values()),
ships["unavailable_list"],
)
)
# Don't send empty ships list - shipyard data is only guaranteed present if user has visited the shipyard.
if shipyard and this.shipyard != (horizons, shipyard):
self.send(
data["commander"]["name"],
{
"$schemaRef": f"https://eddn.edcd.io/schemas/shipyard/2{'/test' if is_beta else ''}",
"message": OrderedDict(
[
("timestamp", data["timestamp"]),
("systemName", data["lastSystem"]["name"]),
("stationName", data["lastStarport"]["name"]),
("marketId", data["lastStarport"]["id"]),
("horizons", horizons),
("ships", shipyard),
]
),
},
)
this.shipyard = (horizons, shipyard)
|
https://github.com/EDCD/EDMarketConnector/issues/671
|
2020-08-21 13:06:28.140 - DEBUG - plugins.eddn.cmdr_data:726: Failed exporting data
Traceback (most recent call last):
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 716, in cmdr_data
this.eddn.export_shipyard(data, is_beta)
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 312, in export_shipyard
horizons: bool = is_horizons(
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 736, in is_horizons
any(module.get('sku') == HORIZ_SKU for module in modules.values()) or
AttributeError: 'list' object has no attribute 'values'
|
AttributeError
|
def is_horizons(economies: MAP_STR_ANY, modules: Dict, ships: MAP_STR_ANY) -> bool:
return (
any(economy["name"] == "Colony" for economy in economies.values())
or any(module.get("sku") == HORIZ_SKU for module in modules.values())
or any(
ship.get("sku") == HORIZ_SKU
for ship in (ships["shipyard_list"] or {}).values()
)
)
|
def is_horizons(
economies: MAP_STR_ANY, modules: MAP_STR_ANY, ships: MAP_STR_ANY
) -> bool:
return (
any(economy["name"] == "Colony" for economy in economies.values())
or any(module.get("sku") == HORIZ_SKU for module in modules.values())
or any(
ship.get("sku") == HORIZ_SKU
for ship in (ships["shipyard_list"] or {}).values()
)
)
|
https://github.com/EDCD/EDMarketConnector/issues/671
|
2020-08-21 13:06:28.140 - DEBUG - plugins.eddn.cmdr_data:726: Failed exporting data
Traceback (most recent call last):
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 716, in cmdr_data
this.eddn.export_shipyard(data, is_beta)
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 312, in export_shipyard
horizons: bool = is_horizons(
File "/home/ad/development/python/EDMarketConnector/plugins/eddn.py", line 736, in is_horizons
any(module.get('sku') == HORIZ_SKU for module in modules.values()) or
AttributeError: 'list' object has no attribute 'values'
|
AttributeError
|
def start(self, root):
self.root = root
journal_dir = config.get("journaldir") or config.default_journal_dir
if journal_dir is None:
journal_dir = ""
# TODO(A_D): this is ignored for type checking due to all the different types config.get returns
# When that is refactored, remove the magic comment
logdir = expanduser(journal_dir) # type: ignore # config is weird
if not logdir or not isdir(logdir):
self.stop()
return False
if self.currentdir and self.currentdir != logdir:
self.stop()
self.currentdir = logdir
# Latest pre-existing logfile - e.g. if E:D is already running. Assumes logs sort alphabetically.
# Do this before setting up the observer in case the journal directory has gone away
try:
logfiles = sorted(
[
x
for x in listdir(self.currentdir)
if re.search("^Journal(Beta)?\.[0-9]{12}\.[0-9]{2}\.log$", x)
],
key=lambda x: x.split(".")[1:],
)
self.logfile = logfiles and join(self.currentdir, logfiles[-1]) or None
except:
self.logfile = None
return False
# Set up a watchdog observer.
# File system events are unreliable/non-existent over network drives on Linux.
# We can't easily tell whether a path points to a network drive, so assume
# any non-standard logdir might be on a network drive and poll instead.
polling = bool(config.get("journaldir")) and platform != "win32"
if not polling and not self.observer:
self.observer = Observer()
self.observer.daemon = True
self.observer.start()
elif polling and self.observer:
self.observer.stop()
self.observer = None
if not self.observed and not polling:
self.observed = self.observer.schedule(self, self.currentdir)
if __debug__:
print(
'%s Journal "%s"' % (polling and "Polling" or "Monitoring", self.currentdir)
)
print('Start logfile "%s"' % self.logfile)
if not self.running():
self.thread = threading.Thread(target=self.worker, name="Journal worker")
self.thread.daemon = True
self.thread.start()
return True
|
def start(self, root):
self.root = root
logdir = expanduser(config.get("journaldir") or config.default_journal_dir) # type: ignore # config is weird
if not logdir or not isdir(logdir): # type: ignore # config does weird things in its get
self.stop()
return False
if self.currentdir and self.currentdir != logdir:
self.stop()
self.currentdir = logdir
# Latest pre-existing logfile - e.g. if E:D is already running. Assumes logs sort alphabetically.
# Do this before setting up the observer in case the journal directory has gone away
try:
logfiles = sorted(
[
x
for x in listdir(self.currentdir)
if re.search("^Journal(Beta)?\.[0-9]{12}\.[0-9]{2}\.log$", x)
],
key=lambda x: x.split(".")[1:],
)
self.logfile = logfiles and join(self.currentdir, logfiles[-1]) or None
except:
self.logfile = None
return False
# Set up a watchdog observer.
# File system events are unreliable/non-existent over network drives on Linux.
# We can't easily tell whether a path points to a network drive, so assume
# any non-standard logdir might be on a network drive and poll instead.
polling = bool(config.get("journaldir")) and platform != "win32"
if not polling and not self.observer:
self.observer = Observer()
self.observer.daemon = True
self.observer.start()
elif polling and self.observer:
self.observer.stop()
self.observer = None
if not self.observed and not polling:
self.observed = self.observer.schedule(self, self.currentdir)
if __debug__:
print(
'%s Journal "%s"' % (polling and "Polling" or "Monitoring", self.currentdir)
)
print('Start logfile "%s"' % self.logfile)
if not self.running():
self.thread = threading.Thread(target=self.worker, name="Journal worker")
self.thread.daemon = True
self.thread.start()
return True
|
https://github.com/EDCD/EDMarketConnector/issues/639
|
PS /home/[1000]/edmc> ./EDMarketConnector.py
loading plugin coriolis from "/home/chr0me/edmc/plugins/coriolis.py"
loading plugin eddb from "/home/chr0me/edmc/plugins/eddb.py"
loading plugin eddn from "/home/chr0me/edmc/plugins/eddn.py"
loading plugin edsm from "/home/chr0me/edmc/plugins/edsm.py"
loading plugin edsy from "/home/chr0me/edmc/plugins/edsy.py"
loading plugin inara from "/home/chr0me/edmc/plugins/inara.py"
Traceback (most recent call last):
File "/home/[1000]edmc/EDMarketConnector.py", line 947, in <module>
app = AppWindow(root)
File "/home/[1000]/edmc/EDMarketConnector.py", line 322, in __init__
self.postprefs(False) # Companion login happens in callback from monitor
File "/home/[1000]/edmc/EDMarketConnector.py", line 339, in postprefs
if not monitor.start(self.w):
File "/home/[1000]/edmc/monitor.py", line 121, in start
logdir = expanduser(config.get('journaldir') or config.default_journal_dir) # type: ignore # config is weird
File "/usr/lib/python3.8/posixpath.py", line 231, in expanduser
path = os.fspath(path)
TypeError: expected str, bytes or os.PathLike object, not NoneType
|
TypeError
|
def __init__(self, address_or_ble_device: Union[BLEDevice, str], **kwargs):
super(BleakClientDotNet, self).__init__(address_or_ble_device, **kwargs)
# Backend specific. Python.NET objects.
if isinstance(address_or_ble_device, BLEDevice):
self._device_info = address_or_ble_device.details.BluetoothAddress
else:
self._device_info = None
self._requester = None
self._bridge = None
self._disconnect_events: list[asyncio.Event] = []
self._address_type = (
kwargs["address_type"]
if "address_type" in kwargs and kwargs["address_type"] in ("public", "random")
else None
)
|
def __init__(self, address_or_ble_device: Union[BLEDevice, str], **kwargs):
super(BleakClientDotNet, self).__init__(address_or_ble_device, **kwargs)
# Backend specific. Python.NET objects.
if isinstance(address_or_ble_device, BLEDevice):
self._device_info = address_or_ble_device.details.BluetoothAddress
else:
self._device_info = None
self._requester = None
self._bridge = None
self._address_type = (
kwargs["address_type"]
if "address_type" in kwargs and kwargs["address_type"] in ("public", "random")
else None
)
|
https://github.com/hbldh/bleak/issues/313
|
Connecting to mac: [mac here]
Poking services...
Disconnect on device end now...
0...
1...
2...
3...
4...
Waiting for device to indicate disconnection... ensure it is ready to reconnect
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
10...
11...
12...
13...
14...
Reconnecting...
Poking services...
Disconnecting...
Waiting for device to indicate disconnection...
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
At this point, nothing has happened and the device still shows as connected... trying again
Disconnecting...
Traceback (most recent call last):
File "c:/Users/user/Documents/ble/test.py", line 338, in <module>
test.start()
File "c:/Users/user/Documents/ble/test.py", line 330, in start
self.loop.run_until_complete(self.__loop())
File "C:\Python38\lib\asyncio\base_events.py", line 616, in run_until_complete
return future.result()
File "c:/Users/user/Documents/ble/test.py", line 323, in __loop
await self.__stdin_msg_handler(msgs)
File "c:/Users/user/Documents/ble/test.py", line 306, in __stdin_msg_handler
await self.__attributeerror_test(mac)
File "c:/Users/user/Documents/ble/test.py", line 197, in __attributeerror_test
await client.disconnect() # this will throw an AttributeError
File "C:\Python38\lib\site-packages\bleak\backends\dotnet\client.py", line 230, in disconnect
self._requester.Dispose()
AttributeError: 'NoneType' object has no attribute 'Dispose'
|
AttributeError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0.
Returns:
Boolean representing connection status.
"""
# Create a new BleakBridge here.
self._bridge = Bridge()
# Try to find the desired device.
if self._device_info is None:
timeout = kwargs.get("timeout", self._timeout)
device = await BleakScannerDotNet.find_device_by_address(
self.address, timeout=timeout
)
if device:
self._device_info = device.details.BluetoothAddress
else:
raise BleakError(
"Device with address {0} was not found.".format(self.address)
)
logger.debug("Connecting to BLE device @ {0}".format(self.address))
args = [UInt64(self._device_info)]
if self._address_type is not None:
args.append(
BluetoothAddressType.Public
if self._address_type == "public"
else BluetoothAddressType.Random
)
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(*args)
),
return_type=BluetoothLEDevice,
)
loop = asyncio.get_event_loop()
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: %d", sender.ConnectionStatus)
if sender.ConnectionStatus == BluetoothConnectionStatus.Disconnected:
if self._disconnected_callback:
loop.call_soon_threadsafe(self._disconnected_callback, self)
for e in self._disconnect_events:
loop.call_soon_threadsafe(e.set)
def handle_disconnect():
self._requester = None
loop.call_soon_threadsafe(handle_disconnect)
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
services = await self.get_services()
connected = False
if self._services_resolved:
# If services has been resolved, then we assume that we are connected. This is due to
# some issues with getting `is_connected` to give correct response here.
connected = True
else:
for _ in range(5):
await asyncio.sleep(0.2)
connected = await self.is_connected()
if connected:
break
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``BleakScanner.find_device_by_address`` call. Defaults to 10.0.
Returns:
Boolean representing connection status.
"""
# Create a new BleakBridge here.
self._bridge = Bridge()
# Try to find the desired device.
if self._device_info is None:
timeout = kwargs.get("timeout", self._timeout)
device = await BleakScannerDotNet.find_device_by_address(
self.address, timeout=timeout
)
if device:
self._device_info = device.details.BluetoothAddress
else:
raise BleakError(
"Device with address {0} was not found.".format(self.address)
)
logger.debug("Connecting to BLE device @ {0}".format(self.address))
args = [UInt64(self._device_info)]
if self._address_type is not None:
args.append(
BluetoothAddressType.Public
if self._address_type == "public"
else BluetoothAddressType.Random
)
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(*args)
),
return_type=BluetoothLEDevice,
)
loop = asyncio.get_event_loop()
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: %d", sender.ConnectionStatus)
if (
sender.ConnectionStatus == BluetoothConnectionStatus.Disconnected
and self._disconnected_callback
):
loop.call_soon_threadsafe(self._disconnected_callback, self)
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
services = await self.get_services()
connected = False
if self._services_resolved:
# If services has been resolved, then we assume that we are connected. This is due to
# some issues with getting `is_connected` to give correct response here.
connected = True
else:
for _ in range(5):
await asyncio.sleep(0.2)
connected = await self.is_connected()
if connected:
break
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
https://github.com/hbldh/bleak/issues/313
|
Connecting to mac: [mac here]
Poking services...
Disconnect on device end now...
0...
1...
2...
3...
4...
Waiting for device to indicate disconnection... ensure it is ready to reconnect
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
10...
11...
12...
13...
14...
Reconnecting...
Poking services...
Disconnecting...
Waiting for device to indicate disconnection...
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
At this point, nothing has happened and the device still shows as connected... trying again
Disconnecting...
Traceback (most recent call last):
File "c:/Users/user/Documents/ble/test.py", line 338, in <module>
test.start()
File "c:/Users/user/Documents/ble/test.py", line 330, in start
self.loop.run_until_complete(self.__loop())
File "C:\Python38\lib\asyncio\base_events.py", line 616, in run_until_complete
return future.result()
File "c:/Users/user/Documents/ble/test.py", line 323, in __loop
await self.__stdin_msg_handler(msgs)
File "c:/Users/user/Documents/ble/test.py", line 306, in __stdin_msg_handler
await self.__attributeerror_test(mac)
File "c:/Users/user/Documents/ble/test.py", line 197, in __attributeerror_test
await client.disconnect() # this will throw an AttributeError
File "C:\Python38\lib\site-packages\bleak\backends\dotnet\client.py", line 230, in disconnect
self._requester.Dispose()
AttributeError: 'NoneType' object has no attribute 'Dispose'
|
AttributeError
|
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: %d", sender.ConnectionStatus)
if sender.ConnectionStatus == BluetoothConnectionStatus.Disconnected:
if self._disconnected_callback:
loop.call_soon_threadsafe(self._disconnected_callback, self)
for e in self._disconnect_events:
loop.call_soon_threadsafe(e.set)
def handle_disconnect():
self._requester = None
loop.call_soon_threadsafe(handle_disconnect)
|
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: %d", sender.ConnectionStatus)
if (
sender.ConnectionStatus == BluetoothConnectionStatus.Disconnected
and self._disconnected_callback
):
loop.call_soon_threadsafe(self._disconnected_callback, self)
|
https://github.com/hbldh/bleak/issues/313
|
Connecting to mac: [mac here]
Poking services...
Disconnect on device end now...
0...
1...
2...
3...
4...
Waiting for device to indicate disconnection... ensure it is ready to reconnect
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
10...
11...
12...
13...
14...
Reconnecting...
Poking services...
Disconnecting...
Waiting for device to indicate disconnection...
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
At this point, nothing has happened and the device still shows as connected... trying again
Disconnecting...
Traceback (most recent call last):
File "c:/Users/user/Documents/ble/test.py", line 338, in <module>
test.start()
File "c:/Users/user/Documents/ble/test.py", line 330, in start
self.loop.run_until_complete(self.__loop())
File "C:\Python38\lib\asyncio\base_events.py", line 616, in run_until_complete
return future.result()
File "c:/Users/user/Documents/ble/test.py", line 323, in __loop
await self.__stdin_msg_handler(msgs)
File "c:/Users/user/Documents/ble/test.py", line 306, in __stdin_msg_handler
await self.__attributeerror_test(mac)
File "c:/Users/user/Documents/ble/test.py", line 197, in __attributeerror_test
await client.disconnect() # this will throw an AttributeError
File "C:\Python38\lib\site-packages\bleak\backends\dotnet\client.py", line 230, in disconnect
self._requester.Dispose()
AttributeError: 'NoneType' object has no attribute 'Dispose'
|
AttributeError
|
async def disconnect(self) -> bool:
"""Disconnect from the specified GATT server.
Returns:
Boolean representing if device is disconnected.
Raises:
asyncio.TimeoutError: If device did not disconnect with 10 seconds.
"""
logger.debug("Disconnecting from BLE device...")
# Remove notifications. Remove them first in the BleakBridge and then clear
# remaining notifications in Python as well.
for characteristic in self.services.characteristics.values():
self._bridge.RemoveValueChangedCallback(characteristic.obj)
self._notification_callbacks.clear()
# Dispose all service components that we have requested and created.
for service in self.services:
service.obj.Dispose()
self.services = BleakGATTServiceCollection()
self._services_resolved = False
# Dispose of the BluetoothLEDevice and see that the connection
# status is now Disconnected.
if self._requester:
event = asyncio.Event()
self._disconnect_events.append(event)
try:
self._requester.Dispose()
await asyncio.wait_for(event.wait(), timeout=10)
finally:
self._disconnect_events.remove(event)
# Set device info to None as well.
self._device_info = None
# Finally, dispose of the Bleak Bridge as well.
self._bridge.Dispose()
self._bridge = None
return True
|
async def disconnect(self) -> bool:
"""Disconnect from the specified GATT server.
Returns:
Boolean representing if device is disconnected.
"""
logger.debug("Disconnecting from BLE device...")
# Remove notifications. Remove them first in the BleakBridge and then clear
# remaining notifications in Python as well.
for characteristic in self.services.characteristics.values():
self._bridge.RemoveValueChangedCallback(characteristic.obj)
self._notification_callbacks.clear()
# Dispose all service components that we have requested and created.
for service in self.services:
service.obj.Dispose()
self.services = BleakGATTServiceCollection()
self._services_resolved = False
# Dispose of the BluetoothLEDevice and see that the connection
# status is now Disconnected.
self._requester.Dispose()
is_disconnected = (
self._requester.ConnectionStatus == BluetoothConnectionStatus.Disconnected
)
self._requester = None
# Set device info to None as well.
self._device_info = None
# Finally, dispose of the Bleak Bridge as well.
self._bridge.Dispose()
self._bridge = None
return is_disconnected
|
https://github.com/hbldh/bleak/issues/313
|
Connecting to mac: [mac here]
Poking services...
Disconnect on device end now...
0...
1...
2...
3...
4...
Waiting for device to indicate disconnection... ensure it is ready to reconnect
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
10...
11...
12...
13...
14...
Reconnecting...
Poking services...
Disconnecting...
Waiting for device to indicate disconnection...
0...
1...
2...
3...
4...
5...
6...
7...
8...
9...
At this point, nothing has happened and the device still shows as connected... trying again
Disconnecting...
Traceback (most recent call last):
File "c:/Users/user/Documents/ble/test.py", line 338, in <module>
test.start()
File "c:/Users/user/Documents/ble/test.py", line 330, in start
self.loop.run_until_complete(self.__loop())
File "C:\Python38\lib\asyncio\base_events.py", line 616, in run_until_complete
return future.result()
File "c:/Users/user/Documents/ble/test.py", line 323, in __loop
await self.__stdin_msg_handler(msgs)
File "c:/Users/user/Documents/ble/test.py", line 306, in __stdin_msg_handler
await self.__attributeerror_test(mac)
File "c:/Users/user/Documents/ble/test.py", line 197, in __attributeerror_test
await client.disconnect() # this will throw an AttributeError
File "C:\Python38\lib\site-packages\bleak\backends\dotnet\client.py", line 230, in disconnect
self._requester.Dispose()
AttributeError: 'NoneType' object has no attribute 'Dispose'
|
AttributeError
|
async def get_discovered_devices(self) -> List[BLEDevice]:
found = []
peripherals = self._manager.central_manager.retrievePeripheralsWithIdentifiers_(
NSArray(self._identifiers.keys()),
)
for i, peripheral in enumerate(peripherals):
address = peripheral.identifier().UUIDString()
name = peripheral.name() or "Unknown"
details = peripheral
advertisementData = self._identifiers[peripheral.identifier()]
manufacturer_binary_data = advertisementData.get("kCBAdvDataManufacturerData")
manufacturer_data = {}
if manufacturer_binary_data:
manufacturer_id = int.from_bytes(
manufacturer_binary_data[0:2], byteorder="little"
)
manufacturer_value = bytes(manufacturer_binary_data[2:])
manufacturer_data = {manufacturer_id: manufacturer_value}
uuids = [
cb_uuid_to_str(u)
for u in advertisementData.get("kCBAdvDataServiceUUIDs", [])
]
found.append(
BLEDevice(
address,
name,
details,
uuids=uuids,
manufacturer_data=manufacturer_data,
delegate=self._manager.central_manager.delegate(),
)
)
return found
|
async def get_discovered_devices(self) -> List[BLEDevice]:
found = []
peripherals = self._manager.central_manager.retrievePeripheralsWithIdentifiers_(
self._identifiers.keys(),
)
for i, peripheral in enumerate(peripherals):
address = peripheral.identifier().UUIDString()
name = peripheral.name() or "Unknown"
details = peripheral
advertisementData = self._identifiers[peripheral.identifier()]
manufacturer_binary_data = advertisementData.get("kCBAdvDataManufacturerData")
manufacturer_data = {}
if manufacturer_binary_data:
manufacturer_id = int.from_bytes(
manufacturer_binary_data[0:2], byteorder="little"
)
manufacturer_value = bytes(manufacturer_binary_data[2:])
manufacturer_data = {manufacturer_id: manufacturer_value}
uuids = [
cb_uuid_to_str(u)
for u in advertisementData.get("kCBAdvDataServiceUUIDs", [])
]
found.append(
BLEDevice(
address,
name,
details,
uuids=uuids,
manufacturer_data=manufacturer_data,
delegate=self._manager.central_manager.delegate(),
)
)
return found
|
https://github.com/hbldh/bleak/issues/331
|
2020-10-12 22:16:31.801 Python[3820:332546] *** Assertion failure in -[CBCentralManager retrievePeripheralsWithIdentifiers:], /BuildRoot/Library/Caches/com.apple.xbs/Sources/CoreBluetooth/CoreBluetooth-102.23/CBCentralManager.m:203
Traceback (most recent call last):
File "bleak_test3.py", line 52, in <module>
loop.run_until_complete(run())
File "/usr/local/Cellar/python@3.8/3.8.5/Frameworks/Python.framework/Versions/3.8/lib/python3.8/asyncio/base_events.py", line 616, in run_until_complete
return future.result()
File "bleak_test3.py", line 12, in run
await client.connect()
File "/Users/GirlOne/dev/formid_hfs/venv/lib/python3.8/site-packages/bleak/backends/corebluetooth/client.py", line 76, in connect
device = await BleakScannerCoreBluetooth.find_device_by_address(
File "/Users/GirlOne/dev/formid_hfs/venv/lib/python3.8/site-packages/bleak/backends/corebluetooth/scanner.py", line 157, in find_device_by_address
return await scanner._find_device_by_address(
File "/Users/GirlOne/dev/formid_hfs/venv/lib/python3.8/site-packages/bleak/backends/scanner.py", line 107, in _find_device_by_address
for d in await self.get_discovered_devices()
File "/Users/GirlOne/dev/formid_hfs/venv/lib/python3.8/site-packages/bleak/backends/corebluetooth/scanner.py", line 82, in get_discovered_devices
peripherals = self._manager.central_manager.retrievePeripheralsWithIdentifiers_(
objc.error: NSInternalInconsistencyException - Invalid parameter not satisfying: [identifiers isKindOfClass:[NSArray class]]
|
objc.error
|
async def is_connected(self) -> bool:
"""Check connection status between this client and the server.
Returns:
Boolean representing connection status.
"""
# TODO: Listen to connected property changes.
is_connected = False
try:
is_connected = await self._bus.callRemote(
self._device_path,
"Get",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="ss",
body=[defs.DEVICE_INTERFACE, "Connected"],
returnSignature="v",
).asFuture(asyncio.get_event_loop())
except AttributeError:
# The `self._bus` object had already been cleaned up due to disconnect...
pass
except ConnectionDone:
# Twisted error stating that "Connection was closed cleanly."
pass
except RemoteError as e:
if e.errName != "org.freedesktop.DBus.Error.UnknownObject":
raise
except Exception as e:
# Do not want to silence unknown errors. Send this upwards.
raise
return is_connected
|
async def is_connected(self) -> bool:
"""Check connection status between this client and the server.
Returns:
Boolean representing connection status.
"""
# TODO: Listen to connected property changes.
is_connected = False
try:
is_connected = await self._bus.callRemote(
self._device_path,
"Get",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="ss",
body=[defs.DEVICE_INTERFACE, "Connected"],
returnSignature="v",
).asFuture(asyncio.get_event_loop())
except AttributeError:
# The `self._bus` object had already been cleaned up due to disconnect...
pass
except ConnectionDone:
# Twisted error stating that "Connection was closed cleanly."
pass
except Exception as e:
# Do not want to silence unknown errors. Send this upwards.
raise
return is_connected
|
https://github.com/hbldh/bleak/issues/310
|
client = BleakClient(address)
await client.connect()
await client.disconnect()
Traceback (most recent call last):
File "/home/pi/venv/lib/python3.7/site-packages/aioconsole/execute.py", line 87, in aexec
result, new_local = await coro
File "<aexec>", line 2, in __corofn
File "/home/pi/venv/lib/python3.7/site-packages/bleak/backends/bluezdbus/client.py", line 245, in disconnect
is_disconnected = not await self.is_connected()
File "/home/pi/venv/lib/python3.7/site-packages/bleak/backends/bluezdbus/client.py", line 346, in is_connected
).asFuture(asyncio.get_event_loop())
txdbus.error.RemoteError: org.freedesktop.DBus.Error.UnknownObject: Method "Get" with signature "ss" on interface "org.freedesktop.DBus.Properties" doesn't exist
|
txdbus.error.RemoteError
|
async def is_connected(self) -> bool:
"""Check connection status between this client and the server.
Returns:
Boolean representing connection status.
"""
# TODO: Listen to connected property changes.
is_connected = False
try:
is_connected = await self._bus.callRemote(
self._device_path,
"Get",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="ss",
body=[defs.DEVICE_INTERFACE, "Connected"],
returnSignature="v",
).asFuture(asyncio.get_event_loop())
except AttributeError:
# The `self._bus` object had already been cleaned up due to disconnect...
pass
except ConnectionDone:
# Twisted error stating that "Connection was closed cleanly."
pass
except Exception as e:
# Do not want to silence unknown errors. Send this upwards.
raise
return is_connected
|
async def is_connected(self) -> bool:
"""Check connection status between this client and the server.
Returns:
Boolean representing connection status.
"""
# TODO: Listen to connected property changes.
return await self._bus.callRemote(
self._device_path,
"Get",
interface=defs.PROPERTIES_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="ss",
body=[defs.DEVICE_INTERFACE, "Connected"],
returnSignature="v",
).asFuture(asyncio.get_event_loop())
|
https://github.com/hbldh/bleak/issues/259
|
INFO:__main__:Connected: True
True
True
...
True
True
DEBUG:bleak.backends.bluezdbus.client:DBUS: path: /org/bluez/hci0/dev_REDACTED, domain: org.bluez.Device1, body: {'ServicesResolved': False, 'Connected': False}
DEBUG:bleak.backends.bluezdbus.client:Device REDACTED disconnected.
DEBUG:bleak.backends.bluezdbus.client:Removing rule PropChanged, ID: 1
DEBUG:bleak.backends.bluezdbus.client:Disconnecting from BLE device...
Traceback (most recent call last):
File "disconnect_test.py", line 21, in <module>
asyncio.run(run(address))
File "/usr/lib/python3.7/asyncio/runners.py", line 43, in run
return loop.run_until_complete(main)
File "/usr/lib/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "disconnect_test.py", line 16, in run
print(await client.is_connected())
File "/home/pi/.local/lib/python3.7/site-packages/bleak/backends/bluezdbus/client.py", line 282, in is_connected
return await self._bus.callRemote(
AttributeError: 'NoneType' object has no attribute 'callRemote'
|
AttributeError
|
async def scanForPeripherals_(self, scan_options) -> List[CBPeripheral]:
"""
Scan for peripheral devices
scan_options = { service_uuids, timeout }
"""
# remove old
self.devices = {}
service_uuids = []
if "service_uuids" in scan_options:
service_uuids_str = scan_options["service_uuids"]
service_uuids = NSArray.alloc().initWithArray_(
list(map(string2uuid, service_uuids_str))
)
timeout = 0
if "timeout" in scan_options:
timeout = float(scan_options["timeout"])
self.central_manager.scanForPeripheralsWithServices_options_(service_uuids, None)
if timeout > 0:
await asyncio.sleep(timeout)
self.central_manager.stopScan()
# Wait a while to allow central manager to stop scanning.
# The `isScanning` attribute is added in macOS 10.13, so before that
# just waiting some will have to do. In 10.13+ I have never seen
# bleak enter the while-loop, so this fix is most probably safe.
if _IS_PRE_10_13:
await asyncio.sleep(0.1)
else:
while self.central_manager.isScanning():
await asyncio.sleep(0.1)
return []
|
async def scanForPeripherals_(self, scan_options) -> List[CBPeripheral]:
"""
Scan for peripheral devices
scan_options = { service_uuids, timeout }
"""
# remove old
self.devices = {}
service_uuids = []
if "service_uuids" in scan_options:
service_uuids_str = scan_options["service_uuids"]
service_uuids = NSArray.alloc().initWithArray_(
list(map(string2uuid, service_uuids_str))
)
timeout = 0
if "timeout" in scan_options:
timeout = float(scan_options["timeout"])
self.central_manager.scanForPeripheralsWithServices_options_(service_uuids, None)
if timeout > 0:
await asyncio.sleep(timeout)
self.central_manager.stopScan()
while self.central_manager.isScanning():
await asyncio.sleep(0.1)
return []
|
https://github.com/hbldh/bleak/issues/234
|
$ python scanner.py
Traceback (most recent call last):
File "scanner.py", line 12, in <module>
loop.run_until_complete(run())
File "/usr/local/Cellar/python/3.7.0/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 568, in run_until_complete
return future.result()
File "scanner.py", line 6, in run
devices = await discover()
File "/Users/matteo/virtualenvs/bt-scanner/lib/python3.7/site-packages/bleak/backends/corebluetooth/discovery.py", line 35, in discover
await cbapp.central_manager_delegate.scanForPeripherals_(scan_options)
File "/Users/matteo/virtualenvs/bt-scanner/lib/python3.7/site-packages/bleak/backends/corebluetooth/CentralManagerDelegate.py", line 121, in scanForPeripherals_
while self.central_manager.isScanning():
AttributeError: 'CBCentralManager' object has no attribute 'isScanning'
|
AttributeError
|
async def discover(
timeout: float = 5.0, loop: AbstractEventLoop = None, **kwargs
) -> List[BLEDevice]:
"""Perform a Bluetooth LE Scan using Windows.Devices.Bluetooth.Advertisement
Args:
timeout (float): Time to scan for.
loop (Event Loop): The event loop to use.
Keyword Args:
string_output (bool): If set to false, ``discover`` returns .NET
device objects instead.
Returns:
List of strings or objects found.
"""
loop = loop if loop else asyncio.get_event_loop()
watcher = BluetoothLEAdvertisementWatcher()
devices = {}
scan_responses = {}
def _format_bdaddr(a):
return ":".join("{:02X}".format(x) for x in a.to_bytes(6, byteorder="big"))
def _format_event_args(e):
try:
return "{0}: {1}".format(
_format_bdaddr(e.BluetoothAddress),
e.Advertisement.LocalName or "Unknown",
)
except Exception:
return e.BluetoothAddress
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.AdvertisementType == BluetoothLEAdvertisementType.ScanResponse:
if e.BluetoothAddress not in scan_responses:
scan_responses[e.BluetoothAddress] = e
else:
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
def AdvertisementWatcher_Stopped(sender, e):
if sender == watcher:
logger.debug(
"{0} devices found. Watcher status: {1}.".format(
len(devices), watcher.Status
)
)
watcher.Received += AdvertisementWatcher_Received
watcher.Stopped += AdvertisementWatcher_Stopped
watcher.ScanningMode = BluetoothLEScanningMode.Active
# Watcher works outside of the Python process.
watcher.Start()
await asyncio.sleep(timeout, loop=loop)
watcher.Stop()
try:
watcher.Received -= AdvertisementWatcher_Received
watcher.Stopped -= AdvertisementWatcher_Stopped
except Exception as e:
logger.debug("Could not remove event handlers: {0}...".format(e))
found = []
for d in list(devices.values()):
bdaddr = _format_bdaddr(d.BluetoothAddress)
uuids = []
for u in d.Advertisement.ServiceUuids:
uuids.append(u.ToString())
data = {}
for m in d.Advertisement.ManufacturerData:
md = IBuffer(m.Data)
b = Array.CreateInstance(Byte, md.Length)
reader = DataReader.FromBuffer(md)
reader.ReadBytes(b)
data[m.CompanyId] = bytes(b)
local_name = d.Advertisement.LocalName
if not local_name and d.BluetoothAddress in scan_responses:
local_name = scan_responses[d.BluetoothAddress].Advertisement.LocalName
found.append(
BLEDevice(
bdaddr,
local_name,
d,
uuids=uuids,
manufacturer_data=data,
)
)
return found
|
async def discover(
timeout: float = 5.0, loop: AbstractEventLoop = None, **kwargs
) -> List[BLEDevice]:
"""Perform a Bluetooth LE Scan using Windows.Devices.Bluetooth.Advertisement
Args:
timeout (float): Time to scan for.
loop (Event Loop): The event loop to use.
Keyword Args:
string_output (bool): If set to false, ``discover`` returns .NET
device objects instead.
Returns:
List of strings or objects found.
"""
loop = loop if loop else asyncio.get_event_loop()
watcher = BluetoothLEAdvertisementWatcher()
devices = {}
scan_responses = {}
def _format_bdaddr(a):
return ":".join("{:02X}".format(x) for x in a.to_bytes(6, byteorder="big"))
def _format_event_args(e):
try:
return "{0}: {1}".format(
_format_bdaddr(e.BluetoothAddress),
e.Advertisement.LocalName or "Unknown",
)
except Exception:
return e.BluetoothAddress
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.AdvertisementType == BluetoothLEAdvertisementType.ScanResponse:
if e.BluetoothAddress not in scan_responses:
scan_responses[e.BluetoothAddress] = e
else:
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
def AdvertisementWatcher_Stopped(sender, e):
if sender == watcher:
logger.debug(
"{0} devices found. Watcher status: {1}.".format(
len(devices), watcher.Status
)
)
watcher.Received += AdvertisementWatcher_Received
watcher.Stopped += AdvertisementWatcher_Stopped
watcher.ScanningMode = BluetoothLEScanningMode.Active
# Watcher works outside of the Python process.
watcher.Start()
await asyncio.sleep(timeout, loop=loop)
watcher.Stop()
try:
watcher.Received -= AdvertisementWatcher_Received
watcher.Stopped -= AdvertisementWatcher_Stopped
except Exception as e:
logger.debug("Could not remove event handlers: {0}...".format(e))
found = []
for d in devices.values():
bdaddr = _format_bdaddr(d.BluetoothAddress)
uuids = []
for u in d.Advertisement.ServiceUuids:
uuids.append(u.ToString())
data = {}
for m in d.Advertisement.ManufacturerData:
md = IBuffer(m.Data)
b = Array.CreateInstance(Byte, md.Length)
reader = DataReader.FromBuffer(md)
reader.ReadBytes(b)
data[m.CompanyId] = bytes(b)
local_name = d.Advertisement.LocalName
if not local_name and d.BluetoothAddress in scan_responses:
local_name = scan_responses[d.BluetoothAddress].Advertisement.LocalName
found.append(
BLEDevice(
bdaddr,
local_name,
d,
uuids=uuids,
manufacturer_data=data,
)
)
return found
|
https://github.com/hbldh/bleak/issues/87
|
Traceback (most recent call last):
File "[my script].py", line 140, in <module>
loop.run_until_complete(scan_loop())
File "[My user folder]\Anaconda3\lib\asyncio\base_events.py", line 584, in run_until_complete
return future.result()
File "[my script].py", line 102, in scan_loop
devices = await discover(device="hci0", timeout=3)
File "[My user folder]\lib\site-packages\bleak\backends\dotnet\discovery.py", line 91, in discover
for d in devices.values():
RuntimeError: dictionary changed size during iteration
|
RuntimeError
|
async def _cleanup(self) -> None:
for rule_name, rule_id in self._rules.items():
logger.debug("Removing rule {0}, ID: {1}".format(rule_name, rule_id))
try:
await self._bus.delMatch(rule_id).asFuture(self.loop)
except Exception as e:
logger.error(
"Could not remove rule {0} ({1}): {2}".format(rule_id, rule_name, e)
)
self._rules = {}
for _uuid in list(self._subscriptions):
try:
await self.stop_notify(_uuid)
except Exception as e:
logger.error(
"Could not remove notifications on characteristic {0}: {1}".format(
_uuid, e
)
)
self._subscriptions = []
|
async def _cleanup(self) -> None:
for rule_name, rule_id in self._rules.items():
logger.debug("Removing rule {0}, ID: {1}".format(rule_name, rule_id))
try:
await self._bus.delMatch(rule_id).asFuture(self.loop)
except Exception as e:
logger.error(
"Could not remove rule {0} ({1}): {2}".format(rule_id, rule_name, e)
)
self._rules = {}
await asyncio.gather(*(self.stop_notify(_uuid) for _uuid in self._subscriptions))
|
https://github.com/hbldh/bleak/issues/145
|
INFO:bleak.backends.bluezdbus.client:605: GATT Char Properties Changed: gorg/bluez/hci0/dev_F2_1F_2B_52_48_9E/service000e/char0017 | [{'Value': [23, 8, 160]}, []]
DEBUG:bleak.backends.bluezdbus.client:597: DBUS: path: gorg/bluez/hci0/dev_F2_1F_2B_52_48_9E/service000e/char0017, domain: org.bluez.GattCharacteristic1, body: {'Value': [2, 188, 1, 83]}
INFO:bleak.backends.bluezdbus.client:605: GATT Char Properties Changed: gorg/bluez/hci0/dev_F2_1F_2B_52_48_9E/service000e/char0017 | [{'Value': [2, 188, 1, 83]}, []]
DEBUG:bleak.backends.bluezdbus.client:597: DBUS: path: gorg/bluez/hci0/dev_F2_1F_2B_52_48_9E, domain: org.bluez.Device1, body: {'ServicesResolved': False, 'Connected': False}
DEBUG:bleak.backends.bluezdbus.client:622: Device F2:1F:2B:52:48:9E disconnected.
DEBUG:bleak.backends.bluezdbus.client:149: Removing rule PropChanged, ID: 1
DEBUG:bleak.backends.bluezdbus.client:166: Disconnecting from BLE device...
Traceback (most recent call last):
[...]
File "/home/sfrank/.local/lib/python3.5/site-packages/bleak/backends/bluezdbus/client.py", line 168, in disconnect
await self._cleanup()
File "/home/sfrank/.local/lib/python3.5/site-packages/bleak/backends/bluezdbus/client.py", line 156, in _cleanup
*(self.stop_notify(_uuid) for _uuid in self._subscriptions)
File "/usr/lib/python3.5/asyncio/futures.py", line 380, in __iter__
yield self # This tells Task to wait for completion.
File "/usr/lib/python3.5/asyncio/tasks.py", line 304, in _wakeup
future.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
File "/usr/lib/python3.5/asyncio/tasks.py", line 241, in _step
result = coro.throw(exc)
File "/home/sfrank/.local/lib/python3.5/site-packages/bleak/backends/bluezdbus/client.py", line 529, in stop_notify
).asFuture(self.loop)
File "/usr/lib/python3.5/asyncio/futures.py", line 380, in __iter__
yield self # This tells Task to wait for completion.
File "/usr/lib/python3.5/asyncio/tasks.py", line 304, in _wakeup
future.result()
File "/usr/lib/python3.5/asyncio/futures.py", line 293, in result
raise self._exception
txdbus.error.RemoteError: org.bluez.Error.Failed: No notify session started
|
txdbus.error.RemoteError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
timeout = kwargs.get("timeout", self._timeout)
await discover(timeout=timeout, device=self.device, loop=self.loop)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 0.1.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(
timeout=kwargs.get("timeout", 0.1), device=self.device, loop=self.loop
)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def _cleanup(self) -> None:
for rule_name, rule_id in self._rules.items():
logger.debug("Removing rule {0}, ID: {1}".format(rule_name, rule_id))
try:
await self._bus.delMatch(rule_id).asFuture(self.loop)
except Exception as e:
logger.error(
"Could not remove rule {0} ({1}): {2}".format(rule_id, rule_name, e)
)
self._rules = {}
await asyncio.gather(*(self.stop_notify(_uuid) for _uuid in self._subscriptions))
|
async def _cleanup(self) -> None:
for rule_name, rule_id in self._rules.items():
logger.debug("Removing rule {0}, ID: {1}".format(rule_name, rule_id))
await self._bus.delMatch(rule_id).asFuture(self.loop)
await asyncio.gather(*(self.stop_notify(_uuid) for _uuid in self._subscriptions))
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def get_services(self) -> BleakGATTServiceCollection:
"""Get all services registered for this GATT server.
Returns:
A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree.
"""
if self._services_resolved:
return self.services
sleep_loop_sec = 0.02
total_slept_sec = 0
while total_slept_sec < 5.0:
properties = await self._get_device_properties()
services_resolved = properties.get("ServicesResolved", False)
if services_resolved:
break
await asyncio.sleep(sleep_loop_sec, loop=self.loop)
total_slept_sec += sleep_loop_sec
if not services_resolved:
raise BleakError("Services discovery error")
logger.debug("Get Services...")
objs = await get_managed_objects(
self._bus, self.loop, self._device_path + "/service"
)
# There is no guarantee that services are listed before characteristics
# Managed Objects dict.
# Need multiple iterations to construct the Service Collection
_chars, _descs = [], []
for object_path, interfaces in objs.items():
logger.debug(utils.format_GATT_object(object_path, interfaces))
if defs.GATT_SERVICE_INTERFACE in interfaces:
service = interfaces.get(defs.GATT_SERVICE_INTERFACE)
self.services.add_service(BleakGATTServiceBlueZDBus(service, object_path))
elif defs.GATT_CHARACTERISTIC_INTERFACE in interfaces:
char = interfaces.get(defs.GATT_CHARACTERISTIC_INTERFACE)
_chars.append([char, object_path])
elif defs.GATT_DESCRIPTOR_INTERFACE in interfaces:
desc = interfaces.get(defs.GATT_DESCRIPTOR_INTERFACE)
_descs.append([desc, object_path])
for char, object_path in _chars:
_service = list(filter(lambda x: x.path == char["Service"], self.services))
self.services.add_characteristic(
BleakGATTCharacteristicBlueZDBus(char, object_path, _service[0].uuid)
)
self._char_path_to_uuid[object_path] = char.get("UUID")
for desc, object_path in _descs:
_characteristic = list(
filter(
lambda x: x.path == desc["Characteristic"],
self.services.characteristics.values(),
)
)
self.services.add_descriptor(
BleakGATTDescriptorBlueZDBus(desc, object_path, _characteristic[0].uuid)
)
self._services_resolved = True
return self.services
|
async def get_services(self) -> BleakGATTServiceCollection:
"""Get all services registered for this GATT server.
Returns:
A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree.
"""
if self._services_resolved:
return self.services
while True:
properties = await self._get_device_properties()
services_resolved = properties.get("ServicesResolved", False)
if services_resolved:
break
await asyncio.sleep(0.02, loop=self.loop)
logger.debug("Get Services...")
objs = await get_managed_objects(
self._bus, self.loop, self._device_path + "/service"
)
# There is no guarantee that services are listed before characteristics
# Managed Objects dict.
# Need multiple iterations to construct the Service Collection
_chars, _descs = [], []
for object_path, interfaces in objs.items():
logger.debug(utils.format_GATT_object(object_path, interfaces))
if defs.GATT_SERVICE_INTERFACE in interfaces:
service = interfaces.get(defs.GATT_SERVICE_INTERFACE)
self.services.add_service(BleakGATTServiceBlueZDBus(service, object_path))
elif defs.GATT_CHARACTERISTIC_INTERFACE in interfaces:
char = interfaces.get(defs.GATT_CHARACTERISTIC_INTERFACE)
_chars.append([char, object_path])
elif defs.GATT_DESCRIPTOR_INTERFACE in interfaces:
desc = interfaces.get(defs.GATT_DESCRIPTOR_INTERFACE)
_descs.append([desc, object_path])
for char, object_path in _chars:
_service = list(filter(lambda x: x.path == char["Service"], self.services))
self.services.add_characteristic(
BleakGATTCharacteristicBlueZDBus(char, object_path, _service[0].uuid)
)
self._char_path_to_uuid[object_path] = char.get("UUID")
for desc, object_path in _descs:
_characteristic = list(
filter(
lambda x: x.path == desc["Characteristic"],
self.services.characteristics.values(),
)
)
self.services.add_descriptor(
BleakGATTDescriptorBlueZDBus(desc, object_path, _characteristic[0].uuid)
)
self._services_resolved = True
return self.services
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def read_gatt_char(self, _uuid: str, **kwargs) -> bytearray:
"""Perform read operation on the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to read from.
Returns:
(bytearray) The read data.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
# Special handling for BlueZ >= 5.48, where Battery Service (0000180f-0000-1000-8000-00805f9b34fb:)
# has been moved to interface org.bluez.Battery1 instead of as a regular service.
if _uuid == "00002a19-0000-1000-8000-00805f9b34fb" and (
self._bluez_version[0] == 5 and self._bluez_version[1] >= 48
):
props = await self._get_device_properties(interface=defs.BATTERY_INTERFACE)
# Simulate regular characteristics read to be consistent over all platforms.
value = bytearray([props.get("Percentage", "")])
logger.debug(
"Read Battery Level {0} | {1}: {2}".format(
_uuid, self._device_path, value
)
)
return value
if _uuid == "00002a00-0000-1000-8000-00805f9b34fb" and (
self._bluez_version[0] == 5 and self._bluez_version[1] >= 48
):
props = await self._get_device_properties(interface=defs.DEVICE_INTERFACE)
# Simulate regular characteristics read to be consistent over all platforms.
value = bytearray(props.get("Name", "").encode("ascii"))
logger.debug(
"Read Device Name {0} | {1}: {2}".format(
_uuid, self._device_path, value
)
)
return value
raise BleakError(
"Characteristic with UUID {0} could not be found!".format(_uuid)
)
value = bytearray(
await self._bus.callRemote(
characteristic.path,
"ReadValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="ay",
).asFuture(self.loop)
)
logger.debug(
"Read Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, value)
)
return value
|
async def read_gatt_char(self, _uuid: str, **kwargs) -> bytearray:
"""Perform read operation on the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to read from.
Returns:
(bytearray) The read data.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
# Special handling for BlueZ >= 5.48, where Battery Service (0000180f-0000-1000-8000-00805f9b34fb:)
# has been moved to interface org.bluez.Battery1 instead of as a regular service.
if _uuid == "00002a19-0000-1000-8000-00805f9b34fb" and (
self._bluez_version[0] == 5 and self._bluez_version[1] >= 48
):
props = await self._get_device_properties(interface=defs.BATTERY_INTERFACE)
# Simulate regular characteristics read to be consistent over all platforms.
value = bytearray([props.get("Percentage", "")])
logger.debug(
"Read Battery Level {0} | {1}: {2}".format(
_uuid, self._device_path, value
)
)
return value
raise BleakError(
"Characteristic with UUID {0} could not be found!".format(_uuid)
)
value = bytearray(
await self._bus.callRemote(
characteristic.path,
"ReadValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="ay",
).asFuture(self.loop)
)
logger.debug(
"Read Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, value)
)
return value
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
def _properties_changed_callback(self, message):
"""Notification handler.
In the BlueZ DBus API, notifications come as
PropertiesChanged callbacks on the GATT Characteristic interface
that StartNotify has been called on.
Args:
message (): The PropertiesChanged DBus signal message relaying
the new data on the GATT Characteristic.
"""
logger.debug(
"DBUS: path: {}, domain: {}, body: {}".format(
message.path, message.body[0], message.body[1]
)
)
if message.body[0] == defs.GATT_CHARACTERISTIC_INTERFACE:
if message.path in self._notification_callbacks:
logger.info(
"GATT Char Properties Changed: {0} | {1}".format(
message.path, message.body[1:]
)
)
self._notification_callbacks[message.path](message.path, message.body[1])
elif message.body[0] == defs.DEVICE_INTERFACE:
device_path = "/org/bluez/%s/dev_%s" % (
self.device,
self.address.replace(":", "_"),
)
if message.path.lower() == device_path.lower():
message_body_map = message.body[1]
if "Connected" in message_body_map and not message_body_map["Connected"]:
logger.debug("Device {} disconnected.".format(self.address))
self.loop.create_task(self._cleanup())
if self._disconnected_callback is not None:
self._disconnected_callback(self)
|
def _properties_changed_callback(self, message):
"""Notification handler.
In the BlueZ DBus API, notifications come as
PropertiesChanged callbacks on the GATT Characteristic interface
that StartNotify has been called on.
Args:
message (): The PropertiesChanged DBus signal message relaying
the new data on the GATT Characteristic.
"""
logger.debug(
"DBUS: path: {}, domain: {}, body: {}".format(
message.path, message.body[0], message.body[1]
)
)
if message.body[0] == defs.GATT_CHARACTERISTIC_INTERFACE:
if message.path in self._notification_callbacks:
logger.info(
"GATT Char Properties Changed: {0} | {1}".format(
message.path, message.body[1:]
)
)
self._notification_callbacks[message.path](message.path, message.body[1])
elif message.body[0] == defs.DEVICE_INTERFACE:
device_path = "/org/bluez/%s/dev_%s" % (
self.device,
self.address.replace(":", "_"),
)
if message.path == device_path:
message_body_map = message.body[1]
if "Connected" in message_body_map and not message_body_map["Connected"]:
logger.debug("Device {} disconnected.".format(self.address))
self.loop.create_task(self._cleanup())
if self._disconnected_callback is not None:
self._disconnected_callback(self)
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
def _device_info(path, props):
try:
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
address = props.get("Address", None)
if address is None:
try:
address = path[-17:].replace("_", ":")
if not validate_mac_address(address):
address = None
except Exception:
address = None
rssi = props.get("RSSI", "?")
return name, address, rssi, path
except Exception as e:
# logger.exception(e, exc_info=True)
return None, None, None, None
|
def _device_info(path, props):
try:
name = props.get("Name", props.get("Alias", path.split("/")[-1]))
address = props.get("Address", None)
if address is None:
try:
address = path[-17:].replace("_", ":")
if not validate_mac_address(address):
address = None
except Exception:
address = None
rssi = props.get("RSSI", "?")
return name, address, rssi, path
except Exception as e:
logger.exception(e, exc_info=True)
return None, None, None, None
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
cached_devices = {}
devices = {}
rules = list()
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
elif (
message.member == "InterfacesRemoved"
and message.body[1][0] == defs.BATTERY_INTERFACE
):
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
return
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
bus = await client.connect(reactor, "system").asFuture(loop)
# Add signal listeners
rules.append(
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
)
rules.append(
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
)
rules.append(
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
)
# Find the HCI device to use for scanning and get cached device properties
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
cached_devices = dict(_filter_on_device(objects))
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"SetDiscoveryFilter",
interface="org.bluez.Adapter1",
destination="org.bluez",
signature="a{sv}",
body=[{"Transport": "le"}],
).asFuture(loop)
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
if not props:
logger.debug(
"Disregarding %s since no properties could be obtained." % path
)
continue
name, address, _, path = _device_info(path, props)
if address is None:
continue
uuids = props.get("UUIDs", [])
manufacturer_data = props.get("ManufacturerData", {})
discovered_devices.append(
BLEDevice(
address,
name,
{"path": path, "props": props},
uuids=uuids,
manufacturer_data=manufacturer_data,
)
)
for rule in rules:
await bus.delMatch(rule).asFuture(loop)
bus.disconnect()
return discovered_devices
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
cached_devices = {}
devices = {}
rules = list()
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
elif (
message.member == "InterfacesRemoved"
and message.body[1][0] == defs.BATTERY_INTERFACE
):
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
return
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
bus = await client.connect(reactor, "system").asFuture(loop)
# Add signal listeners
rules.append(
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
)
rules.append(
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
)
rules.append(
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
)
# Find the HCI device to use for scanning and get cached device properties
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
cached_devices = dict(_filter_on_device(objects))
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"SetDiscoveryFilter",
interface="org.bluez.Adapter1",
destination="org.bluez",
signature="a{sv}",
body=[{"Transport": "le"}],
).asFuture(loop)
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
if not props:
logger.debug(
"Disregarding %s since no properties could be obtained." % path
)
continue
name, address, _, path = _device_info(path, props)
uuids = props.get("UUIDs", [])
manufacturer_data = props.get("ManufacturerData", {})
discovered_devices.append(
BLEDevice(
address,
name,
{"path": path, "props": props},
uuids=uuids,
manufacturer_data=manufacturer_data,
)
)
for rule in rules:
await bus.delMatch(rule).asFuture(loop)
bus.disconnect()
return discovered_devices
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
def __init__(self, address, loop=None, **kwargs):
self.address = address
self.loop = loop if loop else asyncio.get_event_loop()
self.services = BleakGATTServiceCollection()
self._services_resolved = False
self._notification_callbacks = {}
self._timeout = kwargs.get("timeout", 2.0)
|
def __init__(self, address, loop=None, **kwargs):
self.address = address
self.loop = loop if loop else asyncio.get_event_loop()
self.services = BleakGATTServiceCollection()
self._services_resolved = False
self._notification_callbacks = {}
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def connect(self, **kwargs) -> bool:
"""Connect to a specified Peripheral
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
timeout = kwargs.get("timeout", self._timeout)
devices = await discover(timeout=timeout, loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {} was not found".format(self.address))
logger.debug("Connecting to BLE device @ {}".format(self.address))
await cbapp.central_manager_delegate.connect_(sought_device[0].details)
# Now get services
await self.get_services()
return True
|
async def connect(self, **kwargs) -> bool:
"""Connect to a specified Peripheral
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
devices = await discover(timeout=kwargs.get("timeout", 5.0), loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {} was not found".format(self.address))
logger.debug("Connecting to BLE device @ {}".format(self.address))
await cbapp.central_manager_delegate.connect_(sought_device[0].details)
# Now get services
await self.get_services()
return True
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def get_services(self) -> BleakGATTServiceCollection:
"""Get all services registered for this GATT server.
Returns:
A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree.
"""
if self._services is not None:
return self._services
logger.debug("Retrieving services...")
services = await cbapp.central_manager_delegate.connected_peripheral_delegate.discoverServices()
for service in services:
serviceUUID = service.UUID().UUIDString()
logger.debug("Retrieving characteristics for service {}".format(serviceUUID))
characteristics = await cbapp.central_manager_delegate.connected_peripheral_delegate.discoverCharacteristics_(
service
)
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
for characteristic in characteristics:
cUUID = characteristic.UUID().UUIDString()
logger.debug("Retrieving descriptors for characteristic {}".format(cUUID))
descriptors = await cbapp.central_manager_delegate.connected_peripheral_delegate.discoverDescriptors_(
characteristic
)
self.services.add_characteristic(
BleakGATTCharacteristicCoreBluetooth(characteristic)
)
for descriptor in descriptors:
self.services.add_descriptor(
BleakGATTDescriptorCoreBluetooth(
descriptor, characteristic.UUID().UUIDString()
)
)
self._services_resolved = True
self._services = services
return self.services
|
async def get_services(self) -> BleakGATTServiceCollection:
"""Get all services registered for this GATT server.
Returns:
A :py:class:`bleak.backends.service.BleakGATTServiceCollection` with this device's services tree.
"""
if self._services != None:
return self._services
logger.debug("Retrieving services...")
services = await cbapp.central_manager_delegate.connected_peripheral_delegate.discoverServices()
for service in services:
serviceUUID = service.UUID().UUIDString()
logger.debug("Retrieving characteristics for service {}".format(serviceUUID))
characteristics = await cbapp.central_manager_delegate.connected_peripheral_delegate.discoverCharacteristics_(
service
)
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
for characteristic in characteristics:
cUUID = characteristic.UUID().UUIDString()
logger.debug("Retrieving descriptors for characteristic {}".format(cUUID))
descriptors = await cbapp.central_manager_delegate.connected_peripheral_delegate.discoverDescriptors_(
characteristic
)
self.services.add_characteristic(
BleakGATTCharacteristicCoreBluetooth(characteristic)
)
for descriptor in descriptors:
self.services.add_descriptor(
BleakGATTDescriptorCoreBluetooth(
descriptor, characteristic.UUID().UUIDString()
)
)
self._services_resolved = True
return self.services
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
# Try to find the desired device.
timeout = kwargs.get("timeout", self._timeout)
devices = await discover(timeout=timeout, loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
args = [UInt64(self._device_info.BluetoothAddress)]
if self._address_type is not None:
args.append(
BluetoothAddressType.Public
if self._address_type == "public"
else BluetoothAddressType.Random
)
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(*args)
),
return_type=BluetoothLEDevice,
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
services = await self.get_services()
connected = False
if self._services_resolved:
# If services has been resolved, then we assume that we are connected. This is due to
# some issues with getting `is_connected` to give correct response here.
connected = True
else:
for _ in range(5):
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
break
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 2.0.
Returns:
Boolean representing connection status.
"""
# Try to find the desired device.
devices = await discover(timeout=kwargs.get("timeout", 2.0), loop=self.loop)
sought_device = list(
filter(lambda x: x.address.upper() == self.address.upper(), devices)
)
if len(sought_device):
self._device_info = sought_device[0].details
else:
raise BleakError("Device with address {0} was not found.".format(self.address))
logger.debug("Connecting to BLE device @ {0}".format(self.address))
args = [UInt64(self._device_info.BluetoothAddress)]
if self._address_type is not None:
args.append(
BluetoothAddressType.Public
if self._address_type == "public"
else BluetoothAddressType.Random
)
self._requester = await wrap_IAsyncOperation(
IAsyncOperation[BluetoothLEDevice](
BluetoothLEDevice.FromBluetoothAddressAsync(*args)
),
return_type=BluetoothLEDevice,
loop=self.loop,
)
def _ConnectionStatusChanged_Handler(sender, args):
logger.debug("_ConnectionStatusChanged_Handler: " + args.ToString())
self._requester.ConnectionStatusChanged += _ConnectionStatusChanged_Handler
# Obtain services, which also leads to connection being established.
services = await self.get_services()
connected = False
if self._services_resolved:
# If services has been resolved, then we assume that we are connected. This is due to
# some issues with getting `is_connected` to give correct response here.
connected = True
else:
for _ in range(5):
await asyncio.sleep(0.2, loop=self.loop)
connected = await self.is_connected()
if connected:
break
if connected:
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
return connected
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def discover(
timeout: float = 5.0, loop: AbstractEventLoop = None, **kwargs
) -> List[BLEDevice]:
"""Perform a Bluetooth LE Scan using Windows.Devices.Bluetooth.Advertisement
Args:
timeout (float): Time to scan for.
loop (Event Loop): The event loop to use.
Keyword Args:
string_output (bool): If set to false, ``discover`` returns .NET
device objects instead.
Returns:
List of strings or objects found.
"""
loop = loop if loop else asyncio.get_event_loop()
watcher = BluetoothLEAdvertisementWatcher()
devices = {}
scan_responses = {}
def _format_bdaddr(a):
return ":".join("{:02X}".format(x) for x in a.to_bytes(6, byteorder="big"))
def _format_event_args(e):
try:
return "{0}: {1}".format(
_format_bdaddr(e.BluetoothAddress),
e.Advertisement.LocalName or "Unknown",
)
except Exception:
return e.BluetoothAddress
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.AdvertisementType == BluetoothLEAdvertisementType.ScanResponse:
if e.BluetoothAddress not in scan_responses:
scan_responses[e.BluetoothAddress] = e
else:
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
def AdvertisementWatcher_Stopped(sender, e):
if sender == watcher:
logger.debug(
"{0} devices found. Watcher status: {1}.".format(
len(devices), watcher.Status
)
)
watcher.Received += AdvertisementWatcher_Received
watcher.Stopped += AdvertisementWatcher_Stopped
watcher.ScanningMode = BluetoothLEScanningMode.Active
# Watcher works outside of the Python process.
watcher.Start()
await asyncio.sleep(timeout, loop=loop)
watcher.Stop()
try:
watcher.Received -= AdvertisementWatcher_Received
watcher.Stopped -= AdvertisementWatcher_Stopped
except Exception as e:
logger.debug("Could not remove event handlers: {0}...".format(e))
found = []
for d in devices.values():
bdaddr = _format_bdaddr(d.BluetoothAddress)
uuids = []
for u in d.Advertisement.ServiceUuids:
uuids.append(u.ToString())
data = {}
for m in d.Advertisement.ManufacturerData:
md = IBuffer(m.Data)
b = Array.CreateInstance(Byte, md.Length)
reader = DataReader.FromBuffer(md)
reader.ReadBytes(b)
data[m.CompanyId] = bytes(b)
local_name = d.Advertisement.LocalName
if not local_name and d.BluetoothAddress in scan_responses:
local_name = scan_responses[d.BluetoothAddress].Advertisement.LocalName
found.append(
BLEDevice(
bdaddr,
local_name,
d,
uuids=uuids,
manufacturer_data=data,
)
)
return found
|
async def discover(
timeout: float = 5.0, loop: AbstractEventLoop = None, **kwargs
) -> List[BLEDevice]:
"""Perform a Bluetooth LE Scan using Windows.Devices.Bluetooth.Advertisement
Args:
timeout (float): Time to scan for.
loop (Event Loop): The event loop to use.
Keyword Args:
string_output (bool): If set to false, ``discover`` returns .NET
device objects instead.
Returns:
List of strings or objects found.
"""
loop = loop if loop else asyncio.get_event_loop()
watcher = BluetoothLEAdvertisementWatcher()
devices = {}
def _format_bdaddr(a):
return ":".join("{:02X}".format(x) for x in a.to_bytes(6, byteorder="big"))
def _format_event_args(e):
try:
return "{0}: {1}".format(
_format_bdaddr(e.BluetoothAddress),
e.Advertisement.LocalName or "Unknown",
)
except Exception:
return e.BluetoothAddress
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
def AdvertisementWatcher_Stopped(sender, e):
if sender == watcher:
logger.debug(
"{0} devices found. Watcher status: {1}.".format(
len(devices), watcher.Status
)
)
watcher.Received += AdvertisementWatcher_Received
watcher.Stopped += AdvertisementWatcher_Stopped
# Watcher works outside of the Python process.
watcher.Start()
await asyncio.sleep(timeout, loop=loop)
watcher.Stop()
try:
watcher.Received -= AdvertisementWatcher_Received
watcher.Stopped -= AdvertisementWatcher_Stopped
except Exception as e:
logger.debug("Could not remove event handlers: {0}...".format(e))
found = []
for d in devices.values():
bdaddr = _format_bdaddr(d.BluetoothAddress)
uuids = []
for u in d.Advertisement.ServiceUuids:
uuids.append(u.ToString())
data = {}
for m in d.Advertisement.ManufacturerData:
md = IBuffer(m.Data)
b = Array.CreateInstance(Byte, md.Length)
reader = DataReader.FromBuffer(md)
reader.ReadBytes(b)
data[m.CompanyId] = bytes(b)
found.append(
BLEDevice(
bdaddr,
d.Advertisement.LocalName,
d,
uuids=uuids,
manufacturer_data=data,
)
)
return found
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.AdvertisementType == BluetoothLEAdvertisementType.ScanResponse:
if e.BluetoothAddress not in scan_responses:
scan_responses[e.BluetoothAddress] = e
else:
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
|
def AdvertisementWatcher_Received(sender, e):
if sender == watcher:
logger.debug("Received {0}.".format(_format_event_args(e)))
if e.BluetoothAddress not in devices:
devices[e.BluetoothAddress] = e
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def run(address, loop, debug=False):
if debug:
import sys
# loop.set_debug(True)
# l = logging.getLogger("asyncio")
# l.setLevel(logging.DEBUG)
# h = logging.StreamHandler(sys.stdout)
# h.setLevel(logging.DEBUG)
# l.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
logger.info("Connected: {0}".format(x))
system_id = await client.read_gatt_char(SYSTEM_ID_UUID)
print(
"System ID: {0}".format(
":".join(["{:02x}".format(x) for x in system_id[::-1]])
)
)
model_number = await client.read_gatt_char(MODEL_NBR_UUID)
print("Model Number: {0}".format("".join(map(chr, model_number))))
device_name = await client.read_gatt_char(DEVICE_NAME_UUID)
print("Device Name: {0}".format("".join(map(chr, device_name))))
manufacturer_name = await client.read_gatt_char(MANUFACTURER_NAME_UUID)
print("Manufacturer Name: {0}".format("".join(map(chr, manufacturer_name))))
firmware_revision = await client.read_gatt_char(FIRMWARE_REV_UUID)
print("Firmware Revision: {0}".format("".join(map(chr, firmware_revision))))
hardware_revision = await client.read_gatt_char(HARDWARE_REV_UUID)
print("Hardware Revision: {0}".format("".join(map(chr, hardware_revision))))
software_revision = await client.read_gatt_char(SOFTWARE_REV_UUID)
print("Software Revision: {0}".format("".join(map(chr, software_revision))))
battery_level = await client.read_gatt_char(BATTERY_LEVEL_UUID)
print("Battery Level: {0}%".format(int(battery_level[0])))
def keypress_handler(sender, data):
print("{0}: {1}".format(sender, data))
write_value = bytearray([0xA0])
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Pre-Write Value: {0}".format(value))
await client.write_gatt_char(IO_DATA_CHAR_UUID, write_value)
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Post-Write Value: {0}".format(value))
assert value == write_value
await client.start_notify(KEY_PRESS_UUID, keypress_handler)
await asyncio.sleep(5.0, loop=loop)
await client.stop_notify(KEY_PRESS_UUID)
|
async def run(address, loop, debug=False):
if debug:
import sys
loop.set_debug(True)
l = logging.getLogger("asyncio")
l.setLevel(logging.DEBUG)
h = logging.StreamHandler(sys.stdout)
h.setLevel(logging.DEBUG)
l.addHandler(h)
async with BleakClient(address, loop=loop) as client:
x = await client.is_connected()
logger.info("Connected: {0}".format(x))
system_id = await client.read_gatt_char(SYSTEM_ID_UUID)
print(
"System ID: {0}".format(
":".join(["{:02x}".format(x) for x in system_id[::-1]])
)
)
model_number = await client.read_gatt_char(MODEL_NBR_UUID)
print("Model Number: {0}".format("".join(map(chr, model_number))))
manufacturer_name = await client.read_gatt_char(MANUFACTURER_NAME_UUID)
print("Manufacturer Name: {0}".format("".join(map(chr, manufacturer_name))))
firmware_revision = await client.read_gatt_char(FIRMWARE_REV_UUID)
print("Firmware Revision: {0}".format("".join(map(chr, firmware_revision))))
hardware_revision = await client.read_gatt_char(HARDWARE_REV_UUID)
print("Hardware Revision: {0}".format("".join(map(chr, hardware_revision))))
software_revision = await client.read_gatt_char(SOFTWARE_REV_UUID)
print("Software Revision: {0}".format("".join(map(chr, software_revision))))
battery_level = await client.read_gatt_char(BATTERY_LEVEL_UUID)
print("Battery Level: {0}%".format(int(battery_level[0])))
def keypress_handler(sender, data):
print("{0}: {1}".format(sender, data))
write_value = bytearray([0xA0])
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Pre-Write Value: {0}".format(value))
await client.write_gatt_char(IO_DATA_CHAR_UUID, write_value)
value = await client.read_gatt_char(IO_DATA_CHAR_UUID)
print("I/O Data Post-Write Value: {0}".format(value))
assert value == write_value
await client.start_notify(KEY_PRESS_UUID, keypress_handler)
await asyncio.sleep(5.0, loop=loop)
await client.stop_notify(KEY_PRESS_UUID)
|
https://github.com/hbldh/bleak/issues/101
|
/Users/zaytsev/PycharmProjects/bluetooth/venv/bin/python /Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Bluetooth powered on
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Discovered device A0E49DB2-B7F1-4A65-AB2E-D75121192329: Unknown @ RSSI: -79
A0E49DB2-B7F1-4A65-AB2E-D75121192329: HUAWEI Band 2-b6e
DEBUG:bleak.backends.corebluetooth.client:Connecting to BLE device @ A0E49DB2-B7F1-4A65-AB2E-D75121192329
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Successfully connected to device uuid A0E49DB2-B7F1-4A65-AB2E-D75121192329
WARNING:bleak.backends.corebluetooth.PeripheralDelegate:PeripheralDelegate is not compliant
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Services discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A19
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A19
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180A
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A29
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A29
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A24
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A24
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A26
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A26
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A28
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A28
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A50
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A50
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180D
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A37
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A37
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic 2A38
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered 2A38
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service FE86
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Characteristics discovered
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE01
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE01
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE02
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE02
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE03
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE03
DEBUG:bleak.backends.corebluetooth.client:Retrieving descriptors for characteristic FE04
DEBUG:bleak.backends.corebluetooth.PeripheralDelegate:Descriptor discovered FE04
DEBUG:bleak.backends.corebluetooth.client:Retrieving services...
DEBUG:bleak.backends.corebluetooth.client:Retrieving characteristics for service 180F
DEBUG:bleak.backends.corebluetooth.CentralManagerDelegate:Peripheral Device disconnected!
Traceback (most recent call last):
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 25, in <module>
loop.run_until_complete(ble_get_services(address, loop))
File "/usr/local/Cellar/python/3.7.4/Frameworks/Python.framework/Versions/3.7/lib/python3.7/asyncio/base_events.py", line 579, in run_until_complete
return future.result()
File "/Users/zaytsev/PycharmProjects/bluetooth/bleak_demo.py", line 18, in ble_get_services
svcs = await client.get_services()
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/corebluetooth/client.py", line 118, in get_services
self.services.add_service(BleakGATTServiceCoreBluetooth(service))
File "/Users/zaytsev/PycharmProjects/bluetooth/venv/lib/python3.7/site-packages/bleak/backends/service.py", line 101, in add_service
"This service is already present in this BleakGATTServiceCollection!"
bleak.exc.BleakError: This service is already present in this BleakGATTServiceCollection!
|
bleak.exc.BleakError
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
cached_devices = {}
devices = {}
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
elif (
message.member == "InterfacesRemoved"
and message.body[1][0] == defs.BATTERY_INTERFACE
):
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
return
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
bus = await client.connect(reactor, "system").asFuture(loop)
# Add signal listeners
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
# Find the HCI device to use for scanning and get cached device properties
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
cached_devices = dict(_filter_on_device(objects))
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"SetDiscoveryFilter",
interface="org.bluez.Adapter1",
destination="org.bluez",
signature="a{sv}",
body=[{"Transport": "le"}],
).asFuture(loop)
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
if not props:
logger.debug(
"Disregarding %s since no properties could be obtained." % path
)
continue
name, address, _, path = _device_info(path, props)
uuids = props.get("UUIDs", [])
manufacturer_data = props.get("ManufacturerData", {})
discovered_devices.append(
BLEDevice(
address,
name,
{"path": path, "props": props},
uuids=uuids,
manufacturer_data=manufacturer_data,
)
)
bus.disconnect()
return discovered_devices
|
async def discover(timeout=5.0, loop=None, **kwargs):
"""Discover nearby Bluetooth Low Energy devices.
Args:
timeout (float): Duration to scan for.
loop (asyncio.AbstractEventLoop): Optional event loop to use.
Keyword Args:
device (str): Bluetooth device to use for discovery.
Returns:
List of tuples containing name, address and signal strength
of nearby devices.
"""
device = kwargs.get("device", "hci0")
loop = loop if loop else asyncio.get_event_loop()
cached_devices = {}
devices = {}
def parse_msg(message):
if message.member == "InterfacesAdded":
msg_path = message.body[0]
try:
device_interface = message.body[1].get("org.bluez.Device1", {})
except Exception as e:
raise e
devices[msg_path] = (
{**devices[msg_path], **device_interface}
if msg_path in devices
else device_interface
)
elif message.member == "PropertiesChanged":
iface, changed, invalidated = message.body
if iface != defs.DEVICE_INTERFACE:
return
msg_path = message.path
# the PropertiesChanged signal only sends changed properties, so we
# need to get remaining properties from cached_devices. However, we
# don't want to add all cached_devices to the devices dict since
# they may not actually be nearby or powered on.
if msg_path not in devices and msg_path in cached_devices:
devices[msg_path] = cached_devices[msg_path]
devices[msg_path] = (
{**devices[msg_path], **changed} if msg_path in devices else changed
)
elif (
message.member == "InterfacesRemoved"
and message.body[1][0] == defs.BATTERY_INTERFACE
):
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
return
else:
msg_path = message.path
logger.info(
"{0}, {1} ({2}): {3}".format(
message.member, message.interface, message.path, message.body
)
)
logger.info(
"{0}, {1} ({2} dBm), Object Path: {3}".format(
*_device_info(msg_path, devices.get(msg_path))
)
)
bus = await client.connect(reactor, "system").asFuture(loop)
# Add signal listeners
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesAdded",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.ObjectManager",
member="InterfacesRemoved",
).asFuture(loop)
await bus.addMatch(
parse_msg,
interface="org.freedesktop.DBus.Properties",
member="PropertiesChanged",
).asFuture(loop)
# Find the HCI device to use for scanning and get cached device properties
objects = await bus.callRemote(
"/",
"GetManagedObjects",
interface=defs.OBJECT_MANAGER_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(loop)
adapter_path, interface = _filter_on_adapter(objects, device)
cached_devices = dict(_filter_on_device(objects))
# dd = {'objectPath': '/org/bluez/hci0', 'methodName': 'StartDiscovery',
# 'interface': 'org.bluez.Adapter1', 'destination': 'org.bluez',
# 'signature': '', 'body': (), 'expectReply': True, 'autoStart': True,
# 'timeout': None, 'returnSignature': ''}
# Running Discovery loop.
await bus.callRemote(
adapter_path,
"SetDiscoveryFilter",
interface="org.bluez.Adapter1",
destination="org.bluez",
signature="a{sv}",
body=[{"Transport": "le"}],
).asFuture(loop)
await bus.callRemote(
adapter_path,
"StartDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
await asyncio.sleep(timeout)
await bus.callRemote(
adapter_path,
"StopDiscovery",
interface="org.bluez.Adapter1",
destination="org.bluez",
).asFuture(loop)
# Reduce output.
# out = []
# for path, props in devices.items():
# properties = await cli.callRemote(
# path, 'GetAll',
# interface=defs.PROPERTIES_INTERFACE,
# destination=defs.BLUEZ_SERVICE,
# signature='s',
# body=[defs.DEVICE_INTERFACE, ],
# returnSignature='a{sv}').asFuture(loop)
# print(properties)
#
discovered_devices = []
for path, props in devices.items():
if not props:
logger.debug(
"Disregarding %s since no properties could be obtained." % path
)
continue
name, address, _, path = _device_info(path, props)
uuids = props.get("UUIDs", [])
manufacturer_data = props.get("ManufacturerData", {})
discovered_devices.append(
BLEDevice(
address,
name,
{"path": path, "props": props},
uuids=uuids,
manufacturer_data=manufacturer_data,
)
)
return discovered_devices
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
def __init__(self, address, loop=None, **kwargs):
super(BleakClientBlueZDBus, self).__init__(address, loop, **kwargs)
self.device = kwargs.get("device") if kwargs.get("device") else "hci0"
self.address = address
# Backend specific, TXDBus objects and data
self._device_path = None
self._bus = None
self._rules = {}
self._subscriptions = list()
self._disconnected_callback = None
self._char_path_to_uuid = {}
# We need to know BlueZ version since battery level characteristic
# are stored in a separate DBus interface in the BlueZ >= 5.48.
p = subprocess.Popen(["bluetoothctl", "--version"], stdout=subprocess.PIPE)
out, _ = p.communicate()
s = re.search(b"(\\d+).(\\d+)", out.strip(b"'"))
self._bluez_version = tuple(map(int, s.groups()))
|
def __init__(self, address, loop=None, **kwargs):
super(BleakClientBlueZDBus, self).__init__(address, loop, **kwargs)
self.device = kwargs.get("device") if kwargs.get("device") else "hci0"
self.address = address
# Backend specific, TXDBus objects and data
self._device_path = None
self._bus = None
self._rules = {}
self._char_path_to_uuid = {}
# We need to know BlueZ version since battery level characteristic
# are stored in a separate DBus interface in the BlueZ >= 5.48.
p = subprocess.Popen(["bluetoothctl", "--version"], stdout=subprocess.PIPE)
out, _ = p.communicate()
s = re.search(b"(\\d+).(\\d+)", out.strip(b"'"))
self._bluez_version = tuple(map(int, s.groups()))
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 0.1.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(
timeout=kwargs.get("timeout", 0.1), device=self.device, loop=self.loop
)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
async def connect(self, **kwargs) -> bool:
"""Connect to the specified GATT server.
Keyword Args:
timeout (float): Timeout for required ``discover`` call. Defaults to 0.1.
Returns:
Boolean representing connection status.
"""
# A Discover must have been run before connecting to any devices. Do a quick one here
# to ensure that it has been done.
await discover(
timeout=kwargs.get("timeout", 0.1), device=self.device, loop=self.loop
)
# Create system bus
self._bus = await txdbus_connect(reactor, busAddress="system").asFuture(self.loop)
# TODO: Handle path errors from txdbus/dbus
self._device_path = get_device_object_path(self.device, self.address)
def _services_resolved_callback(message):
iface, changed, invalidated = message.body
is_resolved = defs.DEVICE_INTERFACE and changed.get("ServicesResolved", False)
if iface == is_resolved:
logger.info("Services resolved.")
self.services_resolved = True
rule_id = await signals.listen_properties_changed(
self._bus, self.loop, _services_resolved_callback
)
logger.debug(
"Connecting to BLE device @ {0} with {1}".format(self.address, self.device)
)
try:
await self._bus.callRemote(
self._device_path,
"Connect",
interface="org.bluez.Device1",
destination="org.bluez",
).asFuture(self.loop)
except RemoteError as e:
raise BleakError(str(e))
if await self.is_connected():
logger.debug("Connection successful.")
else:
raise BleakError("Connection to {0} was not successful!".format(self.address))
# Get all services. This means making the actual connection.
await self.get_services()
properties = await self._get_device_properties()
if not properties.get("Connected"):
raise BleakError("Connection failed!")
await self._bus.delMatch(rule_id).asFuture(self.loop)
self._rules["PropChanged"] = await signals.listen_properties_changed(
self._bus, self.loop, self._properties_changed_callback
)
return True
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def disconnect(self) -> bool:
"""Disconnect from the specified GATT server.
Returns:
Boolean representing connection status.
"""
logger.debug("Disconnecting from BLE device...")
await self._cleanup()
await self._bus.callRemote(
self._device_path,
"Disconnect",
interface=defs.DEVICE_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(self.loop)
return not await self.is_connected()
|
async def disconnect(self) -> bool:
"""Disconnect from the specified GATT server.
Returns:
Boolean representing connection status.
"""
logger.debug("Disconnecting from BLE device...")
for rule_name, rule_id in self._rules.items():
logger.debug("Removing rule {0}, ID: {1}".format(rule_name, rule_id))
await self._bus.delMatch(rule_id).asFuture(self.loop)
await self._bus.callRemote(
self._device_path,
"Disconnect",
interface=defs.DEVICE_INTERFACE,
destination=defs.BLUEZ_SERVICE,
).asFuture(self.loop)
return not await self.is_connected()
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> None:
"""Perform a write operation on the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to write to.
data (bytes or bytearray): The data to send.
response (bool): If write-with-response operation should be done. Defaults to `False`.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if (
"write" not in characteristic.properties
and "write-without-response" not in characteristic.properties
):
raise BleakError(
"Characteristic %s does not support write operations!" % str(_uuid)
)
if not response and "write-without-response" not in characteristic.properties:
response = True
# Force response here, since the device only supports that.
if (
response
and "write" not in characteristic.properties
and "write-without-response" in characteristic.properties
):
response = False
logger.warning(
"Characteristic %s does not support Write with response. Trying without..."
% str(_uuid)
)
if response or (self._bluez_version[0] == 5 and self._bluez_version[1] > 50):
# TODO: Add OnValueUpdated handler for response=True?
await self._bus.callRemote(
characteristic.path,
"WriteValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="aya{sv}",
body=[data, {"type": "request" if response else "command"}],
returnSignature="",
).asFuture(self.loop)
else:
# Older versions of BlueZ don't have the "type" option, so we have
# to write the hard way. This isn't the most efficient way of doing
# things, but it works. Also, watch out for txdbus bug that causes
# returned fd to be None. https://github.com/cocagne/txdbus/pull/81
fd, _ = await self._bus.callRemote(
characteristic.path,
"AcquireWrite",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="hq",
).asFuture(self.loop)
os.write(fd, data)
os.close(fd)
logger.debug(
"Write Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, data)
)
|
async def write_gatt_char(
self, _uuid: str, data: bytearray, response: bool = False
) -> None:
"""Perform a write operation on the specified GATT characteristic.
Args:
_uuid (str or UUID): The uuid of the characteristics to write to.
data (bytes or bytearray): The data to send.
response (bool): If write-with-response operation should be done. Defaults to `False`.
"""
characteristic = self.services.get_characteristic(str(_uuid))
if (
"write" not in characteristic.properties
and "write-without-response" not in characteristic.properties
):
raise BleakError(
"Characteristic %s does not support write operations!" % str(_uuid)
)
if not response and "write-without-response" not in characteristic.properties:
response = True
# Force response here, since the device only supports that.
if (
response
and "write" not in characteristic.properties
and "write-without-response" in characteristic.properties
):
response = False
logger.warning(
"Characteristic %s does not support Write with response. Trying without..."
% str(_uuid)
)
if response or (self._bluez_version[0] == 5 and self._bluez_version[1] > 50):
# TODO: Add OnValueUpdated handler for response=True?
await self._bus.callRemote(
characteristic.path,
"WriteValue",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="aya{sv}",
body=[data, {"type": "request" if response else "command"}],
returnSignature="",
).asFuture(self.loop)
else:
# Older versions of BlueZ don't have the "type" option, so we have
# to write the hard way. This isn't the most efficient way of doing
# things, but it works. Also, watch out for txdbus bug that causes
# returned fd to be None. https://github.com/cocagne/txdbus/pull/81
fd, _ = await self._bus.callRemote(
characteristic.path,
"AcquireWrite",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="a{sv}",
body=[{}],
returnSignature="hq",
).asFuture(self.loop)
os.write(fd, data)
os.close(fd)
logger.debug(
"Write Characteristic {0} | {1}: {2}".format(_uuid, characteristic.path, data)
)
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Activate notifications/indications on a characteristic.
Callbacks must accept two inputs. The first will be a uuid string
object and the second will be a bytearray.
.. code-block:: python
def callback(sender, data):
print(f"{sender}: {data}")
client.start_notify(char_uuid, callback)
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
callback (function): The function to be called on notification.
Keyword Args:
notification_wrapper (bool): Set to `False` to avoid parsing of
notification to bytearray.
"""
_wrap = kwargs.get("notification_wrapper", True)
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
# Special handling for BlueZ >= 5.48, where Battery Service (0000180f-0000-1000-8000-00805f9b34fb:)
# has been moved to interface org.bluez.Battery1 instead of as a regular service.
# The org.bluez.Battery1 on the other hand does not provide a notification method, so here we cannot
# provide this functionality...
# See https://kernel.googlesource.com/pub/scm/bluetooth/bluez/+/refs/tags/5.48/doc/battery-api.txt
if _uuid == "00002a19-0000-1000-8000-00805f9b34fb" and (
self._bluez_version[0] == 5 and self._bluez_version[1] >= 48
):
raise BleakError(
"Notifications on Battery Level Char ({0}) is not "
"possible in BlueZ >= 5.48. Use regular read instead.".format(_uuid)
)
raise BleakError(
"Characteristic with UUID {0} could not be found!".format(_uuid)
)
await self._bus.callRemote(
characteristic.path,
"StartNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
if _wrap:
self._notification_callbacks[characteristic.path] = _data_notification_wrapper(
callback, self._char_path_to_uuid
) # noqa | E123 error in flake8...
else:
self._notification_callbacks[characteristic.path] = (
_regular_notification_wrapper(callback, self._char_path_to_uuid)
) # noqa | E123 error in flake8...
self._subscriptions.append(_uuid)
|
async def start_notify(
self, _uuid: str, callback: Callable[[str, Any], Any], **kwargs
) -> None:
"""Activate notifications/indications on a characteristic.
Callbacks must accept two inputs. The first will be a uuid string
object and the second will be a bytearray.
.. code-block:: python
def callback(sender, data):
print(f"{sender}: {data}")
client.start_notify(char_uuid, callback)
Args:
_uuid (str or UUID): The uuid of the characteristics to start notification on.
callback (function): The function to be called on notification.
Keyword Args:
notification_wrapper (bool): Set to `False` to avoid parsing of
notification to bytearray.
"""
_wrap = kwargs.get("notification_wrapper", True)
characteristic = self.services.get_characteristic(str(_uuid))
if not characteristic:
# Special handling for BlueZ >= 5.48, where Battery Service (0000180f-0000-1000-8000-00805f9b34fb:)
# has been moved to interface org.bluez.Battery1 instead of as a regular service.
# The org.bluez.Battery1 on the other hand does not provide a notification method, so here we cannot
# provide this functionality...
# See https://kernel.googlesource.com/pub/scm/bluetooth/bluez/+/refs/tags/5.48/doc/battery-api.txt
if _uuid == "00002a19-0000-1000-8000-00805f9b34fb" and (
self._bluez_version[0] == 5 and self._bluez_version[1] >= 48
):
raise BleakError(
"Notifications on Battery Level Char ({0}) is not "
"possible in BlueZ >= 5.48. Use regular read instead.".format(_uuid)
)
raise BleakError(
"Characteristic with UUID {0} could not be found!".format(_uuid)
)
await self._bus.callRemote(
characteristic.path,
"StartNotify",
interface=defs.GATT_CHARACTERISTIC_INTERFACE,
destination=defs.BLUEZ_SERVICE,
signature="",
body=[],
returnSignature="",
).asFuture(self.loop)
if _wrap:
self._notification_callbacks[characteristic.path] = _data_notification_wrapper(
callback, self._char_path_to_uuid
) # noqa | E123 error in flake8...
else:
self._notification_callbacks[characteristic.path] = (
_regular_notification_wrapper(callback, self._char_path_to_uuid)
) # noqa | E123 error in flake8...
|
https://github.com/hbldh/bleak/issues/91
|
Traceback (most recent call last):
File "discover.py", line 29, in <module>
loop.run_until_complete(run())
File "/usr/lib64/python3.7/asyncio/base_events.py", line 584, in run_until_complete
return future.result()
File "discover.py", line 22, in run
devices = await discover(timeout=1)
File "/home/joe/code/proglove/python/bleak/bleak/backends/bluezdbus/discovery.py", line 125, in discover
bus = await client.connect(reactor, "system").asFuture(loop)
txdbus.error.RemoteError: org.freedesktop.DBus.Error.LimitsExceeded: The maximum number of active connections for UID 1000 has been reached
|
txdbus.error.RemoteError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.